Clean up some GC logic and fix a bug where large object allocation could grow the heap unnecessarily

db4
Slava Pestov 2011-09-05 16:21:09 -07:00
parent b13eb201bd
commit 83b8717bad
6 changed files with 44 additions and 17 deletions

View File

@@ -14,10 +14,8 @@ void factor_vm::collect_aging()
 /* Promote objects referenced from tenured space to tenured space, copy
 everything else to the aging semi-space, and reset the nursery pointer. */
 {
-	/* Change the op so that if we fail here, we proceed to a full
-	tenured collection. We are collecting to tenured space, and
-	cards were unmarked, so we can't proceed with a to_tenured
-	collection. */
+	/* Change the op so that if we fail here, an assertion will be
+	raised. */
 	current_gc->op = collect_to_tenured_op;
 	to_tenured_collector collector(this);

View File

@@ -330,6 +330,14 @@ void factor_vm::collect_compact(bool trace_contexts_p)
 {
 	collect_mark_impl(trace_contexts_p);
 	collect_compact_impl(trace_contexts_p);
+
+	if(data->high_fragmentation_p())
+	{
+		/* Compaction did not free up enough memory. Grow the heap. */
+		set_current_gc_op(collect_growing_heap_op);
+		collect_growing_heap(0,trace_contexts_p);
+	}
+
 	code->flush_icache();
 }

View File

@@ -100,12 +100,12 @@ void data_heap::reset_generation(tenured_space *gen)
 bool data_heap::high_fragmentation_p()
 {
-	return (tenured->largest_free_block() <= nursery->size + aging->size);
+	return (tenured->largest_free_block() <= high_water_mark());
 }
 
 bool data_heap::low_memory_p()
 {
-	return (tenured->free_space() <= nursery->size + aging->size);
+	return (tenured->free_space() <= high_water_mark());
 }
 
 void data_heap::mark_all_cards()
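
Both predicates now compare against a single high_water_mark() reserve. A minimal, self-contained sketch with made-up sizes (toy_heap and all numbers here are hypothetical, not the VM's) showing how tenured space can have ample free memory overall yet still count as fragmented:

#include <cstdio>

typedef unsigned long cell;

/* Toy model of the data_heap fields the two predicates read. */
struct toy_heap {
	cell nursery_size, aging_size; /* young generation sizes */
	cell tenured_free;             /* total free bytes in tenured */
	cell largest_free_block;       /* biggest contiguous hole in tenured */

	cell high_water_mark() { return nursery_size + aging_size; }

	/* No single block can absorb a full promotion of nursery + aging. */
	bool high_fragmentation_p() { return largest_free_block <= high_water_mark(); }

	/* Not enough free space overall for a full promotion. */
	bool low_memory_p() { return tenured_free <= high_water_mark(); }
};

int main()
{
	const cell MB = 1024 * 1024;
	/* 2 MB nursery + 8 MB aging semispace: high_water_mark() = 10 MB.
	   30 MB free in tenured, but the largest hole is only 6 MB. */
	toy_heap h = { 2 * MB, 8 * MB, 30 * MB, 6 * MB };
	std::printf("low_memory_p: %d\n", (int)h.low_memory_p());                 /* 0 */
	std::printf("high_fragmentation_p: %d\n", (int)h.high_fragmentation_p()); /* 1 */
	return 0;
}

With 30 MB free but only a 6 MB hole, low_memory_p() is false while high_fragmentation_p() is true, which is exactly the case where compaction rather than heap growth is the right response.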

View File

@@ -32,6 +32,9 @@ struct data_heap {
 	bool high_fragmentation_p();
 	bool low_memory_p();
 	void mark_all_cards();
+	cell high_water_mark() {
+		return nursery->size + aging->size;
+	}
 };
 
 struct data_heap_room {

View File

@@ -112,11 +112,14 @@ void factor_vm::collect_full(bool trace_contexts_p)
 	if(data->low_memory_p())
 	{
+		/* Full GC did not free up enough memory. Grow the heap. */
 		set_current_gc_op(collect_growing_heap_op);
 		collect_growing_heap(0,trace_contexts_p);
 	}
 	else if(data->high_fragmentation_p())
 	{
+		/* Enough free memory, but it is not contiguous. Perform a
+		compaction. */
 		set_current_gc_op(collect_compact_op);
 		collect_compact_impl(trace_contexts_p);
 	}
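
The ordering above matters: total free space is checked before fragmentation, so compaction is only chosen when enough memory exists and it is merely discontiguous. A sketch of that decision in isolation (gc_action and after_full_gc are hypothetical names, with the two predicates passed in as booleans rather than read from the real data_heap):

/* Possible outcomes after a full, non-compacting collection. */
enum gc_action { gc_done, gc_compact, gc_grow_heap };

gc_action after_full_gc(bool low_memory_p, bool high_fragmentation_p)
{
	if(low_memory_p)
		return gc_grow_heap;  /* too little free space in total */
	else if(high_fragmentation_p)
		return gc_compact;    /* enough free space, but not contiguous */
	return gc_done;
}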

View File

@@ -116,19 +116,19 @@ void factor_vm::start_gc_again()
 	switch(current_gc->op)
 	{
 	case collect_nursery_op:
+		/* Nursery collection can fail if aging does not have enough
+		free space to fit all live objects from nursery. */
 		current_gc->op = collect_aging_op;
 		break;
 	case collect_aging_op:
+		/* Aging collection can fail if the aging semispace cannot fit
+		all the live objects from the other aging semispace and the
+		nursery. */
 		current_gc->op = collect_to_tenured_op;
 		break;
 	case collect_to_tenured_op:
 		current_gc->op = collect_full_op;
 		break;
 	case collect_full_op:
 	case collect_compact_op:
 		current_gc->op = collect_growing_heap_op;
 		break;
 	default:
+		/* Nothing else should fail mid-collection due to insufficient
+		space in the target generation. */
 		critical_error("Bad GC op",current_gc->op);
 		break;
 	}
@@ -148,10 +148,16 @@ void factor_vm::gc(gc_op op, cell requested_bytes, bool trace_contexts_p)
 	assert(!gc_off);
 	assert(!current_gc);
 
+	/* Important invariant: tenured space must have enough contiguous free
+	space to fit the entire contents of the aging space and nursery. This is
+	because when doing a full collection, objects from younger generations
+	are promoted before any unreachable tenured objects are freed. */
+	assert(!data->high_fragmentation_p());
+
 	current_gc = new gc_state(op,this);
 
-	/* Keep trying to GC higher and higher generations until we don't run out
-	of space */
+	/* Keep trying to GC higher and higher generations until we don't run
+	out of space in the target generation. */
 	for(;;)
 	{
 		try
@@ -164,17 +170,23 @@ void factor_vm::gc(gc_op op, cell requested_bytes, bool trace_contexts_p)
 				collect_nursery();
 				break;
 			case collect_aging_op:
+				/* We end up here if the above fails. */
 				collect_aging();
 				if(data->high_fragmentation_p())
 				{
+					/* Change GC op so that if we fail again,
+					we crash. */
 					set_current_gc_op(collect_full_op);
 					collect_full(trace_contexts_p);
 				}
 				break;
 			case collect_to_tenured_op:
+				/* We end up here if the above fails. */
 				collect_to_tenured();
 				if(data->high_fragmentation_p())
 				{
+					/* Change GC op so that if we fail again,
+					we crash. */
 					set_current_gc_op(collect_full_op);
 					collect_full(trace_contexts_p);
 				}
@@ -197,7 +209,7 @@ void factor_vm::gc(gc_op op, cell requested_bytes, bool trace_contexts_p)
 	}
 	catch(const must_start_gc_again &)
 	{
-		/* We come back here if a generation is full */
+		/* We come back here if the target generation is full. */
 		start_gc_again();
 		continue;
 	}
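
The catch clause above is the retry protocol in miniature: a collector that runs out of room in its target generation throws must_start_gc_again, start_gc_again() escalates the op, and the loop tries again. A self-contained sketch of that control flow, with a stand-in failure test in place of real allocation (next_op and the op < collect_full_op condition are illustrative only):

#include <cstdio>
#include <exception>

struct must_start_gc_again : std::exception {};

enum gc_op {
	collect_nursery_op,
	collect_aging_op,
	collect_to_tenured_op,
	collect_full_op,
	collect_growing_heap_op
};

/* Escalate to the next-larger collection, mirroring start_gc_again(). */
gc_op next_op(gc_op op)
{
	switch(op)
	{
	case collect_nursery_op:    return collect_aging_op;
	case collect_aging_op:      return collect_to_tenured_op;
	case collect_to_tenured_op: return collect_full_op;
	default:                    return collect_growing_heap_op;
	}
}

int main()
{
	gc_op op = collect_nursery_op;
	for(;;)
	{
		try
		{
			/* Stand-in failure: pretend everything below a full
			   collection runs out of room in its target generation. */
			if(op < collect_full_op)
				throw must_start_gc_again();
			std::printf("collection succeeded with op %d\n", (int)op);
			break;
		}
		catch(const must_start_gc_again &)
		{
			op = next_op(op); /* escalate and retry */
		}
	}
	return 0;
}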
@@ -207,6 +219,9 @@ void factor_vm::gc(gc_op op, cell requested_bytes, bool trace_contexts_p)
 	delete current_gc;
 	current_gc = NULL;
+
+	/* Check the invariant again, just in case. */
+	assert(!data->high_fragmentation_p());
 }
 
 /* primitive_minor_gc() is invoked by inline GC checks, and it needs to fill in
@@ -283,7 +298,7 @@ void factor_vm::primitive_compact_gc()
 object *factor_vm::allot_large_object(cell type, cell size)
 {
 	/* If tenured space does not have enough room, collect and compact */
-	if(!data->tenured->can_allot_p(size))
+	if(!data->tenured->can_allot_p(size + data->high_water_mark()))
 	{
 		primitive_compact_gc();
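
This one-line change is the bug fix named in the commit message: checking can_allot_p(size) alone lets a large allocation consume the contiguous space that the gc() invariant reserves for promoting the nursery and aging spaces, so a later collection would grow the heap even though compaction would have sufficed. A worked example with made-up sizes (10 MB reserve, 12 MB largest block, 5 MB object — all hypothetical):

#include <cstdio>

typedef unsigned long cell;

int main()
{
	const cell MB = 1024 * 1024;
	cell high_water_mark = 10 * MB;    /* nursery + aging sizes */
	cell largest_free_block = 12 * MB; /* biggest hole in tenured space */
	cell size = 5 * MB;                /* large object being allocated */

	/* Old check: the object fits, so no GC happens... */
	if(size <= largest_free_block)
		std::printf("old check passes; %lu MB of contiguous space left\n",
			(largest_free_block - size) / MB);
	/* ...but 7 MB < 10 MB, so the next collection would find tenured
	   space too fragmented to promote nursery + aging and grow the heap. */

	/* New check: demand room for the object plus the promotion reserve;
	   here it fails, so a compacting GC runs before allocating. */
	if(size + high_water_mark > largest_free_block)
		std::printf("new check fails; compact first instead of growing\n");

	return 0;
}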