vm: move compaction algorithm to mark_bits.hpp since it doesn't rely on properties of heaps per se

db4
Slava Pestov 2009-10-19 03:05:20 -05:00
parent ac25b8ebbb
commit 4ddd63d83e
7 changed files with 102 additions and 77 deletions

View File

@ -59,7 +59,7 @@ struct word_updater {
factor_vm *parent;
explicit word_updater(factor_vm *parent_) : parent(parent_) {}
void operator()(code_block *compiled)
void operator()(code_block *compiled, cell size)
{
parent->update_word_references(compiled);
}
@ -80,7 +80,7 @@ struct word_and_literal_code_heap_updater {
word_and_literal_code_heap_updater(factor_vm *parent_) : parent(parent_) {}
void operator()(heap_block *block)
void operator()(heap_block *block, cell size)
{
parent->update_code_block_words_and_literals((code_block *)block);
}
@ -99,16 +99,17 @@ struct code_heap_relocator {
code_heap_relocator(factor_vm *parent_) : parent(parent_) {}
void operator()(heap_block *block)
void operator()(code_block *block, cell size)
{
parent->relocate_code_block((code_block *)block);
parent->relocate_code_block(block);
}
};
/* Relocate every code block in the code heap after a GC.
NOTE(review): this span is a diff interleave — both the OLD call
(code->sweep_heap(relocator), passing the relocator directly) and the NEW
call (wrapping it in a code_heap_iterator first) appear below; only one of
the two sweep_heap calls belongs in the final source. */
void factor_vm::relocate_code_heap()
{
	code_heap_relocator relocator(this);
	code->sweep_heap(relocator);
	code_heap_iterator<code_heap_relocator> iter(relocator);
	code->sweep_heap(iter);
}
void factor_vm::primitive_modify_code_heap()
@ -275,7 +276,10 @@ on entry to this function. XTs in code blocks must be updated after this
function returns. */
void factor_vm::compact_code_heap(bool trace_contexts_p)
{
code->compact_heap();
/* Figure out where blocks are going to go */
code->state->compute_forwarding();
/* Update references to the code heap from the data heap */
forward_object_xts();
if(trace_contexts_p)
{
@ -283,14 +287,17 @@ void factor_vm::compact_code_heap(bool trace_contexts_p)
forward_callback_xts();
}
/* Move code blocks and update references amongst them (this requires
that the data heap is up to date since relocation looks up object XTs) */
code_heap_relocator relocator(this);
iterate_code_heap(relocator);
code_heap_iterator<code_heap_relocator> iter(relocator);
code->compact_heap(iter);
}
struct stack_trace_stripper {
explicit stack_trace_stripper() {}
void operator()(code_block *compiled)
void operator()(code_block *compiled, cell size)
{
compiled->owner = false_object;
}

View File

@ -189,35 +189,6 @@ cell heap::heap_size()
return (cell)scan - (cell)first_block();
}
/* Slide all marked (live) blocks down to the start of the heap, leaving a
single free block at the end. Dead (unmarked) blocks are simply overwritten. */
void heap::compact_heap()
{
	/* Build the forwarding table mapping each live block's old address to
	its post-compaction address */
	state->compute_forwarding();

	heap_block *scan = first_block();
	heap_block *end = last_block();
	char *address = (char *)scan;

	/* Slide blocks up while building the forwarding hashtable. */
	while(scan != end)
	{
		/* Fetch the successor before moving anything: the memmove below can
		overwrite scan's header once destinations catch up to sources */
		heap_block *next = scan->next();
		if(state->is_marked_p(scan))
		{
			cell size = scan->size();
			/* Source and destination may overlap, hence memmove; the copy is
			safe because blocks only ever move to lower addresses and are
			visited in ascending address order */
			memmove(address,scan,size);
			address += size;
		}
		scan = next;
	}

	/* Now update the free list; there will be a single free block at
	the end */
	build_free_list((cell)address - seg->start);
}
heap_block *heap::free_allocated(heap_block *prev, heap_block *scan)
{
if(secure_gc)

View File

@ -29,7 +29,6 @@ struct heap {
}
void clear_free_list();
void new_heap(cell size);
void add_to_free_list(free_heap_block *block);
void build_free_list(cell size);
void assert_free_block(free_heap_block *block);
@ -44,41 +43,69 @@ struct heap {
heap_block *free_allocated(heap_block *prev, heap_block *scan);
/* After code GC, all live code blocks are marked, so any
which are not marked can be reclaimed. */
template<typename Iterator> void sweep_heap(Iterator &iter)
template<typename Iterator> void sweep_heap(Iterator &iter);
template<typename Iterator> void compact_heap(Iterator &iter);
/* Apply `iter` to every allocated (non-free) block in the heap.
NOTE(review): this span is a diff interleave. The free-list bookkeeping
lines (clear_free_list, prev, add_to_free_list, free_allocated) are the
body of the OLD in-class sweep_heap, which this commit moves out of the
class (see the standalone heap::sweep_heap below); only the three lines
using `next` form the NEW iterate_heap body in the final source. */
template<typename Iterator> void iterate_heap(Iterator &iter)
{
	/* OLD sweep_heap body (removed from this method in this commit): */
	clear_free_list();
	heap_block *prev = NULL;
	heap_block *scan = first_block();
	heap_block *end = last_block();
	while(scan != end)
	{
		if(scan->type() == FREE_BLOCK_TYPE)
		{
			if(prev && prev->type() == FREE_BLOCK_TYPE)
				prev->set_size(prev->size() + scan->size());
			else
				prev = scan;
		}
		else if(state->is_marked_p(scan))
		{
			if(prev && prev->type() == FREE_BLOCK_TYPE)
				add_to_free_list((free_heap_block *)prev);
			prev = scan;
			iter(scan);
		}
		else
			prev = free_allocated(prev,scan);
		scan = scan->next();
		/* NEW iterate_heap body: read the successor first (iter may mutate
		the block), then visit every non-free block with its size */
		heap_block *next = scan->next();
		if(scan->type() != FREE_BLOCK_TYPE) iter(scan,scan->size());
		scan = next;
	}
	if(prev && prev->type() == FREE_BLOCK_TYPE)
		add_to_free_list((free_heap_block *)prev);
}
};
/* After code GC, all live code blocks are marked, so any
which are not marked can be reclaimed. Walks the heap once, coalescing
adjacent runs of free/dead blocks into single free-list entries and
invoking `iter` on each live block. `prev` tracks either the free block
currently being grown or the last live block seen. */
template<typename Iterator> void heap::sweep_heap(Iterator &iter)
{
	this->clear_free_list();

	heap_block *prev = NULL;
	heap_block *scan = this->first_block();
	heap_block *end = this->last_block();

	while(scan != end)
	{
		if(scan->type() == FREE_BLOCK_TYPE)
		{
			/* Merge this free block into a preceding free run rather than
			adding a separate free-list entry */
			if(prev && prev->type() == FREE_BLOCK_TYPE)
				prev->set_size(prev->size() + scan->size());
			else
				prev = scan;
		}
		else if(this->state->is_marked_p(scan))
		{
			/* A live block ends any pending free run: commit it now */
			if(prev && prev->type() == FREE_BLOCK_TYPE)
				this->add_to_free_list((free_heap_block *)prev);
			prev = scan;
			iter(scan,scan->size());
		}
		else
			/* Dead allocated block — presumably free_allocated reclaims it,
			possibly merging with prev; body not visible here, confirm */
			prev = this->free_allocated(prev,scan);
		scan = scan->next();
	}

	/* Commit a free run that extends to the end of the heap */
	if(prev && prev->type() == FREE_BLOCK_TYPE)
		this->add_to_free_list((free_heap_block *)prev);
}
/* The forwarding map must be computed first by calling
state->compute_forwarding(). */
template<typename Iterator> void heap::compact_heap(Iterator &iter)
{
heap_compacter<heap_block,block_size_increment,Iterator> compacter(state,first_block(),iter);
this->iterate_heap(compacter);
/* Now update the free list; there will be a single free block at
the end */
this->build_free_list((cell)compacter.address - this->seg->start);
}
}

View File

@ -225,7 +225,7 @@ struct code_block_fixupper {
code_block_fixupper(factor_vm *parent_, cell data_relocation_base_) :
parent(parent_), data_relocation_base(data_relocation_base_) { }
void operator()(code_block *compiled)
void operator()(code_block *compiled, cell size)
{
parent->fixup_code_block(compiled,data_relocation_base);
}

View File

@ -159,4 +159,23 @@ template<typename Block, int Granularity> struct mark_bits {
}
};
template<typename Block, int Granularity, typename Iterator> struct heap_compacter {
mark_bits<Block,Granularity> *state;
char *address;
Iterator &iter;
explicit heap_compacter(mark_bits<Block,Granularity> *state_, Block *address_, Iterator &iter_) :
state(state_), address((char *)address_), iter(iter_) {}
void operator()(Block *block, cell size)
{
if(this->state->is_marked_p(block))
{
memmove(address,block,size);
iter((Block *)address,size);
address += size;
}
}
};
}

View File

@ -283,7 +283,6 @@ void quotation_jit::iterate_quotation()
/* Install a compiled code block into a quotation and cache its entry
point (XT).
NOTE(review): this hunk removes exactly one of the lines below
(-283,7 +283,6); the text here is the pre-change version. */
void factor_vm::set_quot_xt(quotation *quot, code_block *code)
{
	/* Only code blocks compiled from quotations may be installed */
	assert(code->type() == QUOTATION_TYPE);
	quot->code = code;
	quot->xt = code->xt();
}

View File

@ -524,17 +524,19 @@ struct factor_vm
void primitive_strip_stack_traces();
/* Apply a function to every code block */
template<typename Iterator> void iterate_code_heap(Iterator &iter)
{
heap_block *scan = code->first_block();
heap_block *end = code->last_block();
while(scan != end)
/* Adapter handed to heap::iterate_heap: downcasts each heap_block to a
code_block before forwarding it (with its size) to the wrapped iterator.
NOTE(review): diff interleave — the three lines referencing `scan` are
remnants of the OLD iterate_code_heap loop being replaced in this commit
and do not belong inside this struct in the final source. */
template<typename Iterator> struct code_heap_iterator {
	Iterator &iter;
	explicit code_heap_iterator(Iterator &iter_) : iter(iter_) {}
	void operator()(heap_block *block, cell size)
	{
		/* OLD loop remnants: */
		if(scan->type() != FREE_BLOCK_TYPE)
			iter((code_block *)scan);
		scan = scan->next();
		/* NEW body: */
		iter((code_block *)block,size);
	}
};
template<typename Iterator> void iterate_code_heap(Iterator &iter_)
{
code_heap_iterator<Iterator> iter(iter_);
code->iterate_heap(iter);
}
//callbacks