vm: combine heap_block and code_block structs, eliminates some boilerplate

db4
Slava Pestov 2009-10-24 21:24:06 -05:00
parent 45a955b5bb
commit 62e718eaa9
8 changed files with 59 additions and 74 deletions
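
Before the per-file diffs, a condensed standalone sketch of where this commit lands: the old heap_block base class is gone, code_block carries the header word itself (block size in the upper bits, free flag in bit 0), and free_heap_block becomes an independent struct in free_list.hpp. The cell typedef and the main driver below are stand-ins added for illustration; the struct bodies mirror the hunks that follow.

#include <cassert>
#include <cstdint>

typedef uintptr_t cell; /* illustrative stand-in for Factor's cell type */

/* Every code-heap block is now a code_block; the header word packs the
   block size (header >> 3) and a free flag in the lowest bit. */
struct code_block
{
    cell header;
    cell owner;      /* tagged pointer to word, quotation or f */
    cell literals;   /* tagged pointer to array or f */
    cell relocation; /* tagged pointer to byte-array or f */

    bool free_p() const { return (header & 1) == 1; }
    cell size() const { return header >> 3; }
    void *xt() const { return (void *)(this + 1); } /* code follows the header */
};

/* free_heap_block no longer inherits from a shared base; it repeats the
   same header encoding and adds the free-list link. */
struct free_heap_block
{
    cell header;
    free_heap_block *next_free;

    bool free_p() const { return (header & 1) == 1; }
    cell size() const { return header >> 3; }
    void make_free(cell size) { header = (size << 3) | 1; }
};

int main()
{
    free_heap_block block;
    block.make_free(64);
    assert(block.free_p() && block.size() == 64);
    return 0;
}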

View File

@@ -439,7 +439,7 @@ void factor_vm::fixup_labels(array *labels, code_block *compiled)
/* Might GC */
code_block *factor_vm::allot_code_block(cell size, code_block_type type)
{
- heap_block *block = code->allocator->allot(size + sizeof(code_block));
+ code_block *block = code->allocator->allot(size + sizeof(code_block));
/* If allocation failed, do a full GC and compact the code heap.
A full GC that occurs as a result of the data heap filling up does not
@@ -465,9 +465,8 @@ code_block *factor_vm::allot_code_block(cell size, code_block_type type)
}
}
- code_block *compiled = (code_block *)block;
- compiled->set_type(type);
- return compiled;
+ block->set_type(type);
+ return block;
}
/* Might GC */
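
The cast that disappears above falls out of the allocator's template parameter: once code_heap instantiates free_list_allocator<code_block> (see the code_heap.cpp hunk below), allot() already returns code_block *. A toy sketch of that relationship; the allocator here is a deliberately simple stand-in, not the real free-list implementation.

#include <cstdlib>

typedef unsigned long cell; /* illustrative stand-in */

struct code_block { cell header; };

/* Stand-in allocator: the only point is that the template parameter
   fixes the pointer type allot() hands back. */
template<typename Block>
struct free_list_allocator
{
    Block *allot(cell size) { return (Block *)std::malloc(size); }
};

int main()
{
    free_list_allocator<code_block> allocator;
    code_block *block = allocator.allot(sizeof(code_block) + 64); /* no cast at the call site */
    std::free(block);
    return 0;
}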

View File

@@ -8,7 +8,7 @@ code_heap::code_heap(cell size)
if(size > (1L << (sizeof(cell) * 8 - 6))) fatal_error("Heap too large",size);
seg = new segment(align_page(size),true);
if(!seg) fatal_error("Out of memory in heap allocator",size);
- allocator = new free_list_allocator<heap_block>(size,seg->start);
+ allocator = new free_list_allocator<code_block>(size,seg->start);
}
code_heap::~code_heap()
@@ -36,7 +36,7 @@ bool code_heap::needs_fixup_p(code_block *compiled)
return needs_fixup.count(compiled) > 0;
}
- bool code_heap::marked_p(heap_block *compiled)
+ bool code_heap::marked_p(code_block *compiled)
{
return allocator->state.marked_p(compiled);
}
@@ -109,9 +109,9 @@ struct word_and_literal_code_heap_updater {
word_and_literal_code_heap_updater(factor_vm *parent_) : parent(parent_) {}
- void operator()(heap_block *block, cell size)
+ void operator()(code_block *block, cell size)
{
- parent->update_code_block_words_and_literals((code_block *)block);
+ parent->update_code_block_words_and_literals(block);
}
};
@@ -137,8 +137,7 @@ struct code_heap_relocator {
void factor_vm::relocate_code_heap()
{
code_heap_relocator relocator(this);
- code_heap_iterator<code_heap_relocator> iter(relocator);
- code->allocator->sweep(iter);
+ code->allocator->sweep(relocator);
}
void factor_vm::primitive_modify_code_heap()

View File

@@ -6,7 +6,7 @@ struct code_heap {
segment *seg;
/* Memory allocator */
- free_list_allocator<heap_block> *allocator;
+ free_list_allocator<code_block> *allocator;
/* Set of blocks which need full relocation. */
std::set<code_block *> needs_fixup;
@@ -22,7 +22,7 @@ struct code_heap {
void write_barrier(code_block *compiled);
void clear_remembered_set();
bool needs_fixup_p(code_block *compiled);
- bool marked_p(heap_block *compiled);
+ bool marked_p(code_block *compiled);
void set_marked_p(code_block *compiled);
void clear_mark_bits();
void code_heap_free(code_block *compiled);

View File

@@ -15,14 +15,14 @@ struct object_slot_forwarder {
};
struct code_block_forwarder {
- mark_bits<heap_block> *forwarding_map;
+ mark_bits<code_block> *forwarding_map;
- explicit code_block_forwarder(mark_bits<heap_block> *forwarding_map_) :
+ explicit code_block_forwarder(mark_bits<code_block> *forwarding_map_) :
forwarding_map(forwarding_map_) {}
code_block *operator()(code_block *compiled)
{
- return (code_block *)forwarding_map->forward_block(compiled);
+ return forwarding_map->forward_block(compiled);
}
};
@@ -63,7 +63,7 @@ void factor_vm::compact_full_impl(bool trace_contexts_p)
{
tenured_space *tenured = data->tenured;
mark_bits<object> *data_forwarding_map = &tenured->state;
- mark_bits<heap_block> *code_forwarding_map = &code->allocator->state;
+ mark_bits<code_block> *code_forwarding_map = &code->allocator->state;
/* Figure out where blocks are going to go */
data_forwarding_map->compute_forwarding();
@@ -89,8 +89,7 @@ void factor_vm::compact_full_impl(bool trace_contexts_p)
/* Slide everything in the code heap up, and update data and code heap
pointers inside code blocks. */
code_block_compaction_updater code_block_updater(this,slot_forwarder);
- code_heap_iterator<code_block_compaction_updater> iter(code_block_updater);
- code->allocator->compact(iter);
+ code->allocator->compact(code_block_updater);
}
}

View File

@@ -293,21 +293,21 @@ struct code_block_printer {
code_block_printer(factor_vm *parent_) :
parent(parent_), reloc_size(0), literal_size(0) {}
- void operator()(heap_block *scan, cell size)
+ void operator()(code_block *scan, cell size)
{
const char *status;
if(scan->free_p())
status = "free";
else if(parent->code->marked_p(scan))
{
- reloc_size += parent->object_size(((code_block *)scan)->relocation);
- literal_size += parent->object_size(((code_block *)scan)->literals);
+ reloc_size += parent->object_size(scan->relocation);
+ literal_size += parent->object_size(scan->literals);
status = "marked";
}
else
{
- reloc_size += parent->object_size(((code_block *)scan)->relocation);
- literal_size += parent->object_size(((code_block *)scan)->literals);
+ reloc_size += parent->object_size(scan->relocation);
+ literal_size += parent->object_size(scan->literals);
status = "allocated";
}

View File

@@ -3,6 +3,27 @@ namespace factor
static const cell free_list_count = 32;
+ struct free_heap_block
+ {
+ cell header;
+ free_heap_block *next_free;
+ bool free_p() const
+ {
+ return header & 1 == 1;
+ }
+ cell size() const
+ {
+ return header >> 3;
+ }
+ void make_free(cell size)
+ {
+ header = (size << 3) | 1;
+ }
+ };
struct free_list {
free_heap_block *small_blocks[free_list_count];
free_heap_block *large_blocks;
@@ -248,7 +269,7 @@ void free_list_allocator<Block>::sweep()
if(prev && prev->free_p())
{
free_heap_block *free_prev = (free_heap_block *)prev;
- free_prev->set_size(free_prev->size() + size);
+ free_prev->make_free(free_prev->size() + size);
}
else
prev = scan;
@@ -264,7 +285,7 @@ void free_list_allocator<Block>::sweep()
if(prev && prev->free_p())
{
free_heap_block *free_prev = (free_heap_block *)prev;
- free_prev->set_size(free_prev->size() + size);
+ free_prev->make_free(free_prev->size() + size);
}
else
{
@@ -300,7 +321,7 @@ void free_list_allocator<Block>::sweep(Iterator &iter)
if(prev && prev->free_p())
{
free_heap_block *free_prev = (free_heap_block *)prev;
- free_prev->set_size(free_prev->size() + size);
+ free_prev->make_free(free_prev->size() + size);
}
else
prev = scan;
@@ -317,7 +338,7 @@ void free_list_allocator<Block>::sweep(Iterator &iter)
if(prev && prev->free_p())
{
free_heap_block *free_prev = (free_heap_block *)prev;
- free_prev->set_size(free_prev->size() + size);
+ free_prev->make_free(free_prev->size() + size);
}
else
{
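
The only behavioral wrinkle in these free_list.hpp hunks is that coalescing now calls make_free() on the preceding block instead of set_size(). Because that block is already free (its low header bit is set, as free_p() just confirmed), rewriting the header as (size << 3) | 1 produces the same bits the old set_size() would have kept. A small self-contained check of that equivalence, with the old set_size() written out only for comparison:

#include <cassert>

typedef unsigned long cell; /* illustrative stand-in */

struct free_heap_block
{
    cell header;
    free_heap_block *next_free;

    bool free_p() const { return (header & 1) == 1; }
    cell size() const { return header >> 3; }
    void make_free(cell size) { header = (size << 3) | 1; }
    /* old interface, reproduced here only to show the equivalence */
    void set_size(cell size) { header = (header & 0x7) | (size << 3); }
};

int main()
{
    free_heap_block prev;
    prev.make_free(32);

    /* coalesce a following 16-byte block into prev, both ways */
    free_heap_block a = prev, b = prev;
    a.make_free(a.size() + 16);   /* new code path */
    b.set_size(b.size() + 16);    /* old code path */
    assert(a.header == b.header); /* identical when the block is already free */
    return 0;
}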

View File

@@ -221,49 +221,16 @@ struct string : public object {
};
/* The compiled code heap is structured into blocks. */
- struct heap_block
+ struct code_block
{
cell header;
- bool free_p() const
- {
- return header & 1 == 1;
- }
- cell size() const
- {
- cell bytes = header >> 3;
- #ifdef FACTOR_DEBUG
- assert(bytes > 0);
- #endif
- return bytes;
- }
- void set_size(cell size)
- {
- header = ((header & 0x7) | (size << 3));
- }
- };
- struct free_heap_block : public heap_block
- {
- free_heap_block *next_free;
- void make_free(cell size)
- {
- header = (size << 3) | 1;
- }
- };
- struct code_block : public heap_block
- {
cell owner; /* tagged pointer to word, quotation or f */
cell literals; /* tagged pointer to array or f */
cell relocation; /* tagged pointer to byte-array or f */
- void *xt() const
+ bool free_p() const
{
- return (void *)(this + 1);
+ return header & 1 == 1;
}
code_block_type type() const
@@ -285,6 +252,16 @@ struct code_block : public heap_block
{
return type() == code_block_optimized;
}
+ cell size() const
+ {
+ return header >> 3;
+ }
+ void *xt() const
+ {
+ return (void *)(this + 1);
+ }
};
/* Assembly code makes assumptions about the layout of this struct */

View File

@@ -504,18 +504,8 @@ struct factor_vm
void primitive_strip_stack_traces();
/* Apply a function to every code block */
- template<typename Iterator> struct code_heap_iterator {
- Iterator &iter;
- explicit code_heap_iterator(Iterator &iter_) : iter(iter_) {}
- void operator()(heap_block *block, cell size)
- {
- iter((code_block *)block,size);
- }
- };
- template<typename Iterator> void iterate_code_heap(Iterator &iter_)
+ template<typename Iterator> void iterate_code_heap(Iterator &iter)
{
- code_heap_iterator<Iterator> iter(iter_);
code->allocator->iterate(iter);
}
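
With code_heap_iterator gone, a client functor takes code_block * directly and is handed straight to the allocator's iterate/sweep/compact entry points, as the earlier hunks show for the relocator and the compaction updater. A minimal standalone sketch of that calling pattern; iterate_blocks and block_size_printer are simplified stand-ins, not the real free_list_allocator interface:

#include <cstdio>
#include <vector>

typedef unsigned long cell; /* illustrative stand-in */

struct code_block
{
    cell header;
    bool free_p() const { return (header & 1) == 1; }
    cell size() const { return header >> 3; }
};

/* Stand-in for free_list_allocator<code_block>::iterate(): call the
   functor on every live block with its size, no adapter in between. */
template<typename Iterator>
void iterate_blocks(std::vector<code_block> &blocks, Iterator &iter)
{
    for(cell i = 0; i < blocks.size(); i++)
    {
        if(!blocks[i].free_p())
            iter(&blocks[i], blocks[i].size());
    }
}

/* Client functor in the post-commit style: operator() takes code_block *
   directly, matching word_and_literal_code_heap_updater and friends. */
struct block_size_printer
{
    void operator()(code_block *block, cell size)
    {
        std::printf("block %p: %lu bytes\n", (void *)block, (unsigned long)size);
    }
};

int main()
{
    std::vector<code_block> heap(3);
    heap[0].header = 32 << 3;       /* allocated, 32 bytes */
    heap[1].header = (16 << 3) | 1; /* free, skipped */
    heap[2].header = 64 << 3;       /* allocated, 64 bytes */

    block_size_printer printer;
    iterate_blocks(heap, printer);
    return 0;
}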