vm: free up a cell in compiled code block headers

db4
Slava Pestov 2009-10-06 05:52:45 -05:00
parent 2ca0044dd0
commit 16c3251072
14 changed files with 124 additions and 134 deletions
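In outline, what this diff does (a sketch reconstructed from the hunks below; the struct names here are mine, not the VM's): the old heap_block header spent four flag bytes plus a whole cell on the size, while the new one packs the mark bit, block type, and size into a single header cell. The B_FREE/B_ALLOCATED/B_MARKED status enum collapses into a FREE_BLOCK_TYPE pseudo-type plus the mark bit, the per-block needs_fixup flag moves into a side set owned by the code heap, and heap_allot() now takes the block type up front.

/* before: two cells of header on both 32- and 64-bit targets */
struct old_heap_block
{
	unsigned char status;      /* B_FREE / B_ALLOCATED / B_MARKED */
	unsigned char type;        /* WORD_TYPE, QUOTATION_TYPE, PIC_TYPE, ... */
	unsigned char unused;
	unsigned char needs_fixup; /* new block that needs full fixup? */
	cell size;                 /* in bytes, includes this header */
};

/* after: one cell of header */
struct new_heap_block
{
	/* bit 0: mark; bits 1-7: type; bits 8 and up: size in bytes */
	cell header;
};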

View File

@@ -81,7 +81,7 @@ code_block *factor_vm::frame_code(stack_frame *frame)
 cell factor_vm::frame_type(stack_frame *frame)
 {
-	return frame_code(frame)->type;
+	return frame_code(frame)->type();
 }

 cell factor_vm::frame_executing(stack_frame *frame)

View File

@@ -20,7 +20,7 @@ cell factor_vm::relocation_offset_of(relocation_entry r)
 void factor_vm::flush_icache_for(code_block *block)
 {
-	flush_icache((cell)block,block->size);
+	flush_icache((cell)block,block->size());
 }
int factor_vm::number_of_parameters(relocation_type type)
@@ -290,7 +290,7 @@ struct literal_references_updater {
 /* Update pointers to literals from compiled code. */
 void factor_vm::update_literal_references(code_block *compiled)
 {
-	if(!compiled->needs_fixup)
+	if(!code->needs_fixup_p(compiled))
 	{
 		literal_references_updater updater(this);
 		iterate_relocations(compiled,updater);
@@ -331,7 +331,7 @@ to update references to other words, without worrying about literals
 or dlsyms. */
 void factor_vm::update_word_references(code_block *compiled)
 {
-	if(compiled->needs_fixup)
+	if(code->needs_fixup_p(compiled))
 		relocate_code_block(compiled);
 	/* update_word_references() is always applied to every block in
 	   the code heap. Since it resets all call sites to point to
@@ -340,7 +340,7 @@ void factor_vm::update_word_references(code_block *compiled)
 	   are referenced after this is done. So instead of polluting
 	   the code heap with dead PICs that will be freed on the next
 	   GC, we add them to the free list immediately. */
-	else if(compiled->type == PIC_TYPE)
+	else if(compiled->type() == PIC_TYPE)
 		code->code_heap_free(compiled);
 	else
 	{
@@ -372,7 +372,7 @@ struct code_block_relocator {
 /* Perform all fixups on a code block */
 void factor_vm::relocate_code_block(code_block *compiled)
 {
-	compiled->needs_fixup = false;
+	code->needs_fixup.erase(compiled);
 	code_block_relocator relocator(this);
 	iterate_relocations(compiled,relocator);
 	flush_icache_for(compiled);
@@ -397,15 +397,15 @@ void factor_vm::fixup_labels(array *labels, code_block *compiled)
 }

 /* Might GC */
-code_block *factor_vm::allot_code_block(cell size)
+code_block *factor_vm::allot_code_block(cell size, cell type)
 {
-	heap_block *block = code->heap_allot(size + sizeof(code_block));
+	heap_block *block = code->heap_allot(size + sizeof(code_block),type);

 	/* If allocation failed, do a code GC */
 	if(block == NULL)
 	{
 		gc();
-		block = code->heap_allot(size + sizeof(code_block));
+		block = code->heap_allot(size + sizeof(code_block),type);

 		/* Insufficient room even after code GC, give up */
 		if(block == NULL)
@@ -433,11 +433,7 @@ code_block *factor_vm::add_code_block(cell type, cell code_, cell labels_, cell
 	gc_root<array> literals(literals_,this);

 	cell code_length = align8(array_capacity(code.untagged()));
-	code_block *compiled = allot_code_block(code_length);
-
-	/* compiled header */
-	compiled->type = type;
-	compiled->needs_fixup = true;
+	code_block *compiled = allot_code_block(code_length,type);

 	/* slight space optimization */
 	if(relocation.type() == BYTE_ARRAY_TYPE && array_capacity(relocation.untagged()) == 0)
@@ -460,6 +456,7 @@ code_block *factor_vm::add_code_block(cell type, cell code_, cell labels_, cell
 	/* next time we do a minor GC, we have to scan the code heap for
 	   literals */
 	this->code->write_barrier(compiled);
+	this->code->needs_fixup.insert(compiled);

 	return compiled;
 }

View File

@@ -11,9 +11,15 @@ void code_heap::write_barrier(code_block *compiled)
 		youngest_referenced_generation = myvm->data->nursery();
 }

+bool code_heap::needs_fixup_p(code_block *compiled)
+{
+	return needs_fixup.count(compiled) > 0;
+}
+
 void code_heap::code_heap_free(code_block *compiled)
 {
 	remembered_set.erase(compiled);
+	needs_fixup.erase(compiled);
 	heap_free(compiled);
 }

View File

@@ -2,16 +2,20 @@ namespace factor
 {

 struct code_heap : heap {
+	/* Set of blocks which need full relocation. */
+	unordered_set<code_block *> needs_fixup;
+
 	/* Maps code blocks to the youngest generation containing
 	   one of their literals. If this is tenured (0), the code block
 	   is not part of the remembered set. */
 	unordered_map<code_block *, cell> remembered_set;

 	/* Minimum value in the above map. */
 	cell youngest_referenced_generation;

 	explicit code_heap(factor_vm *myvm, cell size);
 	void write_barrier(code_block *compiled);
+	bool needs_fixup_p(code_block *compiled);
 	void code_heap_free(code_block *compiled);
 };
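A note on the pattern here: rather than spending header bits on it, the "needs full relocation" state is now side-table membership, keyed by block address and owned by the code heap. The sketch below (simplified and std::-qualified; the on_* names are mine, not the VM's) shows the three touch points this commit wires up: insert on allocation in add_code_block(), erase in relocate_code_block(), and erase again when a block is freed.

#include <unordered_set>

struct code_block;

struct code_heap_sketch
{
	/* membership in this set is the "needs fixup" flag */
	std::unordered_set<code_block *> needs_fixup;

	/* add_code_block(): a fresh block starts out unrelocated */
	void on_allot(code_block *compiled) { needs_fixup.insert(compiled); }

	/* relocate_code_block(): replaces the old needs_fixup = false */
	void on_relocate(code_block *compiled) { needs_fixup.erase(compiled); }

	/* code_heap_free(): dead blocks must not linger in the set */
	void on_free(code_block *compiled) { needs_fixup.erase(compiled); }

	/* replaces reads of the old needs_fixup header byte */
	bool needs_fixup_p(code_block *compiled)
	{
		return needs_fixup.count(compiled) > 0;
	}
};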

View File

@@ -308,28 +308,23 @@ void factor_vm::dump_code_heap()
 	while(scan)
 	{
 		const char *status;

-		switch(scan->status)
-		{
-		case B_FREE:
+		if(scan->type() == FREE_BLOCK_TYPE)
 			status = "free";
-			break;
-		case B_ALLOCATED:
-			reloc_size += object_size(((code_block *)scan)->relocation);
-			literal_size += object_size(((code_block *)scan)->literals);
-			status = "allocated";
-			break;
-		case B_MARKED:
+		else if(scan->marked_p())
+		{
 			reloc_size += object_size(((code_block *)scan)->relocation);
 			literal_size += object_size(((code_block *)scan)->literals);
 			status = "marked";
-			break;
-		default:
-			status = "invalid";
-			break;
-		}
+		}
+		else
+		{
+			reloc_size += object_size(((code_block *)scan)->relocation);
+			literal_size += object_size(((code_block *)scan)->literals);
+			status = "allocated";
+		}

 		print_cell_hex((cell)scan); print_string(" ");
-		print_cell_hex(scan->size); print_string(" ");
+		print_cell_hex(scan->size()); print_string(" ");
 		print_string(status); print_string("\n");

 		scan = code->next_block(scan);

View File

@@ -21,9 +21,9 @@ heap::heap(factor_vm *myvm_, cell size)

 void heap::add_to_free_list(free_heap_block *block)
 {
-	if(block->size < free_list_count * block_size_increment)
+	if(block->size() < free_list_count * block_size_increment)
 	{
-		int index = block->size / block_size_increment;
+		int index = block->size() / block_size_increment;
 		block->next_free = free.small_blocks[index];
 		free.small_blocks[index] = block;
 	}
@@ -52,17 +52,8 @@ void heap::build_free_list(cell size)
 	/* Add all free blocks to the free list */
 	while(scan && scan < (heap_block *)end)
 	{
-		switch(scan->status)
-		{
-		case B_FREE:
+		if(scan->type() == FREE_BLOCK_TYPE)
 			add_to_free_list((free_heap_block *)scan);
-			break;
-		case B_ALLOCATED:
-			break;
-		default:
-			myvm->critical_error("Invalid scan->status",(cell)scan);
-			break;
-		}

 		prev = scan;
 		scan = next_block(scan);
@@ -72,8 +63,9 @@ void heap::build_free_list(cell size)
 	   branch is only taken after loading a new image, not after code GC */
 	if((cell)(end + 1) <= seg->end)
 	{
-		end->status = B_FREE;
-		end->size = seg->end - (cell)end;
+		end->set_marked_p(false);
+		end->set_type(FREE_BLOCK_TYPE);
+		end->set_size(seg->end - (cell)end);

 		/* add final free block */
 		add_to_free_list(end);
@@ -85,14 +77,14 @@ void heap::build_free_list(cell size)
 		/* even if there's no room at the end of the heap for a new
 		   free block, we might have to jigger it up by a few bytes in
 		   case prev + prev->size */
-		if(prev) prev->size = seg->end - (cell)prev;
+		if(prev) prev->set_size(seg->end - (cell)prev);
 	}
 }

 void heap::assert_free_block(free_heap_block *block)
 {
-	if(block->status != B_FREE)
+	if(block->type() != FREE_BLOCK_TYPE)
 		myvm->critical_error("Invalid block in free list",(cell)block);
 }
@@ -120,7 +112,7 @@ free_heap_block *heap::find_free_block(cell size)
 	while(block)
 	{
 		assert_free_block(block);
-		if(block->size >= size)
+		if(block->size() >= size)
 		{
 			if(prev)
 				prev->next_free = block->next_free;
@@ -138,14 +130,14 @@ free_heap_block *heap::find_free_block(cell size)

 free_heap_block *heap::split_free_block(free_heap_block *block, cell size)
 {
-	if(block->size != size )
+	if(block->size() != size )
 	{
 		/* split the block in two */
 		free_heap_block *split = (free_heap_block *)((cell)block + size);
-		split->status = B_FREE;
-		split->size = block->size - size;
+		split->set_type(FREE_BLOCK_TYPE);
+		split->set_size(block->size() - size);
 		split->next_free = block->next_free;
-		block->size = size;
+		block->set_size(size);
 		add_to_free_list(split);
 	}
@@ -153,7 +145,7 @@ free_heap_block *heap::split_free_block(free_heap_block *block, cell size)
 }

 /* Allocate a block of memory from the mark and sweep GC heap */
-heap_block *heap::heap_allot(cell size)
+heap_block *heap::heap_allot(cell size, cell type)
 {
 	size = (size + block_size_increment - 1) & ~(block_size_increment - 1);
@@ -161,8 +153,8 @@ heap_block *heap::heap_allot(cell size)
 	if(block)
 	{
 		block = split_free_block(block,size);
-		block->status = B_ALLOCATED;
+		block->set_type(type);
+		block->set_marked_p(false);
 		return block;
 	}
 	else
@@ -172,24 +164,13 @@ heap_block *heap::heap_allot(cell size)

 /* Deallocates a block manually */
 void heap::heap_free(heap_block *block)
 {
-	block->status = B_FREE;
+	block->set_type(FREE_BLOCK_TYPE);
 	add_to_free_list((free_heap_block *)block);
 }

 void heap::mark_block(heap_block *block)
 {
-	/* If already marked, do nothing */
-	switch(block->status)
-	{
-	case B_MARKED:
-		return;
-	case B_ALLOCATED:
-		block->status = B_MARKED;
-		break;
-	default:
-		myvm->critical_error("Marking the wrong block",(cell)block);
-		break;
-	}
+	block->set_marked_p(true);
 }

 /* If in the middle of code GC, we have to grow the heap, data GC restarts from
@@ -200,9 +181,7 @@ void heap::unmark_marked()
 	while(scan)
 	{
-		if(scan->status == B_MARKED)
-			scan->status = B_ALLOCATED;
+		scan->set_marked_p(false);

 		scan = next_block(scan);
 	}
 }
@@ -218,19 +197,16 @@ void heap::heap_usage(cell *used, cell *total_free, cell *max_free)
 	while(scan)
 	{
-		switch(scan->status)
+		cell size = scan->size();
+
+		if(scan->type() == FREE_BLOCK_TYPE)
 		{
-		case B_ALLOCATED:
-			*used += scan->size;
-			break;
-		case B_FREE:
-			*total_free += scan->size;
-			if(scan->size > *max_free)
-				*max_free = scan->size;
-			break;
-		default:
-			myvm->critical_error("Invalid scan->status",(cell)scan);
+			*total_free += size;
+			if(size > *max_free)
+				*max_free = size;
 		}
+		else
+			*used += size;

 		scan = next_block(scan);
 	}
@@ -245,7 +221,7 @@ cell heap::heap_size()
 		scan = next_block(scan);

 	/* this is the last block in the heap, and it is free */
-	if(scan->status == B_FREE)
+	if(scan->type() == FREE_BLOCK_TYPE)
 		return (cell)scan - seg->start;
 	/* otherwise the last block is allocated */
 	else
@@ -260,14 +236,11 @@ cell heap::compute_heap_forwarding()
 	while(scan)
 	{
-		if(scan->status == B_ALLOCATED)
+		if(scan->type() != FREE_BLOCK_TYPE)
 		{
 			forwarding[scan] = address;
-			address += scan->size;
+			address += scan->size();
 		}
-		else if(scan->status == B_MARKED)
-			myvm->critical_error("Why is the block marked?",0);

 		scan = next_block(scan);
 	}
@@ -282,8 +255,8 @@ void heap::compact_heap()
 	{
 		heap_block *next = next_block(scan);

-		if(scan->status == B_ALLOCATED)
-			memmove(forwarding[scan],scan,scan->size);
+		if(scan->type() != FREE_BLOCK_TYPE)
+			memmove(forwarding[scan],scan,scan->size());
 		scan = next;
 	}
 }
@@ -291,16 +264,16 @@ void heap::compact_heap()

 heap_block *heap::free_allocated(heap_block *prev, heap_block *scan)
 {
 	if(myvm->secure_gc)
-		memset(scan + 1,0,scan->size - sizeof(heap_block));
+		memset(scan + 1,0,scan->size() - sizeof(heap_block));

-	if(prev && prev->status == B_FREE)
+	if(prev && prev->type() == FREE_BLOCK_TYPE)
 	{
-		prev->size += scan->size;
+		prev->set_size(prev->size() + scan->size());
 		return prev;
 	}
 	else
 	{
-		scan->status = B_FREE;
+		scan->set_type(FREE_BLOCK_TYPE);
 		return scan;
 	}
 }

View File

@@ -19,7 +19,7 @@ struct heap {

 	inline heap_block *next_block(heap_block *block)
 	{
-		cell next = ((cell)block + block->size);
+		cell next = ((cell)block + block->size());
 		if(next == seg->end)
 			return NULL;
 		else
@@ -43,7 +43,7 @@ struct heap {
 	void assert_free_block(free_heap_block *block);
 	free_heap_block *find_free_block(cell size);
 	free_heap_block *split_free_block(free_heap_block *block, cell size);
-	heap_block *heap_allot(cell size);
+	heap_block *heap_allot(cell size, cell type);
 	void heap_free(heap_block *block);
 	void mark_block(heap_block *block);
 	void unmark_marked();
@@ -65,30 +65,28 @@ struct heap {
 		while(scan)
 		{
-			switch(scan->status)
+			if(scan->type() == FREE_BLOCK_TYPE)
 			{
-			case B_ALLOCATED:
-				prev = free_allocated(prev,scan);
-				break;
-			case B_FREE:
-				if(prev && prev->status == B_FREE)
-					prev->size += scan->size;
+				if(prev && prev->type() == FREE_BLOCK_TYPE)
+					prev->set_size(prev->size() + scan->size());
 				else
 					prev = scan;
-				break;
-			case B_MARKED:
-				if(prev && prev->status == B_FREE)
-					add_to_free_list((free_heap_block *)prev);
-				scan->status = B_ALLOCATED;
+			}
+			else if(scan->marked_p())
+			{
+				if(prev && prev->type() == FREE_BLOCK_TYPE)
+					add_to_free_list((free_heap_block *)prev);
+				scan->set_marked_p(false);
 				prev = scan;
 				iter(scan);
-				break;
 			}
+			else
+				prev = free_allocated(prev,scan);

 			scan = next_block(scan);
 		}

-		if(prev && prev->status == B_FREE)
+		if(prev && prev->type() == FREE_BLOCK_TYPE)
 			add_to_free_list((free_heap_block *)prev);
 	}
};
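The sweep loop above leans on one property of the packed header: since the size lives in the same cell as the type, coalescing two adjacent free blocks is a single set_size() call on the first one, and the second block's header is silently absorbed into the merged payload. A standalone toy to make that concrete (the arena and sizes are mine, not VM code; the accessors are copied from the diff):

#include <cassert>
#include <cstring>

typedef unsigned long cell; /* assumption: stands in for the VM's cell typedef */

#define FREE_BLOCK_TYPE 69

struct heap_block
{
	cell header;

	cell type() { return (header >> 1) & 0x7f; }
	void set_type(cell type) { header = (header & ~(cell)(0x7f << 1)) | (type << 1); }
	cell size() { return header >> 8; }
	void set_size(cell size) { header = (header & 0xff) | (size << 8); }
};

int main()
{
	unsigned char arena[256];
	memset(arena,0,sizeof(arena));

	/* carve two adjacent free blocks out of the arena */
	heap_block *prev = (heap_block *)arena;
	prev->set_type(FREE_BLOCK_TYPE);
	prev->set_size(96);

	heap_block *scan = (heap_block *)(arena + 96);
	scan->set_type(FREE_BLOCK_TYPE);
	scan->set_size(160);

	/* the coalescing step from the sweep loop */
	if(prev->type() == FREE_BLOCK_TYPE && scan->type() == FREE_BLOCK_TYPE)
		prev->set_size(prev->size() + scan->size());

	assert(prev->size() == 256); /* one free block now spans the whole arena */
	return 0;
}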

View File

@@ -19,7 +19,7 @@ void factor_vm::deallocate_inline_cache(cell return_address)
 	check_code_pointer((cell)old_xt);
 	code_block *old_block = (code_block *)old_xt - 1;
-	cell old_type = old_block->type;
+	cell old_type = old_block->type();

 #ifdef FACTOR_DEBUG
 	/* The call target was either another PIC,

View File

@@ -65,7 +65,8 @@ inline static cell align8(cell a)
 #define TYPE_COUNT 15

 /* Not a real type, but code_block's type field can be set to this */
-#define PIC_TYPE 69
+#define PIC_TYPE 42
+#define FREE_BLOCK_TYPE 69

 /* Constants used when floating-point trap exceptions are thrown */
enum
@@ -196,34 +197,46 @@ struct string : public object {
 };

 /* The compiled code heap is structured into blocks. */
-enum block_status
-{
-	B_FREE,
-	B_ALLOCATED,
-	B_MARKED
-};
-
 struct heap_block
 {
-	unsigned char status; /* free or allocated? */
-	unsigned char type; /* this is WORD_TYPE or QUOTATION_TYPE */
-	unsigned char unused;
-	unsigned char needs_fixup; /* is this a new block that needs full fixup? */
-
-	cell size;
+	/* Bit 0: mark
+	   Bit 1-7: type
+	   Bit 8-...: size */
+	cell header;
+
+	bool marked_p() { return header & 1; }
+	void set_marked_p(bool marked)
+	{
+		if(marked)
+			header |= 1;
+		else
+			header &= ~1;
+	}
+
+	cell type() { return (header >> 1) & 0x7f; }
+	void set_type(cell type)
+	{
+		header = ((header & ~(0x7f << 1)) | (type << 1));
+	}
+
+	/* In bytes, includes this header */
+	cell size() { return (header >> 8); }
+	void set_size(cell size)
+	{
+		header = (header & 0xff) | (size << 8);
+	}
 };

 struct free_heap_block : public heap_block
 {
 	free_heap_block *next_free;
 };

 struct code_block : public heap_block
 {
+	cell unused;
 	cell literals; /* # bytes */
 	cell relocation; /* tagged pointer to byte-array or f */

 	void *xt() { return (void *)(this + 1); }
 };
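To sanity-check the bit layout, here is a hedged, standalone demo of the accessors above (aliasing cell to unsigned long is my assumption; the VM typedefs it per platform, and the main() and asserts are mine). The three fields occupy disjoint bit ranges, so each reads back unchanged no matter how the others are set:

#include <cassert>

typedef unsigned long cell; /* assumption: stands in for the VM's cell typedef */

struct heap_block_demo
{
	cell header;

	bool marked_p() { return header & 1; }
	void set_marked_p(bool marked)
	{
		if(marked) header |= 1;
		else header &= ~1;
	}
	cell type() { return (header >> 1) & 0x7f; }
	void set_type(cell type) { header = (header & ~(cell)(0x7f << 1)) | (type << 1); }
	cell size() { return header >> 8; }
	void set_size(cell size) { header = (header & 0xff) | (size << 8); }
};

int main()
{
	heap_block_demo block = { 0 };
	block.set_type(69);   /* FREE_BLOCK_TYPE */
	block.set_size(4096); /* bytes, including the header */
	block.set_marked_p(true);

	/* each field reads back unchanged */
	assert(block.type() == 69);
	assert(block.size() == 4096);
	assert(block.marked_p());

	/* clearing the mark bit disturbs neither type nor size */
	block.set_marked_p(false);
	assert(block.type() == 69 && block.size() == 4096);
	return 0;
}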

View File

@@ -30,17 +30,21 @@
 #if __GNUC__ == 4
 	#include <tr1/unordered_map>
+	#include <tr1/unordered_set>

 	namespace factor
 	{
 		using std::tr1::unordered_map;
+		using std::tr1::unordered_set;
 	}
 #elif __GNUC__ == 3
 	#include <boost/unordered_map.hpp>
+	#include <boost/unordered_set.hpp>

 	namespace factor
 	{
 		using boost::unordered_map;
+		using boost::unordered_set;
 	}
 #else
 	#error Factor requires GCC 3.x or later

View File

@@ -281,7 +281,7 @@ void quotation_jit::iterate_quotation()

 void factor_vm::set_quot_xt(quotation *quot, code_block *code)
 {
-	if(code->type != QUOTATION_TYPE)
+	if(code->type() != QUOTATION_TYPE)
 		critical_error("Bad param to set_quot_xt",(cell)code);
 	quot->code = code;

View File

@@ -513,8 +513,8 @@ struct factor_vm
 	void check_code_address(cell address);
 	void relocate_code_block(code_block *compiled);
 	void fixup_labels(array *labels, code_block *compiled);
-	code_block *allot_code_block(cell size);
-	code_block *add_code_block(cell type,cell code_,cell labels_,cell relocation_,cell literals_);
+	code_block *allot_code_block(cell size, cell type);
+	code_block *add_code_block(cell type, cell code_, cell labels_, cell relocation_, cell literals_);

 	inline bool stack_traces_p()
 	{
 		return userenv[STACK_TRACES_ENV] != F;
@@ -548,7 +548,7 @@ struct factor_vm
 		while(scan)
 		{
-			if(scan->status != B_FREE)
+			if(scan->type() != FREE_BLOCK_TYPE)
 				iter((code_block *)scan);
 			scan = code->next_block(scan);
 		}

View File

@@ -49,12 +49,12 @@ void factor_vm::primitive_word_xt()
 	if(profiling_p)
 	{
 		dpush(allot_cell((cell)w->profiling->xt()));
-		dpush(allot_cell((cell)w->profiling + w->profiling->size));
+		dpush(allot_cell((cell)w->profiling + w->profiling->size()));
 	}
 	else
 	{
 		dpush(allot_cell((cell)w->code->xt()));
-		dpush(allot_cell((cell)w->code + w->code->size));
+		dpush(allot_cell((cell)w->code + w->code->size()));
 	}
 }

View File

@@ -3,7 +3,7 @@ namespace factor

 inline bool word_optimized_p(word *word)
 {
-	return word->code->type == WORD_TYPE;
+	return word->code->type() == WORD_TYPE;
 }

 }