vm: split off free_list_allocator from heap class, rename zone to bump_allocator
parent 5608bc1268
commit f0816d72f1
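
For orientation: this commit separates two allocation strategies that previously lived together in the heap template. A bump allocator (the renamed zone) hands out memory by advancing a pointer through a contiguous region and relies on the copying GC to reclaim it wholesale; a free_list_allocator hands out and takes back individual variable-sized blocks, which is what the non-moving code heap needs between compactions. The sketch below is illustrative only — field names echo the patch, but the code is a simplified assumption, not the Factor VM's implementation.

/* Illustrative sketch only -- not part of this patch. */
#include <cstddef>

typedef unsigned long cell;

/* Bump allocation: 'here' advances through [start, end). Allocation is one
   compare and one add; space is reclaimed by the GC evacuating survivors and
   resetting 'here', never block by block. */
struct bump_allocator_sketch {
	cell here, start, end, size;

	bump_allocator_sketch(cell size_, cell start_)
		: here(start_), start(start_), end(start_ + size_), size(size_) {}

	void *allot(cell bytes)
	{
		if(here + bytes > end) return NULL;   /* caller triggers a GC */
		void *obj = (void *)here;
		here += bytes;
		return obj;
	}
};

/* Free-list allocation: freed blocks are threaded onto a list and reused, so a
   single block can be released without moving anything else. The real
   allocator in this patch also keeps size-segregated lists and splits
   oversized blocks; this sketch is first-fit on one list. */
struct free_block_sketch {
	cell block_size;
	free_block_sketch *next_free;
};

struct free_list_allocator_sketch {
	free_block_sketch *free_blocks;

	free_list_allocator_sketch() : free_blocks(NULL) {}

	void *allot(cell bytes)
	{
		free_block_sketch **prev = &free_blocks;
		for(free_block_sketch *b = free_blocks; b; prev = &b->next_free, b = b->next_free)
		{
			if(b->block_size >= bytes)
			{
				*prev = b->next_free;     /* unlink and reuse this block */
				return b;
			}
		}
		return NULL;                      /* caller compacts, then retries */
	}

	void free(void *p, cell bytes)
	{
		free_block_sketch *b = (free_block_sketch *)p;
		b->block_size = bytes;
		b->next_free = free_blocks;
		free_blocks = b;
	}
};
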
Makefile (2 changed lines)

@@ -55,7 +55,7 @@ DLL_OBJS = $(PLAF_DLL_OBJS) \
vm/jit.o \
vm/math.o \
vm/nursery_collector.o \
vm/old_space.o \
vm/object_start_map.o \
vm/primitives.o \
vm/profiler.o \
vm/quotations.o \

@@ -3,7 +3,8 @@ namespace factor

struct aging_policy {
factor_vm *parent;
-zone *aging, *tenured;
+aging_space *aging;
+tenured_space *tenured;

aging_policy(factor_vm *parent_) :
parent(parent_),

@@ -1,17 +1,17 @@
namespace factor
{

-struct aging_space : zone {
+struct aging_space : bump_allocator {
object_start_map starts;

aging_space(cell size, cell start) :
-zone(size,start), starts(size,start) {}
+bump_allocator(size,start), starts(size,start) {}

object *allot(cell size)
{
if(here + size > end) return NULL;

-object *obj = zone::allot(size);
+object *obj = bump_allocator::allot(size);
starts.record_object_start_offset(obj);
return obj;
}

@@ -1,14 +1,15 @@
namespace factor
{

-struct zone {
+struct bump_allocator {
/* offset of 'here' and 'end' is hardcoded in compiler backends */
cell here;
cell start;
cell end;
cell size;

-zone(cell size_, cell start_) : here(0), start(start_), end(start_ + size_), size(size_) {}
+bump_allocator(cell size_, cell start_) :
+here(0), start(start_), end(start_ + size_), size(size_) {}

inline bool contains_p(object *pointer)
{
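
The comment above about the offsets of 'here' and 'end' being hardcoded in the compiler backends refers to inline allocation: compiled code bumps the nursery pointer directly instead of calling into the VM. A minimal sketch of that fast path, assuming a layout like the struct above (illustrative names, not part of this commit):

/* Illustrative sketch only -- not part of this patch. */
typedef unsigned long cell;

struct nursery_sketch { cell here, start, end, size; };

static void *inline_allot_fast_path(nursery_sketch *nursery, cell bytes)
{
	cell h = nursery->here;                    /* load at a fixed field offset */
	if(h + bytes > nursery->end) return 0;     /* overflow: fall back to the GC slow path */
	nursery->here = h + bytes;                 /* bump the pointer */
	return (void *)h;
}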

@@ -439,7 +439,7 @@ void factor_vm::fixup_labels(array *labels, code_block *compiled)
/* Might GC */
code_block *factor_vm::allot_code_block(cell size, code_block_type type)
{
-heap_block *block = code->heap_allot(size + sizeof(code_block));
+heap_block *block = code->allocator->allot(size + sizeof(code_block));

/* If allocation failed, do a full GC and compact the code heap.
A full GC that occurs as a result of the data heap filling up does not
@@ -449,13 +449,13 @@ code_block *factor_vm::allot_code_block(cell size, code_block_type type)
if(block == NULL)
{
primitive_compact_gc();
-block = code->heap_allot(size + sizeof(code_block));
+block = code->allocator->allot(size + sizeof(code_block));

/* Insufficient room even after code GC, give up */
if(block == NULL)
{
cell used, total_free, max_free;
-code->heap_usage(&used,&total_free,&max_free);
+code->allocator->usage(&used,&total_free,&max_free);

print_string("Code heap stats:\n");
print_string("Used: "); print_cell(used); nl();

@@ -3,7 +3,21 @@
namespace factor
{

-code_heap::code_heap(bool secure_gc, cell size) : heap<heap_block>(secure_gc,size,true) {}
+code_heap::code_heap(cell size)
+{
+if(size > (1L << (sizeof(cell) * 8 - 6))) fatal_error("Heap too large",size);
+seg = new segment(align_page(size),true);
+if(!seg) fatal_error("Out of memory in heap allocator",size);
+allocator = new free_list_allocator<heap_block>(seg->start,size);
+}

+code_heap::~code_heap()
+{
+delete allocator;
+allocator = NULL;
+delete seg;
+seg = NULL;
+}

void code_heap::write_barrier(code_block *compiled)
{
@@ -22,18 +36,33 @@ bool code_heap::needs_fixup_p(code_block *compiled)
return needs_fixup.count(compiled) > 0;
}

+bool code_heap::marked_p(heap_block *compiled)
+{
+return allocator->state.marked_p(compiled);
+}

+void code_heap::set_marked_p(code_block *compiled)
+{
+allocator->state.set_marked_p(compiled);
+}

+void code_heap::clear_mark_bits()
+{
+allocator->state.clear_mark_bits();
+}

void code_heap::code_heap_free(code_block *compiled)
{
points_to_nursery.erase(compiled);
points_to_aging.erase(compiled);
needs_fixup.erase(compiled);
-heap_free(compiled);
+allocator->free(compiled);
}

/* Allocate a code heap during startup */
void factor_vm::init_code_heap(cell size)
{
-code = new code_heap(secure_gc,size);
+code = new code_heap(size);
}

bool factor_vm::in_code_heap_p(cell ptr)
@@ -89,7 +118,7 @@ struct word_and_literal_code_heap_updater {
void factor_vm::update_code_heap_words_and_literals()
{
word_and_literal_code_heap_updater updater(this);
-code->sweep_heap(updater);
+code->allocator->sweep(updater);
}

/* After growing the heap, we have to perform a full relocation to update
@@ -109,7 +138,7 @@ void factor_vm::relocate_code_heap()
{
code_heap_relocator relocator(this);
code_heap_iterator<code_heap_relocator> iter(relocator);
-code->sweep_heap(iter);
+code->allocator->sweep(iter);
}

void factor_vm::primitive_modify_code_heap()
@@ -169,7 +198,7 @@ void factor_vm::primitive_modify_code_heap()
void factor_vm::primitive_code_room()
{
cell used, total_free, max_free;
-code->heap_usage(&used,&total_free,&max_free);
+code->allocator->usage(&used,&total_free,&max_free);
dpush(tag_fixnum(code->seg->size / 1024));
dpush(tag_fixnum(used / 1024));
dpush(tag_fixnum(total_free / 1024));
@@ -178,7 +207,7 @@ void factor_vm::primitive_code_room()

code_block *code_heap::forward_code_block(code_block *compiled)
{
-return (code_block *)state->forward_block(compiled);
+return (code_block *)allocator->state.forward_block(compiled);
}

struct callframe_forwarder {
@@ -277,7 +306,7 @@ function returns. */
void factor_vm::compact_code_heap(bool trace_contexts_p)
{
/* Figure out where blocks are going to go */
-code->state->compute_forwarding();
+code->allocator->state.compute_forwarding();

/* Update references to the code heap from the data heap */
forward_object_xts();
@@ -291,7 +320,7 @@ void factor_vm::compact_code_heap(bool trace_contexts_p)
that the data heap is up to date since relocation looks up object XTs) */
code_heap_relocator relocator(this);
code_heap_iterator<code_heap_relocator> iter(relocator);
-code->compact_heap(iter);
+code->allocator->compact(iter);
}

struct stack_trace_stripper {

@@ -1,7 +1,13 @@
namespace factor
{

-struct code_heap : heap<heap_block> {
+struct code_heap {
+/* The actual memory area */
+segment *seg;

+/* Memory allocator */
+free_list_allocator<heap_block> *allocator;

/* Set of blocks which need full relocation. */
std::set<code_block *> needs_fixup;
@@ -11,10 +17,14 @@ struct code_heap : heap<heap_block> {
/* Code blocks which may reference objects in aging space or the nursery */
std::set<code_block *> points_to_aging;

-explicit code_heap(bool secure_gc, cell size);
+explicit code_heap(cell size);
+~code_heap();
void write_barrier(code_block *compiled);
void clear_remembered_set();
bool needs_fixup_p(code_block *compiled);
+bool marked_p(heap_block *compiled);
+void set_marked_p(code_block *compiled);
+void clear_mark_bits();
void code_heap_free(code_block *compiled);
code_block *forward_code_block(code_block *compiled);
};

@@ -42,7 +42,7 @@ data_heap::data_heap(cell young_size_, cell aging_size_, cell tenured_size_)
aging = new aging_space(aging_size,tenured_semispace->end);
aging_semispace = new aging_space(aging_size,aging->end);

-nursery = new zone(young_size,aging_semispace->end);
+nursery = new bump_allocator(young_size,aging_semispace->end);

assert(seg->end - nursery->end <= deck_size);
}
@@ -75,10 +75,9 @@ void factor_vm::set_data_heap(data_heap *data_)
data->reset_generation(data->tenured);
}

-void factor_vm::init_data_heap(cell young_size, cell aging_size, cell tenured_size, bool secure_gc_)
+void factor_vm::init_data_heap(cell young_size, cell aging_size, cell tenured_size)
{
set_data_heap(new data_heap(young_size,aging_size,tenured_size));
-secure_gc = secure_gc_;
}

/* Size of the object pointed to by a tagged pointer */

@@ -10,7 +10,7 @@ struct data_heap {

segment *seg;

-zone *nursery;
+bump_allocator *nursery;
aging_space *aging;
aging_space *aging_semispace;
tenured_space *tenured;

@@ -209,7 +209,7 @@ void factor_vm::dump_memory(cell from, cell to)
dump_cell(from);
}

-void factor_vm::dump_zone(const char *name, zone *z)
+void factor_vm::dump_zone(const char *name, bump_allocator *z)
{
print_string(name); print_string(": ");
print_string("Start="); print_cell(z->start);
@@ -296,7 +296,7 @@ struct code_block_printer {
const char *status;
if(scan->free_p())
status = "free";
-else if(parent->code->state->is_marked_p(scan))
+else if(parent->code->marked_p(scan))
{
reloc_size += parent->object_size(((code_block *)scan)->relocation);
literal_size += parent->object_size(((code_block *)scan)->literals);
@@ -319,7 +319,7 @@ struct code_block_printer {
void factor_vm::dump_code_heap()
{
code_block_printer printer(this);
-code->iterate_heap(printer);
+code->allocator->iterate(printer);
print_cell(printer.reloc_size); print_string(" bytes of relocation data\n");
print_cell(printer.literal_size); print_string(" bytes of literal data\n");
}

@@ -37,7 +37,6 @@ void factor_vm::default_parameters(vm_parameters *p)

p->max_pic_size = 3;

-p->secure_gc = false;
p->fep = false;
p->signals = true;

@@ -85,7 +84,6 @@ void factor_vm::init_parameters_from_args(vm_parameters *p, int argc, vm_char **
else if(factor_arg(arg,STRING_LITERAL("-codeheap=%d"),&p->code_size));
else if(factor_arg(arg,STRING_LITERAL("-pic=%d"),&p->max_pic_size));
else if(factor_arg(arg,STRING_LITERAL("-callbacks=%d"),&p->callback_size));
-else if(STRCMP(arg,STRING_LITERAL("-securegc")) == 0) p->secure_gc = true;
else if(STRCMP(arg,STRING_LITERAL("-fep")) == 0) p->fep = true;
else if(STRCMP(arg,STRING_LITERAL("-nosignals")) == 0) p->signals = false;
else if(STRNCMP(arg,STRING_LITERAL("-i="),3) == 0) p->image_path = arg + 3;

@@ -3,28 +3,28 @@ namespace factor

static const cell free_list_count = 32;

-struct heap_free_list {
+struct free_list {
free_heap_block *small_blocks[free_list_count];
free_heap_block *large_blocks;
};

-template<typename Block> struct heap {
-bool secure_gc;
-segment *seg;
-heap_free_list free;
-mark_bits<Block> *state;
+template<typename Block> struct free_list_allocator {
+cell start;
+cell size;
+cell end;
+free_list free_blocks;
+mark_bits<Block> state;

-explicit heap(bool secure_gc_, cell size, bool executable_p);
-~heap();
+explicit free_list_allocator(cell start, cell size);

inline Block *first_block()
{
-return (Block *)seg->start;
+return (Block *)start;
}

inline Block *last_block()
{
-return (Block *)seg->end;
+return (Block *)end;
}

Block *next_block_after(heap_block *block)
@@ -38,16 +38,15 @@ template<typename Block> struct heap {
void assert_free_block(free_heap_block *block);
free_heap_block *find_free_block(cell size);
free_heap_block *split_free_block(free_heap_block *block, cell size);
-Block *heap_allot(cell size);
-void heap_free(Block *block);
-void mark_block(Block *block);
-void heap_usage(cell *used, cell *total_free, cell *max_free);
-cell heap_size();
+Block *allot(cell size);
+void free(Block *block);
+void usage(cell *used, cell *total_free, cell *max_free);
+cell occupied();

-template<typename Iterator> void sweep_heap(Iterator &iter);
-template<typename Iterator> void compact_heap(Iterator &iter);
+template<typename Iterator> void sweep(Iterator &iter);
+template<typename Iterator> void compact(Iterator &iter);

-template<typename Iterator> void iterate_heap(Iterator &iter)
+template<typename Iterator> void iterate(Iterator &iter)
{
Block *scan = first_block();
Block *end = last_block();
@@ -62,73 +61,63 @@ template<typename Block> struct heap {
}
};

-template<typename Block> void heap<Block>::clear_free_list()
+template<typename Block> void free_list_allocator<Block>::clear_free_list()
{
-memset(&free,0,sizeof(heap_free_list));
+memset(&free_blocks,0,sizeof(free_list));
}

-template<typename Block> heap<Block>::heap(bool secure_gc_, cell size, bool executable_p) : secure_gc(secure_gc_)
+template<typename Block>
+free_list_allocator<Block>::free_list_allocator(cell start_, cell size_) :
+start(start_), size(size_), end(start_ + size_), state(mark_bits<Block>(start_,size_))
{
-if(size > (1L << (sizeof(cell) * 8 - 6))) fatal_error("Heap too large",size);
-seg = new segment(align_page(size),executable_p);
-if(!seg) fatal_error("Out of memory in heap allocator",size);
-state = new mark_bits<Block>(seg->start,size);
clear_free_list();
}

-template<typename Block> heap<Block>::~heap()
-{
-delete seg;
-seg = NULL;
-delete state;
-state = NULL;
-}

-template<typename Block> void heap<Block>::add_to_free_list(free_heap_block *block)
+template<typename Block> void free_list_allocator<Block>::add_to_free_list(free_heap_block *block)
{
if(block->size() < free_list_count * block_granularity)
{
int index = block->size() / block_granularity;
-block->next_free = free.small_blocks[index];
-free.small_blocks[index] = block;
+block->next_free = free_blocks.small_blocks[index];
+free_blocks.small_blocks[index] = block;
}
else
{
-block->next_free = free.large_blocks;
-free.large_blocks = block;
+block->next_free = free_blocks.large_blocks;
+free_blocks.large_blocks = block;
}
}

-/* Called after reading the code heap from the image file, and after code heap
-compaction. Makes a free list consisting of one free block, at the very end. */
-template<typename Block> void heap<Block>::build_free_list(cell size)
+/* Called after reading the heap from the image file, and after heap compaction.
+Makes a free list consisting of one free block, at the very end. */
+template<typename Block> void free_list_allocator<Block>::build_free_list(cell size)
{
clear_free_list();
-free_heap_block *end = (free_heap_block *)(seg->start + size);
-end->set_free();
-end->set_size(seg->end - (cell)end);
-add_to_free_list(end);
+free_heap_block *last_block = (free_heap_block *)(start + size);
+last_block->set_free();
+last_block->set_size(end - (cell)last_block);
+add_to_free_list(last_block);
}

-template<typename Block> void heap<Block>::assert_free_block(free_heap_block *block)
+template<typename Block> void free_list_allocator<Block>::assert_free_block(free_heap_block *block)
{
#ifdef FACTOR_DEBUG
assert(block->free_p());
#endif
}

-template<typename Block> free_heap_block *heap<Block>::find_free_block(cell size)
+template<typename Block> free_heap_block *free_list_allocator<Block>::find_free_block(cell size)
{
cell attempt = size;

while(attempt < free_list_count * block_granularity)
{
int index = attempt / block_granularity;
-free_heap_block *block = free.small_blocks[index];
+free_heap_block *block = free_blocks.small_blocks[index];
if(block)
{
assert_free_block(block);
-free.small_blocks[index] = block->next_free;
+free_blocks.small_blocks[index] = block->next_free;
return block;
}

@@ -136,7 +125,7 @@ template<typename Block> free_heap_block *heap<Block>::find_free_block(cell size
}

free_heap_block *prev = NULL;
-free_heap_block *block = free.large_blocks;
+free_heap_block *block = free_blocks.large_blocks;

while(block)
{
@@ -146,7 +135,7 @@ template<typename Block> free_heap_block *heap<Block>::find_free_block(cell size
if(prev)
prev->next_free = block->next_free;
else
-free.large_blocks = block->next_free;
+free_blocks.large_blocks = block->next_free;
return block;
}

@@ -157,7 +146,7 @@ template<typename Block> free_heap_block *heap<Block>::find_free_block(cell size
return NULL;
}

-template<typename Block> free_heap_block *heap<Block>::split_free_block(free_heap_block *block, cell size)
+template<typename Block> free_heap_block *free_list_allocator<Block>::split_free_block(free_heap_block *block, cell size)
{
if(block->size() != size)
{
@@ -173,7 +162,7 @@ template<typename Block> free_heap_block *heap<Block>::split_free_block(free_hea
return block;
}

-template<typename Block> Block *heap<Block>::heap_allot(cell size)
+template<typename Block> Block *free_list_allocator<Block>::allot(cell size)
{
size = align(size,block_granularity);

@@ -187,20 +176,15 @@ template<typename Block> Block *heap<Block>::heap_allot(cell size)
return NULL;
}

-template<typename Block> void heap<Block>::heap_free(Block *block)
+template<typename Block> void free_list_allocator<Block>::free(Block *block)
{
free_heap_block *free_block = (free_heap_block *)block;
free_block->set_free();
add_to_free_list(free_block);
}

-template<typename Block> void heap<Block>::mark_block(Block *block)
-{
-state->set_marked_p(block);
-}

/* Compute total sum of sizes of free blocks, and size of largest free block */
-template<typename Block> void heap<Block>::heap_usage(cell *used, cell *total_free, cell *max_free)
+template<typename Block> void free_list_allocator<Block>::usage(cell *used, cell *total_free, cell *max_free)
{
*used = 0;
*total_free = 0;
@@ -227,34 +211,34 @@ template<typename Block> void heap<Block>::heap_usage(cell *used, cell *total_fr
}

/* The size of the heap after compaction */
-template<typename Block> cell heap<Block>::heap_size()
+template<typename Block> cell free_list_allocator<Block>::occupied()
{
Block *scan = first_block();
-Block *end = last_block();
+Block *last = last_block();

-while(scan != end)
+while(scan != last)
{
if(scan->free_p()) break;
else scan = next_block_after(scan);
}

-if(scan != end)
+if(scan != last)
{
free_heap_block *free_block = (free_heap_block *)scan;
assert(free_block->free_p());
-assert((cell)scan + free_block->size() == seg->end);
+assert((cell)scan + free_block->size() == end);

return (cell)scan - (cell)first_block();
}
else
-return seg->size;
+return size;
}

/* After code GC, all live code blocks are marked, so any
which are not marked can be reclaimed. */
template<typename Block>
template<typename Iterator>
-void heap<Block>::sweep_heap(Iterator &iter)
+void free_list_allocator<Block>::sweep(Iterator &iter)
{
this->clear_free_list();

@@ -276,7 +260,7 @@ void heap<Block>::sweep_heap(Iterator &iter)
else
prev = scan;
}
-else if(this->state->is_marked_p(scan))
+else if(this->state.marked_p(scan))
{
if(prev && prev->free_p())
this->add_to_free_list((free_heap_block *)prev);
@@ -305,17 +289,17 @@ void heap<Block>::sweep_heap(Iterator &iter)
}

/* The forwarding map must be computed first by calling
-state->compute_forwarding(). */
+state.compute_forwarding(). */
template<typename Block>
template<typename Iterator>
-void heap<Block>::compact_heap(Iterator &iter)
+void free_list_allocator<Block>::compact(Iterator &iter)
{
-heap_compactor<Block,Iterator> compactor(state,first_block(),iter);
-this->iterate_heap(compactor);
+heap_compactor<Block,Iterator> compactor(&state,first_block(),iter);
+this->iterate(compactor);

/* Now update the free list; there will be a single free block at
the end */
-this->build_free_list((cell)compactor.address - this->seg->start);
+this->build_free_list((cell)compactor.address - this->start);
}

}

@@ -89,7 +89,7 @@ void full_collector::trace_literal_references(code_block *compiled)
collections */
void full_collector::mark_code_block(code_block *compiled)
{
-this->code->mark_block(compiled);
+this->code->set_marked_p(compiled);
trace_literal_references(compiled);
}

@@ -108,7 +108,7 @@ void factor_vm::collect_full_impl(bool trace_contexts_p)
{
full_collector collector(this);

-code->state->clear_mark_bits();
+code->clear_mark_bits();

collector.trace_roots();
if(trace_contexts_p)

@@ -3,7 +3,7 @@ namespace factor

struct full_policy {
factor_vm *parent;
-zone *tenured;
+tenured_space *tenured;

full_policy(factor_vm *parent_) : parent(parent_), tenured(parent->data->tenured) {}


vm/image.cpp (11 changed lines)

@@ -23,8 +23,7 @@ void factor_vm::load_data_heap(FILE *file, image_header *h, vm_parameters *p)

init_data_heap(p->young_size,
p->aging_size,
-p->tenured_size,
-p->secure_gc);
+p->tenured_size);

clear_gc_stats();

@@ -52,7 +51,7 @@ void factor_vm::load_code_heap(FILE *file, image_header *h, vm_parameters *p)

if(h->code_size != 0)
{
-size_t bytes_read = fread(code->first_block(),1,h->code_size,file);
+size_t bytes_read = fread(code->allocator->first_block(),1,h->code_size,file);
if(bytes_read != h->code_size)
{
print_string("truncated image: ");
@@ -64,7 +63,7 @@ void factor_vm::load_code_heap(FILE *file, image_header *h, vm_parameters *p)
}
}

-code->build_free_list(h->code_size);
+code->allocator->build_free_list(h->code_size);
}

void factor_vm::data_fixup(cell *handle, cell data_relocation_base)
@@ -292,7 +291,7 @@ bool factor_vm::save_image(const vm_char *filename)
h.data_relocation_base = data->tenured->start;
h.data_size = data->tenured->here - data->tenured->start;
h.code_relocation_base = code->seg->start;
-h.code_size = code->heap_size();
+h.code_size = code->allocator->occupied();

h.true_object = true_object;
h.bignum_zero = bignum_zero;
@@ -306,7 +305,7 @@ bool factor_vm::save_image(const vm_char *filename)

if(fwrite(&h,sizeof(image_header),1,file) != 1) ok = false;
if(fwrite((void*)data->tenured->start,h.data_size,1,file) != 1) ok = false;
-if(fwrite(code->first_block(),h.code_size,1,file) != 1) ok = false;
+if(fwrite(code->allocator->first_block(),h.code_size,1,file) != 1) ok = false;
if(fclose(file)) ok = false;

if(!ok)

@@ -34,7 +34,6 @@ struct vm_parameters {
cell ds_size, rs_size;
cell young_size, aging_size, tenured_size;
cell code_size;
-bool secure_gc;
bool fep;
bool console;
bool signals;

@@ -95,7 +95,7 @@ template<typename Block> struct mark_bits {
}
}

-bool is_marked_p(Block *address)
+bool marked_p(Block *address)
{
return bitmap_elt(marked,address);
}
@@ -155,7 +155,7 @@ template<typename Block, typename Iterator> struct heap_compactor {

void operator()(Block *block, cell size)
{
-if(this->state->is_marked_p(block))
+if(this->state->marked_p(block))
{
memmove(address,block,size);
iter((Block *)address,size);
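
The two hunks above rename is_marked_p to marked_p on the mark_bits bitmap, which callers now reach through allocator->state. As a rough illustration of the idea (an assumption about the internals, not code from this commit), a mark bitmap keeps one bit per fixed-size granule of the managed region:

/* Illustrative sketch only -- not part of this patch. */
#include <algorithm>
#include <vector>

typedef unsigned long cell;
static const cell granularity = 16;    /* assumed granule size */

struct mark_bitmap_sketch {
	cell start;
	std::vector<bool> marked;          /* one bit per granule */

	mark_bitmap_sketch(cell start_, cell size) :
		start(start_), marked(size / granularity, false) {}

	cell index_of(void *address) { return ((cell)address - start) / granularity; }

	void set_marked_p(void *address) { marked[index_of(address)] = true; }
	bool marked_p(void *address) { return marked[index_of(address)]; }
	void clear_mark_bits() { std::fill(marked.begin(), marked.end(), false); }
};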

@@ -48,9 +48,11 @@ namespace factor
#include "bignumint.hpp"
#include "bignum.hpp"
#include "code_block.hpp"
-#include "zone.hpp"
+#include "bump_allocator.hpp"
+#include "mark_bits.hpp"
+#include "free_list_allocator.hpp"
#include "write_barrier.hpp"
#include "old_space.hpp"
#include "object_start_map.hpp"
#include "aging_space.hpp"
#include "tenured_space.hpp"
#include "data_heap.hpp"
@@ -61,8 +63,6 @@ namespace factor
#include "words.hpp"
#include "float_bits.hpp"
#include "io.hpp"
-#include "mark_bits.hpp"
-#include "heap.hpp"
#include "image.hpp"
#include "alien.hpp"
#include "code_heap.hpp"

@@ -1,17 +1,17 @@
namespace factor
{

-struct tenured_space : zone {
+struct tenured_space : bump_allocator {
object_start_map starts;

tenured_space(cell size, cell start) :
-zone(size,start), starts(size,start) {}
+bump_allocator(size,start), starts(size,start) {}

object *allot(cell size)
{
if(here + size > end) return NULL;

-object *obj = zone::allot(size);
+object *obj = bump_allocator::allot(size);
starts.record_object_start_offset(obj);
return obj;
}

@@ -3,7 +3,7 @@ namespace factor

struct to_tenured_policy {
factor_vm *myvm;
-zone *tenured;
+tenured_space *tenured;

to_tenured_policy(factor_vm *myvm_) : myvm(myvm_), tenured(myvm->data->tenured) {}


@@ -6,7 +6,6 @@ namespace factor
factor_vm::factor_vm() :
nursery(0,0),
profiling_p(false),
-secure_gc(false),
gc_off(false),
current_gc(NULL),
fep_disabled(false),

vm/vm.hpp (11 changed lines)

@@ -11,7 +11,7 @@ struct factor_vm
context *ctx;

/* New objects are allocated here */
-zone nursery;
+bump_allocator nursery;

/* Add this to a shifted address to compute write barrier offsets */
cell cards_offset;
@@ -39,9 +39,6 @@ struct factor_vm
unsigned int signal_fpu_status;
stack_frame *signal_callstack_top;

-/* Zeroes out deallocated memory; set by the -securegc command line argument */
-bool secure_gc;

/* A heap walk allows useful things to be done, like finding all
references to an object for debugging purposes. */
cell heap_scan_ptr;
@@ -221,7 +218,7 @@ struct factor_vm
//data heap
void init_card_decks();
void set_data_heap(data_heap *data_);
-void init_data_heap(cell young_size, cell aging_size, cell tenured_size, bool secure_gc_);
+void init_data_heap(cell young_size, cell aging_size, cell tenured_size);
void primitive_size();
cell binary_payload_start(object *pointer);
void primitive_data_room();
@@ -311,7 +308,7 @@ struct factor_vm
void print_callstack();
void dump_cell(cell x);
void dump_memory(cell from, cell to);
-void dump_zone(const char *name, zone *z);
+void dump_zone(const char *name, bump_allocator *z);
void dump_generations();
void dump_objects(cell type);
void find_data_references_step(cell *scan);
@@ -531,7 +528,7 @@ struct factor_vm
template<typename Iterator> void iterate_code_heap(Iterator &iter_)
{
code_heap_iterator<Iterator> iter(iter_);
-code->iterate_heap(iter);
+code->allocator->iterate(iter);
}

//callbacks