Big cleanup of literal table and relocation-related code

- new way to iterate over instruction_operands cleans up relocation code
- move some methods out of the VM class to clean up code
- if debugging is on, die earlier if attempting to allocate inside GC
- callback heap entries are now code_blocks
- replace block_granularity with data_alignment
db4
Slava Pestov 2009-12-02 00:48:41 -06:00
parent 260cc9e2ff
commit c6602bda27
25 changed files with 257 additions and 196 deletions

View File

@ -7,6 +7,10 @@ namespace factor
*/
inline object *factor_vm::allot_object(cell type, cell size)
{
#ifdef FACTOR_DEBUG
assert(!current_gc);
#endif
/* If the object is smaller than the nursery, allocate it in the nursery,
after a GC if needed */
if(nursery.size > size)

View File

@ -15,7 +15,7 @@ VM_C_API void box_boolean(bool value, factor_vm *parent)
VM_C_API bool to_boolean(cell value, factor_vm *parent)
{
return parent->to_boolean(value);
return to_boolean(value);
}
}

View File

@ -4,12 +4,7 @@ namespace factor
VM_C_API void box_boolean(bool value, factor_vm *vm);
VM_C_API bool to_boolean(cell value, factor_vm *vm);
inline cell factor_vm::tag_boolean(cell untagged)
{
return (untagged ? true_object : false_object);
}
inline bool factor_vm::to_boolean(cell value)
inline static bool to_boolean(cell value)
{
return value != false_object;
}

View File

@ -19,35 +19,44 @@ void factor_vm::init_callbacks(cell size)
callbacks = new callback_heap(size,this);
}
void callback_heap::update(callback *stub)
void callback_heap::update(code_block *stub)
{
tagged<array> code_template(parent->special_objects[CALLBACK_STUB]);
cell rel_class = untag_fixnum(array_nth(code_template.untagged(),1));
cell rel_type = untag_fixnum(array_nth(code_template.untagged(),2));
cell offset = untag_fixnum(array_nth(code_template.untagged(),3));
instruction_operand op(rel_class,offset + (cell)(stub + 1));
op.store_value((cell)(stub->compiled + 1));
relocation_entry rel(
(relocation_type)rel_type,
(relocation_class)rel_class,
offset);
flush_icache((cell)stub,stub->size);
instruction_operand op(rel,stub,0);
op.store_value((cell)callback_xt(stub));
stub->flush_icache();
}
callback *callback_heap::add(code_block *compiled)
code_block *callback_heap::add(cell owner)
{
tagged<array> code_template(parent->special_objects[CALLBACK_STUB]);
tagged<byte_array> insns(array_nth(code_template.untagged(),0));
cell size = array_capacity(insns.untagged());
cell bump = align(size,sizeof(cell)) + sizeof(callback);
cell bump = align(size + sizeof(code_block),data_alignment);
if(here + bump > seg->end) fatal_error("Out of callback space",0);
callback *stub = (callback *)here;
stub->compiled = compiled;
memcpy(stub + 1,insns->data<void>(),size);
stub->size = align(size,sizeof(cell));
free_heap_block *free_block = (free_heap_block *)here;
free_block->make_free(bump);
here += bump;
code_block *stub = (code_block *)free_block;
stub->owner = owner;
stub->literals = false_object;
stub->relocation = false_object;
memcpy(stub->xt(),insns->data<void>(),size);
update(stub);
return stub;
@ -57,9 +66,7 @@ void factor_vm::primitive_callback()
{
tagged<word> w(dpop());
w.untag_check(this);
callback *stub = callbacks->add(w->code);
box_alien(stub + 1);
box_alien(callbacks->add(w.value())->xt());
}
}

View File

@ -1,11 +1,28 @@
namespace factor
{
struct callback {
cell size;
code_block *compiled;
void *code() { return (void *)(this + 1); }
};
/* The callback heap is used to store the machine code that alien-callbacks
actually jump to when C code invokes them.
The callback heap has entries that look like code_blocks from the code heap,
but callback heap entries are allocated contiguously, never deallocated, and all
fields but the owner are set to false_object. The owner points to the callback
bottom word, whose XT is the callback body itself, generated by the optimizing
compiler. The machine code that follows a callback stub consists of a single
CALLBACK_STUB machine code template, which performs a jump to a "far" address
(on PowerPC and x86-64, it's loaded into a register first).
GC updates the CALLBACK_STUB code if the code block of the callback bottom word
is ever moved. The callback stub itself won't move, though, and is never
deallocated. This means that the callback stub itself is a stable function
pointer that C code can hold on to until the associated Factor VM exits.
Since callback stubs are GC roots, and are never deallocated, the associated
callback code in the code heap is also never deallocated.
The callback heap is not saved in the image. Running GC in a new session after
saving the image will deallocate any code heap entries that were only reachable
from the callback heap in the previous session when the image was saved. */
struct callback_heap {
segment *seg;
@ -15,18 +32,24 @@ struct callback_heap {
explicit callback_heap(cell size, factor_vm *parent);
~callback_heap();
callback *add(code_block *compiled);
void update(callback *stub);
callback *next(callback *stub)
void *callback_xt(code_block *stub)
{
return (callback *)((cell)stub + stub->size + sizeof(callback));
word *w = (word *)UNTAG(stub->owner);
return w->xt;
}
template<typename Iterator> void iterate(Iterator &iter)
void update(code_block *stub);
code_block *add(cell owner);
code_block *next(code_block *stub)
{
callback *scan = (callback *)seg->start;
callback *end = (callback *)here;
return (code_block *)((cell)stub + stub->size());
}
template<typename Iterator> void each_callback(Iterator &iter)
{
code_block *scan = (code_block *)seg->start;
code_block *end = (code_block *)here;
while(scan < end)
{
iter(scan);

View File

@ -1,6 +1,17 @@
namespace factor
{
/* Code block visitors iterate over sets of code blocks, applying a functor to
each one. The functor returns a new code_block pointer, which may or may not
equal the old one. This is stored back to the original location.
This is used by GC's sweep and compact phases, and the implementation of the
modify-code-heap primitive.
Iteration is driven by visit_*() methods. Some of them define GC roots:
- visit_context_code_blocks()
- visit_callback_code_blocks() */
template<typename Visitor> struct code_block_visitor {
factor_vm *parent;
Visitor visitor;
@ -12,7 +23,6 @@ template<typename Visitor> struct code_block_visitor {
void visit_object_code_block(object *obj);
void visit_embedded_code_pointers(code_block *compiled);
void visit_context_code_blocks();
void visit_callback_code_blocks();
};
template<typename Visitor>
@ -81,14 +91,11 @@ struct embedded_code_pointers_visitor {
explicit embedded_code_pointers_visitor(Visitor visitor_) : visitor(visitor_) {}
void operator()(relocation_entry rel, cell index, code_block *compiled)
void operator()(instruction_operand op)
{
relocation_type type = rel.rel_type();
relocation_type type = op.rel_type();
if(type == RT_XT || type == RT_XT_PIC || type == RT_XT_PIC_TAIL)
{
instruction_operand op(rel.rel_class(),rel.rel_offset() + (cell)compiled->xt());
op.store_code_block(visitor(op.load_code_block()));
}
}
};
@ -98,7 +105,7 @@ void code_block_visitor<Visitor>::visit_embedded_code_pointers(code_block *compi
if(!parent->code->needs_fixup_p(compiled))
{
embedded_code_pointers_visitor<Visitor> visitor(this->visitor);
parent->iterate_relocations(compiled,visitor);
compiled->each_instruction_operand(visitor);
}
}
@ -109,26 +116,4 @@ void code_block_visitor<Visitor>::visit_context_code_blocks()
parent->iterate_active_frames(call_frame_visitor);
}
template<typename Visitor>
struct callback_code_block_visitor {
callback_heap *callbacks;
Visitor visitor;
explicit callback_code_block_visitor(callback_heap *callbacks_, Visitor visitor_) :
callbacks(callbacks_), visitor(visitor_) {}
void operator()(callback *stub)
{
stub->compiled = visitor(stub->compiled);
callbacks->update(stub);
}
};
template<typename Visitor>
void code_block_visitor<Visitor>::visit_callback_code_blocks()
{
callback_code_block_visitor<Visitor> callback_visitor(parent->callbacks,visitor);
parent->callbacks->iterate(callback_visitor);
}
}

View File

@ -150,12 +150,9 @@ struct update_word_references_relocation_visitor {
explicit update_word_references_relocation_visitor(factor_vm *parent_) : parent(parent_) {}
void operator()(relocation_entry rel, cell index, code_block *compiled)
void operator()(instruction_operand op)
{
relocation_type type = rel.rel_type();
instruction_operand op(rel.rel_class(),rel.rel_offset() + (cell)compiled->xt());
switch(type)
switch(op.rel_type())
{
case RT_XT:
{
@ -201,7 +198,7 @@ void factor_vm::update_word_references(code_block *compiled)
else
{
update_word_references_relocation_visitor visitor(this);
iterate_relocations(compiled,visitor);
compiled->each_instruction_operand(visitor);
compiled->flush_icache();
}
}
@ -218,13 +215,13 @@ struct relocate_code_block_relocation_visitor {
explicit relocate_code_block_relocation_visitor(factor_vm *parent_) : parent(parent_) {}
void operator()(relocation_entry rel, cell index, code_block *compiled)
void operator()(instruction_operand op)
{
instruction_operand op(rel.rel_class(),rel.rel_offset() + (cell)compiled->xt());
array *literals = (parent->to_boolean(compiled->literals)
? untag<array>(compiled->literals) : NULL);
code_block *compiled = op.parent_code_block();
array *literals = (to_boolean(compiled->literals) ? untag<array>(compiled->literals) : NULL);
cell index = op.parameter_index();
switch(rel.rel_type())
switch(op.rel_type())
{
case RT_PRIMITIVE:
op.store_value(parent->compute_primitive_relocation(array_nth(literals,index)));
@ -245,7 +242,7 @@ struct relocate_code_block_relocation_visitor {
op.store_value(parent->compute_xt_pic_tail_relocation(array_nth(literals,index)));
break;
case RT_HERE:
op.store_value(parent->compute_here_relocation(array_nth(literals,index),rel.rel_offset(),compiled));
op.store_value(parent->compute_here_relocation(array_nth(literals,index),op.rel_offset(),compiled));
break;
case RT_THIS:
op.store_value((cell)compiled->xt());
@ -269,7 +266,7 @@ struct relocate_code_block_relocation_visitor {
op.store_value(parent->decks_offset);
break;
default:
critical_error("Bad rel type",rel.rel_type());
critical_error("Bad rel type",op.rel_type());
break;
}
}
@ -280,23 +277,24 @@ void factor_vm::relocate_code_block(code_block *compiled)
{
code->needs_fixup.erase(compiled);
relocate_code_block_relocation_visitor visitor(this);
iterate_relocations(compiled,visitor);
compiled->each_instruction_operand(visitor);
compiled->flush_icache();
}
/* Fixup labels. This is done at compile time, not image load time */
void factor_vm::fixup_labels(array *labels, code_block *compiled)
{
cell i;
cell size = array_capacity(labels);
for(i = 0; i < size; i += 3)
for(cell i = 0; i < size; i += 3)
{
cell rel_class = untag_fixnum(array_nth(labels,i));
relocation_class rel_class = (relocation_class)untag_fixnum(array_nth(labels,i));
cell offset = untag_fixnum(array_nth(labels,i + 1));
cell target = untag_fixnum(array_nth(labels,i + 2));
instruction_operand op(rel_class,offset + (cell)compiled->xt());
relocation_entry new_entry(RT_HERE,rel_class,offset);
instruction_operand op(new_entry,compiled,0);
op.store_value(target + (cell)compiled->xt());
}
}

View File

@ -48,6 +48,24 @@ struct code_block
{
factor::flush_icache((cell)this,size());
}
template<typename Iterator> void each_instruction_operand(Iterator &iter)
{
if(to_boolean(relocation))
{
byte_array *rels = (byte_array *)UNTAG(relocation);
cell index = 0;
cell length = (rels->capacity >> TAG_BITS) / sizeof(relocation_entry);
for(cell i = 0; i < length; i++)
{
relocation_entry rel = rels->data<relocation_entry>()[i];
iter(instruction_operand(rel,this,index));
index += rel.number_of_parameters();
}
}
}
};
}

View File

@ -59,6 +59,11 @@ void code_heap::code_heap_free(code_block *compiled)
allocator->free(compiled);
}
void code_heap::flush_icache()
{
factor::flush_icache(seg->start,seg->size);
}
/* Allocate a code heap during startup */
void factor_vm::init_code_heap(cell size)
{
@ -86,7 +91,7 @@ defining a new word. */
void factor_vm::update_code_heap_words()
{
word_updater updater(this);
iterate_code_heap(updater);
each_code_block(updater);
}
void factor_vm::primitive_modify_code_heap()
@ -171,7 +176,7 @@ struct stack_trace_stripper {
void factor_vm::primitive_strip_stack_traces()
{
stack_trace_stripper stripper;
iterate_code_heap(stripper);
each_code_block(stripper);
}
}

View File

@ -26,6 +26,7 @@ struct code_heap {
void set_marked_p(code_block *compiled);
void clear_mark_bits();
void code_heap_free(code_block *compiled);
void flush_icache();
};
struct code_heap_room {

View File

@ -114,17 +114,15 @@ struct code_block_compaction_relocation_visitor {
slot_forwarder(slot_forwarder_),
code_forwarder(code_forwarder_) {}
void operator()(relocation_entry rel, cell index, code_block *compiled)
void operator()(instruction_operand op)
{
relocation_type type = rel.rel_type();
instruction_operand op(rel.rel_class(),rel.rel_offset() + (cell)compiled->xt());
code_block *compiled = op.parent_code_block();
array *literals = (to_boolean(compiled->literals) ? untag<array>(compiled->literals) : NULL);
cell index = op.parameter_index();
array *literals = (parent->to_boolean(compiled->literals)
? untag<array>(compiled->literals) : NULL);
cell old_offset = op.rel_offset() + (cell)old_address->xt();
cell old_offset = rel.rel_offset() + (cell)old_address->xt();
switch(type)
switch(op.rel_type())
{
case RT_IMMEDIATE:
op.store_value(slot_forwarder.visit_pointer(op.load_value(old_offset)));
@ -135,7 +133,7 @@ struct code_block_compaction_relocation_visitor {
op.store_code_block(code_forwarder.visit_code_block(op.load_code_block(old_offset)));
break;
case RT_HERE:
op.store_value(parent->compute_here_relocation(array_nth(literals,index),rel.rel_offset(),compiled));
op.store_value(parent->compute_here_relocation(array_nth(literals,index),op.rel_offset(),compiled));
break;
case RT_THIS:
op.store_value((cell)compiled->xt());
@ -173,7 +171,7 @@ struct code_block_compaction_updater {
slot_forwarder.visit_code_block_objects(new_address);
code_block_compaction_relocation_visitor<SlotForwarder> visitor(parent,old_address,slot_forwarder,code_forwarder);
parent->iterate_relocations(new_address,visitor);
new_address->each_instruction_operand(visitor);
}
};
@ -215,7 +213,6 @@ void factor_vm::collect_compact_impl(bool trace_contexts_p)
{
slot_forwarder.visit_contexts();
code_forwarder.visit_context_code_blocks();
code_forwarder.visit_callback_code_blocks();
}
update_code_roots_for_compaction();
@ -252,10 +249,7 @@ void factor_vm::collect_compact_code_impl(bool trace_contexts_p)
code_block_visitor<forwarder<code_block> > code_forwarder(this,forwarder<code_block>(code_forwarding_map));
if(trace_contexts_p)
{
code_forwarder.visit_context_code_blocks();
code_forwarder.visit_callback_code_blocks();
}
/* Update code heap references in data heap */
object_grow_heap_updater updater(code_forwarder);
@ -270,4 +264,22 @@ void factor_vm::collect_compact_code_impl(bool trace_contexts_p)
update_code_roots_for_compaction();
}
void factor_vm::collect_compact(bool trace_contexts_p)
{
collect_mark_impl(trace_contexts_p);
collect_compact_impl(trace_contexts_p);
code->flush_icache();
}
void factor_vm::collect_growing_heap(cell requested_bytes, bool trace_contexts_p)
{
/* Grow the data heap and copy all live objects to the new heap. */
data_heap *old = data;
set_data_heap(data->grow(requested_bytes));
collect_mark_impl(trace_contexts_p);
collect_compact_code_impl(trace_contexts_p);
code->flush_icache();
delete old;
}
}

View File

@ -30,8 +30,8 @@ void free_list::add_to_free_list(free_heap_block *block)
free_block_count++;
free_space += size;
if(size < free_list_count * block_granularity)
small_blocks[size / block_granularity].push_back(block);
if(size < free_list_count * data_alignment)
small_blocks[size / data_alignment].push_back(block);
else
large_blocks.insert(block);
}
@ -39,9 +39,9 @@ void free_list::add_to_free_list(free_heap_block *block)
free_heap_block *free_list::find_free_block(cell size)
{
/* Check small free lists */
if(size / block_granularity < free_list_count)
if(size / data_alignment < free_list_count)
{
std::vector<free_heap_block *> &blocks = small_blocks[size / block_granularity];
std::vector<free_heap_block *> &blocks = small_blocks[size / data_alignment];
if(blocks.size() == 0)
{
/* Round up to a multiple of 'size' */

View File

@ -84,7 +84,7 @@ template<typename Block> bool free_list_allocator<Block>::can_allot_p(cell size)
template<typename Block> Block *free_list_allocator<Block>::allot(cell size)
{
size = align(size,block_granularity);
size = align(size,data_alignment);
free_heap_block *block = free_blocks.find_free_block(size);
if(block)

View File

@ -27,11 +27,6 @@ void full_collector::trace_context_code_blocks()
code_visitor.visit_context_code_blocks();
}
void full_collector::trace_callback_code_blocks()
{
code_visitor.visit_callback_code_blocks();
}
void full_collector::trace_object_code_block(object *obj)
{
code_visitor.visit_object_code_block(obj);
@ -51,7 +46,7 @@ void factor_vm::update_code_roots_for_sweep()
for(; iter < end; iter++)
{
code_root *root = *iter;
code_block *block = (code_block *)(root->value & -block_granularity);
code_block *block = (code_block *)(root->value & -data_alignment);
if(root->valid && !state->marked_p(block))
root->valid = false;
}
@ -70,7 +65,7 @@ void factor_vm::update_code_roots_for_compaction()
for(; iter < end; iter++)
{
code_root *root = *iter;
code_block *block = (code_block *)(root->value & -block_granularity);
code_block *block = (code_block *)(root->value & -data_alignment);
/* Offset of return address within 16-byte allocation line */
cell offset = root->value - (cell)block;
@ -99,7 +94,6 @@ void factor_vm::collect_mark_impl(bool trace_contexts_p)
{
collector.trace_contexts();
collector.trace_context_code_blocks();
collector.trace_callback_code_blocks();
}
while(!mark_stack.empty())
@ -149,25 +143,7 @@ void factor_vm::collect_full(bool trace_contexts_p)
current_gc->event->op = collect_compact_op;
collect_compact_impl(trace_contexts_p);
}
flush_icache(code->seg->start,code->seg->size);
}
void factor_vm::collect_compact(bool trace_contexts_p)
{
collect_mark_impl(trace_contexts_p);
collect_compact_impl(trace_contexts_p);
flush_icache(code->seg->start,code->seg->size);
}
void factor_vm::collect_growing_heap(cell requested_bytes, bool trace_contexts_p)
{
/* Grow the data heap and copy all live objects to the new heap. */
data_heap *old = data;
set_data_heap(data->grow(requested_bytes));
collect_mark_impl(trace_contexts_p);
collect_compact_code_impl(trace_contexts_p);
flush_icache(code->seg->start,code->seg->size);
delete old;
code->flush_icache();
}
}

View File

@ -49,7 +49,6 @@ struct full_collector : collector<tenured_space,full_policy> {
explicit full_collector(factor_vm *parent_);
void trace_code_block(code_block *compiled);
void trace_context_code_blocks();
void trace_callback_code_blocks();
void trace_object_code_block(object *obj);
};

View File

@ -122,10 +122,10 @@ struct object_fixupper {
alien *ptr = (alien *)obj;
if(!parent->to_boolean(ptr->base))
ptr->expired = parent->true_object;
else
if(to_boolean(ptr->base))
ptr->update_address();
else
ptr->expired = parent->true_object;
break;
}
case DLL_TYPE:
@ -175,17 +175,15 @@ struct code_block_fixup_relocation_visitor {
data_visitor(slot_visitor<data_fixupper>(parent_,data_fixupper(data_offset_))),
code_visitor(code_fixupper(code_offset_)) {}
void operator()(relocation_entry rel, cell index, code_block *compiled)
void operator()(instruction_operand op)
{
relocation_type type = rel.rel_type();
instruction_operand op(rel.rel_class(),rel.rel_offset() + (cell)compiled->xt());
array *literals = (parent->to_boolean(compiled->literals)
? untag<array>(compiled->literals) : NULL);
code_block *compiled = op.parent_code_block();
array *literals = (to_boolean(compiled->literals) ? untag<array>(compiled->literals) : NULL);
cell index = op.parameter_index();
cell old_offset = (cell)rel.rel_offset() + (cell)compiled->xt() - code_offset;
cell old_offset = op.rel_offset() + (cell)compiled->xt() - code_offset;
switch(type)
switch(op.rel_type())
{
case RT_IMMEDIATE:
op.store_value(data_visitor.visit_pointer(op.load_value(old_offset)));
@ -202,7 +200,7 @@ struct code_block_fixup_relocation_visitor {
op.store_value(parent->compute_dlsym_relocation(literals,index));
break;
case RT_HERE:
op.store_value(parent->compute_here_relocation(array_nth(literals,index),rel.rel_offset(),compiled));
op.store_value(parent->compute_here_relocation(array_nth(literals,index),op.rel_offset(),compiled));
break;
case RT_THIS:
op.store_value((cell)compiled->xt());
@ -226,7 +224,7 @@ struct code_block_fixup_relocation_visitor {
op.store_value(parent->decks_offset);
break;
default:
critical_error("Bad rel type",rel.rel_type());
critical_error("Bad rel type",op.rel_type());
break;
}
}
@ -248,7 +246,7 @@ struct code_block_fixupper {
data_visitor.visit_code_block_objects(compiled);
code_block_fixup_relocation_visitor code_visitor(parent,data_offset,code_offset);
parent->iterate_relocations(compiled,code_visitor);
compiled->each_instruction_operand(code_visitor);
}
};

View File

@ -3,6 +3,9 @@
namespace factor
{
instruction_operand::instruction_operand(relocation_entry rel_, code_block *compiled_, cell index_) :
rel(rel_), compiled(compiled_), index(index_), pointer((cell)compiled_->xt() + rel_.rel_offset()) {}
/* Load a 32-bit value from a PowerPC LIS/ORI sequence */
fixnum instruction_operand::load_value_2_2()
{
@ -22,7 +25,7 @@ fixnum instruction_operand::load_value_masked(cell mask, fixnum shift)
fixnum instruction_operand::load_value(cell relative_to)
{
switch(rel_class)
switch(rel.rel_class())
{
case RC_ABSOLUTE_CELL:
return *(cell *)pointer;
@ -45,7 +48,7 @@ fixnum instruction_operand::load_value(cell relative_to)
case RC_INDIRECT_ARM_PC:
return load_value_masked(rel_indirect_arm_mask,0) + relative_to + sizeof(cell) * 2;
default:
critical_error("Bad rel class",rel_class);
critical_error("Bad rel class",rel.rel_class());
return 0;
}
}
@ -90,7 +93,7 @@ void instruction_operand::store_value(fixnum absolute_value)
{
fixnum relative_value = absolute_value - pointer;
switch(rel_class)
switch(rel.rel_class())
{
case RC_ABSOLUTE_CELL:
*(cell *)pointer = absolute_value;
@ -123,7 +126,7 @@ void instruction_operand::store_value(fixnum absolute_value)
store_value_masked(relative_value - sizeof(cell) * 2,rel_indirect_arm_mask,0);
break;
default:
critical_error("Bad rel class",rel_class);
critical_error("Bad rel class",rel.rel_class());
break;
}
}

View File

@ -67,7 +67,7 @@ static const cell rel_relative_arm_3_mask = 0xffffff;
struct relocation_entry {
u32 value;
relocation_entry(u32 value_) : value(value_) {}
explicit relocation_entry(u32 value_) : value(value_) {}
relocation_entry(relocation_type rel_type,
relocation_class rel_class,
@ -120,11 +120,32 @@ struct relocation_entry {
};
struct instruction_operand {
cell rel_class;
relocation_entry rel;
code_block *compiled;
cell index;
cell pointer;
instruction_operand(cell rel_class_, cell pointer_) :
rel_class(rel_class_), pointer(pointer_) {}
instruction_operand(relocation_entry rel_, code_block *compiled_, cell index_);
relocation_type rel_type()
{
return rel.rel_type();
}
cell rel_offset()
{
return rel.rel_offset();
}
cell parameter_index()
{
return index;
}
code_block *parent_code_block()
{
return compiled;
}
fixnum load_value_2_2();
fixnum load_value_masked(cell mask, fixnum shift);

View File

@ -28,14 +28,11 @@ void jit::emit_relocation(cell code_template_)
cell capacity = array_capacity(code_template.untagged());
for(cell i = 1; i < capacity; i += 3)
{
cell rel_class = array_nth(code_template.untagged(),i);
cell rel_type = array_nth(code_template.untagged(),i + 1);
relocation_class rel_class = (relocation_class)untag_fixnum(array_nth(code_template.untagged(),i));
relocation_type rel_type = (relocation_type)untag_fixnum(array_nth(code_template.untagged(),i + 1));
cell offset = array_nth(code_template.untagged(),i + 2);
relocation_entry new_entry(
(relocation_type)untag_fixnum(rel_type),
(relocation_class)untag_fixnum(rel_class),
code.count + untag_fixnum(offset));
relocation_entry new_entry(rel_type,rel_class,code.count + untag_fixnum(offset));
relocation.append_bytes(&new_entry,sizeof(relocation_entry));
}
}

View File

@ -1,7 +1,6 @@
namespace factor
{
const int block_granularity = 16;
const int mark_bits_granularity = sizeof(cell) * 8;
const int mark_bits_mask = sizeof(cell) * 8 - 1;
@ -25,7 +24,7 @@ template<typename Block> struct mark_bits {
explicit mark_bits(cell size_, cell start_) :
size(size_),
start(start_),
bits_size(size / block_granularity / mark_bits_granularity),
bits_size(size / data_alignment / mark_bits_granularity),
marked(new cell[bits_size]),
forwarding(new cell[bits_size])
{
@ -43,12 +42,12 @@ template<typename Block> struct mark_bits {
cell block_line(Block *address)
{
return (((cell)address - start) / block_granularity);
return (((cell)address - start) / data_alignment);
}
Block *line_block(cell line)
{
return (Block *)(line * block_granularity + start);
return (Block *)(line * data_alignment + start);
}
std::pair<cell,cell> bitmap_deref(Block *address)

View File

@ -49,6 +49,7 @@ namespace factor
#include "errors.hpp"
#include "bignumint.hpp"
#include "bignum.hpp"
#include "booleans.hpp"
#include "instruction_operands.hpp"
#include "code_blocks.hpp"
#include "bump_allocator.hpp"
@ -92,7 +93,6 @@ namespace factor
#include "callstack.hpp"
#include "arrays.hpp"
#include "math.hpp"
#include "booleans.hpp"
#include "byte_arrays.hpp"
#include "jit.hpp"
#include "quotations.hpp"

View File

@ -60,7 +60,7 @@ void object_start_map::update_card_for_sweep(cell index, u16 mask)
cell offset = object_start_offsets[index];
if(offset != card_starts_inside_object)
{
mask >>= (offset / block_granularity);
mask >>= (offset / data_alignment);
if(mask == 0)
{
@ -70,7 +70,7 @@ void object_start_map::update_card_for_sweep(cell index, u16 mask)
else
{
/* Move the object start forward if necessary */
object_start_offsets[index] = offset + (rightmost_set_bit(mask) * block_granularity);
object_start_offsets[index] = offset + (rightmost_set_bit(mask) * data_alignment);
}
}
}

View File

@ -93,7 +93,7 @@ bool quotation_jit::stack_frame_p()
switch(tagged<object>(obj).type())
{
case WORD_TYPE:
if(!parent->to_boolean(untag<word>(obj)->subprimitive))
if(!to_boolean(untag<word>(obj)->subprimitive))
return true;
break;
case QUOTATION_TYPE:
@ -154,7 +154,7 @@ void quotation_jit::iterate_quotation()
{
case WORD_TYPE:
/* Intrinsics */
if(parent->to_boolean(obj.as<word>()->subprimitive))
if(to_boolean(obj.as<word>()->subprimitive))
emit_subprimitive(obj.value());
/* The (execute) primitive is special-cased */
else if(obj.value() == parent->special_objects[JIT_EXECUTE_WORD])

View File

@ -1,6 +1,22 @@
namespace factor
{
/* Slot visitors iterate over the slots of an object, applying a functor to
each one that is a non-immediate slot. The pointer is untagged first. The
functor returns a new untagged object pointer. The return value may or may not equal the old one,
however the new pointer receives the same tag before being stored back to the
original location.
Slots storing immediate values are left unchanged and the visitor does not inspect
them.
This is used by GC's copying, sweep and compact phases, and the implementation
of the become primitive.
Iteration is driven by visit_*() methods. Some of them define GC roots:
- visit_roots()
- visit_contexts() */
template<typename Visitor> struct slot_visitor {
factor_vm *parent;
Visitor visitor;
@ -15,6 +31,7 @@ template<typename Visitor> struct slot_visitor {
void visit_stack_elements(segment *region, cell *top);
void visit_data_roots();
void visit_bignum_roots();
void visit_callback_roots();
void visit_roots();
void visit_contexts();
void visit_code_block_objects(code_block *compiled);
@ -92,6 +109,28 @@ void slot_visitor<Visitor>::visit_bignum_roots()
}
}
template<typename Visitor>
struct callback_slot_visitor {
callback_heap *callbacks;
slot_visitor<Visitor> *visitor;
explicit callback_slot_visitor(callback_heap *callbacks_, slot_visitor<Visitor> *visitor_) :
callbacks(callbacks_), visitor(visitor_) {}
void operator()(code_block *stub)
{
visitor->visit_handle(&stub->owner);
callbacks->update(stub);
}
};
template<typename Visitor>
void slot_visitor<Visitor>::visit_callback_roots()
{
callback_slot_visitor<Visitor> callback_visitor(parent->callbacks,this);
parent->callbacks->each_callback(callback_visitor);
}
template<typename Visitor>
void slot_visitor<Visitor>::visit_roots()
{
@ -102,6 +141,7 @@ void slot_visitor<Visitor>::visit_roots()
visit_data_roots();
visit_bignum_roots();
visit_callback_roots();
for(cell i = 0; i < special_object_count; i++)
visit_handle(&parent->special_objects[i]);
@ -130,13 +170,10 @@ struct literal_references_visitor {
explicit literal_references_visitor(slot_visitor<Visitor> *visitor_) : visitor(visitor_) {}
void operator()(relocation_entry rel, cell index, code_block *compiled)
void operator()(instruction_operand op)
{
if(rel.rel_type() == RT_IMMEDIATE)
{
instruction_operand op(rel.rel_class(),rel.rel_offset() + (cell)compiled->xt());
if(op.rel_type() == RT_IMMEDIATE)
op.store_value(visitor->visit_pointer(op.load_value()));
}
}
};
@ -154,7 +191,7 @@ void slot_visitor<Visitor>::visit_embedded_literals(code_block *compiled)
if(!parent->code->needs_fixup_p(compiled))
{
literal_references_visitor<Visitor> visitor(this);
parent->iterate_relocations(compiled,visitor);
compiled->each_instruction_operand(visitor);
}
}

View File

@ -374,8 +374,11 @@ struct factor_vm
//booleans
void box_boolean(bool value);
bool to_boolean(cell value);
inline cell tag_boolean(cell untagged);
inline cell tag_boolean(cell untagged)
{
return (untagged ? true_object : false_object);
}
//byte arrays
byte_array *allot_byte_array(cell size);
@ -511,25 +514,6 @@ struct factor_vm
cell compute_context_relocation();
cell compute_vm_relocation(cell arg);
cell code_block_owner(code_block *compiled);
template<typename Iterator> void iterate_relocations(code_block *compiled, Iterator &iter)
{
if(to_boolean(compiled->relocation))
{
byte_array *relocation = (byte_array *)UNTAG(compiled->relocation);
cell index = 0;
cell length = (relocation->capacity >> TAG_BITS) / sizeof(relocation_entry);
for(cell i = 0; i < length; i++)
{
relocation_entry rel = relocation->data<relocation_entry>()[i];
iter(rel,index,compiled);
index += rel.number_of_parameters();
}
}
}
void update_word_references(code_block *compiled);
void check_code_address(cell address);
void relocate_code_block(code_block *compiled);
@ -553,8 +537,7 @@ struct factor_vm
void primitive_code_room();
void primitive_strip_stack_traces();
/* Apply a function to every code block */
template<typename Iterator> void iterate_code_heap(Iterator &iter)
template<typename Iterator> void each_code_block(Iterator &iter)
{
code->allocator->iterate(iter);
}