VM: Refactor compaction.cpp to Factor style

db4
Erik Charlebois 2013-05-11 21:52:44 -04:00
parent 66976a12bf
commit 2bed6f6a25
1 changed file with 237 additions and 271 deletions


@@ -10,52 +10,45 @@ struct compaction_fixup {
const object** data_finger;
const code_block** code_finger;
-explicit compaction_fixup(
-mark_bits<object> *data_forwarding_map_,
+explicit compaction_fixup(mark_bits<object>* data_forwarding_map_,
mark_bits<code_block>* code_forwarding_map_,
const object** data_finger_,
-const code_block **code_finger_) :
-data_forwarding_map(data_forwarding_map_),
+const code_block** code_finger_)
+: data_forwarding_map(data_forwarding_map_),
code_forwarding_map(code_forwarding_map_),
data_finger(data_finger_),
code_finger(code_finger_) {}
-object *fixup_data(object *obj)
-{
+object* fixup_data(object* obj) {
return data_forwarding_map->forward_block(obj);
}
-code_block *fixup_code(code_block *compiled)
-{
+code_block* fixup_code(code_block* compiled) {
return code_forwarding_map->forward_block(compiled);
}
-object *translate_data(const object *obj)
-{
+object* translate_data(const object* obj) {
if (obj < *data_finger)
return fixup_data((object*)obj);
else
return (object*)obj;
}
-code_block *translate_code(const code_block *compiled)
-{
+code_block* translate_code(const code_block* compiled) {
if (compiled < *code_finger)
return fixup_code((code_block*)compiled);
else
return (code_block*)compiled;
}
-cell size(object *obj)
-{
+cell size(object* obj) {
if (data_forwarding_map->marked_p(obj))
return obj->size(*this);
else
return data_forwarding_map->unmarked_block_size(obj);
}
-cell size(code_block *compiled)
-{
+cell size(code_block* compiled) {
if (code_forwarding_map->marked_p(compiled))
return compiled->size(*this);
else
@@ -68,13 +61,13 @@ struct object_compaction_updater {
compaction_fixup fixup;
object_start_map* starts;
-explicit object_compaction_updater(factor_vm *parent_, compaction_fixup fixup_) :
-parent(parent_),
+explicit object_compaction_updater(factor_vm* parent_,
+compaction_fixup fixup_)
+: parent(parent_),
fixup(fixup_),
starts(&parent->data->tenured->starts) {}
-void operator()(object *old_address, object *new_address, cell size)
-{
+void operator()(object* old_address, object* new_address, cell size) {
slot_visitor<compaction_fixup> slot_forwarder(parent, fixup);
slot_forwarder.visit_slots(new_address);
@@ -85,39 +78,33 @@ struct object_compaction_updater {
}
};
-template<typename Fixup>
-struct code_block_compaction_relocation_visitor {
+template <typename Fixup> struct code_block_compaction_relocation_visitor {
factor_vm* parent;
code_block* old_address;
Fixup fixup;
explicit code_block_compaction_relocation_visitor(factor_vm* parent_,
code_block* old_address_,
-Fixup fixup_) :
-parent(parent_),
-old_address(old_address_),
-fixup(fixup_) {}
+Fixup fixup_)
+: parent(parent_), old_address(old_address_), fixup(fixup_) {}
-void operator()(instruction_operand op)
-{
+void operator()(instruction_operand op) {
cell old_offset = op.rel_offset() + (cell) old_address->entry_point();
-switch(op.rel_type())
-{
-case RT_LITERAL:
-{
+switch (op.rel_type()) {
+case RT_LITERAL: {
cell value = op.load_value(old_offset);
if (immediate_p(value))
op.store_value(value);
else
-op.store_value(RETAG(fixup.fixup_data(untag<object>(value)),TAG(value)));
+op.store_value(
+RETAG(fixup.fixup_data(untag<object>(value)), TAG(value)));
break;
}
case RT_ENTRY_POINT:
case RT_ENTRY_POINT_PIC:
case RT_ENTRY_POINT_PIC_TAIL:
-case RT_HERE:
-{
+case RT_HERE: {
cell value = op.load_value(old_offset);
cell offset = TAG(value);
code_block* compiled = (code_block*)UNTAG(value);
@@ -136,27 +123,25 @@ struct code_block_compaction_relocation_visitor {
}
};
-template<typename Fixup>
-struct code_block_compaction_updater {
+template <typename Fixup> struct code_block_compaction_updater {
factor_vm* parent;
Fixup fixup;
slot_visitor<Fixup> data_forwarder;
code_block_visitor<Fixup> code_forwarder;
-explicit code_block_compaction_updater(factor_vm *parent_,
-Fixup fixup_,
-slot_visitor<Fixup> data_forwarder_,
-code_block_visitor<Fixup> code_forwarder_) :
-parent(parent_),
+explicit code_block_compaction_updater(
+factor_vm* parent_, Fixup fixup_, slot_visitor<Fixup> data_forwarder_,
+code_block_visitor<Fixup> code_forwarder_)
+: parent(parent_),
fixup(fixup_),
data_forwarder(data_forwarder_),
code_forwarder(code_forwarder_) {}
-void operator()(code_block *old_address, code_block *new_address, cell size)
-{
+void operator()(code_block* old_address, code_block* new_address, cell size) {
data_forwarder.visit_code_block_objects(new_address);
-code_block_compaction_relocation_visitor<Fixup> visitor(parent,old_address,fixup);
+code_block_compaction_relocation_visitor<Fixup> visitor(parent, old_address,
+fixup);
new_address->each_instruction_operand(visitor);
}
};
@@ -164,41 +149,37 @@ struct code_block_compaction_updater {
/* After a compaction, invalidate any code heap roots which are not
marked, and also slide the valid roots up so that call sites can be updated
correctly in case an inline cache compilation triggered compaction. */
-void factor_vm::update_code_roots_for_compaction()
-{
+void factor_vm::update_code_roots_for_compaction() {
std::vector<code_root*>::const_iterator iter = code_roots.begin();
std::vector<code_root*>::const_iterator end = code_roots.end();
mark_bits<code_block>* state = &code->allocator->state;
-for(; iter < end; iter++)
-{
+for (; iter < end; iter++) {
code_root* root = *iter;
code_block* block = (code_block*)(root->value & (~data_alignment + 1));
/* Offset of return address within 16-byte allocation line */
cell offset = root->value - (cell) block;
-if(root->valid && state->marked_p(block))
-{
+if (root->valid && state->marked_p(block)) {
block = state->forward_block(block);
root->value = (cell) block + offset;
-}
-else
+} else
root->valid = false;
}
}
/* Compact data and code heaps */
-void factor_vm::collect_compact_impl(bool trace_contexts_p)
-{
+void factor_vm::collect_compact_impl(bool trace_contexts_p) {
gc_event* event = current_gc->event;
#if defined(FACTOR_DEBUG)
code->verify_all_blocks_set();
#endif
-if(event) event->started_compaction();
+if (event)
+event->started_compaction();
tenured_space* tenured = data->tenured;
mark_bits<object>* data_forwarding_map = &tenured->state;
@@ -211,7 +192,8 @@ void factor_vm::collect_compact_impl(bool trace_contexts_p)
const object* data_finger = tenured->first_block();
const code_block* code_finger = code->allocator->first_block();
-compaction_fixup fixup(data_forwarding_map,code_forwarding_map,&data_finger,&code_finger);
+compaction_fixup fixup(data_forwarding_map, code_forwarding_map, &data_finger,
+&code_finger);
slot_visitor<compaction_fixup> data_forwarder(this, fixup);
code_block_visitor<compaction_fixup> code_forwarder(this, fixup);
@@ -227,12 +209,12 @@ void factor_vm::collect_compact_impl(bool trace_contexts_p)
/* Slide everything in the code heap up, and update data and code heap
pointers inside code blocks. */
-code_block_compaction_updater<compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
+code_block_compaction_updater<compaction_fixup> code_block_updater(
+this, fixup, data_forwarder, code_forwarder);
code->allocator->compact(code_block_updater, fixup, &code_finger);
data_forwarder.visit_roots();
-if(trace_contexts_p)
-{
+if (trace_contexts_p) {
data_forwarder.visit_contexts();
code_forwarder.visit_context_code_blocks();
}
@@ -242,7 +224,8 @@ void factor_vm::collect_compact_impl(bool trace_contexts_p)
code->initialize_all_blocks_set();
-if(event) event->ended_compaction();
+if (event)
+event->ended_compaction();
}
struct code_compaction_fixup {
@@ -252,40 +235,27 @@ struct code_compaction_fixup {
const code_block** code_finger;
explicit code_compaction_fixup(mark_bits<code_block>* code_forwarding_map_,
-const code_block **code_finger_) :
-code_forwarding_map(code_forwarding_map_),
-code_finger(code_finger_) {}
+const code_block** code_finger_)
+: code_forwarding_map(code_forwarding_map_), code_finger(code_finger_) {}
-object *fixup_data(object *obj)
-{
-return obj;
-}
+object* fixup_data(object* obj) { return obj; }
-code_block *fixup_code(code_block *compiled)
-{
+code_block* fixup_code(code_block* compiled) {
return code_forwarding_map->forward_block(compiled);
}
-object *translate_data(const object *obj)
-{
-return fixup_data((object *)obj);
-}
+object* translate_data(const object* obj) { return fixup_data((object*)obj); }
-code_block *translate_code(const code_block *compiled)
-{
+code_block* translate_code(const code_block* compiled) {
if (compiled < *code_finger)
return fixup_code((code_block*)compiled);
else
return (code_block*)compiled;
}
-cell size(object *obj)
-{
-return obj->size();
-}
+cell size(object* obj) { return obj->size(); }
-cell size(code_block *compiled)
-{
+cell size(code_block* compiled) {
if (code_forwarding_map->marked_p(compiled))
return compiled->size(*this);
else
@@ -296,18 +266,15 @@ struct code_compaction_fixup {
struct object_grow_heap_updater {
code_block_visitor<code_compaction_fixup> code_forwarder;
-explicit object_grow_heap_updater(code_block_visitor<code_compaction_fixup> code_forwarder_) :
-code_forwarder(code_forwarder_) {}
+explicit object_grow_heap_updater(
+code_block_visitor<code_compaction_fixup> code_forwarder_)
+: code_forwarder(code_forwarder_) {}
-void operator()(object *obj)
-{
-code_forwarder.visit_object_code_block(obj);
-}
+void operator()(object* obj) { code_forwarder.visit_object_code_block(obj); }
};
/* Compact just the code heap, after growing the data heap */
-void factor_vm::collect_compact_code_impl(bool trace_contexts_p)
-{
+void factor_vm::collect_compact_code_impl(bool trace_contexts_p) {
/* Figure out where blocks are going to go */
mark_bits<code_block>* code_forwarding_map = &code->allocator->state;
code_forwarding_map->compute_forwarding();
@@ -329,7 +296,8 @@ void factor_vm::collect_compact_code_impl(bool trace_contexts_p)
/* Slide everything in the code heap up, and update code heap
pointers inside code blocks. */
-code_block_compaction_updater<code_compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
+code_block_compaction_updater<code_compaction_fixup> code_block_updater(
+this, fixup, data_forwarder, code_forwarder);
code->allocator->compact(code_block_updater, fixup, &code_finger);
update_code_roots_for_compaction();
@@ -337,13 +305,11 @@ void factor_vm::collect_compact_code_impl(bool trace_contexts_p)
code->initialize_all_blocks_set();
}
-void factor_vm::collect_compact(bool trace_contexts_p)
-{
+void factor_vm::collect_compact(bool trace_contexts_p) {
collect_mark_impl(trace_contexts_p);
collect_compact_impl(trace_contexts_p);
-if(data->high_fragmentation_p())
-{
+if (data->high_fragmentation_p()) {
/* Compaction did not free up enough memory. Grow the heap. */
set_current_gc_op(collect_growing_heap_op);
collect_growing_heap(0, trace_contexts_p);
@@ -352,8 +318,8 @@ void factor_vm::collect_compact(bool trace_contexts_p)
code->flush_icache();
}
-void factor_vm::collect_growing_heap(cell requested_size, bool trace_contexts_p)
-{
+void factor_vm::collect_growing_heap(cell requested_size,
+bool trace_contexts_p) {
/* Grow the data heap and copy all live objects to the new heap. */
data_heap* old = data;
set_data_heap(data->grow(requested_size));