2009-10-24 05:18:33 -04:00
|
|
|
#include "master.hpp"
|
|
|
|
|
|
|
|
namespace factor {
|
|
|
|
|
|
|
|
struct object_slot_forwarder {
|
|
|
|
mark_bits<object> *forwarding_map;
|
|
|
|
|
|
|
|
explicit object_slot_forwarder(mark_bits<object> *forwarding_map_) :
|
|
|
|
forwarding_map(forwarding_map_) {}
|
|
|
|
|
|
|
|
object *visit_object(object *obj)
|
|
|
|
{
|
|
|
|
return forwarding_map->forward_block(obj);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
struct code_block_forwarder {
|
2009-10-24 22:24:06 -04:00
|
|
|
mark_bits<code_block> *forwarding_map;
|
2009-10-24 05:18:33 -04:00
|
|
|
|
2009-10-24 22:24:06 -04:00
|
|
|
explicit code_block_forwarder(mark_bits<code_block> *forwarding_map_) :
|
2009-10-24 05:18:33 -04:00
|
|
|
forwarding_map(forwarding_map_) {}
|
|
|
|
|
|
|
|
code_block *operator()(code_block *compiled)
|
|
|
|
{
|
2009-10-24 22:24:06 -04:00
|
|
|
return forwarding_map->forward_block(compiled);
|
2009-10-24 05:18:33 -04:00
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2009-10-25 00:51:14 -04:00
|
|
|
/* Compute the size of a tuple during compaction. The tuple layout may or
   may not have been forwarded already. Tricky. */
static inline cell tuple_size_with_forwarding(mark_bits<object> *forwarding_map, object *obj)
{
	object *layout_obj = (object *)UNTAG(((tuple *)obj)->layout);

	/* A layout at a lower address than the tuple has already been moved up,
	   so dereference it through the forwarding map; a layout at a higher
	   address hasn't moved yet and can be read in place. */
	tuple_layout *layout = (layout_obj < obj)
		? (tuple_layout *)forwarding_map->forward_block(layout_obj)
		: (tuple_layout *)layout_obj;

	return tuple_size(layout);
}
|
|
|
|
|
|
|
|
struct compaction_sizer {
|
|
|
|
mark_bits<object> *forwarding_map;
|
|
|
|
|
|
|
|
explicit compaction_sizer(mark_bits<object> *forwarding_map_) :
|
|
|
|
forwarding_map(forwarding_map_) {}
|
|
|
|
|
|
|
|
cell operator()(object *obj)
|
|
|
|
{
|
2009-10-27 22:31:28 -04:00
|
|
|
if(!forwarding_map->marked_p(obj))
|
|
|
|
return forwarding_map->unmarked_space_starting_at(obj);
|
|
|
|
else if(obj->h.hi_tag() == TUPLE_TYPE)
|
2009-10-25 00:51:14 -04:00
|
|
|
return align(tuple_size_with_forwarding(forwarding_map,obj),data_alignment);
|
2009-10-27 22:31:28 -04:00
|
|
|
else
|
|
|
|
return obj->size();
|
2009-10-25 00:51:14 -04:00
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2009-10-24 05:18:33 -04:00
|
|
|
/* Moves one live object to its new location and fixes up the data heap
   and code heap pointers stored inside it. Passed to tenured->compact(). */
struct object_compaction_updater {
	factor_vm *parent;
	/* Rewrites object slots to post-compaction addresses */
	slot_visitor<object_slot_forwarder> slot_forwarder;
	/* Rewrites the object's associated code block pointer, if any */
	code_block_visitor<code_block_forwarder> code_forwarder;
	mark_bits<object> *data_forwarding_map;
	/* Tenured space's object start map; entries are re-recorded as objects move */
	object_start_map *starts;

	explicit object_compaction_updater(factor_vm *parent_,
		slot_visitor<object_slot_forwarder> slot_forwarder_,
		code_block_visitor<code_block_forwarder> code_forwarder_,
		mark_bits<object> *data_forwarding_map_) :
		parent(parent_),
		slot_forwarder(slot_forwarder_),
		code_forwarder(code_forwarder_),
		data_forwarding_map(data_forwarding_map_),
		starts(&parent->data->tenured->starts) {}

	void operator()(object *old_address, object *new_address, cell size)
	{
		/* Determine where the object's non-pointer payload begins. This must
		   happen BEFORE the memmove below: for tuples it reads the layout slot
		   out of the old copy, which the move may clobber when the source and
		   destination regions overlap. */
		cell payload_start;
		if(old_address->h.hi_tag() == TUPLE_TYPE)
			payload_start = tuple_size_with_forwarding(data_forwarding_map,old_address);
		else
			payload_start = old_address->binary_payload_start();

		/* memmove, not memcpy: sliding compaction means the ranges can overlap */
		memmove(new_address,old_address,size);

		/* Fix up pointers inside the moved copy, then record its new start */
		slot_forwarder.visit_slots(new_address,payload_start);
		code_forwarder.visit_object_code_block(new_address);
		starts->record_object_start_offset(new_address);
	}
};
|
|
|
|
|
2009-10-31 22:06:34 -04:00
|
|
|
/* Moves one live code block to its new location and fixes it up.
   SlotForwarder is a slot_visitor when the data heap is also being
   compacted, or dummy_slot_forwarder when only the code heap moves
   (see collect_compact_code_impl). */
template<typename SlotForwarder> struct code_block_compaction_updater {
	factor_vm *parent;
	SlotForwarder slot_forwarder;

	explicit code_block_compaction_updater(factor_vm *parent_, SlotForwarder slot_forwarder_) :
		parent(parent_), slot_forwarder(slot_forwarder_) {}

	void operator()(code_block *old_address, code_block *new_address, cell size)
	{
		/* memmove, not memcpy: sliding compaction means the ranges can overlap */
		memmove(new_address,old_address,size);
		/* Rewrite data heap pointers embedded in the moved block... */
		slot_forwarder.visit_literal_references(new_address);
		/* ...then redo relocation, since the block itself has a new address */
		parent->relocate_code_block(new_address);
	}
};
|
|
|
|
|
2009-10-31 22:06:34 -04:00
|
|
|
/* Compact data and code heaps */
|
2009-10-25 15:02:14 -04:00
|
|
|
/* Compact data and code heaps. trace_contexts_p selects whether live
   context and callback state is also traversed and forwarded. */
void factor_vm::collect_compact_impl(bool trace_contexts_p)
{
	current_gc->event->started_compaction();

	/* The mark bitmaps double as forwarding maps once
	   compute_forwarding() has run */
	tenured_space *tenured = data->tenured;
	mark_bits<object> *data_forwarding_map = &tenured->state;
	mark_bits<code_block> *code_forwarding_map = &code->allocator->state;

	/* Figure out where blocks are going to go */
	data_forwarding_map->compute_forwarding();
	code_forwarding_map->compute_forwarding();

	/* Update root pointers */
	slot_visitor<object_slot_forwarder> slot_forwarder(this,object_slot_forwarder(data_forwarding_map));
	code_block_visitor<code_block_forwarder> code_forwarder(this,code_block_forwarder(code_forwarding_map));

	/* Object start offsets get recomputed by the object_compaction_updater */
	data->tenured->starts.clear_object_start_offsets();

	/* Slide everything in tenured space up, and update data and code heap
	pointers inside objects. */
	object_compaction_updater object_updater(this,slot_forwarder,code_forwarder,data_forwarding_map);
	compaction_sizer object_sizer(data_forwarding_map);
	tenured->compact(object_updater,object_sizer);

	/* Slide everything in the code heap up, and update data and code heap
	pointers inside code blocks. */
	code_block_compaction_updater<slot_visitor<object_slot_forwarder> > code_block_updater(this,slot_forwarder);
	standard_sizer<code_block> code_block_sizer;
	code->allocator->compact(code_block_updater,code_block_sizer);

	/* Now that everything has moved, forward the GC roots, and optionally
	   the pointers held by contexts and callbacks */
	slot_forwarder.visit_roots();
	if(trace_contexts_p)
	{
		slot_forwarder.visit_contexts();
		code_forwarder.visit_context_code_blocks();
		code_forwarder.visit_callback_code_blocks();
	}

	current_gc->event->ended_compaction();
}
|
|
|
|
|
2009-10-31 22:06:34 -04:00
|
|
|
struct object_code_block_updater {
|
|
|
|
code_block_visitor<code_block_forwarder> *forwarder;
|
|
|
|
|
|
|
|
explicit object_code_block_updater(code_block_visitor<code_block_forwarder> *forwarder_) :
|
|
|
|
forwarder(forwarder_) {}
|
|
|
|
|
|
|
|
void operator()(cell obj)
|
|
|
|
{
|
|
|
|
forwarder->visit_object_code_block(tagged<object>(obj).untagged());
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
/* No-op stand-in for the slot forwarder, used by
   code_block_compaction_updater when only the code heap is compacted:
   data heap objects are not moving, so literal references need no fixup */
struct dummy_slot_forwarder {
	void visit_literal_references(code_block *compiled) {}
};
|
|
|
|
|
|
|
|
/* Compact just the code heap */
|
|
|
|
/* Compact just the code heap */
void factor_vm::collect_compact_code_impl()
{
	/* The code allocator's mark bitmap doubles as the forwarding map */
	mark_bits<code_block> *code_forwarding_map = &code->allocator->state;

	/* Figure out where blocks are going to go */
	code_forwarding_map->compute_forwarding();

	/* Update root pointers */
	code_block_visitor<code_block_forwarder> code_forwarder(this,code_block_forwarder(code_forwarding_map));

	/* Slide everything in the code heap up, and update code heap
	pointers inside code blocks. Literal references need no fixup here
	since data heap objects are not moving, hence the dummy forwarder. */
	dummy_slot_forwarder slot_forwarder;
	code_block_compaction_updater<dummy_slot_forwarder> code_block_updater(this,slot_forwarder);
	standard_sizer<code_block> code_block_sizer;
	code->allocator->compact(code_block_updater,code_block_sizer);

	/* Update code heap references in data heap */
	object_code_block_updater updater(&code_forwarder);
	each_object(updater);
}
|
|
|
|
|
2009-10-24 05:18:33 -04:00
|
|
|
}
|