vm: debugging compaction

parent b6a21b19a9
commit b7181d14a8

@@ -67,6 +67,7 @@ struct object_compaction_updater {
     slot_visitor<object_slot_forwarder> slot_forwarder;
     code_block_visitor<code_block_forwarder> code_forwarder;
     mark_bits<object> *data_forwarding_map;
+    object_start_map *starts;

     explicit object_compaction_updater(factor_vm *parent_,
         slot_visitor<object_slot_forwarder> slot_forwarder_,
@@ -75,18 +76,22 @@ struct object_compaction_updater {
         parent(parent_),
         slot_forwarder(slot_forwarder_),
         code_forwarder(code_forwarder_),
-        data_forwarding_map(data_forwarding_map_) {}
+        data_forwarding_map(data_forwarding_map_),
+        starts(&parent->data->tenured->starts) {}

-    void operator()(object *obj, cell size)
+    void operator()(object *old_address, object *new_address, cell size)
     {
         cell payload_start;
-        if(obj->h.hi_tag() == TUPLE_TYPE)
-            payload_start = tuple_size_with_forwarding(data_forwarding_map,obj);
+        if(old_address->h.hi_tag() == TUPLE_TYPE)
+            payload_start = tuple_size_with_forwarding(data_forwarding_map,old_address);
         else
-            payload_start = obj->binary_payload_start();
+            payload_start = old_address->binary_payload_start();

-        slot_forwarder.visit_slots(obj,payload_start);
-        code_forwarder.visit_object_code_block(obj);
+        memmove(new_address,old_address,size);
+        slot_forwarder.visit_slots(new_address,payload_start);
+        code_forwarder.visit_object_code_block(new_address);
+        starts->record_object_start_offset(new_address);
     }
 };
@@ -97,14 +102,15 @@ struct code_block_compaction_updater {
     explicit code_block_compaction_updater(factor_vm *parent_, slot_visitor<object_slot_forwarder> slot_forwarder_) :
         parent(parent_), slot_forwarder(slot_forwarder_) {}

-    void operator()(code_block *compiled, cell size)
+    void operator()(code_block *old_address, code_block *new_address, cell size)
     {
-        slot_forwarder.visit_literal_references(compiled);
-        parent->relocate_code_block(compiled);
+        memmove(new_address,old_address,size);
+        slot_forwarder.visit_literal_references(new_address);
+        parent->relocate_code_block(new_address);
     }
 };

-void factor_vm::compact_full_impl(bool trace_contexts_p)
+void factor_vm::collect_full_compact(bool trace_contexts_p)
 {
     tenured_space *tenured = data->tenured;
     mark_bits<object> *data_forwarding_map = &tenured->state;
@@ -118,6 +124,9 @@ void factor_vm::compact_full_impl(bool trace_contexts_p)
     slot_visitor<object_slot_forwarder> slot_forwarder(this,object_slot_forwarder(data_forwarding_map));
     code_block_visitor<code_block_forwarder> code_forwarder(this,code_block_forwarder(code_forwarding_map));

+    /* Object start offsets get recomputed by the object_compaction_updater */
+    data->tenured->starts.clear_object_start_offsets();
+
     /* Slide everything in tenured space up, and update data and code heap
     pointers inside objects. */
     object_compaction_updater object_updater(this,slot_forwarder,code_forwarder,data_forwarding_map);
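
The ordering in the new object_compaction_updater matters: the payload layout is read from the old copy (tuple sizes have to be computed against the forwarding map before the object moves), the memmove comes next, and the slot and code-block fix-ups plus the object-start bookkeeping are applied to the relocated copy. Below is a minimal sketch of that shape; toy_object, toy_starts and the field names are illustrative stand-ins, not the Factor VM's real types.

    #include <cstring>
    #include <cstddef>

    struct toy_object { std::size_t payload_start; };
    struct toy_starts { void record(toy_object *) { /* start-offset bookkeeping */ } };

    struct toy_object_updater {
        toy_starts *starts;

        void operator()(toy_object *old_address, toy_object *new_address, std::size_t size)
        {
            /* 1. Read the layout from the old copy, before anything is overwritten
                  (the real updater computes payload_start against the forwarding map). */
            std::size_t payload_start = old_address->payload_start;

            /* 2. Slide the object to its forwarded location. */
            std::memmove(new_address, old_address, size);

            /* 3. Apply slot/code-block fix-ups and bookkeeping to the new copy. */
            (void)payload_start;
            starts->record(new_address);
        }
    };
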
@@ -356,6 +356,24 @@ void free_list_allocator<Block>::sweep(Iterator &iter)
         this->add_to_free_list((free_heap_block *)prev);
 }

+template<typename Block, typename Iterator> struct heap_compactor {
+    mark_bits<Block> *state;
+    char *address;
+    Iterator &iter;
+
+    explicit heap_compactor(mark_bits<Block> *state_, Block *address_, Iterator &iter_) :
+        state(state_), address((char *)address_), iter(iter_) {}
+
+    void operator()(Block *block, cell size)
+    {
+        if(this->state->marked_p(block))
+        {
+            iter(block,(Block *)address,size);
+            address += size;
+        }
+    }
+};
+
 /* The forwarding map must be computed first by calling
 state.compute_forwarding(). */
 template<typename Block>
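
heap_compactor is the driver side of the new protocol: it walks blocks in address order, and for each marked block it hands both the old and the new address to the iterator, which performs the copy itself (the old heap_compactor did the memmove internally and only passed the destination). In the real code the forwarding map has to be computed first, via state.compute_forwarding(), so pointer fix-ups can query forward_block() during the walk; the self-contained sketch below elides that and just tracks the destination cursor. The toy_block and toy_updater names are illustrative, not the Factor VM's API.

    #include <cstdio>
    #include <cstring>
    #include <cstddef>
    #include <vector>

    struct toy_block { std::size_t size; bool marked; int payload; };

    struct toy_updater {
        void operator()(toy_block *old_address, toy_block *new_address, std::size_t size)
        {
            std::memmove(new_address, old_address, size);
            /* ...pointer fix-ups against the forwarding map would go here... */
        }
    };

    int main()
    {
        /* A flat "heap" of equally sized blocks; marked ones are live. */
        std::vector<toy_block> heap = {
            { sizeof(toy_block), true,  1 },
            { sizeof(toy_block), false, 2 },
            { sizeof(toy_block), true,  3 },
            { sizeof(toy_block), false, 4 },
            { sizeof(toy_block), true,  5 },
        };

        toy_updater update;
        char *dest = (char *)heap.data();   /* compaction cursor starts at the base */

        for(std::size_t i = 0; i < heap.size(); i++)
        {
            toy_block *block = &heap[i];
            if(block->marked)               /* dead blocks are simply skipped */
            {
                update(block, (toy_block *)dest, block->size);
                dest += block->size;
            }
        }

        std::size_t live = (dest - (char *)heap.data()) / sizeof(toy_block);
        for(std::size_t i = 0; i < live; i++)
            std::printf("slot %zu holds payload %d\n", i, heap[i].payload);
        return 0;
    }
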
@@ -40,7 +40,7 @@ struct object_start_map_updater {
     }
 };

-void factor_vm::collect_full_impl(bool trace_contexts_p)
+void factor_vm::collect_full_mark(bool trace_contexts_p)
 {
     full_collector collector(this);

@@ -68,16 +68,19 @@ void factor_vm::collect_full_impl(bool trace_contexts_p)
         code_marker.visit_object_code_block(obj);
     }

-    data->tenured->starts.clear_object_start_offsets();
-    object_start_map_updater updater(&data->tenured->starts);
-    data->tenured->sweep(updater);
-
     data->reset_generation(data->tenured);
     data->reset_generation(data->aging);
     data->reset_generation(&nursery);
     code->clear_remembered_set();
 }

+void factor_vm::collect_full_sweep()
+{
+    data->tenured->starts.clear_object_start_offsets();
+    object_start_map_updater updater(&data->tenured->starts);
+    data->tenured->sweep(updater);
+}
+
 void factor_vm::collect_growing_heap(cell requested_bytes,
     bool trace_contexts_p,
     bool compact_p)
@@ -85,23 +88,29 @@ void factor_vm::collect_growing_heap(cell requested_bytes,
     /* Grow the data heap and copy all live objects to the new heap. */
     data_heap *old = data;
     set_data_heap(data->grow(requested_bytes));
-    collect_full_impl(trace_contexts_p);
+    collect_full_mark(trace_contexts_p);
     delete old;

     if(compact_p)
-        compact_full_impl(trace_contexts_p);
+        collect_full_compact(trace_contexts_p);
     else
+    {
+        collect_full_sweep();
         relocate_code_heap();
+    }
 }

 void factor_vm::collect_full(bool trace_contexts_p, bool compact_p)
 {
-    collect_full_impl(trace_contexts_p);
+    collect_full_mark(trace_contexts_p);

     if(compact_p)
-        compact_full_impl(trace_contexts_p);
+        collect_full_compact(trace_contexts_p);
     else
+    {
+        collect_full_sweep();
         update_code_heap_words_and_literals();
+    }
 }

 }
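
Both entry points now share the same three phases: a full mark, then either a compacting pass or a sweep followed by a code-heap fix-up (relocate_code_heap() on the growing path, update_code_heap_words_and_literals() otherwise). A trivial dispatch sketch with print-only stub phase functions, purely illustrative and not the Factor VM's implementation:

    #include <cstdio>

    static void full_mark()     { std::puts("mark live objects"); }
    static void full_compact()  { std::puts("slide live objects, fix up pointers"); }
    static void full_sweep()    { std::puts("rebuild free lists and start offsets"); }
    static void fix_code_heap() { std::puts("update code heap references"); }

    static void full_gc(bool compact_p)
    {
        full_mark();
        if(compact_p)
            full_compact();      /* compaction leaves the heap consistent on its own */
        else
        {
            full_sweep();        /* otherwise sweep, then patch the code heap */
            fix_code_heap();
        }
    }

    int main()
    {
        full_gc(true);
        full_gc(false);
        return 0;
    }
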
@@ -139,32 +139,20 @@ template<typename Block> struct mark_bits {
     /* We have the popcount for every 64 entries; look up and compute the rest */
     Block *forward_block(Block *original)
     {
+#ifdef FACTOR_DEBUG
+        assert(marked_p(original));
+#endif
         std::pair<cell,cell> pair = bitmap_deref(original);

         cell approx_popcount = forwarding[pair.first];
         u64 mask = ((u64)1 << pair.second) - 1;

         cell new_line_number = approx_popcount + popcount(marked[pair.first] & mask);
-        return line_block(new_line_number);
-    }
-};
-
-template<typename Block, typename Iterator> struct heap_compactor {
-    mark_bits<Block> *state;
-    char *address;
-    Iterator &iter;
-
-    explicit heap_compactor(mark_bits<Block> *state_, Block *address_, Iterator &iter_) :
-        state(state_), address((char *)address_), iter(iter_) {}
-
-    void operator()(Block *block, cell size)
-    {
-        if(this->state->marked_p(block))
-        {
-            memmove(address,block,size);
-            iter((Block *)address,size);
-            address += size;
-        }
+        Block *new_block = line_block(new_line_number);
+#ifdef FACTOR_DEBUG
+        assert(new_block <= original);
+#endif
+        return new_block;
     }
 };
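
The forwarding computation in forward_block() is rank arithmetic over the mark bitmap: forwarding[] caches the popcount of all words before each 64-bit word, so a block's new line number is that prefix count plus the popcount of the marked bits below it within its own word. The new debug assertions just check that only marked blocks are ever forwarded and that forwarding never moves a block upward. A self-contained worked example of the same computation follows; the line size and base address are illustrative, not Factor's constants.

    #include <cstdint>
    #include <cassert>
    #include <cstdio>

    #if defined(__GNUC__) || defined(__clang__)
    static int popcount64(uint64_t x) { return __builtin_popcountll(x); }
    #else
    static int popcount64(uint64_t x) {
        int n = 0;
        while(x) { x &= x - 1; n++; }
        return n;
    }
    #endif

    int main()
    {
        const int line_size = 16;              /* granularity of one mark bit (illustrative) */
        const uint64_t heap_base = 0x100000;   /* illustrative base address */

        /* Mark bitmap: one bit per line, 64 lines per word. */
        uint64_t marked[2] = { 0xF0F0F0F0F0F0F0F0ull, 0x00000000000000FFull };

        /* forwarding[i] = number of marked lines in all words before word i. */
        uint64_t forwarding[2];
        forwarding[0] = 0;
        forwarding[1] = forwarding[0] + popcount64(marked[0]);

        /* Forward the line at bit `bit_index` of word `word_index`:
           its new line number is its rank among the marked lines. */
        int word_index = 1, bit_index = 3;
        assert((marked[word_index] >> bit_index) & 1);   /* must be marked */

        uint64_t mask = ((uint64_t)1 << bit_index) - 1;
        uint64_t new_line_number =
            forwarding[word_index] + popcount64(marked[word_index] & mask);

        uint64_t old_address = heap_base + (64 * word_index + bit_index) * line_size;
        uint64_t new_address = heap_base + new_line_number * line_size;

        /* Compaction only ever moves blocks toward the heap base. */
        assert(new_address <= old_address);
        std::printf("line %d of word %d forwards to line %llu (0x%llx -> 0x%llx)\n",
            bit_index, word_index,
            (unsigned long long)new_line_number,
            (unsigned long long)old_address,
            (unsigned long long)new_address);
        return 0;
    }
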
@@ -247,8 +247,9 @@ struct factor_vm
     void collect_nursery();
     void collect_aging();
     void collect_to_tenured();
-    void collect_full_impl(bool trace_contexts_p);
-    void compact_full_impl(bool trace_contexts_p);
+    void collect_full_mark(bool trace_contexts_p);
+    void collect_full_sweep();
+    void collect_full_compact(bool trace_contexts_p);
     void collect_growing_heap(cell requested_bytes, bool trace_contexts_p, bool compact_p);
     void collect_full(bool trace_contexts_p, bool compact_p);
     void record_gc_stats(generation_statistics *stats);