2009-10-07 16:48:09 -04:00
|
|
|
#include "master.hpp"
|
|
|
|
|
2013-05-11 22:02:39 -04:00
|
|
|
namespace factor {
|
|
|
|
|
2013-05-12 23:20:43 -04:00
|
|
|
/* Construct a full collector: promotion target is the tenured space
   (passed from parent->data->tenured), and the code visitor shares the
   collector's workhorse so code-block tracing uses the same fixup. */
full_collector::full_collector(factor_vm* parent)
    : collector<tenured_space, full_policy>(parent, parent->data->tenured,
                                            full_policy(parent)),
      code_visitor(parent, workhorse) {}
|
2009-11-23 19:51:08 -05:00
|
|
|
|
2013-05-11 22:02:39 -04:00
|
|
|
/* Trace every outgoing reference of one code block: the objects it
   refers to, the literals embedded in its instructions, and the other
   code blocks its instructions point at. */
void full_collector::trace_code_block(code_block* compiled) {
  /* Data-heap references owned by the block. */
  data_visitor.visit_code_block_objects(compiled);
  /* Literals baked directly into the machine code. */
  data_visitor.visit_embedded_literals(compiled);
  /* Code-heap references baked into the machine code. */
  code_visitor.visit_embedded_code_pointers(compiled);
}
|
|
|
|
|
2009-11-02 19:10:34 -05:00
|
|
|
/* After a sweep, invalidate any code heap roots which are not marked,
|
2013-05-11 22:02:39 -04:00
|
|
|
so that if a block makes a tail call to a generic word, and the PIC
|
|
|
|
compiler triggers a GC, and the caller block gets gets GCd as a result,
|
|
|
|
the PIC code won't try to overwrite the call site */
|
|
|
|
void factor_vm::update_code_roots_for_sweep() {
|
|
|
|
std::vector<code_root*>::const_iterator iter = code_roots.begin();
|
|
|
|
std::vector<code_root*>::const_iterator end = code_roots.end();
|
|
|
|
|
2015-01-06 11:58:24 -05:00
|
|
|
mark_bits* state = &code->allocator->state;
|
2013-05-11 22:02:39 -04:00
|
|
|
|
|
|
|
for (; iter < end; iter++) {
|
|
|
|
code_root* root = *iter;
|
2015-01-06 11:58:24 -05:00
|
|
|
cell block = root->value & (~data_alignment - 1);
|
2013-05-11 22:02:39 -04:00
|
|
|
if (root->valid && !state->marked_p(block))
|
|
|
|
root->valid = false;
|
|
|
|
}
|
2009-11-02 19:10:34 -05:00
|
|
|
}
|
|
|
|
|
2013-05-11 22:02:39 -04:00
|
|
|
void factor_vm::collect_mark_impl(bool trace_contexts_p) {
|
|
|
|
full_collector collector(this);
|
|
|
|
|
|
|
|
mark_stack.clear();
|
|
|
|
|
|
|
|
code->clear_mark_bits();
|
|
|
|
data->tenured->clear_mark_bits();
|
|
|
|
|
2015-01-03 16:36:41 -05:00
|
|
|
collector.data_visitor.visit_roots();
|
2013-05-11 22:02:39 -04:00
|
|
|
if (trace_contexts_p) {
|
2015-01-03 16:36:41 -05:00
|
|
|
collector.data_visitor.visit_contexts();
|
|
|
|
collector.code_visitor.visit_context_code_blocks();
|
2015-01-06 08:53:52 -05:00
|
|
|
collector.code_visitor.visit_uninitialized_code_blocks();
|
2013-05-11 22:02:39 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
while (!mark_stack.empty()) {
|
|
|
|
cell ptr = mark_stack.back();
|
|
|
|
mark_stack.pop_back();
|
|
|
|
|
|
|
|
if (ptr & 1) {
|
|
|
|
code_block* compiled = (code_block*)(ptr - 1);
|
|
|
|
collector.trace_code_block(compiled);
|
|
|
|
} else {
|
|
|
|
object* obj = (object*)ptr;
|
|
|
|
collector.trace_object(obj);
|
2015-01-03 16:36:41 -05:00
|
|
|
collector.code_visitor.visit_object_code_block(obj);
|
2013-05-11 22:02:39 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
data->reset_generation(data->tenured);
|
|
|
|
data->reset_generation(data->aging);
|
|
|
|
data->reset_generation(&nursery);
|
|
|
|
code->clear_remembered_set();
|
2009-10-14 03:06:01 -04:00
|
|
|
}
|
2009-10-08 03:10:28 -04:00
|
|
|
|
2013-05-11 22:02:39 -04:00
|
|
|
void factor_vm::collect_sweep_impl() {
|
|
|
|
gc_event* event = current_gc->event;
|
2010-09-04 16:21:45 -04:00
|
|
|
|
2013-05-11 22:02:39 -04:00
|
|
|
if (event)
|
|
|
|
event->started_data_sweep();
|
|
|
|
data->tenured->sweep();
|
|
|
|
if (event)
|
|
|
|
event->ended_data_sweep();
|
2009-11-05 20:03:51 -05:00
|
|
|
|
2013-05-11 22:02:39 -04:00
|
|
|
update_code_roots_for_sweep();
|
2009-11-22 14:37:39 -05:00
|
|
|
|
2013-05-11 22:02:39 -04:00
|
|
|
if (event)
|
|
|
|
event->started_code_sweep();
|
|
|
|
code->sweep();
|
|
|
|
if (event)
|
|
|
|
event->ended_code_sweep();
|
2009-10-25 09:07:21 -04:00
|
|
|
}
|
|
|
|
|
2013-05-11 22:02:39 -04:00
|
|
|
void factor_vm::collect_full(bool trace_contexts_p) {
|
|
|
|
collect_mark_impl(trace_contexts_p);
|
|
|
|
collect_sweep_impl();
|
|
|
|
|
|
|
|
if (data->low_memory_p()) {
|
|
|
|
/* Full GC did not free up enough memory. Grow the heap. */
|
|
|
|
set_current_gc_op(collect_growing_heap_op);
|
|
|
|
collect_growing_heap(0, trace_contexts_p);
|
|
|
|
} else if (data->high_fragmentation_p()) {
|
|
|
|
/* Enough free memory, but it is not contiguous. Perform a
|
|
|
|
compaction. */
|
|
|
|
set_current_gc_op(collect_compact_op);
|
|
|
|
collect_compact_impl(trace_contexts_p);
|
|
|
|
}
|
|
|
|
|
|
|
|
code->flush_icache();
|
2009-10-07 16:48:09 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
}
|