diff --git a/vm/compaction.cpp b/vm/compaction.cpp
index ce5e707f6a..d33b203e16 100644
--- a/vm/compaction.cpp
+++ b/vm/compaction.cpp
@@ -164,7 +164,7 @@ void factor_vm::update_code_roots_for_compaction() {
 }
 
 /* Compact data and code heaps */
-void factor_vm::collect_compact_impl(bool trace_contexts_p) {
+void factor_vm::collect_compact_impl() {
   gc_event* event = current_gc->event;
 
 #ifdef FACTOR_DEBUG
@@ -212,10 +212,8 @@ void factor_vm::collect_compact_impl(bool trace_contexts_p) {
     }
 
     forwarder.visit_roots();
-    if (trace_contexts_p) {
-      forwarder.visit_contexts();
-      forwarder.visit_context_code_blocks();
-    }
+    forwarder.visit_contexts();
+    forwarder.visit_context_code_blocks();
   }
 
   update_code_roots_for_compaction();
@@ -273,7 +271,7 @@ struct object_grow_heap_updater {
 };
 
 /* Compact just the code heap, after growing the data heap */
-void factor_vm::collect_compact_code_impl(bool trace_contexts_p) {
+void factor_vm::collect_compact_code_impl() {
   /* Figure out where blocks are going to go */
   mark_bits* code_forwarding_map = &code->allocator->state;
   code_forwarding_map->compute_forwarding();
@@ -284,9 +282,7 @@ void factor_vm::collect_compact_code_impl(bool trace_contexts_p) {
   slot_visitor forwarder(this, fixup);
 
   forwarder.visit_uninitialized_code_blocks();
-
-  if (trace_contexts_p)
-    forwarder.visit_context_code_blocks();
+  forwarder.visit_context_code_blocks();
 
   /* Update code heap references in data heap */
   object_grow_heap_updater object_updater(forwarder);
@@ -303,26 +299,25 @@ void factor_vm::collect_compact_code_impl(bool trace_contexts_p) {
   code->initialize_all_blocks_set();
 }
 
-void factor_vm::collect_compact(bool trace_contexts_p) {
-  collect_mark_impl(trace_contexts_p);
-  collect_compact_impl(trace_contexts_p);
+void factor_vm::collect_compact() {
+  collect_mark_impl();
+  collect_compact_impl();
 
   if (data->high_fragmentation_p()) {
     /* Compaction did not free up enough memory. Grow the heap. */
     set_current_gc_op(collect_growing_heap_op);
-    collect_growing_heap(0, trace_contexts_p);
+    collect_growing_heap(0);
   }
 
   code->flush_icache();
 }
 
-void factor_vm::collect_growing_heap(cell requested_size,
-                                     bool trace_contexts_p) {
+void factor_vm::collect_growing_heap(cell requested_size) {
   /* Grow the data heap and copy all live objects to the new heap. */
   data_heap* old = data;
   set_data_heap(data->grow(&nursery, requested_size));
-  collect_mark_impl(trace_contexts_p);
-  collect_compact_code_impl(trace_contexts_p);
+  collect_mark_impl();
+  collect_compact_code_impl();
   code->flush_icache();
   delete old;
 }
diff --git a/vm/full_collector.cpp b/vm/full_collector.cpp
index 22ee3577bc..89a90c04c5 100644
--- a/vm/full_collector.cpp
+++ b/vm/full_collector.cpp
@@ -20,7 +20,7 @@ void factor_vm::update_code_roots_for_sweep() {
   }
 }
 
-void factor_vm::collect_mark_impl(bool trace_contexts_p) {
+void factor_vm::collect_mark_impl() {
   gc_workhorse workhorse(this, this->data->tenured,
                          full_policy(this));
 
@@ -33,11 +33,9 @@ void factor_vm::collect_mark_impl(bool trace_contexts_p) {
   data->tenured->state.clear_mark_bits();
 
   visitor.visit_roots();
-  if (trace_contexts_p) {
-    visitor.visit_contexts();
-    visitor.visit_context_code_blocks();
-    visitor.visit_uninitialized_code_blocks();
-  }
+  visitor.visit_contexts();
+  visitor.visit_context_code_blocks();
+  visitor.visit_uninitialized_code_blocks();
 
   while (!mark_stack.empty()) {
     cell ptr = mark_stack.back();
@@ -80,19 +78,19 @@ void factor_vm::collect_sweep_impl() {
     event->ended_code_sweep();
 }
 
-void factor_vm::collect_full(bool trace_contexts_p) {
-  collect_mark_impl(trace_contexts_p);
+void factor_vm::collect_full() {
+  collect_mark_impl();
   collect_sweep_impl();
 
   if (data->low_memory_p()) {
     /* Full GC did not free up enough memory. Grow the heap. */
     set_current_gc_op(collect_growing_heap_op);
-    collect_growing_heap(0, trace_contexts_p);
+    collect_growing_heap(0);
   } else if (data->high_fragmentation_p()) {
     /* Enough free memory, but it is not contiguous. Perform a
        compaction. */
     set_current_gc_op(collect_compact_op);
-    collect_compact_impl(trace_contexts_p);
+    collect_compact_impl();
   }
 
   code->flush_icache();
diff --git a/vm/gc.cpp b/vm/gc.cpp
index ca84ed0bfc..15f5ca9aff 100644
--- a/vm/gc.cpp
+++ b/vm/gc.cpp
@@ -111,7 +111,7 @@ void factor_vm::set_current_gc_op(gc_op op) {
     current_gc->event->op = op;
 }
 
-void factor_vm::gc(gc_op op, cell requested_size, bool trace_contexts_p) {
+void factor_vm::gc(gc_op op, cell requested_size) {
   FACTOR_ASSERT(!gc_off);
   FACTOR_ASSERT(!current_gc);
 
@@ -141,7 +141,7 @@ void factor_vm::gc(gc_op op, cell requested_size, bool trace_contexts_p) {
          if (data->high_fragmentation_p()) {
            /* Change GC op so that if we fail again, we crash. */
            set_current_gc_op(collect_full_op);
-           collect_full(trace_contexts_p);
+           collect_full();
          }
          break;
        case collect_to_tenured_op:
@@ -150,17 +150,17 @@ void factor_vm::gc(gc_op op, cell requested_size, bool trace_contexts_p) {
          if (data->high_fragmentation_p()) {
            /* Change GC op so that if we fail again, we crash. */
            set_current_gc_op(collect_full_op);
-           collect_full(trace_contexts_p);
+           collect_full();
          }
          break;
        case collect_full_op:
-         collect_full(trace_contexts_p);
+         collect_full();
          break;
        case collect_compact_op:
-         collect_compact(trace_contexts_p);
+         collect_compact();
          break;
        case collect_growing_heap_op:
-         collect_growing_heap(requested_size, trace_contexts_p);
+         collect_growing_heap(requested_size);
          break;
        default:
          critical_error("in gc, bad GC op", current_gc->op);
@@ -187,18 +187,15 @@ void factor_vm::gc(gc_op op, cell requested_size, bool trace_contexts_p) {
 }
 
 void factor_vm::primitive_minor_gc() {
-  gc(collect_nursery_op, 0, /* requested size */
-     true /* trace contexts? */);
+  gc(collect_nursery_op, 0 /* requested size */);
 }
 
 void factor_vm::primitive_full_gc() {
-  gc(collect_full_op, 0, /* requested size */
-     true /* trace contexts? */);
+  gc(collect_full_op, 0 /* requested size */);
 }
 
 void factor_vm::primitive_compact_gc() {
-  gc(collect_compact_op, 0, /* requested size */
-     true /* trace contexts? */);
+  gc(collect_compact_op, 0 /* requested size */);
 }
 
 /*
@@ -214,8 +211,7 @@ object* factor_vm::allot_large_object(cell type, cell size) {
 
     /* If it still won't fit, grow the heap */
     if (!data->tenured->can_allot_p(requested_size)) {
-      gc(collect_growing_heap_op, size, /* requested size */
-         true /* trace contexts? */);
+      gc(collect_growing_heap_op, size /* requested size */);
     }
   }
 
diff --git a/vm/image.cpp b/vm/image.cpp
index 87b82e14e9..56b09288f4 100644
--- a/vm/image.cpp
+++ b/vm/image.cpp
@@ -347,8 +347,12 @@ void factor_vm::primitive_save_image_and_exit() {
     if (!save_special_p(i))
      special_objects[i] = false_object;
 
-  gc(collect_compact_op, 0, /* requested size */
-     false /* discard objects only reachable from stacks */);
+  /* don't trace objects only reachable from context stacks so we don't
+     get volatile data saved in the image. */
+  active_contexts.clear();
+  code->uninitialized_blocks.clear();
+
+  gc(collect_compact_op, 0 /* requested size */);
 
   /* Save the image */
   if (save_image((vm_char*)(path1.untagged() + 1),
diff --git a/vm/vm.hpp b/vm/vm.hpp
index f98b210c5f..5014aecfc8 100644
--- a/vm/vm.hpp
+++ b/vm/vm.hpp
@@ -359,14 +359,14 @@ struct factor_vm {
   void collect_to_tenured();
   void update_code_roots_for_sweep();
   void update_code_roots_for_compaction();
-  void collect_mark_impl(bool trace_contexts_p);
+  void collect_mark_impl();
   void collect_sweep_impl();
-  void collect_full(bool trace_contexts_p);
-  void collect_compact_impl(bool trace_contexts_p);
-  void collect_compact_code_impl(bool trace_contexts_p);
-  void collect_compact(bool trace_contexts_p);
-  void collect_growing_heap(cell requested_size, bool trace_contexts_p);
-  void gc(gc_op op, cell requested_size, bool trace_contexts_p);
+  void collect_full();
+  void collect_compact_impl();
+  void collect_compact_code_impl();
+  void collect_compact();
+  void collect_growing_heap(cell requested_size);
+  void gc(gc_op op, cell requested_size);
   void scrub_context(context* ctx);
   void scrub_contexts();
   void primitive_minor_gc();