#include "master.hpp" namespace factor { gc_state::gc_state(data_heap *data_, bool growing_data_heap_, cell collecting_gen_) : data(data_), growing_data_heap(growing_data_heap_), collecting_gen(collecting_gen_), collecting_aging_again(false), start_time(current_micros()) { } gc_state::~gc_state() { } template object *factor_vm::resolve_forwarding(object *untagged, Strategy &strategy) { check_data_pointer(untagged); /* is there another forwarding pointer? */ while(untagged->h.forwarding_pointer_p()) untagged = untagged->h.forwarding_pointer(); /* we've found the destination */ untagged->h.check_header(); return untagged; } template void factor_vm::trace_handle(cell *handle, Strategy &strategy) { cell pointer = *handle; if(!immediate_p(pointer)) { object *untagged = untag(pointer); if(strategy.should_copy_p(untagged)) { object *forwarding = resolve_forwarding(untagged,strategy); if(forwarding == untagged) untagged = strategy.copy_object(untagged); else if(strategy.should_copy_p(forwarding)) untagged = strategy.copy_object(forwarding); else untagged = forwarding; *handle = RETAG(untagged,TAG(pointer)); } } } template void factor_vm::trace_slots(object *ptr, Strategy &strategy) { cell *slot = (cell *)ptr; cell *end = (cell *)((cell)ptr + binary_payload_start(ptr)); if(slot != end) { slot++; for(; slot < end; slot++) trace_handle(slot,strategy); } } template object *factor_vm::promote_object(object *untagged, Strategy &strategy) { cell size = untagged_object_size(untagged); object *newpointer = strategy.allot(size); if(!newpointer) longjmp(current_gc->gc_unwind,1); generation_statistics *s = &gc_stats.generations[current_gc->collecting_gen]; s->object_count++; s->bytes_copied += size; memcpy(newpointer,untagged,size); untagged->h.forward_to(newpointer); return newpointer; } template void factor_vm::trace_card(card *ptr, cell here, Strategy &strategy) { cell card_scan = card_to_addr(ptr) + card_offset(ptr); cell card_end = card_to_addr(ptr + 1); if(here < card_end) card_end = here; strategy.copy_reachable_objects(card_scan,&card_end); gc_stats.cards_scanned++; } template void factor_vm::trace_card_deck(card_deck *deck, cell here, card mask, card unmask, Strategy &strategy) { card *first_card = deck_to_card(deck); card *last_card = deck_to_card(deck + 1); u32 *quad_ptr; u32 quad_mask = mask | (mask << 8) | (mask << 16) | (mask << 24); for(quad_ptr = (u32 *)first_card; quad_ptr < (u32 *)last_card; quad_ptr++) { if(*quad_ptr & quad_mask) { card *ptr = (card *)quad_ptr; int card; for(card = 0; card < 4; card++) { if(ptr[card] & mask) { trace_card(&ptr[card],here,strategy); ptr[card] &= ~unmask; } } } } gc_stats.decks_scanned++; } /* Trace all objects referenced from marked cards */ template void factor_vm::trace_cards(cell gen, zone *z, Strategy &strategy) { u64 start_time = current_micros(); card_deck *first_deck = addr_to_deck(z->start); card_deck *last_deck = addr_to_deck(z->end); card mask, unmask; /* if we are collecting the nursery, we care about old->nursery pointers but not old->aging pointers */ if(current_gc->collecting_nursery_p()) { mask = card_points_to_nursery; /* after the collection, no old->nursery pointers remain anywhere, but old->aging pointers might remain in tenured space */ if(gen == tenured_gen) unmask = card_points_to_nursery; /* after the collection, all cards in aging space can be cleared */ else if(gen == aging_gen) unmask = card_mark_mask; else { critical_error("bug in trace_generation_cards",gen); return; } } /* if we are collecting aging space into tenured space, we 
/* Trace all objects referenced from marked cards */
template<typename Strategy>
void factor_vm::trace_cards(cell gen, zone *z, Strategy &strategy)
{
    u64 start_time = current_micros();

    card_deck *first_deck = addr_to_deck(z->start);
    card_deck *last_deck = addr_to_deck(z->end);

    card mask, unmask;

    /* if we are collecting the nursery, we care about old->nursery pointers
    but not old->aging pointers */
    if(current_gc->collecting_nursery_p())
    {
        mask = card_points_to_nursery;

        /* after the collection, no old->nursery pointers remain anywhere,
        but old->aging pointers might remain in tenured space */
        if(gen == tenured_gen)
            unmask = card_points_to_nursery;
        /* after the collection, all cards in aging space can be cleared */
        else if(gen == aging_gen)
            unmask = card_mark_mask;
        else
        {
            critical_error("bug in trace_generation_cards",gen);
            return;
        }
    }
    /* if we are collecting aging space into tenured space, we care about
    all old->nursery and old->aging pointers. no old->aging pointers can
    remain */
    else if(current_gc->collecting_aging_p())
    {
        if(current_gc->collecting_aging_again)
        {
            mask = card_points_to_aging;
            unmask = card_mark_mask;
        }
        /* after we collect aging space into the aging semispace, no
        old->nursery pointers remain but tenured space might still have
        pointers to aging space. */
        else
        {
            mask = card_points_to_aging;
            unmask = card_points_to_nursery;
        }
    }
    else
    {
        critical_error("bug in trace_generation_cards",gen);
        return;
    }

    card_deck *ptr;

    for(ptr = first_deck; ptr < last_deck; ptr++)
    {
        if(*ptr & mask)
        {
            trace_card_deck(ptr,z->here,mask,unmask,strategy);
            *ptr &= ~unmask;
        }
    }

    gc_stats.card_scan_time += (current_micros() - start_time);
}

/* Copy all tagged pointers in a range of memory */
template<typename Strategy>
void factor_vm::trace_stack_elements(segment *region, cell top, Strategy &strategy)
{
    cell ptr = region->start;

    for(; ptr <= top; ptr += sizeof(cell))
        trace_handle((cell*)ptr,strategy);
}

template<typename Strategy>
void factor_vm::trace_registered_locals(Strategy &strategy)
{
    std::vector<cell>::const_iterator iter = gc_locals.begin();
    std::vector<cell>::const_iterator end = gc_locals.end();

    for(; iter < end; iter++)
        trace_handle((cell *)(*iter),strategy);
}

template<typename Strategy>
void factor_vm::trace_registered_bignums(Strategy &strategy)
{
    std::vector<cell>::const_iterator iter = gc_bignums.begin();
    std::vector<cell>::const_iterator end = gc_bignums.end();

    for(; iter < end; iter++)
    {
        cell *handle = (cell *)(*iter);
        if(*handle)
        {
            *handle |= BIGNUM_TYPE;
            trace_handle(handle,strategy);
            *handle &= ~BIGNUM_TYPE;
        }
    }
}

/* Copy roots over at the start of GC, namely various constants, stacks,
the user environment and extra roots registered by local_roots.hpp */
template<typename Strategy>
void factor_vm::trace_roots(Strategy &strategy)
{
    trace_handle(&T,strategy);
    trace_handle(&bignum_zero,strategy);
    trace_handle(&bignum_pos_one,strategy);
    trace_handle(&bignum_neg_one,strategy);

    trace_registered_locals(strategy);
    trace_registered_bignums(strategy);

    int i;
    for(i = 0; i < USER_ENV; i++)
        trace_handle(&userenv[i],strategy);
}

template<typename Strategy>
struct stack_frame_marker {
    factor_vm *myvm;
    Strategy &strategy;

    explicit stack_frame_marker(factor_vm *myvm_, Strategy &strategy_) :
        myvm(myvm_), strategy(strategy_) {}

    void operator()(stack_frame *frame)
    {
        myvm->mark_code_block(myvm->frame_code(frame),strategy);
    }
};
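/* stack_frame_marker is a plain function object so the callstack walkers can
   stay generic over the collection strategy. A sketch of how it is applied
   (the real call sites are mark_active_blocks and mark_object_code_block
   below):

       stack_frame_marker<Strategy> marker(this,strategy);
       iterate_callstack(top,bottom,marker);        // live callstack frames
       iterate_callstack_object(stack,marker);      // heap-allocated callstacks
*/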
/* Mark code blocks executing in currently active stack frames. */
template<typename Strategy>
void factor_vm::mark_active_blocks(context *stacks, Strategy &strategy)
{
    if(current_gc->collecting_tenured_p())
    {
        cell top = (cell)stacks->callstack_top;
        cell bottom = (cell)stacks->callstack_bottom;

        stack_frame_marker<Strategy> marker(this,strategy);
        iterate_callstack(top,bottom,marker);
    }
}

template<typename Strategy>
void factor_vm::mark_object_code_block(object *object, Strategy &strategy)
{
    switch(object->h.hi_tag())
    {
    case WORD_TYPE:
        {
            word *w = (word *)object;
            if(w->code)
                mark_code_block(w->code,strategy);
            if(w->profiling)
                mark_code_block(w->profiling,strategy);
            break;
        }
    case QUOTATION_TYPE:
        {
            quotation *q = (quotation *)object;
            if(q->code)
                mark_code_block(q->code,strategy);
            break;
        }
    case CALLSTACK_TYPE:
        {
            callstack *stack = (callstack *)object;
            stack_frame_marker<Strategy> marker(this,strategy);
            iterate_callstack_object(stack,marker);
            break;
        }
    }
}

template<typename Strategy>
void factor_vm::trace_contexts(Strategy &strategy)
{
    save_stacks();

    context *stacks = stack_chain;

    while(stacks)
    {
        trace_stack_elements(stacks->datastack_region,stacks->datastack,strategy);
        trace_stack_elements(stacks->retainstack_region,stacks->retainstack,strategy);

        trace_handle(&stacks->catchstack_save,strategy);
        trace_handle(&stacks->current_callback_save,strategy);

        mark_active_blocks(stacks,strategy);

        stacks = stacks->next;
    }
}

/* Trace all literals referenced from a code block. Only for aging and
nursery collections */
template<typename Strategy>
void factor_vm::trace_literal_references(code_block *compiled, Strategy &strategy)
{
    trace_handle(&compiled->owner,strategy);
    trace_handle(&compiled->literals,strategy);
    trace_handle(&compiled->relocation,strategy);
}

/* Trace literals referenced from all code blocks. Only for aging and
nursery collections */
template<typename Strategy>
void factor_vm::trace_code_heap_roots(Strategy &strategy)
{
    if(current_gc->collecting_gen >= code->youngest_referenced_generation)
    {
        unordered_map<code_block *,cell>::const_iterator iter = code->remembered_set.begin();
        unordered_map<code_block *,cell>::const_iterator end = code->remembered_set.end();

        for(; iter != end; iter++)
        {
            if(current_gc->collecting_gen >= iter->second)
                trace_literal_references(iter->first,strategy);
        }

        gc_stats.code_heap_scans++;
    }
}
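/* The code heap keeps its own remembered set: code->remembered_set maps each
   compiled block to the youngest data generation its literals may reference,
   and code->youngest_referenced_generation is the youngest generation
   referenced by any block. trace_code_heap_roots above therefore skips the
   whole scan when no code block can reference anything as young as the
   generations being collected, and otherwise rescans only the blocks recorded
   as possibly referencing the collected generation or a younger one. (Lower
   generation numbers are younger; see the "collecting_gen + 1" promotion in
   update_dirty_code_blocks below.) */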
/* Mark all literals referenced from a word XT. Only for tenured
collections */
template<typename Strategy>
void factor_vm::mark_code_block(code_block *compiled, Strategy &strategy)
{
    check_code_address((cell)compiled);

    code->mark_block(compiled);

    trace_literal_references(compiled,strategy);
}

struct literal_and_word_reference_updater {
    factor_vm *myvm;

    literal_and_word_reference_updater(factor_vm *myvm_) : myvm(myvm_) {}

    void operator()(heap_block *block)
    {
        code_block *compiled = (code_block *)block;
        myvm->update_literal_references(compiled);
        myvm->update_word_references(compiled);
    }
};

void factor_vm::free_unmarked_code_blocks()
{
    literal_and_word_reference_updater updater(this);
    code->free_unmarked(updater);
    code->remembered_set.clear();
    code->youngest_referenced_generation = tenured_gen;
}

void factor_vm::update_dirty_code_blocks()
{
    /* The youngest generation that any code block can now reference */
    cell gen;

    if(current_gc->collecting_accumulation_gen_p())
        gen = current_gc->collecting_gen;
    else
        gen = current_gc->collecting_gen + 1;

    unordered_map<code_block *,cell>::iterator iter = code->remembered_set.begin();
    unordered_map<code_block *,cell>::iterator end = code->remembered_set.end();

    for(; iter != end; iter++)
    {
        if(current_gc->collecting_gen >= iter->second)
        {
            check_code_address((cell)iter->first);
            update_literal_references(iter->first);
            iter->second = gen;
        }
    }

    code->youngest_referenced_generation = gen;
}

template<typename Strategy>
copying_collector<Strategy>::copying_collector(factor_vm *myvm_, zone *newspace_) :
    myvm(myvm_), current_gc(myvm_->current_gc), newspace(newspace_)
{
    scan = newspace->here;
}

template<typename Strategy>
Strategy &copying_collector<Strategy>::strategy()
{
    return static_cast<Strategy &>(*this);
}

template<typename Strategy>
object *copying_collector<Strategy>::allot(cell size)
{
    if(newspace->here + size <= newspace->end)
    {
        object *obj = newspace->allot(size);
        myvm->allot_barrier(obj);
        return obj;
    }
    else
        return NULL;
}

template<typename Strategy>
object *copying_collector<Strategy>::copy_object(object *untagged)
{
    return myvm->promote_object(untagged,strategy());
}

template<typename Strategy>
bool copying_collector<Strategy>::should_copy_p(object *pointer)
{
    return strategy().should_copy_p(pointer);
}

template<typename Strategy>
cell copying_collector<Strategy>::trace_next(cell scan)
{
    object *obj = (object *)scan;
    myvm->trace_slots(obj,strategy());
    return scan + myvm->untagged_object_size(obj);
}

template<typename Strategy>
void copying_collector<Strategy>::go()
{
    strategy().copy_reachable_objects(scan,&newspace->here);
}

struct nursery_collector : copying_collector<nursery_collector> {
    explicit nursery_collector(factor_vm *myvm_, zone *newspace_) :
        copying_collector<nursery_collector>(myvm_,newspace_) {}

    bool should_copy_p(object *untagged)
    {
        return myvm->nursery.contains_p(untagged);
    }

    void copy_reachable_objects(cell scan, cell *end)
    {
        while(scan < *end) scan = trace_next(scan);
    }
};

struct aging_collector : copying_collector<aging_collector> {
    zone *tenured;

    explicit aging_collector(factor_vm *myvm_, zone *newspace_) :
        copying_collector<aging_collector>(myvm_,newspace_),
        tenured(myvm->data->tenured) {}

    bool should_copy_p(object *untagged)
    {
        if(newspace->contains_p(untagged))
            return false;
        else
            return !tenured->contains_p(untagged);
    }

    void copy_reachable_objects(cell scan, cell *end)
    {
        while(scan < *end) scan = trace_next(scan);
    }
};

struct aging_again_collector : copying_collector<aging_again_collector> {
    explicit aging_again_collector(factor_vm *myvm_, zone *newspace_) :
        copying_collector<aging_again_collector>(myvm_,newspace_) {}

    bool should_copy_p(object *untagged)
    {
        return !newspace->contains_p(untagged);
    }

    void copy_reachable_objects(cell scan, cell *end)
    {
        while(scan < *end) scan = trace_next(scan);
    }
};
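/* Each concrete collector (including tenured_collector below) derives from
   copying_collector<itself> in the curiously recurring template pattern, so
   strategy() can hand the shared template code a reference to the most
   derived type without virtual dispatch. A new collector only has to supply
   should_copy_p() and copy_reachable_objects(); allocation, forwarding and
   slot tracing are inherited. A minimal sketch (my_collector is a
   hypothetical name, not part of the VM):

       struct my_collector : copying_collector<my_collector> {
           explicit my_collector(factor_vm *vm, zone *to) :
               copying_collector<my_collector>(vm,to) {}
           bool should_copy_p(object *o) { return !newspace->contains_p(o); }
           void copy_reachable_objects(cell scan, cell *end) {
               while(scan < *end) scan = trace_next(scan);
           }
       };
*/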
struct tenured_collector : copying_collector<tenured_collector> {
    explicit tenured_collector(factor_vm *myvm_, zone *newspace_) :
        copying_collector<tenured_collector>(myvm_,newspace_) {}

    bool should_copy_p(object *untagged)
    {
        return !newspace->contains_p(untagged);
    }

    void copy_reachable_objects(cell scan, cell *end)
    {
        while(scan < *end)
        {
            myvm->mark_object_code_block(myvm->untag<object>(scan),*this);
            scan = trace_next(scan);
        }
    }
};

void factor_vm::collect_nursery()
{
    nursery_collector collector(this,data->aging);

    trace_roots(collector);
    trace_contexts(collector);
    trace_cards(tenured_gen,data->tenured,collector);
    trace_cards(aging_gen,data->aging,collector);
    trace_code_heap_roots(collector);
    collector.go();

    update_dirty_code_blocks();

    nursery.here = nursery.start;
}

void factor_vm::collect_aging()
{
    std::swap(data->aging,data->aging_semispace);
    reset_generation(data->aging);

    aging_collector collector(this,data->aging);

    trace_roots(collector);
    trace_contexts(collector);
    trace_cards(tenured_gen,data->tenured,collector);
    trace_code_heap_roots(collector);
    collector.go();

    update_dirty_code_blocks();

    nursery.here = nursery.start;
}

void factor_vm::collect_aging_again()
{
    aging_again_collector collector(this,data->tenured);

    trace_roots(collector);
    trace_contexts(collector);
    trace_cards(tenured_gen,data->tenured,collector);
    trace_code_heap_roots(collector);
    collector.go();

    update_dirty_code_blocks();

    reset_generation(data->aging);
    nursery.here = nursery.start;
}

void factor_vm::collect_tenured(cell requested_bytes, bool trace_contexts_)
{
    if(current_gc->growing_data_heap)
    {
        current_gc->old_data_heap = data;
        set_data_heap(grow_data_heap(current_gc->old_data_heap,requested_bytes));
    }
    else
    {
        std::swap(data->tenured,data->tenured_semispace);
        reset_generation(data->tenured);
    }

    tenured_collector collector(this,data->tenured);

    trace_roots(collector);
    if(trace_contexts_) trace_contexts(collector);
    collector.go();

    free_unmarked_code_blocks();

    reset_generation(data->aging);
    nursery.here = nursery.start;

    if(current_gc->growing_data_heap)
        delete current_gc->old_data_heap;
}

void factor_vm::record_gc_stats()
{
    generation_statistics *s = &gc_stats.generations[current_gc->collecting_gen];

    cell gc_elapsed = (current_micros() - current_gc->start_time);
    s->collections++;
    s->gc_time += gc_elapsed;
    if(s->max_gc_time < gc_elapsed) s->max_gc_time = gc_elapsed;
}
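/* Summary of the collection scheme implemented above: the nursery is
   evacuated into aging space; aging space is a pair of semispaces that are
   swapped and copied on each aging collection; if an aging collection itself
   runs out of room, the "aging again" pass copies survivors straight into
   tenured space; and a tenured collection either swaps the tenured
   semispaces or, when growing_data_heap is set, copies everything into a
   freshly grown heap. */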
/* Collect gen and all younger generations.
If growing_data_heap_ is true, we must grow the data heap to such a size
that an allocation of requested_bytes won't fail */
void factor_vm::garbage_collection(cell collecting_gen_, bool growing_data_heap_, bool trace_contexts_, cell requested_bytes)
{
    assert(!gc_off);
    assert(!current_gc);

    current_gc = new gc_state(data,growing_data_heap_,collecting_gen_);

    /* Keep trying to GC higher and higher generations until we don't run
    out of space */
    if(setjmp(current_gc->gc_unwind))
    {
        /* We come back here if a generation is full */

        /* We have no older generations we can try collecting, so we
        resort to growing the data heap */
        if(current_gc->collecting_tenured_p())
        {
            current_gc->growing_data_heap = true;

            /* see the comment in unmark_marked() */
            code->unmark_marked();
        }
        /* we try collecting aging space twice before going on to
        collect tenured */
        else if(current_gc->collecting_aging_p()
            && !current_gc->collecting_aging_again)
        {
            current_gc->collecting_aging_again = true;
        }
        /* Collect the next oldest generation */
        else
        {
            current_gc->collecting_gen++;
        }
    }

    if(current_gc->collecting_nursery_p())
        collect_nursery();
    else if(current_gc->collecting_aging_p())
    {
        if(current_gc->collecting_aging_again)
            collect_aging_again();
        else
            collect_aging();
    }
    else if(current_gc->collecting_tenured_p())
        collect_tenured(requested_bytes,trace_contexts_);

    record_gc_stats();

    delete current_gc;
    current_gc = NULL;
}

void factor_vm::gc()
{
    garbage_collection(tenured_gen,false,true,0);
}

void factor_vm::primitive_gc()
{
    gc();
}

void factor_vm::primitive_gc_stats()
{
    growable_array result(this);

    cell i;
    u64 total_gc_time = 0;

    for(i = 0; i < gen_count; i++)
    {
        generation_statistics *s = &gc_stats.generations[i];
        result.add(allot_cell(s->collections));
        result.add(tag<bignum>(long_long_to_bignum(s->gc_time)));
        result.add(tag<bignum>(long_long_to_bignum(s->max_gc_time)));
        result.add(allot_cell(s->collections == 0 ? 0 : s->gc_time / s->collections));
        result.add(allot_cell(s->object_count));
        result.add(tag<bignum>(long_long_to_bignum(s->bytes_copied)));

        total_gc_time += s->gc_time;
    }

    result.add(tag<bignum>(ulong_long_to_bignum(total_gc_time)));
    result.add(tag<bignum>(ulong_long_to_bignum(gc_stats.cards_scanned)));
    result.add(tag<bignum>(ulong_long_to_bignum(gc_stats.decks_scanned)));
    result.add(tag<bignum>(ulong_long_to_bignum(gc_stats.card_scan_time)));
    result.add(allot_cell(gc_stats.code_heap_scans));

    result.trim();
    dpush(result.elements.value());
}

void factor_vm::clear_gc_stats()
{
    memset(&gc_stats,0,sizeof(gc_statistics));
}

void factor_vm::primitive_clear_gc_stats()
{
    clear_gc_stats();
}

/* classes.tuple uses this to reshape tuples; tools.deploy.shaker uses this
to coalesce equal but distinct quotations and wrappers. */
void factor_vm::primitive_become()
{
    array *new_objects = untag_check<array>(dpop());
    array *old_objects = untag_check<array>(dpop());

    cell capacity = array_capacity(new_objects);
    if(capacity != array_capacity(old_objects))
        critical_error("bad parameters to become",0);

    cell i;
    for(i = 0; i < capacity; i++)
    {
        tagged<object> old_obj(array_nth(old_objects,i));
        tagged<object> new_obj(array_nth(new_objects,i));

        if(old_obj != new_obj)
            old_obj->h.forward_to(new_obj.untagged());
    }

    gc();

    /* If a word's definition quotation was in old_objects and the
    quotation in new_objects is not compiled, we might leak memory by
    referencing the old quotation unless we recompile all unoptimized
    words. */
    compile_all_words();
}
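/* primitive_become piggybacks on the collector's forwarding machinery: each
   old object's header is turned into a forwarding pointer to its replacement,
   and the full collection that follows rewrites every live reference through
   resolve_forwarding(), so after the GC no pointer to the old object survives
   in the heap or in compiled code literals. */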
void factor_vm::inline_gc(cell *gc_roots_base, cell gc_roots_size)
{
    for(cell i = 0; i < gc_roots_size; i++)
        gc_locals.push_back((cell)&gc_roots_base[i]);

    garbage_collection(nursery_gen,false,true,0);

    for(cell i = 0; i < gc_roots_size; i++)
        gc_locals.pop_back();
}

VM_C_API void inline_gc(cell *gc_roots_base, cell gc_roots_size, factor_vm *myvm)
{
    ASSERTVM();
    VM_PTR->inline_gc(gc_roots_base,gc_roots_size);
}

/*
 * It is up to the caller to fill in the object's fields in a meaningful
 * fashion!
 */
object *factor_vm::allot_object(header header, cell size)
{
#ifdef GC_DEBUG
    if(!gc_off)
        gc();
#endif

    object *obj;

    if(nursery.size > size)
    {
        /* If there is insufficient room, collect the nursery */
        if(nursery.here + size > nursery.end)
            garbage_collection(nursery_gen,false,true,0);

        obj = nursery.allot(size);
    }
    /* If the object is bigger than the nursery, allocate it in tenured space */
    else
    {
        /* If tenured space does not have enough room, collect */
        if(data->tenured->here + size > data->tenured->end)
            gc();

        /* If it still won't fit, grow the heap */
        if(data->tenured->here + size > data->tenured->end)
            garbage_collection(tenured_gen,true,true,size);

        obj = data->tenured->allot(size);

        allot_barrier(obj);
        /* Allows initialization code to store old->new pointers without
        hitting the write barrier in the common case of a nursery
        allocation */
        write_barrier(obj);
    }

    obj->h = header;
    return obj;
}

}