vm: new mark_bits data structure replaces hashtable when compacting code heap
parent 464aac14cf
commit ac25b8ebbb
@@ -379,7 +379,7 @@ struct literal_and_word_references_updater {
 	}
 };
 
-void factor_vm::update_code_block_for_full_gc(code_block *compiled)
+void factor_vm::update_code_block_words_and_literals(code_block *compiled)
 {
 	if(code->needs_fixup_p(compiled))
 		relocate_code_block(compiled);
@@ -73,6 +73,44 @@ void factor_vm::update_code_heap_words()
 	iterate_code_heap(updater);
 }
 
+/* After a full GC that did not grow the heap, we have to update references
+to literals and other words. */
+struct word_and_literal_code_heap_updater {
+	factor_vm *parent;
+
+	word_and_literal_code_heap_updater(factor_vm *parent_) : parent(parent_) {}
+
+	void operator()(heap_block *block)
+	{
+		parent->update_code_block_words_and_literals((code_block *)block);
+	}
+};
+
+void factor_vm::update_code_heap_words_and_literals()
+{
+	word_and_literal_code_heap_updater updater(this);
+	code->sweep_heap(updater);
+}
+
+/* After growing the heap, we have to perform a full relocation to update
+references to card and deck arrays. */
+struct code_heap_relocator {
+	factor_vm *parent;
+
+	code_heap_relocator(factor_vm *parent_) : parent(parent_) {}
+
+	void operator()(heap_block *block)
+	{
+		parent->relocate_code_block((code_block *)block);
+	}
+};
+
+void factor_vm::relocate_code_heap()
+{
+	code_heap_relocator relocator(this);
+	code->sweep_heap(relocator);
+}
+
 void factor_vm::primitive_modify_code_heap()
 {
 	gc_root<array> alist(dpop(),this);
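The two helpers added above follow the VM's existing pattern of driving a heap pass with a small functor: sweep_heap (renamed from free_unmarked in the struct heap hunk below) hands every live block to the functor's operator(). The following standalone sketch shows that pattern in isolation; demo_heap, demo_block and payload_printer are illustrative stand-ins, not the VM's types.

// Standalone sketch of the functor-over-blocks pattern; demo types only.
#include <cstdio>
#include <vector>

struct demo_block
{
	bool marked;
	int payload;
};

struct demo_heap
{
	std::vector<demo_block> blocks;

	// Mirrors how sweep_heap/iterate_code_heap hand each live block to an
	// updater object such as word_and_literal_code_heap_updater above.
	template<typename Iterator> void sweep(Iterator &iter)
	{
		for(size_t i = 0; i < blocks.size(); i++)
			if(blocks[i].marked)
				iter(&blocks[i]);
	}
};

struct payload_printer
{
	void operator()(demo_block *block)
	{
		std::printf("live payload: %d\n", block->payload);
	}
};

int main()
{
	demo_heap heap;
	demo_block a = {true, 1}, b = {false, 2}, c = {true, 3};
	heap.blocks.push_back(a);
	heap.blocks.push_back(b);
	heap.blocks.push_back(c);

	payload_printer printer;
	heap.sweep(printer); // prints 1 and 3; the unmarked block is skipped
	return 0;
}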
@@ -139,11 +177,7 @@ void factor_vm::primitive_code_room()
 
 code_block *code_heap::forward_code_block(code_block *compiled)
 {
-	code_block *block1 = (code_block *)state->forward_block(compiled);
-	code_block *block2 = (code_block *)forwarding[compiled];
-	printf("%lx %lx\n",block1,block2);
-	assert(block1 == block2);
-	return block2;
+	return (code_block *)state->forward_block(compiled);
 }
 
 struct callframe_forwarder {
@@ -248,6 +282,9 @@ void factor_vm::compact_code_heap(bool trace_contexts_p)
 		forward_context_xts();
 		forward_callback_xts();
 	}
+
+	code_heap_relocator relocator(this);
+	iterate_code_heap(relocator);
 }
 
 struct stack_trace_stripper {
@@ -4,7 +4,7 @@ namespace factor
 {
 
 factor_vm *vm;
-unordered_map<THREADHANDLE, factor_vm*> thread_vms;
+std::map<THREADHANDLE, factor_vm*> thread_vms;
 
 void init_globals()
 {
@@ -104,32 +104,6 @@ void full_collector::cheneys_algorithm()
 	}
 }
 
-/* After growing the heap, we have to perform a full relocation to update
-references to card and deck arrays. */
-struct big_code_heap_updater {
-	factor_vm *parent;
-
-	big_code_heap_updater(factor_vm *parent_) : parent(parent_) {}
-
-	void operator()(heap_block *block)
-	{
-		parent->relocate_code_block((code_block *)block);
-	}
-};
-
-/* After a full GC that did not grow the heap, we have to update references
-to literals and other words. */
-struct small_code_heap_updater {
-	factor_vm *parent;
-
-	small_code_heap_updater(factor_vm *parent_) : parent(parent_) {}
-
-	void operator()(heap_block *block)
-	{
-		parent->update_code_block_for_full_gc((code_block *)block);
-	}
-};
-
 void factor_vm::collect_full_impl(bool trace_contexts_p)
 {
 	full_collector collector(this);
@@ -161,16 +135,9 @@ void factor_vm::collect_growing_heap(cell requested_bytes,
 	delete old;
 
 	if(compact_code_heap_p)
-	{
 		compact_code_heap(trace_contexts_p);
-		big_code_heap_updater updater(this);
-		iterate_code_heap(updater);
-	}
 	else
-	{
-		big_code_heap_updater updater(this);
-		code->free_unmarked(updater);
-	}
+		relocate_code_heap();
 
 	code->clear_remembered_set();
 }
@@ -183,16 +150,9 @@ void factor_vm::collect_full(bool trace_contexts_p, bool compact_code_heap_p)
 	collect_full_impl(trace_contexts_p);
 
 	if(compact_code_heap_p)
-	{
 		compact_code_heap(trace_contexts_p);
-		big_code_heap_updater updater(this);
-		iterate_code_heap(updater);
-	}
 	else
-	{
-		small_code_heap_updater updater(this);
-		code->free_unmarked(updater);
-	}
+		update_code_heap_words_and_literals();
 
 	code->clear_remembered_set();
 }
@@ -191,7 +191,6 @@ cell heap::heap_size()
 
 void heap::compact_heap()
 {
-	forwarding.clear();
 	state->compute_forwarding();
 
 	heap_block *scan = first_block();
@@ -208,7 +207,6 @@ void heap::compact_heap()
 		{
 			cell size = scan->size();
 			memmove(address,scan,size);
-			forwarding[scan] = address;
 			address += size;
 		}
 
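With the forwarding hashtable gone, the slide loop above only moves memory; a block's new address is recomputed from the mark bitmap whenever it is needed (see forward_code_block earlier and the mark_bits hunks below). The following toy, self-contained model of the slide uses made-up types (toy_block) over equal-sized slots, so it is an illustration of the idea rather than the VM's variable-size compaction.

// Toy model of sliding compaction: marked blocks move down over the gaps
// left by unmarked ones, and no forwarding table is recorded.
#include <cassert>
#include <cstring>
#include <vector>

struct toy_block
{
	bool marked;
	char payload[16];
};

static size_t compact(std::vector<toy_block> &heap)
{
	size_t dest = 0;
	for(size_t scan = 0; scan < heap.size(); scan++)
	{
		if(!heap[scan].marked) continue;
		if(dest != scan)
			std::memmove(&heap[dest], &heap[scan], sizeof(toy_block));
		dest++;               // next free slot
	}
	return dest;              // number of live blocks after compaction
}

int main()
{
	toy_block init[4] = {{true, "a"}, {false, "b"}, {true, "c"}, {false, "d"}};
	std::vector<toy_block> heap(init, init + 4);

	size_t live = compact(heap);
	assert(live == 2);
	assert(std::strcmp(heap[1].payload, "c") == 0); // "c" slid into slot 1
	return 0;
}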
@@ -14,7 +14,6 @@ struct heap {
 	segment *seg;
 	heap_free_list free;
 	mark_bits<heap_block,block_size_increment> *state;
-	unordered_map<heap_block *, char *> forwarding;
 
 	explicit heap(bool secure_gc_, cell size, bool executable_p);
 	~heap();
|
@ -45,9 +44,9 @@ struct heap {
|
||||||
|
|
||||||
heap_block *free_allocated(heap_block *prev, heap_block *scan);
|
heap_block *free_allocated(heap_block *prev, heap_block *scan);
|
||||||
|
|
||||||
/* After code GC, all referenced code blocks have status set to B_MARKED, so any
|
/* After code GC, all live code blocks are marked, so any
|
||||||
which are allocated and not marked can be reclaimed. */
|
which are not marked can be reclaimed. */
|
||||||
template<typename Iterator> void free_unmarked(Iterator &iter)
|
template<typename Iterator> void sweep_heap(Iterator &iter)
|
||||||
{
|
{
|
||||||
clear_free_list();
|
clear_free_list();
|
||||||
|
|
||||||
|
|
|
@@ -78,7 +78,7 @@ static void call_fault_handler(
 {
 	THREADHANDLE thread_id = pthread_from_mach_thread_np(thread);
 	assert(thread_id);
-	unordered_map<THREADHANDLE, factor_vm*>::const_iterator vm = thread_vms.find(thread_id);
+	std::map<THREADHANDLE, factor_vm*>::const_iterator vm = thread_vms.find(thread_id);
 	if (vm != thread_vms.end())
 		vm->second->call_fault_handler(exception,code,exc_state,thread_state,float_state);
 }
@@ -92,11 +92,8 @@ template<typename Block, int Granularity> struct mark_bits {
 		{
 			bits[start.first] |= ~start_mask;
 
-			if(end.first != 0)
-			{
-				for(cell index = start.first + 1; index < end.first - 1; index++)
-					bits[index] = (u64)-1;
-			}
+			for(cell index = start.first + 1; index < end.first; index++)
+				bits[index] = (u64)-1;
 
 			bits[end.first] |= end_mask;
 		}
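The old loop appears to stop one word short (it runs while index < end.first - 1, leaving the word just before the end word unfilled) and needs the end.first != 0 guard to avoid unsigned wrap-around; the new loop unconditionally fills every 64-bit word strictly between the first and the last. The sketch below re-derives the same range fill against a plain bit-index interface. Its mask conventions differ from the VM's (which ORs in ~start_mask), so treat set_range as an independent illustration, not the real set_bitmap_range.

// Set bits [first, last] (inclusive) in a packed 64-bit bitmap.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

static void set_range(std::vector<uint64_t> &bits, size_t first, size_t last)
{
	size_t start_word = first / 64, start_bit = first % 64;
	size_t end_word = last / 64, end_bit = last % 64;

	uint64_t start_mask = ~UINT64_C(0) << start_bit;    // bits start_bit..63
	uint64_t end_mask = ~UINT64_C(0) >> (63 - end_bit); // bits 0..end_bit

	if(start_word == end_word)
		bits[start_word] |= (start_mask & end_mask);
	else
	{
		bits[start_word] |= start_mask;
		// This is what the patched for-loop above does: every word strictly
		// between the first and last word becomes all ones.
		for(size_t index = start_word + 1; index < end_word; index++)
			bits[index] = ~UINT64_C(0);
		bits[end_word] |= end_mask;
	}
}

int main()
{
	std::vector<uint64_t> bits(4, 0);
	set_range(bits, 10, 130);                 // spans three 64-bit words
	assert(bits[0] == (~UINT64_C(0) << 10));  // word 0: bits 10..63
	assert(bits[1] == ~UINT64_C(0));          // word 1: completely filled
	assert(bits[2] == UINT64_C(0x7));         // word 2: bits 0..2 (128..130)
	return 0;
}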
@@ -122,21 +119,9 @@ template<typename Block, int Granularity> struct mark_bits {
 		set_bitmap_range(allocated,address);
 	}
 
-	cell popcount1(u64 x)
-	{
-		cell accum = 0;
-		while(x > 0)
-		{
-			accum += (x & 1);
-			x >>= 1;
-		}
-		return accum;
-	}
-
 	/* From http://chessprogramming.wikispaces.com/Population+Count */
 	cell popcount(u64 x)
 	{
-		cell old = x;
 		u64 k1 = 0x5555555555555555ll;
 		u64 k2 = 0x3333333333333333ll;
 		u64 k4 = 0x0f0f0f0f0f0f0f0fll;
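popcount1() existed only as a slow reference implementation backing the assert that is removed in the next hunk. If the same sanity check is ever wanted without carrying the extra function, the kept SWAR popcount can be spot-checked against a compiler builtin. The sketch below is an illustration, not part of the commit; swar_popcount is a re-derivation of the standard algorithm, and __builtin_popcountll is GCC/Clang-specific.

#include <cassert>
#include <cstdint>

static unsigned swar_popcount(uint64_t x)
{
	const uint64_t k1 = UINT64_C(0x5555555555555555);
	const uint64_t k2 = UINT64_C(0x3333333333333333);
	const uint64_t k4 = UINT64_C(0x0f0f0f0f0f0f0f0f);
	const uint64_t kf = UINT64_C(0x0101010101010101);
	x = x - ((x >> 1) & k1);           // pairwise counts
	x = (x & k2) + ((x >> 2) & k2);    // 4-bit counts
	x = (x + (x >> 4)) & k4;           // 8-bit counts
	return (unsigned)((x * kf) >> 56); // sum of the byte counts
}

int main()
{
	const uint64_t samples[] = {
		UINT64_C(0), UINT64_C(1), UINT64_C(0xff),
		UINT64_C(0x8000000000000001), ~UINT64_C(0)
	};
	for(unsigned i = 0; i < sizeof(samples) / sizeof(samples[0]); i++)
		assert(swar_popcount(samples[i]) == (unsigned)__builtin_popcountll(samples[i]));
	return 0;
}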
@@ -146,12 +131,11 @@ template<typename Block, int Granularity> struct mark_bits {
 		x = (x + (x >> 4)) & k4 ; // put count of each 8 bits into those 8 bits
 		x = (x * kf) >> 56; // returns 8 most significant bits of x + (x<<8) + (x<<16) + (x<<24) + ...
 
-		assert(x == popcount1(old));
 		return (cell)x;
 	}
 
 	/* The eventual destination of a block after compaction is just the number
-	of marked blocks before it. */
+	of marked blocks before it. Live blocks must be marked on entry. */
 	void compute_forwarding()
 	{
 		cell accum = 0;
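The updated comment states the trick behind this whole commit: after marking, a live block's destination is determined purely by how many marked blocks precede it, so no per-block forwarding hashtable is needed. Below is a self-contained sketch of that idea over a bitmap of fixed-size slots. The names (forwarding_map, compute, forward) are illustrative, the real mark_bits works on Granularity-sized heap blocks, and the builtin stands in for the VM's portable popcount() shown above.

// Forwarding derived from mark bits plus per-word prefix counts.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

struct forwarding_map
{
	std::vector<uint64_t> marked;   // one bit per fixed-size block slot
	std::vector<unsigned> prefix;   // marked slots in all words before this one

	void compute()                  // analogue of compute_forwarding()
	{
		prefix.assign(marked.size(), 0);
		unsigned accum = 0;
		for(size_t i = 0; i < marked.size(); i++)
		{
			prefix[i] = accum;
			accum += (unsigned)__builtin_popcountll(marked[i]);
		}
	}

	// New slot index of the marked block currently in old_slot: simply the
	// number of marked slots strictly before it.
	unsigned forward(size_t old_slot) const
	{
		uint64_t below = marked[old_slot / 64]
			& ((UINT64_C(1) << (old_slot % 64)) - 1);
		return prefix[old_slot / 64] + (unsigned)__builtin_popcountll(below);
	}
};

int main()
{
	forwarding_map m;
	m.marked.assign(2, 0);
	m.marked[0] = (UINT64_C(1) << 3) | (UINT64_C(1) << 40); // slots 3, 40 live
	m.marked[1] = UINT64_C(1) << 5;                         // slot 69 live
	m.compute();
	assert(m.forward(3) == 0);   // nothing marked before it
	assert(m.forward(40) == 1);  // one marked block precedes it
	assert(m.forward(69) == 2);  // two marked blocks precede it
	return 0;
}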
@@ -25,27 +25,10 @@
 
 /* C++ headers */
 #include <algorithm>
+#include <map>
 #include <set>
 #include <vector>
 
-#if __GNUC__ == 4
-#include <tr1/unordered_map>
-
-namespace factor
-{
-	using std::tr1::unordered_map;
-}
-#elif __GNUC__ == 3
-#include <boost/unordered_map.hpp>
-
-namespace factor
-{
-	using boost::unordered_map;
-}
-#else
-#error Factor requires GCC 3.x or later
-#endif
-
 /* Forward-declare this since it comes up in function prototypes */
 namespace factor
 {
@@ -494,7 +494,7 @@ struct factor_vm
 	void update_literal_references(code_block *compiled);
 	void relocate_code_block_step(relocation_entry rel, cell index, code_block *compiled);
 	void update_word_references(code_block *compiled);
-	void update_code_block_for_full_gc(code_block *compiled);
+	void update_code_block_words_and_literals(code_block *compiled);
 	void check_code_address(cell address);
 	void relocate_code_block(code_block *compiled);
 	void fixup_labels(array *labels, code_block *compiled);
@@ -513,6 +513,8 @@ struct factor_vm
 	bool in_code_heap_p(cell ptr);
 	void jit_compile_word(cell word_, cell def_, bool relocate);
 	void update_code_heap_words();
+	void update_code_heap_words_and_literals();
+	void relocate_code_heap();
 	void primitive_modify_code_heap();
 	void primitive_code_room();
 	void forward_object_xts();
@@ -711,6 +713,6 @@ struct factor_vm
 };
 
-extern unordered_map<THREADHANDLE, factor_vm *> thread_vms;
+extern std::map<THREADHANDLE, factor_vm *> thread_vms;
 
 }
 