vm: working on making heap more generic
parent 838a44e901
commit acdcb181e0

Makefile | 1

@@ -49,7 +49,6 @@ DLL_OBJS = $(PLAF_DLL_OBJS) \
 	vm/factor.o \
 	vm/full_collector.o \
 	vm/gc.o \
-	vm/heap.o \
 	vm/image.o \
 	vm/inline_cache.o \
 	vm/io.o \

@@ -3,7 +3,8 @@
 namespace factor
 {
 
-code_heap::code_heap(bool secure_gc, cell size) : heap(secure_gc,size,true) {}
+code_heap::code_heap(bool secure_gc, cell size) :
+	heap<heap_block,code_heap_layout>(secure_gc,size,true) {}
 
 void code_heap::write_barrier(code_block *compiled)
 {

@@ -1,7 +1,19 @@
 namespace factor
 {
 
-struct code_heap : heap {
+struct code_heap_layout {
+	cell block_size(heap_block *block)
+	{
+		return block->size();
+	}
+
+	heap_block *next_block_after(heap_block *block)
+	{
+		return (heap_block *)((cell)block + block_size(block));
+	}
+};
+
+struct code_heap : heap<heap_block,code_heap_layout> {
 	/* Set of blocks which need full relocation. */
 	std::set<code_block *> needs_fixup;
 
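
The code_heap_layout struct above is the new "layout policy": the generic heap no longer calls size()/next() on its blocks directly, it asks the policy for a block's size and for where the following block starts. A minimal standalone sketch of that shape; toy_block, toy_layout, toy_heap and block_printer are made-up names, not the VM's classes, and the real heap also manages a free list and mark bits on top of this.

/* illustrative sketch only, not VM code */
#include <cstddef>
#include <cstdio>

typedef size_t cell;

struct toy_block { cell size_in_bytes; };

/* the layout policy: how big is a block, and where does the next one begin */
struct toy_layout {
	cell block_size(toy_block *block) { return block->size_in_bytes; }
	toy_block *next_block_after(toy_block *block)
	{
		return (toy_block *)((cell)block + block_size(block));
	}
};

template<typename Block, typename HeapLayout> struct toy_heap {
	HeapLayout layout;
	Block *first, *last;

	/* walk every block, asking the layout policy for sizes and successors */
	template<typename Iterator> void iterate(Iterator &iter)
	{
		for(Block *scan = first; scan != last; scan = layout.next_block_after(scan))
			iter(scan,layout.block_size(scan));
	}
};

struct block_printer {
	void operator()(toy_block *block, cell size)
	{
		std::printf("%p %lu\n",(void *)block,(unsigned long)size);
	}
};

int main()
{
	/* two hand-built blocks in a 64-byte arena, aligned for cell access */
	static union { unsigned char bytes[64]; cell align; } arena;
	((toy_block *)arena.bytes)->size_in_bytes = 16;
	((toy_block *)(arena.bytes + 16))->size_in_bytes = 48;

	toy_heap<toy_block,toy_layout> h;
	h.first = (toy_block *)arena.bytes;
	h.last = (toy_block *)(arena.bytes + sizeof arena.bytes);

	block_printer printer;
	h.iterate(printer);   /* visits both blocks with their sizes */
	return 0;
}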

vm/debug.cpp | 39

@@ -284,41 +284,44 @@ void factor_vm::find_data_references(cell look_for)
 	end_scan();
 }
 
-/* Dump all code blocks for debugging */
-void factor_vm::dump_code_heap()
-{
-	cell reloc_size = 0, literal_size = 0;
+struct code_block_printer {
+	factor_vm *parent;
+	cell reloc_size, literal_size;
 
-	heap_block *scan = code->first_block();
-	heap_block *end = code->last_block();
+	code_block_printer(factor_vm *parent_) :
+		parent(parent_), reloc_size(0), literal_size(0) {}
 
-	while(scan != end)
+	void operator()(heap_block *scan, cell size)
 	{
 		const char *status;
 		if(scan->free_p())
 			status = "free";
-		else if(code->state->is_marked_p(scan))
+		else if(parent->code->state->is_marked_p(scan))
 		{
-			reloc_size += object_size(((code_block *)scan)->relocation);
-			literal_size += object_size(((code_block *)scan)->literals);
+			reloc_size += parent->object_size(((code_block *)scan)->relocation);
+			literal_size += parent->object_size(((code_block *)scan)->literals);
 			status = "marked";
 		}
 		else
 		{
-			reloc_size += object_size(((code_block *)scan)->relocation);
-			literal_size += object_size(((code_block *)scan)->literals);
+			reloc_size += parent->object_size(((code_block *)scan)->relocation);
+			literal_size += parent->object_size(((code_block *)scan)->literals);
 			status = "allocated";
 		}
 
 		print_cell_hex((cell)scan); print_string(" ");
-		print_cell_hex(scan->size()); print_string(" ");
+		print_cell_hex(size); print_string(" ");
 		print_string(status); print_string("\n");
-
-		scan = scan->next();
 	}
+};
 
-	print_cell(reloc_size); print_string(" bytes of relocation data\n");
-	print_cell(literal_size); print_string(" bytes of literal data\n");
+/* Dump all code blocks for debugging */
+void factor_vm::dump_code_heap()
+{
+	code_block_printer printer(this);
+	code->iterate_heap(printer);
+	print_cell(printer.reloc_size); print_string(" bytes of relocation data\n");
+	print_cell(printer.literal_size); print_string(" bytes of literal data\n");
 }
 
 void factor_vm::factorbug()
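
The old inline loop in dump_code_heap becomes code_block_printer, a functor that carries its own byte counters and is handed to code->iterate_heap; the caller reads the totals off the functor afterwards. A self-contained sketch of that pattern; record, byte_counter and count_records are made-up names, not VM code.

/* illustrative sketch only, not VM code */
#include <cstdio>

typedef unsigned long cell;

struct record { cell size; bool is_free; };

/* the functor carries its own accumulators, like code_block_printer above */
struct byte_counter {
	cell used, free_bytes;

	byte_counter() : used(0), free_bytes(0) {}

	void operator()(record *r, cell size)
	{
		if(r->is_free) free_bytes += size;
		else used += size;
	}
};

template<typename Iterator> void count_records(record *begin, record *end, Iterator &iter)
{
	for(record *scan = begin; scan != end; scan++)
		iter(scan,scan->size);
}

int main()
{
	record records[3] = { { 32, false }, { 16, true }, { 64, false } };

	byte_counter counter;
	count_records(records,records + 3,counter);

	/* the caller reads the totals back after the walk */
	std::printf("%lu bytes used, %lu bytes free\n",counter.used,counter.free_bytes);
	return 0;
}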

vm/heap.cpp | 196

@@ -1,196 +0,0 @@
-#include "master.hpp"
-
-/* This malloc-style heap code is reasonably generic. Maybe in the future, it
-will be used for the data heap too, if we ever get mark/sweep/compact GC. */
-
-namespace factor
-{
-
-void heap::clear_free_list()
-{
-	memset(&free,0,sizeof(heap_free_list));
-}
-
-heap::heap(bool secure_gc_, cell size, bool executable_p) : secure_gc(secure_gc_)
-{
-	if(size > (1L << (sizeof(cell) * 8 - 6))) fatal_error("Heap too large",size);
-	seg = new segment(align_page(size),executable_p);
-	if(!seg) fatal_error("Out of memory in heap allocator",size);
-	state = new mark_bits<heap_block,block_size_increment>(seg->start,size);
-	clear_free_list();
-}
-
-heap::~heap()
-{
-	delete seg;
-	seg = NULL;
-	delete state;
-	state = NULL;
-}
-
-void heap::add_to_free_list(free_heap_block *block)
-{
-	if(block->size() < free_list_count * block_size_increment)
-	{
-		int index = block->size() / block_size_increment;
-		block->next_free = free.small_blocks[index];
-		free.small_blocks[index] = block;
-	}
-	else
-	{
-		block->next_free = free.large_blocks;
-		free.large_blocks = block;
-	}
-}
-
-/* Called after reading the code heap from the image file, and after code heap
-compaction. Makes a free list consisting of one free block, at the very end. */
-void heap::build_free_list(cell size)
-{
-	clear_free_list();
-	free_heap_block *end = (free_heap_block *)(seg->start + size);
-	end->set_free();
-	end->set_size(seg->end - (cell)end);
-	add_to_free_list(end);
-}
-
-void heap::assert_free_block(free_heap_block *block)
-{
-#ifdef FACTOR_DEBUG
-	assert(block->free_p());
-#endif
-}
-
-free_heap_block *heap::find_free_block(cell size)
-{
-	cell attempt = size;
-
-	while(attempt < free_list_count * block_size_increment)
-	{
-		int index = attempt / block_size_increment;
-		free_heap_block *block = free.small_blocks[index];
-		if(block)
-		{
-			assert_free_block(block);
-			free.small_blocks[index] = block->next_free;
-			return block;
-		}
-
-		attempt *= 2;
-	}
-
-	free_heap_block *prev = NULL;
-	free_heap_block *block = free.large_blocks;
-
-	while(block)
-	{
-		assert_free_block(block);
-		if(block->size() >= size)
-		{
-			if(prev)
-				prev->next_free = block->next_free;
-			else
-				free.large_blocks = block->next_free;
-			return block;
-		}
-
-		prev = block;
-		block = block->next_free;
-	}
-
-	return NULL;
-}
-
-free_heap_block *heap::split_free_block(free_heap_block *block, cell size)
-{
-	if(block->size() != size)
-	{
-		/* split the block in two */
-		free_heap_block *split = (free_heap_block *)((cell)block + size);
-		split->set_free();
-		split->set_size(block->size() - size);
-		split->next_free = block->next_free;
-		block->set_size(size);
-		add_to_free_list(split);
-	}
-
-	return block;
-}
-
-heap_block *heap::heap_allot(cell size)
-{
-	size = align(size,block_size_increment);
-
-	free_heap_block *block = find_free_block(size);
-	if(block)
-	{
-		block = split_free_block(block,size);
-		return block;
-	}
-	else
-		return NULL;
-}
-
-void heap::heap_free(heap_block *block)
-{
-	free_heap_block *free_block = (free_heap_block *)block;
-	free_block->set_free();
-	add_to_free_list(free_block);
-}
-
-void heap::mark_block(heap_block *block)
-{
-	state->set_marked_p(block);
-}
-
-/* Compute total sum of sizes of free blocks, and size of largest free block */
-void heap::heap_usage(cell *used, cell *total_free, cell *max_free)
-{
-	*used = 0;
-	*total_free = 0;
-	*max_free = 0;
-
-	heap_block *scan = first_block();
-	heap_block *end = last_block();
-
-	while(scan != end)
-	{
-		cell size = scan->size();
-
-		if(scan->free_p())
-		{
-			*total_free += size;
-			if(size > *max_free)
-				*max_free = size;
-		}
-		else
-			*used += size;
-
-		scan = scan->next();
-	}
-}
-
-/* The size of the heap after compaction */
-cell heap::heap_size()
-{
-	heap_block *scan = first_block();
-	heap_block *end = last_block();
-
-	while(scan != end)
-	{
-		if(scan->free_p()) break;
-		else scan = scan->next();
-	}
-
-	if(scan != end)
-	{
-		assert(scan->free_p());
-		assert((cell)scan + scan->size() == seg->end);
-
-		return (cell)scan - (cell)first_block();
-	}
-	else
-		return seg->size;
-}
-
-}
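
vm/heap.cpp goes away because heap turns into a class template in the next file, and template member definitions have to be visible wherever heap<Block,HeapLayout> is instantiated, so the bodies move into vm/heap.hpp. A tiny, general C++ illustration of that rule; box is a made-up example type, not VM code.

/* illustrative sketch only */
#include <cstdio>

template<typename T> struct box {
	T value;
	T get();            /* declared in the class ... */
};

template<typename T>
T box<T>::get()         /* ... and defined in the same header, so that */
{                       /* box<int>, box<char>, ... can be instantiated */
	return value;       /* from any translation unit that includes it */
}

int main()
{
	box<int> b = { 42 };
	std::printf("%d\n",b.get());
	return 0;
}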

vm/heap.hpp | 269

@@ -2,30 +2,30 @@ namespace factor
 {
 
 static const cell free_list_count = 32;
 static const cell block_size_increment = 16;
 
 struct heap_free_list {
 	free_heap_block *small_blocks[free_list_count];
 	free_heap_block *large_blocks;
 };
 
-struct heap {
+template<typename Block, typename HeapLayout> struct heap {
 	bool secure_gc;
 	segment *seg;
 	heap_free_list free;
-	mark_bits<heap_block,block_size_increment> *state;
+	mark_bits<Block,HeapLayout> *state;
+	HeapLayout layout;
 
 	explicit heap(bool secure_gc_, cell size, bool executable_p);
 	~heap();
 
-	inline heap_block *first_block()
+	inline Block *first_block()
 	{
-		return (heap_block *)seg->start;
+		return (Block *)seg->start;
 	}
 
-	inline heap_block *last_block()
+	inline Block *last_block()
 	{
-		return (heap_block *)seg->end;
+		return (Block *)seg->end;
 	}
 
 	void clear_free_list();

@@ -34,46 +34,253 @@ struct heap {
 	void assert_free_block(free_heap_block *block);
 	free_heap_block *find_free_block(cell size);
 	free_heap_block *split_free_block(free_heap_block *block, cell size);
-	heap_block *heap_allot(cell size);
-	void heap_free(heap_block *block);
-	void mark_block(heap_block *block);
+	Block *heap_allot(cell size);
+	void heap_free(Block *block);
+	void mark_block(Block *block);
 	void heap_usage(cell *used, cell *total_free, cell *max_free);
 	cell heap_size();
-	void compact_heap();
 
 	template<typename Iterator> void sweep_heap(Iterator &iter);
 	template<typename Iterator> void compact_heap(Iterator &iter);
 
 	template<typename Iterator> void iterate_heap(Iterator &iter)
 	{
-		heap_block *scan = first_block();
-		heap_block *end = last_block();
+		Block *scan = first_block();
+		Block *end = last_block();
 
 		while(scan != end)
 		{
-			heap_block *next = scan->next();
-			if(!scan->free_p()) iter(scan,scan->size());
+			Block *next = layout.next_block_after(scan);
+			if(!scan->free_p()) iter(scan,layout.block_size(scan));
 			scan = next;
 		}
 	}
 };
 
+template<typename Block, typename HeapLayout>
+void heap<Block,HeapLayout>::clear_free_list()
+{
+	memset(&free,0,sizeof(heap_free_list));
+}
+
+template<typename Block, typename HeapLayout>
+heap<Block,HeapLayout>::heap(bool secure_gc_, cell size, bool executable_p) : secure_gc(secure_gc_)
+{
+	if(size > (1L << (sizeof(cell) * 8 - 6))) fatal_error("Heap too large",size);
+	seg = new segment(align_page(size),executable_p);
+	if(!seg) fatal_error("Out of memory in heap allocator",size);
+	state = new mark_bits<Block,HeapLayout>(seg->start,size);
+	clear_free_list();
+}
+
+template<typename Block, typename HeapLayout>
+heap<Block,HeapLayout>::~heap()
+{
+	delete seg;
+	seg = NULL;
+	delete state;
+	state = NULL;
+}
+
+template<typename Block, typename HeapLayout>
+void heap<Block,HeapLayout>::add_to_free_list(free_heap_block *block)
+{
+	if(block->size() < free_list_count * block_granularity)
+	{
+		int index = block->size() / block_granularity;
+		block->next_free = free.small_blocks[index];
+		free.small_blocks[index] = block;
+	}
+	else
+	{
+		block->next_free = free.large_blocks;
+		free.large_blocks = block;
+	}
+}
+
+/* Called after reading the code heap from the image file, and after code heap
+compaction. Makes a free list consisting of one free block, at the very end. */
+template<typename Block, typename HeapLayout>
+void heap<Block,HeapLayout>::build_free_list(cell size)
+{
+	clear_free_list();
+	free_heap_block *end = (free_heap_block *)(seg->start + size);
+	end->set_free();
+	end->set_size(seg->end - (cell)end);
+	add_to_free_list(end);
+}
+
+template<typename Block, typename HeapLayout>
+void heap<Block,HeapLayout>::assert_free_block(free_heap_block *block)
+{
+#ifdef FACTOR_DEBUG
+	assert(block->free_p());
+#endif
+}
+
+template<typename Block, typename HeapLayout>
+free_heap_block *heap<Block,HeapLayout>::find_free_block(cell size)
+{
+	cell attempt = size;
+
+	while(attempt < free_list_count * block_granularity)
+	{
+		int index = attempt / block_granularity;
+		free_heap_block *block = free.small_blocks[index];
+		if(block)
+		{
+			assert_free_block(block);
+			free.small_blocks[index] = block->next_free;
+			return block;
+		}
+
+		attempt *= 2;
+	}
+
+	free_heap_block *prev = NULL;
+	free_heap_block *block = free.large_blocks;
+
+	while(block)
+	{
+		assert_free_block(block);
+		if(block->size() >= size)
+		{
+			if(prev)
+				prev->next_free = block->next_free;
+			else
+				free.large_blocks = block->next_free;
+			return block;
+		}
+
+		prev = block;
+		block = block->next_free;
+	}
+
+	return NULL;
+}
+
+template<typename Block, typename HeapLayout>
+free_heap_block *heap<Block,HeapLayout>::split_free_block(free_heap_block *block, cell size)
+{
+	if(block->size() != size)
+	{
+		/* split the block in two */
+		free_heap_block *split = (free_heap_block *)((cell)block + size);
+		split->set_free();
+		split->set_size(block->size() - size);
+		split->next_free = block->next_free;
+		block->set_size(size);
+		add_to_free_list(split);
+	}
+
+	return block;
+}
+
+template<typename Block, typename HeapLayout>
+Block *heap<Block,HeapLayout>::heap_allot(cell size)
+{
+	size = align(size,block_granularity);
+
+	free_heap_block *block = find_free_block(size);
+	if(block)
+	{
+		block = split_free_block(block,size);
+		return (Block *)block;
+	}
+	else
+		return NULL;
+}
+
+template<typename Block, typename HeapLayout>
+void heap<Block,HeapLayout>::heap_free(Block *block)
+{
+	free_heap_block *free_block = (free_heap_block *)block;
+	free_block->set_free();
+	add_to_free_list(free_block);
+}
+
+template<typename Block, typename HeapLayout>
+void heap<Block,HeapLayout>::mark_block(Block *block)
+{
+	state->set_marked_p(block);
+}
+
+/* Compute total sum of sizes of free blocks, and size of largest free block */
+template<typename Block, typename HeapLayout>
+void heap<Block,HeapLayout>::heap_usage(cell *used, cell *total_free, cell *max_free)
+{
+	*used = 0;
+	*total_free = 0;
+	*max_free = 0;
+
+	Block *scan = first_block();
+	Block *end = last_block();
+
+	while(scan != end)
+	{
+		cell size = layout.block_size(scan);
+
+		if(scan->free_p())
+		{
+			*total_free += size;
+			if(size > *max_free)
+				*max_free = size;
+		}
+		else
+			*used += size;
+
+		scan = layout.next_block_after(scan);
+	}
+}
+
+/* The size of the heap after compaction */
+template<typename Block, typename HeapLayout>
+cell heap<Block,HeapLayout>::heap_size()
+{
+	Block *scan = first_block();
+	Block *end = last_block();
+
+	while(scan != end)
+	{
+		if(scan->free_p()) break;
+		else scan = layout.next_block_after(scan);
+	}
+
+	if(scan != end)
+	{
+		free_heap_block *free_block = (free_heap_block *)scan;
+		assert(free_block->free_p());
+		assert((cell)scan + scan->size() == seg->end);
+
+		return (cell)scan - (cell)first_block();
+	}
+	else
+		return seg->size;
+}
+
 /* After code GC, all live code blocks are marked, so any
 which are not marked can be reclaimed. */
-template<typename Iterator> void heap::sweep_heap(Iterator &iter)
+template<typename Block, typename HeapLayout>
+template<typename Iterator>
+void heap<Block,HeapLayout>::sweep_heap(Iterator &iter)
 {
 	this->clear_free_list();
 
-	heap_block *prev = NULL;
-	heap_block *scan = this->first_block();
-	heap_block *end = this->last_block();
+	Block *prev = NULL;
+	Block *scan = this->first_block();
+	Block *end = this->last_block();
 
 	while(scan != end)
 	{
 		if(scan->free_p())
 		{
+			free_heap_block *free_scan = (free_heap_block *)scan;
+
 			if(prev && prev->free_p())
-				prev->set_size(prev->size() + scan->size());
+			{
+				free_heap_block *free_prev = (free_heap_block *)prev;
+				free_prev->set_size(free_prev->size() + free_scan->size());
+			}
 			else
 				prev = scan;
 		}

@@ -82,17 +289,17 @@ template<typename Iterator> void heap::sweep_heap(Iterator &iter)
 			if(prev && prev->free_p())
 				this->add_to_free_list((free_heap_block *)prev);
 			prev = scan;
-			iter(scan,scan->size());
+			iter(scan,layout.block_size(scan));
 		}
 		else
 		{
 			if(secure_gc)
-				memset(scan + 1,0,scan->size() - sizeof(heap_block));
+				memset(scan + 1,0,layout.block_size(scan) - sizeof(heap_block));
 
 			if(prev && prev->free_p())
 			{
 				free_heap_block *free_prev = (free_heap_block *)prev;
-				free_prev->set_size(free_prev->size() + scan->size());
+				free_prev->set_size(free_prev->size() + layout.block_size(scan));
 			}
 			else
 			{

@@ -101,7 +308,7 @@ template<typename Iterator> void heap::sweep_heap(Iterator &iter)
 			}
 		}
 
-		scan = scan->next();
+		scan = layout.next_block_after(scan);
 	}
 
 	if(prev && prev->free_p())

@@ -110,14 +317,16 @@ template<typename Iterator> void heap::sweep_heap(Iterator &iter)
 
 /* The forwarding map must be computed first by calling
 state->compute_forwarding(). */
-template<typename Iterator> void heap::compact_heap(Iterator &iter)
+template<typename Block, typename HeapLayout>
+template<typename Iterator>
+void heap<Block,HeapLayout>::compact_heap(Iterator &iter)
 {
-	heap_compacter<heap_block,block_size_increment,Iterator> compacter(state,first_block(),iter);
-	this->iterate_heap(compacter);
+	heap_compactor<Block,HeapLayout,Iterator> compactor(state,first_block(),iter);
+	this->iterate_heap(compactor);
 
 	/* Now update the free list; there will be a single free block at
 	the end */
-	this->build_free_list((cell)compacter.address - this->seg->start);
+	this->build_free_list((cell)compactor.address - this->seg->start);
 }
 
 }
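
The free list kept by the templated heap is size-segregated: a small free block is filed into one of free_list_count bins indexed by size / block_granularity, find_free_block keeps doubling the requested size until it leaves the small range, and anything larger lives on a single large-block list searched first-fit. A standalone sketch of just the bin arithmetic; bin_for_size is a made-up helper, not VM code, though the constants mirror the ones above.

/* illustrative sketch only */
#include <cstdio>

typedef unsigned long cell;

static const cell free_list_count = 32;
static const cell block_granularity = 16;

/* bin a free block of `size` bytes is filed under, or -1 for the large list */
int bin_for_size(cell size)
{
	if(size < free_list_count * block_granularity)
		return (int)(size / block_granularity);
	return -1;
}

int main()
{
	cell sizes[5] = { 16, 48, 496, 512, 4096 };
	for(int i = 0; i < 5; i++)
		std::printf("size %lu -> bin %d\n",sizes[i],bin_for_size(sizes[i]));
	return 0;
}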

@@ -231,11 +231,6 @@ struct heap_block
 	{
 		header = (header & 0x7) | (size << 3);
 	}
-
-	inline heap_block *next()
-	{
-		return (heap_block *)((cell)this + size());
-	}
 };
 
 struct free_heap_block : public heap_block

@@ -1,9 +1,11 @@
 namespace factor
 {
 
+const int block_granularity = 16;
 const int forwarding_granularity = 64;
 
-template<typename Block, int Granularity> struct mark_bits {
+template<typename Block, typename HeapLayout> struct mark_bits {
+	HeapLayout layout;
 	cell start;
 	cell size;
 	cell bits_size;

@@ -23,7 +25,7 @@ template<typename Block, int Granularity> struct mark_bits {
 	explicit mark_bits(cell start_, cell size_) :
 		start(start_),
 		size(size_),
-		bits_size(size / Granularity / forwarding_granularity),
+		bits_size(size / block_granularity / forwarding_granularity),
 		marked(new u64[bits_size]),
 		forwarding(new cell[bits_size])
 	{

@@ -41,12 +43,12 @@ template<typename Block, int Granularity> struct mark_bits {
 
 	cell block_line(Block *address)
 	{
-		return (((cell)address - start) / Granularity);
+		return (((cell)address - start) / block_granularity);
 	}
 
 	Block *line_block(cell line)
 	{
-		return (Block *)(line * Granularity + start);
+		return (Block *)(line * block_granularity + start);
 	}
 
 	std::pair<cell,cell> bitmap_deref(Block *address)

@@ -71,7 +73,7 @@ template<typename Block, int Granularity> struct mark_bits {
 	void set_bitmap_range(u64 *bits, Block *address)
 	{
 		std::pair<cell,cell> start = bitmap_deref(address);
-		std::pair<cell,cell> end = bitmap_deref(address->next());
+		std::pair<cell,cell> end = bitmap_deref(layout.next_block_after(address));
 
 		u64 start_mask = ((u64)1 << start.second) - 1;
 		u64 end_mask = ((u64)1 << end.second) - 1;

@@ -139,12 +141,12 @@ template<typename Block, int Granularity> struct mark_bits {
 	}
 };
 
-template<typename Block, int Granularity, typename Iterator> struct heap_compacter {
-	mark_bits<Block,Granularity> *state;
+template<typename Block, typename HeapLayout, typename Iterator> struct heap_compactor {
+	mark_bits<Block,HeapLayout> *state;
 	char *address;
 	Iterator &iter;
 
-	explicit heap_compacter(mark_bits<Block,Granularity> *state_, Block *address_, Iterator &iter_) :
+	explicit heap_compactor(mark_bits<Block,HeapLayout> *state_, Block *address_, Iterator &iter_) :
 		state(state_), address((char *)address_), iter(iter_) {}
 
 	void operator()(Block *block, cell size)
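
mark_bits now takes the same HeapLayout policy and keeps roughly one mark bit per block_granularity-sized line of the heap, packed into u64 words; block_line and line_block convert between addresses and line numbers. The body of bitmap_deref is not shown in this diff, so the word/bit split below is an assumption; bitmap_math is a made-up name, not VM code.

/* illustrative sketch only; the word/bit split is assumed */
#include <cstdio>
#include <utility>

typedef unsigned long cell;

static const cell block_granularity = 16;

struct bitmap_math {
	cell start;

	cell block_line(cell address) { return (address - start) / block_granularity; }
	cell line_block(cell line) { return line * block_granularity + start; }

	/* (index into the array of 64-bit mark words, bit offset within that word) */
	std::pair<cell,cell> bitmap_deref(cell address)
	{
		cell line = block_line(address);
		return std::make_pair(line / 64,line % 64);
	}
};

int main()
{
	bitmap_math m = { 0x100000 };
	std::pair<cell,cell> p = m.bitmap_deref(0x100000 + 100 * block_granularity);
	std::printf("word %lu, bit %lu\n",(unsigned long)p.first,(unsigned long)p.second);
	return 0;
}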