GC maps for more compact inline GC checks

parent 3dabdcbdc3
commit 806e54630a

@@ -46,6 +46,7 @@ ifdef CONFIG
vm/free_list.o \
vm/full_collector.o \
vm/gc.o \
vm/gc_info.o \
vm/image.o \
vm/inline_cache.o \
vm/instruction_operands.o \
@@ -48,6 +48,7 @@ DLL_OBJS = $(PLAF_DLL_OBJS) \
vm\free_list.obj \
vm\full_collector.obj \
vm\gc.obj \
vm\gc_info.obj \
vm\image.obj \
vm\inline_cache.obj \
vm\instruction_operands.obj \
@ -58,7 +58,6 @@ SYMBOL: bootstrap-time
|
|||
original-error set-global
|
||||
error set-global ; inline
|
||||
|
||||
|
||||
[
|
||||
! We time bootstrap
|
||||
nano-count
|
||||
|
|
|
@ -29,14 +29,6 @@ V{
|
|||
|
||||
2 \ vreg-counter set-global
|
||||
|
||||
[
|
||||
V{
|
||||
T{ ##load-tagged f 3 0 }
|
||||
T{ ##replace f 3 D 0 }
|
||||
T{ ##replace f 3 R 3 }
|
||||
}
|
||||
] [ [ { D 0 R 3 } wipe-locs ] V{ } make ] unit-test
|
||||
|
||||
: gc-check? ( bb -- ? )
|
||||
instructions>>
|
||||
{
|
||||
|
@ -50,15 +42,13 @@ V{
|
|||
|
||||
[
|
||||
V{
|
||||
T{ ##load-tagged f 5 0 }
|
||||
T{ ##replace f 5 D 0 }
|
||||
T{ ##replace f 5 R 3 }
|
||||
T{ ##call-gc f { 0 1 2 } }
|
||||
T{ ##gc-map f V{ 0 } V{ 3 } { 0 1 2 } }
|
||||
T{ ##call-gc }
|
||||
T{ ##branch }
|
||||
}
|
||||
]
|
||||
[
|
||||
{ D 0 R 3 } { 0 1 2 } <gc-call> instructions>>
|
||||
V{ D 0 R 3 } { 0 1 2 } <gc-call> instructions>>
|
||||
] unit-test
|
||||
|
||||
30 \ vreg-counter set-global
|
||||
|
@ -156,11 +146,8 @@ H{
|
|||
|
||||
[
|
||||
V{
|
||||
T{ ##load-tagged f 31 0 }
|
||||
T{ ##replace f 31 D 0 }
|
||||
T{ ##replace f 31 D 1 }
|
||||
T{ ##replace f 31 D 2 }
|
||||
T{ ##call-gc f { 2 } }
|
||||
T{ ##gc-map f V{ 0 1 2 } V{ } { 2 } }
|
||||
T{ ##call-gc }
|
||||
T{ ##branch }
|
||||
}
|
||||
] [ 2 get predecessors>> second instructions>> ] unit-test
|
||||
|
|
|
@@ -50,16 +50,12 @@ IN: compiler.cfg.gc-checks
] bi*
] V{ } make >>instructions ;

: wipe-locs ( uninitialized-locs -- )
'[
int-rep next-vreg-rep
[ 0 ##load-tagged ]
[ '[ [ _ ] dip ##replace ] each ] bi
] unless-empty ;
: scrubbed ( uninitialized-locs -- scrub-d scrub-r )
[ ds-loc? ] partition [ [ n>> ] map ] bi@ ;

: <gc-call> ( uninitialized-locs gc-roots -- bb )
[ <basic-block> ] 2dip
[ [ wipe-locs ] [ ##call-gc ] bi* ##branch ] V{ } make
[ [ scrubbed ] dip ##gc-map ##call-gc ##branch ] V{ } make
>>instructions t >>unlikely? ;

:: insert-guard ( body check bb -- )
@@ -819,8 +819,10 @@ INSN: ##check-nursery-branch
literal: size cc
temp: temp1/int-rep temp2/int-rep ;

INSN: ##call-gc
literal: gc-roots ;
INSN: ##call-gc ;

INSN: ##gc-map
literal: scrub-d scrub-r gc-roots ;

! Spills and reloads, inserted by register allocator
TUPLE: spill-slot { n integer } ;
@@ -142,8 +142,7 @@ RENAMING: assign [ vreg>reg ] [ vreg>reg ] [ vreg>reg ]
M: vreg-insn assign-registers-in-insn
[ assign-insn-defs ] [ assign-insn-uses ] [ assign-insn-temps ] tri ;

M: ##call-gc assign-registers-in-insn
dup call-next-method
M: ##gc-map assign-registers-in-insn
[ [ vreg>reg ] map ] change-gc-roots drop ;

M: insn assign-registers-in-insn drop ;
@@ -10,7 +10,6 @@ IN: compiler.cfg.save-contexts
: needs-save-context? ( insns -- ? )
[
{
[ ##call-gc? ]
[ ##unary-float-function? ]
[ ##binary-float-function? ]
[ ##alien-invoke? ]
@@ -77,5 +77,5 @@ M: uninitialized-analysis join-sets ( sets analysis -- pair )
first2
[ [ <ds-loc> ] (uninitialized-locs) ]
[ [ <rs-loc> ] (uninitialized-locs) ]
bi* append
bi* append f like
] when ;
@@ -258,6 +258,7 @@ CODEGEN: ##restore-context %restore-context
CODEGEN: ##vm-field %vm-field
CODEGEN: ##set-vm-field %set-vm-field
CODEGEN: ##alien-global %alien-global
CODEGEN: ##gc-map %gc-map
CODEGEN: ##call-gc %call-gc
CODEGEN: ##spill %spill
CODEGEN: ##reload %reload
@@ -0,0 +1,67 @@
USING: namespaces byte-arrays make compiler.codegen.fixup
bit-arrays accessors classes.struct tools.test kernel math
sequences alien.c-types specialized-arrays boxes ;
SPECIALIZED-ARRAY: uint
IN: compiler.codegen.fixup.tests

STRUCT: gc-info
{ scrub-d-count uint }
{ scrub-r-count uint }
{ gc-root-count uint }
{ return-address-count uint } ;

[ ] [
[
init-fixup

50 <byte-array> %

{ { } { } { } } set-next-gc-map
gc-map-here

50 <byte-array> %

{ { 0 4 } { 1 } { 1 3 } } set-next-gc-map
gc-map-here

emit-gc-info
] B{ } make
"result" set
] unit-test

[ 0 ] [ "result" get length 16 mod ] unit-test

[ ] [
[
100 <byte-array> %

! The below data is 22 bytes -- 6 bytes padding needed to
! align
6 <byte-array> %

! Bitmap - 2 bytes
?{
! scrub-d
t f f f t
! scrub-r
f t
! gc-roots
f t f t
} underlying>> %

! Return addresses - 4 bytes
uint-array{ 100 } underlying>> %

! GC info footer - 16 bytes
S{ gc-info
{ scrub-d-count 5 }
{ scrub-r-count 2 }
{ gc-root-count 4 }
{ return-address-count 1 }
} (underlying)>> %
] B{ } make
"expect" set
] unit-test

[ ] [ "result" get length "expect" get length assert= ] unit-test
[ ] [ "result" get "expect" get assert= ] unit-test
@ -1,10 +1,11 @@
|
|||
! Copyright (C) 2007, 2010 Slava Pestov.
|
||||
! See http://factorcode.org/license.txt for BSD license.
|
||||
USING: arrays byte-arrays byte-vectors generic assocs hashtables
|
||||
io.binary kernel kernel.private math namespaces make sequences
|
||||
words quotations strings alien.accessors alien.strings layouts
|
||||
system combinators math.bitwise math.order combinators.smart
|
||||
accessors growable fry compiler.constants memoize ;
|
||||
USING: arrays bit-arrays byte-arrays byte-vectors generic assocs
|
||||
hashtables io.binary kernel kernel.private math namespaces make
|
||||
sequences words quotations strings alien.accessors alien.strings
|
||||
layouts system combinators math.bitwise math.order
|
||||
combinators.smart accessors growable fry compiler.constants
|
||||
memoize boxes ;
|
||||
IN: compiler.codegen.fixup
|
||||
|
||||
! Utilities
|
||||
|
@ -95,7 +96,7 @@ MEMO: cached-string>symbol ( symbol -- obj ) string>symbol ;
|
|||
: rel-decks-offset ( class -- )
|
||||
rt-decks-offset rel-fixup ;
|
||||
|
||||
! And the rest
|
||||
! Labels
|
||||
: compute-target ( label-fixup -- offset )
|
||||
label>> offset>> [ "Unresolved label" throw ] unless* ;
|
||||
|
||||
|
@ -112,13 +113,7 @@ MEMO: cached-string>symbol ( symbol -- obj ) string>symbol ;
|
|||
[ [ compute-relative-label ] map concat ]
|
||||
bi* ;
|
||||
|
||||
: init-fixup ( -- )
|
||||
V{ } clone parameter-table set
|
||||
V{ } clone literal-table set
|
||||
V{ } clone label-table set
|
||||
BV{ } clone relocation-table set
|
||||
V{ } clone binary-literal-table set ;
|
||||
|
||||
! Binary literals
|
||||
: alignment ( align -- n )
|
||||
[ compiled-offset dup ] dip align swap - ;
|
||||
|
||||
|
@@ -136,16 +131,102 @@ MEMO: cached-string>symbol ( symbol -- obj ) string>symbol ;
: emit-binary-literals ( -- )
binary-literal-table get [ emit-data ] assoc-each ;

! GC info

! Every code block either ends with
!
! uint 0
!
! or
!
! bitmap, byte aligned, three subsequences:
! - <scrubbed data stack locations>
! - <scrubbed retain stack locations>
! - <GC root spill slots>
! uint[] <return addresses>
! uint <largest scrubbed data stack location>
! uint <largest scrubbed retain stack location>
! uint <largest GC root spill slot>
! uint <number of return addresses>

SYMBOLS: next-gc-map return-addresses gc-maps ;

: gc-map? ( triple -- ? )
! If there are no stack locations to scrub and no GC roots,
! there's no point storing the GC map.
[ empty? not ] any? ;

: gc-map-here ( -- )
next-gc-map get box> dup gc-map? [
gc-maps get push
compiled-offset return-addresses get push
] [ drop ] if ;

: set-next-gc-map ( gc-map -- ) next-gc-map get >box ;

: integers>bits ( seq n -- bit-array )
<bit-array> [ '[ [ t ] dip _ set-nth ] each ] keep ;

: emit-bitmap ( seqs -- n )
! seqs is a sequence of sequences of integers 0..n-1
[ 0 ] [
dup [ [ 0 ] [ supremum 1 + ] if-empty ] [ max ] map-reduce
[ '[ _ integers>bits % ] each ] keep
] if-empty ;

: emit-uint ( n -- )
building get push-uint ;

: gc-info ( -- byte-array )
[
return-addresses get empty? [ 0 emit-uint ] [
gc-maps get
[
[ [ first ] map emit-bitmap ]
[ [ second ] map emit-bitmap ]
[ [ third ] map emit-bitmap ] tri
] ?{ } make underlying>> %
return-addresses get [ emit-uint ] each
[ emit-uint ] tri@
return-addresses get length emit-uint
] if
] B{ } make ;

: emit-gc-info ( -- )
! We want to place the GC info so that the end is aligned
! on a 16-byte boundary.
gc-info [
length compiled-offset +
[ data-alignment get align ] keep -
(align-code)
] [ % ] bi ;

: init-fixup ( -- )
V{ } clone parameter-table set
V{ } clone literal-table set
V{ } clone label-table set
BV{ } clone relocation-table set
V{ } clone binary-literal-table set
V{ } clone return-addresses set
V{ } clone gc-maps set
<box> next-gc-map set ;

: check-fixup ( seq -- )
length data-alignment get mod 0 assert=
next-gc-map get occupied>> f assert= ;

: with-fixup ( quot -- code )
'[
init-fixup
[
init-fixup
@
emit-binary-literals
emit-gc-info
label-table [ compute-labels ] change
parameter-table get >array
literal-table get >array
relocation-table get >byte-array
label-table get
] B{ } make
dup check-fixup
] output>array ; inline
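
To make the layout comment above concrete, here is a minimal C++ sketch (not part of the patch; the helper name and its standalone form are assumptions for illustration) that computes the size of the serialized GC info trailer from the four footer counts. For the second case in the compiler.codegen.fixup tests earlier in this diff (5 scrubbed data stack locations, 2 scrubbed retain stack locations, 4 GC roots, 1 return address) it yields 2 + 4 + 16 = 22 bytes, matching the "22 bytes -- 6 bytes padding" comment in that test.

#include <stddef.h>
#include <stdint.h>

/* Size of the GC info trailer: one bitmap row per return address, packed into
   whole bytes, then a uint per return address, then the four-uint footer. */
static size_t gc_info_trailer_size(uint32_t scrub_d_count, uint32_t scrub_r_count,
	uint32_t gc_root_count, uint32_t return_address_count)
{
	size_t bits = (size_t)return_address_count
		* (scrub_d_count + scrub_r_count + gc_root_count);
	size_t bitmap_bytes = (bits + 7) / 8;
	return bitmap_bytes
		+ return_address_count * sizeof(uint32_t)
		+ 4 * sizeof(uint32_t);
}
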
@@ -488,7 +488,8 @@ HOOK: %write-barrier-imm cpu ( src slot tag temp1 temp2 -- )

! GC checks
HOOK: %check-nursery-branch cpu ( label size cc temp1 temp2 -- )
HOOK: %call-gc cpu ( gc-roots -- )
HOOK: %gc-map cpu ( scrub-d scrub-r gc-roots -- )
HOOK: %call-gc cpu ( -- )

HOOK: %prologue cpu ( n -- )
HOOK: %epilogue cpu ( n -- )
@ -239,11 +239,6 @@ M:: x86.32 stack-cleanup ( stack-size return abi -- n )
|
|||
M: x86.32 %cleanup ( n -- )
|
||||
[ ESP swap SUB ] unless-zero ;
|
||||
|
||||
M:: x86.32 %call-gc ( gc-roots -- )
|
||||
4 save-vm-ptr
|
||||
0 stack@ gc-roots gc-root-offsets %load-reference
|
||||
"inline_gc" f %alien-invoke ;
|
||||
|
||||
M: x86.32 dummy-stack-params? f ;
|
||||
|
||||
M: x86.32 dummy-int-params? f ;
|
||||
|
|
|
@ -154,11 +154,6 @@ M:: x86.64 %binary-float-function ( dst src1 src2 func -- )
|
|||
func "libm" load-library %alien-invoke
|
||||
dst double-rep %load-return ;
|
||||
|
||||
M:: x86.64 %call-gc ( gc-roots -- )
|
||||
param-reg-0 gc-roots gc-root-offsets %load-reference
|
||||
param-reg-1 %mov-vm-ptr
|
||||
"inline_gc" f %alien-invoke ;
|
||||
|
||||
M: x86.64 long-long-on-stack? f ;
|
||||
|
||||
M: x86.64 float-on-stack? f ;
|
||||
|
|
|
@@ -35,9 +35,6 @@ HOOK: reserved-stack-space cpu ( -- n )

: spill@ ( n -- op ) spill-offset special-offset stack@ ;

: gc-root-offsets ( seq -- seq' )
[ n>> spill-offset special-offset cell + ] map f like ;

: decr-stack-reg ( n -- )
dup 0 = [ drop ] [ stack-reg swap SUB ] if ;
@@ -483,8 +480,18 @@ M:: x86 %check-nursery-branch ( label size cc temp1 temp2 -- )
{ cc/<= [ label JG ] }
} case ;

: gc-root-offsets ( seq -- seq' )
[ n>> spill-offset special-offset cell + cell /i ] map f like ;

M: x86 %gc-map ( scrub-d scrub-r gc-roots -- )
gc-root-offsets 3array set-next-gc-map ;

M: x86 %call-gc
\ minor-gc %call
gc-map-here ;

M: x86 %alien-global ( dst symbol library -- )
[ 0 MOV ] 2dip rc-absolute-cell rel-dlsym ;
[ 0 MOV ] 2dip rc-absolute-cell rel-dlsym ;

M: x86 %epilogue ( n -- ) cell - incr-stack-reg ;
@@ -60,4 +60,11 @@ inline cell popcount(cell x)
return x;
}

inline bool bitmap_p(u8 *bitmap, cell index)
{
cell byte = index >> 3;
cell bit = index & 7;
return (bitmap[byte] & (1 << bit)) != 0;
}

}
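
As a cross-check of the bit numbering, the sketch below (illustrative only, not part of the patch) feeds bitmap_p the same 11-bit pattern used in the compiler.codegen.fixup tests earlier in this diff, assuming the compiler packs bits least-significant-bit first within each byte, which is the convention bitmap_p itself decodes; the two hand-packed byte values are worked out under that assumption.

#include <cassert>

typedef unsigned char u8;
typedef unsigned long cell; /* assumed typedef for this standalone example */

static bool bitmap_p(u8 *bitmap, cell index)
{
	cell byte = index >> 3;
	cell bit = index & 7;
	return (bitmap[byte] & (1 << bit)) != 0;
}

int main()
{
	/* ?{ t f f f t  f t  f t f t } packed LSB-first: 0b01010001, 0b00000101 */
	u8 bitmap[2] = { 0x51, 0x05 };
	assert(bitmap_p(bitmap, 0));   /* first scrub-d location is scrubbed */
	assert(!bitmap_p(bitmap, 1));
	assert(bitmap_p(bitmap, 4));   /* last scrub-d location */
	assert(bitmap_p(bitmap, 6));   /* second scrub-r location */
	assert(bitmap_p(bitmap, 10));  /* last GC root slot */
	return 0;
}
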
@ -35,16 +35,18 @@ void factor_vm::primitive_resize_byte_array()
|
|||
ctx->push(tag<byte_array>(reallot_array(array.untagged(),capacity)));
|
||||
}
|
||||
|
||||
void growable_byte_array::grow_bytes(cell len)
|
||||
{
|
||||
count += len;
|
||||
if(count >= array_capacity(elements.untagged()))
|
||||
elements = elements.parent->reallot_array(elements.untagged(),count * 2);
|
||||
}
|
||||
|
||||
void growable_byte_array::append_bytes(void *elts, cell len)
|
||||
{
|
||||
cell new_size = count + len;
|
||||
factor_vm *parent = elements.parent;
|
||||
if(new_size >= array_capacity(elements.untagged()))
|
||||
elements = parent->reallot_array(elements.untagged(),new_size * 2);
|
||||
|
||||
memcpy(&elements->data<u8>()[count],elts,len);
|
||||
|
||||
count += len;
|
||||
cell old_count = count;
|
||||
grow_bytes(len);
|
||||
memcpy(&elements->data<u8>()[old_count],elts,len);
|
||||
}
|
||||
|
||||
void growable_byte_array::append_byte_array(cell byte_array_)
|
||||
|
|
|
@ -7,6 +7,7 @@ struct growable_byte_array {
|
|||
|
||||
explicit growable_byte_array(factor_vm *parent,cell capacity = 40) : count(0), elements(parent->allot_byte_array(capacity),parent) { }
|
||||
|
||||
void grow_bytes(cell len);
|
||||
void append_bytes(void *elts, cell len);
|
||||
void append_byte_array(cell elts);
|
||||
|
||||
|
|
|
@ -138,9 +138,6 @@ cell factor_vm::frame_scan(stack_frame *frame)
|
|||
}
|
||||
}
|
||||
|
||||
namespace
|
||||
{
|
||||
|
||||
struct stack_frame_accumulator {
|
||||
factor_vm *parent;
|
||||
growable_array frames;
|
||||
|
@ -159,8 +156,6 @@ struct stack_frame_accumulator {
|
|||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
void factor_vm::primitive_callstack_to_array()
|
||||
{
|
||||
data_root<callstack> callstack(ctx->pop(),this);
|
||||
|
|
|
@ -12,12 +12,12 @@ Iteration is driven by visit_*() methods. Some of them define GC roots:
|
|||
- visit_context_code_blocks()
|
||||
- visit_callback_code_blocks() */
|
||||
|
||||
template<typename Visitor> struct code_block_visitor {
|
||||
template<typename Fixup> struct code_block_visitor {
|
||||
factor_vm *parent;
|
||||
Visitor visitor;
|
||||
Fixup fixup;
|
||||
|
||||
explicit code_block_visitor(factor_vm *parent_, Visitor visitor_) :
|
||||
parent(parent_), visitor(visitor_) {}
|
||||
explicit code_block_visitor(factor_vm *parent_, Fixup fixup_) :
|
||||
parent(parent_), fixup(fixup_) {}
|
||||
|
||||
code_block *visit_code_block(code_block *compiled);
|
||||
void visit_object_code_block(object *obj);
|
||||
|
@ -26,33 +26,34 @@ template<typename Visitor> struct code_block_visitor {
|
|||
void visit_uninitialized_code_blocks();
|
||||
};
|
||||
|
||||
template<typename Visitor>
|
||||
code_block *code_block_visitor<Visitor>::visit_code_block(code_block *compiled)
|
||||
template<typename Fixup>
|
||||
code_block *code_block_visitor<Fixup>::visit_code_block(code_block *compiled)
|
||||
{
|
||||
return visitor(compiled);
|
||||
return fixup.fixup_code(compiled);
|
||||
}
|
||||
|
||||
template<typename Visitor>
|
||||
template<typename Fixup>
|
||||
struct call_frame_code_block_visitor {
|
||||
factor_vm *parent;
|
||||
Visitor visitor;
|
||||
Fixup fixup;
|
||||
|
||||
explicit call_frame_code_block_visitor(factor_vm *parent_, Visitor visitor_) :
|
||||
parent(parent_), visitor(visitor_) {}
|
||||
explicit call_frame_code_block_visitor(factor_vm *parent_, Fixup fixup_) :
|
||||
parent(parent_), fixup(fixup_) {}
|
||||
|
||||
void operator()(stack_frame *frame)
|
||||
{
|
||||
cell offset = (cell)FRAME_RETURN_ADDRESS(frame,parent) - (cell)frame->entry_point;
|
||||
code_block *old_block = parent->frame_code(frame);
|
||||
cell offset = (char *)FRAME_RETURN_ADDRESS(frame,parent) - (char *)old_block;
|
||||
|
||||
code_block *new_block = visitor(parent->frame_code(frame));
|
||||
const code_block *new_block = fixup.fixup_code(old_block);
|
||||
frame->entry_point = new_block->entry_point();
|
||||
|
||||
FRAME_RETURN_ADDRESS(frame,parent) = (void *)((cell)frame->entry_point + offset);
|
||||
FRAME_RETURN_ADDRESS(frame,parent) = (char *)new_block + offset;
|
||||
}
|
||||
};
|
||||
|
||||
template<typename Visitor>
|
||||
void code_block_visitor<Visitor>::visit_object_code_block(object *obj)
|
||||
template<typename Fixup>
|
||||
void code_block_visitor<Fixup>::visit_object_code_block(object *obj)
|
||||
{
|
||||
switch(obj->type())
|
||||
{
|
||||
|
@ -60,9 +61,9 @@ void code_block_visitor<Visitor>::visit_object_code_block(object *obj)
|
|||
{
|
||||
word *w = (word *)obj;
|
||||
if(w->code)
|
||||
w->code = visitor(w->code);
|
||||
w->code = visit_code_block(w->code);
|
||||
if(w->profiling)
|
||||
w->profiling = visitor(w->profiling);
|
||||
w->profiling = visit_code_block(w->profiling);
|
||||
|
||||
parent->update_word_entry_point(w);
|
||||
break;
|
||||
|
@ -71,24 +72,24 @@ void code_block_visitor<Visitor>::visit_object_code_block(object *obj)
|
|||
{
|
||||
quotation *q = (quotation *)obj;
|
||||
if(q->code)
|
||||
parent->set_quot_entry_point(q,visitor(q->code));
|
||||
parent->set_quot_entry_point(q,visit_code_block(q->code));
|
||||
break;
|
||||
}
|
||||
case CALLSTACK_TYPE:
|
||||
{
|
||||
callstack *stack = (callstack *)obj;
|
||||
call_frame_code_block_visitor<Visitor> call_frame_visitor(parent,visitor);
|
||||
call_frame_code_block_visitor<Fixup> call_frame_visitor(parent,fixup);
|
||||
parent->iterate_callstack_object(stack,call_frame_visitor);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
template<typename Visitor>
|
||||
template<typename Fixup>
|
||||
struct embedded_code_pointers_visitor {
|
||||
Visitor visitor;
|
||||
Fixup fixup;
|
||||
|
||||
explicit embedded_code_pointers_visitor(Visitor visitor_) : visitor(visitor_) {}
|
||||
explicit embedded_code_pointers_visitor(Fixup fixup_) : fixup(fixup_) {}
|
||||
|
||||
void operator()(instruction_operand op)
|
||||
{
|
||||
|
@ -96,29 +97,29 @@ struct embedded_code_pointers_visitor {
|
|||
if(type == RT_ENTRY_POINT
|
||||
|| type == RT_ENTRY_POINT_PIC
|
||||
|| type == RT_ENTRY_POINT_PIC_TAIL)
|
||||
op.store_code_block(visitor(op.load_code_block()));
|
||||
op.store_code_block(fixup.fixup_code(op.load_code_block()));
|
||||
}
|
||||
};
|
||||
|
||||
template<typename Visitor>
|
||||
void code_block_visitor<Visitor>::visit_embedded_code_pointers(code_block *compiled)
|
||||
template<typename Fixup>
|
||||
void code_block_visitor<Fixup>::visit_embedded_code_pointers(code_block *compiled)
|
||||
{
|
||||
if(!parent->code->uninitialized_p(compiled))
|
||||
{
|
||||
embedded_code_pointers_visitor<Visitor> visitor(this->visitor);
|
||||
compiled->each_instruction_operand(visitor);
|
||||
embedded_code_pointers_visitor<Fixup> operand_visitor(fixup);
|
||||
compiled->each_instruction_operand(operand_visitor);
|
||||
}
|
||||
}
|
||||
|
||||
template<typename Visitor>
|
||||
void code_block_visitor<Visitor>::visit_context_code_blocks()
|
||||
template<typename Fixup>
|
||||
void code_block_visitor<Fixup>::visit_context_code_blocks()
|
||||
{
|
||||
call_frame_code_block_visitor<Visitor> call_frame_visitor(parent,visitor);
|
||||
call_frame_code_block_visitor<Fixup> call_frame_visitor(parent,fixup);
|
||||
parent->iterate_active_callstacks(call_frame_visitor);
|
||||
}
|
||||
|
||||
template<typename Visitor>
|
||||
void code_block_visitor<Visitor>::visit_uninitialized_code_blocks()
|
||||
template<typename Fixup>
|
||||
void code_block_visitor<Fixup>::visit_uninitialized_code_blocks()
|
||||
{
|
||||
std::map<code_block *, cell> *uninitialized_blocks = &parent->code->uninitialized_blocks;
|
||||
std::map<code_block *, cell>::const_iterator iter = uninitialized_blocks->begin();
|
||||
|
@ -128,7 +129,7 @@ void code_block_visitor<Visitor>::visit_uninitialized_code_blocks()
|
|||
for(; iter != end; iter++)
|
||||
{
|
||||
new_uninitialized_blocks.insert(std::make_pair(
|
||||
visitor(iter->first),
|
||||
fixup.fixup_code(iter->first),
|
||||
iter->second));
|
||||
}
|
||||
|
||||
|
|
|
@ -43,11 +43,22 @@ struct code_block
|
|||
return size;
|
||||
}
|
||||
|
||||
template<typename Fixup> cell size(Fixup fixup) const
|
||||
{
|
||||
return size();
|
||||
}
|
||||
|
||||
void *entry_point() const
|
||||
{
|
||||
return (void *)(this + 1);
|
||||
}
|
||||
|
||||
/* GC info is stored at the end of the block */
|
||||
gc_info *block_gc_info() const
|
||||
{
|
||||
return (gc_info *)((u8 *)this + size() - sizeof(gc_info));
|
||||
}
|
||||
|
||||
void flush_icache()
|
||||
{
|
||||
factor::flush_icache((cell)this,size());
|
||||
|
|
|
@ -3,15 +3,17 @@ namespace factor
|
|||
|
||||
struct must_start_gc_again {};
|
||||
|
||||
template<typename TargetGeneration, typename Policy> struct data_workhorse {
|
||||
template<typename TargetGeneration, typename Policy> struct gc_workhorse : no_fixup {
|
||||
factor_vm *parent;
|
||||
TargetGeneration *target;
|
||||
Policy policy;
|
||||
code_heap *code;
|
||||
|
||||
explicit data_workhorse(factor_vm *parent_, TargetGeneration *target_, Policy policy_) :
|
||||
explicit gc_workhorse(factor_vm *parent_, TargetGeneration *target_, Policy policy_) :
|
||||
parent(parent_),
|
||||
target(target_),
|
||||
policy(policy_) {}
|
||||
policy(policy_),
|
||||
code(parent->code) {}
|
||||
|
||||
object *resolve_forwarding(object *untagged)
|
||||
{
|
||||
|
@ -39,7 +41,7 @@ template<typename TargetGeneration, typename Policy> struct data_workhorse {
|
|||
return newpointer;
|
||||
}
|
||||
|
||||
object *operator()(object *obj)
|
||||
object *fixup_data(object *obj)
|
||||
{
|
||||
if(!policy.should_copy_p(obj))
|
||||
{
|
||||
|
@ -59,17 +61,18 @@ template<typename TargetGeneration, typename Policy> struct data_workhorse {
|
|||
return forwarding;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
template<typename TargetGeneration, typename Policy>
|
||||
inline static slot_visitor<data_workhorse<TargetGeneration,Policy> > make_data_visitor(
|
||||
factor_vm *parent,
|
||||
TargetGeneration *target,
|
||||
Policy policy)
|
||||
{
|
||||
return slot_visitor<data_workhorse<TargetGeneration,Policy> >(parent,
|
||||
data_workhorse<TargetGeneration,Policy>(parent,target,policy));
|
||||
}
|
||||
code_block *fixup_code(code_block *compiled)
|
||||
{
|
||||
if(!code->marked_p(compiled))
|
||||
{
|
||||
code->set_marked_p(compiled);
|
||||
parent->mark_stack.push_back((cell)compiled + 1);
|
||||
}
|
||||
|
||||
return compiled;
|
||||
}
|
||||
};
|
||||
|
||||
struct dummy_unmarker {
|
||||
void operator()(card *ptr) {}
|
||||
|
@ -92,7 +95,8 @@ struct collector {
|
|||
data_heap *data;
|
||||
code_heap *code;
|
||||
TargetGeneration *target;
|
||||
slot_visitor<data_workhorse<TargetGeneration,Policy> > data_visitor;
|
||||
gc_workhorse<TargetGeneration,Policy> workhorse;
|
||||
slot_visitor<gc_workhorse<TargetGeneration,Policy> > data_visitor;
|
||||
cell cards_scanned;
|
||||
cell decks_scanned;
|
||||
cell code_blocks_scanned;
|
||||
|
@ -102,7 +106,8 @@ struct collector {
|
|||
data(parent_->data),
|
||||
code(parent_->code),
|
||||
target(target_),
|
||||
data_visitor(make_data_visitor(parent_,target_,policy_)),
|
||||
workhorse(parent,target,policy_),
|
||||
data_visitor(parent,workhorse),
|
||||
cards_scanned(0),
|
||||
decks_scanned(0),
|
||||
code_blocks_scanned(0) {}
|
||||
|
|
|
@ -2,105 +2,99 @@
|
|||
|
||||
namespace factor {
|
||||
|
||||
template<typename Block> struct forwarder {
|
||||
mark_bits<Block> *forwarding_map;
|
||||
struct compaction_fixup {
|
||||
mark_bits<object> *data_forwarding_map;
|
||||
mark_bits<code_block> *code_forwarding_map;
|
||||
const object **data_finger;
|
||||
const code_block **code_finger;
|
||||
|
||||
explicit forwarder(mark_bits<Block> *forwarding_map_) :
|
||||
forwarding_map(forwarding_map_) {}
|
||||
explicit compaction_fixup(
|
||||
mark_bits<object> *data_forwarding_map_,
|
||||
mark_bits<code_block> *code_forwarding_map_,
|
||||
const object **data_finger_,
|
||||
const code_block **code_finger_) :
|
||||
data_forwarding_map(data_forwarding_map_),
|
||||
code_forwarding_map(code_forwarding_map_),
|
||||
data_finger(data_finger_),
|
||||
code_finger(code_finger_) {}
|
||||
|
||||
Block *operator()(Block *block)
|
||||
object *fixup_data(object *obj)
|
||||
{
|
||||
return forwarding_map->forward_block(block);
|
||||
}
|
||||
};
|
||||
|
||||
static inline cell tuple_size_with_forwarding(mark_bits<object> *forwarding_map, object *obj)
|
||||
{
|
||||
/* The tuple layout may or may not have been forwarded already. Tricky. */
|
||||
object *layout_obj = (object *)UNTAG(((tuple *)obj)->layout);
|
||||
tuple_layout *layout;
|
||||
|
||||
if(layout_obj < obj)
|
||||
{
|
||||
/* It's already been moved up; dereference through forwarding
|
||||
map to get the size */
|
||||
layout = (tuple_layout *)forwarding_map->forward_block(layout_obj);
|
||||
}
|
||||
else
|
||||
{
|
||||
/* It hasn't been moved up yet; dereference directly */
|
||||
layout = (tuple_layout *)layout_obj;
|
||||
return data_forwarding_map->forward_block(obj);
|
||||
}
|
||||
|
||||
return tuple_size(layout);
|
||||
}
|
||||
|
||||
struct compaction_sizer {
|
||||
mark_bits<object> *forwarding_map;
|
||||
|
||||
explicit compaction_sizer(mark_bits<object> *forwarding_map_) :
|
||||
forwarding_map(forwarding_map_) {}
|
||||
|
||||
cell operator()(object *obj)
|
||||
code_block *fixup_code(code_block *compiled)
|
||||
{
|
||||
if(!forwarding_map->marked_p(obj))
|
||||
return forwarding_map->unmarked_block_size(obj);
|
||||
else if(obj->type() == TUPLE_TYPE)
|
||||
return align(tuple_size_with_forwarding(forwarding_map,obj),data_alignment);
|
||||
return code_forwarding_map->forward_block(compiled);
|
||||
}
|
||||
|
||||
object *translate_data(const object *obj)
|
||||
{
|
||||
if(obj < *data_finger)
|
||||
return fixup_data((object *)obj);
|
||||
else
|
||||
return obj->size();
|
||||
return (object *)obj;
|
||||
}
|
||||
|
||||
code_block *translate_code(const code_block *compiled)
|
||||
{
|
||||
if(compiled < *code_finger)
|
||||
return fixup_code((code_block *)compiled);
|
||||
else
|
||||
return (code_block *)compiled;
|
||||
}
|
||||
|
||||
cell size(object *obj)
|
||||
{
|
||||
if(data_forwarding_map->marked_p(obj))
|
||||
return obj->size(*this);
|
||||
else
|
||||
return data_forwarding_map->unmarked_block_size(obj);
|
||||
}
|
||||
|
||||
cell size(code_block *compiled)
|
||||
{
|
||||
if(code_forwarding_map->marked_p(compiled))
|
||||
return compiled->size(*this);
|
||||
else
|
||||
return code_forwarding_map->unmarked_block_size(compiled);
|
||||
}
|
||||
};
|
||||
|
||||
struct object_compaction_updater {
|
||||
factor_vm *parent;
|
||||
mark_bits<code_block> *code_forwarding_map;
|
||||
mark_bits<object> *data_forwarding_map;
|
||||
compaction_fixup fixup;
|
||||
object_start_map *starts;
|
||||
|
||||
explicit object_compaction_updater(factor_vm *parent_,
|
||||
mark_bits<object> *data_forwarding_map_,
|
||||
mark_bits<code_block> *code_forwarding_map_) :
|
||||
explicit object_compaction_updater(factor_vm *parent_, compaction_fixup fixup_) :
|
||||
parent(parent_),
|
||||
code_forwarding_map(code_forwarding_map_),
|
||||
data_forwarding_map(data_forwarding_map_),
|
||||
fixup(fixup_),
|
||||
starts(&parent->data->tenured->starts) {}
|
||||
|
||||
void operator()(object *old_address, object *new_address, cell size)
|
||||
{
|
||||
cell payload_start;
|
||||
if(old_address->type() == TUPLE_TYPE)
|
||||
payload_start = tuple_size_with_forwarding(data_forwarding_map,old_address);
|
||||
else
|
||||
payload_start = old_address->binary_payload_start();
|
||||
slot_visitor<compaction_fixup> slot_forwarder(parent,fixup);
|
||||
slot_forwarder.visit_slots(new_address);
|
||||
|
||||
memmove(new_address,old_address,size);
|
||||
|
||||
slot_visitor<forwarder<object> > slot_forwarder(parent,forwarder<object>(data_forwarding_map));
|
||||
slot_forwarder.visit_slots(new_address,payload_start);
|
||||
|
||||
code_block_visitor<forwarder<code_block> > code_forwarder(parent,forwarder<code_block>(code_forwarding_map));
|
||||
code_block_visitor<compaction_fixup> code_forwarder(parent,fixup);
|
||||
code_forwarder.visit_object_code_block(new_address);
|
||||
|
||||
starts->record_object_start_offset(new_address);
|
||||
}
|
||||
};
|
||||
|
||||
template<typename SlotForwarder>
|
||||
template<typename Fixup>
|
||||
struct code_block_compaction_relocation_visitor {
|
||||
factor_vm *parent;
|
||||
code_block *old_address;
|
||||
slot_visitor<SlotForwarder> slot_forwarder;
|
||||
code_block_visitor<forwarder<code_block> > code_forwarder;
|
||||
Fixup fixup;
|
||||
|
||||
explicit code_block_compaction_relocation_visitor(factor_vm *parent_,
|
||||
code_block *old_address_,
|
||||
slot_visitor<SlotForwarder> slot_forwarder_,
|
||||
code_block_visitor<forwarder<code_block> > code_forwarder_) :
|
||||
Fixup fixup_) :
|
||||
parent(parent_),
|
||||
old_address(old_address_),
|
||||
slot_forwarder(slot_forwarder_),
|
||||
code_forwarder(code_forwarder_) {}
|
||||
fixup(fixup_) {}
|
||||
|
||||
void operator()(instruction_operand op)
|
||||
{
|
||||
|
@ -109,16 +103,24 @@ struct code_block_compaction_relocation_visitor {
|
|||
switch(op.rel_type())
|
||||
{
|
||||
case RT_LITERAL:
|
||||
op.store_value(slot_forwarder.visit_pointer(op.load_value(old_offset)));
|
||||
break;
|
||||
{
|
||||
cell value = op.load_value(old_offset);
|
||||
if(immediate_p(value))
|
||||
op.store_value(value);
|
||||
else
|
||||
op.store_value(RETAG(fixup.fixup_data(untag<object>(value)),TAG(value)));
|
||||
break;
|
||||
}
|
||||
case RT_ENTRY_POINT:
|
||||
case RT_ENTRY_POINT_PIC:
|
||||
case RT_ENTRY_POINT_PIC_TAIL:
|
||||
op.store_code_block(code_forwarder.visit_code_block(op.load_code_block(old_offset)));
|
||||
break;
|
||||
case RT_HERE:
|
||||
op.store_value(op.load_value(old_offset) - (cell)old_address + (cell)op.parent_code_block());
|
||||
break;
|
||||
{
|
||||
cell value = op.load_value(old_offset);
|
||||
cell offset = value & (data_alignment - 1);
|
||||
op.store_value((cell)fixup.fixup_code((code_block *)value) + offset);
|
||||
break;
|
||||
}
|
||||
case RT_THIS:
|
||||
case RT_CARDS_OFFSET:
|
||||
case RT_DECKS_OFFSET:
|
||||
|
@ -131,26 +133,27 @@ struct code_block_compaction_relocation_visitor {
|
|||
}
|
||||
};
|
||||
|
||||
template<typename SlotForwarder>
|
||||
template<typename Fixup>
|
||||
struct code_block_compaction_updater {
|
||||
factor_vm *parent;
|
||||
slot_visitor<SlotForwarder> slot_forwarder;
|
||||
code_block_visitor<forwarder<code_block> > code_forwarder;
|
||||
Fixup fixup;
|
||||
slot_visitor<Fixup> data_forwarder;
|
||||
code_block_visitor<Fixup> code_forwarder;
|
||||
|
||||
explicit code_block_compaction_updater(factor_vm *parent_,
|
||||
slot_visitor<SlotForwarder> slot_forwarder_,
|
||||
code_block_visitor<forwarder<code_block> > code_forwarder_) :
|
||||
Fixup fixup_,
|
||||
slot_visitor<Fixup> data_forwarder_,
|
||||
code_block_visitor<Fixup> code_forwarder_) :
|
||||
parent(parent_),
|
||||
slot_forwarder(slot_forwarder_),
|
||||
fixup(fixup_),
|
||||
data_forwarder(data_forwarder_),
|
||||
code_forwarder(code_forwarder_) {}
|
||||
|
||||
void operator()(code_block *old_address, code_block *new_address, cell size)
|
||||
{
|
||||
memmove(new_address,old_address,size);
|
||||
data_forwarder.visit_code_block_objects(new_address);
|
||||
|
||||
slot_forwarder.visit_code_block_objects(new_address);
|
||||
|
||||
code_block_compaction_relocation_visitor<SlotForwarder> visitor(parent,old_address,slot_forwarder,code_forwarder);
|
||||
code_block_compaction_relocation_visitor<Fixup> visitor(parent,old_address,fixup);
|
||||
new_address->each_instruction_operand(visitor);
|
||||
}
|
||||
};
|
||||
|
@ -196,8 +199,12 @@ void factor_vm::collect_compact_impl(bool trace_contexts_p)
|
|||
data_forwarding_map->compute_forwarding();
|
||||
code_forwarding_map->compute_forwarding();
|
||||
|
||||
slot_visitor<forwarder<object> > slot_forwarder(this,forwarder<object>(data_forwarding_map));
|
||||
code_block_visitor<forwarder<code_block> > code_forwarder(this,forwarder<code_block>(code_forwarding_map));
|
||||
const object *data_finger = tenured->first_block();
|
||||
const code_block *code_finger = code->allocator->first_block();
|
||||
|
||||
compaction_fixup fixup(data_forwarding_map,code_forwarding_map,&data_finger,&code_finger);
|
||||
slot_visitor<compaction_fixup> data_forwarder(this,fixup);
|
||||
code_block_visitor<compaction_fixup> code_forwarder(this,fixup);
|
||||
|
||||
code_forwarder.visit_uninitialized_code_blocks();
|
||||
|
||||
|
@ -206,20 +213,18 @@ void factor_vm::collect_compact_impl(bool trace_contexts_p)
|
|||
|
||||
/* Slide everything in tenured space up, and update data and code heap
|
||||
pointers inside objects. */
|
||||
object_compaction_updater object_updater(this,data_forwarding_map,code_forwarding_map);
|
||||
compaction_sizer object_sizer(data_forwarding_map);
|
||||
tenured->compact(object_updater,object_sizer);
|
||||
object_compaction_updater object_updater(this,fixup);
|
||||
tenured->compact(object_updater,fixup,&data_finger);
|
||||
|
||||
/* Slide everything in the code heap up, and update data and code heap
|
||||
pointers inside code blocks. */
|
||||
code_block_compaction_updater<forwarder<object> > code_block_updater(this,slot_forwarder,code_forwarder);
|
||||
standard_sizer<code_block> code_block_sizer;
|
||||
code->allocator->compact(code_block_updater,code_block_sizer);
|
||||
code_block_compaction_updater<compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
|
||||
code->allocator->compact(code_block_updater,fixup,&code_finger);
|
||||
|
||||
slot_forwarder.visit_roots();
|
||||
data_forwarder.visit_roots();
|
||||
if(trace_contexts_p)
|
||||
{
|
||||
slot_forwarder.visit_contexts();
|
||||
data_forwarder.visit_contexts();
|
||||
code_forwarder.visit_context_code_blocks();
|
||||
}
|
||||
|
||||
|
@ -229,10 +234,56 @@ void factor_vm::collect_compact_impl(bool trace_contexts_p)
|
|||
current_gc->event->ended_compaction();
|
||||
}
|
||||
|
||||
struct object_grow_heap_updater {
|
||||
code_block_visitor<forwarder<code_block> > code_forwarder;
|
||||
struct code_compaction_fixup {
|
||||
mark_bits<code_block> *code_forwarding_map;
|
||||
const code_block **code_finger;
|
||||
|
||||
explicit object_grow_heap_updater(code_block_visitor<forwarder<code_block> > code_forwarder_) :
|
||||
explicit code_compaction_fixup(mark_bits<code_block> *code_forwarding_map_,
|
||||
const code_block **code_finger_) :
|
||||
code_forwarding_map(code_forwarding_map_),
|
||||
code_finger(code_finger_) {}
|
||||
|
||||
object *fixup_data(object *obj)
|
||||
{
|
||||
return obj;
|
||||
}
|
||||
|
||||
code_block *fixup_code(code_block *compiled)
|
||||
{
|
||||
return code_forwarding_map->forward_block(compiled);
|
||||
}
|
||||
|
||||
object *translate_data(const object *obj)
|
||||
{
|
||||
return fixup_data((object *)obj);
|
||||
}
|
||||
|
||||
code_block *translate_code(const code_block *compiled)
|
||||
{
|
||||
if(compiled >= *code_finger)
|
||||
return fixup_code((code_block *)compiled);
|
||||
else
|
||||
return (code_block *)compiled;
|
||||
}
|
||||
|
||||
cell size(object *obj)
|
||||
{
|
||||
return obj->size();
|
||||
}
|
||||
|
||||
cell size(code_block *compiled)
|
||||
{
|
||||
if(code_forwarding_map->marked_p(compiled))
|
||||
return compiled->size(*this);
|
||||
else
|
||||
return code_forwarding_map->unmarked_block_size(compiled);
|
||||
}
|
||||
};
|
||||
|
||||
struct object_grow_heap_updater {
|
||||
code_block_visitor<code_compaction_fixup> code_forwarder;
|
||||
|
||||
explicit object_grow_heap_updater(code_block_visitor<code_compaction_fixup> code_forwarder_) :
|
||||
code_forwarder(code_forwarder_) {}
|
||||
|
||||
void operator()(object *obj)
|
||||
|
@ -241,10 +292,6 @@ struct object_grow_heap_updater {
|
|||
}
|
||||
};
|
||||
|
||||
struct dummy_slot_forwarder {
|
||||
object *operator()(object *obj) { return obj; }
|
||||
};
|
||||
|
||||
/* Compact just the code heap, after growing the data heap */
|
||||
void factor_vm::collect_compact_code_impl(bool trace_contexts_p)
|
||||
{
|
||||
|
@ -252,8 +299,11 @@ void factor_vm::collect_compact_code_impl(bool trace_contexts_p)
|
|||
mark_bits<code_block> *code_forwarding_map = &code->allocator->state;
|
||||
code_forwarding_map->compute_forwarding();
|
||||
|
||||
slot_visitor<dummy_slot_forwarder> slot_forwarder(this,dummy_slot_forwarder());
|
||||
code_block_visitor<forwarder<code_block> > code_forwarder(this,forwarder<code_block>(code_forwarding_map));
|
||||
const code_block *code_finger = code->allocator->first_block();
|
||||
|
||||
code_compaction_fixup fixup(code_forwarding_map,&code_finger);
|
||||
slot_visitor<code_compaction_fixup> data_forwarder(this,fixup);
|
||||
code_block_visitor<code_compaction_fixup> code_forwarder(this,fixup);
|
||||
|
||||
code_forwarder.visit_uninitialized_code_blocks();
|
||||
|
||||
|
@ -261,14 +311,13 @@ void factor_vm::collect_compact_code_impl(bool trace_contexts_p)
|
|||
code_forwarder.visit_context_code_blocks();
|
||||
|
||||
/* Update code heap references in data heap */
|
||||
object_grow_heap_updater updater(code_forwarder);
|
||||
each_object(updater);
|
||||
object_grow_heap_updater object_updater(code_forwarder);
|
||||
each_object(object_updater);
|
||||
|
||||
/* Slide everything in the code heap up, and update code heap
|
||||
pointers inside code blocks. */
|
||||
code_block_compaction_updater<dummy_slot_forwarder> code_block_updater(this,slot_forwarder,code_forwarder);
|
||||
standard_sizer<code_block> code_block_sizer;
|
||||
code->allocator->compact(code_block_updater,code_block_sizer);
|
||||
code_block_compaction_updater<code_compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
|
||||
code->allocator->compact(code_block_updater,fixup,&code_finger);
|
||||
|
||||
update_code_roots_for_compaction();
|
||||
callbacks->update();
|
||||
|
|
|
@@ -55,6 +55,31 @@ void context::fix_stacks()
reset_retainstack();
}

void context::scrub_stacks(gc_info *info, cell index)
{
u8 *bitmap = info->gc_info_bitmap();

{
cell base = info->scrub_d_base(index);

for(cell loc = 0; loc < info->scrub_d_count; loc++)
{
if(bitmap_p(bitmap,base + loc))
((cell *)datastack)[-loc] = 0;
}
}

{
cell base = info->scrub_r_base(index);

for(cell loc = 0; loc < info->scrub_r_count; loc++)
{
if(bitmap_p(bitmap,base + loc))
printf("scrub retainstack %ld\n",loc);
}
}
}

context::~context()
{
delete datastack_seg;

@@ -45,6 +45,7 @@ struct context {
void reset_context_objects();
void reset();
void fix_stacks();
void scrub_stacks(gc_info *info, cell index);

cell peek()
{
@ -126,85 +126,6 @@ void factor_vm::init_data_heap(cell young_size, cell aging_size, cell tenured_si
|
|||
set_data_heap(new data_heap(young_size,aging_size,tenured_size));
|
||||
}
|
||||
|
||||
/* Size of the object pointed to by an untagged pointer */
|
||||
cell object::size() const
|
||||
{
|
||||
if(free_p()) return ((free_heap_block *)this)->size();
|
||||
|
||||
switch(type())
|
||||
{
|
||||
case ARRAY_TYPE:
|
||||
return align(array_size((array*)this),data_alignment);
|
||||
case BIGNUM_TYPE:
|
||||
return align(array_size((bignum*)this),data_alignment);
|
||||
case BYTE_ARRAY_TYPE:
|
||||
return align(array_size((byte_array*)this),data_alignment);
|
||||
case STRING_TYPE:
|
||||
return align(string_size(string_capacity((string*)this)),data_alignment);
|
||||
case TUPLE_TYPE:
|
||||
{
|
||||
tuple_layout *layout = (tuple_layout *)UNTAG(((tuple *)this)->layout);
|
||||
return align(tuple_size(layout),data_alignment);
|
||||
}
|
||||
case QUOTATION_TYPE:
|
||||
return align(sizeof(quotation),data_alignment);
|
||||
case WORD_TYPE:
|
||||
return align(sizeof(word),data_alignment);
|
||||
case FLOAT_TYPE:
|
||||
return align(sizeof(boxed_float),data_alignment);
|
||||
case DLL_TYPE:
|
||||
return align(sizeof(dll),data_alignment);
|
||||
case ALIEN_TYPE:
|
||||
return align(sizeof(alien),data_alignment);
|
||||
case WRAPPER_TYPE:
|
||||
return align(sizeof(wrapper),data_alignment);
|
||||
case CALLSTACK_TYPE:
|
||||
return align(callstack_object_size(untag_fixnum(((callstack *)this)->length)),data_alignment);
|
||||
default:
|
||||
critical_error("Invalid header",(cell)this);
|
||||
return 0; /* can't happen */
|
||||
}
|
||||
}
|
||||
|
||||
/* The number of cells from the start of the object which should be scanned by
|
||||
the GC. Some types have a binary payload at the end (string, word, DLL) which
|
||||
we ignore. */
|
||||
cell object::binary_payload_start() const
|
||||
{
|
||||
if(free_p()) return 0;
|
||||
|
||||
switch(type())
|
||||
{
|
||||
/* these objects do not refer to other objects at all */
|
||||
case FLOAT_TYPE:
|
||||
case BYTE_ARRAY_TYPE:
|
||||
case BIGNUM_TYPE:
|
||||
case CALLSTACK_TYPE:
|
||||
return 0;
|
||||
/* these objects have some binary data at the end */
|
||||
case WORD_TYPE:
|
||||
return sizeof(word) - sizeof(cell) * 3;
|
||||
case ALIEN_TYPE:
|
||||
return sizeof(cell) * 3;
|
||||
case DLL_TYPE:
|
||||
return sizeof(cell) * 2;
|
||||
case QUOTATION_TYPE:
|
||||
return sizeof(quotation) - sizeof(cell) * 2;
|
||||
case STRING_TYPE:
|
||||
return sizeof(string);
|
||||
/* everything else consists entirely of pointers */
|
||||
case ARRAY_TYPE:
|
||||
return array_size<array>(array_capacity((array*)this));
|
||||
case TUPLE_TYPE:
|
||||
return tuple_size(untag<tuple_layout>(((tuple *)this)->layout));
|
||||
case WRAPPER_TYPE:
|
||||
return sizeof(wrapper);
|
||||
default:
|
||||
critical_error("Invalid header",(cell)this);
|
||||
return 0; /* can't happen */
|
||||
}
|
||||
}
|
||||
|
||||
data_heap_room factor_vm::data_room()
|
||||
{
|
||||
data_heap_room room;
|
||||
|
|
|
@@ -0,0 +1,44 @@
namespace factor
{

template<typename T>
struct identity {
T operator()(T t)
{
return t;
}
};

struct no_fixup {
object *fixup_data(object *obj)
{
return obj;
}

code_block *fixup_code(code_block *compiled)
{
return compiled;
}

object *translate_data(const object *obj)
{
return fixup_data((object *)obj);
}

code_block *translate_code(const code_block *compiled)
{
return fixup_code((code_block *)compiled);
}

cell size(object *obj)
{
return obj->size();
}

cell size(code_block *compiled)
{
return compiled->size();
}
};

}
@ -23,8 +23,8 @@ template<typename Block> struct free_list_allocator {
|
|||
cell largest_free_block();
|
||||
cell free_block_count();
|
||||
void sweep();
|
||||
template<typename Iterator, typename Sizer> void compact(Iterator &iter, Sizer &sizer);
|
||||
template<typename Iterator, typename Sizer> void iterate(Iterator &iter, Sizer &sizer);
|
||||
template<typename Iterator, typename Fixup> void compact(Iterator &iter, Fixup fixup, const Block **finger);
|
||||
template<typename Iterator, typename Fixup> void iterate(Iterator &iter, Fixup fixup);
|
||||
template<typename Iterator> void iterate(Iterator &iter);
|
||||
};
|
||||
|
||||
|
@ -155,14 +155,17 @@ template<typename Block, typename Iterator> struct heap_compactor {
|
|||
mark_bits<Block> *state;
|
||||
char *address;
|
||||
Iterator &iter;
|
||||
const Block **finger;
|
||||
|
||||
explicit heap_compactor(mark_bits<Block> *state_, Block *address_, Iterator &iter_) :
|
||||
state(state_), address((char *)address_), iter(iter_) {}
|
||||
explicit heap_compactor(mark_bits<Block> *state_, Block *address_, Iterator &iter_, const Block **finger_) :
|
||||
state(state_), address((char *)address_), iter(iter_), finger(finger_) {}
|
||||
|
||||
void operator()(Block *block, cell size)
|
||||
{
|
||||
if(this->state->marked_p(block))
|
||||
{
|
||||
*finger = block;
|
||||
memmove((Block *)address,block,size);
|
||||
iter(block,(Block *)address,size);
|
||||
address += size;
|
||||
}
|
||||
|
@ -172,11 +175,11 @@ template<typename Block, typename Iterator> struct heap_compactor {
|
|||
/* The forwarding map must be computed first by calling
|
||||
state.compute_forwarding(). */
|
||||
template<typename Block>
|
||||
template<typename Iterator, typename Sizer>
|
||||
void free_list_allocator<Block>::compact(Iterator &iter, Sizer &sizer)
|
||||
template<typename Iterator, typename Fixup>
|
||||
void free_list_allocator<Block>::compact(Iterator &iter, Fixup fixup, const Block **finger)
|
||||
{
|
||||
heap_compactor<Block,Iterator> compactor(&state,first_block(),iter);
|
||||
iterate(compactor,sizer);
|
||||
heap_compactor<Block,Iterator> compactor(&state,first_block(),iter,finger);
|
||||
iterate(compactor,fixup);
|
||||
|
||||
/* Now update the free list; there will be a single free block at
|
||||
the end */
|
||||
|
@ -185,34 +188,26 @@ void free_list_allocator<Block>::compact(Iterator &iter, Sizer &sizer)
|
|||
|
||||
/* During compaction we have to be careful and measure object sizes differently */
|
||||
template<typename Block>
|
||||
template<typename Iterator, typename Sizer>
|
||||
void free_list_allocator<Block>::iterate(Iterator &iter, Sizer &sizer)
|
||||
template<typename Iterator, typename Fixup>
|
||||
void free_list_allocator<Block>::iterate(Iterator &iter, Fixup fixup)
|
||||
{
|
||||
Block *scan = first_block();
|
||||
Block *end = last_block();
|
||||
|
||||
while(scan != end)
|
||||
{
|
||||
cell size = sizer(scan);
|
||||
cell size = fixup.size(scan);
|
||||
Block *next = (Block *)((cell)scan + size);
|
||||
if(!scan->free_p()) iter(scan,size);
|
||||
scan = next;
|
||||
}
|
||||
}
|
||||
|
||||
template<typename Block> struct standard_sizer {
|
||||
cell operator()(Block *block)
|
||||
{
|
||||
return block->size();
|
||||
}
|
||||
};
|
||||
|
||||
template<typename Block>
|
||||
template<typename Iterator>
|
||||
void free_list_allocator<Block>::iterate(Iterator &iter)
|
||||
{
|
||||
standard_sizer<Block> sizer;
|
||||
iterate(iter,sizer);
|
||||
iterate(iter,no_fixup());
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -3,17 +3,9 @@
|
|||
namespace factor
|
||||
{
|
||||
|
||||
inline static code_block_visitor<code_workhorse> make_code_visitor(factor_vm *parent)
|
||||
{
|
||||
return code_block_visitor<code_workhorse>(parent,code_workhorse(parent));
|
||||
}
|
||||
|
||||
full_collector::full_collector(factor_vm *parent_) :
|
||||
collector<tenured_space,full_policy>(
|
||||
parent_,
|
||||
parent_->data->tenured,
|
||||
full_policy(parent_)),
|
||||
code_visitor(make_code_visitor(parent_)) {}
|
||||
collector<tenured_space,full_policy>(parent_,parent_->data->tenured,full_policy(parent_)),
|
||||
code_visitor(parent,workhorse) {}
|
||||
|
||||
void full_collector::trace_code_block(code_block *compiled)
|
||||
{
|
||||
|
|
|
@ -25,26 +25,8 @@ struct full_policy {
|
|||
}
|
||||
};
|
||||
|
||||
struct code_workhorse {
|
||||
factor_vm *parent;
|
||||
code_heap *code;
|
||||
|
||||
explicit code_workhorse(factor_vm *parent_) : parent(parent_), code(parent->code) {}
|
||||
|
||||
code_block *operator()(code_block *compiled)
|
||||
{
|
||||
if(!code->marked_p(compiled))
|
||||
{
|
||||
code->set_marked_p(compiled);
|
||||
parent->mark_stack.push_back((cell)compiled + 1);
|
||||
}
|
||||
|
||||
return compiled;
|
||||
}
|
||||
};
|
||||
|
||||
struct full_collector : collector<tenured_space,full_policy> {
|
||||
code_block_visitor<code_workhorse> code_visitor;
|
||||
code_block_visitor<gc_workhorse<tenured_space,full_policy> > code_visitor;
|
||||
|
||||
explicit full_collector(factor_vm *parent_);
|
||||
void trace_code_block(code_block *compiled);
|
||||
|
|
vm/gc.cpp

@@ -194,8 +194,51 @@ void factor_vm::gc(gc_op op, cell requested_bytes, bool trace_contexts_p)
current_gc = NULL;
}

/* primitive_minor_gc() is invoked by inline GC checks, and it needs to fill in
uninitialized stack locations before actually calling the GC. See the comment
in compiler.cfg.stacks.uninitialized for details. */

struct call_frame_scrubber {
factor_vm *parent;
context *ctx;

explicit call_frame_scrubber(factor_vm *parent_, context *ctx_) :
parent(parent_), ctx(ctx_) {}

void operator()(stack_frame *frame)
{
const code_block *compiled = parent->frame_code(frame);
gc_info *info = compiled->block_gc_info();

u32 return_address = (cell)FRAME_RETURN_ADDRESS(frame,parent) - (cell)compiled->entry_point();
int index = info->return_address_index(return_address);

if(index != -1)
ctx->scrub_stacks(info,index);
}
};

void factor_vm::scrub_context(context *ctx)
{
call_frame_scrubber scrubber(this,ctx);
iterate_callstack(ctx,scrubber);
}

void factor_vm::scrub_contexts()
{
std::set<context *>::const_iterator begin = active_contexts.begin();
std::set<context *>::const_iterator end = active_contexts.end();
while(begin != end)
{
scrub_context(*begin);
begin++;
}
}

void factor_vm::primitive_minor_gc()
{
scrub_contexts();

gc(collect_nursery_op,
0, /* requested size */
true /* trace contexts? */);
@ -215,36 +258,6 @@ void factor_vm::primitive_compact_gc()
|
|||
true /* trace contexts? */);
|
||||
}
|
||||
|
||||
void factor_vm::inline_gc(cell gc_roots_)
|
||||
{
|
||||
cell stack_pointer = (cell)ctx->callstack_top;
|
||||
|
||||
if(to_boolean(gc_roots_))
|
||||
{
|
||||
tagged<array> gc_roots(gc_roots_);
|
||||
|
||||
cell capacity = array_capacity(gc_roots.untagged());
|
||||
for(cell i = 0; i < capacity; i++)
|
||||
{
|
||||
cell spill_slot = untag_fixnum(array_nth(gc_roots.untagged(),i));
|
||||
cell *address = (cell *)(spill_slot + stack_pointer);
|
||||
data_roots.push_back(data_root_range(address,1));
|
||||
}
|
||||
|
||||
primitive_minor_gc();
|
||||
|
||||
for(cell i = 0; i < capacity; i++)
|
||||
data_roots.pop_back();
|
||||
}
|
||||
else
|
||||
primitive_minor_gc();
|
||||
}
|
||||
|
||||
VM_C_API void inline_gc(cell gc_roots, factor_vm *parent)
|
||||
{
|
||||
parent->inline_gc(gc_roots);
|
||||
}
|
||||
|
||||
/*
|
||||
* It is up to the caller to fill in the object's fields in a meaningful
|
||||
* fashion!
|
||||
|
|
|
@@ -52,6 +52,4 @@ struct gc_state {
void start_again(gc_op op_, factor_vm *parent);
};

VM_C_API void inline_gc(cell gc_roots, factor_vm *parent);

}
@@ -0,0 +1,19 @@
#include "master.hpp"

namespace factor
{

int gc_info::return_address_index(u32 return_address)
{
u32 *return_address_array = return_addresses();

for(cell i = 0; i < return_address_count; i++)
{
if(return_address == return_address_array[i])
return i;
}

return -1;
}

}
@@ -0,0 +1,51 @@
namespace factor
{

struct gc_info {
u32 scrub_d_count;
u32 scrub_r_count;
u32 gc_root_count;
u32 return_address_count;

cell total_bitmap_size()
{
return return_address_count * (scrub_d_count + scrub_r_count + gc_root_count);
}

cell total_bitmap_bytes()
{
return ((total_bitmap_size() + 7) / 8);
}

u32 *return_addresses()
{
return (u32 *)((u8 *)this - return_address_count * sizeof(u32));
}

u8 *gc_info_bitmap()
{
return (u8 *)return_addresses() - total_bitmap_bytes();
}

cell scrub_d_base(cell index)
{
return index * scrub_d_count;
}

cell scrub_r_base(cell index)
{
return return_address_count * scrub_d_count +
index * scrub_r_count;
}

cell spill_slot_base(cell index)
{
return return_address_count * scrub_d_count
+ return_address_count * scrub_r_count
+ index * gc_root_count;
}

int return_address_index(u32 return_address);
};

}
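
For orientation, the following sketch (illustrative only; the free functions restate the gc_info members above so the example compiles on its own, and are not part of the patch) works the base-index arithmetic through for the counts used in the compiler.codegen.fixup tests earlier in this diff: one 11-bit bitmap row per return address, with scrub-d bits first, then scrub-r bits, then GC root bits.

#include <cassert>

typedef unsigned long cell; /* assumed typedef for this standalone example */

static const cell scrub_d_count = 5, scrub_r_count = 2,
	gc_root_count = 4, return_address_count = 1;

static cell scrub_d_base(cell index) { return index * scrub_d_count; }

static cell scrub_r_base(cell index)
{
	return return_address_count * scrub_d_count + index * scrub_r_count;
}

static cell spill_slot_base(cell index)
{
	return return_address_count * scrub_d_count
		+ return_address_count * scrub_r_count
		+ index * gc_root_count;
}

int main()
{
	assert(scrub_d_base(0) == 0);      /* data stack bits occupy 0..4 */
	assert(scrub_r_base(0) == 5);      /* retain stack bits occupy 5..6 */
	assert(spill_slot_base(0) == 7);   /* GC root bits occupy 7..10 */
	/* 11 bits round up to the 2 bitmap bytes the fixup tests expect */
	assert((return_address_count
		* (scrub_d_count + scrub_r_count + gc_root_count) + 7) / 8 == 2);
	return 0;
}
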
vm/image.cpp

@@ -55,70 +55,66 @@ void factor_vm::load_code_heap(FILE *file, image_header *h, vm_parameters *p)
code->allocator->initial_free_list(h->code_size);
|
||||
}
|
||||
|
||||
struct data_fixupper {
|
||||
cell offset;
|
||||
|
||||
explicit data_fixupper(cell offset_) : offset(offset_) {}
|
||||
|
||||
object *operator()(object *obj)
|
||||
{
|
||||
return (object *)((char *)obj + offset);
|
||||
}
|
||||
};
|
||||
|
||||
struct code_fixupper {
|
||||
cell offset;
|
||||
|
||||
explicit code_fixupper(cell offset_) : offset(offset_) {}
|
||||
|
||||
code_block *operator()(code_block *compiled)
|
||||
{
|
||||
return (code_block *)((char *)compiled + offset);
|
||||
}
|
||||
};
|
||||
|
||||
static inline cell tuple_size_with_fixup(cell offset, object *obj)
|
||||
{
|
||||
tuple_layout *layout = (tuple_layout *)((char *)UNTAG(((tuple *)obj)->layout) + offset);
|
||||
return tuple_size(layout);
|
||||
}
|
||||
|
||||
struct fixup_sizer {
|
||||
cell offset;
|
||||
|
||||
explicit fixup_sizer(cell offset_) : offset(offset_) {}
|
||||
|
||||
cell operator()(object *obj)
|
||||
{
|
||||
if(obj->type() == TUPLE_TYPE)
|
||||
return align(tuple_size_with_fixup(offset,obj),data_alignment);
|
||||
else
|
||||
return obj->size();
|
||||
}
|
||||
};
|
||||
|
||||
struct object_fixupper {
|
||||
factor_vm *parent;
|
||||
struct startup_fixup {
|
||||
cell data_offset;
|
||||
slot_visitor<data_fixupper> data_visitor;
|
||||
code_block_visitor<code_fixupper> code_visitor;
|
||||
cell code_offset;
|
||||
|
||||
object_fixupper(factor_vm *parent_, cell data_offset_, cell code_offset_) :
|
||||
explicit startup_fixup(cell data_offset_, cell code_offset_) :
|
||||
data_offset(data_offset_), code_offset(code_offset_) {}
|
||||
|
||||
object *fixup_data(object *obj)
|
||||
{
|
||||
return (object *)((cell)obj + data_offset);
|
||||
}
|
||||
|
||||
code_block *fixup_code(code_block *obj)
|
||||
{
|
||||
return (code_block *)((cell)obj + code_offset);
|
||||
}
|
||||
|
||||
object *translate_data(const object *obj)
|
||||
{
|
||||
return fixup_data((object *)obj);
|
||||
}
|
||||
|
||||
code_block *translate_code(const code_block *compiled)
|
||||
{
|
||||
return fixup_code((code_block *)compiled);
|
||||
}
|
||||
|
||||
cell size(const object *obj)
|
||||
{
|
||||
return obj->size(*this);
|
||||
}
|
||||
|
||||
cell size(code_block *compiled)
|
||||
{
|
||||
return compiled->size(*this);
|
||||
}
|
||||
};
|
||||
|
||||
struct start_object_updater {
|
||||
factor_vm *parent;
|
||||
startup_fixup fixup;
|
||||
slot_visitor<startup_fixup> data_visitor;
|
||||
code_block_visitor<startup_fixup> code_visitor;
|
||||
|
||||
start_object_updater(factor_vm *parent_, startup_fixup fixup_) :
|
||||
parent(parent_),
|
||||
data_offset(data_offset_),
|
||||
data_visitor(slot_visitor<data_fixupper>(parent_,data_fixupper(data_offset_))),
|
||||
code_visitor(code_block_visitor<code_fixupper>(parent_,code_fixupper(code_offset_))) {}
|
||||
fixup(fixup_),
|
||||
data_visitor(slot_visitor<startup_fixup>(parent_,fixup_)),
|
||||
code_visitor(code_block_visitor<startup_fixup>(parent_,fixup_)) {}
|
||||
|
||||
void operator()(object *obj, cell size)
|
||||
{
|
||||
parent->data->tenured->starts.record_object_start_offset(obj);
|
||||
|
||||
data_visitor.visit_slots(obj);
|
||||
|
||||
switch(obj->type())
|
||||
{
|
||||
case ALIEN_TYPE:
|
||||
{
|
||||
cell payload_start = obj->binary_payload_start();
|
||||
data_visitor.visit_slots(obj,payload_start);
|
||||
|
||||
alien *ptr = (alien *)obj;
|
||||
|
||||
|
@ -130,22 +126,11 @@ struct object_fixupper {
|
|||
}
|
||||
case DLL_TYPE:
|
||||
{
|
||||
cell payload_start = obj->binary_payload_start();
|
||||
data_visitor.visit_slots(obj,payload_start);
|
||||
|
||||
parent->ffi_dlopen((dll *)obj);
|
||||
break;
|
||||
}
|
||||
case TUPLE_TYPE:
|
||||
{
|
||||
cell payload_start = tuple_size_with_fixup(data_offset,obj);
|
||||
data_visitor.visit_slots(obj,payload_start);
|
||||
break;
|
||||
}
|
||||
default:
|
||||
{
|
||||
cell payload_start = obj->binary_payload_start();
|
||||
data_visitor.visit_slots(obj,payload_start);
|
||||
code_visitor.visit_object_code_block(obj);
|
||||
break;
|
||||
}
|
||||
|
@ -155,44 +140,50 @@ struct object_fixupper {
|
|||
|
||||
void factor_vm::fixup_data(cell data_offset, cell code_offset)
|
||||
{
|
||||
slot_visitor<data_fixupper> data_workhorse(this,data_fixupper(data_offset));
|
||||
startup_fixup fixup(data_offset,code_offset);
|
||||
slot_visitor<startup_fixup> data_workhorse(this,fixup);
|
||||
data_workhorse.visit_roots();
|
||||
|
||||
object_fixupper fixupper(this,data_offset,code_offset);
|
||||
fixup_sizer sizer(data_offset);
|
||||
data->tenured->iterate(fixupper,sizer);
|
||||
start_object_updater updater(this,fixup);
|
||||
data->tenured->iterate(updater,fixup);
|
||||
}
|
||||
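The startup_fixup introduced above has one job during image load: every saved data pointer is shifted by data_offset and every saved code pointer by code_offset, the difference between where the heaps were when the image was saved and where they were actually mapped this run. A minimal sketch of that adjustment, with hypothetical base addresses rather than the VM's real loader state:

#include <stdint.h>

typedef uintptr_t cell;

struct object;  /* opaque stand-in for the VM's object header */

/* Same shape as startup_fixup's data half: add the relocation distance */
struct example_fixup {
	cell data_offset;
	explicit example_fixup(cell data_offset_) : data_offset(data_offset_) {}

	object *fixup_data(object *obj)
	{
		return (object *)((cell)obj + data_offset);
	}
};

int main()
{
	/* Image saved with the data heap at 0x10000000, mapped this run at 0x14000000 */
	example_fixup fixup(0x14000000 - 0x10000000);
	object *saved = (object *)0x10001230;
	return fixup.fixup_data(saved) == (object *)0x14001230 ? 0 : 1;
}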

struct code_block_fixup_relocation_visitor {
struct startup_code_block_relocation_visitor {
factor_vm *parent;
cell code_offset;
slot_visitor<data_fixupper> data_visitor;
code_fixupper code_visitor;
startup_fixup fixup;
slot_visitor<startup_fixup> data_visitor;

code_block_fixup_relocation_visitor(factor_vm *parent_, cell data_offset_, cell code_offset_) :
startup_code_block_relocation_visitor(factor_vm *parent_, startup_fixup fixup_) :
parent(parent_),
code_offset(code_offset_),
data_visitor(slot_visitor<data_fixupper>(parent_,data_fixupper(data_offset_))),
code_visitor(code_fixupper(code_offset_)) {}
fixup(fixup_),
data_visitor(slot_visitor<startup_fixup>(parent_,fixup_)) {}

void operator()(instruction_operand op)
{
code_block *compiled = op.parent_code_block();
cell old_offset = op.rel_offset() + (cell)compiled->entry_point() - code_offset;
cell old_offset = op.rel_offset() + (cell)compiled->entry_point() - fixup.code_offset;

switch(op.rel_type())
{
case RT_LITERAL:
op.store_value(data_visitor.visit_pointer(op.load_value(old_offset)));
break;
{
cell value = op.load_value(old_offset);
if(immediate_p(value))
op.store_value(value);
else
op.store_value(RETAG(fixup.fixup_data(untag<object>(value)),TAG(value)));
break;
}
case RT_ENTRY_POINT:
case RT_ENTRY_POINT_PIC:
case RT_ENTRY_POINT_PIC_TAIL:
op.store_code_block(code_visitor(op.load_code_block(old_offset)));
break;
case RT_HERE:
op.store_value(op.load_value(old_offset) + code_offset);
break;
{
cell value = op.load_value(old_offset);
cell offset = value & (data_alignment - 1);
op.store_value((cell)fixup.fixup_code((code_block *)value) + offset);
break;
}
case RT_UNTAGGED:
break;
default:

@ -202,30 +193,28 @@ struct code_block_fixup_relocation_visitor {
}
};

struct code_block_fixupper {
struct startup_code_block_updater {
factor_vm *parent;
cell data_offset;
cell code_offset;
startup_fixup fixup;

code_block_fixupper(factor_vm *parent_, cell data_offset_, cell code_offset_) :
parent(parent_),
data_offset(data_offset_),
code_offset(code_offset_) {}
startup_code_block_updater(factor_vm *parent_, startup_fixup fixup_) :
parent(parent_), fixup(fixup_) {}

void operator()(code_block *compiled, cell size)
{
slot_visitor<data_fixupper> data_visitor(parent,data_fixupper(data_offset));
slot_visitor<startup_fixup> data_visitor(parent,fixup);
data_visitor.visit_code_block_objects(compiled);

code_block_fixup_relocation_visitor code_visitor(parent,data_offset,code_offset);
startup_code_block_relocation_visitor code_visitor(parent,fixup);
compiled->each_instruction_operand(code_visitor);
}
};

void factor_vm::fixup_code(cell data_offset, cell code_offset)
{
code_block_fixupper fixupper(this,data_offset,code_offset);
code->allocator->iterate(fixupper);
startup_fixup fixup(data_offset,code_offset);
startup_code_block_updater updater(this,fixup);
code->allocator->iterate(updater,fixup);
}

/* Read an image file from disk, only done once during startup */
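One detail worth calling out in the relocation visitor above: because code blocks are data_alignment-aligned, an RT_HERE value can carry a small offset into the block in its low bits, and relocation only needs to move the block base while keeping that offset. A worked illustration of splitting and recombining such a value, with made-up addresses (a sketch of the idea, not the VM's exact expression):

#include <stdint.h>

typedef uintptr_t cell;

static const cell data_alignment = 16;

int main()
{
	cell code_offset = 0x2000;       /* hypothetical relocation distance */
	cell value = 0x500000 + 5;       /* aligned block base plus a 5-byte offset */

	cell offset = value & (data_alignment - 1);   /* 5 */
	cell block = value & ~(data_alignment - 1);   /* 0x500000 */
	cell moved = (block + code_offset) + offset;  /* 0x502005 */

	return moved == 0x502005 ? 0 : 1;
}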
@ -116,6 +116,11 @@ void jit::compute_position(cell offset_)
/* Allocates memory */
code_block *jit::to_code_block()
{
/* Emit dummy GC info */
code.grow_bytes(alignment_for(code.count + 4,data_alignment));
u32 dummy_gc_info = 0;
code.append_bytes(&dummy_gc_info,sizeof(u32));

code.trim();
relocation.trim();
parameters.trim();
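The padding call above makes the 4-byte dummy GC info land flush against a data_alignment boundary: alignment_for(code.count + 4, data_alignment), defined in the next hunk, returns how many filler bytes are needed so that count + filler + 4 is a multiple of the alignment. A small worked example with an assumed code.count of 37 and the 16-byte data_alignment declared below:

#include <stdint.h>

typedef uintptr_t cell;

static const cell data_alignment = 16;

static cell align(cell a, cell b) { return (a + (b - 1)) & ~(b - 1); }
static cell alignment_for(cell a, cell b) { return align(a, b) - a; }

int main()
{
	cell count = 37;                                      /* hypothetical code.count */
	cell pad = alignment_for(count + 4, data_alignment);  /* align(41,16) - 41 = 7 */
	/* 37 bytes of code + 7 filler bytes + 4-byte GC info word = 48, a multiple of 16 */
	return (count + pad + 4) % data_alignment == 0 ? 0 : 1;
}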
@ -23,6 +23,11 @@ inline static cell align(cell a, cell b)
return (a + (b-1)) & ~(b-1);
}

inline static cell alignment_for(cell a, cell b)
{
return align(a,b) - a;
}

static const cell data_alignment = 16;

#define WORD_SIZE (signed)(sizeof(cell)*8)

@ -98,7 +103,10 @@ struct object {
cell header;

cell size() const;
template<typename Fixup> cell size(Fixup fixup) const;

cell binary_payload_start() const;
template<typename Fixup> cell binary_payload_start(Fixup fixup) const;

cell *slots() const { return (cell *)this; }
@ -40,7 +40,7 @@ template<typename Block> struct mark_bits {
forwarding = NULL;
}

cell block_line(Block *address)
cell block_line(const Block *address)
{
return (((cell)address - start) / data_alignment);
}

@ -50,7 +50,7 @@ template<typename Block> struct mark_bits {
return (Block *)(line * data_alignment + start);
}

std::pair<cell,cell> bitmap_deref(Block *address)
std::pair<cell,cell> bitmap_deref(const Block *address)
{
cell line_number = block_line(address);
cell word_index = (line_number / mark_bits_granularity);

@ -58,18 +58,18 @@ template<typename Block> struct mark_bits {
return std::make_pair(word_index,word_shift);
}

bool bitmap_elt(cell *bits, Block *address)
bool bitmap_elt(cell *bits, const Block *address)
{
std::pair<cell,cell> position = bitmap_deref(address);
return (bits[position.first] & ((cell)1 << position.second)) != 0;
}

Block *next_block_after(Block *block)
Block *next_block_after(const Block *block)
{
return (Block *)((cell)block + block->size());
}

void set_bitmap_range(cell *bits, Block *address)
void set_bitmap_range(cell *bits, const Block *address)
{
std::pair<cell,cell> start = bitmap_deref(address);
std::pair<cell,cell> end = bitmap_deref(next_block_after(address));

@ -99,12 +99,12 @@ template<typename Block> struct mark_bits {
}
}

bool marked_p(Block *address)
bool marked_p(const Block *address)
{
return bitmap_elt(marked,address);
}

void set_marked_p(Block *address)
void set_marked_p(const Block *address)
{
set_bitmap_range(marked,address);
}

@ -123,7 +123,7 @@ template<typename Block> struct mark_bits {

/* We have the popcount for every mark_bits_granularity entries; look
up and compute the rest */
Block *forward_block(Block *original)
Block *forward_block(const Block *original)
{
#ifdef FACTOR_DEBUG
assert(marked_p(original));

@ -141,7 +141,7 @@ template<typename Block> struct mark_bits {
return new_block;
}

Block *next_unmarked_block_after(Block *original)
Block *next_unmarked_block_after(const Block *original)
{
std::pair<cell,cell> position = bitmap_deref(original);
cell bit_index = position.second;

@ -168,7 +168,7 @@ template<typename Block> struct mark_bits {
return (Block *)(this->start + this->size);
}

Block *next_marked_block_after(Block *original)
Block *next_marked_block_after(const Block *original)
{
std::pair<cell,cell> position = bitmap_deref(original);
cell bit_index = position.second;
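bitmap_deref above turns a block address into a (word, bit) pair: the block's line number (one line per data_alignment bytes from the heap start) is divided by mark_bits_granularity to pick the word, and the remainder picks the bit. A stand-alone sketch of that arithmetic; the heap base is made up, and taking mark_bits_granularity to be the bit width of a cell is an assumption:

#include <stdint.h>
#include <utility>

typedef uintptr_t cell;

static const cell data_alignment = 16;
static const cell mark_bits_granularity = sizeof(cell) * 8;  /* assumed: bits per cell */

static std::pair<cell,cell> bitmap_deref(cell start, cell address)
{
	cell line_number = (address - start) / data_alignment;
	cell word_index = line_number / mark_bits_granularity;
	cell word_shift = line_number % mark_bits_granularity;
	return std::make_pair(word_index, word_shift);
}

int main()
{
	cell start = 0x100000;                        /* hypothetical heap base */
	cell address = start + 100 * data_alignment;  /* the block on line 100 */
	std::pair<cell,cell> p = bitmap_deref(start, address);
	/* With 64-bit cells this is word 1, bit 36 */
	return (p.first == 100 / mark_bits_granularity &&
	        p.second == 100 % mark_bits_granularity) ? 0 : 1;
}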
@ -75,6 +75,7 @@ namespace factor
#include "platform.hpp"
#include "primitives.hpp"
#include "segments.hpp"
#include "gc_info.hpp"
#include "contexts.hpp"
#include "run.hpp"
#include "objects.hpp"

@ -89,6 +90,8 @@ namespace factor
#include "bitwise_hacks.hpp"
#include "mark_bits.hpp"
#include "free_list.hpp"
#include "fixup.hpp"
#include "tuples.hpp"
#include "free_list_allocator.hpp"
#include "write_barrier.hpp"
#include "object_start_map.hpp"

@ -100,7 +103,6 @@ namespace factor
#include "gc.hpp"
#include "debug.hpp"
#include "strings.hpp"
#include "tuples.hpp"
#include "words.hpp"
#include "float_bits.hpp"
#include "io.hpp"

@ -115,6 +117,7 @@ namespace factor
#include "data_roots.hpp"
#include "code_roots.hpp"
#include "generic_arrays.hpp"
#include "callstack.hpp"
#include "slot_visitor.hpp"
#include "collector.hpp"
#include "copying_collector.hpp"

@ -124,7 +127,6 @@ namespace factor
#include "code_block_visitor.hpp"
#include "compaction.hpp"
#include "full_collector.hpp"
#include "callstack.hpp"
#include "arrays.hpp"
#include "math.hpp"
#include "byte_arrays.hpp"
@ -82,13 +82,13 @@ void factor_vm::primitive_size()
ctx->push(allot_cell(object_size(ctx->pop())));
}

struct slot_become_visitor {
struct slot_become_fixup : no_fixup {
std::map<object *,object *> *become_map;

explicit slot_become_visitor(std::map<object *,object *> *become_map_) :
explicit slot_become_fixup(std::map<object *,object *> *become_map_) :
become_map(become_map_) {}

object *operator()(object *old)
object *fixup_data(object *old)
{
std::map<object *,object *>::const_iterator iter = become_map->find(old);
if(iter != become_map->end())

@ -99,9 +99,9 @@ struct slot_become_visitor {
};

struct object_become_visitor {
slot_visitor<slot_become_visitor> *workhorse;
slot_visitor<slot_become_fixup> *workhorse;

explicit object_become_visitor(slot_visitor<slot_become_visitor> *workhorse_) :
explicit object_become_visitor(slot_visitor<slot_become_fixup> *workhorse_) :
workhorse(workhorse_) {}

void operator()(object *obj)

@ -111,9 +111,9 @@ struct object_become_visitor {
};

struct code_block_become_visitor {
slot_visitor<slot_become_visitor> *workhorse;
slot_visitor<slot_become_fixup> *workhorse;

explicit code_block_become_visitor(slot_visitor<slot_become_visitor> *workhorse_) :
explicit code_block_become_visitor(slot_visitor<slot_become_fixup> *workhorse_) :
workhorse(workhorse_) {}

void operator()(code_block *compiled, cell size)

@ -160,7 +160,7 @@ void factor_vm::primitive_become()

/* Update all references to old objects to point to new objects */
{
slot_visitor<slot_become_visitor> workhorse(this,slot_become_visitor(&become_map));
slot_visitor<slot_become_fixup> workhorse(this,slot_become_fixup(&become_map));
workhorse.visit_roots();
workhorse.visit_contexts();
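slot_become_fixup above illustrates the pattern this commit settles on: visitors no longer take an arbitrary functor but a "fixup" object with a small informal interface, and no_fixup supplies identity behaviour that subclasses override selectively. The sketch below reconstructs that implied interface from the call sites in this diff; only the names no_fixup, fixup_data, fixup_code, translate_data and translate_code come from the source, the rest is an assumption:

struct object;
struct code_block;

/* Identity fixup: a pass over the heap that does not move anything.
   Sketch of the interface implied by the diff, not the VM's header. */
struct identity_fixup_sketch {
	/* "where will this object be after the pass?" */
	object *fixup_data(object *obj) { return obj; }
	code_block *fixup_code(code_block *compiled) { return compiled; }

	/* "where can this object be read right now?" -- differs from the
	   above only during startup, when saved pointers are still stale */
	object *translate_data(const object *obj) { return (object *)obj; }
	code_block *translate_code(const code_block *c) { return (code_block *)c; }
};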
@ -1,6 +1,100 @@
namespace factor
{

/* Size of the object pointed to by an untagged pointer */
template<typename Fixup>
cell object::size(Fixup fixup) const
{
if(free_p()) return ((free_heap_block *)this)->size();

switch(type())
{
case ARRAY_TYPE:
return align(array_size((array*)this),data_alignment);
case BIGNUM_TYPE:
return align(array_size((bignum*)this),data_alignment);
case BYTE_ARRAY_TYPE:
return align(array_size((byte_array*)this),data_alignment);
case STRING_TYPE:
return align(string_size(string_capacity((string*)this)),data_alignment);
case TUPLE_TYPE:
{
tuple_layout *layout = (tuple_layout *)fixup.translate_data(untag<object>(((tuple *)this)->layout));
return align(tuple_size(layout),data_alignment);
}
case QUOTATION_TYPE:
return align(sizeof(quotation),data_alignment);
case WORD_TYPE:
return align(sizeof(word),data_alignment);
case FLOAT_TYPE:
return align(sizeof(boxed_float),data_alignment);
case DLL_TYPE:
return align(sizeof(dll),data_alignment);
case ALIEN_TYPE:
return align(sizeof(alien),data_alignment);
case WRAPPER_TYPE:
return align(sizeof(wrapper),data_alignment);
case CALLSTACK_TYPE:
return align(callstack_object_size(untag_fixnum(((callstack *)this)->length)),data_alignment);
default:
critical_error("Invalid header in size",(cell)this);
return 0; /* can't happen */
}
}

inline cell object::size() const
{
return size(no_fixup());
}

/* The number of cells from the start of the object which should be scanned by
the GC. Some types have a binary payload at the end (string, word, DLL) which
we ignore. */
template<typename Fixup>
cell object::binary_payload_start(Fixup fixup) const
{
if(free_p()) return 0;

switch(type())
{
/* these objects do not refer to other objects at all */
case FLOAT_TYPE:
case BYTE_ARRAY_TYPE:
case BIGNUM_TYPE:
case CALLSTACK_TYPE:
return 0;
/* these objects have some binary data at the end */
case WORD_TYPE:
return sizeof(word) - sizeof(cell) * 3;
case ALIEN_TYPE:
return sizeof(cell) * 3;
case DLL_TYPE:
return sizeof(cell) * 2;
case QUOTATION_TYPE:
return sizeof(quotation) - sizeof(cell) * 2;
case STRING_TYPE:
return sizeof(string);
/* everything else consists entirely of pointers */
case ARRAY_TYPE:
return array_size<array>(array_capacity((array*)this));
case TUPLE_TYPE:
{
tuple_layout *layout = (tuple_layout *)fixup.translate_data(untag<object>(((tuple *)this)->layout));
return tuple_size(layout);
}
case WRAPPER_TYPE:
return sizeof(wrapper);
default:
critical_error("Invalid header in binary_payload_start",(cell)this);
return 0; /* can't happen */
}
}

inline cell object::binary_payload_start() const
{
return binary_payload_start(no_fixup());
}
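The TUPLE_TYPE branches are the only reason size() and binary_payload_start() grew a Fixup parameter: a tuple's size is stored in its layout object, and during startup the saved layout pointer still refers to the image's old address, so it must be translated before it is dereferenced. A small sketch of that situation with hypothetical types and addresses:

#include <stdint.h>

typedef uintptr_t cell;

/* Hypothetical stand-ins for the VM's tuple and tuple_layout */
struct example_layout { cell size; };
struct example_tuple { example_layout *layout; /* saved, possibly stale */ };

struct example_startup_fixup {
	cell data_offset;
	example_layout *translate_data(example_layout *p)
	{
		return (example_layout *)((cell)p + data_offset);
	}
};

/* Reading t->layout->size directly would chase a stale pointer during
   startup; asking the fixup for the current address is what the real
   code does via fixup.translate_data(). */
cell tuple_payload(example_tuple *t, example_startup_fixup fixup)
{
	example_layout *layout = fixup.translate_data(t->layout);
	return layout->size;
}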
/* Slot visitors iterate over the slots of an object, applying a functor to
each one that is a non-immediate slot. The pointer is untagged first. The
functor returns a new untagged object pointer. The return value may or may not equal the old one,

@ -17,12 +111,12 @@ Iteration is driven by visit_*() methods. Some of them define GC roots:
- visit_roots()
- visit_contexts() */

template<typename Visitor> struct slot_visitor {
template<typename Fixup> struct slot_visitor {
factor_vm *parent;
Visitor visitor;
Fixup fixup;

explicit slot_visitor<Visitor>(factor_vm *parent_, Visitor visitor_) :
parent(parent_), visitor(visitor_) {}
explicit slot_visitor<Fixup>(factor_vm *parent_, Fixup fixup_) :
parent(parent_), fixup(fixup_) {}

cell visit_pointer(cell pointer);
void visit_handle(cell *handle);

@ -35,35 +129,36 @@ template<typename Visitor> struct slot_visitor {
void visit_callback_roots();
void visit_literal_table_roots();
void visit_roots();
void visit_callstack_object(callstack *stack);
void visit_callstack(context *ctx);
void visit_contexts();
void visit_code_block_objects(code_block *compiled);
void visit_embedded_literals(code_block *compiled);
};

template<typename Visitor>
cell slot_visitor<Visitor>::visit_pointer(cell pointer)
template<typename Fixup>
cell slot_visitor<Fixup>::visit_pointer(cell pointer)
{
if(immediate_p(pointer)) return pointer;

object *untagged = untag<object>(pointer);
untagged = visitor(untagged);
object *untagged = fixup.fixup_data(untag<object>(pointer));
return RETAG(untagged,TAG(pointer));
}
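visit_pointer encodes the tagging discipline everything else relies on: the type tag rides in the low bits of the cell, is stripped before the fixup sees the pointer, and is put back afterwards. A toy round trip of that idea, assuming a 4-bit tag (which is what 16-byte data_alignment leaves free); the VM's TAG/UNTAG/RETAG macros are modelled here, not quoted:

#include <stdint.h>

typedef uintptr_t cell;

/* Assumed 4-bit tag in the low bits of every tagged pointer */
static const cell tag_mask = 0xf;

static cell tag_of(cell p) { return p & tag_mask; }
static cell untag_ptr(cell p) { return p & ~tag_mask; }
static cell retag(cell p, cell tag) { return (p & ~tag_mask) | tag; }

static cell visit_pointer_sketch(cell pointer, cell data_offset)
{
	cell moved = untag_ptr(pointer) + data_offset;  /* fixup_data on the untagged pointer */
	return retag(moved, tag_of(pointer));           /* the tag survives the move */
}

int main()
{
	cell p = 0x1000 | 0x2;  /* hypothetical tagged pointer with tag 2 */
	cell q = visit_pointer_sketch(p, 0x100);
	return (q == (0x1100 | 0x2)) ? 0 : 1;
}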

template<typename Visitor>
void slot_visitor<Visitor>::visit_handle(cell *handle)
template<typename Fixup>
void slot_visitor<Fixup>::visit_handle(cell *handle)
{
*handle = visit_pointer(*handle);
}

template<typename Visitor>
void slot_visitor<Visitor>::visit_object_array(cell *start, cell *end)
template<typename Fixup>
void slot_visitor<Fixup>::visit_object_array(cell *start, cell *end)
{
while(start < end) visit_handle(start++);
}

template<typename Visitor>
void slot_visitor<Visitor>::visit_slots(object *ptr, cell payload_start)
template<typename Fixup>
void slot_visitor<Fixup>::visit_slots(object *ptr, cell payload_start)
{
cell *slot = (cell *)ptr;
cell *end = (cell *)((cell)ptr + payload_start);

@ -75,20 +170,23 @@ void slot_visitor<Visitor>::visit_slots(object *ptr, cell payload_start)
}
}

template<typename Visitor>
void slot_visitor<Visitor>::visit_slots(object *ptr)
template<typename Fixup>
void slot_visitor<Fixup>::visit_slots(object *obj)
{
visit_slots(ptr,ptr->binary_payload_start());
if(obj->type() == CALLSTACK_TYPE)
visit_callstack_object((callstack *)obj);
else
visit_slots(obj,obj->binary_payload_start(fixup));
}

template<typename Visitor>
void slot_visitor<Visitor>::visit_stack_elements(segment *region, cell *top)
template<typename Fixup>
void slot_visitor<Fixup>::visit_stack_elements(segment *region, cell *top)
{
visit_object_array((cell *)region->start,top + 1);
}

template<typename Visitor>
void slot_visitor<Visitor>::visit_data_roots()
template<typename Fixup>
void slot_visitor<Fixup>::visit_data_roots()
{
std::vector<data_root_range>::const_iterator iter = parent->data_roots.begin();
std::vector<data_root_range>::const_iterator end = parent->data_roots.end();

@ -97,8 +195,8 @@ void slot_visitor<Visitor>::visit_data_roots()
visit_object_array(iter->start,iter->start + iter->len);
}

template<typename Visitor>
void slot_visitor<Visitor>::visit_bignum_roots()
template<typename Fixup>
void slot_visitor<Fixup>::visit_bignum_roots()
{
std::vector<cell>::const_iterator iter = parent->bignum_roots.begin();
std::vector<cell>::const_iterator end = parent->bignum_roots.end();

@ -108,16 +206,16 @@ void slot_visitor<Visitor>::visit_bignum_roots()
cell *handle = (cell *)(*iter);

if(*handle)
*handle = (cell)visitor(*(object **)handle);
*handle = (cell)fixup.fixup_data(*(object **)handle);
}
}

template<typename Visitor>
template<typename Fixup>
struct callback_slot_visitor {
callback_heap *callbacks;
slot_visitor<Visitor> *visitor;
slot_visitor<Fixup> *visitor;

explicit callback_slot_visitor(callback_heap *callbacks_, slot_visitor<Visitor> *visitor_) :
explicit callback_slot_visitor(callback_heap *callbacks_, slot_visitor<Fixup> *visitor_) :
callbacks(callbacks_), visitor(visitor_) {}

void operator()(code_block *stub)

@ -126,15 +224,15 @@ struct callback_slot_visitor {
}
};

template<typename Visitor>
void slot_visitor<Visitor>::visit_callback_roots()
template<typename Fixup>
void slot_visitor<Fixup>::visit_callback_roots()
{
callback_slot_visitor<Visitor> callback_visitor(parent->callbacks,this);
callback_slot_visitor<Fixup> callback_visitor(parent->callbacks,this);
parent->callbacks->each_callback(callback_visitor);
}

template<typename Visitor>
void slot_visitor<Visitor>::visit_literal_table_roots()
template<typename Fixup>
void slot_visitor<Fixup>::visit_literal_table_roots()
{
std::map<code_block *, cell> *uninitialized_blocks = &parent->code->uninitialized_blocks;
std::map<code_block *, cell>::const_iterator iter = uninitialized_blocks->begin();

@ -151,8 +249,8 @@ void slot_visitor<Visitor>::visit_literal_table_roots()
parent->code->uninitialized_blocks = new_uninitialized_blocks;
}

template<typename Visitor>
void slot_visitor<Visitor>::visit_roots()
template<typename Fixup>
void slot_visitor<Fixup>::visit_roots()
{
visit_handle(&parent->true_object);
visit_handle(&parent->bignum_zero);

@ -167,8 +265,62 @@ void slot_visitor<Visitor>::visit_roots()
visit_object_array(parent->special_objects,parent->special_objects + special_object_count);
}

template<typename Visitor>
void slot_visitor<Visitor>::visit_contexts()
template<typename Fixup>
struct call_frame_slot_visitor {
factor_vm *parent;
slot_visitor<Fixup> *visitor;

explicit call_frame_slot_visitor(factor_vm *parent_, slot_visitor<Fixup> *visitor_) :
parent(parent_), visitor(visitor_) {}

/*
next -> [entry_point]
[size]
[return address] -- x86 only, backend adds 1 to each spill location
[spill area]
...
frame -> [entry_point]
[size]
*/
void operator()(stack_frame *frame)
{
const code_block *compiled = visitor->fixup.translate_code(parent->frame_code(frame));
gc_info *info = compiled->block_gc_info();

u32 return_address = (cell)FRAME_RETURN_ADDRESS(frame,parent) - (cell)compiled->entry_point();
int index = info->return_address_index(return_address);

if(index != -1)
{
u8 *bitmap = info->gc_info_bitmap();
cell base = info->spill_slot_base(index);
cell *stack_pointer = (cell *)(parent->frame_successor(frame) + 1);

for(cell spill_slot = 0; spill_slot < info->gc_root_count; spill_slot++)
{
if(bitmap_p(bitmap,base + spill_slot))
visitor->visit_handle(&stack_pointer[spill_slot]);
}
}
}
};
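call_frame_slot_visitor is where the GC maps earn their keep: instead of treating the whole spill area as roots (or scrubbing it eagerly), the collector looks up the frame's return address in the code block's gc_info and visits only the spill slots whose bit is set for that safepoint. The condensed walkthrough below mirrors that lookup with a flat, hypothetical map structure and a linear search standing in for return_address_index; none of these names are the VM's own:

#include <stdint.h>
#include <stddef.h>

typedef uintptr_t cell;

/* Hypothetical flattened view of one code block's GC map */
struct example_gc_map {
	const uint32_t *return_addresses;  /* code offsets of recorded safepoints */
	size_t return_address_count;
	size_t gc_root_count;              /* spill slots tracked per safepoint */
	const uint8_t *bitmap;             /* packed liveness bits, one run per safepoint */

	int return_address_index(uint32_t offset) const
	{
		for(size_t i = 0; i < return_address_count; i++)
			if(return_addresses[i] == offset) return (int)i;
		return -1;
	}
	size_t spill_slot_base(size_t index) const { return index * gc_root_count; }
};

static bool bit_set(const uint8_t *bits, size_t i) { return (bits[i / 8] >> (i % 8)) & 1; }

/* Visit only the spill slots recorded as live at this return address */
template<typename Visit>
void scan_frame(const example_gc_map &map, uint32_t return_offset,
	cell *spill_area, Visit visit)
{
	int index = map.return_address_index(return_offset);
	if(index == -1) return;  /* no safepoint here: nothing to trace */
	size_t base = map.spill_slot_base((size_t)index);
	for(size_t slot = 0; slot < map.gc_root_count; slot++)
		if(bit_set(map.bitmap, base + slot))
			visit(&spill_area[slot]);
}

int main()
{
	uint32_t addrs[2] = { 16, 48 };
	uint8_t bits[1] = { 0x05 };  /* safepoint 0: slots 0 and 2 live, slot 1 dead */
	example_gc_map map = { addrs, 2, 3, bits };
	cell spills[3] = { 0x1000, 0, 0x2000 };
	int visited = 0;
	scan_frame(map, 16, spills, [&](cell *) { visited++; });
	return visited == 2 ? 0 : 1;
}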

template<typename Fixup>
void slot_visitor<Fixup>::visit_callstack_object(callstack *stack)
{
call_frame_slot_visitor<Fixup> call_frame_visitor(parent,this);
parent->iterate_callstack_object(stack,call_frame_visitor);
}

template<typename Fixup>
void slot_visitor<Fixup>::visit_callstack(context *ctx)
{
call_frame_slot_visitor<Fixup> call_frame_visitor(parent,this);
parent->iterate_callstack(ctx,call_frame_visitor);
}

template<typename Fixup>
void slot_visitor<Fixup>::visit_contexts()
{
std::set<context *>::const_iterator begin = parent->active_contexts.begin();
std::set<context *>::const_iterator end = parent->active_contexts.end();

@ -179,16 +331,16 @@ void slot_visitor<Visitor>::visit_contexts()
visit_stack_elements(ctx->datastack_seg,(cell *)ctx->datastack);
visit_stack_elements(ctx->retainstack_seg,(cell *)ctx->retainstack);
visit_object_array(ctx->context_objects,ctx->context_objects + context_object_count);

visit_callstack(ctx);
begin++;
}
}

template<typename Visitor>
template<typename Fixup>
struct literal_references_visitor {
slot_visitor<Visitor> *visitor;
slot_visitor<Fixup> *visitor;

explicit literal_references_visitor(slot_visitor<Visitor> *visitor_) : visitor(visitor_) {}
explicit literal_references_visitor(slot_visitor<Fixup> *visitor_) : visitor(visitor_) {}

void operator()(instruction_operand op)
{

@ -197,20 +349,20 @@ struct literal_references_visitor {
}
};

template<typename Visitor>
void slot_visitor<Visitor>::visit_code_block_objects(code_block *compiled)
template<typename Fixup>
void slot_visitor<Fixup>::visit_code_block_objects(code_block *compiled)
{
visit_handle(&compiled->owner);
visit_handle(&compiled->parameters);
visit_handle(&compiled->relocation);
}

template<typename Visitor>
void slot_visitor<Visitor>::visit_embedded_literals(code_block *compiled)
template<typename Fixup>
void slot_visitor<Fixup>::visit_embedded_literals(code_block *compiled)
{
if(!parent->code->uninitialized_p(compiled))
{
literal_references_visitor<Visitor> visitor(this);
literal_references_visitor<Fixup> visitor(this);
compiled->each_instruction_operand(visitor);
}
}
@ -317,10 +317,11 @@ struct factor_vm
void collect_compact(bool trace_contexts_p);
void collect_growing_heap(cell requested_bytes, bool trace_contexts_p);
void gc(gc_op op, cell requested_bytes, bool trace_contexts_p);
void scrub_context(context *ctx);
void scrub_contexts();
void primitive_minor_gc();
void primitive_full_gc();
void primitive_compact_gc();
void inline_gc(cell gc_roots);
void primitive_enable_gc_events();
void primitive_disable_gc_events();
object *allot_object(cell type, cell size);