Change C++ exception usage back into a longjmp() due to GCC bugs
parent 14840edc4b
commit 6c047127ee
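The diff below swaps the C++ throw/catch used to signal a full GC generation for setjmp()/longjmp(), per the commit message working around GCC bugs. As a reminder of the control flow involved, here is a minimal standalone sketch of the pattern the VM adopts; it is not Factor code, and the names (unwind, attempts, allocate_or_fail) are invented for illustration:

#include <csetjmp>
#include <cstdio>

static jmp_buf unwind;     /* the "catch" site, registered by setjmp() */
static int attempts = 0;

/* The "throw" site: instead of `throw some_condition();`, jump back to the
   most recent setjmp() on this jmp_buf, passing a nonzero value. */
static void allocate_or_fail()
{
	if(attempts < 2)       /* pretend the first attempt runs out of room */
		longjmp(unwind,1);
}

int main()
{
	if(setjmp(unwind))
	{
		/* Control resumes here each time allocate_or_fail() calls longjmp(). */
		printf("allocation failed, retrying\n");
	}
	attempts++;
	allocate_or_fail();
	printf("succeeded after %d attempts\n",attempts);
	return 0;
}

Unlike a thrown exception, longjmp() runs no destructors for the frames it abandons, so the pattern is only appropriate when nothing between setjmp() and longjmp() owns resources that need unwinding; the GC code below presumably relies on that.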
Makefile
@@ -60,6 +60,7 @@ DLL_OBJS = $(PLAF_DLL_OBJS) \
 	vm/strings.o \
 	vm/tuples.o \
 	vm/utilities.o \
+	vm/vm.o \
 	vm/words.o \
 	vm/write_barrier.o
vm/data_gc.cpp
@@ -12,18 +12,16 @@ gc_state::gc_state(data_heap *data_, bool growing_data_heap_, cell collecting_gen
 	data(data_),
 	growing_data_heap(growing_data_heap_),
 	collecting_gen(collecting_gen_),
+	collecting_aging_again(false),
 	start_time(current_micros()) { }
 
 gc_state::~gc_state() { }
 
-/* If a generation fills up, throw this error. It is caught in garbage_collection() */
-struct generation_full_condition { };
-
 /* Given a pointer to oldspace, copy it to newspace */
 object *factor_vm::copy_untagged_object_impl(object *pointer, cell size)
 {
 	if(current_gc->newspace->here + size >= current_gc->newspace->end)
-		throw generation_full_condition();
+		longjmp(current_gc->gc_unwind,1);
 
 	object *newpointer = allot_zone(current_gc->newspace,size);
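Here copy_untagged_object_impl() no longer throws generation_full_condition; it unwinds through a jmp_buf stored in the active gc_state (the gc_unwind field added to the struct further down). A sketch of keeping the jump target inside a state object rather than a global, again illustrative only: zone_like, gc_state_like, allot_or_unwind and the sizes are made up, and `cell` is just a typedef standing in for Factor's.

#include <csetjmp>
#include <cstdio>

typedef unsigned long cell;             /* stand-in for Factor's cell */

struct zone_like { cell here, end; };   /* a copying-GC semispace, reduced to two cursors */

struct gc_state_like
{
	zone_like newspace;
	jmp_buf gc_unwind;              /* jump target registered by the collection driver */
};

static gc_state_like gc;
static gc_state_like *current_state = &gc;

/* Allocation path: if the target zone would overflow, bail out through the
   jmp_buf carried by the current GC state instead of throwing. */
static cell allot_or_unwind(cell size)
{
	if(current_state->newspace.here + size >= current_state->newspace.end)
		longjmp(current_state->gc_unwind,1);
	cell p = current_state->newspace.here;
	current_state->newspace.here += size;
	return p;
}

int main()
{
	gc.newspace.here = 0;
	gc.newspace.end = 16;
	if(setjmp(current_state->gc_unwind))
	{
		printf("newspace full: grow the heap or collect an older generation\n");
		return 0;
	}
	for(;;)
		allot_or_unwind(8);     /* the second allocation overflows and unwinds */
}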
@@ -502,7 +500,6 @@ void factor_vm::begin_gc(cell requested_bytes)
 
 void factor_vm::end_gc()
 {
 	gc_stats *s = &stats[current_gc->collecting_gen];
 
 	cell gc_elapsed = (current_micros() - current_gc->start_time);
@@ -545,77 +542,70 @@ void factor_vm::garbage_collection(cell collecting_gen_, bool growing_data_heap_
 
 	/* Keep trying to GC higher and higher generations until we don't run out
 	of space */
-	for(;;)
+	if(setjmp(current_gc->gc_unwind))
 	{
-		try
-		{
-			begin_gc(requested_bytes);
-
-			/* Initialize chase pointer */
-			cell scan = current_gc->newspace->here;
-
-			/* Trace objects referenced from global environment */
-			trace_roots();
-
-			/* Trace objects referenced from stacks, unless we're doing
-			save-image-and-exit in which case stack objects are irrelevant */
-			if(trace_contexts_) trace_contexts();
-
-			/* Trace objects referenced from older generations */
-			trace_cards();
-
-			/* On minor GC, trace code heap roots if it has pointers
-			to this generation or younger. Otherwise, tracing data heap objects
-			will mark all reachable code blocks, and we free the unmarked ones
-			after. */
-			if(!current_gc->collecting_tenured_p() && current_gc->collecting_gen >= last_code_heap_scan)
-			{
-				update_code_heap_roots();
-			}
-
-			/* do some copying -- this is where most of the work is done */
-			copy_reachable_objects(scan,&current_gc->newspace->here);
-
-			/* On minor GC, update literal references in code blocks, now that all
-			data heap objects are in their final location. On a major GC,
-			free all code blocks that did not get marked during tracing. */
-			if(current_gc->collecting_tenured_p())
-				free_unmarked_code_blocks();
-			else
-				update_dirty_code_blocks();
-
-			/* GC completed without any generations filling up; finish up */
-			break;
-		}
-		catch(const generation_full_condition &c)
-		{
-			/* We come back here if a generation is full */
-
-			/* We have no older generations we can try collecting, so we
-			resort to growing the data heap */
-			if(current_gc->collecting_tenured_p())
-			{
-				current_gc->growing_data_heap = true;
-
-				/* see the comment in unmark_marked() */
-				code->unmark_marked();
-			}
-			/* we try collecting aging space twice before going on to
-			collect tenured */
-			else if(data->have_aging_p()
-				&& current_gc->collecting_gen == data->aging()
-				&& !current_gc->collecting_aging_again)
-			{
-				current_gc->collecting_aging_again = true;
-			}
-			/* Collect the next oldest generation */
-			else
-			{
-				current_gc->collecting_gen++;
-			}
-		}
+		/* We come back here if a generation is full */
+
+		/* We have no older generations we can try collecting, so we
+		resort to growing the data heap */
+		if(current_gc->collecting_tenured_p())
+		{
+			current_gc->growing_data_heap = true;
+
+			/* see the comment in unmark_marked() */
+			code->unmark_marked();
+		}
+		/* we try collecting aging space twice before going on to
+		collect tenured */
+		else if(data->have_aging_p()
+			&& current_gc->collecting_gen == data->aging()
+			&& !current_gc->collecting_aging_again)
+		{
+			current_gc->collecting_aging_again = true;
+		}
+		/* Collect the next oldest generation */
+		else
+		{
+			current_gc->collecting_gen++;
+		}
 	}
 
+	begin_gc(requested_bytes);
+
+	/* Initialize chase pointer */
+	cell scan = current_gc->newspace->here;
+
+	/* Trace objects referenced from global environment */
+	trace_roots();
+
+	/* Trace objects referenced from stacks, unless we're doing
+	save-image-and-exit in which case stack objects are irrelevant */
+	if(trace_contexts_) trace_contexts();
+
+	/* Trace objects referenced from older generations */
+	trace_cards();
+
+	/* On minor GC, trace code heap roots if it has pointers
+	to this generation or younger. Otherwise, tracing data heap objects
+	will mark all reachable code blocks, and we free the unmarked ones
+	after. */
+	if(!current_gc->collecting_tenured_p() && current_gc->collecting_gen >= last_code_heap_scan)
+	{
+		update_code_heap_roots();
+	}
+
+	/* do some copying -- this is where most of the work is done */
+	copy_reachable_objects(scan,&current_gc->newspace->here);
+
+	/* On minor GC, update literal references in code blocks, now that all
+	data heap objects are in their final location. On a major GC,
+	free all code blocks that did not get marked during tracing. */
+	if(current_gc->collecting_tenured_p())
+		free_unmarked_code_blocks();
+	else
+		update_dirty_code_blocks();
+
+	/* GC completed without any generations filling up; finish up */
 	end_gc();
 
 	delete current_gc;
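In the rewritten garbage_collection() above, setjmp() is registered once; every longjmp() from the copier lands back on it, the if body escalates to an older generation (or decides to grow the heap), and execution falls through to retry the collection from begin_gc(). One standard caveat with this shape, worth noting even though it is not part of the diff: automatic variables that are modified after setjmp() and read after a longjmp() must be volatile-qualified, otherwise their values are indeterminate on re-entry. The retried state in the diff lives in the heap-allocated *current_gc rather than in locals, which sidesteps the issue; a hypothetical illustration of the rule itself:

#include <csetjmp>
#include <cstdio>

static jmp_buf unwind;

static void maybe_fail(int n)
{
	if(n < 2)
		longjmp(unwind,1);
}

int main()
{
	/* `tries` is written after setjmp() and read after longjmp(), so it is
	   declared volatile; without the qualifier its value on re-entry would
	   be indeterminate. */
	volatile int tries = 0;
	if(setjmp(unwind))
		printf("retrying, tries so far: %d\n",tries);
	tries = tries + 1;
	maybe_fail(tries);
	printf("done after %d tries\n",tries);
	return 0;
}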
@@ -34,6 +34,8 @@ struct gc_state {
 	/* GC start time, for benchmarking */
 	u64 start_time;
 
+	jmp_buf gc_unwind;
+
 	explicit gc_state(data_heap *data_, bool growing_data_heap_, cell collecting_gen_);
 	~gc_state();
 
@@ -4,7 +4,7 @@ namespace factor
 template<typename Array> cell array_capacity(Array *array)
 {
 #ifdef FACTOR_DEBUG
-	assert(array->h.hi_tag() == T::type_number);
+	assert(array->h.hi_tag() == Array::type_number);
 #endif
 	return array->capacity >> TAG_BITS;
 }
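The template above takes its parameter as `Array`, so the old assert against `T::type_number` referred to a name that does not exist; because the assert sits behind #ifdef FACTOR_DEBUG, the preprocessor strips it from release builds and the typo only breaks debug builds. A self-contained sketch of the same shape, where header, array_like, the tag value and the shift are all hypothetical stand-ins for the real Factor layout:

#include <cassert>

struct header { int hi_tag_; int hi_tag() const { return hi_tag_; } };
struct array_like { static const int type_number = 2; header h; int capacity; };

template<typename Array> int array_capacity(Array *array)
{
#ifdef FACTOR_DEBUG
	/* Writing `T::type_number` here instead of `Array::type_number` only
	   fails to compile when FACTOR_DEBUG is defined, since otherwise the
	   preprocessor removes the line before the compiler sees it. */
	assert(array->h.hi_tag() == Array::type_number);
#endif
	return array->capacity >> 3;       /* stand-in for `>> TAG_BITS` */
}

int main()
{
	array_like a = { { 2 }, 10 << 3 };
	return array_capacity(&a) == 10 ? 0 : 1;
}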
@ -46,11 +46,12 @@ LONG factor_vm::exception_handler(PEXCEPTION_POINTERS pe)
|
||||||
else
|
else
|
||||||
signal_callstack_top = NULL;
|
signal_callstack_top = NULL;
|
||||||
|
|
||||||
switch (e->ExceptionCode) {
|
switch (e->ExceptionCode)
|
||||||
case EXCEPTION_ACCESS_VIOLATION:
|
{
|
||||||
|
case EXCEPTION_ACCESS_VIOLATION:
|
||||||
signal_fault_addr = e->ExceptionInformation[1];
|
signal_fault_addr = e->ExceptionInformation[1];
|
||||||
c->EIP = (cell)factor::memory_signal_handler_impl;
|
c->EIP = (cell)factor::memory_signal_handler_impl;
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case STATUS_FLOAT_DENORMAL_OPERAND:
|
case STATUS_FLOAT_DENORMAL_OPERAND:
|
||||||
case STATUS_FLOAT_DIVIDE_BY_ZERO:
|
case STATUS_FLOAT_DIVIDE_BY_ZERO:
|
||||||
|
|
|
@@ -330,6 +330,7 @@ void factor_vm::compile_all_words()
 
 	}
 
+	printf("done\n");
 	/* Update XTs in code heap */
 	word_updater updater(this);
 	iterate_code_heap(updater);
@@ -0,0 +1,11 @@
+#include "master.hpp"
+
+namespace factor
+{
+
+factor_vm::factor_vm()
+{
+	memset(this,0,sizeof(factor_vm));
+}
+
+}
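The new file defines the factor_vm constructor out of line and zeroes the whole object with sizeof(factor_vm). The inline constructor it replaces (removed from the struct declaration in the last hunk below) did memset(this,0,sizeof(this)), and sizeof(this) is just the size of a pointer, so only the first few bytes were actually cleared. A small demonstration of the difference using a hypothetical struct:

#include <cstdio>
#include <cstring>

struct widget
{
	int a;
	int b[4];
};

int main()
{
	widget w;
	printf("sizeof(&w) = %zu, sizeof(widget) = %zu\n",
		sizeof(&w),sizeof(widget));              /* e.g. 8 vs 20 */

	memset(&w,0xff,sizeof(widget));                  /* dirty the whole object */
	memset(&w,0,sizeof(&w));                         /* the old bug: clears only a pointer-sized prefix */
	printf("b[3] after buggy memset: %d\n",w.b[3]);  /* still nonzero */

	memset(&w,0,sizeof(widget));                     /* the fix: clears every member */
	printf("b[3] after fixed memset: %d\n",w.b[3]);  /* now 0 */
	return 0;
}

Note that memset-ing *this from a constructor body also overwrites members that have already been default-constructed, so the pattern is only appropriate for structs whose members are all trivial.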
@@ -1,10 +1,10 @@
 namespace factor
 {
 
 struct factor_vm
 {
 	// First five fields accessed directly by assembler. See vm.factor
 	context *stack_chain;
 	zone nursery; /* new objects are allocated here */
 	cell cards_offset;
 	cell decks_offset;
@@ -101,20 +101,20 @@ struct factor_vm
 	bignum *bignum_multiply_unsigned_small_factor(bignum * x, bignum_digit_type y,int negative_p);
 	void bignum_destructive_add(bignum * bignum, bignum_digit_type n);
 	void bignum_destructive_scale_up(bignum * bignum, bignum_digit_type factor);
 	void bignum_divide_unsigned_large_denominator(bignum * numerator, bignum * denominator,
 		bignum * * quotient, bignum * * remainder, int q_negative_p, int r_negative_p);
 	void bignum_divide_unsigned_normalized(bignum * u, bignum * v, bignum * q);
 	bignum_digit_type bignum_divide_subtract(bignum_digit_type * v_start, bignum_digit_type * v_end,
 		bignum_digit_type guess, bignum_digit_type * u_start);
 	void bignum_divide_unsigned_medium_denominator(bignum * numerator,bignum_digit_type denominator,
 		bignum * * quotient, bignum * * remainder,int q_negative_p, int r_negative_p);
 	void bignum_destructive_normalization(bignum * source, bignum * target, int shift_left);
 	void bignum_destructive_unnormalization(bignum * bignum, int shift_right);
 	bignum_digit_type bignum_digit_divide(bignum_digit_type uh, bignum_digit_type ul,
 		bignum_digit_type v, bignum_digit_type * q) /* return value */;
 	bignum_digit_type bignum_digit_divide_subtract(bignum_digit_type v1, bignum_digit_type v2,
 		bignum_digit_type guess, bignum_digit_type * u);
 	void bignum_divide_unsigned_small_denominator(bignum * numerator, bignum_digit_type denominator,
 		bignum * * quotient, bignum * * remainder,int q_negative_p, int r_negative_p);
 	bignum_digit_type bignum_destructive_scale_down(bignum * bignum, bignum_digit_type denominator);
 	bignum * bignum_remainder_unsigned_small_denominator(bignum * n, bignum_digit_type d, int negative_p);
@@ -172,7 +172,7 @@ struct factor_vm
 	template<typename Iterator> void each_object(Iterator &iterator);
 	cell find_all_words();
 	cell object_size(cell tagged);
 
 	//write barrier
 	cell allot_markers_offset;
 
@@ -185,27 +185,27 @@ struct factor_vm
 	{
 		return ((cell)c - cards_offset) << card_bits;
 	}
 
 	inline cell card_offset(card *c)
 	{
 		return *(c - (cell)data->cards + (cell)data->allot_markers);
 	}
 
 	inline card_deck *addr_to_deck(cell a)
 	{
 		return (card_deck *)(((cell)a >> deck_bits) + decks_offset);
 	}
 
 	inline cell deck_to_addr(card_deck *c)
 	{
 		return ((cell)c - decks_offset) << deck_bits;
 	}
 
 	inline card *deck_to_card(card_deck *d)
 	{
 		return (card *)((((cell)d - decks_offset) << (deck_bits - card_bits)) + cards_offset);
 	}
 
 	inline card *addr_to_allot_marker(object *a)
 	{
 		return (card *)(((cell)a >> card_bits) + allot_markers_offset);
@@ -397,7 +397,7 @@ struct factor_vm
 	//math
 	cell bignum_zero;
 	cell bignum_pos_one;
 	cell bignum_neg_one;
 
 	void primitive_bignum_to_fixnum();
 	void primitive_float_to_fixnum();
@@ -484,7 +484,7 @@ struct factor_vm
 	inline double fixnum_to_float(cell tagged);
 	template<typename Type> Type *untag_check(cell value);
 	template<typename Type> Type *untag(cell value);
 
 	//io
 	void init_c_io();
 	void io_error();
@@ -535,7 +535,6 @@ struct factor_vm
 	//code_heap
 	heap *code;
 	unordered_map<heap_block *, char *> forwarding;
-	typedef void (factor_vm::*code_heap_iterator)(code_block *compiled);
 
 	void init_code_heap(cell size);
 	bool in_code_heap_p(cell ptr);
@@ -554,7 +553,7 @@ struct factor_vm
 	template<typename Iterator> void iterate_code_heap(Iterator &iter)
 	{
 		heap_block *scan = code->first_block();
 
 		while(scan)
 		{
 			if(scan->status != B_FREE)
@@ -606,7 +605,7 @@ struct factor_vm
 	void primitive_set_innermost_stack_frame_quot();
 	void save_callstack_bottom(stack_frame *callstack_bottom);
 	template<typename Iterator> void iterate_callstack(cell top, cell bottom, Iterator &iterator);
 
 	/* Every object has a regular representation in the runtime, which makes GC
 	much simpler. Every slot of the object until binary_payload_start is a pointer
 	to some other object. */
@@ -615,9 +614,9 @@ struct factor_vm
 		cell scan = obj;
 		cell payload_start = binary_payload_start((object *)obj);
 		cell end = obj + payload_start;
 
 		scan += sizeof(cell);
 
 		while(scan < end)
 		{
 			iter((cell *)scan);
@@ -725,11 +724,11 @@ struct factor_vm
 	const vm_char *default_image_path();
 	void windows_image_path(vm_char *full_path, vm_char *temp_path, unsigned int length);
 	bool windows_stat(vm_char *path);
 
 #if defined(WINNT)
 	void open_console();
 	LONG exception_handler(PEXCEPTION_POINTERS pe);
 	// next method here:
 #endif
 #else // UNIX
 	void memory_signal_handler(int signal, siginfo_t *siginfo, void *uap);
@@ -742,59 +741,50 @@ struct factor_vm
 #ifdef __APPLE__
 	void call_fault_handler(exception_type_t exception, exception_data_type_t code, MACH_EXC_STATE_TYPE *exc_state, MACH_THREAD_STATE_TYPE *thread_state, MACH_FLOAT_STATE_TYPE *float_state);
 #endif
 
-	factor_vm()
-	: profiling_p(false),
-	secure_gc(false),
-	gc_off(false),
-	fep_disabled(false),
-	full_output(false),
-	max_pic_size(0)
-	{
-		memset(this,0,sizeof(this)); // just to make sure
-	}
+	factor_vm();
 
 };
 
 #ifndef FACTOR_REENTRANT
 #define FACTOR_SINGLE_THREADED_TESTING
 #endif
 
 #ifdef FACTOR_SINGLE_THREADED_SINGLETON
 /* calls are dispatched using the singleton vm ptr */
 extern factor_vm *vm;
 #define PRIMITIVE_GETVM() vm
 #define PRIMITIVE_OVERFLOW_GETVM() vm
 #define VM_PTR vm
 #define ASSERTVM()
 #define SIGNAL_VM_PTR() vm
 #endif
 
 #ifdef FACTOR_SINGLE_THREADED_TESTING
 /* calls are dispatched as per multithreaded, but checked against singleton */
 extern factor_vm *vm;
 #define ASSERTVM() assert(vm==myvm)
 #define PRIMITIVE_GETVM() ((factor_vm*)myvm)
 #define PRIMITIVE_OVERFLOW_GETVM() ASSERTVM(); myvm
 #define VM_PTR myvm
 #define SIGNAL_VM_PTR() tls_vm()
 #endif
 
 #ifdef FACTOR_REENTRANT_TLS
 /* uses thread local storage to obtain vm ptr */
 #define PRIMITIVE_GETVM() tls_vm()
 #define PRIMITIVE_OVERFLOW_GETVM() tls_vm()
 #define VM_PTR tls_vm()
 #define ASSERTVM()
 #define SIGNAL_VM_PTR() tls_vm()
 #endif
 
 #ifdef FACTOR_REENTRANT
 #define PRIMITIVE_GETVM() ((factor_vm*)myvm)
 #define PRIMITIVE_OVERFLOW_GETVM() ((factor_vm*)myvm)
 #define VM_PTR myvm
 #define ASSERTVM()
 #define SIGNAL_VM_PTR() tls_vm()
 #endif
 
 extern unordered_map<THREADHANDLE, factor_vm *> thread_vms;
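The unchanged tail of the header lays out the build-time options for how primitives find their VM: a plain global pointer (FACTOR_SINGLE_THREADED_SINGLETON), the same global cross-checked against a passed-in pointer (FACTOR_SINGLE_THREADED_TESTING), or a thread-local lookup (FACTOR_REENTRANT_TLS / FACTOR_REENTRANT). A stripped-down sketch of that configuration pattern; vm_like, GETVM, USE_TLS_VM and tls_vm() are invented here, and only the idea of selecting the dispatch mechanism with a macro mirrors the header:

#include <cstdio>

struct vm_like { int counter; };

#if defined(USE_TLS_VM)
/* Reentrant flavour: each thread reaches its own VM through thread-local storage. */
static thread_local vm_like *tls_vm_instance;
static vm_like *tls_vm() { return tls_vm_instance; }
#define GETVM() tls_vm()
#else
/* Singleton flavour: one global VM pointer shared by everything. */
static vm_like *global_vm;
#define GETVM() global_vm
#endif

static void primitive_bump()
{
	GETVM()->counter++;    /* every "primitive" fetches the VM the same way */
}

int main()
{
	vm_like vm = { 0 };
#if defined(USE_TLS_VM)
	tls_vm_instance = &vm;
#else
	global_vm = &vm;
#endif
	primitive_bump();
	printf("counter = %d\n",vm.counter);
	return 0;
}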