/* factor/vm/data_gc.hpp: data heap garbage collector declarations */
void init_data_gc(void);
void gc(void);
DLLEXPORT void minor_gc(void);
/* statistics */
2009-05-02 11:17:05 -04:00
struct F_GC_STATS {
2009-05-02 05:04:19 -04:00
CELL collections;
u64 gc_time;
u64 max_gc_time;
CELL object_count;
u64 bytes_copied;
2009-05-02 11:17:05 -04:00
};
2009-05-02 05:04:19 -04:00
extern F_ZONE *newspace;
extern bool performing_compaction;
extern CELL collecting_gen;
extern bool collecting_aging_again;
/* True when the generation being collected accumulates live objects in
place across collections: the tenured generation, or the aging
generation when it is not being re-collected into itself. */
INLINE bool collecting_accumulation_gen_p(void)
{
	if(collecting_gen == TENURED)
		return true;

	return HAVE_AGING_P
		&& collecting_gen == AGING
		&& !collecting_aging_again;
}
extern CELL last_code_heap_scan;
void copy_handle(CELL *handle);
void garbage_collection(volatile CELL gen,
bool growing_data_heap_,
CELL requested_bytes);
/* We leave this many bytes free at the top of the nursery so that inline
allocation (which does not call GC because of possible roots in volatile
registers) does not run out of memory */
#define ALLOT_BUFFER_ZONE 1024
/*
* It is up to the caller to fill in the object's fields in a meaningful
* fashion!
*/
2009-05-02 10:19:09 -04:00
INLINE void *allot_object(CELL header, CELL size)
2009-05-02 05:04:19 -04:00
{
#ifdef GC_DEBUG
if(!gc_off)
gc();
#endif
2009-05-02 10:19:09 -04:00
F_OBJECT *object;
2009-05-02 05:04:19 -04:00
2009-05-02 10:19:09 -04:00
if(nursery.size - ALLOT_BUFFER_ZONE > size)
2009-05-02 05:04:19 -04:00
{
/* If there is insufficient room, collect the nursery */
2009-05-02 10:19:09 -04:00
if(nursery.here + ALLOT_BUFFER_ZONE + size > nursery.end)
2009-05-02 05:04:19 -04:00
garbage_collection(NURSERY,false,0);
CELL h = nursery.here;
2009-05-02 10:19:09 -04:00
nursery.here = h + align8(size);
object = (F_OBJECT *)h;
2009-05-02 05:04:19 -04:00
}
/* If the object is bigger than the nursery, allocate it in
tenured space */
else
{
F_ZONE *tenured = &data_heap->generations[TENURED];
/* If tenured space does not have enough room, collect */
2009-05-02 10:19:09 -04:00
if(tenured->here + size > tenured->end)
2009-05-02 05:04:19 -04:00
{
gc();
tenured = &data_heap->generations[TENURED];
}
/* If it still won't fit, grow the heap */
2009-05-02 10:19:09 -04:00
if(tenured->here + size > tenured->end)
2009-05-02 05:04:19 -04:00
{
2009-05-02 10:19:09 -04:00
garbage_collection(TENURED,true,size);
2009-05-02 05:04:19 -04:00
tenured = &data_heap->generations[TENURED];
}
2009-05-02 10:19:09 -04:00
object = (F_OBJECT *)allot_zone(tenured,size);
2009-05-02 05:04:19 -04:00
/* We have to do this */
allot_barrier((CELL)object);
/* Allows initialization code to store old->new pointers
without hitting the write barrier in the common case of
a nursery allocation */
write_barrier((CELL)object);
}
2009-05-02 10:19:09 -04:00
object->header = header;
2009-05-02 05:04:19 -04:00
return object;
}
2009-05-02 10:19:09 -04:00
template<typename T> T *allot(CELL size)
{
return (T *)allot_object(tag_header(T::type_number),size);
}
2009-05-02 05:04:19 -04:00
void copy_reachable_objects(CELL scan, CELL *end);
void primitive_gc(void);
void primitive_gc_stats(void);
void clear_gc_stats(void);
void primitive_clear_gc_stats(void);
void primitive_become(void);
extern bool growing_data_heap;
/* Debug-build sanity check: assert that POINTER lies inside the data
heap's segment. Skipped while the heap is being grown, since objects
may legitimately live outside the old segment then. No-op unless
compiled with FACTOR_DEBUG. */
INLINE void check_data_pointer(CELL pointer)
{
#ifdef FACTOR_DEBUG
	if(growing_data_heap)
		return;

	CELL lo = data_heap->segment->start;
	CELL hi = data_heap->segment->end;
	assert(lo <= pointer && pointer < hi);
#endif
}