Example #1
void factor_vm::gc(gc_op op, cell requested_bytes, bool trace_contexts_p)
{
	assert(!gc_off);
	assert(!current_gc);

	save_stacks();

	current_gc = new gc_state(op,this);

	/* Keep trying to GC higher and higher generations until the
	collection no longer runs out of space */
	if(setjmp(current_gc->gc_unwind))
	{
		/* A collector longjmps back here when a generation fills up;
		start_gc_again() escalates to the next op and we fall through
		to retry the switch below */
		start_gc_again();
	}

	current_gc->event->op = current_gc->op;

	switch(current_gc->op)
	{
	case collect_nursery_op:
		collect_nursery();
		break;
	case collect_aging_op:
		collect_aging();
		if(data->high_fragmentation_p())
		{
			current_gc->op = collect_full_op;
			current_gc->event->op = collect_full_op;
			collect_full(trace_contexts_p);
		}
		break;
	case collect_to_tenured_op:
		collect_to_tenured();
		if(data->high_fragmentation_p())
		{
			current_gc->op = collect_full_op;
			current_gc->event->op = collect_full_op;
			collect_full(trace_contexts_p);
		}
		break;
	case collect_full_op:
		collect_full(trace_contexts_p);
		break;
	case collect_compact_op:
		collect_compact(trace_contexts_p);
		break;
	case collect_growing_heap_op:
		collect_growing_heap(requested_bytes,trace_contexts_p);
		break;
	default:
		critical_error("Bad GC op",current_gc->op);
		break;
	}

	end_gc();

	delete current_gc;
	current_gc = NULL;
}
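
The allocation failure path that jumps back to the setjmp above is not part of this snippet. Below is a minimal, self-contained sketch of the same setjmp/longjmp retry pattern; the gc_state layout, the escalation in start_gc_again(), and the simulated failure are illustrative assumptions, not the Factor VM's actual internals.

/* Sketch of the setjmp/longjmp retry loop in example #1.
   Everything here is a stand-in, not the real Factor VM API. */
#include <csetjmp>
#include <cstdio>

struct gc_state
{
	std::jmp_buf gc_unwind; /* jump target saved in gc() */
	int op;                 /* current collection op (0 = smallest) */
};

static gc_state *current_gc = NULL;

/* Called from deep inside a collector when the target generation
   fills up: unwind straight back to the setjmp in gc(). */
static void generation_full()
{
	std::longjmp(current_gc->gc_unwind, 1);
}

/* Escalate to the next-larger collection op (illustrative). */
static void start_gc_again()
{
	current_gc->op++;
}

static void gc()
{
	/* Heap-allocated, as in the original, so its fields keep their
	   values across the longjmp. */
	current_gc = new gc_state;
	current_gc->op = 0;

	if(setjmp(current_gc->gc_unwind))
	{
		/* Re-entered via longjmp: pick a bigger op and fall
		   through to retry the collection below. */
		start_gc_again();
	}

	std::printf("collecting with op %d\n", current_gc->op);
	if(current_gc->op < 2)
		generation_full(); /* simulate the generation filling up twice */

	std::printf("collection succeeded\n");
	delete current_gc;
	current_gc = NULL;
}

int main()
{
	gc();
	return 0;
}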
Example #2
File: gc.cpp  Project: Renha/factor
void factor_vm::gc(gc_op op, cell requested_size, bool trace_contexts_p)
{
    FACTOR_ASSERT(!gc_off);
    FACTOR_ASSERT(!current_gc);

    /* Important invariant: tenured space must have enough contiguous free
    space to fit the entire contents of the aging space and nursery. This is
    because when doing a full collection, objects from younger generations
    are promoted before any unreachable tenured objects are freed. */
    FACTOR_ASSERT(!data->high_fragmentation_p());

    current_gc = new gc_state(op,this);
    atomic::store(&current_gc_p, true);

    /* Keep trying to GC higher and higher generations until we no longer
    run out of space in the target generation. */
    for(;;)
    {
        try
        {
            if(gc_events) current_gc->event->op = current_gc->op;

            switch(current_gc->op)
            {
            case collect_nursery_op:
                collect_nursery();
                break;
            case collect_aging_op:
                /* We end up here when the nursery collection above
                ran out of space and the op was escalated. */
                collect_aging();
                if(data->high_fragmentation_p())
                {
                    /* Change GC op so that if we fail again,
                    we crash. */
                    set_current_gc_op(collect_full_op);
                    collect_full(trace_contexts_p);
                }
                break;
            case collect_to_tenured_op:
                /* We end up here when the aging collection above
                ran out of space and the op was escalated. */
                collect_to_tenured();
                if(data->high_fragmentation_p())
                {
                    /* Change GC op so that if we fail again,
                    we crash. */
                    set_current_gc_op(collect_full_op);
                    collect_full(trace_contexts_p);
                }
                break;
            case collect_full_op:
                collect_full(trace_contexts_p);
                break;
            case collect_compact_op:
                collect_compact(trace_contexts_p);
                break;
            case collect_growing_heap_op:
                collect_growing_heap(requested_size,trace_contexts_p);
                break;
            default:
                critical_error("Bad GC op",current_gc->op);
                break;
            }

            break;
        }
        catch(const must_start_gc_again &)
        {
            /* We come back here if the target generation is full. */
            start_gc_again();
            continue;
        }
    }

    end_gc();

    atomic::store(&current_gc_p, false);
    delete current_gc;
    current_gc = NULL;

    /* Check the invariant again, just in case. */
    FACTOR_ASSERT(!data->high_fragmentation_p());
}
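
This later version replaces the setjmp unwind with a C++ exception: a collector throws must_start_gc_again when the target generation fills, and the for(;;) loop catches it, escalates, and retries. A minimal, self-contained sketch of that shape follows; the throw site, collect(), and the integer op escalation are illustrative assumptions (only the must_start_gc_again catch mirrors the code above).

/* Sketch of the exception-based retry loop in example #2.
   The throw site and escalation are illustrative, not Factor's API. */
#include <cstdio>

/* Empty tag type standing in for Factor's must_start_gc_again. */
struct must_start_gc_again {};

static int current_op = 0;

/* Simulate a collector that runs out of space twice before fitting. */
static void collect()
{
    if(current_op < 2)
        throw must_start_gc_again();
    std::printf("collection with op %d succeeded\n", current_op);
}

/* Escalate to a larger collection op (illustrative). */
static void start_gc_again()
{
    current_op++;
}

static void gc()
{
    /* Keep retrying until a collection completes without throwing. */
    for(;;)
    {
        try
        {
            collect();
            break; /* success: leave the retry loop */
        }
        catch(const must_start_gc_again &)
        {
            start_gc_again();
            continue;
        }
    }
}

int main()
{
    gc();
    return 0;
}

Unlike the longjmp in example #1, throwing an exception unwinds the intervening stack frames properly, running destructors for any locals in the collector's call chain along the way.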