Example #1
void factor_vm::collect_compact() {
  collect_mark_impl();
  collect_compact_impl();

  if (data->high_fragmentation_p()) {
    // Compaction did not free up enough memory. Grow the heap.
    set_current_gc_op(collect_growing_heap_op);
    collect_growing_heap(0);
  }

  code->flush_icache();
}
Example #2
void factor_vm::collect_compact(bool trace_contexts_p)
{
	collect_mark_impl(trace_contexts_p);
	collect_compact_impl(trace_contexts_p);
	
	if(data->high_fragmentation_p())
	{
		/* Compaction did not free up enough memory. Grow the heap. */
		set_current_gc_op(collect_growing_heap_op);
		collect_growing_heap(0,trace_contexts_p);
	}

	code->flush_icache();
}
Example #3
void factor_vm::gc(gc_op op, cell requested_size) {
  FACTOR_ASSERT(!gc_off);
  FACTOR_ASSERT(!current_gc);

  /* Important invariant: tenured space must have enough contiguous free
     space to fit the entire contents of the aging space and nursery. This is
     because when doing a full collection, objects from younger generations
     are promoted before any unreachable tenured objects are freed. */
  FACTOR_ASSERT(!data->high_fragmentation_p());

  current_gc = new gc_state(op, this);
  if (ctx)
    ctx->callstack_seg->set_border_locked(false);
  atomic::store(&current_gc_p, true);

  /* Keep trying to GC higher and higher generations until we don't run
     out of space in the target generation. */
  for (;;) {
    try {
      if (gc_events)
        current_gc->event->op = current_gc->op;

      switch (current_gc->op) {
        case collect_nursery_op:
          collect_nursery();
          break;
        case collect_aging_op:
          /* We end up here if the above fails. */
          collect_aging();
          if (data->high_fragmentation_p()) {
            /* Change GC op so that if we fail again, we crash. */
            set_current_gc_op(collect_full_op);
            collect_full();
          }
          break;
        case collect_to_tenured_op:
          /* We end up here if the above fails. */
          collect_to_tenured();
          if (data->high_fragmentation_p()) {
            /* Change GC op so that if we fail again, we crash. */
            set_current_gc_op(collect_full_op);
            collect_full();
          }
          break;
        case collect_full_op:
          collect_full();
          break;
        case collect_compact_op:
          collect_compact();
          break;
        case collect_growing_heap_op:
          collect_growing_heap(requested_size);
          break;
        default:
          critical_error("in gc, bad GC op", current_gc->op);
          break;
      }

      break;
    }
    catch (const must_start_gc_again&) {
      /* We come back here if the target generation is full. */
      start_gc_again();
      continue;
    }
  }

  end_gc();

  atomic::store(&current_gc_p, false);
  if (ctx)
    ctx->callstack_seg->set_border_locked(true);
  delete current_gc;
  current_gc = NULL;

  /* Check the invariant again, just in case. */
  FACTOR_ASSERT(!data->high_fragmentation_p());
}
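
For context, here is a minimal sketch of how the gc() dispatcher in Example #3 is typically reached from the rest of the VM. The primitive wrapper names below are assumptions for illustration (they mirror the gc_op values used in Example #3); consult the actual Factor VM sources for the current entry points and signatures.

/* Hypothetical sketch of GC entry points that funnel into gc() above.
   The function names are assumptions; only the gc_op constants and the
   gc(op, requested_size) signature are taken from Example #3. */
void factor_vm::primitive_minor_gc() {
  /* Collect only the nursery; gc() escalates to older generations on its
     own if the target generation fills up. */
  gc(collect_nursery_op, 0);
}

void factor_vm::primitive_full_gc() {
  /* Mark and sweep the entire heap. */
  gc(collect_full_op, 0);
}

void factor_vm::primitive_compact_gc() {
  /* Mark, then slide live objects together; this path reaches the
     collect_compact() shown in Examples #1 and #2. */
  gc(collect_compact_op, 0);
}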