Exemplo n.º 1
0
/* Finish the major GC marking phase in one non-incremental pass.
 *
 * Enters GC stack mode, darkens (greys) all roots reachable from this
 * domain's local roots and from the global root set, then drains the
 * mark stack to completion.  Finally resets the per-domain allocation
 * counter (used elsewhere to budget incremental slices) and leaves GC
 * stack mode.
 *
 * NOTE(review): presumably must be called from a GC-safe point, since
 * it brackets the work with caml_save_stack_gc/caml_restore_stack_gc —
 * confirm against callers. */
void caml_finish_marking () {
  caml_save_stack_gc();
  /* Grey everything reachable from this domain's local roots... */
  caml_do_local_roots(&caml_darken, caml_domain_self());
  /* ...and from the global roots. */
  caml_scan_global_roots(&caml_darken);
  /* Mark to completion: no work budget here, unlike the slice path. */
  caml_empty_mark_stack();
  /* Restart allocation accounting for the next GC pacing decision. */
  caml_domain_state->allocated_words = 0;
  caml_restore_stack_gc();
}
Exemplo n.º 2
0
/* Allocate a block of `wosize` words with tag `tag` on this domain's
 * shared (major) heap.
 *
 * Used while the minor GC is running, so failure is unrecoverable: the
 * collector cannot back out mid-promotion, hence caml_fatal_error.
 *
 * Returns the block as an OCaml value (Val_hp converts the header
 * pointer returned by the allocator into a value pointer).
 *
 * Fix: the NULL check now precedes the allocation accounting, so the
 * GC pacing counter (allocated_words) is only charged for allocations
 * that actually succeeded. */
static value alloc_shared(mlsize_t wosize, tag_t tag)
{
  void* mem = caml_shared_try_alloc(caml_domain_self()->shared_heap, wosize,
                                    tag, 0 /* not promotion */);
  if (mem == NULL) {
    caml_fatal_error("allocation failure during minor GC");
  }
  caml_domain_state->allocated_words += Whsize_wosize(wosize);
  return Val_hp(mem);
}
Exemplo n.º 3
0
/* Run one incremental slice of the major GC.
 *
 * howmuch: requested work budget in words; 0 means "let the collector
 * decide" via default_slice_budget().
 *
 * The slice spends a single shared budget first on sweeping, then on
 * marking; sweep_work/mark_work record how much each phase consumed
 * (for logging).  Returns the budget that was allotted to this slice
 * (computed_work), not the work actually performed. */
intnat caml_major_collection_slice(intnat howmuch)
{
  intnat computed_work = howmuch ? howmuch : default_slice_budget();
  intnat budget = computed_work;
  intnat sweep_work, mark_work;
  uintnat blocks_marked_before = stat_blocks_marked;
  value v;

  caml_save_stack_gc();

  /* Sweep first; caml_sweep returns the unconsumed remainder of the
     budget, so sweep_work ends up as the amount actually spent. */
  sweep_work = budget;
  budget = caml_sweep(caml_domain_self()->shared_heap, budget);
  sweep_work -= budget;

  /* First slice of a new cycle: grey the roots and enter marking. */
  if (gc_phase == Phase_idle) {
    caml_do_local_roots(&caml_darken, caml_domain_self());
    caml_scan_global_roots(&caml_darken);
    gc_phase = Phase_marking;
  }

  /* Spend whatever budget sweeping left over on marking. */
  mark_work = budget;
  if (mark_stack_pop(&v))
    budget = mark(v, budget);
  mark_work -= budget;

  caml_gc_log("Major slice: %lu alloc, %ld work, %ld sweep, %ld mark (%lu blocks)",
              (unsigned long)caml_domain_state->allocated_words,
              (long)computed_work, (long)sweep_work, (long)mark_work,
              (unsigned long)(stat_blocks_marked - blocks_marked_before));
  /* Reset the allocation counter that feeds default_slice_budget(). */
  caml_domain_state->allocated_words = 0;
  caml_restore_stack_gc();

  /* Leftover budget after both phases means there was nothing left to
     sweep or mark: trigger a stop-the-world step to finish the cycle. */
  if (budget > 0) {
    caml_trigger_stw_gc();
    caml_handle_gc_interrupt();
  }


  return computed_work;
}
Exemplo n.º 4
0
/* Allocate and initialise the per-domain shared (major) heap state.
 * The returned structure is owned by the calling domain. */
struct caml_heap_state* caml_init_shared_heap() {
  int sc;
  struct caml_heap_state* h;

  /* The pool free-list lock is global; only the main domain sets it up. */
  if (caml_domain_self()->is_main) {
    caml_plat_mutex_init(&pool_freelist.lock);
  }

  Assert(NOT_MARKABLE == Promotedhd_hd(0));

  h = caml_stat_alloc(sizeof(struct caml_heap_state));
  h->free_pools = 0;
  h->num_free_pools = 0;
  /* Start with every per-sizeclass pool list empty. */
  for (sc = 0; sc < NUM_SIZECLASSES; sc++) {
    h->avail_pools[sc] = 0;
    h->full_pools[sc] = 0;
    h->unswept_avail_pools[sc] = 0;
    h->unswept_full_pools[sc] = 0;
  }
  h->next_to_sweep = 0;
  h->swept_large = 0;
  h->unswept_large = 0;
  h->owner = caml_domain_self();
  h->pools_allocated = 0;
  h->large_bytes_allocated = 0;
  return h;
}
Exemplo n.º 5
0
/* Debug pass: walk the heap from all roots and check every reachable
 * object (verify_object), using an explicit work stack filled by
 * verify_push.  Afterwards all verification state (seen-map, stack,
 * counters) is torn down so the check can be run again later. */
static void verify_heap() {
  caml_save_stack_gc();

  caml_do_local_roots(&verify_push, caml_domain_self());
  caml_scan_global_roots(&verify_push);
  /* Drain the stack; verify_object may push further objects. */
  while (verify_sp) verify_object(verify_stack[--verify_sp]);
  caml_gc_log("Verify: %lu objs", verify_objs);

  /* Reset all verification state. */
  caml_addrmap_clear(&verify_seen);
  verify_objs = 0;
  caml_stat_free(verify_stack);
  verify_stack = 0;
  verify_stack_len = 0;
  verify_sp = 0;
  caml_restore_stack_gc();
}
Exemplo n.º 6
0
/* Create a fresh per-domain shared heap state with all pool lists
 * empty, counters zeroed, and statistics cleared.  Owned by the
 * calling domain. */
struct caml_heap_state* caml_init_shared_heap() {
  int sc;
  struct caml_heap_state* h;

  h = caml_stat_alloc(sizeof(struct caml_heap_state));
  h->free_pools = 0;
  h->num_free_pools = 0;
  /* Every sizeclass starts with empty avail/full/unswept lists. */
  for (sc = 0; sc < NUM_SIZECLASSES; sc++) {
    h->avail_pools[sc] = 0;
    h->full_pools[sc] = 0;
    h->unswept_avail_pools[sc] = 0;
    h->unswept_full_pools[sc] = 0;
  }
  h->next_to_sweep = 0;
  h->swept_large = 0;
  h->unswept_large = 0;
  h->owner = caml_domain_self();
  memset(&h->stats, 0, sizeof(h->stats));
  return h;
}
Exemplo n.º 7
0
static uintnat default_slice_budget() {
  /*
     Free memory at the start of the GC cycle (garbage + free list) (assumed):
                 FM = caml_stat_heap_size * caml_percent_free
                      / (100 + caml_percent_free)

     Assuming steady state and enforcing a constant allocation rate, then
     FM is divided in 2/3 for garbage and 1/3 for free list.
                 G = 2 * FM / 3
     G is also the amount of memory that will be used during this cycle
     (still assuming steady state).

     Proportion of G consumed since the previous slice:
                 PH = caml_domain_state->allocated_words / G
                    = caml_domain_state->allocated_words * 3 * (100 + caml_percent_free)
                      / (2 * caml_stat_heap_size * caml_percent_free)
     Proportion of extra-heap resources consumed since the previous slice:
                 PE = caml_extra_heap_resources
     Proportion of total work to do in this slice:
                 P  = max (PH, PE)
     Amount of marking work for the GC cycle:
                 MW = caml_stat_heap_size * 100 / (100 + caml_percent_free)
     Amount of sweeping work for the GC cycle:
                 SW = caml_stat_heap_size

     Total amount of work for the GC cycle:
                 TW = MW + SW

     Amount of work to do for this slice:
                 W = P * TW
  */
  uintnat heap_size = caml_heap_size(caml_domain_self()->shared_heap);
  double heap_words = (double)Wsize_bsize(heap_size);
  double p = (double) caml_domain_state->allocated_words * 3.0 * (100 + caml_percent_free)
      / heap_words / caml_percent_free / 2.0;

  double total_work =
    heap_words * 100 / (100 + caml_percent_free) /* marking */
    + heap_words; /* sweeping */

  return (intnat)(p * total_work);
  //return 1ll << 50;
}
Exemplo n.º 8
0
/* Make sure the minor heap is empty by performing a minor collection if
 * needed.
 *
 * Promotes all live minor objects to the shared heap, rewrites
 * remembered-set fields to point at the promoted copies (via CAS), and
 * resets the minor heap and remembered set.  Dirty fiber stacks are
 * darkened for the major GC even when no minor collection was needed. */
void caml_empty_minor_heap (void)
{
  uintnat minor_allocated_bytes = caml_domain_state->young_end - caml_domain_state->young_ptr;
  unsigned rewritten = 0;
  struct caml_ref_entry *r;

  caml_save_stack_gc();

  stat_live_bytes = 0;

  if (minor_allocated_bytes != 0){
    caml_gc_log ("Minor collection starting");
    /* Promote everything reachable from this domain's local roots. */
    caml_do_local_roots(&caml_oldify_one, caml_domain_self());

    /* Promote minor objects referenced through recorded major->minor
       pointers in the remembered set. */
    for (r = caml_domain_state->remembered_set->ref.base; r < caml_domain_state->remembered_set->ref.ptr; r++){
      value x;
      caml_oldify_one (Op_val(r->obj)[r->field], &x);
    }

    /* Promote objects reachable from dirty fiber stacks. */
    for (r = caml_domain_state->remembered_set->fiber_ref.base; r < caml_domain_state->remembered_set->fiber_ref.ptr; r++) {
      caml_scan_dirty_stack(&caml_oldify_one, r->obj);
    }

    caml_oldify_mopup ();

    /* Second pass: swing each remembered field from the (now promoted)
       minor object to its major-heap copy. */
    for (r = caml_domain_state->remembered_set->ref.base; r < caml_domain_state->remembered_set->ref.ptr; r++){
      value v = Op_val(r->obj)[r->field];
      if (Is_block(v) && Is_young(v)) {
        Assert (Hp_val (v) >= caml_domain_state->young_ptr);
        value vnew;
        header_t hd = Hd_val(v);
        // FIXME: call oldify_one here?
        if (Is_promoted_hd(hd)) {
          /* Promoted-in-place object: new location comes from the
             promotion address map. */
          vnew = caml_addrmap_lookup(&caml_domain_state->remembered_set->promotion, v);
        } else {
          int offset = 0;
          /* Infix pointers point inside a closure block: step back to
             the block start before reading the new address, then
             re-apply the offset. */
          if (Tag_hd(hd) == Infix_tag) {
            offset = Infix_offset_hd(hd);
            v -= offset;
          }
          /* NOTE(review): a zero header here presumably marks a
             forwarded object whose first field is the new address —
             confirm against caml_oldify_one. */
          Assert (Hd_val (v) == 0);
          vnew = Op_val(v)[0] + offset;
        }
        Assert(Is_block(vnew) && !Is_young(vnew));
        Assert(Hd_val(vnew));
        if (Tag_hd(hd) == Infix_tag) { Assert(Tag_val(vnew) == Infix_tag); }
        /* CAS because the field may be raced on; only count updates
           that actually landed. */
        rewritten += caml_atomic_cas_field(r->obj, r->field, v, vnew);
      }
    }

    caml_addrmap_iter(&caml_domain_state->remembered_set->promotion, unpin_promoted_object);

    if (caml_domain_state->young_ptr < caml_domain_state->young_start)
      caml_domain_state->young_ptr = caml_domain_state->young_start;
    caml_stat_minor_words += Wsize_bsize (minor_allocated_bytes);
    /* Minor heap is now empty: reset the allocation pointer and clear
       all remembered-set bookkeeping. */
    caml_domain_state->young_ptr = caml_domain_state->young_end;
    clear_table (&caml_domain_state->remembered_set->ref);
    caml_addrmap_clear(&caml_domain_state->remembered_set->promotion);
    caml_addrmap_clear(&caml_domain_state->remembered_set->promotion_rev);
    caml_gc_log ("Minor collection completed: %u of %u kb live, %u pointers rewritten",
                 (unsigned)stat_live_bytes/1024, (unsigned)minor_allocated_bytes/1024, rewritten);
  }

  /* Runs even if no collection happened: darken dirty fiber stacks for
     the major GC and mark them clean. */
  for (r = caml_domain_state->remembered_set->fiber_ref.base; r < caml_domain_state->remembered_set->fiber_ref.ptr; r++) {
    caml_scan_dirty_stack(&caml_darken, r->obj);
    caml_clean_stack(r->obj);
  }
  clear_table (&caml_domain_state->remembered_set->fiber_ref);

  caml_restore_stack_gc();

#ifdef DEBUG
  /* Poison the now-empty minor heap so stale reads are caught. */
  {
    value *p;
    for (p = (value *) caml_domain_state->young_start;
         p < (value *) caml_domain_state->young_end; ++p){
      *p = Debug_free_minor;
    }
    ++ minor_gc_counter;
  }
#endif
}
Exemplo n.º 9
0
/* Empty the minor heap of the current domain.
 *
 * Thin wrapper: delegates to caml_empty_minor_heap_domain for the
 * calling domain.
 *
 * Fix: declare the empty parameter list as (void) — a bare `()` in a
 * definition is the obsolete K&R form (no prototype), removed in C23. */
void caml_empty_minor_heap (void)
{
  caml_empty_minor_heap_domain (caml_domain_self());
}
Exemplo n.º 10
0
/* Unpin a shared-heap block owned by the current domain: clear its
 * NOT_MARKABLE status and set it back to UNMARKED so the major GC can
 * mark and collect it normally again.  The asserts pin down the
 * contract: v is a non-minor block, owned by this domain, and is
 * currently NOT_MARKABLE. */
void caml_shared_unpin(value v) {
  Assert (Is_block(v) && !Is_minor(v));
  Assert (caml_owner_of_shared_block(v) == caml_domain_self());
  Assert (Has_status_hd(Hd_val(v), NOT_MARKABLE));
  Hd_val(v) = With_status_hd(Hd_val(v), global.UNMARKED);
}