Example no. 1
static void unpin_promoted_object(value local, value promoted)
{
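  /* [local] (minor heap) was promoted to [promoted] (shared heap); the
     promotion maps record this in both directions.  Drop the pin taken
     during promotion and darken [promoted] so the major GC keeps it. */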
  Assert (caml_addrmap_lookup(&caml_domain_state->remembered_set->promotion, local) == promoted);
  Assert (caml_addrmap_lookup(&caml_domain_state->remembered_set->promotion_rev, promoted) == local);
  caml_shared_unpin(promoted);
  caml_darken(promoted, 0);
}
Example no. 2
void caml_modify (value *fp, value val)
{
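    /* Generic write barrier: save the overwritten value, perform the
       store, then (for major-heap locations) darken the old value during
       marking and record any new major->minor pointer in caml_ref_table. */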
    value _old_ = *(fp);
    *(fp) = (val);
    if (Is_in_heap (fp)){
        if (caml_gc_phase == Phase_mark) caml_darken (_old_, NULL);
        if (Is_block (val) && Is_young (val)
            && ! (Is_block (_old_) && Is_young (_old_))){
            if (caml_ref_table.ptr >= caml_ref_table.limit){
                caml_realloc_ref_table (&caml_ref_table);
            }
            *caml_ref_table.ptr++ = (fp);
        }
    }
}
Example no. 3
CAMLexport CAMLweakdef void caml_modify (value *fp, value val)
{
  /* The write barrier implemented by [caml_modify] checks for the
     following two conditions and takes appropriate action:
     1- a pointer from the major heap to the minor heap is created
        --> add [fp] to the remembered set
     2- a pointer from the major heap to the major heap is overwritten,
        while the GC is in the marking phase
        --> call [caml_darken] on the overwritten pointer so that the
            major GC treats it as an additional root.
  */
  value old;

  if (Is_young((value)fp)) {
    /* The modified object resides in the minor heap.
       Conditions 1 and 2 cannot occur. */
    *fp = val;
  } else {
    /* The modified object resides in the major heap. */
    CAMLassert(Is_in_heap(fp));
    old = *fp;
    *fp = val;
    if (Is_block(old)) {
      /* If [old] is a pointer within the minor heap, we already
         have a major->minor pointer and [fp] is already in the
         remembered set.  Conditions 1 and 2 cannot occur. */
      if (Is_young(old)) return;
      /* Here, [old] can be a pointer within the major heap.
         Check for condition 2. */
      if (caml_gc_phase == Phase_mark) caml_darken(old, NULL);
    }
    /* Check for condition 1. */
    if (Is_block(val) && Is_young(val)) {
      /* Add [fp] to remembered set */
      if (caml_ref_table.ptr >= caml_ref_table.limit){
        CAMLassert (caml_ref_table.ptr == caml_ref_table.limit);
        caml_realloc_ref_table (&caml_ref_table);
      }
      *caml_ref_table.ptr++ = fp;
    }
  }
}
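As context for how this barrier is reached, here is a minimal sketch of a C stub that mutates a field of an OCaml block. The public Store_field macro from <caml/memory.h> expands to a call to caml_modify, so the checks above run on every such store; the stub name set_first_field is hypothetical.

#include <caml/mlvalues.h>
#include <caml/memory.h>

CAMLprim value set_first_field(value r, value v)
{
  CAMLparam2(r, v);
  /* Store_field(r, 0, v) expands to caml_modify(&Field(r, 0), v),
     so conditions 1 and 2 above are checked on this store. */
  Store_field(r, 0, v);
  CAMLreturn(Val_unit);
}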
Example no. 4
CAMLprim value caml_weak_get (value ar, value n)
{
  CAMLparam2 (ar, n);
  mlsize_t offset = Long_val (n) + 1;
  CAMLlocal2 (res, elt);
  Assert (Is_in_heap (ar));
  if (offset < 1 || offset >= Wosize_val (ar)){
    caml_invalid_argument ("Weak.get");
  }
  if (Field (ar, offset) == caml_weak_none){
    res = None_val;
  }else{
    elt = Field (ar, offset);
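    /* During marking, darken [elt]: the returned value must survive the
       current major cycle even if this weak pointer was its only link. */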
    if (caml_gc_phase == Phase_mark && Is_block (elt) && Is_in_heap (elt)){
      caml_darken (elt, NULL);
    }
    res = caml_alloc_small (1, Some_tag);
    Field (res, 0) = elt;
  }
  CAMLreturn (res);
}
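A sketch of a C caller for this primitive. caml_weak_get is not declared in the public caml/ headers, so the extern declaration (and the helper name weak_get_or) are assumptions for illustration.

#include <caml/mlvalues.h>
#include <caml/memory.h>

extern value caml_weak_get (value ar, value n);

/* Return element [i] of weak array [w], or [def] if it has been erased. */
static value weak_get_or (value w, mlsize_t i, value def)
{
  CAMLparam2 (w, def);
  CAMLlocal1 (opt);
  opt = caml_weak_get (w, Val_long (i));
  /* None is the immediate Val_int(0); Some is a one-field block. */
  CAMLreturn (Is_block (opt) ? Field (opt, 0) : def);
}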
Example no. 5
CAMLprim value caml_weak_get_copy (value ar, value n)
{
  CAMLparam2 (ar, n);
  mlsize_t offset = Long_val (n) + 1;
  CAMLlocal2 (res, elt);
  value v;  /* Caution: this is NOT a local root. */
  Assert (Is_in_heap (ar));
  if (offset < 1 || offset >= Wosize_val (ar)){
    caml_invalid_argument ("Weak.get");
  }

  v = Field (ar, offset);
  if (v == caml_weak_none) CAMLreturn (None_val);
  if (Is_block (v) && Is_in_heap_or_young(v)) {
    elt = caml_alloc (Wosize_val (v), Tag_val (v));
          /* The GC may erase or move v during this call to caml_alloc. */
    v = Field (ar, offset);
    if (v == caml_weak_none) CAMLreturn (None_val);
    if (Tag_val (v) < No_scan_tag){
      mlsize_t i;
      for (i = 0; i < Wosize_val (v); i++){
        value f = Field (v, i);
        if (caml_gc_phase == Phase_mark && Is_block (f) && Is_in_heap (f)){
          caml_darken (f, NULL);
        }
        Modify (&Field (elt, i), f);
      }
    }else{
      memmove (Bp_val (elt), Bp_val (v), Bosize_val (v));
    }
  }else{
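    /* [v] is an immediate or a block outside the heaps: no copy needed. */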
    elt = v;
  }
  res = caml_alloc_small (1, Some_tag);
  Field (res, 0) = elt;

  CAMLreturn (res);
}
Example no. 6
CAMLexport value caml_promote(struct domain* domain, value root)
{
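  /* Promote [root] from [domain]'s minor heap to the shared heap, then
     either run a full minor collection or rewrite every pointer into the
     promoted range (roots, stacks, remembered sets, younger objects). */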
  value **r;
  value iter, f;
  mlsize_t i;
  caml_domain_state* domain_state = domain->state;
  struct caml_minor_tables *minor_tables = domain_state->minor_tables;
  char* young_ptr = domain_state->young_ptr;
  char* young_end = domain_state->young_end;
  float percent_to_scan;
  uintnat prev_alloc_words = domain_state->allocated_words;
  struct oldify_state st = {0};
  struct caml_ephe_ref_elt *re;

  /* Integers are already shared */
  if (Is_long(root))
    return root;

  /* Objects which are in the major heap are already shared. */
  if (!Is_minor(root))
    return root;

  st.oldest_promoted = (value)domain_state->young_start;
  st.promote_domain = domain;

  CAMLassert(caml_owner_of_young_block(root) == domain);
  oldify_one (&st, root, &root);
  oldify_mopup (&st);

  CAMLassert (!Is_minor(root));
  /* FIXME: surely a newly-allocated root is already darkened? */
  caml_darken(0, root, 0);

  percent_to_scan = st.oldest_promoted <= (value)young_ptr ? 0.0 :
    (((float)(st.oldest_promoted - (value)young_ptr)) * 100.0 /
     ((value)young_end - (value)domain_state->young_start));

  if (percent_to_scan > Percent_to_promote_with_GC) {
    caml_gc_log("caml_promote: forcing minor GC. %%_minor_to_scan=%f", percent_to_scan);
    caml_empty_minor_heap_domain (domain);
  } else {
    caml_do_local_roots (&forward_pointer, st.promote_domain, domain, 1);
    caml_scan_stack (&forward_pointer, st.promote_domain, domain_state->current_stack);

    /* Scan major to young pointers. */
    for (r = minor_tables->major_ref.base;
         r < minor_tables->major_ref.ptr; r++) {
      value old_p = **r;
      if (Is_block(old_p) && is_in_interval(old_p,young_ptr,young_end)) {
        value new_p = old_p;
        forward_pointer (st.promote_domain, new_p, &new_p);
        if (old_p != new_p) {
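          /* Other domains may update this slot concurrently; only a lone
             domain can store directly, otherwise publish with a CAS. */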
          if (caml_domain_alone())
            **r = new_p;
          else
            atomic_compare_exchange_strong((atomic_value*)*r, &old_p, new_p);
        }
      }
    }

    /* Scan ephemeron ref table */
    for (re = minor_tables->ephe_ref.base;
         re < minor_tables->ephe_ref.ptr; re++) {
      value* key = &Op_val(re->ephe)[re->offset];
      if (Is_block(*key) && is_in_interval(*key,young_ptr,young_end)) {
        forward_pointer (st.promote_domain, *key, key);
      }
    }

    /* Scan young to young pointers */
    for (r = minor_tables->minor_ref.base; r < minor_tables->minor_ref.ptr; r++) {
      forward_pointer (st.promote_domain, **r, *r);
    }

    /* Scan newer objects */
    for (iter = (value)young_ptr;
         iter <= st.oldest_promoted;
         iter = next_minor_block(domain_state, iter)) {
      value hd = Hd_hp(iter);
      value curr = Val_hp(iter);
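      /* A zero header marks a block already forwarded by promotion. */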
      if (hd != 0) {
        tag_t tag = Tag_hd (hd);
        if (tag == Cont_tag) {
          struct stack_info* stk = Ptr_val(Op_val(curr)[0]);
          if (stk != NULL)
            caml_scan_stack(&forward_pointer, st.promote_domain, stk);
        } else if (tag < No_scan_tag) {
          for (i = 0; i < Wosize_hd (hd); i++) {
            f = Op_val(curr)[i];
            if (Is_block(f)) {
              forward_pointer (st.promote_domain, f,((value*)curr) + i);
            }
          }
        }
      }
    }
  }
  domain_state->stat_promoted_words += domain_state->allocated_words - prev_alloc_words;
  return root;
}
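The helper is_in_interval used above is not shown in this example. A plausible definition, consistent with its call sites (testing whether a pointer falls inside the minor heap), would be:

/* Assumed helper: true when [v] points into [low, high). */
static inline int is_in_interval (value v, char* low, char* high)
{
  return (char*)v >= low && (char*)v < high;
}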