/* [size] is a number of bytes */
CAMLexport value caml_alloc_custom(struct custom_operations * ops,
                                   uintnat size,
                                   mlsize_t mem,
                                   mlsize_t max)
{
  mlsize_t wosize;
  value result;

  wosize = 1 + (size + sizeof(value) - 1) / sizeof(value);
  if (wosize <= Max_young_wosize) {
    result = caml_alloc_small(wosize, Custom_tag);
    Custom_ops_val(result) = ops;
    if (ops->finalize != NULL) {
      /* Remember that the block has a finalizer. */
      if (caml_finalize_table.ptr >= caml_finalize_table.limit){
        CAMLassert (caml_finalize_table.ptr == caml_finalize_table.limit);
        caml_realloc_ref_table (&caml_finalize_table);
      }
      *caml_finalize_table.ptr++ = (value *)result;
    }
  } else {
    result = caml_alloc_shr(wosize, Custom_tag);
    Custom_ops_val(result) = ops;
    caml_adjust_gc_speed(mem, max);
    result = caml_check_urgent_gc(result);
  }
  return result;
}
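/* A minimal sketch of a typical caller of [caml_alloc_custom], wrapping a
   hypothetical C object [struct widget] in a custom block.  The names
   [widget_ops], [widget_finalize] and [alloc_widget] are illustrative, not
   part of the runtime; the [mem]/[max] arguments (here 1/1000) only tune
   how much each such block accelerates the GC. */
#include <stdlib.h>
#include <caml/mlvalues.h>
#include <caml/custom.h>

struct widget { int refcount; };

static void widget_finalize (value v)
{
  /* Called by the GC when the custom block becomes unreachable. */
  free (*(struct widget **) Data_custom_val (v));
}

static struct custom_operations widget_ops = {
  "example.widget",
  widget_finalize,
  custom_compare_default,
  custom_hash_default,
  custom_serialize_default,
  custom_deserialize_default
};

CAMLprim value alloc_widget (value unit)
{
  struct widget *w = malloc (sizeof (struct widget));
  /* The custom data part holds just a pointer to the C object. */
  value v = caml_alloc_custom (&widget_ops, sizeof (struct widget *), 1, 1000);
  *(struct widget **) Data_custom_val (v) = w;
  return v;
}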
/* [caml_initialize] never calls the GC, so you may call it while a block is
   unfinished (i.e. just after a call to [caml_alloc_shr]). */
void caml_initialize (value *fp, value val)
{
  *fp = val;
  if (Is_block (val) && Is_young (val) && Is_in_heap (fp)){
    if (caml_ref_table.ptr >= caml_ref_table.limit){
      caml_realloc_ref_table (&caml_ref_table);
    }
    *caml_ref_table.ptr++ = fp;
  }
}
/* PR#6084 workaround: define it as a weak symbol */
CAMLexport CAMLweakdef void caml_initialize (value *fp, value val)
{
  CAMLassert(Is_in_heap(fp));
  *fp = val;
  if (Is_block (val) && Is_young (val)) {
    if (caml_ref_table.ptr >= caml_ref_table.limit){
      caml_realloc_ref_table (&caml_ref_table);
    }
    *caml_ref_table.ptr++ = fp;
  }
}
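/* Sketch of how [caml_initialize] is used from a C stub: after a direct
   major-heap allocation with [caml_alloc_shr], each field must be filled
   through [caml_initialize] so the store is recorded if it creates a
   major->minor pointer.  [make_pair_shr] is an illustrative name, not a
   runtime function. */
#include <caml/mlvalues.h>
#include <caml/memory.h>

CAMLprim value make_pair_shr (value a, value b)
{
  CAMLparam2 (a, b);
  CAMLlocal1 (res);
  /* Allocate directly in the major heap; the fields contain garbage
     until they are initialized. */
  res = caml_alloc_shr (2, 0);
  /* [caml_initialize] never triggers a GC, so it is safe to call while
     the block is still only partially initialized. */
  caml_initialize (&Field (res, 0), a);
  caml_initialize (&Field (res, 1), b);
  CAMLreturn (res);
}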
void caml_modify (value *fp, value val)
{
  value _old_ = *(fp);
  *(fp) = (val);
  if (Is_in_heap (fp)){
    if (caml_gc_phase == Phase_mark) caml_darken (_old_, NULL);
    if (Is_block (val) && Is_young (val)
        && ! (Is_block (_old_) && Is_young (_old_))){
      if (caml_ref_table.ptr >= caml_ref_table.limit){
        caml_realloc_ref_table (&caml_ref_table);
      }
      *caml_ref_table.ptr++ = (fp);
    }
  }
}
static void do_set (value ar, mlsize_t offset, value v)
{
  if (Is_block (v) && Is_young (v)){
    /* modified version of Modify */
    value old = Field (ar, offset);
    Field (ar, offset) = v;
    if (!(Is_block (old) && Is_young (old))){
      if (caml_weak_ref_table.ptr >= caml_weak_ref_table.limit){
        CAMLassert (caml_weak_ref_table.ptr == caml_weak_ref_table.limit);
        caml_realloc_ref_table (&caml_weak_ref_table);
      }
      *caml_weak_ref_table.ptr++ = &Field (ar, offset);
    }
  }else{
    Field (ar, offset) = v;
  }
}
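/* Illustrative sketch (not runtime code) of how a weak-array setter might
   call [do_set].  It assumes it lives in the same file as [do_set] and
   uses the conventional layout of OCaml weak arrays, where field 0 is
   reserved for the GC and user slots start at offset 1; the name
   [example_weak_set] is hypothetical and handling of the "none" value is
   omitted. */
CAMLprim value example_weak_set (value ar, value n, value v)
{
  mlsize_t offset = Long_val (n) + 1;   /* skip the reserved field 0 */
  if (offset < 1 || offset >= Wosize_val (ar))
    caml_invalid_argument ("Weak.set");
  /* [do_set] records &Field(ar, offset) in caml_weak_ref_table only when
     the store creates a major->minor pointer. */
  do_set (ar, offset, v);
  return Val_unit;
}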
CAMLexport CAMLweakdef void caml_modify (value *fp, value val)
{
  /* The write barrier implemented by [caml_modify] checks for the
     following two conditions and takes appropriate action:
     1- a pointer from the major heap to the minor heap is created
        --> add [fp] to the remembered set
     2- a pointer from the major heap to the major heap is overwritten,
        while the GC is in the marking phase
        --> call [caml_darken] on the overwritten pointer so that the
            major GC treats it as an additional root.
  */
  value old;

  if (Is_young((value)fp)) {
    /* The modified object resides in the minor heap.
       Conditions 1 and 2 cannot occur. */
    *fp = val;
  } else {
    /* The modified object resides in the major heap. */
    CAMLassert(Is_in_heap(fp));
    old = *fp;
    *fp = val;
    if (Is_block(old)) {
      /* If [old] is a pointer within the minor heap, we already have a
         major->minor pointer and [fp] is already in the remembered set.
         Conditions 1 and 2 cannot occur. */
      if (Is_young(old)) return;
      /* Here, [old] can be a pointer within the major heap.
         Check for condition 2. */
      if (caml_gc_phase == Phase_mark) caml_darken(old, NULL);
    }
    /* Check for condition 1. */
    if (Is_block(val) && Is_young(val)) {
      /* Add [fp] to remembered set */
      if (caml_ref_table.ptr >= caml_ref_table.limit){
        CAMLassert (caml_ref_table.ptr == caml_ref_table.limit);
        caml_realloc_ref_table (&caml_ref_table);
      }
      *caml_ref_table.ptr++ = fp;
    }
  }
}
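/* Sketch of the caller side of the write barrier: any mutation of a field
   of an existing OCaml block from C must go through [caml_modify], either
   directly or via the [Store_field] macro (which expands to a call to
   [caml_modify]).  [set_first_field] is an illustrative name. */
#include <caml/mlvalues.h>
#include <caml/memory.h>

CAMLprim value set_first_field (value r, value v)
{
  /* Equivalent to Store_field (r, 0, v); a plain assignment to
     Field (r, 0) would bypass the barrier and is unsafe here. */
  caml_modify (&Field (r, 0), v);
  return Val_unit;
}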