/* Verify one value reachable during a heap-consistency walk.
   Immediate values are ignored.  Infix pointers are normalised to the
   enclosing closure block before any bookkeeping.  Each block is visited
   at most once (tracked in [st->seen]); blocks marked NOT_MARKABLE are
   skipped.  Fields of scannable blocks are pushed via [verify_push]
   (presumably onto the verifier's work stack — confirm against its
   definition, which is not visible here). */
static void verify_object(struct heap_verify_state* st, value v)
{
  if (!Is_block(v)) return;

  /* A live block must have a non-zero header. */
  Assert (Hd_val(v));

  /* Normalise an infix pointer to the start of its closure block. */
  if (Tag_val(v) == Infix_tag) {
    v -= Infix_offset_val(v);
    Assert(Tag_val(v) == Closure_tag);
  }

  /* Deduplicate: insert into the seen-set; bail out if already present. */
  intnat* entry = caml_addrmap_insert_pos(&st->seen, v);
  if (*entry != ADDRMAP_NOT_PRESENT) return;
  *entry = 1;

  /* Blocks outside the GC's jurisdiction are not checked further. */
  if (Has_status_hd(Hd_val(v), NOT_MARKABLE)) return;
  st->objs++;

  /* Major-heap blocks must be UNMARKED at this point of the cycle
     (NOTE(review): this variant checks UNMARKED where a sibling variant
     checks MARKED — the expected status depends on when in the GC cycle
     verification runs; confirm against the caller). */
  if (!Is_minor(v)) {
    Assert(Has_status_hd(Hd_val(v), global.UNMARKED));
  }

  if (Tag_val(v) == Stack_tag) {
    /* Stacks have their own scanning routine. */
    caml_scan_stack(verify_push, st, v);
  } else if (Tag_val(v) < No_scan_tag) {
    int i;
    for (i = 0; i < Wosize_val(v); i++) {
      value f = Op_val(v)[i];
      /* A minor-heap block may only point to minor-heap blocks owned by
         the same domain. */
      if (Is_minor(v) && Is_minor(f)) {
        Assert(caml_owner_of_young_block(v) == caml_owner_of_young_block(f));
      }
      if (Is_block(f)) verify_push(st, f, 0);
    }
  }
}
/* Verify one value reachable during a heap-consistency walk
   (global-state variant: uses [verify_seen] / [verify_objs]).
   Immediate values are ignored; infix pointers are normalised to the
   enclosing closure block; each block is visited at most once;
   NOT_MARKABLE blocks are skipped.  Fields of scannable blocks are
   handed to [verify_push] (definition not visible here). */
static void verify_object(value v)
{
  if (!Is_block(v)) return;

  /* Normalise an infix pointer to the start of its closure block. */
  if (Tag_val(v) == Infix_tag) {
    v -= Infix_offset_val(v);
    Assert(Tag_val(v) == Closure_tag);
  }

  /* Deduplicate: insert into the global seen-set; bail out if present. */
  intnat* entry = caml_addrmap_insert_pos(&verify_seen, v);
  if (*entry != ADDRMAP_NOT_PRESENT) return;
  *entry = 1;

  /* Blocks outside the GC's jurisdiction are not checked further. */
  if (Has_status_hd(Hd_val(v), NOT_MARKABLE)) return;
  verify_objs++;

  /* Major-heap blocks must be MARKED here (NOTE(review): a sibling
     variant asserts UNMARKED — the expected status depends on where in
     the GC cycle verification runs; confirm against the caller). */
  if (!Is_minor(v)) {
    Assert(Has_status_hd(Hd_val(v), global.MARKED));
  }

  if (Tag_val(v) == Stack_tag) {
    /* Stacks have their own scanning routine. */
    caml_scan_stack(verify_push, v);
  } else if (Tag_val(v) < No_scan_tag) {
    int i;
    for (i = 0; i < Wosize_val(v); i++) {
      value f = Op_val(v)[i];
      /* A minor-heap block may only point to minor-heap blocks owned by
         the same domain. */
      if (Is_minor(v) && Is_minor(f)) {
        Assert(caml_owner_of_young_block(v) == caml_owner_of_young_block(f));
      }
      if (Is_block(f)) verify_push(f, 0);
    }
  }
}
/* Canonicalise a markable value before marking: an infix pointer is
   rewritten to point at the start of its enclosing closure block.
   Forward blocks are currently left untouched. */
static value mark_normalise(value v)
{
  Assert(Is_markable(v));
  switch (Tag_val(v)) {
  case Forward_tag:
    /* FIXME: short-circuiting lazy values is a useful optimisation */
    break;
  case Infix_tag:
    /* Step back to the enclosing Closure_tag block. */
    v -= Infix_offset_val(v);
    break;
  default:
    break;
  }
  return v;
}
/* If [*v] is an [Infix_tag] object, [v] is updated to point to the first
 * object in the block. */
static inline void resolve_infix_val (value* v)
{
  int offset = 0;
  /* Inspect only the tag bits.  The previous full-header comparison
     (Hd_val (*v) == Infix_tag) could never hold for a genuine infix
     header, whose size field stores the non-zero offset — so the
     adjustment never fired. */
  if (Tag_val (*v) == Infix_tag) {
    offset = Infix_offset_val (*v);
    CAMLassert (offset > 0);
    *v -= offset;
  }
}
/* Compaction, inversion phase: replace the pointer at [p] (which points
   into the heap) by a link in the pointed-to block's inverted pointer
   list, threading through the block's header word.  The two low bits of
   each link ("Ecolor") distinguish plain links (0), infix headers (1),
   inverted infix links (2) and real headers (3). */
static void invert_pointer_at (word *p)
{
  word q = *p;
  /* [p] itself must be word-aligned with clear low bits. */
  Assert (Ecolor ((intnat) p) == 0);

  /* Use Ecolor (q) == 0 instead of Is_block (q) because q could be an
     inverted pointer for an infix header (with Ecolor == 2). */
  if (Ecolor (q) == 0 && (Classify_addr (q) & In_heap)) {
    switch (Ecolor (Hd_val (q))) {
    case 0:
    case 3: /* Pointer or header: insert in inverted list. */
      *p = Hd_val (q);
      Hd_val (q) = (header_t) p;
      break;
    case 1: /* Infix header: make inverted infix list. */
      /* Double inversion: the last of the inverted infix list points to
         the next infix header in this block.  The last of the last list
         contains the original block header. */
      {
        /* This block as a value. */
        value val = (value) q - Infix_offset_val (q);
        /* Get the block header. */
        word *hp = (word *) Hp_val (val);

        /* Follow plain links until we reach the saved real header. */
        while (Ecolor (*hp) == 0) hp = (word *) *hp;
        Assert (Ecolor (*hp) == 3);
        if (Tag_ehd (*hp) == Closure_tag) {
          /* This is the first infix found in this block. */
          /* Save original header. */
          *p = *hp;
          /* Link inverted infix list. */
          Hd_val (q) = (header_t) ((word) p | 2);
          /* Change block header's tag to Infix_tag, and change its size
             to point to the infix list. */
          *hp = Make_ehd (Wosize_bhsize (q - val), Infix_tag, 3);
        } else {
          Assert (Tag_ehd (*hp) == Infix_tag);
          /* Point the last of this infix list to the current first infix
             list of the block. */
          *p = (word) &Field (val, Wosize_ehd (*hp)) | 1;
          /* Point the head of this infix list to the above. */
          Hd_val (q) = (header_t) ((word) p | 2);
          /* Change block header's size to point to this infix list. */
          *hp = Make_ehd (Wosize_bhsize (q - val), Infix_tag, 3);
        }
      }
      break;
    case 2: /* Inverted infix list: insert. */
      *p = Hd_val (q);
      Hd_val (q) = (header_t) ((word) p | 2);
      break;
    }
  }
}
/* Promote the minor-heap block referenced by [curr] (owned by [domain])
   to the shared major heap, returning the promoted value (with any infix
   offset re-applied).  Immediates and already-major values are returned
   unchanged.  Already-promoted blocks resolve via the domain's promotion
   address map; raw (No_scan) blocks are copied eagerly, scannable blocks
   are pushed onto [stk] so their fields are copied later by the caller's
   drain loop (not visible here — confirm against the caller). */
static value caml_promote_one(struct promotion_stack* stk, struct domain* domain, value curr)
{
  header_t curr_block_hd;
  int infix_offset = 0;

  if (Is_long(curr) || !Is_minor(curr))
    return curr; /* needs no promotion */

  Assert(caml_owner_of_young_block(curr) == domain);

  curr_block_hd = Hd_val(curr);

  /* Normalise an infix pointer to its enclosing block, remembering the
     offset so the returned value points at the same spot. */
  if (Tag_hd(curr_block_hd) == Infix_tag) {
    infix_offset = Infix_offset_val(curr);
    curr -= infix_offset;
    curr_block_hd = Hd_val(curr);
  }

  if (Is_promoted_hd(curr_block_hd)) {
    /* already promoted */
    return caml_addrmap_lookup(&domain->state->remembered_set->promotion, curr) + infix_offset;
  } else if (curr_block_hd == 0) {
    /* promoted by minor GC: first field holds the forwarding pointer. */
    return Op_val(curr)[0] + infix_offset;
  }

  /* otherwise, must promote */
  void* mem = caml_shared_try_alloc(domain->shared_heap, Wosize_hd(curr_block_hd),
                                    Tag_hd(curr_block_hd), 1);
  if (!mem)
    caml_fatal_error("allocation failure during promotion");
  value promoted = Val_hp(mem);

  /* Mark the original as promoted and record the mapping both ways. */
  Hd_val(curr) = Promotedhd_hd(curr_block_hd);
  caml_addrmap_insert(&domain->state->remembered_set->promotion, curr, promoted);
  caml_addrmap_insert(&domain->state->remembered_set->promotion_rev, promoted, curr);

  if (Tag_hd(curr_block_hd) >= No_scan_tag) {
    /* Raw data: copy the words immediately. */
    int i;
    for (i = 0; i < Wosize_hd(curr_block_hd); i++)
      Op_val(promoted)[i] = Op_val(curr)[i];
  } else {
    /* push to stack: fields are promoted/copied later.  Grow the stack
       geometrically when full. */
    if (stk->sp == stk->stack_len) {
      stk->stack_len = 2 * (stk->stack_len + 10);
      stk->stack = caml_stat_resize(stk->stack,
                                    sizeof(struct promotion_stack_entry) * stk->stack_len);
    }
    stk->stack[stk->sp].local = curr;
    stk->stack[stk->sp].global = promoted;
    stk->stack[stk->sp].field = 0;
    stk->sp++;
  }
  return promoted + infix_offset;
}
/* Check that [v]'s header looks good.  [v] must be a block in the heap.
   Asserts: non-empty block, not free-list blue, last field in-heap; for
   infix pointers, that the enclosing block is a well-formed closure; and
   the size constraints of Double_tag / Double_array_tag blocks.
   (Removed a duplicated [Is_in_heap] assertion — it was checked twice.) */
static void check_head (value v)
{
  Assert (Is_block (v));
  Assert (Is_in_heap (v));
  Assert (Wosize_val (v) != 0);
  /* Blue is the free-list color; a live block must not be blue. */
  Assert (Color_hd (Hd_val (v)) != Caml_blue);
  if (Tag_val (v) == Infix_tag){
    /* Step back to the enclosing closure and validate it instead. */
    int offset = Wsize_bsize (Infix_offset_val (v));
    value trueval = Val_op (&Field (v, -offset));
    Assert (Tag_val (trueval) == Closure_tag);
    Assert (Wosize_val (trueval) > offset);
    Assert (Is_in_heap (&Field (trueval, Wosize_val (trueval) - 1)));
  }else{
    Assert (Is_in_heap (&Field (v, Wosize_val (v) - 1)));
  }
  if (Tag_val (v) == Double_tag){
    Assert (Wosize_val (v) == Double_wosize);
  }else if (Tag_val (v) == Double_array_tag){
    Assert (Wosize_val (v) % Double_wosize == 0);
  }
}
/* Recursively fold the structure of [obj] into the global hash
   accumulator (via the Combine / Combine_small macros), bounded by the
   global budgets [hash_univ_count] (meaningful nodes) and
   [hash_univ_limit] (total nodes, including skipped ones). */
static void hash_aux(value obj)
{
  unsigned char * p;
  mlsize_t i, j;
  tag_t tag;

  /* Every visit consumes one unit of the total-size budget. */
  hash_univ_limit--;
  if (hash_univ_count < 0 || hash_univ_limit < 0) return;

 again:
  if (Is_long(obj)) {
    hash_univ_count--;
    Combine(Long_val(obj));
    return;
  }

  /* Pointers into the heap are well-structured blocks. So are atoms.
     We can inspect the block contents. */

  Assert (Is_block (obj));
  if (Is_in_value_area(obj)) {
    tag = Tag_val(obj);
    switch (tag) {
    case String_tag:
      hash_univ_count--;
      i = caml_string_length(obj);
      for (p = &Byte_u(obj, 0); i > 0; i--, p++)
        Combine_small(*p);
      break;
    case Double_tag:
      /* For doubles, we inspect their binary representation, LSB first.
         The results are consistent among all platforms with IEEE floats. */
      hash_univ_count--;
#ifdef ARCH_BIG_ENDIAN
      for (p = &Byte_u(obj, sizeof(double) - 1), i = sizeof(double);
           i > 0;
           p--, i--)
#else
      for (p = &Byte_u(obj, 0), i = sizeof(double);
           i > 0;
           p++, i--)
#endif
        Combine_small(*p);
      break;
    case Double_array_tag:
      hash_univ_count--;
      /* Hash each double in the flat array, LSB first (as above). */
      for (j = 0; j < Bosize_val(obj); j += sizeof(double)) {
#ifdef ARCH_BIG_ENDIAN
        for (p = &Byte_u(obj, j + sizeof(double) - 1), i = sizeof(double);
             i > 0;
             p--, i--)
#else
        for (p = &Byte_u(obj, j), i = sizeof(double);
             i > 0;
             p++, i--)
#endif
          Combine_small(*p);
      }
      break;
    case Abstract_tag:
      /* We don't know anything about the contents of the block.
         Better do nothing. */
      break;
    case Infix_tag:
      /* Hash the enclosing closure block instead. */
      hash_aux(obj - Infix_offset_val(obj));
      break;
    case Forward_tag:
      /* Follow the forwarding pointer without consuming budget. */
      obj = Forward_val (obj);
      goto again;
    case Object_tag:
      /* Objects hash by identity (their unique id), not structure. */
      hash_univ_count--;
      Combine(Oid_val(obj));
      break;
    case Custom_tag:
      /* If no hashing function provided, do nothing */
      if (Custom_ops_val(obj)->hash != NULL) {
        hash_univ_count--;
        Combine(Custom_ops_val(obj)->hash(obj));
      }
      break;
    default:
      /* Structured block: mix the tag, then recurse over every field. */
      hash_univ_count--;
      Combine_small(tag);
      i = Wosize_val(obj);
      while (i != 0) {
        i--;
        hash_aux(Field(obj, i));
      }
      break;
    }
    return;
  }

  /* Otherwise, obj is a pointer outside the heap, to an object with
     a priori unknown structure. Use its physical address as hash key. */
  Combine((intnat) obj);
}
/* Return the offset of [v] within its enclosing closure block, as a
   non-positive number of words: 0 when [v] is the closure block itself,
   and minus the infix offset otherwise. */
value coq_offset(value v)
{
  if (Tag_val(v) != Closure_tag)
    return Val_long(-Wsize_bsize(Infix_offset_val(v)));
  return Val_int(0);
}