DUK_LOCAL void duk__mark_finalizable(duk_heap *heap) {
	duk_hthread *thr;
	duk_heaphdr *hdr;
	duk_size_t count_finalizable = 0;

	DUK_DD(DUK_DDPRINT("duk__mark_finalizable: %p", (void *) heap));

	thr = duk__get_temp_hthread(heap);
	DUK_ASSERT(thr != NULL);

	hdr = heap->heap_allocated;
	while (hdr) {
		/* A finalizer is looked up from the object and up its prototype chain
		 * (which allows inherited finalizers).  A prototype loop must not cause
		 * an error to be thrown here; duk_hobject_hasprop_raw() will ignore a
		 * prototype loop silently and indicate that the property doesn't exist.
		 */
		if (!DUK_HEAPHDR_HAS_REACHABLE(hdr) &&
		    DUK_HEAPHDR_GET_TYPE(hdr) == DUK_HTYPE_OBJECT &&
		    !DUK_HEAPHDR_HAS_FINALIZED(hdr) &&
		    duk_hobject_hasprop_raw(thr, (duk_hobject *) hdr, DUK_HTHREAD_STRING_INT_FINALIZER(thr))) {
			/* heaphdr:
			 *  - is not reachable
			 *  - is an object
			 *  - is not a finalized object
			 *  - has a finalizer
			 */

			DUK_DD(DUK_DDPRINT("unreachable heap object will be "
			                   "finalized -> mark as finalizable "
			                   "and treat as a reachability root: %p",
			                   (void *) hdr));
			DUK_ASSERT(!DUK_HEAPHDR_HAS_READONLY(hdr));
			DUK_HEAPHDR_SET_FINALIZABLE(hdr);
			count_finalizable++;
		}

		hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
	}

	if (count_finalizable == 0) {
		return;
	}

	DUK_DD(DUK_DDPRINT("marked %ld heap objects as finalizable, now mark them reachable",
	                   (long) count_finalizable));

	hdr = heap->heap_allocated;
	while (hdr) {
		if (DUK_HEAPHDR_HAS_FINALIZABLE(hdr)) {
			duk__mark_heaphdr(heap, hdr);
		}

		hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
	}

	/* Caller will finish the marking process if we hit a recursion limit. */
}
DUK_INTERNAL void duk_heap_switch_thread(duk_heap *heap, duk_hthread *new_thr) {
	duk_hthread *curr_thr;

	DUK_ASSERT(heap != NULL);

	if (new_thr != NULL) {
		curr_thr = heap->curr_thread;
		if (curr_thr == NULL) {
			/* For initial entry use default value; zero forces an
			 * interrupt before executing the first instruction.
			 */
			DUK_DD(DUK_DDPRINT("switch thread, initial entry, init default interrupt counter"));
			new_thr->interrupt_counter = 0;
			new_thr->interrupt_init = 0;
		} else {
			/* Copy interrupt counter/init value state to new thread (if any).
			 * It's OK for new_thr to be the same as curr_thr.
			 */
#if defined(DUK_USE_DEBUG)
			if (new_thr != curr_thr) {
				DUK_DD(DUK_DDPRINT("switch thread, not initial entry, copy interrupt counter"));
			}
#endif
			new_thr->interrupt_counter = curr_thr->interrupt_counter;
			new_thr->interrupt_init = curr_thr->interrupt_init;
		}
	} else {
		DUK_DD(DUK_DDPRINT("switch thread, new thread is NULL, no interrupt counter changes"));
	}

	heap->curr_thread = new_thr;  /* may be NULL */
}
static void duk__mark_finalizable(duk_heap *heap) {
	duk_hthread *thr;
	duk_heaphdr *hdr;
	int count_finalizable = 0;

	DUK_DD(DUK_DDPRINT("duk__mark_finalizable: %p", (void *) heap));

	thr = duk__get_temp_hthread(heap);
	DUK_ASSERT(thr != NULL);

	hdr = heap->heap_allocated;
	while (hdr) {
		/* A finalizer is looked up from the object and up its prototype chain
		 * (which allows inherited finalizers).
		 */
		if (!DUK_HEAPHDR_HAS_REACHABLE(hdr) &&
		    DUK_HEAPHDR_GET_TYPE(hdr) == DUK_HTYPE_OBJECT &&
		    !DUK_HEAPHDR_HAS_FINALIZED(hdr) &&
		    duk_hobject_hasprop_raw(thr, (duk_hobject *) hdr, DUK_HTHREAD_STRING_INT_FINALIZER(thr))) {
			/* heaphdr:
			 *  - is not reachable
			 *  - is an object
			 *  - is not a finalized object
			 *  - has a finalizer
			 */

			DUK_DD(DUK_DDPRINT("unreachable heap object will be finalized -> mark as finalizable and treat as a reachability root: %p",
			                   (void *) hdr));
			DUK_HEAPHDR_SET_FINALIZABLE(hdr);
			count_finalizable++;
		}

		hdr = DUK_HEAPHDR_GET_NEXT(hdr);
	}

	if (count_finalizable == 0) {
		return;
	}

	DUK_DD(DUK_DDPRINT("marked %d heap objects as finalizable, now mark them reachable",
	                   count_finalizable));

	hdr = heap->heap_allocated;
	while (hdr) {
		if (DUK_HEAPHDR_HAS_FINALIZABLE(hdr)) {
			duk__mark_heaphdr(heap, hdr);
		}

		hdr = DUK_HEAPHDR_GET_NEXT(hdr);
	}

	/* Caller will finish the marking process if we hit a recursion limit. */
}
DUK_LOCAL void duk__generate_ranges(void *userdata, duk_codepoint_t r1, duk_codepoint_t r2, duk_bool_t direct) {
	duk_re_compiler_ctx *re_ctx = (duk_re_compiler_ctx *) userdata;

	DUK_DD(DUK_DDPRINT("duk__generate_ranges(): re_ctx=%p, range=[%ld,%ld] direct=%ld",
	                   (void *) re_ctx, (long) r1, (long) r2, (long) direct));

	if (!direct && (re_ctx->re_flags & DUK_RE_FLAG_IGNORE_CASE)) {
		/*
		 * Canonicalize a range, generating result ranges as necessary.
		 * Needs to exhaustively scan the entire range (at most 65536
		 * code points).  If 'direct' is set, caller (lexer) has ensured
		 * that the range is already canonicalization compatible (this
		 * is used to avoid unnecessary canonicalization of built-in
		 * ranges like \W, which are not affected by canonicalization).
		 *
		 * NOTE: here is one place where we don't want to support chars
		 * outside the BMP, because the exhaustive search would be
		 * massively larger.
		 */

		duk_codepoint_t i;
		duk_codepoint_t t;
		duk_codepoint_t r_start, r_end;

		r_start = duk_unicode_re_canonicalize_char(re_ctx->thr, r1);
		r_end = r_start;
		for (i = r1 + 1; i <= r2; i++) {
			t = duk_unicode_re_canonicalize_char(re_ctx->thr, i);
			if (t == r_end + 1) {
				r_end = t;
			} else {
				DUK_DD(DUK_DDPRINT("canonicalized, emit range: [%ld,%ld]", (long) r_start, (long) r_end));
				duk__append_u32(re_ctx, (duk_uint32_t) r_start);
				duk__append_u32(re_ctx, (duk_uint32_t) r_end);
				re_ctx->nranges++;
				r_start = t;
				r_end = t;
			}
		}
		DUK_DD(DUK_DDPRINT("canonicalized, emit range: [%ld,%ld]", (long) r_start, (long) r_end));
		duk__append_u32(re_ctx, (duk_uint32_t) r_start);
		duk__append_u32(re_ctx, (duk_uint32_t) r_end);
		re_ctx->nranges++;
	} else {
		DUK_DD(DUK_DDPRINT("direct, emit range: [%ld,%ld]", (long) r1, (long) r2));
		duk__append_u32(re_ctx, (duk_uint32_t) r1);
		duk__append_u32(re_ctx, (duk_uint32_t) r2);
		re_ctx->nranges++;
	}
}
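/* Illustration (not part of the Duktape source): the loop above coalesces
 * canonicalized code points into maximal contiguous runs, emitting a new
 * range whenever continuity breaks.  The minimal standalone sketch below
 * applies the same run-coalescing logic, with ASCII toupper() standing in
 * for duk_unicode_re_canonicalize_char() and printf() standing in for
 * duk__append_u32(); all names are illustrative only.
 */
#include <stdio.h>
#include <ctype.h>

static void emit_canonicalized_ranges(int r1, int r2) {
	int i, t;
	int r_start, r_end;

	r_start = toupper(r1);
	r_end = r_start;
	for (i = r1 + 1; i <= r2; i++) {
		t = toupper(i);
		if (t == r_end + 1) {
			r_end = t;  /* extend current contiguous run */
		} else {
			printf("emit range: [%d,%d]\n", r_start, r_end);
			r_start = t;
			r_end = t;
		}
	}
	printf("emit range: [%d,%d]\n", r_start, r_end);  /* final run */
}

int main(void) {
	emit_canonicalized_ranges('a', 'c');  /* emits the single run [65,67], i.e. A-C */
	return 0;
}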
DUK_LOCAL void duk__finalize_refcounts(duk_heap *heap) {
	duk_hthread *thr;
	duk_heaphdr *hdr;

	thr = duk__get_temp_hthread(heap);
	DUK_ASSERT(thr != NULL);

	DUK_DD(DUK_DDPRINT("duk__finalize_refcounts: heap=%p, hthread=%p",
	                   (void *) heap, (void *) thr));

	hdr = heap->heap_allocated;
	while (hdr) {
		if (!DUK_HEAPHDR_HAS_REACHABLE(hdr)) {
			/*
			 * Unreachable object about to be swept.  Finalize target refcounts
			 * (objects which the unreachable object points to) without doing
			 * refzero processing.  Recursive decrefs are also prevented when
			 * refzero processing is disabled.
			 *
			 * Value cannot be a finalizable object, as they have been made
			 * temporarily reachable for this round.
			 */

			DUK_DDD(DUK_DDDPRINT("unreachable object, refcount finalize before sweeping: %p", (void *) hdr));
			duk_heaphdr_refcount_finalize(thr, hdr);
		}

		hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
	}
}
DUK_LOCAL void duk__compact_objects(duk_heap *heap) {
	/* XXX: which lists should participate?  to be finalized? */
#if defined(DUK_USE_DEBUG)
	duk_size_t count_check = 0;
	duk_size_t count_compact = 0;
	duk_size_t count_bytes_saved = 0;
#endif
	duk_hthread *thr;

	DUK_DD(DUK_DDPRINT("duk__compact_objects: %p", (void *) heap));

	thr = duk__get_temp_hthread(heap);
	DUK_ASSERT(thr != NULL);

#if defined(DUK_USE_DEBUG)
	duk__compact_object_list(heap, thr, heap->heap_allocated, &count_check, &count_compact, &count_bytes_saved);
	duk__compact_object_list(heap, thr, heap->finalize_list, &count_check, &count_compact, &count_bytes_saved);
#if defined(DUK_USE_REFERENCE_COUNTING)
	duk__compact_object_list(heap, thr, heap->refzero_list, &count_check, &count_compact, &count_bytes_saved);
#endif
#else
	duk__compact_object_list(heap, thr, heap->heap_allocated);
	duk__compact_object_list(heap, thr, heap->finalize_list);
#if defined(DUK_USE_REFERENCE_COUNTING)
	duk__compact_object_list(heap, thr, heap->refzero_list);
#endif
#endif

#if defined(DUK_USE_DEBUG)
	DUK_D(DUK_DPRINT("mark-and-sweep compact objects: %ld checked, %ld compaction attempts, %ld bytes saved by compaction",
	                 (long) count_check, (long) count_compact, (long) count_bytes_saved));
#endif
}
static int duk__init_heap_thread(duk_heap *heap) {
	duk_hthread *thr;

	DUK_DD(DUK_DDPRINT("heap init: alloc heap thread"));
	thr = duk_hthread_alloc(heap,
	                        DUK_HOBJECT_FLAG_EXTENSIBLE |
	                        DUK_HOBJECT_FLAG_THREAD |
	                        DUK_HOBJECT_CLASS_AS_FLAGS(DUK_HOBJECT_CLASS_THREAD));
	if (!thr) {
		DUK_D(DUK_DPRINT("failed to alloc heap_thread"));
		return 0;
	}
	thr->state = DUK_HTHREAD_STATE_INACTIVE;
	thr->strs = heap->strs;

	heap->heap_thread = thr;
	DUK_HTHREAD_INCREF(thr, thr);  /* Note: first argument not really used */

	/* 'thr' is now reachable */

	if (!duk_hthread_init_stacks(heap, thr)) {
		return 0;
	}

	/* FIXME: this may now fail, and is not handled correctly */
	duk_hthread_create_builtin_objects(thr);

	/* default prototype (Note: 'thr' must be reachable) */
	DUK_HOBJECT_SET_PROTOTYPE_UPDREF(thr, (duk_hobject *) thr, thr->builtins[DUK_BIDX_THREAD_PROTOTYPE]);

	return 1;
}
void duk_hthread_callstack_shrink_check(duk_hthread *thr) {
	duk_size_t new_size;
	duk_activation *p;

	DUK_ASSERT(thr != NULL);
	DUK_ASSERT_DISABLE(thr->callstack_top >= 0);  /* avoid warning (unsigned) */
	DUK_ASSERT(thr->callstack_size >= thr->callstack_top);

	if (thr->callstack_size - thr->callstack_top < DUK_CALLSTACK_SHRINK_THRESHOLD) {
		return;
	}

	new_size = thr->callstack_top + DUK_CALLSTACK_SHRINK_SPARE;
	DUK_ASSERT(new_size >= thr->callstack_top);

	DUK_DD(DUK_DDPRINT("shrinking callstack %ld -> %ld", (long) thr->callstack_size, (long) new_size));

	/*
	 * Note: must use indirect variant of DUK_REALLOC() because underlying
	 * pointer may be changed by mark-and-sweep.
	 */

	/* shrink failure is not fatal */
	p = (duk_activation *) DUK_REALLOC_INDIRECT(thr->heap, duk_hthread_get_callstack_ptr, (void *) thr, sizeof(duk_activation) * new_size);
	if (p) {
		thr->callstack = p;
		thr->callstack_size = new_size;
	} else {
		DUK_D(DUK_DPRINT("callstack shrink failed, ignoring"));
	}

	/* note: any entries above the callstack top are garbage and not zeroed */
}
void duk_hthread_catchstack_grow(duk_hthread *thr) {
	duk_size_t old_size;
	duk_size_t new_size;

	DUK_ASSERT(thr != NULL);
	DUK_ASSERT_DISABLE(thr->catchstack_top >= 0);  /* avoid warning (unsigned) */
	DUK_ASSERT(thr->catchstack_size >= thr->catchstack_top);

	if (thr->catchstack_top < thr->catchstack_size) {
		return;
	}

	old_size = thr->catchstack_size;
	new_size = old_size + DUK_CATCHSTACK_GROW_STEP;

	/* this is a bit approximate (errors out before max is reached); this is OK */
	if (new_size >= thr->catchstack_max) {
		DUK_ERROR(thr, DUK_ERR_RANGE_ERROR, "catchstack limit");
	}

	DUK_DD(DUK_DDPRINT("growing catchstack %ld -> %ld", (long) old_size, (long) new_size));

	/*
	 * Note: must use indirect variant of DUK_REALLOC() because underlying
	 * pointer may be changed by mark-and-sweep.
	 */

	thr->catchstack = (duk_catcher *) DUK_REALLOC_INDIRECT_CHECKED(thr, duk_hthread_get_catchstack_ptr, (void *) thr, sizeof(duk_catcher) * new_size);
	thr->catchstack_size = new_size;

	/* note: any entries above the catchstack top are garbage and not zeroed */
}
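/* Pattern illustration (not part of the Duktape source): the "indirect"
 * realloc variants used above take a getter callback rather than a raw
 * pointer, because attempting an allocation may trigger mark-and-sweep,
 * which can itself relocate the buffer being resized; the pointer must be
 * re-read at the last possible moment.  A generic sketch of the idea with
 * illustrative names only (the real macros add retry and GC handling):
 */
#include <stdlib.h>

typedef void *(*get_ptr_func)(void *ud);  /* illustrative getter type */

static void *realloc_indirect_sketch(get_ptr_func getter, void *ud, size_t new_size) {
	/* Fetch the current pointer via the callback instead of trusting a
	 * value cached before possible side effects.
	 */
	return realloc(getter(ud), new_size);
}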
DUK_EXTERNAL void duk_dump_function(duk_context *ctx) {
	duk_hthread *thr;
	duk_hcompiledfunction *func;
	duk_bufwriter_ctx bw_ctx_alloc;
	duk_bufwriter_ctx *bw_ctx = &bw_ctx_alloc;
	duk_uint8_t *p;

	DUK_ASSERT(ctx != NULL);
	thr = (duk_hthread *) ctx;

	/* Bound functions don't have all properties so we'd either need to
	 * lookup the non-bound target function or reject bound functions.
	 * For now, bound functions are rejected.
	 */
	func = duk_require_hcompiledfunction(ctx, -1);
	DUK_ASSERT(func != NULL);
	DUK_ASSERT(!DUK_HOBJECT_HAS_BOUND(&func->obj));

	/* Estimating the result size beforehand would be costly, so
	 * start with a reasonable size and extend as needed.
	 */
	DUK_BW_INIT_PUSHBUF(thr, bw_ctx, DUK__BYTECODE_INITIAL_ALLOC);
	p = DUK_BW_GET_PTR(thr, bw_ctx);
	*p++ = DUK__SER_MARKER;
	*p++ = DUK__SER_VERSION;
	p = duk__dump_func(ctx, func, bw_ctx, p);
	DUK_BW_SET_PTR(thr, bw_ctx, p);
	DUK_BW_COMPACT(thr, bw_ctx);

	DUK_DD(DUK_DDPRINT("serialized result: %!T", duk_get_tval(ctx, -1)));

	duk_remove(ctx, -2);  /* [ ... func buf ] -> [ ... buf ] */
}
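/* Usage illustration (not part of the Duktape source): duk_dump_function()
 * operates on the compiled function at the stack top and replaces it with
 * a buffer holding the serialized bytecode; duk_load_function() reverses
 * the operation.  A minimal embedding sketch:
 */
#include "duktape.h"

static void dump_example(duk_context *ctx) {
	duk_size_t sz;
	void *buf;

	/* Compile a function; the parenthesized expression leaves it on the stack. */
	duk_eval_string(ctx, "(function add(a, b) { return a + b; })");

	duk_dump_function(ctx);  /* [ ... func ] -> [ ... buf ] */
	buf = duk_get_buffer_data(ctx, -1, &sz);
	(void) buf;  /* e.g. write sz bytes to disk */

	duk_load_function(ctx);  /* [ ... buf ] -> [ ... func ] */
	duk_pop(ctx);
}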
DUK_LOCAL duk_ret_t duk__error_setter_helper(duk_context *ctx, duk_small_uint_t stridx_key) {
	/* Attempts to write 'stack', 'fileName', or 'lineNumber' work as if
	 * user code called Object.defineProperty() to create an overriding
	 * own property.  This allows user code to overwrite .fileName etc
	 * intuitively, e.g. "err.fileName = 'dummy'", as one might expect.
	 * See https://github.com/svaarala/duktape/issues/387.
	 */

	DUK_ASSERT_TOP(ctx, 1);  /* fixed arg count: value */

	duk_push_this(ctx);
	duk_push_hstring_stridx(ctx, (duk_small_int_t) stridx_key);
	duk_dup_0(ctx);

	/* [ ... obj key value ] */

	DUK_DD(DUK_DDPRINT("error setter: %!T %!T %!T",
	                   duk_get_tval(ctx, -3), duk_get_tval(ctx, -2), duk_get_tval(ctx, -1)));

	duk_def_prop(ctx, -3, DUK_DEFPROP_HAVE_VALUE |
	                      DUK_DEFPROP_HAVE_WRITABLE | DUK_DEFPROP_WRITABLE |
	                      DUK_DEFPROP_HAVE_ENUMERABLE | /*not enumerable*/
	                      DUK_DEFPROP_HAVE_CONFIGURABLE | DUK_DEFPROP_CONFIGURABLE);
	return 0;
}
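/* Behavior illustration (not part of the Duktape source): with the setter
 * above, assigning to .fileName creates an own property that shadows the
 * inherited accessor, so plain assignment behaves as scripts expect.  A
 * minimal sketch using the public API:
 */
#include "duktape.h"

static void error_setter_example(duk_context *ctx) {
	duk_peval_string(ctx,
	    "(function () {\n"
	    "    var e = new Error('test');\n"
	    "    e.fileName = 'dummy';  /* works like Object.defineProperty() */\n"
	    "    return e.fileName;     /* -> 'dummy' */\n"
	    "})()");
	duk_pop(ctx);  /* pop result (or error) */
}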
static void duk__sweep_stringtable(duk_heap *heap, duk_size_t *out_count_keep) {
	duk_hstring *h;
	duk_uint_fast32_t i;
#ifdef DUK_USE_DEBUG
	duk_size_t count_free = 0;
#endif
	duk_size_t count_keep = 0;

	DUK_DD(DUK_DDPRINT("duk__sweep_stringtable: %p", (void *) heap));

	for (i = 0; i < heap->st_size; i++) {
		h = heap->st[i];
		if (h == NULL || h == DUK_STRTAB_DELETED_MARKER(heap)) {
			continue;
		} else if (DUK_HEAPHDR_HAS_REACHABLE((duk_heaphdr *) h)) {
			DUK_HEAPHDR_CLEAR_REACHABLE((duk_heaphdr *) h);
			count_keep++;
			continue;
		}

#ifdef DUK_USE_DEBUG
		count_free++;
#endif

#if defined(DUK_USE_REFERENCE_COUNTING)
		/* Non-zero refcounts should not happen for unreachable strings,
		 * because we refcount finalize all unreachable objects which
		 * should have decreased unreachable string refcounts to zero
		 * (even for cycles).
		 */
		DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT((duk_heaphdr *) h) == 0);
#endif

		DUK_DDD(DUK_DDDPRINT("sweep string, not reachable: %p", (void *) h));

		/* deal with weak references first */
		duk_heap_strcache_string_remove(heap, (duk_hstring *) h);

		/* remove the string (mark DELETED), could also call
		 * duk_heap_string_remove() but that would be slow and
		 * pointless because we already know the slot.
		 */
		heap->st[i] = DUK_STRTAB_DELETED_MARKER(heap);

		/* then free */
#if 1
		DUK_FREE(heap, (duk_heaphdr *) h);  /* no inner refs/allocs, just free directly */
#else
		duk_heap_free_heaphdr_raw(heap, (duk_heaphdr *) h);  /* this would be OK but unnecessary */
#endif
	}

#ifdef DUK_USE_DEBUG
	DUK_D(DUK_DPRINT("mark-and-sweep sweep stringtable: %d freed, %d kept",
	                 (int) count_free, (int) count_keep));
#endif
	*out_count_keep = count_keep;
}
DUK_LOCAL void duk__run_object_finalizers(duk_heap *heap, duk_small_uint_t flags) {
	duk_heaphdr *curr;
	duk_heaphdr *next;
#if defined(DUK_USE_DEBUG)
	duk_size_t count = 0;
#endif
	duk_hthread *thr;

	DUK_DD(DUK_DDPRINT("duk__run_object_finalizers: %p", (void *) heap));

	thr = duk__get_temp_hthread(heap);
	DUK_ASSERT(thr != NULL);

	curr = heap->finalize_list;
	while (curr) {
		DUK_DDD(DUK_DDDPRINT("mark-and-sweep finalize: %p", (void *) curr));

		DUK_ASSERT(DUK_HEAPHDR_GET_TYPE(curr) == DUK_HTYPE_OBJECT);  /* only objects have finalizers */
		DUK_ASSERT(!DUK_HEAPHDR_HAS_REACHABLE(curr));                /* flags have been already cleared */
		DUK_ASSERT(!DUK_HEAPHDR_HAS_TEMPROOT(curr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(curr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(curr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_READONLY(curr));                 /* No finalizers for ROM objects */

		if (DUK_LIKELY((flags & DUK_MS_FLAG_SKIP_FINALIZERS) == 0)) {
			/* Run the finalizer, duk_hobject_run_finalizer() sets FINALIZED.
			 * Next mark-and-sweep will collect the object unless it has
			 * become reachable (i.e. rescued).  FINALIZED prevents the
			 * finalizer from being executed again before that.
			 */
			duk_hobject_run_finalizer(thr, (duk_hobject *) curr);  /* must never longjmp */
			DUK_ASSERT(DUK_HEAPHDR_HAS_FINALIZED(curr));
		} else {
			/* Used during heap destruction: don't actually run finalizers
			 * because we're heading into forced finalization.  Instead,
			 * queue finalizable objects back to the heap_allocated list.
			 */
			DUK_D(DUK_DPRINT("skip finalizers flag set, queue object to heap_allocated without finalizing"));
			DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(curr));
		}

		/* queue back to heap_allocated */
		next = DUK_HEAPHDR_GET_NEXT(heap, curr);
		DUK_HEAP_INSERT_INTO_HEAP_ALLOCATED(heap, curr);

		curr = next;
#if defined(DUK_USE_DEBUG)
		count++;
#endif
	}

	/* finalize_list will always be processed completely */
	heap->finalize_list = NULL;

#if defined(DUK_USE_DEBUG)
	DUK_D(DUK_DPRINT("mark-and-sweep finalize objects: %ld finalizers called", (long) count));
#endif
}
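/* Usage illustration (not part of the Duktape source): the finalizers run
 * by the function above are registered through the public API.  Once the
 * FINALIZED flag is set, a rescued object's finalizer is not re-run until
 * the object becomes unreachable again.  Minimal registration sketch:
 */
#include <stdio.h>
#include "duktape.h"

static duk_ret_t my_finalizer(duk_context *ctx) {
	/* Index 0 is the object being finalized. */
	printf("object finalized\n");
	return 0;
}

static void finalizer_example(duk_context *ctx) {
	duk_push_object(ctx);                          /* object to track */
	duk_push_c_function(ctx, my_finalizer, 1 /*nargs*/);
	duk_set_finalizer(ctx, -2);                    /* attach finalizer to object */
	duk_pop(ctx);                                  /* drop last reference; with refcounting
	                                                * the finalizer may run right here,
	                                                * otherwise mark-and-sweep triggers it */
	duk_gc(ctx, 0);
}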
DUK_INTERNAL duk_ret_t duk_bi_object_constructor_seal_freeze_shared(duk_context *ctx) {
	duk_hthread *thr = (duk_hthread *) ctx;
	duk_hobject *h;
	duk_bool_t is_freeze;

	DUK_ASSERT_TOP(ctx, 1);

	is_freeze = (duk_bool_t) duk_get_current_magic(ctx);
	if (duk_is_buffer(ctx, 0)) {
		/* Plain buffer: already sealed, but not frozen (and can't be
		 * frozen because index properties can't be made non-writable).
		 */
		if (is_freeze) {
			goto fail_cannot_freeze;
		}
		return 1;
	} else if (duk_is_lightfunc(ctx, 0)) {
		/* Lightfunc: already sealed and frozen, success. */
		return 1;
	}
#if 0
	/* Seal/freeze are quite rare in practice so it'd be nice to get the
	 * correct behavior simply via automatic promotion (at the cost of some
	 * memory churn).  However, the promoted objects don't behave the same,
	 * e.g. promoted lightfuncs are extensible.
	 */
	h = duk_require_hobject_promote_mask(ctx, 0, DUK_TYPE_MASK_LIGHTFUNC | DUK_TYPE_MASK_BUFFER);
#endif

	h = duk_get_hobject(ctx, 0);
	if (h == NULL) {
		/* ES2015 Sections 19.1.2.5, 19.1.2.17 */
		return 1;
	}

	if (is_freeze && DUK_HOBJECT_IS_BUFOBJ(h)) {
		/* Buffer objects cannot be frozen because there's no internal
		 * support for making virtual array indices non-writable.
		 */
		DUK_DD(DUK_DDPRINT("cannot freeze a buffer object"));
		goto fail_cannot_freeze;
	}

	duk_hobject_object_seal_freeze_helper(thr, h, is_freeze);

	/* Sealed and frozen objects cannot gain any more properties,
	 * so this is a good time to compact them.
	 */
	duk_hobject_compact_props(thr, h);
	return 1;

 fail_cannot_freeze:
	DUK_DCERROR_TYPE_INVALID_ARGS(thr);  /* XXX: proper error message */
}
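/* Behavior illustration (not part of the Duktape source): Object.seal()
 * and Object.freeze() both land in the shared helper above, with the magic
 * value selecting freeze semantics.  A minimal sketch of the script-visible
 * result:
 */
#include "duktape.h"

static void freeze_example(duk_context *ctx) {
	duk_eval_string(ctx,
	    "(function () {\n"
	    "    var o = Object.freeze({ x: 1 });\n"
	    "    o.x = 2;  /* silently ignored in non-strict code */\n"
	    "    return Object.isFrozen(o) && o.x === 1;  /* -> true */\n"
	    "})()");
	duk_pop(ctx);
}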
DUK_LOCAL void duk__mark_refzero_list(duk_heap *heap) {
	duk_heaphdr *hdr;

	DUK_DD(DUK_DDPRINT("duk__mark_refzero_list: %p", (void *) heap));

	hdr = heap->refzero_list;
	while (hdr) {
		duk__mark_heaphdr(heap, hdr);
		hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
	}
}
DUK_LOCAL void duk__run_voluntary_gc(duk_heap *heap) {
	if (DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap)) {
		DUK_DD(DUK_DDPRINT("mark-and-sweep in progress -> skip voluntary mark-and-sweep now"));
	} else {
		duk_small_uint_t flags;
		duk_bool_t rc;

		DUK_D(DUK_DPRINT("triggering voluntary mark-and-sweep"));
		flags = 0;
		rc = duk_heap_mark_and_sweep(heap, flags);
		DUK_UNREF(rc);
	}
}
DUK_LOCAL void duk__clear_finalize_list_flags(duk_heap *heap) {
	duk_heaphdr *hdr;

	DUK_DD(DUK_DDPRINT("duk__clear_finalize_list_flags: %p", (void *) heap));

	hdr = heap->finalize_list;
	while (hdr) {
		DUK_HEAPHDR_CLEAR_REACHABLE(hdr);
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(hdr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(hdr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_TEMPROOT(hdr));
		hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
	}
}
#if defined(DUK_USE_DEBUG)
DUK_LOCAL void duk__compact_object_list(duk_heap *heap, duk_hthread *thr, duk_heaphdr *start, duk_size_t *p_count_check, duk_size_t *p_count_compact, duk_size_t *p_count_bytes_saved) {
#else
DUK_LOCAL void duk__compact_object_list(duk_heap *heap, duk_hthread *thr, duk_heaphdr *start) {
#endif
	duk_heaphdr *curr;
#if defined(DUK_USE_DEBUG)
	duk_size_t old_size, new_size;
#endif
	duk_hobject *obj;

	DUK_UNREF(heap);

	curr = start;
	while (curr) {
		DUK_DDD(DUK_DDDPRINT("mark-and-sweep compact: %p", (void *) curr));

		if (DUK_HEAPHDR_GET_TYPE(curr) != DUK_HTYPE_OBJECT) {
			goto next;
		}
		obj = (duk_hobject *) curr;

#if defined(DUK_USE_DEBUG)
		old_size = DUK_HOBJECT_P_COMPUTE_SIZE(DUK_HOBJECT_GET_ESIZE(obj),
		                                      DUK_HOBJECT_GET_ASIZE(obj),
		                                      DUK_HOBJECT_GET_HSIZE(obj));
#endif

		DUK_DD(DUK_DDPRINT("compact object: %p", (void *) obj));
		duk_push_hobject((duk_context *) thr, obj);
		/* XXX: disable error handlers for duration of compaction? */
		duk_safe_call((duk_context *) thr, duk__protected_compact_object, NULL, 1, 0);

#if defined(DUK_USE_DEBUG)
		new_size = DUK_HOBJECT_P_COMPUTE_SIZE(DUK_HOBJECT_GET_ESIZE(obj),
		                                      DUK_HOBJECT_GET_ASIZE(obj),
		                                      DUK_HOBJECT_GET_HSIZE(obj));
#endif

#if defined(DUK_USE_DEBUG)
		(*p_count_compact)++;
		(*p_count_bytes_saved) += (duk_size_t) (old_size - new_size);
#endif

	 next:
		curr = DUK_HEAPHDR_GET_NEXT(heap, curr);
#if defined(DUK_USE_DEBUG)
		(*p_count_check)++;
#endif
	}
}
DUK_LOCAL void duk__clear_refzero_list_flags(duk_heap *heap) {
	duk_heaphdr *hdr;

	DUK_DD(DUK_DDPRINT("duk__clear_refzero_list_flags: %p", (void *) heap));

	hdr = heap->refzero_list;
	while (hdr) {
		DUK_HEAPHDR_CLEAR_REACHABLE(hdr);
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(hdr));
		/* DUK_HEAPHDR_HAS_FINALIZED may or may not be set. */
		DUK_ASSERT(!DUK_HEAPHDR_HAS_TEMPROOT(hdr));
		hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
	}
}
DUK_LOCAL duk_ret_t duk__refcount_fake_finalizer(duk_context *ctx) {
	DUK_UNREF(ctx);
	DUK_D(DUK_DPRINT("fake refcount torture finalizer executed"));
#if 0
	DUK_DD(DUK_DDPRINT("fake torture finalizer for: %!T", duk_get_tval(ctx, 0)));
#endif

	/* Require a lot of stack to force a value stack grow/shrink. */
	duk_require_stack(ctx, 100000);

	/* XXX: do something to force a callstack grow/shrink, perhaps
	 * just a manual forced resize?
	 */
	return 0;
}
DUK_LOCAL void duk__sweep_stringtable_chain(duk_heap *heap, duk_size_t *out_count_keep) {
	duk_strtab_entry *e;
	duk_uint_fast32_t i;
	duk_size_t count_free = 0;
	duk_size_t count_keep = 0;
	duk_size_t j, n;
#if defined(DUK_USE_HEAPPTR16)
	duk_uint16_t *lst;
#else
	duk_hstring **lst;
#endif

	DUK_DD(DUK_DDPRINT("duk__sweep_stringtable: %p", (void *) heap));

	/* Non-zero refcounts should not happen for unreachable strings,
	 * because we refcount finalize all unreachable objects which
	 * should have decreased unreachable string refcounts to zero
	 * (even for cycles).
	 */

	for (i = 0; i < DUK_STRTAB_CHAIN_SIZE; i++) {
		e = heap->strtable + i;
		if (e->listlen == 0) {
#if defined(DUK_USE_HEAPPTR16)
			duk__sweep_string_chain16(heap, &e->u.str16, &count_keep, &count_free);
#else
			duk__sweep_string_chain(heap, &e->u.str, &count_keep, &count_free);
#endif
		} else {
#if defined(DUK_USE_HEAPPTR16)
			lst = (duk_uint16_t *) DUK_USE_HEAPPTR_DEC16(heap->heap_udata, e->u.strlist16);
#else
			lst = e->u.strlist;
#endif
			for (j = 0, n = e->listlen; j < n; j++) {
#if defined(DUK_USE_HEAPPTR16)
				duk__sweep_string_chain16(heap, lst + j, &count_keep, &count_free);
#else
				duk__sweep_string_chain(heap, lst + j, &count_keep, &count_free);
#endif
			}
		}
	}

	DUK_D(DUK_DPRINT("mark-and-sweep sweep stringtable: %ld freed, %ld kept",
	                 (long) count_free, (long) count_keep));
	*out_count_keep = count_keep;
}
static void duk__run_object_finalizers(duk_heap *heap) {
	duk_heaphdr *curr;
	duk_heaphdr *next;
#ifdef DUK_USE_DEBUG
	int count = 0;
#endif
	duk_hthread *thr;

	DUK_DD(DUK_DDPRINT("duk__run_object_finalizers: %p", (void *) heap));

	thr = duk__get_temp_hthread(heap);
	DUK_ASSERT(thr != NULL);

	curr = heap->finalize_list;
	while (curr) {
		DUK_DDD(DUK_DDDPRINT("mark-and-sweep finalize: %p", (void *) curr));

		DUK_ASSERT(DUK_HEAPHDR_GET_TYPE(curr) == DUK_HTYPE_OBJECT);  /* only objects have finalizers */
		DUK_ASSERT(!DUK_HEAPHDR_HAS_REACHABLE(curr));                /* flags have been already cleared */
		DUK_ASSERT(!DUK_HEAPHDR_HAS_TEMPROOT(curr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(curr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(curr));

		/* run the finalizer */
		duk_hobject_run_finalizer(thr, (duk_hobject *) curr);  /* must never longjmp */

		/* mark FINALIZED, for next mark-and-sweep (will collect unless has
		 * become reachable; prevent running finalizer again if reachable)
		 */
		DUK_HEAPHDR_SET_FINALIZED(curr);

		/* queue back to heap_allocated */
		next = DUK_HEAPHDR_GET_NEXT(curr);
		DUK_HEAP_INSERT_INTO_HEAP_ALLOCATED(heap, curr);

		curr = next;
#ifdef DUK_USE_DEBUG
		count++;
#endif
	}

	/* finalize_list will always be processed completely */
	heap->finalize_list = NULL;

#ifdef DUK_USE_DEBUG
	DUK_D(DUK_DPRINT("mark-and-sweep finalize objects: %d finalizers called", count));
#endif
}
DUK_INTERNAL void duk_heap_strcache_string_remove(duk_heap *heap, duk_hstring *h) {
	duk_small_int_t i;

	for (i = 0; i < DUK_HEAP_STRCACHE_SIZE; i++) {
		duk_strcache *c = heap->strcache + i;
		if (c->h == h) {
			DUK_DD(DUK_DDPRINT("deleting weak strcache reference to hstring %p from heap %p",
			                   (void *) h, (void *) heap));
			c->h = NULL;

			/* XXX: the string shouldn't appear twice, but we now loop to the
			 * end anyway; if fixed, add a looping assertion to ensure there
			 * is no duplicate.
			 */
		}
	}
}
DUK_LOCAL duk_hstring *duk__do_lookup(duk_heap *heap, const duk_uint8_t *str, duk_uint32_t blen, duk_uint32_t *out_strhash) {
	duk_hstring *res;

	DUK_ASSERT(out_strhash);

	*out_strhash = duk_heap_hashstring(heap, str, (duk_size_t) blen);

#if defined(DUK_USE_ROM_STRINGS)
	{
		duk_small_uint_t i;
		/* XXX: This is VERY inefficient now, and should be e.g. a
		 * binary search or perfect hash, to be fixed.
		 */
		for (i = 0; i < (duk_small_uint_t) (sizeof(duk_rom_strings) / sizeof(duk_hstring *)); i++) {
			duk_hstring *romstr;
			romstr = (duk_hstring *) duk_rom_strings[i];
			if (blen == DUK_HSTRING_GET_BYTELEN(romstr) &&
			    DUK_MEMCMP(str, (void *) DUK_HSTRING_GET_DATA(romstr), blen) == 0) {
				DUK_DD(DUK_DDPRINT("intern check: rom string: %!O, computed hash 0x%08lx, rom hash 0x%08lx",
				                   romstr, (unsigned long) *out_strhash, (unsigned long) DUK_HSTRING_GET_HASH(romstr)));
				DUK_ASSERT(*out_strhash == DUK_HSTRING_GET_HASH(romstr));
				*out_strhash = DUK_HSTRING_GET_HASH(romstr);
				return romstr;
			}
		}
	}
#endif  /* DUK_USE_ROM_STRINGS */

#if defined(DUK_USE_STRTAB_CHAIN)
	res = duk__find_matching_string_chain(heap, str, blen, *out_strhash);
#elif defined(DUK_USE_STRTAB_PROBE)
	res = duk__find_matching_string_probe(heap,
#if defined(DUK_USE_HEAPPTR16)
	                                      heap->strtable16,
#else
	                                      heap->strtable,
#endif
	                                      heap->st_size,
	                                      str,
	                                      blen,
	                                      *out_strhash);
#else
#error internal error, invalid strtab options
#endif

	return res;
}
DUK_LOCAL DUK_INLINE void duk__check_voluntary_gc(duk_heap *heap) {
	if (DUK_UNLIKELY(--(heap)->ms_trigger_counter < 0)) {
#if defined(DUK_USE_DEBUG)
		if (heap->ms_prevent_count == 0) {
			DUK_D(DUK_DPRINT("triggering voluntary mark-and-sweep"));
		} else {
			DUK_DD(DUK_DDPRINT("gc blocked -> skip voluntary mark-and-sweep now"));
		}
#endif

		/* Prevention checks in the call target handle cases where
		 * voluntary GC is not allowed.  The voluntary GC trigger
		 * counter is only rewritten if mark-and-sweep actually runs.
		 */
		duk_heap_mark_and_sweep(heap, DUK_MS_FLAG_VOLUNTARY /*flags*/);
	}
}
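/* Embedder-side illustration (not part of the Duktape source): voluntary
 * GC is triggered internally by the countdown above, but an embedder can
 * also force a full mark-and-sweep at a convenient point via the public
 * API.  Two passes help when finalizers release further references:
 */
#include "duktape.h"

static void force_gc_example(duk_context *ctx) {
	duk_gc(ctx, 0);  /* first pass: collect garbage, may queue finalizers */
	duk_gc(ctx, 0);  /* second pass: collect objects freed by finalizers */
}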
static void duk__mark_roots_heap(duk_heap *heap) {
	int i;

	DUK_DD(DUK_DDPRINT("duk__mark_roots_heap: %p", (void *) heap));

	duk__mark_heaphdr(heap, (duk_heaphdr *) heap->heap_thread);
	duk__mark_heaphdr(heap, (duk_heaphdr *) heap->heap_object);
	duk__mark_heaphdr(heap, (duk_heaphdr *) heap->log_buffer);

	for (i = 0; i < DUK_HEAP_NUM_STRINGS; i++) {
		duk_hstring *h = heap->strs[i];
		duk__mark_heaphdr(heap, (duk_heaphdr *) h);
	}

	duk__mark_tval(heap, &heap->lj.value1);
	duk__mark_tval(heap, &heap->lj.value2);
}
DUK_INTERNAL void duk_err_longjmp(duk_hthread *thr) {
	DUK_ASSERT(thr != NULL);

	DUK_DD(DUK_DDPRINT("longjmp error: type=%d iserror=%d value1=%!T value2=%!T",
	                   (int) thr->heap->lj.type, (int) thr->heap->lj.iserror,
	                   &thr->heap->lj.value1, &thr->heap->lj.value2));

#if !defined(DUK_USE_CPP_EXCEPTIONS)
	/* If we don't have a jmpbuf_ptr, there is little we can do except
	 * cause a fatal error.  The caller's expectation is that we never
	 * return.
	 *
	 * With C++ exceptions we now just propagate an uncaught error
	 * instead of invoking the fatal error handler.  Because there's
	 * a dummy jmpbuf for C++ exceptions now, this could be changed.
	 */
	if (!thr->heap->lj.jmpbuf_ptr) {
		DUK_D(DUK_DPRINT("uncaught error: type=%d iserror=%d value1=%!T value2=%!T",
		                 (int) thr->heap->lj.type, (int) thr->heap->lj.iserror,
		                 &thr->heap->lj.value1, &thr->heap->lj.value2));

#if defined(DUK_USE_PREFER_SIZE)
		duk__uncaught_minimal(thr);
#else
		duk__uncaught_error_aware(thr);
#endif
		DUK_UNREACHABLE();
	}
#endif  /* DUK_USE_CPP_EXCEPTIONS */

#if defined(DUK_USE_CPP_EXCEPTIONS)
	{
		duk_internal_exception exc;  /* dummy */
		throw exc;
	}
#else  /* DUK_USE_CPP_EXCEPTIONS */
	DUK_LONGJMP(thr->heap->lj.jmpbuf_ptr->jb);
#endif  /* DUK_USE_CPP_EXCEPTIONS */

	DUK_UNREACHABLE();
}
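/* Usage illustration (not part of the Duktape source): an error longjmp
 * with no jmpbuf established ends up in the fatal error handler, so
 * embedders should enter the engine via protected calls, which set up a
 * catch point for the longjmp above:
 */
#include <stdio.h>
#include "duktape.h"

static void protected_call_example(duk_context *ctx) {
	if (duk_peval_string(ctx, "throw new Error('aiee');") != 0) {
		/* The protected call caught the error; it is now on the stack. */
		printf("eval failed: %s\n", duk_safe_to_string(ctx, -1));
	}
	duk_pop(ctx);  /* pop result or error */
}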
DUK_INTERNAL void duk_heap_dump_strtab(duk_heap *heap) {
	duk_uint32_t i;
	duk_hstring *h;

	DUK_ASSERT(heap != NULL);
#if defined(DUK_USE_HEAPPTR16)
	DUK_ASSERT(heap->strtable16 != NULL);
#else
	DUK_ASSERT(heap->strtable != NULL);
#endif

	for (i = 0; i < heap->st_size; i++) {
#if defined(DUK_USE_HEAPPTR16)
		h = (duk_hstring *) DUK_USE_HEAPPTR_DEC16(heap->strtable16[i]);
#else
		h = heap->strtable[i];
#endif
		DUK_DD(DUK_DDPRINT("[%03d] -> %p", (int) i, (void *) h));
	}
}
DUK_LOCAL duk_bool_t duk__init_heap_thread(duk_heap *heap) {
	duk_hthread *thr;

	DUK_DD(DUK_DDPRINT("heap init: alloc heap thread"));
	thr = duk_hthread_alloc(heap,
	                        DUK_HOBJECT_FLAG_EXTENSIBLE |
	                        DUK_HOBJECT_FLAG_THREAD |
	                        DUK_HOBJECT_CLASS_AS_FLAGS(DUK_HOBJECT_CLASS_THREAD));
	if (!thr) {
		DUK_D(DUK_DPRINT("failed to alloc heap_thread"));
		return 0;
	}
	thr->state = DUK_HTHREAD_STATE_INACTIVE;
#if defined(DUK_USE_ROM_STRINGS)
	/* No strs[] pointer. */
#else  /* DUK_USE_ROM_STRINGS */
#if defined(DUK_USE_HEAPPTR16)
	thr->strs16 = heap->strs16;
#else
	thr->strs = heap->strs;
#endif
#endif  /* DUK_USE_ROM_STRINGS */

	heap->heap_thread = thr;
	DUK_HTHREAD_INCREF(thr, thr);  /* Note: first argument not really used */

	/* 'thr' is now reachable */

	if (!duk_hthread_init_stacks(heap, thr)) {
		return 0;
	}

	/* XXX: this may now fail, and is not handled correctly */
	duk_hthread_create_builtin_objects(thr);

	/* default prototype (Note: 'thr' must be reachable) */
	DUK_HOBJECT_SET_PROTOTYPE_UPDREF(thr, (duk_hobject *) thr, thr->builtins[DUK_BIDX_THREAD_PROTOTYPE]);

	return 1;
}
DUK_LOCAL void duk__mark_roots_heap(duk_heap *heap) {
	duk_small_uint_t i;

	DUK_DD(DUK_DDPRINT("duk__mark_roots_heap: %p", (void *) heap));

	duk__mark_heaphdr(heap, (duk_heaphdr *) heap->heap_thread);
	duk__mark_heaphdr(heap, (duk_heaphdr *) heap->heap_object);

	for (i = 0; i < DUK_HEAP_NUM_STRINGS; i++) {
		duk_hstring *h = DUK_HEAP_GET_STRING(heap, i);
		duk__mark_heaphdr(heap, (duk_heaphdr *) h);
	}

	duk__mark_tval(heap, &heap->lj.value1);
	duk__mark_tval(heap, &heap->lj.value2);

#if defined(DUK_USE_DEBUGGER_SUPPORT)
	for (i = 0; i < heap->dbg_breakpoint_count; i++) {
		duk__mark_heaphdr(heap, (duk_heaphdr *) heap->dbg_breakpoints[i].filename);
	}
#endif
}