void *duk_heap_mem_alloc_checked(duk_hthread *thr, size_t size, const char *filename, int line) { #else void *duk_heap_mem_alloc_checked(duk_hthread *thr, size_t size) { #endif void *res; DUK_ASSERT(thr != NULL); DUK_ASSERT_DISABLE(size >= 0); res = DUK_ALLOC(thr->heap, size); if (!res) { #ifdef DUK_USE_VERBOSE_ERRORS DUK_ERROR_RAW(filename, line, thr, DUK_ERR_ALLOC_ERROR, "memory alloc failed"); #else DUK_ERROR(thr, DUK_ERR_ALLOC_ERROR, "memory alloc failed"); #endif } return res; }
/* Duktape.fin(): get or set the internal finalizer property of an object.
 * With a single argument the current finalizer is pushed (get); with two
 * or more arguments, argument 1 is stored as the finalizer (set).
 */
duk_ret_t duk_bi_duktape_object_fin(duk_context *ctx) {
	duk_idx_t ntop;

	(void) duk_require_hobject(ctx, 0);

	ntop = duk_get_top(ctx);
	if (ntop <= 1) {
		/* Get. */
		DUK_ASSERT(ntop == 1);
		duk_get_prop_stridx(ctx, 0, DUK_STRIDX_INT_FINALIZER);
		return 1;
	}

	/* Set: currently a finalizer is disabled by setting it to undefined;
	 * this does not remove the property at the moment.  The value could
	 * be type checked to be either a function or something else; if
	 * something else, the property could be deleted.
	 */
	duk_set_top(ctx, 2);
	(void) duk_put_prop_stridx(ctx, 0, DUK_STRIDX_INT_FINALIZER);
	return 0;
}
/* Push the internal prototype of the object at 'index'; pushes undefined
 * when the object has no prototype.  Throws if 'index' is not an object.
 */
DUK_EXTERNAL void duk_get_prototype(duk_context *ctx, duk_idx_t index) {
	duk_hthread *thr = (duk_hthread *) ctx;
	duk_hobject *h_obj;
	duk_hobject *h_proto;

	DUK_ASSERT_CTX_VALID(ctx);
	DUK_UNREF(thr);

	h_obj = duk_require_hobject(ctx, index);
	DUK_ASSERT(h_obj != NULL);

	/* XXX: shared helper for duk_push_hobject_or_undefined()? */
	h_proto = DUK_HOBJECT_GET_PROTOTYPE(thr->heap, h_obj);
	if (h_proto == NULL) {
		duk_push_undefined(ctx);
	} else {
		duk_push_hobject(ctx, h_proto);
	}
}
/* Compile source into an ECMAScript function pushed on the value stack.
 * Value stack input [ ... source? filename? ] depends on 'flags'.  With
 * DUK_COMPILE_SAFE compilation runs as a protected call and the result
 * code is returned; otherwise compile errors are thrown.
 *
 * NOTE(review): this variant passes comp_args as an extra argument to
 * duk_safe_call() (5 arguments), which does not match the 4-argument
 * duk_safe_call() defined elsewhere in this file -- confirm the two come
 * from the same Duktape version.
 */
DUK_EXTERNAL duk_int_t duk_compile_raw(duk_context *ctx, const char *src_buffer, duk_size_t src_length, duk_uint_t flags) {
	duk__compile_raw_args comp_args_alloc;
	duk__compile_raw_args *comp_args = &comp_args_alloc;

	DUK_ASSERT_CTX_VALID(ctx);

	if ((flags & DUK_COMPILE_STRLEN) && (src_buffer != NULL)) {
		/* String length is computed here to avoid multiple evaluation
		 * of a macro argument in the calling side.
		 */
		src_length = DUK_STRLEN(src_buffer);
	}

	/* Pack arguments into a struct so the protected helper can receive
	 * them through a single pointer.
	 */
	comp_args->src_buffer = (const duk_uint8_t *) src_buffer;
	comp_args->src_length = src_length;
	comp_args->flags = flags;

	/* [ ... source? filename? ] (depends on flags) */

	if (flags & DUK_COMPILE_SAFE) {
		duk_int_t rc;
		duk_int_t nargs;
		duk_int_t nrets = 1;

		/* Arguments can be: [ source? filename? &comp_args] so that
		 * nargs is 1 to 3.  Call site encodes the correct nargs count
		 * directly into flags (low 3 bits).
		 */
		nargs = flags & 0x07;
		DUK_ASSERT(nargs == ((flags & DUK_COMPILE_NOSOURCE) ? 0 : 1) + ((flags & DUK_COMPILE_NOFILENAME) ? 0 : 1));
		rc = duk_safe_call(ctx, duk__do_compile, (void *) comp_args, nargs, nrets);

		/* [ ... closure ] */
		return rc;
	}

	/* Unprotected compile: errors are thrown to the caller. */
	(void) duk__do_compile(ctx, (void *) comp_args);

	/* [ ... closure ] */
	return DUK_EXEC_SUCCESS;
}
/* Force a string table resize to flush out DELETED entries, suppressing
 * mark-and-sweep side effects for the duration of the resize.
 */
DUK_INTERNAL void duk_heap_force_strtab_resize(duk_heap *heap) {
	duk_small_uint_t prev_mark_and_sweep_base_flags;

	/* Force a resize so that DELETED entries are eliminated.
	 * Another option would be duk__recheck_strtab_size_probe();
	 * but since that happens on every intern anyway, this whole
	 * check can now be disabled.
	 */

	DUK_ASSERT((heap->mark_and_sweep_base_flags & DUK_MS_FLAG_NO_STRINGTABLE_RESIZE) == 0);
	/* Save base flags and prevent GC side effects during the resize;
	 * restored below.  (Exact flags set by the macro are defined
	 * elsewhere -- presumably includes NO_STRINGTABLE_RESIZE.)
	 */
	prev_mark_and_sweep_base_flags = heap->mark_and_sweep_base_flags;
	DUK__PREVENT_MS_SIDE_EFFECTS(heap);

#if defined(DUK_USE_STRTAB_CHAIN)
	/* Chain-based string table: no global resize operation here. */
	DUK_UNREF(heap);
#elif defined(DUK_USE_STRTAB_PROBE)
	(void) duk__resize_strtab_probe(heap);
#endif

	heap->mark_and_sweep_base_flags = prev_mark_and_sweep_base_flags;
}
/* Object.prototype.isPrototypeOf(V): push true if 'this' (coerced to an
 * object) appears on the prototype chain of argument V; false for any
 * non-object argument.
 */
duk_ret_t duk_bi_object_prototype_is_prototype_of(duk_context *ctx) {
	duk_hthread *thr = (duk_hthread *) ctx;
	duk_hobject *h_arg;
	duk_hobject *h_this;
	duk_bool_t contains;

	DUK_ASSERT_TOP(ctx, 1);

	h_arg = duk_get_hobject(ctx, 0);
	if (h_arg == NULL) {
		/* Non-object argument: always false. */
		duk_push_false(ctx);  /* XXX: tail call: return duk_push_false(ctx) */
		return 1;
	}

	h_this = duk_push_this_coercible_to_object(ctx);
	DUK_ASSERT(h_this != NULL);

	/* E5.1 Section 15.2.4.6, step 3.a, lookup proto once before compare */
	contains = duk_hobject_prototype_chain_contains(thr, h_arg->prototype, h_this);
	duk_push_boolean(ctx, contains);
	return 1;
}
/* Object.preventExtensions(): clear the extensible flag of the argument
 * and compact its property table.  Lightfuncs trivially succeed (they are
 * never extensible).
 */
DUK_INTERNAL duk_ret_t duk_bi_object_constructor_prevent_extensions(duk_context *ctx) {
	duk_hthread *thr = (duk_hthread *) ctx;
	duk_hobject *h_obj;

	h_obj = duk_require_hobject_or_lfunc(ctx, 0);
	if (h_obj == NULL) {
		/* Lightfunc, always success. */
		return 1;
	}
	DUK_ASSERT(h_obj != NULL);

	DUK_HOBJECT_CLEAR_EXTENSIBLE(h_obj);

	/* A non-extensible object cannot gain any more properties,
	 * so this is a good time to compact.
	 */
	duk_hobject_compact_props(thr, h_obj);
	return 1;
}
/* Call a function with an explicit 'this' binding; value stack layout is
 * [ ... func this arg1 ... argN ].  Throws on an invalid argument count
 * or a value stack too small to hold func/this/args.
 */
DUK_EXTERNAL void duk_call_method(duk_context *ctx, duk_idx_t nargs) {
	duk_hthread *thr = (duk_hthread *) ctx;
	duk_idx_t idx_func;

	DUK_ASSERT_CTX_VALID(ctx);
	DUK_ASSERT(thr != NULL);

	idx_func = duk_get_top(ctx) - nargs - 2;  /* must work for nargs <= 0 */
	if (nargs < 0 || idx_func < 0) {
		/* Note that we can't reliably pop anything here. */
		DUK_ERROR_TYPE_INVALID_ARGS(thr);
	}

	/* Flags 0: not protected, respect reclimit, not constructor. */
	duk_handle_call_unprotected(thr,   /* thread */
	                            nargs, /* num_stack_args */
	                            0);    /* call_flags */
}
/* Run 'func' as a protected call with 'nargs' stack arguments and 'nrets'
 * result slots; returns DUK_EXEC_SUCCESS or DUK_EXEC_ERROR rather than
 * throwing (except for invalid call arguments, which still throw).
 */
DUK_EXTERNAL duk_int_t duk_safe_call(duk_context *ctx, duk_safe_call_function func, duk_idx_t nargs, duk_idx_t nrets) {
	duk_hthread *thr = (duk_hthread *) ctx;

	DUK_ASSERT_CTX_VALID(ctx);
	DUK_ASSERT(thr != NULL);

	if (nrets < 0 || duk_get_top(ctx) < nargs) {
		/* See comments in duk_pcall(). */
		DUK_ERROR(thr, DUK_ERR_API_ERROR, DUK_STR_INVALID_CALL_ARGS);
		return DUK_EXEC_ERROR;  /* unreachable */
	}

	return duk_handle_safe_call(thr,    /* thread */
	                            func,   /* func */
	                            nargs,  /* num_stack_args */
	                            nrets); /* num_stack_res */
}
/* Perform a long control transfer using the error state already recorded
 * in thr->heap->lj (type, iserror, value1, value2).  Never returns: it
 * either longjmps to the active jmpbuf, throws a C++ dummy exception, or
 * enters the uncaught-error path.
 */
DUK_INTERNAL void duk_err_longjmp(duk_hthread *thr) {
	DUK_ASSERT(thr != NULL);

	DUK_DD(DUK_DDPRINT("longjmp error: type=%d iserror=%d value1=%!T value2=%!T",
	                   (int) thr->heap->lj.type, (int) thr->heap->lj.iserror,
	                   &thr->heap->lj.value1, &thr->heap->lj.value2));

#if !defined(DUK_USE_CPP_EXCEPTIONS)
	/* If we don't have a jmpbuf_ptr, there is little we can do except
	 * cause a fatal error.  The caller's expectation is that we never
	 * return.
	 *
	 * With C++ exceptions we now just propagate an uncaught error
	 * instead of invoking the fatal error handler.  Because there's
	 * a dummy jmpbuf for C++ exceptions now, this could be changed.
	 */
	if (!thr->heap->lj.jmpbuf_ptr) {
		DUK_D(DUK_DPRINT("uncaught error: type=%d iserror=%d value1=%!T value2=%!T",
		                 (int) thr->heap->lj.type, (int) thr->heap->lj.iserror,
		                 &thr->heap->lj.value1, &thr->heap->lj.value2));

		/* Size-optimized vs. error-aware uncaught handler. */
#if defined(DUK_USE_PREFER_SIZE)
		duk__uncaught_minimal(thr);
#else
		duk__uncaught_error_aware(thr);
#endif
		DUK_UNREACHABLE();
	}
#endif  /* DUK_USE_CPP_EXCEPTIONS */

#if defined(DUK_USE_CPP_EXCEPTIONS)
	{
		duk_internal_exception exc;  /* dummy */
		throw exc;
	}
#else  /* DUK_USE_CPP_EXCEPTIONS */
	DUK_LONGJMP(thr->heap->lj.jmpbuf_ptr->jb);
#endif  /* DUK_USE_CPP_EXCEPTIONS */

	DUK_UNREACHABLE();
}
/* Push 'this' binding, check that it is a Date object; then push the
 * internal time value.  At the end, stack is: [ ... this timeval ].
 * Returns the time value.  Local time adjustment is done if requested.
 *
 * 'flags' selects NaN handling (FLAG_NAN_TO_ZERO / FLAG_NAN_TO_RANGE_ERROR)
 * and localtime conversion (FLAG_LOCALTIME).  'out_tzoffset' is optional;
 * when non-NULL it receives the applied tz offset in seconds (0 if no
 * localtime adjustment or time value is NaN).
 */
static double push_this_and_get_timeval_tzoffset(duk_context *ctx, int flags, int *out_tzoffset) {
	duk_hthread *thr = (duk_hthread *) ctx;
	duk_hobject *h;
	double d;
	int tzoffset = 0;

	duk_push_this(ctx);
	h = duk_get_hobject(ctx, -1);  /* FIXME: getter with class check, useful in built-ins */
	if (h == NULL || DUK_HOBJECT_GET_CLASS_NUMBER(h) != DUK_HOBJECT_CLASS_DATE) {
		/* TypeError for a non-Date 'this' (per Date.prototype methods). */
		DUK_ERROR(thr, DUK_ERR_TYPE_ERROR, "expected Date");
	}

	duk_get_prop_stridx(ctx, -1, DUK_STRIDX_INT_VALUE);
	d = duk_to_number(ctx, -1);
	duk_pop(ctx);

	if (DUK_ISNAN(d)) {
		if (flags & FLAG_NAN_TO_ZERO) {
			d = 0.0;
		}
		if (flags & FLAG_NAN_TO_RANGE_ERROR) {
			DUK_ERROR(thr, DUK_ERR_RANGE_ERROR, "Invalid Date");
		}
	}
	/* if no NaN handling flag, may still be NaN here, but not Inf */
	DUK_ASSERT(!DUK_ISINF(d));

	if (flags & FLAG_LOCALTIME) {
		/* Note: DST adjustment is determined using UTC time.
		 * If 'd' is NaN, tzoffset will be 0.
		 * NOTE(review): tzoffset * 1000 is int arithmetic; fits for
		 * realistic offsets (well under +/-24h) -- confirm range.
		 */
		tzoffset = GET_LOCAL_TZOFFSET(d);  /* seconds */
		d += tzoffset * 1000;
	}
	if (out_tzoffset) {
		*out_tzoffset = tzoffset;
	}

	/* [ ... this ] */
	return d;
}
/* Run finalizers for every object on heap->finalize_list, then move each
 * object back to heap_allocated.  Finalizer calls must never longjmp;
 * the next mark-and-sweep decides whether each object is rescued or
 * collected (FINALIZED flag prevents running the finalizer twice).
 */
static void duk__run_object_finalizers(duk_heap *heap) {
	duk_heaphdr *curr;
	duk_heaphdr *next;
#ifdef DUK_USE_DEBUG
	int count = 0;
#endif
	duk_hthread *thr;

	DUK_DD(DUK_DDPRINT("duk__run_object_finalizers: %p", (void *) heap));

	/* Any live thread can be used as finalizer execution context. */
	thr = duk__get_temp_hthread(heap);
	DUK_ASSERT(thr != NULL);

	curr = heap->finalize_list;
	while (curr) {
		DUK_DDD(DUK_DDDPRINT("mark-and-sweep finalize: %p", (void *) curr));

		DUK_ASSERT(DUK_HEAPHDR_GET_TYPE(curr) == DUK_HTYPE_OBJECT);  /* only objects have finalizers */
		DUK_ASSERT(!DUK_HEAPHDR_HAS_REACHABLE(curr));  /* flags have been already cleared */
		DUK_ASSERT(!DUK_HEAPHDR_HAS_TEMPROOT(curr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(curr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(curr));

		/* run the finalizer */
		duk_hobject_run_finalizer(thr, (duk_hobject *) curr);  /* must never longjmp */

		/* mark FINALIZED, for next mark-and-sweep (will collect unless has become reachable;
		 * prevent running finalizer again if reachable)
		 */
		DUK_HEAPHDR_SET_FINALIZED(curr);

		/* queue back to heap_allocated; 'next' must be read before the
		 * insert relinks the header.
		 */
		next = DUK_HEAPHDR_GET_NEXT(curr);
		DUK_HEAP_INSERT_INTO_HEAP_ALLOCATED(heap, curr);

		curr = next;
#ifdef DUK_USE_DEBUG
		count++;
#endif
	}

	/* finalize_list will always be processed completely */
	heap->finalize_list = NULL;

#ifdef DUK_USE_DEBUG
	DUK_D(DUK_DPRINT("mark-and-sweep finalize objects: %d finalizers called", count));
#endif
}
/* After a string table update, check whether the load factor is out of
 * bounds (too little free space or too few used entries) and resize if
 * so.  Returns DUK_ERR_OK when no resize is needed, otherwise the
 * resize_hash() result.
 */
static int recheck_hash_size(duk_heap *heap, duk_uint32_t new_used) {
	duk_uint32_t new_free;
	duk_uint32_t limit_free;
	duk_uint32_t limit_used;

	DUK_ASSERT(new_used <= heap->st_size);  /* grow by at most one */
	new_free = heap->st_size - new_used;  /* unsigned intentionally */

	/* new_free / size <= 1 / DIV  <=>  new_free <= size / DIV
	 * new_used / size <= 1 / DIV  <=>  new_used <= size / DIV
	 */
	limit_free = heap->st_size / DUK_STRTAB_MIN_FREE_DIVISOR;
	limit_used = heap->st_size / DUK_STRTAB_MIN_USED_DIVISOR;

	if (new_free > limit_free && new_used > limit_used) {
		/* Load factor within bounds; nothing to do. */
		return DUK_ERR_OK;
	}

	/* Load factor too low or high: count actually used entries and resize. */
	return resize_hash(heap);
}
/* Probe-variant load factor recheck: resize the string table when free
 * space or used-entry count falls below its divisor-derived threshold.
 * Returns 0 when no resize is needed, otherwise the
 * duk__resize_strtab_probe() result.
 */
DUK_LOCAL duk_bool_t duk__recheck_strtab_size_probe(duk_heap *heap, duk_uint32_t new_used) {
	duk_uint32_t new_free;
	duk_uint32_t limit_free;
	duk_uint32_t limit_used;

	DUK_ASSERT(new_used <= heap->st_size);  /* grow by at most one */
	new_free = heap->st_size - new_used;  /* unsigned intentionally */

	/* new_free / size <= 1 / DIV  <=>  new_free <= size / DIV
	 * new_used / size <= 1 / DIV  <=>  new_used <= size / DIV
	 */
	limit_free = heap->st_size / DUK_STRTAB_MIN_FREE_DIVISOR;
	limit_used = heap->st_size / DUK_STRTAB_MIN_USED_DIVISOR;

	if (new_free > limit_free && new_used > limit_used) {
		return 0;  /* OK: load factor within bounds */
	}

	/* Load factor too low or high: count actually used entries and resize. */
	return duk__resize_strtab_probe(heap);
}
/* Object.prototype.isPrototypeOf(V): push true if 'this' (coerced to an
 * object) is on V's prototype chain; false for non-object V.  Prototype
 * loops cause an error to be thrown (ignore_loop = 0).
 */
DUK_INTERNAL duk_ret_t duk_bi_object_prototype_is_prototype_of(duk_hthread *thr) {
	duk_hobject *h_arg;
	duk_hobject *h_this;

	DUK_ASSERT_TOP(thr, 1);

	h_arg = duk_get_hobject(thr, 0);
	if (h_arg == NULL) {
		/* Non-object argument: always false. */
		duk_push_false(thr);  /* XXX: tail call: return duk_push_false(thr) */
		return 1;
	}

	h_this = duk_push_this_coercible_to_object(thr);
	DUK_ASSERT(h_this != NULL);

	/* E5.1 Section 15.2.4.6, step 3.a, lookup proto once before compare.
	 * Prototype loops should cause an error to be thrown.
	 */
	duk_push_boolean(thr,
	                 duk_hobject_prototype_chain_contains(thr,
	                                                      DUK_HOBJECT_GET_PROTOTYPE(thr->heap, h_arg),
	                                                      h_this,
	                                                      0 /*ignore_loop*/));
	return 1;
}
/* [key] -> []: check whether the object at 'obj_index' has the property
 * named by the key on the stack top; pops the key and returns 1/0.
 */
DUK_EXTERNAL duk_bool_t duk_has_prop(duk_context *ctx, duk_idx_t obj_index) {
	duk_hthread *thr = (duk_hthread *) ctx;
	duk_tval *tv_obj;
	duk_tval *tv_key;
	duk_bool_t found;

	DUK_ASSERT_CTX_VALID(ctx);

	/* Note: copying tv_obj and tv_key to locals to shield against a valstack
	 * resize is not necessary for a property existence check right now.
	 */
	tv_obj = duk_require_tval(ctx, obj_index);
	tv_key = duk_require_tval(ctx, -1);
	found = duk_hobject_hasprop(thr, tv_obj, tv_key);
	DUK_ASSERT(found == 0 || found == 1);

	duk_pop(ctx);  /* remove key */
	return found;  /* 1 if property found, 0 otherwise */
}
/* Resize the string table based on the current number of actually used
 * entries; rehashing happens even if the size is unchanged so DELETED
 * entries get flushed.  Returns the duk__resize_strtab_raw() result
 * (zero on success).
 */
static int duk__resize_strtab(duk_heap *heap) {
	duk_uint32_t new_size;
	int ret;

	new_size = (duk_uint32_t) duk__count_used(heap);
	if (new_size >= 0x80000000U) {
		/* Clamp to the largest prime representable in 32 bits. */
		new_size = DUK_STRTAB_HIGHEST_32BIT_PRIME;
	} else {
		/* Grow the used count, then round to a hash-friendly prime.
		 * NOTE(review): the second duk_util_get_hash_prime() call looks
		 * redundant unless the helper returns a strictly larger prime
		 * for an already-prime input -- confirm intent.
		 */
		new_size = duk_util_get_hash_prime(DUK_STRTAB_GROW_ST_SIZE(new_size));
		new_size = duk_util_get_hash_prime(new_size);
	}
	DUK_ASSERT(new_size > 0);

	/* rehash even if old and new sizes are the same to get rid of
	 * DELETED entries.
	 */

	ret = duk__resize_strtab_raw(heap, new_size);
	return ret;
}
/* For thread dumping: map a duk_tval to a single summary character
 * describing its tag (and object subtype for objects).
 */
static char duk__get_tval_summary_char(duk_tval *tv) {
	switch (DUK_TVAL_GET_TAG(tv)) {
	case DUK_TAG_UNDEFINED:
		/* '.' marks the special "unused" undefined value. */
		return DUK_TVAL_IS_UNDEFINED_UNUSED(tv) ? '.' : 'u';
	case DUK_TAG_NULL:
		return 'n';
	case DUK_TAG_BOOLEAN:
		return 'b';
	case DUK_TAG_STRING:
		return 's';
	case DUK_TAG_OBJECT: {
		duk_hobject *obj = DUK_TVAL_GET_OBJECT(tv);

		if (DUK_HOBJECT_IS_ARRAY(obj)) {
			return 'A';
		}
		if (DUK_HOBJECT_IS_COMPILEDFUNCTION(obj)) {
			return 'C';
		}
		if (DUK_HOBJECT_IS_NATIVEFUNCTION(obj)) {
			return 'N';
		}
		if (DUK_HOBJECT_IS_THREAD(obj)) {
			return 'T';
		}
		return 'O';  /* plain object */
	}
	case DUK_TAG_BUFFER:
		return 'B';
	case DUK_TAG_POINTER:
		return 'P';
	default:
		/* Numbers have no tag case of their own here. */
		DUK_ASSERT(DUK_TVAL_IS_NUMBER(tv));
		return 'd';
	}
	DUK_UNREACHABLE();
}
/* Throw using the error state recorded in thr->heap->lj.  Never returns:
 * longjmps to the active jmpbuf (or throws a C++ dummy exception), and
 * calls duk_fatal() when no jmpbuf is active (uncaught error).
 */
DUK_INTERNAL void duk_err_longjmp(duk_hthread *thr) {
	DUK_ASSERT(thr != NULL);

	DUK_DD(DUK_DDPRINT("longjmp error: type=%d iserror=%d value1=%!T value2=%!T",
	                   (int) thr->heap->lj.type, (int) thr->heap->lj.iserror,
	                   &thr->heap->lj.value1, &thr->heap->lj.value2));

#if defined(DUK_USE_CPP_EXCEPTIONS)
	/* XXX: detecting uncaught exception case for C++ case; perhaps need
	 * some marker in heap->lj state that a try-catch is active.  For now,
	 * invokes C++ uncaught exception handling.
	 */
#else
	if (!thr->heap->lj.jmpbuf_ptr) {
		/*
		 * If we don't have a jmpbuf_ptr, there is little we can do
		 * except panic.  The caller's expectation is that we never
		 * return.
		 */

		DUK_D(DUK_DPRINT("uncaught error: type=%d iserror=%d value1=%!T value2=%!T",
		                 (int) thr->heap->lj.type, (int) thr->heap->lj.iserror,
		                 &thr->heap->lj.value1, &thr->heap->lj.value2));

		duk_fatal((duk_context *) thr, DUK_ERR_UNCAUGHT_ERROR, "uncaught error");
		DUK_UNREACHABLE();
	}
#endif

#if defined(DUK_USE_CPP_EXCEPTIONS)
	{
		duk_internal_exception exc;  /* dummy */
		throw exc;
	}
#else
	DUK_LONGJMP(thr->heap->lj.jmpbuf_ptr->jb);
#endif

	DUK_UNREACHABLE();
}
/* Torture-test helper: run a fake finalizer during mark-and-sweep to
 * shake out GC side effect bugs.  Skipped near the callstack limit so
 * the fake call itself cannot trigger a callstack limit error.
 */
DUK_LOCAL void duk__markandsweep_torture_finalizer(duk_hthread *thr) {
	duk_context *ctx;
	duk_int_t call_rc;

	DUK_ASSERT(thr != NULL);
	ctx = (duk_context *) thr;

	/* Avoid fake finalization when callstack limit has been reached.
	 * Otherwise a callstack limit error will be created, then refzero'ed.
	 */
	if (thr->heap->call_recursion_depth >= thr->heap->call_recursion_limit ||
	    thr->callstack_size + 2 * DUK_CALLSTACK_GROW_STEP >= thr->callstack_max /*approximate*/) {
		DUK_D(DUK_DPRINT("call recursion depth reached, avoid fake mark-and-sweep torture finalizer"));
		return;
	}

	/* Run fake finalizer.  Avoid creating unnecessary garbage. */
	duk_push_c_function(ctx, duk__markandsweep_fake_finalizer, 0 /*nargs*/);
	call_rc = duk_pcall(ctx, 0 /*nargs*/);
	DUK_UNREF(call_rc);  /* ignored */
	duk_pop(ctx);
}
/* Increase the refcount of the heap object referenced by 'tv'.  NULL
 * tvals, non-heap-allocated values, and NULL heap headers are ignored.
 */
void duk_heap_tval_incref(duk_tval *tv) {
	duk_heaphdr *h;

	if (tv == NULL || !DUK_TVAL_IS_HEAP_ALLOCATED(tv)) {
		return;
	}

	h = DUK_TVAL_GET_HEAPHDR(tv);
	if (h != NULL) {
		DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
		DUK_ASSERT_DISABLE(h->h_refcount >= 0);
		h->h_refcount++;
	}
}
/* Setup builtins from ROM objects.  All heaps/threads will share the
 * same readonly objects; optionally a RAM clone of the global object is
 * made for writability.
 */
DUK_INTERNAL void duk_hthread_create_builtin_objects(duk_hthread *thr) {
	duk_small_uint_t idx;

	for (idx = 0; idx < DUK_NUM_BUILTINS; idx++) {
		duk_hobject *h_bi;

		/* ROM pointers are const; cast away for the builtins table. */
		h_bi = (duk_hobject *) DUK_LOSE_CONST(duk_rom_builtins_bidx[idx]);
		DUK_ASSERT(h_bi != NULL);
		thr->builtins[idx] = h_bi;
	}

#if defined(DUK_USE_ROM_GLOBAL_CLONE) || defined(DUK_USE_ROM_GLOBAL_INHERIT)
	/* By default the global object is read-only which is often much
	 * more of an issue than having read-only built-in objects (like
	 * RegExp, Date, etc).  Use a RAM-based copy of the global object
	 * and the global environment object for convenience.
	 */
	duk__duplicate_ram_global_object(thr);
#endif
}
/* Shared implementation of Object.isSealed() and Object.isFrozen();
 * the built-in magic value selects frozen (1) vs sealed (0) semantics.
 * Pushes a boolean result.
 */
DUK_INTERNAL duk_ret_t duk_bi_object_constructor_is_sealed_frozen_shared(duk_hthread *thr) {
	duk_hobject *h;
	duk_bool_t is_frozen;
	duk_uint_t mask;

	is_frozen = (duk_bool_t) duk_get_current_magic(thr);
	mask = duk_get_type_mask(thr, 0);
	if (mask & (DUK_TYPE_MASK_LIGHTFUNC | DUK_TYPE_MASK_BUFFER)) {
		DUK_ASSERT(is_frozen == 0 || is_frozen == 1);
		/* is_frozen ^ 1: sealed query -> true, frozen query -> false
		 * for plain buffers (index props writable).
		 */
		duk_push_boolean(thr, (mask & DUK_TYPE_MASK_LIGHTFUNC) ?
		                          1 :               /* lightfunc always frozen and sealed */
		                          (is_frozen ^ 1)); /* buffer sealed but not frozen (index props writable) */
	} else {
		/* ES2015 Sections 19.1.2.12, 19.1.2.13: anything other than an object
		 * is considered to be already sealed and frozen.
		 */
		h = duk_get_hobject(thr, 0);
		duk_push_boolean(thr, (h == NULL) ||
		                      duk_hobject_object_is_sealed_frozen_helper(thr, h, is_frozen /*is_frozen*/));
	}
	return 1;
}
/* Insert 'length' bytes from 'data' into dynamic buffer 'buf' at byte
 * 'offset' (offset == current size means append), growing the buffer
 * with spare space if needed.  Existing bytes at/after 'offset' are
 * shifted up.
 *
 * NOTE(review): 'data' must not point into 'buf' itself -- the resize
 * below may reallocate and invalidate it; not checked here, confirm
 * callers.
 */
void duk_hbuffer_insert_bytes(duk_hthread *thr, duk_hbuffer_dynamic *buf, size_t offset, duk_uint8_t *data, size_t length) {
	char *p;

	/* XXX: allow inserts with offset > curr_size? i.e., insert zeroes automatically? */

	DUK_ASSERT(thr != NULL);
	DUK_ASSERT(buf != NULL);
	DUK_ASSERT(DUK_HBUFFER_HAS_DYNAMIC(buf));
	DUK_ASSERT_DISABLE(offset >= 0);  /* unsigned, so always true */
	DUK_ASSERT(offset <= DUK_HBUFFER_GET_SIZE(buf));  /* equality is OK (= append) */
	DUK_ASSERT(data != NULL);
	DUK_ASSERT_DISABLE(length >= 0);  /* unsigned, so always true */

	if (length == 0) {
		return;
	}

	/* Grow (with spare) before moving anything so the whole insert fits. */
	if (DUK_HBUFFER_DYNAMIC_GET_SPARE_SIZE(buf) < length) {
		duk_hbuffer_resize(thr,
		                   buf,
		                   DUK_HBUFFER_GET_SIZE(buf),
		                   duk__add_spare(DUK_HBUFFER_GET_SIZE(buf) + length));
	}
	DUK_ASSERT(DUK_HBUFFER_DYNAMIC_GET_SPARE_SIZE(buf) >= length);

	p = (char *) DUK_HBUFFER_DYNAMIC_GET_CURR_DATA_PTR(buf);
	if (offset < DUK_HBUFFER_GET_SIZE(buf)) {
		/* not an append */

		DUK_ASSERT(DUK_HBUFFER_GET_SIZE(buf) - offset > 0);  /* not a zero byte memmove */
		DUK_MEMMOVE((void *) (p + offset + length),
		            (void *) (p + offset),
		            DUK_HBUFFER_GET_SIZE(buf) - offset);
	}

	DUK_MEMCPY((void *) (p + offset),
	           data,
	           length);

	buf->size += length;
}
/* Allocate and minimally initialize a duk_hthread; returns NULL on
 * allocation failure.  Stacks, builtins, and string table pointers are
 * left for the caller to initialize; only the max limits and heap
 * backpointer are set here.
 */
duk_hthread *duk_hthread_alloc(duk_heap *heap, int hobject_flags) {
	duk_hthread *res;

	res = (duk_hthread *) DUK_ALLOC(heap, sizeof(duk_hthread));
	if (!res) {
		return NULL;
	}
	DUK_MEMZERO(res, sizeof(duk_hthread));

	duk__init_object_parts(heap, &res->obj, hobject_flags);

#ifdef DUK_USE_EXPLICIT_NULL_INIT
	/* Redundant after DUK_MEMZERO on common platforms; presumably kept
	 * for platforms where all-bits-zero is not a NULL pointer
	 * representation -- confirm.
	 */
	res->heap = NULL;
	res->valstack = NULL;
	res->valstack_end = NULL;
	res->valstack_bottom = NULL;
	res->valstack_top = NULL;
	res->callstack = NULL;
	res->catchstack = NULL;
	res->resumer = NULL;
	res->strs = NULL;
	{
		int i;
		for (i = 0; i < DUK_NUM_BUILTINS; i++) {
			res->builtins[i] = NULL;
		}
	}
#endif
	/* when nothing is running, API calls are in non-strict mode */
	DUK_ASSERT(res->strict == 0);

	res->heap = heap;
	res->valstack_max = DUK_VALSTACK_DEFAULT_MAX;
	res->callstack_max = DUK_CALLSTACK_DEFAULT_MAX;
	res->catchstack_max = DUK_CATCHSTACK_DEFAULT_MAX;

	return res;
}
/* Protected finalization helper: look up the object's internal finalizer
 * property and, when callable, invoke it with the object as argument.
 * Errors propagate to the protecting duk_safe_call() wrapper.
 */
static int _finalize_helper(duk_context *ctx) {
	DUK_ASSERT(ctx != NULL);

	DUK_DDDPRINT("protected finalization helper running");

	/* [... obj] */

	duk_get_prop_stridx(ctx, -1, DUK_STRIDX_INT_FINALIZER);  /* -> [... obj finalizer] */
	if (!duk_is_callable(ctx, -1)) {
		DUK_DDDPRINT("-> no finalizer or finalizer not callable");
		return 0;
	}

	duk_dup(ctx, -2);  /* -> [... obj finalizer obj] */
	DUK_DDDPRINT("-> finalizer found, calling finalizer");
	duk_call(ctx, 1);  /* -> [... obj retval] */
	DUK_DDDPRINT("finalizer finished successfully");

	/* Note: we rely on duk_safe_call() to fix up the stack for the caller,
	 * so we don't need to pop stuff here.  There is no return value;
	 * caller determines rescued status based on object refcount.
	 */
	return 0;
}
/* Terminate a thread: unwind all activations, drop the value stack
 * contents (updating refcounts), and mark the thread TERMINATED.
 * Built-in references and value stack allocation are deliberately left
 * as-is (see comment below).
 */
DUK_INTERNAL void duk_hthread_terminate(duk_hthread *thr) {
	DUK_ASSERT(thr != NULL);

	/* Unwind the callstack completely before touching the valstack. */
	while (thr->callstack_curr != NULL) {
		duk_hthread_activation_unwind_norz(thr);
	}

	thr->valstack_bottom = thr->valstack;
	duk_set_top((duk_context *) thr, 0);  /* unwinds valstack, updating refcounts */

	thr->state = DUK_HTHREAD_STATE_TERMINATED;

	/* Here we could remove references to built-ins, but it may not be
	 * worth the effort because built-ins are quite likely to be shared
	 * with another (unterminated) thread, and terminated threads are also
	 * usually garbage collected quite quickly.
	 *
	 * We could also shrink the value stack here, but that also may not
	 * be worth the effort for the same reason.
	 */

	/* Process pending refzero work queued (norz) during the unwinds. */
	DUK_REFZERO_CHECK_SLOW(thr);
}
/* Return 1 if any catch-enabled catcher exists on the catch stack of
 * 'thr' or any thread in its resumer chain, else 0.
 */
DUK_LOCAL duk_bool_t duk__have_active_catcher(duk_hthread *thr) {
	/*
	 * XXX: As noted above, a protected API call won't be counted as a
	 * catcher.  This is usually convenient, e.g. in the case of a top-
	 * level duk_pcall(), but may not always be desirable.  Perhaps add an
	 * argument to treat them as catchers?
	 */

	DUK_ASSERT(thr != NULL);

	/* Walk the resume chain; scan each thread's catch stack. */
	for (; thr != NULL; thr = thr->resumer) {
		duk_size_t idx;

		for (idx = 0; idx < thr->catchstack_top; idx++) {
			duk_catcher *cat = thr->catchstack + idx;
			if (DUK_CAT_HAS_CATCH_ENABLED(cat)) {
				return 1;  /* all we need to know */
			}
		}
	}
	return 0;
}
/* Application-triggered garbage collection.  A NULL context is accepted
 * and ignored; 'flags' is currently unused.  No-op (with a debug log)
 * when mark-and-sweep is not compiled in.
 */
DUK_EXTERNAL void duk_gc(duk_context *ctx, duk_uint_t flags) {
#ifdef DUK_USE_MARK_AND_SWEEP
	duk_hthread *thr = (duk_hthread *) ctx;
	duk_heap *heap;

	DUK_UNREF(flags);

	/* NULL accepted */
	if (ctx == NULL) {
		return;
	}
	DUK_ASSERT_CTX_VALID(ctx);
	heap = thr->heap;
	DUK_ASSERT(heap != NULL);

	DUK_D(DUK_DPRINT("mark-and-sweep requested by application"));
	duk_heap_mark_and_sweep(heap, 0);
#else
	DUK_D(DUK_DPRINT("mark-and-sweep requested by application but mark-and-sweep not enabled, ignoring"));
	DUK_UNREF(ctx);
	DUK_UNREF(flags);
#endif
}
/* [key] -> []: delete the property named by the stack-top key from the
 * object at 'obj_index'; pops the key.  Returns 1 on successful delete,
 * 0 otherwise; strict mode callers get a throw instead of 0.
 */
DUK_EXTERNAL duk_bool_t duk_del_prop(duk_context *ctx, duk_idx_t obj_index) {
	duk_hthread *thr = (duk_hthread *) ctx;
	duk_tval *tv_obj;
	duk_tval *tv_key;
	duk_small_int_t throw_flag;
	duk_bool_t deleted;

	DUK_ASSERT_CTX_VALID(ctx);

	/* Note: copying tv_obj and tv_key to locals to shield against a valstack
	 * resize is not necessary for a property delete right now.
	 */
	tv_obj = duk_require_tval(ctx, obj_index);
	tv_key = duk_require_tval(ctx, -1);
	throw_flag = duk_is_strict_call(ctx);
	deleted = duk_hobject_delprop(thr, tv_obj, tv_key, throw_flag);
	DUK_ASSERT(deleted == 0 || deleted == 1);

	duk_pop(ctx);  /* remove key */
	return deleted;
}