Example 1
DUK_INTERNAL void duk_heaphdr_incref(duk_heaphdr *h) {
	DUK_ASSERT(h != NULL);
	DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
	DUK_ASSERT_DISABLE(DUK_HEAPHDR_GET_REFCOUNT(h) >= 0);

	DUK_HEAPHDR_PREINC_REFCOUNT(h);
}
Example 2
		/*
		 *  Buffers have no internal references.  However, a dynamic
		 *  buffer has a separate allocation for the buffer.  This is
		 *  freed by duk_heap_free_heaphdr_raw().
		 */

		duk_heap_remove_any_from_heap_allocated(heap, h);
		duk_heap_free_heaphdr_raw(heap, h);
		break;

	default:
		DUK_D(DUK_DPRINT("invalid heap type in decref: %ld", (long) DUK_HEAPHDR_GET_TYPE(h)));
		DUK_UNREACHABLE();
	}
}

#if !defined(DUK_USE_FAST_REFCOUNT_DEFAULT)
DUK_INTERNAL void duk_tval_incref(duk_tval *tv) {
	DUK_ASSERT(tv != NULL);

	if (DUK_TVAL_IS_HEAP_ALLOCATED(tv)) {
		duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
		DUK_ASSERT(h != NULL);
		DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
		DUK_ASSERT_DISABLE(h->h_refcount >= 0);
		DUK_HEAPHDR_PREINC_REFCOUNT(h);
	}
}
#endif

#if 0  /* unused */
DUK_INTERNAL void duk_tval_incref_allownull(duk_tval *tv) {
	if (tv == NULL) {
		return;
	}
	if (DUK_TVAL_IS_HEAP_ALLOCATED(tv)) {
		duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
		DUK_ASSERT(h != NULL);
		DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
		DUK_ASSERT_DISABLE(h->h_refcount >= 0);
		DUK_HEAPHDR_PREINC_REFCOUNT(h);
	}
}
#endif

DUK_INTERNAL void duk_tval_decref(duk_hthread *thr, duk_tval *tv) {
	DUK_ASSERT(thr != NULL);
	DUK_ASSERT(tv != NULL);

	if (DUK_TVAL_IS_HEAP_ALLOCATED(tv)) {
		duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
		DUK_ASSERT(h != NULL);
		DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
		duk_heaphdr_decref(thr, h);
	}
}

#if 0  /* unused */
DUK_INTERNAL void duk_tval_decref_allownull(duk_hthread *thr, duk_tval *tv) {
	DUK_ASSERT(thr != NULL);

	if (tv == NULL) {
		return;
	}
	if (DUK_TVAL_IS_HEAP_ALLOCATED(tv)) {
		duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
		DUK_ASSERT(h != NULL);
		DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
		duk_heaphdr_decref(thr, h);
	}
}
#endif

#if !defined(DUK_USE_FAST_REFCOUNT_DEFAULT)
DUK_INTERNAL void duk_heaphdr_incref(duk_heaphdr *h) {
	DUK_ASSERT(h != NULL);
	DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
	DUK_ASSERT_DISABLE(DUK_HEAPHDR_GET_REFCOUNT(h) >= 0);

	DUK_HEAPHDR_PREINC_REFCOUNT(h);
}
#endif

#if 0  /* unused */
DUK_INTERNAL void duk_heaphdr_incref_allownull(duk_heaphdr *h) {
	if (h == NULL) {
		return;
	}
	DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
	DUK_ASSERT_DISABLE(DUK_HEAPHDR_GET_REFCOUNT(h) >= 0);

	DUK_HEAPHDR_PREINC_REFCOUNT(h);
}
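
The incref/decref pair above is normally used when a duk_tval slot is overwritten: the incoming value is incref'd before the outgoing value is decref'd, so a self-assignment cannot transiently drop the refcount to zero. The helper below is a minimal sketch of that write pattern, not code from the excerpt; duk__tval_slot_write is a hypothetical name, and only the DUK_TVAL_SET_TVAL copy macro plus the two functions shown above are assumed.

/* Minimal sketch (hypothetical helper, not part of the excerpt). */
static void duk__tval_slot_write(duk_hthread *thr, duk_tval *slot, duk_tval *newval) {
	duk_tval old;

	DUK_TVAL_SET_TVAL(&old, slot);    /* remember the value being replaced */
	DUK_TVAL_SET_TVAL(slot, newval);  /* overwrite the slot */
	duk_tval_incref(newval);          /* account for the new reference first */
	duk_tval_decref(thr, &old);       /* then drop the old one; may cascade via refzero */
}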
Example 3
DUK_INTERNAL void duk_tval_incref(duk_tval *tv) {
	DUK_ASSERT(tv != NULL);

	if (DUK_TVAL_IS_HEAP_ALLOCATED(tv)) {
		duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
		DUK_ASSERT(h != NULL);
		DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
		DUK_ASSERT_DISABLE(h->h_refcount >= 0);
		DUK_HEAPHDR_PREINC_REFCOUNT(h);
	}
}
Example 4
		/*
		 *  Buffers have no internal references.  However, a dynamic
		 *  buffer has a separate allocation for the buffer.  This is
		 *  freed by duk_heap_free_heaphdr_raw().
		 */

		duk_heap_remove_any_from_heap_allocated(heap, h);
		duk_heap_free_heaphdr_raw(heap, h);
		break;

	default:
		DUK_D(DUK_DPRINT("invalid heap type in decref: %ld", (long) DUK_HEAPHDR_GET_TYPE(h)));
		DUK_UNREACHABLE();
	}
}

#if !defined(DUK_USE_FAST_REFCOUNT_DEFAULT)
DUK_INTERNAL void duk_tval_incref(duk_tval *tv) {
	DUK_ASSERT(tv != NULL);

	if (DUK_TVAL_NEEDS_REFCOUNT_UPDATE(tv)) {
		duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
		DUK_ASSERT(h != NULL);
		DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
		DUK_ASSERT_DISABLE(h->h_refcount >= 0);
		DUK_HEAPHDR_PREINC_REFCOUNT(h);
	}
}
#endif

#if 0  /* unused */
DUK_INTERNAL void duk_tval_incref_allownull(duk_tval *tv) {
	if (tv == NULL) {
		return;
	}
	if (DUK_TVAL_NEEDS_REFCOUNT_UPDATE(tv)) {
		duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
		DUK_ASSERT(h != NULL);
		DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
		DUK_ASSERT_DISABLE(h->h_refcount >= 0);
		DUK_HEAPHDR_PREINC_REFCOUNT(h);
	}
}
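
Example 4 is the same incref, but gated on DUK_TVAL_NEEDS_REFCOUNT_UPDATE instead of DUK_TVAL_IS_HEAP_ALLOCATED; both select values that carry a duk_heaphdr. The #if !defined(DUK_USE_FAST_REFCOUNT_DEFAULT) guard means these out-of-line functions are only compiled when call sites do not inline the refcount update. The macro below is a hedged sketch of the assumed call-site dispatch, written for illustration rather than copied from the Duktape headers.

/* Assumed shape of the call-site dispatch (illustrative only). */
#if defined(DUK_USE_FAST_REFCOUNT_DEFAULT)
#define DUK_TVAL_INCREF(thr, tv) do { \
		duk_tval *duk__tv = (tv); \
		if (DUK_TVAL_NEEDS_REFCOUNT_UPDATE(duk__tv)) { \
			DUK_HEAPHDR_PREINC_REFCOUNT(DUK_TVAL_GET_HEAPHDR(duk__tv)); \
		} \
	} while (0)
#else
#define DUK_TVAL_INCREF(thr, tv) duk_tval_incref((tv))
#endif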
Example 5
DUK_INTERNAL void duk_heap_heaphdr_incref(duk_heaphdr *h) {
#if 0
	DUK_DDD(DUK_DDDPRINT("heaphdr incref %p (%ld->%ld): %!O",
	                     (void *) h,
	                     (h != NULL ? (long) h->h_refcount : (long) 0),
	                     (h != NULL ? (long) (h->h_refcount + 1) : (long) 0),
	                     (duk_heaphdr *) h));
#endif

	if (!h) {
		return;
	}
	DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
	DUK_ASSERT_DISABLE(DUK_HEAPHDR_GET_REFCOUNT(h) >= 0);

	DUK_HEAPHDR_PREINC_REFCOUNT(h);
}
Example 6
DUK_INTERNAL void duk_heap_tval_incref(duk_tval *tv) {
#if 0
	DUK_DDD(DUK_DDDPRINT("tval incref %p (%ld->%ld): %!T",
	                     (void *) tv,
	                     (tv != NULL && DUK_TVAL_IS_HEAP_ALLOCATED(tv) ? (long) DUK_TVAL_GET_HEAPHDR(tv)->h_refcount : (long) 0),
	                     (tv != NULL && DUK_TVAL_IS_HEAP_ALLOCATED(tv) ? (long) (DUK_TVAL_GET_HEAPHDR(tv)->h_refcount + 1) : (long) 0),
	                     (duk_tval *) tv));
#endif

	if (!tv) {
		return;
	}

	if (DUK_TVAL_IS_HEAP_ALLOCATED(tv)) {
		duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
		if (h) {
			DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
			DUK_ASSERT_DISABLE(h->h_refcount >= 0);
			DUK_HEAPHDR_PREINC_REFCOUNT(h);
		}
	}
}
Example 7
DUK_LOCAL void duk__refzero_free_pending(duk_hthread *thr) {
	duk_heaphdr *h1, *h2;
	duk_heap *heap;
	duk_int_t count = 0;

	DUK_ASSERT(thr != NULL);
	DUK_ASSERT(thr->heap != NULL);
	heap = thr->heap;
	DUK_ASSERT(heap != NULL);

	/*
	 *  Detect recursive invocation
	 */

	if (DUK_HEAP_HAS_REFZERO_FREE_RUNNING(heap)) {
		DUK_DDD(DUK_DDDPRINT("refzero free running, skip run"));
		return;
	}

	/*
	 *  Churn refzero_list until empty
	 */

	DUK_HEAP_SET_REFZERO_FREE_RUNNING(heap);
	while (heap->refzero_list) {
		duk_hobject *obj;
		duk_bool_t rescued = 0;

		/*
		 *  Pick an object from the head (don't remove yet).
		 */

		h1 = heap->refzero_list;
		obj = (duk_hobject *) h1;
		DUK_DD(DUK_DDPRINT("refzero processing %p: %!O", (void *) h1, (duk_heaphdr *) h1));
		DUK_ASSERT(DUK_HEAPHDR_GET_PREV(h1) == NULL);
		DUK_ASSERT(DUK_HEAPHDR_GET_TYPE(h1) == DUK_HTYPE_OBJECT);  /* currently, always the case */

		/*
		 *  Finalizer check.
		 *
		 *  Note: running a finalizer may have arbitrary side effects, e.g.
		 *  queue more objects on refzero_list (tail), or even trigger a
		 *  mark-and-sweep.
		 *
		 *  Note: quick reject check should match vast majority of
		 *  objects and must be safe (not throw any errors, ever).
		 */

		/* XXX: If object has FINALIZED, it was finalized by mark-and-sweep on
		 * its previous run.  Any point in running finalizer again here?  If
		 * finalization semantics is changed so that finalizer is only run once,
		 * checking for FINALIZED would happen here.
		 */

		/* A finalizer is looked up from the object and up its prototype chain
		 * (which allows inherited finalizers).
		 */
		if (duk_hobject_hasprop_raw(thr, obj, DUK_HTHREAD_STRING_INT_FINALIZER(thr))) {
			DUK_DDD(DUK_DDDPRINT("object has a finalizer, run it"));

			DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(h1) == 0);
			DUK_HEAPHDR_PREINC_REFCOUNT(h1);  /* bump refcount to prevent refzero during finalizer processing */

			duk_hobject_run_finalizer(thr, obj);  /* must never longjmp */

			DUK_HEAPHDR_PREDEC_REFCOUNT(h1);  /* remove artificial bump */
			DUK_ASSERT_DISABLE(h1->h_refcount >= 0);  /* refcount is unsigned, so always true */

			if (DUK_HEAPHDR_GET_REFCOUNT(h1) != 0) {
				DUK_DDD(DUK_DDDPRINT("-> object refcount after finalization non-zero, object will be rescued"));
				rescued = 1;
			} else {
				DUK_DDD(DUK_DDDPRINT("-> object refcount still zero after finalization, object will be freed"));
			}
		}

		/* Refzero head is still the same.  This is the case even if finalizer
		 * inserted more refzero objects; they are inserted to the tail.
		 */
		DUK_ASSERT(h1 == heap->refzero_list);

		/*
		 *  Remove the object from the refzero list.  This cannot be done
		 *  before a possible finalizer has been executed; the finalizer
		 *  may trigger a mark-and-sweep, and mark-and-sweep must be able
		 *  to traverse a complete refzero_list.
		 */

		h2 = DUK_HEAPHDR_GET_NEXT(h1);
		if (h2) {
			DUK_HEAPHDR_SET_PREV(h2, NULL);  /* not strictly necessary */
			heap->refzero_list = h2;
		} else {
			heap->refzero_list = NULL;
			heap->refzero_list_tail = NULL;
		}

		/*
		 *  Rescue or free.
		 */

		if (rescued) {
			/* yes -> move back to heap allocated */
			DUK_DD(DUK_DDPRINT("object rescued during refcount finalization: %p", (void *) h1));
			DUK_HEAPHDR_SET_PREV(h1, NULL);
			DUK_HEAPHDR_SET_NEXT(h1, heap->heap_allocated);
			heap->heap_allocated = h1;
		} else {
			/* no -> decref members, then free */
			duk__refcount_finalize_hobject(thr, obj);
			duk_heap_free_heaphdr_raw(heap, h1);
		}

		count++;
	}
	DUK_HEAP_CLEAR_REFZERO_FREE_RUNNING(heap);

	DUK_DDD(DUK_DDDPRINT("refzero processed %ld objects", (long) count));

	/*
	 *  Once the whole refzero cascade has been freed, check for
	 *  a voluntary mark-and-sweep.
	 */

#if defined(DUK_USE_MARK_AND_SWEEP) && defined(DUK_USE_VOLUNTARY_GC)
	/* 'count' is more or less comparable to normal trigger counter update
	 * which happens in memory block (re)allocation.
	 */
	heap->mark_and_sweep_trigger_counter -= count;
	if (heap->mark_and_sweep_trigger_counter <= 0) {
		duk_bool_t rc;
		duk_small_uint_t flags = 0;  /* not emergency */
		DUK_D(DUK_DPRINT("refcount triggering mark-and-sweep"));
		rc = duk_heap_mark_and_sweep(heap, flags);
		DUK_UNREF(rc);
		DUK_D(DUK_DPRINT("refcount triggered mark-and-sweep => rc %ld", (long) rc));
	}
#endif  /* DUK_USE_MARK_AND_SWEEP && DUK_USE_VOLUNTARY_GC */
}
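
For context, the finalizer that duk__refzero_free_pending() looks up through the internal _Finalizer property is normally attached through the public API. The fragment below is a minimal sketch of that, using the documented duk_set_finalizer() call; the surrounding function names are illustrative. As the excerpt notes, a finalizer must never longjmp (throw), and if it leaves the refcount non-zero the object is rescued back onto heap_allocated instead of being freed.

#include "duktape.h"

/* Illustrative finalizer: the object being finalized is argument 0. */
static duk_ret_t my_finalizer(duk_context *ctx) {
	(void) ctx;  /* release native resources tied to the object here */
	return 0;    /* must not throw */
}

static void create_object_with_finalizer(duk_context *ctx) {
	duk_push_object(ctx);                       /* [ ... obj ] */
	duk_push_c_function(ctx, my_finalizer, 1);  /* [ ... obj fin ] */
	duk_set_finalizer(ctx, -2);                 /* [ ... obj ], finalizer popped */
	duk_pop(ctx);  /* dropping the last reference triggers refzero and runs the finalizer */
}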
Example 8
DUK_INTERNAL void duk_heaphdr_refzero(duk_hthread *thr, duk_heaphdr *h) {
	duk_heap *heap;

	DUK_ASSERT(thr != NULL);
	DUK_ASSERT(h != NULL);

	heap = thr->heap;
	DUK_DDD(DUK_DDDPRINT("refzero %p: %!O", (void *) h, (duk_heaphdr *) h));

	/*
	 *  Refzero handling is skipped entirely if (1) mark-and-sweep is
	 *  running or (2) execution is paused in the debugger.  The objects
	 *  are left in the heap, and will be freed by mark-and-sweep or
	 *  eventual heap destruction.
	 *
	 *  This is necessary during mark-and-sweep because refcounts are also
	 *  updated during the sweep phase (otherwise objects referenced by a
	 *  swept object would have incorrect refcounts) which then calls here.
	 *  This could be avoided by using separate decref macros in
	 *  mark-and-sweep; however, mark-and-sweep also calls finalizers which
	 *  would use the ordinary decref macros anyway and still call this
	 *  function.
	 *
	 *  This check must be enabled also when mark-and-sweep support has been
	 *  disabled: the flag is also used in heap destruction when running
	 *  finalizers for remaining objects, and the flag prevents objects from
	 *  being moved around in heap linked lists.
	 */

	/* XXX: ideally this would be just one flag (maybe a derived one) so
	 * that a single bit test is sufficient to check the condition.
	 */
#if defined(DUK_USE_DEBUGGER_SUPPORT)
	if (DUK_UNLIKELY(DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap) || DUK_HEAP_IS_PAUSED(heap))) {
#else
	if (DUK_UNLIKELY(DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap))) {
#endif
		DUK_DDD(DUK_DDDPRINT("refzero handling suppressed when mark-and-sweep running, object: %p", (void *) h));
		return;
	}

	switch ((duk_small_int_t) DUK_HEAPHDR_GET_TYPE(h)) {
	case DUK_HTYPE_STRING:
		/*
		 *  Strings have no internal references but do have "weak"
		 *  references in the string cache.  Also note that strings
		 *  are not on the heap_allocated list like other heap
		 *  elements.
		 */

		duk_heap_strcache_string_remove(heap, (duk_hstring *) h);
		duk_heap_string_remove(heap, (duk_hstring *) h);
		duk_heap_free_heaphdr_raw(heap, h);
		break;

	case DUK_HTYPE_OBJECT:
		/*
		 *  Objects have internal references.  Must finalize through
		 *  the "refzero" work list.
		 */

		duk_heap_remove_any_from_heap_allocated(heap, h);
		duk__queue_refzero(heap, h);
		duk__refzero_free_pending(thr);
		break;

	case DUK_HTYPE_BUFFER:
		/*
		 *  Buffers have no internal references.  However, a dynamic
		 *  buffer has a separate allocation for the buffer.  This is
		 *  freed by duk_heap_free_heaphdr_raw().
		 */

		duk_heap_remove_any_from_heap_allocated(heap, h);
		duk_heap_free_heaphdr_raw(heap, h);
		break;

	default:
		DUK_D(DUK_DPRINT("invalid heap type in decref: %ld", (long) DUK_HEAPHDR_GET_TYPE(h)));
		DUK_UNREACHABLE();
	}
}

#if !defined(DUK_USE_FAST_REFCOUNT_DEFAULT)
DUK_INTERNAL void duk_tval_incref(duk_tval *tv) {
	DUK_ASSERT(tv != NULL);

	if (DUK_TVAL_NEEDS_REFCOUNT_UPDATE(tv)) {
		duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
		DUK_ASSERT(h != NULL);
		DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
		DUK_ASSERT_DISABLE(h->h_refcount >= 0);
		DUK_HEAPHDR_PREINC_REFCOUNT(h);
	}
}
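
duk__queue_refzero(), called in the DUK_HTYPE_OBJECT case above, is not included in these excerpts. Based on the list handling in Example 7 (head processing, tail insertion of newly queued refzero objects), a plausible reconstruction is sketched below; treat it as an assumption, not the actual Duktape implementation.

/* Hedged reconstruction of duk__queue_refzero(): append to the tail of
 * heap->refzero_list so the head currently being processed stays stable.
 */
DUK_LOCAL void duk__queue_refzero(duk_heap *heap, duk_heaphdr *hdr) {
	if (heap->refzero_list != NULL) {
		DUK_ASSERT(heap->refzero_list_tail != NULL);
		DUK_HEAPHDR_SET_NEXT(heap->refzero_list_tail, hdr);
		DUK_HEAPHDR_SET_PREV(hdr, heap->refzero_list_tail);
	} else {
		DUK_ASSERT(heap->refzero_list_tail == NULL);
		heap->refzero_list = hdr;
		DUK_HEAPHDR_SET_PREV(hdr, NULL);
	}
	DUK_HEAPHDR_SET_NEXT(hdr, NULL);
	heap->refzero_list_tail = hdr;
}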