Shape *
PropertyTree::lookupChild(ThreadSafeContext *cx, Shape *parent, const StackShape &child)
{
    /* Keep this in sync with the logic of getChild above. */
    Shape *shape = nullptr;

    JS_ASSERT(parent);

    KidsPointer *kidp = &parent->kids;
    if (kidp->isShape()) {
        Shape *kid = kidp->toShape();
        if (kid->matches(child))
            shape = kid;
    } else if (kidp->isHash()) {
        if (KidsHash::Ptr p = kidp->toHash()->readonlyThreadsafeLookup(child))
            shape = *p;
    } else {
        return nullptr;
    }

#if defined(JSGC_INCREMENTAL) && defined(DEBUG)
    if (shape) {
        JS::Zone *zone = shape->arenaHeader()->zone;
        JS_ASSERT(!zone->needsIncrementalBarrier());
        JS_ASSERT(!(zone->isGCSweeping() && !shape->isMarked() &&
                    !shape->arenaHeader()->allocatedDuringIncremental));
    }
#endif

    return shape;
}
bool
SweepCacheAndFinishGC(JSContext* cx, const Cache& cache)
{
    // Run one more slice to finish the in-progress incremental GC, then verify
    // that the zone is no longer collecting and the cache's incremental
    // barrier has been switched back off.
    CHECK(IsIncrementalGCInProgress(cx));
    PrepareForIncrementalGC(cx);
    IncrementalGCSlice(cx, JS::gcreason::API);

    JS::Zone* zone = JS::GetObjectZone(global);
    CHECK(!IsIncrementalGCInProgress(cx));
    CHECK(!zone->isCollecting());
    CHECK(!cache.needsIncrementalBarrier());
    return true;
}
bool
GCUntilCacheSweep(JSContext *cx, const Cache& cache)
{
    // Start an incremental GC of the global's zone with a tiny slice budget so
    // that the collector yields while it is still sweeping, then verify that
    // the cache has its incremental barrier enabled.
    CHECK(!IsIncrementalGCInProgress(cx));
    JS::Zone* zone = JS::GetObjectZone(global);
    JS::PrepareZoneForGC(zone);

    SliceBudget budget(WorkBudget(1));
    cx->runtime()->gc.startDebugGC(GC_NORMAL, budget);

    CHECK(IsIncrementalGCInProgress(cx));
    CHECK(zone->isGCSweeping());
    CHECK(cache.needsIncrementalBarrier());
    return true;
}
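A minimal driver sketch for the two helpers above, assuming the usual jsapi-test fixture (a BEGIN_TEST body supplying |cx| and |global|) and a weak-cache-like |cache| with a put() method; everything except the two helper calls is an assumption for illustration, not code from the original test.

    // Hypothetical test body: populate the assumed cache, then step the GC
    // through sweeping and back out using the helpers above.
    JS::RootedObject key(cx, JS_NewPlainObject(cx));
    CHECK(key);
    cache.put(key);                          // assumed cache API, illustration only

    CHECK(GCUntilCacheSweep(cx, cache));     // GC paused mid-sweep; barrier is on
    CHECK(SweepCacheAndFinishGC(cx, cache)); // GC finished; barrier is off again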
Shape *
PropertyTree::getChild(ExclusiveContext *cx, Shape *parentArg, StackShape &unrootedChild)
{
    RootedShape parent(cx, parentArg);
    JS_ASSERT(parent);

    Shape *existingShape = nullptr;

    /*
     * The property tree has extremely low fan-out below its root in
     * popular embeddings with real-world workloads. Patterns such as
     * defining closures that capture a constructor's environment as
     * getters or setters on the new object that is passed in as
     * |this| can significantly increase fan-out below the property
     * tree root -- see bug 335700 for details.
     */
    KidsPointer *kidp = &parent->kids;
    if (kidp->isShape()) {
        Shape *kid = kidp->toShape();
        if (kid->matches(unrootedChild))
            existingShape = kid;
    } else if (kidp->isHash()) {
        if (KidsHash::Ptr p = kidp->toHash()->lookup(unrootedChild))
            existingShape = *p;
    } else {
        /* If kidp->isNull(), we always insert. */
    }

#ifdef JSGC_INCREMENTAL
    if (existingShape) {
        JS::Zone *zone = existingShape->zone();
        if (zone->needsIncrementalBarrier()) {
            /*
             * We need a read barrier for the shape tree, since these are weak
             * pointers.
             */
            Shape *tmp = existingShape;
            MarkShapeUnbarriered(zone->barrierTracer(), &tmp, "read barrier");
            JS_ASSERT(tmp == existingShape);
        } else if (zone->isGCSweeping() && !existingShape->isMarked() &&
                   !existingShape->arenaHeader()->allocatedDuringIncremental)
        {
            /*
             * The shape we've found is unreachable and due to be finalized, so
             * remove our weak reference to it and don't use it.
             */
            JS_ASSERT(parent->isMarked());
            parent->removeChild(existingShape);
            existingShape = nullptr;
        }
    }
#endif

    if (existingShape)
        return existingShape;

    RootedGeneric<StackShape*> child(cx, &unrootedChild);

    Shape *shape = newShape(cx);
    if (!shape)
        return nullptr;

    new (shape) Shape(*child, parent->numFixedSlots());

    if (!insertChild(cx, parent, shape))
        return nullptr;

    return shape;
}
UnrootedShape
PropertyTree::getChild(JSContext *cx, Shape *parent_, uint32_t nfixed, const StackShape &child)
{
    AssertCanGC();

    {
        UnrootedShape shape = NULL;

        JS_ASSERT(parent_);

        /*
         * The property tree has extremely low fan-out below its root in
         * popular embeddings with real-world workloads. Patterns such as
         * defining closures that capture a constructor's environment as
         * getters or setters on the new object that is passed in as
         * |this| can significantly increase fan-out below the property
         * tree root -- see bug 335700 for details.
         */
        KidsPointer *kidp = &parent_->kids;
        if (kidp->isShape()) {
            UnrootedShape kid = kidp->toShape();
            if (kid->matches(child))
                shape = kid;
        } else if (kidp->isHash()) {
            if (KidsHash::Ptr p = kidp->toHash()->lookup(child))
                shape = *p;
        } else {
            /* If kidp->isNull(), we always insert. */
        }

#ifdef JSGC_INCREMENTAL
        if (shape) {
            JS::Zone *zone = shape->zone();
            if (zone->needsBarrier()) {
                /*
                 * We need a read barrier for the shape tree, since these are weak
                 * pointers.
                 */
                Shape *tmp = shape;
                MarkShapeUnbarriered(zone->barrierTracer(), &tmp, "read barrier");
                JS_ASSERT(tmp == shape);
            } else if (zone->isGCSweeping() && !shape->isMarked() &&
                       !shape->arenaHeader()->allocatedDuringIncremental)
            {
                /*
                 * The shape we've found is unreachable and due to be finalized, so
                 * remove our weak reference to it and don't use it.
                 */
                JS_ASSERT(parent_->isMarked());
                parent_->removeChild(shape);
                shape = NULL;
            }
        }
#endif

        if (shape)
            return shape;
    }

    StackShape::AutoRooter childRoot(cx, &child);
    RootedShape parent(cx, parent_);

    UnrootedShape shape = newShape(cx);
    if (!shape)
        return UnrootedShape(NULL);

    new (shape) Shape(child, nfixed);

    if (!insertChild(cx, parent, shape))
        return UnrootedShape(NULL);

    return shape;
}