/*
 * Sweep this compartment after a GC marking phase: drop weak references to
 * cells that are about to be finalized and sweep the per-compartment
 * subsystems (shape tables, type-object tables, regexps, weak maps, debug
 * scopes). |releaseTypes| is accepted for interface compatibility but is not
 * consulted in this version of the function.
 */
void JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
{
    /* Sweeping must not interleave with an active analysis pass. */
    JS_ASSERT(!activeAnalysis);

    /* This function includes itself in PHASE_SWEEP_TABLES. */
    sweepCrossCompartmentWrappers();

    {
        gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);

        /* Remove dead references held weakly by the compartment. */
        sweepBaseShapeTable();
        sweepInitialShapeTable();
        sweepNewTypeObjectTable(newTypeObjects);
        sweepNewTypeObjectTable(lazyTypeObjects);
        sweepCallsiteClones();

        /* Clear the global reference if the global object itself is dying. */
        if (global_ && IsObjectAboutToBeFinalized(global_.unsafeGet()))
            global_ = NULL;

#ifdef JS_ION
        if (ionCompartment_)
            ionCompartment_->sweep(fop);
#endif

        /*
         * JIT code increments activeUseCount for any RegExpShared used by jit
         * code for the lifetime of the JIT script. Thus, we must perform
         * sweeping after clearing jit code.
         */
        regExps.sweep(rt);

        if (debugScopes)
            debugScopes->sweep(rt);

        /* Finalize unreachable (key,value) pairs in all weak maps. */
        WeakMapBase::sweepCompartment(this);
    }

    if (!zone()->isPreservingCode()) {
        /*
         * Not preserving JIT code: the analysis arena can be handed back to
         * the runtime's free list wholesale.
         */
        JS_ASSERT(!types.constrainedOutputs);
        gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DISCARD_ANALYSIS);
        gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_FREE_TI_ARENA);
        rt->freeLifoAlloc.transferFrom(&analysisLifoAlloc);
    } else {
        /* Preserving code: only sweep the shapes held by type information. */
        gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_DISCARD_ANALYSIS);
        types.sweepShapes(fop);
    }

    /*
     * Walk the sentinel-terminated list of native iterators, unlinking any
     * entry whose iterator object is about to be finalized. |next| is read
     * before unlink() so the walk survives removal of the current node.
     */
    NativeIterator *ni = enumerators->next();
    while (ni != enumerators) {
        JSObject *iterObj = ni->iterObj();
        NativeIterator *next = ni->next();
        if (gc::IsObjectAboutToBeFinalized(&iterObj))
            ni->unlink();
        ni = next;
    }
}
/*
 * Sweep this compartment after a GC marking phase: drop weak references to
 * cells that are about to be finalized and sweep the per-compartment
 * subsystems (shape tables, type-object tables, the jit compartment, regexps,
 * weak maps, debug scopes). |releaseTypes| is accepted for interface
 * compatibility but is not consulted in this version of the function.
 */
void JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
{
    /* Sweeping must not interleave with an active analysis pass. */
    JS_ASSERT(!activeAnalysis);

    /* This function includes itself in PHASE_SWEEP_TABLES. */
    sweepCrossCompartmentWrappers();

    JSRuntime *rt = runtimeFromMainThread();

    {
        gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);

        /* Remove dead references held weakly by the compartment. */
        sweepBaseShapeTable();
        sweepInitialShapeTable();
        sweepNewTypeObjectTable(newTypeObjects);
        sweepNewTypeObjectTable(lazyTypeObjects);
        sweepCallsiteClones();

        /* Clear the global reference if the global object itself is dying. */
        if (global_ && IsObjectAboutToBeFinalized(global_.unsafeGet()))
            global_ = nullptr;

        /* Likewise for the self-hosting script source object. */
        if (selfHostingScriptSource &&
            IsObjectAboutToBeFinalized((JSObject **) selfHostingScriptSource.unsafeGet()))
        {
            selfHostingScriptSource = nullptr;
        }

#ifdef JS_ION
        if (jitCompartment_)
            jitCompartment_->sweep(fop);
#endif

        /*
         * JIT code increments activeUseCount for any RegExpShared used by jit
         * code for the lifetime of the JIT script. Thus, we must perform
         * sweeping after clearing jit code.
         */
        regExps.sweep(rt);

        if (debugScopes)
            debugScopes->sweep(rt);

        /* Finalize unreachable (key,value) pairs in all weak maps. */
        WeakMapBase::sweepCompartment(this);
    }

    /*
     * Walk the sentinel-terminated list of native iterators, unlinking any
     * entry whose iterator object is about to be finalized. |next| is read
     * before unlink() so the walk survives removal of the current node.
     */
    NativeIterator *ni = enumerators->next();
    while (ni != enumerators) {
        JSObject *iterObj = ni->iterObj();
        NativeIterator *next = ni->next();
        if (gc::IsObjectAboutToBeFinalized(&iterObj))
            ni->unlink();
        ni = next;
    }
}
void JSCompartment::sweepNativeIterators() { /* Sweep list of native iterators. */ NativeIterator* ni = enumerators->next(); while (ni != enumerators) { JSObject* iterObj = ni->iterObj(); NativeIterator* next = ni->next(); if (gc::IsAboutToBeFinalizedUnbarriered(&iterObj)) ni->unlink(); ni = next; } }
/*
 * Sweep this compartment after a GC marking phase: discard jit code, drop
 * weak references to dying cells, sweep per-compartment subsystems, and —
 * when code is not being preserved — sweep type-inference data and release
 * analysis memory. |releaseTypes| requests that per-script observed type sets
 * be destroyed as well; it is forced off while the compartment is |active|
 * (scripts on the stack).
 */
void JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
{
    /* Sweeping must not interleave with an active analysis pass. */
    JS_ASSERT(!activeAnalysis);

    {
        gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_DISCARD_CODE);
        discardJitCode(fop, !zone()->isPreservingCode());
    }

    /* This function includes itself in PHASE_SWEEP_TABLES. */
    sweepCrossCompartmentWrappers();

    {
        gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);

        /* Remove dead references held weakly by the compartment. */
        sweepBaseShapeTable();
        sweepInitialShapeTable();
        sweepNewTypeObjectTable(newTypeObjects);
        sweepNewTypeObjectTable(lazyTypeObjects);
        sweepBreakpoints(fop);
        sweepCallsiteClones();

        /* Clear the global reference if the global object itself is dying. */
        if (global_ && IsObjectAboutToBeFinalized(global_.unsafeGet()))
            global_ = NULL;

#ifdef JS_ION
        if (ionCompartment_)
            ionCompartment_->sweep(fop);
#endif

        /*
         * JIT code increments activeUseCount for any RegExpShared used by jit
         * code for the lifetime of the JIT script. Thus, we must perform
         * sweeping after clearing jit code.
         */
        regExps.sweep(rt);

        if (debugScopes)
            debugScopes->sweep(rt);

        /* Finalize unreachable (key,value) pairs in all weak maps. */
        WeakMapBase::sweepCompartment(this);
    }

    if (!zone()->isPreservingCode()) {
        JS_ASSERT(!types.constrainedOutputs);
        gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DISCARD_ANALYSIS);

        /*
         * Clear the analysis pool, but don't release its data yet. While
         * sweeping types any live data will be allocated into the pool.
         */
        LifoAlloc oldAlloc(typeLifoAlloc.defaultChunkSize());
        oldAlloc.steal(&typeLifoAlloc);

        /*
         * Periodically release observed types for all scripts. This is safe to
         * do when there are no frames for the compartment on the stack.
         */
        if (active)
            releaseTypes = false;

        /*
         * Sweep analysis information and everything depending on it from the
         * compartment, including all remaining mjit code if inference is
         * enabled in the compartment.
         */
        if (types.inferenceEnabled) {
            gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_DISCARD_TI);

            for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
                RawScript script = i.get<JSScript>();
                if (script->types) {
                    types::TypeScript::Sweep(fop, script);

                    /* Optionally drop the script's observed type sets too. */
                    if (releaseTypes) {
                        script->types->destroy();
                        script->types = NULL;
                    }
                }
            }
        }

        {
            gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_SWEEP_TYPES);
            types.sweep(fop);
        }

        {
            gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_CLEAR_SCRIPT_ANALYSIS);
            for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
                JSScript *script = i.get<JSScript>();
                script->clearAnalysis();
                script->clearPropertyReadTypes();
            }
        }

        {
            /*
             * Now that type sweeping is done, both the old (stolen) pool and
             * the analysis arena can be handed to the runtime for freeing.
             */
            gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_FREE_TI_ARENA);
            rt->freeLifoAlloc.transferFrom(&analysisLifoAlloc);
            rt->freeLifoAlloc.transferFrom(&oldAlloc);
        }
    }

    /*
     * Walk the sentinel-terminated list of native iterators, unlinking any
     * entry whose iterator object is about to be finalized. |next| is read
     * before unlink() so the walk survives removal of the current node.
     */
    NativeIterator *ni = enumerators->next();
    while (ni != enumerators) {
        JSObject *iterObj = ni->iterObj();
        NativeIterator *next = ni->next();
        if (gc::IsObjectAboutToBeFinalized(&iterObj))
            ni->unlink();
        ni = next;
    }

    /* Reset for the next GC cycle; set elsewhere when frames enter. */
    active = false;
}
/*
 * Sweep this compartment after a GC marking phase: drop weak references to
 * dying cells and sweep per-compartment subsystems (inner views, wrappers,
 * shape and type-object tables, saved stacks, the jit compartment, regexps,
 * weak maps, debug scopes, native iterators). The MaybeAutoPhase wrappers
 * skip stats bookkeeping while the heap is being compacted. |releaseTypes|
 * is accepted for interface compatibility but is not consulted here.
 */
void JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
{
    /* Sweeping must not interleave with an active analysis pass. */
    JS_ASSERT(!activeAnalysis);
    JSRuntime *rt = runtimeFromMainThread();

    {
        gcstats::MaybeAutoPhase ap(rt->gc.stats, !rt->isHeapCompacting(),
                                   gcstats::PHASE_SWEEP_TABLES_INNER_VIEWS);
        innerViews.sweep(rt);
    }

    {
        gcstats::MaybeAutoPhase ap(rt->gc.stats, !rt->isHeapCompacting(),
                                   gcstats::PHASE_SWEEP_TABLES_WRAPPER);
        sweepCrossCompartmentWrappers();
    }

    /* Remove dead references held weakly by the compartment. */
    sweepBaseShapeTable();
    sweepInitialShapeTable();
    {
        gcstats::MaybeAutoPhase ap(rt->gc.stats, !rt->isHeapCompacting(),
                                   gcstats::PHASE_SWEEP_TABLES_TYPE_OBJECT);
        sweepNewTypeObjectTable(newTypeObjects);
        sweepNewTypeObjectTable(lazyTypeObjects);
    }
    sweepCallsiteClones();
    savedStacks_.sweep(rt);

    /*
     * If the global object itself is dying, detach any attached debuggers
     * before clearing the reference.
     */
    if (global_ && IsObjectAboutToBeFinalized(global_.unsafeGet())) {
        if (debugMode())
            Debugger::detachAllDebuggersFromGlobal(fop, global_);
        global_.set(nullptr);
    }

    /* Likewise drop the self-hosting script source object if it is dying. */
    if (selfHostingScriptSource &&
        IsObjectAboutToBeFinalized((JSObject **) selfHostingScriptSource.unsafeGet()))
    {
        selfHostingScriptSource.set(nullptr);
    }

    if (jitCompartment_)
        jitCompartment_->sweep(fop, this);

    /*
     * JIT code increments activeWarmUpCounter for any RegExpShared used by jit
     * code for the lifetime of the JIT script. Thus, we must perform
     * sweeping after clearing jit code.
     */
    regExps.sweep(rt);

    if (debugScopes)
        debugScopes->sweep(rt);

    /* Finalize unreachable (key,value) pairs in all weak maps. */
    WeakMapBase::sweepCompartment(this);

    /*
     * Sweep list of native iterators: unlink any entry whose iterator object
     * is about to be finalized. |next| is read before unlink() so the walk
     * survives removal of the current node.
     */
    NativeIterator *ni = enumerators->next();
    while (ni != enumerators) {
        JSObject *iterObj = ni->iterObj();
        NativeIterator *next = ni->next();
        if (gc::IsObjectAboutToBeFinalized(&iterObj))
            ni->unlink();
        ni = next;
    }
}
/*
 * Walk every active native iterator in the current compartment and, for each
 * iterator over |obj|, remove from its not-yet-enumerated property list any
 * id matched by |predicate| — unless an enumerable property of the same id
 * has become visible on the prototype chain (in which case the id is kept so
 * enumeration still reports it). Any iterator that is modified is flagged
 * JSITER_UNREUSABLE. Returns false only when a prototype lookup or id
 * conversion fails.
 */
static bool SuppressDeletedPropertyHelper(JSContext* cx, HandleObject obj, StringPredicate predicate)
{
    NativeIterator* enumeratorList = cx->compartment()->enumerators;
    NativeIterator* ni = enumeratorList->next();

    while (ni != enumeratorList) {
      again:
        /* Only consider iterators over |obj| with properties left to visit. */
        if (ni->obj == obj && ni->props_cursor < ni->props_end) {
            /* Check whether id is still to come. */
            GCPtrFlatString* props_cursor = ni->current();
            GCPtrFlatString* props_end = ni->end();
            for (GCPtrFlatString* idp = props_cursor; idp < props_end; ++idp) {
                if (predicate(*idp)) {
                    /*
                     * Check whether another property along the prototype chain
                     * became visible as a result of this deletion.
                     */
                    RootedObject proto(cx);
                    if (!GetPrototype(cx, obj, &proto))
                        return false;
                    if (proto) {
                        RootedId id(cx);
                        RootedValue idv(cx, StringValue(*idp));
                        if (!ValueToId<CanGC>(cx, idv, &id))
                            return false;

                        Rooted<PropertyDescriptor> desc(cx);
                        if (!GetPropertyDescriptor(cx, proto, id, &desc))
                            return false;

                        if (desc.object()) {
                            /* An enumerable shadowing property: keep the id. */
                            if (desc.enumerable())
                                continue;
                        }
                    }

                    /*
                     * If GetPropertyDescriptor above removed a property from
                     * ni, start over: the cached cursor/end pointers (and our
                     * position within the list) are no longer valid.
                     */
                    if (props_end != ni->props_end || props_cursor != ni->props_cursor)
                        goto again;

                    /*
                     * No property along the prototype chain stepped in to take the
                     * property's place, so go ahead and delete id from the list.
                     * If it is the next property to be enumerated, just skip it.
                     */
                    if (idp == props_cursor) {
                        ni->incCursor();
                    } else {
                        /* Shift the remaining ids down over the deleted one. */
                        for (GCPtrFlatString* p = idp; p + 1 != props_end; p++)
                            *p = *(p + 1);
                        ni->props_end = ni->end() - 1;

                        /*
                         * This invokes the pre barrier on this element, since
                         * it's no longer going to be marked, and ensures that
                         * any existing remembered set entry will be dropped.
                         */
                        *ni->props_end = nullptr;
                    }

                    /* Don't reuse modified native iterators. */
                    ni->flags |= JSITER_UNREUSABLE;

                    if (predicate.matchesAtMostOne())
                        break;
                }
            }
        }
        ni = ni->next();
    }
    return true;
}