void
BaselineFrame::trace(JSTracer* trc, JitFrameIterator& frameIterator)
{
    replaceCalleeToken(MarkCalleeToken(trc, calleeToken()));

    // Mark |this|, actual and formal args.
    if (isFunctionFrame()) {
        TraceRoot(trc, &thisArgument(), "baseline-this");

        unsigned numArgs = js::Max(numActualArgs(), numFormalArgs());
        TraceRootRange(trc, numArgs + isConstructing(), argv(), "baseline-args");
    }

    // Mark environment chain, if it exists.
    if (envChain_)
        TraceRoot(trc, &envChain_, "baseline-envchain");

    // Mark return value.
    if (hasReturnValue())
        TraceRoot(trc, returnValue().address(), "baseline-rval");

    if (isEvalFrame() && script()->isDirectEvalInFunction())
        TraceRoot(trc, evalNewTargetAddress(), "baseline-evalNewTarget");

    if (hasArgsObj())
        TraceRoot(trc, &argsObj_, "baseline-args-obj");

    // Mark locals and stack values.
    JSScript* script = this->script();
    size_t nfixed = script->nfixed();
    jsbytecode* pc;
    frameIterator.baselineScriptAndPc(nullptr, &pc);
    size_t nlivefixed = script->calculateLiveFixed(pc);

    // NB: It is possible that numValueSlots() could be zero, even if nfixed is
    // nonzero. This is the case if the function has an early stack check.
    if (numValueSlots() == 0)
        return;

    MOZ_ASSERT(nfixed <= numValueSlots());

    if (nfixed == nlivefixed) {
        // All locals are live.
        MarkLocals(this, trc, 0, numValueSlots());
    } else {
        // Mark operand stack.
        MarkLocals(this, trc, nfixed, numValueSlots());

        // Clear dead block-scoped locals.
        while (nfixed > nlivefixed)
            unaliasedLocal(--nfixed).setUndefined();

        // Mark live locals.
        MarkLocals(this, trc, 0, nlivefixed);
    }

    if (script->compartment()->debugEnvs)
        script->compartment()->debugEnvs->markLiveFrame(trc, this);
}
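// The nfixed/nlivefixed split above partitions the frame's value slots into
// three regions. A minimal sketch of that partition, treating the slots as a
// flat array purely for illustration; the real MarkLocals accounts for
// BaselineFrame's actual slot layout, and TraceFrameSlots is not an engine
// function.
//
//   [0, nlivefixed)           live fixed locals        -> trace
//   [nlivefixed, nfixed)      dead block-scoped locals -> overwrite with undefined
//   [nfixed, numValueSlots)   operand stack values     -> trace
static void
TraceFrameSlots(JSTracer* trc, JS::Value* slots,
                size_t nlivefixed, size_t nfixed, size_t numValueSlots)
{
    // Operand stack values are always live.
    TraceRootRange(trc, numValueSlots - nfixed, slots + nfixed, "sketch-stack");

    // Dead locals get a safe canonical value so a later GC can't misread them.
    for (size_t i = nlivefixed; i < nfixed; i++)
        slots[i].setUndefined();

    // Finally trace the locals that are still live at the current pc.
    TraceRootRange(trc, nlivefixed, slots, "sketch-live-locals");
}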
void
JSCompartment::sweepBreakpoints(FreeOp *fop)
{
    gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_BREAKPOINT);

    if (rt->debuggerList.isEmpty())
        return;

    for (CellIterUnderGC i(zone(), FINALIZE_SCRIPT); !i.done(); i.next()) {
        JSScript *script = i.get<JSScript>();
        if (script->compartment() != this || !script->hasAnyBreakpointsOrStepMode())
            continue;
        bool scriptGone = IsScriptAboutToBeFinalized(&script);
        JS_ASSERT(script == i.get<JSScript>());
        for (unsigned i = 0; i < script->length; i++) {
            BreakpointSite *site = script->getBreakpointSite(script->code + i);
            if (!site)
                continue;
            // nextbp is necessary here to avoid possibly reading *bp after
            // destroying it.
            Breakpoint *nextbp;
            for (Breakpoint *bp = site->firstBreakpoint(); bp; bp = nextbp) {
                nextbp = bp->nextInSite();
                if (scriptGone || IsObjectAboutToBeFinalized(&bp->debugger->toJSObjectRef()))
                    bp->destroy(fop);
            }
        }
    }
}
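// The nextbp dance above is the standard way to delete nodes out of an
// intrusive list while walking it: cache the next link before the current
// node can be freed, so the loop never touches freed memory. A standalone
// sketch of the same idiom; Node and DestroyAll are illustrative, not
// SpiderMonkey types.
struct Node {
    Node *next;
};

static void
DestroyAll(Node *head)
{
    Node *nextNode;
    for (Node *n = head; n; n = nextNode) {
        nextNode = n->next;  // read the link first...
        delete n;            // ...then freeing the node is safe
    }
}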
void
JSCompartment::clearBreakpointsIn(FreeOp *fop, js::Debugger *dbg, JSObject *handler)
{
    for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
        JSScript *script = i.get<JSScript>();
        if (script->compartment() == this && script->hasAnyBreakpointsOrStepMode())
            script->clearBreakpointsIn(fop, dbg, handler);
    }
}
void
JSCompartment::clearTraps(FreeOp *fop)
{
    for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
        JSScript *script = i.get<JSScript>();
        if (script->compartment() == this && script->hasAnyBreakpointsOrStepMode())
            script->clearTraps(fop);
    }
}
bool
JSCompartment::hasScriptsOnStack(JSContext *cx)
{
    for (AllFramesIter i(cx->stack.space()); !i.done(); ++i) {
        JSScript *script = i.fp()->maybeScript();
        if (script && script->compartment() == this)
            return true;
    }
    return false;
}
void
JSCompartment::clearTraps(FreeOp *fop)
{
    MinorGC(fop->runtime(), JS::gcreason::EVICT_NURSERY);

    for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
        JSScript *script = i.get<JSScript>();
        if (script->compartment() == this && script->hasAnyBreakpointsOrStepMode())
            script->clearTraps(fop);
    }
}
void
JS_DumpCompartmentPCCounts(JSContext *cx)
{
    for (CellIter i(cx->zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
        JSScript *script = i.get<JSScript>();
        if (script->compartment() != cx->compartment)
            continue;

        if (script->hasScriptCounts && script->enclosingScriptsCompiledSuccessfully())
            JS_DumpPCCounts(cx, script);
    }
}
bool
TraceLoggerThread::enable(JSContext* cx)
{
    if (!enable())
        return fail(cx, "internal error");

    if (enabled_ == 1) {
        // Get the top Activation to log the top script/pc (No inlined frames).
        ActivationIterator iter(cx->runtime());
        Activation* act = iter.activation();

        if (!act)
            return fail(cx, "internal error");

        JSScript* script = nullptr;
        int32_t engine = 0;

        if (act->isJit()) {
            JitFrameIterator it(iter);

            while (!it.isScripted() && !it.done())
                ++it;

            MOZ_ASSERT(!it.done());
            MOZ_ASSERT(it.isIonJS() || it.isBaselineJS());

            script = it.script();
            engine = it.isIonJS() ? TraceLogger_IonMonkey : TraceLogger_Baseline;
        } else if (act->isWasm()) {
            JS_ReportErrorNumber(cx, GetErrorMessage, nullptr, JSMSG_TRACELOGGER_ENABLE_FAIL,
                                 "not yet supported in wasm code");
            return false;
        } else {
            MOZ_ASSERT(act->isInterpreter());
            InterpreterFrame* fp = act->asInterpreter()->current();
            MOZ_ASSERT(!fp->runningInJit());

            script = fp->script();
            engine = TraceLogger_Interpreter;
            if (script->compartment() != cx->compartment())
                return fail(cx, "compartment mismatch");
        }

        TraceLoggerEvent event(this, TraceLogger_Scripts, script);
        startEvent(event);
        startEvent(engine);
    }

    return true;
}
void
StackSpace::markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
{
    Value *slotsBegin = fp->slots();

    if (!fp->isScriptFrame()) {
        JS_ASSERT(fp->isDummyFrame());
        gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
        return;
    }

    /* If it's a scripted frame, we should have a pc. */
    JS_ASSERT(pc);

    JSScript *script = fp->script();
    if (!script->hasAnalysis() || !script->analysis()->ranLifetimes()) {
        gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
        return;
    }

    /*
     * If the JIT ran a lifetime analysis, then it may have left garbage in the
     * slots considered not live. We need to avoid marking them. Additionally,
     * in case the analysis information is thrown out later, we overwrite these
     * dead slots with valid values so that future GCs won't crash. Analysis
     * results are thrown away during the sweeping phase, so we always have at
     * least one GC to do this.
     */
    analyze::AutoEnterAnalysis aea(script->compartment());
    analyze::ScriptAnalysis *analysis = script->analysis();
    uint32_t offset = pc - script->code;
    Value *fixedEnd = slotsBegin + script->nfixed;
    for (Value *vp = slotsBegin; vp < fixedEnd; vp++) {
        uint32_t slot = analyze::LocalSlot(script, vp - slotsBegin);

        /*
         * Will this slot be synced by the JIT? If not, replace with a dummy
         * value with the same type tag.
         */
        if (!analysis->trackSlot(slot) || analysis->liveness(slot).live(offset))
            gc::MarkValueRoot(trc, vp, "vm_stack");
        else if (vp->isObject())
            *vp = ObjectValue(fp->scopeChain()->global());
        else if (vp->isString())
            *vp = StringValue(trc->runtime->atomState.nullAtom);
    }

    gc::MarkValueRootRange(trc, fixedEnd, slotsEnd, "vm_stack");
}
void
JS::TraceIncomingCCWs(JSTracer* trc, const JS::CompartmentSet& compartments)
{
    for (js::CompartmentsIter comp(trc->runtime(), SkipAtoms); !comp.done(); comp.next()) {
        if (compartments.has(comp))
            continue;

        for (JSCompartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
            const CrossCompartmentKey& key = e.front().key();
            JSObject* obj;
            JSScript* script;

            switch (key.kind) {
              case CrossCompartmentKey::StringWrapper:
                // StringWrappers are just used to avoid copying strings
                // across zones multiple times, and don't hold a strong
                // reference.
                continue;

              case CrossCompartmentKey::ObjectWrapper:
              case CrossCompartmentKey::DebuggerObject:
              case CrossCompartmentKey::DebuggerSource:
              case CrossCompartmentKey::DebuggerEnvironment:
              case CrossCompartmentKey::DebuggerWasmScript:
              case CrossCompartmentKey::DebuggerWasmSource:
                obj = static_cast<JSObject*>(key.wrapped);
                // Ignore CCWs whose wrapped value doesn't live in our given
                // set of compartments.
                if (!compartments.has(obj->compartment()))
                    continue;

                TraceManuallyBarrieredEdge(trc, &obj, "cross-compartment wrapper");
                MOZ_ASSERT(obj == key.wrapped);
                break;

              case CrossCompartmentKey::DebuggerScript:
                script = static_cast<JSScript*>(key.wrapped);
                // Ignore CCWs whose wrapped value doesn't live in our given
                // set of compartments.
                if (!compartments.has(script->compartment()))
                    continue;

                TraceManuallyBarrieredEdge(trc, &script, "cross-compartment wrapper");
                MOZ_ASSERT(script == key.wrapped);
                break;
            }
        }
    }
}
void
js::IterateScripts(JSRuntime *rt, JSCompartment *compartment,
                   void *data, IterateScriptCallback scriptCallback)
{
    AutoPrepareForTracing prep(rt);

    if (compartment) {
        for (CellIterUnderGC i(compartment->zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
            JSScript *script = i.get<JSScript>();
            if (script->compartment() == compartment)
                scriptCallback(rt, data, script);
        }
    } else {
        for (ZonesIter zone(rt); !zone.done(); zone.next()) {
            for (CellIterUnderGC i(zone, gc::FINALIZE_SCRIPT); !i.done(); i.next())
                scriptCallback(rt, data, i.get<JSScript>());
        }
    }
}
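// For context, callers drive IterateScripts by supplying a callback with the
// (JSRuntime *, void *, JSScript *) shape used above. A minimal usage sketch;
// ScriptCount, CountScript and CountAllScripts are illustrative names, not
// engine API.
struct ScriptCount {
    size_t scripts;
};

static void
CountScript(JSRuntime *rt, void *data, JSScript *script)
{
    static_cast<ScriptCount *>(data)->scripts++;
}

static size_t
CountAllScripts(JSRuntime *rt)
{
    ScriptCount count = { 0 };
    // A null compartment walks every zone in the runtime.
    js::IterateScripts(rt, nullptr, &count, CountScript);
    return count.scripts;
}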
void
JS_DumpCompartmentPCCounts(JSContext *cx)
{
    for (CellIter i(cx->zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
        JSScript *script = i.get<JSScript>();
        if (script->compartment() != cx->compartment())
            continue;

        if (script->hasScriptCounts())
            JS_DumpPCCounts(cx, script);
    }

#if defined(JS_ION)
    for (unsigned thingKind = FINALIZE_OBJECT0; thingKind < FINALIZE_OBJECT_LIMIT; thingKind++) {
        for (CellIter i(cx->zone(), (AllocKind) thingKind); !i.done(); i.next()) {
            JSObject *obj = i.get<JSObject>();
            if (obj->compartment() != cx->compartment())
                continue;

            if (obj->is<AsmJSModuleObject>()) {
                AsmJSModule &module = obj->as<AsmJSModuleObject>().module();

                Sprinter sprinter(cx);
                if (!sprinter.init())
                    return;

                fprintf(stdout, "--- Asm.js Module ---\n");

                for (size_t i = 0; i < module.numFunctionCounts(); i++) {
                    jit::IonScriptCounts *counts = module.functionCounts(i);
                    DumpIonScriptCounts(&sprinter, counts);
                }

                fputs(sprinter.string(), stdout);
                fprintf(stdout, "--- END Asm.js Module ---\n");
            }
        }
    }
#endif
}
void
JSCompartment::updateForDebugMode(FreeOp *fop, AutoDebugModeGC &dmgc)
{
    for (ContextIter acx(rt); !acx.done(); acx.next()) {
        if (acx->compartment == this)
            acx->updateJITEnabled();
    }

#ifdef JS_METHODJIT
    bool enabled = debugMode();

    JS_ASSERT_IF(enabled, !hasScriptsOnStack());

    for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
        JSScript *script = i.get<JSScript>();
        if (script->compartment() == this)
            script->debugMode = enabled;
    }

    // When we change a compartment's debug mode, whether we're turning it
    // on or off, we must always throw away all analyses: debug mode
    // affects various aspects of the analysis, which then get baked into
    // SSA results, which affects code generation in complicated ways. We
    // must also throw away all JIT code, as its soundness depends on the
    // analyses.
    //
    // It suffices to do a garbage collection cycle or to finish the
    // ongoing GC cycle. The necessary cleanup happens in
    // JSCompartment::sweep.
    //
    // dmgc makes sure we can't forget to GC, but it is also important not
    // to run any scripts in this compartment until the dmgc is destroyed.
    // That is the caller's responsibility.
    if (!rt->isHeapBusy())
        dmgc.scheduleGC(zone());
#endif
}
void
StackSpace::markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
{
    Value *slotsBegin = fp->slots();

    if (!fp->isScriptFrame()) {
        JS_ASSERT(fp->isDummyFrame());
        gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
        return;
    }

    /* If it's a scripted frame, we should have a pc. */
    JS_ASSERT(pc);

    JSScript *script = fp->script();
    if (!script->hasAnalysis() || !script->analysis()->ranLifetimes()) {
        gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
        return;
    }

    /*
     * If the JIT ran a lifetime analysis, then it may have left garbage in the
     * slots considered not live. We need to avoid marking them. Additionally,
     * in case the analysis information is thrown out later, we overwrite these
     * dead slots with valid values so that future GCs won't crash. Analysis
     * results are thrown away during the sweeping phase, so we always have at
     * least one GC to do this.
     */
    analyze::AutoEnterAnalysis aea(script->compartment());
    analyze::ScriptAnalysis *analysis = script->analysis();
    uint32_t offset = pc - script->code;
    Value *fixedEnd = slotsBegin + script->nfixed;
    for (Value *vp = slotsBegin; vp < fixedEnd; vp++) {
        uint32_t slot = analyze::LocalSlot(script, vp - slotsBegin);

        /*
         * Will this slot be synced by the JIT? If not, replace with a dummy
         * value with the same type tag.
         */
        if (!analysis->trackSlot(slot) || analysis->liveness(slot).live(offset)) {
            gc::MarkValueRoot(trc, vp, "vm_stack");
        } else if (vp->isDouble()) {
            *vp = DoubleValue(0.0);
        } else {
            /*
             * It's possible that *vp may not be a valid Value. For example, it
             * may be tagged as a NullValue but the low bits may be nonzero so
             * that isNull() returns false. This can cause problems later on
             * when marking the value. Extracting the type in this way and then
             * overwriting the value circumvents the problem.
             */
            JSValueType type = vp->extractNonDoubleType();
            if (type == JSVAL_TYPE_INT32)
                *vp = Int32Value(0);
            else if (type == JSVAL_TYPE_UNDEFINED)
                *vp = UndefinedValue();
            else if (type == JSVAL_TYPE_BOOLEAN)
                *vp = BooleanValue(false);
            else if (type == JSVAL_TYPE_STRING)
                *vp = StringValue(trc->runtime->atomState.nullAtom);
            else if (type == JSVAL_TYPE_NULL)
                *vp = NullValue();
            else if (type == JSVAL_TYPE_OBJECT)
                *vp = ObjectValue(fp->scopeChain()->global());
        }
    }

    gc::MarkValueRootRange(trc, fixedEnd, slotsEnd, "vm_stack");
}
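// The final else branch above is, in effect, a total mapping from a non-double
// JSValueType to a canonical, safely-markable Value. A sketch of that mapping
// factored into a helper, using only the Value constructors already used in
// the function; CanonicalValueForType is an illustrative name, not engine API.
static Value
CanonicalValueForType(JSValueType type, JSObject &global, JSString *atom)
{
    switch (type) {
      case JSVAL_TYPE_INT32:     return Int32Value(0);
      case JSVAL_TYPE_UNDEFINED: return UndefinedValue();
      case JSVAL_TYPE_BOOLEAN:   return BooleanValue(false);
      case JSVAL_TYPE_STRING:    return StringValue(atom);
      case JSVAL_TYPE_NULL:      return NullValue();
      case JSVAL_TYPE_OBJECT:    return ObjectValue(global);
      default:                   return UndefinedValue();  // unreachable for valid input
    }
}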
static void
StatsCellCallback(JSRuntime *rt, void *data, void *thing, JSGCTraceKind traceKind,
                  size_t thingSize)
{
    IteratorClosure *closure = static_cast<IteratorClosure *>(data);
    RuntimeStats *rtStats = closure->rtStats;
    ZoneStats *zStats = rtStats->currZoneStats;
    switch (traceKind) {
      case JSTRACE_OBJECT: {
        JSObject *obj = static_cast<JSObject *>(thing);
        CompartmentStats *cStats = GetCompartmentStats(obj->compartment());
        if (obj->is<JSFunction>())
            cStats->gcHeapObjectsFunction += thingSize;
        else if (obj->is<ArrayObject>())
            cStats->gcHeapObjectsDenseArray += thingSize;
        else if (obj->isCrossCompartmentWrapper())
            cStats->gcHeapObjectsCrossCompartmentWrapper += thingSize;
        else
            cStats->gcHeapObjectsOrdinary += thingSize;

        JS::ObjectsExtraSizes objectsExtra;
        obj->sizeOfExcludingThis(rtStats->mallocSizeOf_, &objectsExtra);
        cStats->objectsExtra.add(objectsExtra);

        // JSObject::sizeOfExcludingThis() doesn't measure objectsExtraPrivate,
        // so we do it here.
        if (ObjectPrivateVisitor *opv = closure->opv) {
            nsISupports *iface;
            if (opv->getISupports_(obj, &iface) && iface)
                cStats->objectsExtra.private_ += opv->sizeOfIncludingThis(iface);
        }
        break;
      }

      case JSTRACE_STRING: {
        JSString *str = static_cast<JSString *>(thing);

        size_t strSize = str->sizeOfExcludingThis(rtStats->mallocSizeOf_);

        // If we can't grow hugeStrings, let's just call this string non-huge.
        // We're probably about to OOM anyway.
        if (strSize >= JS::HugeStringInfo::MinSize() && zStats->hugeStrings.growBy(1)) {
            zStats->gcHeapStringsNormal += thingSize;
            JS::HugeStringInfo &info = zStats->hugeStrings.back();
            info.length = str->length();
            info.size = strSize;
            PutEscapedString(info.buffer, sizeof(info.buffer), &str->asLinear(), 0);
        } else if (str->isShort()) {
            MOZ_ASSERT(strSize == 0);
            zStats->gcHeapStringsShort += thingSize;
        } else {
            zStats->gcHeapStringsNormal += thingSize;
            zStats->stringCharsNonHuge += strSize;
        }
        break;
      }

      case JSTRACE_SHAPE: {
        Shape *shape = static_cast<Shape *>(thing);
        CompartmentStats *cStats = GetCompartmentStats(shape->compartment());
        size_t propTableSize, kidsSize;
        shape->sizeOfExcludingThis(rtStats->mallocSizeOf_, &propTableSize, &kidsSize);
        if (shape->inDictionary()) {
            cStats->gcHeapShapesDict += thingSize;
            cStats->shapesExtraDictTables += propTableSize;
            JS_ASSERT(kidsSize == 0);
        } else {
            if (shape->base()->getObjectParent() == shape->compartment()->maybeGlobal())
                cStats->gcHeapShapesTreeGlobalParented += thingSize;
            else
                cStats->gcHeapShapesTreeNonGlobalParented += thingSize;
            cStats->shapesExtraTreeTables += propTableSize;
            cStats->shapesExtraTreeShapeKids += kidsSize;
        }
        break;
      }

      case JSTRACE_BASE_SHAPE: {
        BaseShape *base = static_cast<BaseShape *>(thing);
        CompartmentStats *cStats = GetCompartmentStats(base->compartment());
        cStats->gcHeapShapesBase += thingSize;
        break;
      }

      case JSTRACE_SCRIPT: {
        JSScript *script = static_cast<JSScript *>(thing);
        CompartmentStats *cStats = GetCompartmentStats(script->compartment());
        cStats->gcHeapScripts += thingSize;
        cStats->scriptData += script->sizeOfData(rtStats->mallocSizeOf_);
#ifdef JS_ION
        size_t baselineData = 0, baselineStubsFallback = 0;
        ion::SizeOfBaselineData(script, rtStats->mallocSizeOf_, &baselineData,
                                &baselineStubsFallback);
        cStats->baselineData += baselineData;
        cStats->baselineStubsFallback += baselineStubsFallback;
        cStats->ionData += ion::SizeOfIonData(script, rtStats->mallocSizeOf_);
#endif

        ScriptSource *ss = script->scriptSource();
        SourceSet::AddPtr entry = closure->seenSources.lookupForAdd(ss);
        if (!entry) {
            closure->seenSources.add(entry, ss); // Not much to be done on failure.
            rtStats->runtime.scriptSources += ss->sizeOfIncludingThis(rtStats->mallocSizeOf_);
        }
        break;
      }

      case JSTRACE_LAZY_SCRIPT: {
        LazyScript *lazy = static_cast<LazyScript *>(thing);
        zStats->gcHeapLazyScripts += thingSize;
        zStats->lazyScripts += lazy->sizeOfExcludingThis(rtStats->mallocSizeOf_);
        break;
      }

      case JSTRACE_IONCODE: {
#ifdef JS_ION
        zStats->gcHeapIonCodes += thingSize;
        // The code for a script is counted in ExecutableAllocator::sizeOfCode().
#endif
        break;
      }

      case JSTRACE_TYPE_OBJECT: {
        types::TypeObject *obj = static_cast<types::TypeObject *>(thing);
        zStats->gcHeapTypeObjects += thingSize;
        zStats->typeObjects += obj->sizeOfExcludingThis(rtStats->mallocSizeOf_);
        break;
      }
    }

    // Yes, this is a subtraction: see StatsArenaCallback() for details.
    zStats->gcHeapUnusedGcThings -= thingSize;
}