// Eagerly compile ("delazify") every lazily-parsed root function in cx's
// compartment, then repoint any clones of those functions at the newly
// created scripts. Returns false on OOM or compilation failure.
static bool
CreateLazyScriptsForCompartment(JSContext *cx)
{
    AutoObjectVector lazyFunctions(cx);

    // Find all root lazy functions in the compartment: those which have not been
    // compiled and which have a source object, indicating that their parent has
    // been compiled.
    for (gc::CellIter i(cx->zone(), JSFunction::FinalizeKind); !i.done(); i.next()) {
        JSObject *obj = i.get<JSObject>();
        // The zone iterator can yield functions from other compartments; filter.
        if (obj->compartment() == cx->compartment() && obj->is<JSFunction>()) {
            JSFunction *fun = &obj->as<JSFunction>();
            if (fun->isInterpretedLazy()) {
                LazyScript *lazy = fun->lazyScriptOrNull();
                if (lazy && lazy->sourceObject() && !lazy->maybeScript()) {
                    if (!lazyFunctions.append(fun))
                        return false;
                }
            }
        }
    }

    // Create scripts for each lazy function, updating the list of functions to
    // process with any newly exposed inner functions in created scripts.
    // A function cannot be delazified until its outer script exists.
    for (size_t i = 0; i < lazyFunctions.length(); i++) {
        JSFunction *fun = &lazyFunctions[i]->as<JSFunction>();

        // lazyFunctions may have been populated with multiple functions for
        // a lazy script.
        if (!fun->isInterpretedLazy())
            continue;

        JSScript *script = fun->getOrCreateScript(cx);
        if (!script)
            return false;
        if (!AddInnerLazyFunctionsFromScript(script, lazyFunctions))
            return false;
    }

    // Repoint any clones of the original functions to their new script.
    for (gc::CellIter i(cx->zone(), JSFunction::FinalizeKind); !i.done(); i.next()) {
        JSObject *obj = i.get<JSObject>();
        if (obj->compartment() == cx->compartment() && obj->is<JSFunction>()) {
            JSFunction *fun = &obj->as<JSFunction>();
            if (fun->isInterpretedLazy()) {
                LazyScript *lazy = fun->lazyScriptOrNull();
                // existingScript() switches a still-lazy clone over to the
                // already-created script for its LazyScript.
                if (lazy && lazy->maybeScript())
                    fun->existingScript();
            }
        }
    }

    return true;
}
// Create the holder object that carries this Xray wrapper's bookkeeping
// state: the underlying XPCWrappedNative, the in-progress-resolve marker,
// and the compartment's expando object for that native (or null).
JSObject *
XrayWrapper<Base>::createHolder(JSContext *cx, JSObject *wrappedNative, JSObject *parent)
{
    // The wrapped native is either a genuine WrappedNative or an outer
    // window (an object with an innerObject hook).
    JS_ASSERT(IS_WN_WRAPPER(wrappedNative) || wrappedNative->getClass()->ext.innerObject);

    JSObject *holder = JS_NewObjectWithGivenProto(cx, &HolderClass, nsnull, parent);
    if (!holder)
        return nsnull;

    // Dig out the XPCWrappedNative beneath the (possibly outer) object so we
    // can consult the compartment's expando table for it.
    JSObject *innerObj = wrappedNative;
    OBJ_TO_INNER_OBJECT(cx, innerObj);
    XPCWrappedNative *wn = GetWrappedNative(innerObj);

    CompartmentPrivate *compartmentPriv =
        (CompartmentPrivate *)JS_GetCompartmentPrivate(cx, holder->compartment());
    Value expandoVal = ObjectOrNullValue(compartmentPriv->LookupExpandoObject(wn));

    // Ownership note: the holder stores a raw pointer to the wrapped native
    // without AddRef'ing it. That is safe because the Xray wrapper itself
    // holds the flat JS object (and therefore the wrapped native) alive, and
    // the holder is reachable exactly as long as the wrapper is — so the
    // wrapper's reference doubles as our strong reference.
    holder->setSlot(JSSLOT_WN, PrivateValue(wn));
    holder->setSlot(JSSLOT_RESOLVING, PrivateValue(NULL));
    holder->setSlot(JSSLOT_EXPANDO, expandoVal);
    return holder;
}
// Eagerly compile every live, compilable lazily-parsed function in cx's
// compartment. Returns false on OOM or if compilation fails.
static bool
CreateLazyScriptsForCompartment(JSContext* cx)
{
    AutoObjectVector lazyFunctions(cx);

    // Find all live root lazy functions in the compartment: those which
    // have not been compiled, which have a source object, indicating that
    // they have a parent, and which do not have an uncompiled enclosing
    // script. The last condition is so that we don't compile lazy scripts
    // whose enclosing scripts failed to compile, indicating that the lazy
    // script did not escape the script.
    //
    // Note that while we ideally iterate over LazyScripts, LazyScripts do not
    // currently stand in 1-1 relation with JSScripts; JSFunctions with the
    // same LazyScript may create different JSScripts due to relazification of
    // clones. See bug 1105306.
    for (gc::ZoneCellIter i(cx->zone(), JSFunction::FinalizeKind); !i.done(); i.next()) {
        JSObject* obj = i.get<JSObject>();
        // The zone iterator can yield functions from other compartments; filter.
        if (obj->compartment() == cx->compartment() && obj->is<JSFunction>()) {
            JSFunction* fun = &obj->as<JSFunction>();
            if (fun->isInterpretedLazy()) {
                LazyScript* lazy = fun->lazyScriptOrNull();
                if (lazy && lazy->sourceObject() && !lazy->maybeScript() &&
                    !lazy->hasUncompiledEnclosingScript())
                {
                    if (!lazyFunctions.append(fun))
                        return false;
                }
            }
        }
    }

    // Create scripts for each lazy function, updating the list of functions to
    // process with any newly exposed inner functions in created scripts.
    // A function cannot be delazified until its outer script exists.
    for (size_t i = 0; i < lazyFunctions.length(); i++) {
        JSFunction* fun = &lazyFunctions[i]->as<JSFunction>();

        // lazyFunctions may have been populated with multiple functions for
        // a lazy script.
        if (!fun->isInterpretedLazy())
            continue;

        LazyScript* lazy = fun->lazyScript();
        // Remember whether this LazyScript already had a script before this
        // call; only the first creation exposes new inner lazy functions.
        bool lazyScriptHadNoScript = !lazy->maybeScript();

        JSScript* script = fun->getOrCreateScript(cx);
        if (!script)
            return false;
        if (lazyScriptHadNoScript && !AddInnerLazyFunctionsFromScript(script, lazyFunctions))
            return false;
    }

    return true;
}
// Permanently sever ("nuke") every cross-compartment object wrapper whose
// source compartment matches sourceFilter and whose target's compartment
// matches targetFilter. nukeReferencesToWindow controls whether wrappers to
// window proxies are spared; nukeReferencesFromTarget == NukeAllReferences
// additionally nukes all wrappers inside compartments matching both filters.
js::NukeCrossCompartmentWrappers(JSContext* cx,
                                 const CompartmentFilter& sourceFilter,
                                 const CompartmentFilter& targetFilter,
                                 js::NukeReferencesToWindow nukeReferencesToWindow,
                                 js::NukeReferencesFromTarget nukeReferencesFromTarget)
{
    CHECK_REQUEST(cx);

    JSRuntime* rt = cx->runtime();

    // Tenure everything first so wrapper-map mutation below is safe.
    EvictAllNurseries(rt);

    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
        if (!sourceFilter.match(c))
            continue;

        // If the compartment matches both the source and target filter, we may
        // want to cut both incoming and outgoing wrappers.
        bool nukeAll = (nukeReferencesFromTarget == NukeAllReferences &&
                        targetFilter.match(c));

        // Iterate the wrappers looking for anything interesting.
        for (JSCompartment::WrapperEnum e(c); !e.empty(); e.popFront()) {
            // Some cross-compartment wrappers are for strings. We're not
            // interested in those.
            const CrossCompartmentKey& k = e.front().key();
            if (!k.is<JSObject*>())
                continue;

            AutoWrapperRooter wobj(cx, WrapperValue(e));
            JSObject* wrapped = UncheckedUnwrap(wobj);

            // We never nuke script source objects, since they are only ever
            // used internally by the JS engine, and are expected to remain
            // valid throughout a script's lifetime.
            if (MOZ_UNLIKELY(wrapped->is<ScriptSourceObject>())) {
                continue;
            }

            // We only skip nuking window references that point to a target
            // compartment, not the ones that belong to it.
            if (nukeReferencesToWindow == DontNukeWindowReferences &&
                MOZ_LIKELY(!nukeAll) && IsWindowProxy(wrapped))
            {
                continue;
            }

            if (MOZ_UNLIKELY(nukeAll) || targetFilter.match(wrapped->compartment())) {
                // We found a wrapper to nuke. Remove the map entry before
                // nuking so the map never holds a dead wrapper.
                e.removeFront();
                NukeCrossCompartmentWrapper(cx, wobj);
            }
        }
    }

    return true;
}
// Trace all cross-compartment wrapper keys that point INTO the given set of
// compartments from compartments OUTSIDE that set. String wrappers are
// skipped because they do not hold strong references.
JS::TraceIncomingCCWs(JSTracer* trc, const JS::CompartmentSet& compartments)
{
    for (js::CompartmentsIter comp(trc->runtime(), SkipAtoms); !comp.done(); comp.next()) {
        // Only edges originating outside the set are of interest.
        if (compartments.has(comp))
            continue;

        for (JSCompartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
            const CrossCompartmentKey& key = e.front().key();
            JSObject* obj;
            JSScript* script;

            switch (key.kind) {
              case CrossCompartmentKey::StringWrapper:
                // StringWrappers are just used to avoid copying strings
                // across zones multiple times, and don't hold a strong
                // reference.
                continue;

              case CrossCompartmentKey::ObjectWrapper:
              case CrossCompartmentKey::DebuggerObject:
              case CrossCompartmentKey::DebuggerSource:
              case CrossCompartmentKey::DebuggerEnvironment:
              case CrossCompartmentKey::DebuggerWasmScript:
              case CrossCompartmentKey::DebuggerWasmSource:
                obj = static_cast<JSObject*>(key.wrapped);
                // Ignore CCWs whose wrapped value doesn't live in our given
                // set of compartments.
                if (!compartments.has(obj->compartment()))
                    continue;
                TraceManuallyBarrieredEdge(trc, &obj, "cross-compartment wrapper");
                // Tracing must not have moved the key.
                MOZ_ASSERT(obj == key.wrapped);
                break;

              case CrossCompartmentKey::DebuggerScript:
                script = static_cast<JSScript*>(key.wrapped);
                // Ignore CCWs whose wrapped value doesn't live in our given
                // set of compartments.
                if (!compartments.has(script->compartment()))
                    continue;
                TraceManuallyBarrieredEdge(trc, &script, "cross-compartment wrapper");
                MOZ_ASSERT(script == key.wrapped);
                break;
            }
        }
    }
}
// Rewrite `receiver` so it can be used in cx's compartment. When the receiver
// is the wrapper itself we can usually just peel the wrapper off; if the
// wrapped object is itself a wrapper, or the receiver is something else
// entirely, we fall back to the compartment's generic wrap() (which calls
// UncheckedUnwrap to strip all wrappers).
static bool
WrapReceiver(JSContext* cx, HandleObject wrapper, MutableHandleValue receiver)
{
    // Anything other than the wrapper itself takes the slow path.
    if (ObjectValue(*wrapper) != receiver)
        return cx->compartment()->wrap(cx, receiver);

    JSObject* target = Wrapper::wrappedObject(wrapper);

    // A doubly-wrapped target also takes the slow path.
    if (IsWrapper(target))
        return cx->compartment()->wrap(cx, receiver);

    // Fast path: substitute the naked target object for the wrapper.
    MOZ_ASSERT(target->compartment() == cx->compartment());
    MOZ_ASSERT(!IsWindow(target));
    receiver.setObject(*target);
    return true;
}
// Nuke all cross-compartment object wrappers going from compartments matched
// by sourceFilter to objects in compartments matched by targetFilter,
// optionally sparing wrappers that point at window proxies.
js::NukeCrossCompartmentWrappers(JSContext* cx,
                                 const CompartmentFilter& sourceFilter,
                                 const CompartmentFilter& targetFilter,
                                 js::NukeReferencesToWindow nukeReferencesToWindow)
{
    CHECK_REQUEST(cx);

    JSRuntime* rt = cx->runtime();

    // Tenure everything first so wrapper-map mutation below is safe.
    rt->gc.evictNursery(JS::gcreason::EVICT_NURSERY);

    // Iterate through scopes looking for system cross compartment wrappers
    // that point to an object that shares a global with obj.
    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
        if (!sourceFilter.match(c))
            continue;

        // Iterate the wrappers looking for anything interesting.
        for (JSCompartment::WrapperEnum e(c); !e.empty(); e.popFront()) {
            // Some cross-compartment wrappers are for strings. We're not
            // interested in those.
            const CrossCompartmentKey& k = e.front().key();
            if (!k.is<JSObject*>())
                continue;

            AutoWrapperRooter wobj(cx, WrapperValue(e));
            JSObject* wrapped = UncheckedUnwrap(wobj);

            // Optionally spare wrappers that point at window proxies.
            if (nukeReferencesToWindow == DontNukeWindowReferences &&
                IsWindowProxy(wrapped))
            {
                continue;
            }

            if (targetFilter.match(wrapped->compartment())) {
                // We found a wrapper to nuke. Remove the map entry before
                // nuking so the map never holds a dead wrapper.
                e.removeFront();
                NukeRemovedCrossCompartmentWrapper(cx, wobj);
            }
        }
    }

    return true;
}
// GC trace hook for proxies: traces the shape, the proxy target, and every
// reserved slot (except the CCW gray-link slot), then defers to the proxy
// handler via Proxy::trace.
/* static */ void
ProxyObject::trace(JSTracer* trc, JSObject* obj)
{
    ProxyObject* proxy = &obj->as<ProxyObject>();

    TraceEdge(trc, proxy->shapePtr(), "ProxyObject_shape");

#ifdef DEBUG
    if (TlsContext.get()->isStrictProxyCheckingEnabled() && proxy->is<WrapperObject>()) {
        JSObject* referent = MaybeForwarded(proxy->target());
        if (referent->compartment() != proxy->compartment()) {
            /*
             * Assert that this proxy is tracked in the wrapper map. We maintain
             * the invariant that the wrapped object is the key in the wrapper map.
             */
            Value key = ObjectValue(*referent);
            WrapperMap::Ptr p = proxy->compartment()->lookupWrapper(key);
            MOZ_ASSERT(p);
            MOZ_ASSERT(*p->value().unsafeGet() == ObjectValue(*proxy));
        }
    }
#endif

    // Note: If you add new slots here, make sure to change
    // nuke() to cope.
    traceEdgeToTarget(trc, proxy);

    size_t nreserved = proxy->numReservedSlots();
    for (size_t i = 0; i < nreserved; i++) {
        /*
         * The GC can use the second reserved slot to link the cross compartment
         * wrappers into a linked list, in which case we don't want to trace it.
         */
        if (proxy->is<CrossCompartmentWrapperObject>() &&
            i == CrossCompartmentWrapperObject::GrayLinkReservedSlot)
        {
            continue;
        }
        TraceEdge(trc, proxy->reservedSlotPtr(i), "proxy_reserved");
    }

    Proxy::trace(trc, obj);
}
// Call WaiveXrayAndWrap when you have a JS object that you don't want to be // wrapped in an Xray wrapper. cx->compartment is the compartment that will be // using the returned object. If the object to be wrapped is already in the // correct compartment, then this returns the unwrapped object. bool WrapperFactory::WaiveXrayAndWrap(JSContext *cx, jsval *vp) { if (JSVAL_IS_PRIMITIVE(*vp)) return JS_WrapValue(cx, vp); JSObject *obj = JSVAL_TO_OBJECT(*vp)->unwrap(); obj = GetCurrentOuter(cx, obj); if (obj->compartment() == cx->compartment) { *vp = OBJECT_TO_JSVAL(obj); return true; } obj = WaiveXray(cx, obj); if (!obj) return false; *vp = OBJECT_TO_JSVAL(obj); return JS_WrapValue(cx, vp); }
// Push a "dummy" stack frame for compartment `dest`, used to enter a
// compartment without executing a real script. The guard `dfg` records the
// pushed segment/registers so the previous state can be restored when the
// guard is torn down. Returns false on stack-space exhaustion.
bool
ContextStack::pushDummyFrame(JSContext *cx, JSCompartment *dest, JSObject &scopeChain, DummyFrameGuard *dfg)
{
    // The dummy frame must live in the compartment being entered.
    JS_ASSERT(dest == scopeChain.compartment());

    // Reserve just enough stack space for the frame header itself.
    unsigned nvars = VALUES_PER_STACK_FRAME;
    Value *firstUnused = ensureOnTop(cx, REPORT_ERROR, nvars, CAN_EXTEND, &dfg->pushedSeg_, dest);
    if (!firstUnused)
        return false;

    StackFrame *fp = reinterpret_cast<StackFrame *>(firstUnused);
    fp->initDummyFrame(cx, scopeChain);
    dfg->regs_.initDummyFrame(*fp);

    // Switch the context into the destination compartment and activate the
    // new register set; the guard remembers the previous registers.
    cx->setCompartment(dest);
    dfg->prevRegs_ = seg_->pushRegs(dfg->regs_);
    JS_ASSERT(space().firstUnused() == dfg->regs_.sp);
    dfg->setPushed(*this);

    return true;
}
// Nuke (permanently invalidate) cross-compartment object wrappers going from
// compartments matched by sourceFilter to objects in compartments matched by
// targetFilter. Wrappers to outer windows (objects with an innerObject hook)
// can be spared via nukeReferencesToWindow.
js::NukeCrossCompartmentWrappers(JSContext* cx,
                                 const CompartmentFilter& sourceFilter,
                                 const CompartmentFilter& targetFilter,
                                 js::NukeReferencesToWindow nukeReferencesToWindow)
{
    CHECK_REQUEST(cx);

    JSRuntime *rt = cx->runtime;

    // Iterate through scopes looking for system cross compartment wrappers
    // that point to an object that shares a global with obj.
    for (CompartmentsIter c(rt); !c.done(); c.next()) {
        if (!sourceFilter.match(c))
            continue;

        // Iterate the wrappers looking for anything interesting.
        WrapperMap &pmap = c->crossCompartmentWrappers;
        for (WrapperMap::Enum e(pmap); !e.empty(); e.popFront()) {
            // Some cross-compartment wrappers are for strings. We're not
            // interested in those.
            const CrossCompartmentKey &k = e.front().key;
            if (k.kind != CrossCompartmentKey::ObjectWrapper)
                continue;

            JSObject *wobj = &e.front().value.get().toObject();
            JSObject *wrapped = UnwrapObject(wobj);

            // An innerObject hook identifies an outer window; optionally
            // leave wrappers to those intact.
            if (nukeReferencesToWindow == DontNukeWindowReferences &&
                wrapped->getClass()->ext.innerObject)
                continue;

            if (targetFilter.match(wrapped->compartment())) {
                // We found a wrapper to nuke. Remove the map entry before
                // nuking so the map never holds a dead wrapper.
                e.removeFront();
                NukeCrossCompartmentWrapper(wobj);
            }
        }
    }

    return JS_TRUE;
}
// Dump collected PC counts for every script in cx's compartment to stdout,
// followed (when Ion is built) by the Ion script counts of every asm.js
// module in the compartment. Returns early only if a Sprinter fails to
// initialize (OOM).
JS_DumpCompartmentPCCounts(JSContext *cx)
{
    // Interpreter PC counts: one dump per script that actually has counts.
    for (CellIter i(cx->zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
        JSScript *script = i.get<JSScript>();
        if (script->compartment() != cx->compartment())
            continue;

        if (script->hasScriptCounts())
            JS_DumpPCCounts(cx, script);
    }

#if defined(JS_ION)
    // asm.js module counts live on objects, so walk every object alloc kind.
    for (unsigned thingKind = FINALIZE_OBJECT0; thingKind < FINALIZE_OBJECT_LIMIT; thingKind++) {
        for (CellIter i(cx->zone(), (AllocKind) thingKind); !i.done(); i.next()) {
            JSObject *obj = i.get<JSObject>();
            if (obj->compartment() != cx->compartment())
                continue;

            if (obj->is<AsmJSModuleObject>()) {
                AsmJSModule &module = obj->as<AsmJSModuleObject>().module();

                Sprinter sprinter(cx);
                if (!sprinter.init())
                    return;

                fprintf(stdout, "--- Asm.js Module ---\n");

                // Use 'j' here: the original 'i' shadowed the CellIter above,
                // which was confusing and error-prone.
                for (size_t j = 0; j < module.numFunctionCounts(); j++) {
                    jit::IonScriptCounts *counts = module.functionCounts(j);
                    DumpIonScriptCounts(&sprinter, counts);
                }

                fputs(sprinter.string(), stdout);
                fprintf(stdout, "--- END Asm.js Module ---\n");
            }
        }
    }
#endif
}
// Append to lazyFunctions every live root lazy function of the given alloc
// kind in cx's compartment. Returns false on OOM.
static bool
AddLazyFunctionsForCompartment(JSContext* cx, AutoObjectVector& lazyFunctions, AllocKind kind)
{
    // Find all live root lazy functions in the compartment: those which
    // have not been compiled, which have a source object, indicating that
    // they have a parent, and which do not have an uncompiled enclosing
    // script. The last condition is so that we don't compile lazy scripts
    // whose enclosing scripts failed to compile, indicating that the lazy
    // script did not escape the script.
    for (gc::ZoneCellIter i(cx->zone(), kind); !i.done(); i.next()) {
        JSObject* obj = i.get<JSObject>();

        // Sweeping is incremental; take care to not delazify functions that
        // are about to be finalized. GC things referenced by objects that are
        // about to be finalized (e.g., in slots) may already be freed.
        if (gc::IsObjectAboutToBeFinalized(&obj) ||
            obj->compartment() != cx->compartment() ||
            !obj->is<JSFunction>())
        {
            continue;
        }

        JSFunction* fun = &obj->as<JSFunction>();
        if (fun->isInterpretedLazy()) {
            LazyScript* lazy = fun->lazyScriptOrNull();
            if (lazy && lazy->sourceObject() && !lazy->maybeScript() &&
                !lazy->hasUncompiledEnclosingScript())
            {
                if (!lazyFunctions.append(fun))
                    return false;
            }
        }
    }

    return true;
}
// Process one entry popped off the mark stack: either an object to scan or a
// saved range of Values (slots/elements) to finish scanning. Honors the
// slice budget by pushing unfinished work back onto the stack.
inline void
GCMarker::processMarkStackTop(SliceBudget &budget)
{
    /*
     * The function uses explicit goto and implements the scanning of the
     * object directly. It allows to eliminate the tail recursion and
     * significantly improve the marking performance, see bug 641025.
     */
    HeapSlot *vp, *end;
    JSObject *obj;

    // Pop the next entry; its kind is encoded in the low tag bits.
    uintptr_t addr = stack.pop();
    uintptr_t tag = addr & StackTagMask;
    addr &= ~StackTagMask;

    if (tag == ValueArrayTag) {
        JS_STATIC_ASSERT(ValueArrayTag == 0);
        JS_ASSERT(!(addr & Cell::CellMask));
        obj = reinterpret_cast<JSObject *>(addr);
        // A value-array entry occupies three stack words: the owning object
        // followed by the begin and end addresses of the slot range.
        uintptr_t addr2 = stack.pop();
        uintptr_t addr3 = stack.pop();
        JS_ASSERT(addr2 <= addr3);
        JS_ASSERT((addr3 - addr2) % sizeof(Value) == 0);
        vp = reinterpret_cast<HeapSlot *>(addr2);
        end = reinterpret_cast<HeapSlot *>(addr3);
        goto scan_value_array;
    }

    if (tag == ObjectTag) {
        obj = reinterpret_cast<JSObject *>(addr);
        JS_COMPARTMENT_ASSERT(runtime, obj);
        goto scan_obj;
    }

    // Any other tag is handled out of line.
    processMarkStackOther(budget, tag, addr);
    return;

  scan_value_array:
    JS_ASSERT(vp <= end);
    while (vp != end) {
        const Value &v = *vp++;
        if (v.isString()) {
            JSString *str = v.toString();
            JS_COMPARTMENT_ASSERT_STR(runtime, str);
            JS_ASSERT(str->compartment() == runtime->atomsCompartment ||
                      str->compartment() == obj->compartment());
            if (str->markIfUnmarked())
                ScanString(this, str);
        } else if (v.isObject()) {
            JSObject *obj2 = &v.toObject();
            JS_COMPARTMENT_ASSERT(runtime, obj2);
            JS_ASSERT(obj->compartment() == obj2->compartment());
            if (obj2->markIfUnmarked(getMarkColor())) {
                // Save the remainder of this array for later and scan the
                // newly-marked object immediately (depth-first via goto).
                pushValueArray(obj, vp, end);
                obj = obj2;
                goto scan_obj;
            }
        }
    }
    return;

  scan_obj:
    {
        JS_COMPARTMENT_ASSERT(runtime, obj);

        // Charge this object against the slice budget; if the budget is
        // exhausted, push it back so the next slice resumes here.
        budget.step();
        if (budget.isOverBudget()) {
            pushObject(obj);
            return;
        }

        types::TypeObject *type = obj->typeFromGC();
        PushMarkStack(this, type);

        Shape *shape = obj->lastProperty();
        PushMarkStack(this, shape);

        /* Call the trace hook if necessary. */
        Class *clasp = shape->getObjectClass();
        if (clasp->trace) {
            if (clasp == &ArrayClass) {
                // Dense arrays are scanned inline instead of via the hook.
                JS_ASSERT(!shape->isNative());
                vp = obj->getDenseArrayElements();
                end = vp + obj->getDenseArrayInitializedLength();
                goto scan_value_array;
            } else {
                // Under incremental GC, trace hooks must implement barriers.
                JS_ASSERT_IF(runtime->gcMode == JSGC_MODE_INCREMENTAL &&
                             runtime->gcIncrementalEnabled,
                             clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS);
            }
            clasp->trace(this, obj);
        }

        if (!shape->isNative())
            return;

        // Scan the object's slots: fixed slots first, then dynamic slots.
        unsigned nslots = obj->slotSpan();
        vp = obj->fixedSlots();
        if (obj->slots) {
            unsigned nfixed = obj->numFixedSlots();
            if (nslots > nfixed) {
                pushValueArray(obj, vp, vp + nfixed);
                vp = obj->slots;
                end = vp + (nslots - nfixed);
                goto scan_value_array;
            }
        }
        JS_ASSERT(nslots <= obj->numFixedSlots());
        end = vp + nslots;
        goto scan_value_array;
    }
}
// Wrap or copy *vp in place so it can be used from this compartment.
// Non-atom strings are copied; objects get cross-compartment wrappers, which
// are cached in crossCompartmentWrappers. Returns false on error.
bool
JSCompartment::wrap(JSContext *cx, Value *vp)
{
    JS_ASSERT(cx->compartment == this);

    unsigned flags = 0;

    JS_CHECK_RECURSION(cx, return false);

    /* Only GC things have to be wrapped or copied. */
    if (!vp->isMarkable())
        return true;

    if (vp->isString()) {
        JSString *str = vp->toString();

        /* If the string is already in this compartment, we are done. */
        if (str->compartment() == this)
            return true;

        /* If the string is an atom, we don't have to copy. */
        if (str->isAtom()) {
            JS_ASSERT(str->compartment() == cx->runtime->atomsCompartment);
            return true;
        }
    }

    /*
     * Wrappers should really be parented to the wrapped parent of the wrapped
     * object, but in that case a wrapped global object would have a NULL
     * parent without being a proper global object (JSCLASS_IS_GLOBAL). Instead,
     * we parent all wrappers to the global object in their home compartment.
     * This loses us some transparency, and is generally very cheesy.
     */
    JSObject *global;
    if (cx->hasfp()) {
        global = &cx->fp()->scopeChain().global();
    } else {
        global = JS_ObjectToInnerObject(cx, cx->globalObject);
        if (!global)
            return false;
    }

    /* Unwrap incoming objects. */
    if (vp->isObject()) {
        JSObject *obj = &vp->toObject();

        /* If the object is already in this compartment, we are done. */
        if (obj->compartment() == this)
            return true;

        /* Translate StopIteration singleton. */
        if (obj->isStopIteration())
            return js_FindClassObject(cx, NULL, JSProto_StopIteration, vp);

        /* Don't unwrap an outer window proxy. */
        if (!obj->getClass()->ext.innerObject) {
            obj = UnwrapObject(&vp->toObject(), true, &flags);
            vp->setObject(*obj);
            if (obj->compartment() == this)
                return true;

            // Give the embedding a chance to substitute its own object.
            if (cx->runtime->preWrapObjectCallback) {
                obj = cx->runtime->preWrapObjectCallback(cx, global, obj, flags);
                if (!obj)
                    return false;
            }

            vp->setObject(*obj);
            if (obj->compartment() == this)
                return true;
        } else {
            if (cx->runtime->preWrapObjectCallback) {
                obj = cx->runtime->preWrapObjectCallback(cx, global, obj, flags);
                if (!obj)
                    return false;
            }

            JS_ASSERT(!obj->isWrapper() || obj->getClass()->ext.innerObject);
            vp->setObject(*obj);
        }

#ifdef DEBUG
        {
            // The object we are about to wrap must already be an outer object.
            JSObject *outer = obj;
            OBJ_TO_OUTER_OBJECT(cx, outer);
            JS_ASSERT(outer && outer == obj);
        }
#endif
    }

    /* If we already have a wrapper for this value, use it. */
    if (WrapperMap::Ptr p = crossCompartmentWrappers.lookup(*vp)) {
        *vp = p->value;
        if (vp->isObject()) {
            JSObject *obj = &vp->toObject();
            JS_ASSERT(obj->isCrossCompartmentWrapper());
            // Reparent the cached wrapper (and its wrapper protos) to our
            // global if it was created under a different one.
            if (global->getClass() != &dummy_class && obj->getParent() != global) {
                do {
                    if (!obj->setParent(cx, global))
                        return false;
                    obj = obj->getProto();
                } while (obj && obj->isCrossCompartmentWrapper());
            }
        }
        return true;
    }

    if (vp->isString()) {
        // Copy the string's characters into this compartment and cache the
        // copy so future wraps of the same string reuse it.
        Value orig = *vp;
        JSString *str = vp->toString();
        const jschar *chars = str->getChars(cx);
        if (!chars)
            return false;
        JSString *wrapped = js_NewStringCopyN(cx, chars, str->length());
        if (!wrapped)
            return false;
        vp->setString(wrapped);
        return crossCompartmentWrappers.put(orig, *vp);
    }

    JSObject *obj = &vp->toObject();

    /*
     * Recurse to wrap the prototype. Long prototype chains will run out of
     * stack, causing an error in CHECK_RECURSE.
     *
     * Wrapping the proto before creating the new wrapper and adding it to the
     * cache helps avoid leaving a bad entry in the cache on OOM. But note that
     * if we wrapped both proto and parent, we would get infinite recursion
     * here (since Object.prototype->parent->proto leads to Object.prototype
     * itself).
     */
    JSObject *proto = obj->getProto();
    if (!wrap(cx, &proto))
        return false;

    /*
     * We hand in the original wrapped object into the wrap hook to allow
     * the wrap hook to reason over what wrappers are currently applied
     * to the object.
     */
    JSObject *wrapper = cx->runtime->wrapObjectCallback(cx, obj, proto, global, flags);
    if (!wrapper)
        return false;

    vp->setObject(*wrapper);

    if (wrapper->getProto() != proto && !SetProto(cx, wrapper, proto, false))
        return false;

    if (!crossCompartmentWrappers.put(GetProxyPrivate(wrapper), *vp))
        return false;

    if (!wrapper->setParent(cx, global))
        return false;
    return true;
}
// Begin compiling the given source on a worker thread. A fresh zone and
// global are created for the parse (copying this compartment's options and
// principals) so the worker cannot race with the main thread; callback is
// invoked when parsing finishes. Returns false on error, in which case no
// task was queued.
bool
js::StartOffThreadParseScript(JSContext *cx, const ReadOnlyCompileOptions &options,
                              const jschar *chars, size_t length, HandleObject scopeChain,
                              JS::OffThreadCompileCallback callback, void *callbackData)
{
    // Suppress GC so that calls below do not trigger a new incremental GC
    // which could require barriers on the atoms compartment.
    gc::AutoSuppressGC suppress(cx);

    frontend::MaybeCallSourceHandler(cx, options, chars, length);

    if (!EnsureWorkerThreadsInitialized(cx))
        return false;

    JS::CompartmentOptions compartmentOptions(cx->compartment()->options());
    compartmentOptions.setZone(JS::FreshZone);

    JSObject *global = JS_NewGlobalObject(cx, &workerGlobalClass, nullptr,
                                          JS::FireOnNewGlobalHook, compartmentOptions);
    if (!global)
        return false;

    global->zone()->types.inferenceEnabled = cx->typeInferenceEnabled();
    JS_SetCompartmentPrincipals(global->compartment(), cx->compartment()->principals);

    RootedObject obj(cx);

    // Initialize all classes needed for parsing while we are still on the main
    // thread. Do this for both the target and the new global so that prototype
    // pointers can be changed infallibly after parsing finishes.
    if (!js_GetClassObject(cx, cx->global(), JSProto_Function, &obj) ||
        !js_GetClassObject(cx, cx->global(), JSProto_Array, &obj) ||
        !js_GetClassObject(cx, cx->global(), JSProto_RegExp, &obj) ||
        !js_GetClassObject(cx, cx->global(), JSProto_GeneratorFunction, &obj))
    {
        return false;
    }
    {
        AutoCompartment ac(cx, global);
        if (!js_GetClassObject(cx, global, JSProto_Function, &obj) ||
            !js_GetClassObject(cx, global, JSProto_Array, &obj) ||
            !js_GetClassObject(cx, global, JSProto_RegExp, &obj) ||
            !js_GetClassObject(cx, global, JSProto_GeneratorFunction, &obj))
        {
            return false;
        }
    }

    ScopedJSDeletePtr<ExclusiveContext> workercx(
        cx->new_<ExclusiveContext>(cx->runtime(), (PerThreadData *) nullptr,
                                   ThreadSafeContext::Context_Exclusive));
    if (!workercx)
        return false;

    ScopedJSDeletePtr<ParseTask> task(
        cx->new_<ParseTask>(workercx.get(), global, cx, chars, length,
                            scopeChain, callback, callbackData));
    if (!task)
        return false;

    // The task now owns the worker context.
    workercx.forget();

    if (!task->init(cx, options))
        return false;

    WorkerThreadState &state = *cx->runtime()->workerThreadState;
    JS_ASSERT(state.numThreads);

    // Off thread parsing can't occur during incremental collections on the
    // atoms compartment, to avoid triggering barriers. (Outside the atoms
    // compartment, the compilation will use a new zone which doesn't require
    // barriers itself.) If an atoms-zone GC is in progress, hold off on
    // executing the parse task until the atoms-zone GC completes (see
    // EnqueuePendingParseTasksAfterGC).
    if (cx->runtime()->activeGCInAtomsZone()) {
        if (!state.parseWaitingOnGC.append(task.get()))
            return false;
    } else {
        task->activate(cx->runtime());

        AutoLockWorkerThreadState lock(state);

        if (!state.parseWorklist.append(task.get()))
            return false;

        state.notifyAll(WorkerThreadState::PRODUCER);
    }

    // Ownership of the task has transferred to the queue.
    task.forget();

    return true;
}
// Begin compiling the given source on a worker thread using a fresh zone and
// global. Returns false on error, in which case no parse task was queued.
bool
js::StartOffThreadParseScript(JSContext *cx, const CompileOptions &options,
                              const jschar *chars, size_t length, HandleObject scopeChain,
                              JS::OffThreadCompileCallback callback, void *callbackData)
{
    // Suppress GC so that calls below do not trigger a new incremental GC
    // which could require barriers on the atoms compartment.
    gc::AutoSuppressGC suppress(cx);

    frontend::MaybeCallSourceHandler(cx, options, chars, length);

    if (!EnsureWorkerThreadsInitialized(cx))
        return false;

    JS::CompartmentOptions compartmentOptions(cx->compartment()->options());
    compartmentOptions.setZone(JS::FreshZone);

    JSObject *global = JS_NewGlobalObject(cx, &workerGlobalClass, NULL,
                                          JS::FireOnNewGlobalHook, compartmentOptions);
    if (!global)
        return false;

    global->zone()->types.inferenceEnabled = cx->typeInferenceEnabled();
    JS_SetCompartmentPrincipals(global->compartment(), cx->compartment()->principals);

    RootedObject obj(cx);

    // Initialize all classes needed for parsing while we are still on the main
    // thread. Do this for both the target and the new global so that prototype
    // pointers can be changed infallibly after parsing finishes.
    if (!js_GetClassObject(cx, cx->global(), JSProto_Function, &obj) ||
        !js_GetClassObject(cx, cx->global(), JSProto_Array, &obj) ||
        !js_GetClassObject(cx, cx->global(), JSProto_RegExp, &obj) ||
        !js_GetClassObject(cx, cx->global(), JSProto_GeneratorFunction, &obj))
    {
        return false;
    }
    {
        AutoCompartment ac(cx, global);
        if (!js_GetClassObject(cx, global, JSProto_Function, &obj) ||
            !js_GetClassObject(cx, global, JSProto_Array, &obj) ||
            !js_GetClassObject(cx, global, JSProto_RegExp, &obj) ||
            !js_GetClassObject(cx, global, JSProto_GeneratorFunction, &obj))
        {
            return false;
        }
    }

    // Mark the new zone as in use by an exclusive (non-main) thread.
    cx->runtime()->setUsedByExclusiveThread(global->zone());

    ScopedJSDeletePtr<ExclusiveContext> workercx(
        cx->new_<ExclusiveContext>(cx->runtime(), (PerThreadData *) NULL,
                                   ThreadSafeContext::Context_Exclusive));
    if (!workercx)
        return false;

    workercx->enterCompartment(global->compartment());

    ScopedJSDeletePtr<ParseTask> task(
        cx->new_<ParseTask>(global->zone(), workercx.get(), options, chars, length,
                            scopeChain, callback, callbackData));
    if (!task)
        return false;

    // The task now owns the worker context.
    workercx.forget();

    WorkerThreadState &state = *cx->runtime()->workerThreadState;
    JS_ASSERT(state.numThreads);

    // Queue the task under the worker-state lock and wake a worker;
    // ownership of the task transfers to the worklist on success.
    AutoLockWorkerThreadState lock(state);

    if (!state.parseWorklist.append(task.get()))
        return false;

    task.forget();
    state.notify(WorkerThreadState::WORKER);

    return true;
}
// Begin compiling the given source on a helper thread, in a fresh
// debugger-invisible, mergeable global/zone. Returns false on error, in
// which case no parse task was queued.
bool
js::StartOffThreadParseScript(JSContext *cx, const ReadOnlyCompileOptions &options,
                              const jschar *chars, size_t length,
                              JS::OffThreadCompileCallback callback, void *callbackData)
{
    // Suppress GC so that calls below do not trigger a new incremental GC
    // which could require barriers on the atoms compartment.
    gc::AutoSuppressGC suppress(cx);

    SourceBufferHolder srcBuf(chars, length, SourceBufferHolder::NoOwnership);
    frontend::MaybeCallSourceHandler(cx, options, srcBuf);

    EnsureHelperThreadsInitialized(cx);

    JS::CompartmentOptions compartmentOptions(cx->compartment()->options());
    compartmentOptions.setZone(JS::FreshZone);
    // The parse global must be invisible to the debugger and eligible for
    // merging back into the target compartment when parsing finishes.
    compartmentOptions.setInvisibleToDebugger(true);
    compartmentOptions.setMergeable(true);

    // Don't falsely inherit the host's global trace hook.
    compartmentOptions.setTrace(nullptr);

    JSObject *global = JS_NewGlobalObject(cx, &parseTaskGlobalClass, nullptr,
                                          JS::FireOnNewGlobalHook, compartmentOptions);
    if (!global)
        return false;

    JS_SetCompartmentPrincipals(global->compartment(), cx->compartment()->principals);

    RootedObject obj(cx);

    // Initialize all classes needed for parsing while we are still on the main
    // thread. Do this for both the target and the new global so that prototype
    // pointers can be changed infallibly after parsing finishes.
    if (!GetBuiltinConstructor(cx, JSProto_Function, &obj) ||
        !GetBuiltinConstructor(cx, JSProto_Array, &obj) ||
        !GetBuiltinConstructor(cx, JSProto_RegExp, &obj) ||
        !GetBuiltinConstructor(cx, JSProto_Iterator, &obj))
    {
        return false;
    }
    {
        AutoCompartment ac(cx, global);
        if (!GetBuiltinConstructor(cx, JSProto_Function, &obj) ||
            !GetBuiltinConstructor(cx, JSProto_Array, &obj) ||
            !GetBuiltinConstructor(cx, JSProto_RegExp, &obj) ||
            !GetBuiltinConstructor(cx, JSProto_Iterator, &obj))
        {
            return false;
        }
    }

    ScopedJSDeletePtr<ExclusiveContext> helpercx(
        cx->new_<ExclusiveContext>(cx->runtime(), (PerThreadData *) nullptr,
                                   ThreadSafeContext::Context_Exclusive));
    if (!helpercx)
        return false;

    ScopedJSDeletePtr<ParseTask> task(
        cx->new_<ParseTask>(helpercx.get(), global, cx, chars, length,
                            callback, callbackData));
    if (!task)
        return false;

    // The task now owns the helper context.
    helpercx.forget();

    if (!task->init(cx, options))
        return false;

    // If off-thread parsing must wait for an in-progress GC, park the task;
    // otherwise queue it and wake a helper thread.
    if (OffThreadParsingMustWaitForGC(cx->runtime())) {
        AutoLockHelperThreadState lock;
        if (!HelperThreadState().parseWaitingOnGC().append(task.get()))
            return false;
    } else {
        task->activate(cx->runtime());

        AutoLockHelperThreadState lock;

        if (!HelperThreadState().parseWorklist().append(task.get()))
            return false;

        HelperThreadState().notifyOne(GlobalHelperThreadState::PRODUCER);
    }

    // Ownership of the task has transferred to the queue.
    task.forget();

    return true;
}
// Worker-thread variant: begin an off-main-thread parse of |chars| under
// |options|. Creates a fresh global in its own zone for the parse,
// pre-initializes the JS classes the parser needs while still on the main
// thread, then queues a ParseTask on the worker parse worklist and wakes a
// worker. Returns false on OOM or setup failure; on success the worklist
// owns the task (and its ExclusiveContext).
bool
js::StartOffThreadParseScript(JSContext *cx, const CompileOptions &options,
                              const jschar *chars, size_t length)
{
    frontend::MaybeCallSourceHandler(cx, options, chars, length);

    JSRuntime *rt = cx->runtime();
    if (!EnsureWorkerThreadsInitialized(rt))
        return false;

    // Parse into a fresh zone so the worker can run exclusively in it.
    JS::CompartmentOptions compartmentOptions(cx->compartment()->options());
    compartmentOptions.setZone(JS::FreshZone);

    JSObject *global = JS_NewGlobalObject(cx, &workerGlobalClass, NULL,
                                          JS::FireOnNewGlobalHook, compartmentOptions);
    if (!global)
        return false;

    // For now, type inference is always disabled in exclusive zones.
    // This restriction would be fairly easy to lift.
    global->zone()->types.inferenceEnabled = false;

    // Initialize all classes needed for parsing while we are still on the main
    // thread.
    {
        AutoCompartment ac(cx, global);

        RootedObject obj(cx);
        if (!js_GetClassObject(cx, global, JSProto_Function, &obj) ||
            !js_GetClassObject(cx, global, JSProto_Array, &obj) ||
            !js_GetClassObject(cx, global, JSProto_RegExp, &obj))
        {
            return false;
        }
    }

    // Mark the zone as exclusively owned by the worker from here on; the
    // main thread must no longer touch its contents until the parse is
    // finished and merged back.
    global->zone()->usedByExclusiveThread = true;

    ScopedJSDeletePtr<ExclusiveContext> workercx(
        cx->new_<ExclusiveContext>(cx->runtime(), (PerThreadData *) NULL,
                                   ThreadSafeContext::Context_Exclusive));
    if (!workercx)
        return false;

    workercx->enterCompartment(global->compartment());

    ScopedJSDeletePtr<ParseTask> task(
        cx->new_<ParseTask>(cx->runtime(), workercx.get(), options, chars, length));
    if (!task)
        return false;

    // The task now owns the worker context.
    workercx.forget();

    WorkerThreadState &state = *cx->runtime()->workerThreadState;
    JS_ASSERT(state.numThreads);

    // Queue the task under the worker-state lock, then wake one worker.
    AutoLockWorkerThreadState lock(rt);

    if (!state.parseWorklist.append(task.get()))
        return false;

    // Ownership of the task has passed to the worklist.
    task.forget();

    state.notify(WorkerThreadState::WORKER);
    return true;
}
// Prepare |obj| for cross-compartment wrapping into |scope|. Handles the
// XPConnect special cases — outer windows, slim wrappers, per-scope wrapped
// natives, and PreCreate hooks that redirect the scope — and returns the
// object (possibly a different per-scope XPCWrappedNative) double-wrapped as
// needed for |flags|. Returns nsnull on failure.
JSObject *
WrapperFactory::PrepareForWrapping(JSContext *cx, JSObject *scope, JSObject *obj, uintN flags)
{
    // Don't unwrap an outer window, just double wrap it if needed.
    if (obj->getClass()->ext.innerObject)
        return DoubleWrap(cx, obj, flags);

    // Here are the rules for wrapping:
    // We should never get a proxy here (the JS engine unwraps those for us).
    JS_ASSERT(!obj->isWrapper());

    // As soon as an object is wrapped in a security wrapper, it morphs to be
    // a fat wrapper. (see also: bug XXX).
    if (IS_SLIM_WRAPPER(obj) && !MorphSlimWrapper(cx, obj))
        return nsnull;

    // We only hand out outer objects to script.
    obj = GetCurrentOuter(cx, obj);
    if (obj->getClass()->ext.innerObject)
        return DoubleWrap(cx, obj, flags);

    // Now, our object is ready to be wrapped, but several objects (notably
    // nsJSIIDs) have a wrapper per scope. If we are about to wrap one of
    // those objects in a security wrapper, then we need to hand back the
    // wrapper for the new scope instead. Also, global objects don't move
    // between scopes so for those we also want to return the wrapper. So...
    if (!IS_WN_WRAPPER(obj) || !obj->getParent())
        return DoubleWrap(cx, obj, flags);

    XPCWrappedNative *wn = static_cast<XPCWrappedNative *>(xpc_GetJSPrivate(obj));

    // If the object doesn't have classinfo we want to return the same
    // XPCWrappedNative so that we keep the same set of interfaces.
    if (!wn->GetClassInfo())
        return DoubleWrap(cx, obj, flags);

    // Enter obj's compartment before consulting its scriptable callback.
    JSAutoEnterCompartment ac;
    if (!ac.enter(cx, obj))
        return nsnull;
    XPCCallContext ccx(JS_CALLER, cx, obj);

    {
        if (NATIVE_HAS_FLAG(&ccx, WantPreCreate)) {
            // We have a precreate hook. This object might enforce that we only
            // ever create JS object for it.
            JSObject *originalScope = scope;
            // PreCreate may rewrite |scope| to the object's preferred scope.
            nsresult rv = wn->GetScriptableInfo()->GetCallback()->
                PreCreate(wn->Native(), cx, scope, &scope);
            NS_ENSURE_SUCCESS(rv, DoubleWrap(cx, obj, flags));

            // If the handed back scope differs from the passed-in scope and is in
            // a separate compartment, then this object is explicitly requesting
            // that we don't create a second JS object for it: create a security
            // wrapper.
            // NOTE(review): this line mixes the accessors compartment() and
            // getCompartment() — presumably equivalent here; confirm both
            // spellings exist on this JSObject revision.
            if (originalScope->compartment() != scope->getCompartment())
                return DoubleWrap(cx, obj, flags);

            // Note: this penalizes objects that only have one wrapper, but are
            // being accessed across compartments. We would really prefer to
            // replace the above code with a test that says "do you only have one
            // wrapper?"
        }
    }

    // NB: Passing a holder here inhibits slim wrappers under
    // WrapNativeToJSVal.
    nsCOMPtr<nsIXPConnectJSObjectHolder> holder;

    // This public WrapNativeToJSVal API enters the compartment of 'scope'
    // so we don't have to.
    jsval v;
    nsresult rv =
        nsXPConnect::FastGetXPConnect()->WrapNativeToJSVal(cx, scope, wn->Native(), nsnull,
                                                           &NS_GET_IID(nsISupports), PR_FALSE,
                                                           &v, getter_AddRefs(holder));
    if (NS_SUCCEEDED(rv)) {
        obj = JSVAL_TO_OBJECT(v);
        NS_ASSERTION(IS_WN_WRAPPER(obj), "bad object");

        // If the new wrapper knows about fewer interfaces than the one we
        // started from, copy the richer interface set over so callers keep
        // the interfaces they already had.
        XPCWrappedNative *newwn = static_cast<XPCWrappedNative *>(xpc_GetJSPrivate(obj));
        if (newwn->GetSet()->GetInterfaceCount() < wn->GetSet()->GetInterfaceCount())
            newwn->SetSet(wn->GetSet());
    }

    return DoubleWrap(cx, obj, flags);
}
// Begin an off-thread parse of |chars| under |options|, invoking |callback|
// with |callbackData| when the parse completes. The parse runs against a
// fresh, debugger-invisible, mergeable global so results can later be merged
// into the target compartment. Returns false on OOM or setup failure; on
// success ownership of the ParseTask (and its ExclusiveContext) passes to
// the helper thread state.
bool
js::StartOffThreadParseScript(JSContext* cx, const ReadOnlyCompileOptions& options,
                              const char16_t* chars, size_t length,
                              JS::OffThreadCompileCallback callback, void* callbackData)
{
    // Suppress GC so that calls below do not trigger a new incremental GC
    // which could require barriers on the atoms compartment.
    gc::AutoSuppressGC suppress(cx);

    JS::CompartmentOptions compartmentOptions(cx->compartment()->options());
    compartmentOptions.setZone(JS::FreshZone);
    compartmentOptions.setInvisibleToDebugger(true);
    compartmentOptions.setMergeable(true);

    // Don't falsely inherit the host's global trace hook.
    compartmentOptions.setTrace(nullptr);

    JSObject* global = JS_NewGlobalObject(cx, &parseTaskGlobalClass, nullptr,
                                          JS::FireOnNewGlobalHook, compartmentOptions);
    if (!global)
        return false;

    JS_SetCompartmentPrincipals(global->compartment(), cx->compartment()->principals());

    // Initialize all classes required for parsing while still on the main
    // thread, for both the target and the new global so that prototype
    // pointers can be changed infallibly after parsing finishes.
    if (!EnsureParserCreatedClasses(cx))
        return false;
    {
        AutoCompartment ac(cx, global);
        if (!EnsureParserCreatedClasses(cx))
            return false;
    }

    ScopedJSDeletePtr<ExclusiveContext> helpercx(
        cx->new_<ExclusiveContext>(cx->runtime(), (PerThreadData*) nullptr,
                                   ExclusiveContext::Context_Exclusive));
    if (!helpercx)
        return false;

    ScopedJSDeletePtr<ParseTask> task(
        cx->new_<ParseTask>(helpercx.get(), global, cx, chars, length,
                            callback, callbackData));
    if (!task)
        return false;

    // The task now owns the helper context.
    helpercx.forget();

    if (!task->init(cx, options))
        return false;

    if (OffThreadParsingMustWaitForGC(cx->runtime())) {
        // A GC is in progress; park the task until it finishes.
        AutoLockHelperThreadState lock;
        if (!HelperThreadState().parseWaitingOnGC().append(task.get())) {
            ReportOutOfMemory(cx);
            return false;
        }
    } else {
        // Append under the lock first; only activate once the task is
        // safely on the worklist, then wake a consumer thread.
        AutoLockHelperThreadState lock;
        if (!HelperThreadState().parseWorklist().append(task.get())) {
            ReportOutOfMemory(cx);
            return false;
        }

        task->activate(cx->runtime());
        HelperThreadState().notifyOne(GlobalHelperThreadState::PRODUCER);
    }

    // Ownership of the task has passed to the helper thread state.
    task.forget();
    return true;
}
// GC-cell iteration callback for memory reporting: classifies one GC thing
// of kind |traceKind| and size |thingSize| into the per-compartment
// (CompartmentStats) or per-zone (ZoneStats) accumulators carried in |data|
// (an IteratorClosure). Also measures malloc'd memory hanging off each cell.
static void
StatsCellCallback(JSRuntime *rt, void *data, void *thing, JSGCTraceKind traceKind,
                  size_t thingSize)
{
    IteratorClosure *closure = static_cast<IteratorClosure *>(data);
    RuntimeStats *rtStats = closure->rtStats;
    ZoneStats *zStats = rtStats->currZoneStats;
    switch (traceKind) {
      case JSTRACE_OBJECT: {
        JSObject *obj = static_cast<JSObject *>(thing);
        CompartmentStats *cStats = GetCompartmentStats(obj->compartment());
        // Bucket GC-heap object memory by object flavor.
        if (obj->is<JSFunction>())
            cStats->gcHeapObjectsFunction += thingSize;
        else if (obj->is<ArrayObject>())
            cStats->gcHeapObjectsDenseArray += thingSize;
        else if (obj->isCrossCompartmentWrapper())
            cStats->gcHeapObjectsCrossCompartmentWrapper += thingSize;
        else
            cStats->gcHeapObjectsOrdinary += thingSize;

        // Malloc'd memory hanging off the object (slots, elements, ...).
        JS::ObjectsExtraSizes objectsExtra;
        obj->sizeOfExcludingThis(rtStats->mallocSizeOf_, &objectsExtra);
        cStats->objectsExtra.add(objectsExtra);

        // JSObject::sizeOfExcludingThis() doesn't measure objectsExtraPrivate,
        // so we do it here.
        if (ObjectPrivateVisitor *opv = closure->opv) {
            nsISupports *iface;
            if (opv->getISupports_(obj, &iface) && iface) {
                cStats->objectsExtra.private_ += opv->sizeOfIncludingThis(iface);
            }
        }
        break;
      }
      case JSTRACE_STRING: {
        JSString *str = static_cast<JSString *>(thing);
        size_t strSize = str->sizeOfExcludingThis(rtStats->mallocSizeOf_);

        // If we can't grow hugeStrings, let's just call this string non-huge.
        // We're probably about to OOM anyway.
        if (strSize >= JS::HugeStringInfo::MinSize() && zStats->hugeStrings.growBy(1)) {
            // Huge strings are reported individually with an escaped preview.
            zStats->gcHeapStringsNormal += thingSize;
            JS::HugeStringInfo &info = zStats->hugeStrings.back();
            info.length = str->length();
            info.size = strSize;
            PutEscapedString(info.buffer, sizeof(info.buffer), &str->asLinear(), 0);
        } else if (str->isShort()) {
            // Short (inline) strings have no out-of-line chars.
            MOZ_ASSERT(strSize == 0);
            zStats->gcHeapStringsShort += thingSize;
        } else {
            zStats->gcHeapStringsNormal += thingSize;
            zStats->stringCharsNonHuge += strSize;
        }
        break;
      }
      case JSTRACE_SHAPE: {
        Shape *shape = static_cast<Shape *>(thing);
        CompartmentStats *cStats = GetCompartmentStats(shape->compartment());
        size_t propTableSize, kidsSize;
        shape->sizeOfExcludingThis(rtStats->mallocSizeOf_, &propTableSize, &kidsSize);
        if (shape->inDictionary()) {
            cStats->gcHeapShapesDict += thingSize;
            cStats->shapesExtraDictTables += propTableSize;
            // Dictionary shapes have no kids.
            JS_ASSERT(kidsSize == 0);
        } else {
            // Split tree shapes by whether their parent is the global, to
            // distinguish "ordinary" shapes from oddly-parented ones.
            if (shape->base()->getObjectParent() == shape->compartment()->maybeGlobal()) {
                cStats->gcHeapShapesTreeGlobalParented += thingSize;
            } else {
                cStats->gcHeapShapesTreeNonGlobalParented += thingSize;
            }
            cStats->shapesExtraTreeTables += propTableSize;
            cStats->shapesExtraTreeShapeKids += kidsSize;
        }
        break;
      }
      case JSTRACE_BASE_SHAPE: {
        BaseShape *base = static_cast<BaseShape *>(thing);
        CompartmentStats *cStats = GetCompartmentStats(base->compartment());
        cStats->gcHeapShapesBase += thingSize;
        break;
      }
      case JSTRACE_SCRIPT: {
        JSScript *script = static_cast<JSScript *>(thing);
        CompartmentStats *cStats = GetCompartmentStats(script->compartment());
        cStats->gcHeapScripts += thingSize;
        cStats->scriptData += script->sizeOfData(rtStats->mallocSizeOf_);
#ifdef JS_ION
        size_t baselineData = 0, baselineStubsFallback = 0;
        ion::SizeOfBaselineData(script, rtStats->mallocSizeOf_, &baselineData,
                                &baselineStubsFallback);
        cStats->baselineData += baselineData;
        cStats->baselineStubsFallback += baselineStubsFallback;
        cStats->ionData += ion::SizeOfIonData(script, rtStats->mallocSizeOf_);
#endif

        // Count each ScriptSource only once, the first time it is seen.
        ScriptSource *ss = script->scriptSource();
        SourceSet::AddPtr entry = closure->seenSources.lookupForAdd(ss);
        if (!entry) {
            closure->seenSources.add(entry, ss); // Not much to be done on failure.
            rtStats->runtime.scriptSources += ss->sizeOfIncludingThis(rtStats->mallocSizeOf_);
        }
        break;
      }
      case JSTRACE_LAZY_SCRIPT: {
        LazyScript *lazy = static_cast<LazyScript *>(thing);
        zStats->gcHeapLazyScripts += thingSize;
        zStats->lazyScripts += lazy->sizeOfExcludingThis(rtStats->mallocSizeOf_);
        break;
      }
      case JSTRACE_IONCODE: {
#ifdef JS_ION
        zStats->gcHeapIonCodes += thingSize;
        // The code for a script is counted in ExecutableAllocator::sizeOfCode().
#endif
        break;
      }
      case JSTRACE_TYPE_OBJECT: {
        types::TypeObject *obj = static_cast<types::TypeObject *>(thing);
        zStats->gcHeapTypeObjects += thingSize;
        zStats->typeObjects += obj->sizeOfExcludingThis(rtStats->mallocSizeOf_);
        break;
      }
    }
    // Yes, this is a subtraction: see StatsArenaCallback() for details.
    zStats->gcHeapUnusedGcThings -= thingSize;
}