static LookupStatus
UpdateSetGlobalName(VMFrame &f, ic::SetGlobalNameIC *ic, JSObject *obj, UnrootedShape shape)
{
    /* Give globals a chance to appear. */
    if (!shape)
        return Lookup_Uncacheable;

    if (!shape->hasDefaultSetter() ||
        !shape->writable() ||
        !shape->hasSlot() ||
        obj->watched())
    {
        /* Disable the IC for weird shape attributes and watchpoints. */
        PatchSetFallback(f, ic);
        return Lookup_Uncacheable;
    }

    /* Object is not branded, so we can use the inline path. */
    Repatcher repatcher(f.chunk());
    ic->patchInlineShapeGuard(repatcher, obj->lastProperty());

    uint32_t index = obj->dynamicSlotIndex(shape->slot());
    JSC::CodeLocationLabel label = ic->fastPathStart.labelAtOffset(ic->loadStoreOffset);
    repatcher.patchAddressOffsetForValueStore(label, index * sizeof(Value),
                                              ic->vr.isTypeKnown());

    return Lookup_Cacheable;
}
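/*
 * Illustrative sketch, not part of this file: the IC patched above bakes a
 * shape guard and a slot offset directly into machine code. A minimal
 * software analogue of the same idea caches the last-seen shape alongside
 * the slot index and guards the fast path on a shape comparison. Every name
 * below (MiniShape, MiniObject, SetGlobalCache) is hypothetical.
 */
#if 0
#include <cstddef>
#include <vector>

struct MiniShape { /* identity stands in for an object's layout */ };

struct MiniObject {
    const MiniShape *shape;      // current layout
    std::vector<double> slots;   // property storage
};

struct SetGlobalCache {
    const MiniShape *guard = nullptr;  // shape the site was last "patched" for
    std::size_t slot = 0;              // slot offset baked into the fast path

    void set(MiniObject &obj, double v) {
        if (obj.shape == guard) {      // inline shape guard
            obj.slots[slot] = v;       // fast path: direct slot store
            return;
        }
        std::size_t found = lookupSlot(obj); // slow path: full property lookup
        obj.slots[found] = v;
        guard = obj.shape;             // repatch the cache for next time,
        slot = found;                  // as UpdateSetGlobalName patches jitcode
    }

    static std::size_t lookupSlot(const MiniObject &) { return 0; } // stand-in
};
#endif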
void
Shape::removeChild(UnrootedShape child)
{
    JS_ASSERT(!child->inDictionary());
    JS_ASSERT(child->parent == this);

    KidsPointer *kidp = &kids;

    if (kidp->isShape()) {
        JS_ASSERT(kidp->toShape() == child);
        kidp->setNull();
        child->parent = NULL;
        return;
    }

    KidsHash *hash = kidp->toHash();
    JS_ASSERT(hash->count() >= 2);      /* otherwise kidp->isShape() should be true */

    hash->remove(child);
    child->parent = NULL;

    if (hash->count() == 1) {
        /* Convert from HASH form back to SHAPE form. */
        KidsHash::Range r = hash->all();
        Shape *otherChild = r.front();
        JS_ASSERT((r.popFront(), r.empty()));    /* No more elements! */
        kidp->setShape(otherChild);
        js_delete(hash);
    }
}
bool
PropertyTree::insertChild(JSContext *cx, UnrootedShape parent, UnrootedShape child)
{
    JS_ASSERT(!parent->inDictionary());
    JS_ASSERT(!child->parent);
    JS_ASSERT(!child->inDictionary());
    JS_ASSERT(cx->compartment == compartment);
    JS_ASSERT(child->compartment() == parent->compartment());

    KidsPointer *kidp = &parent->kids;

    if (kidp->isNull()) {
        child->setParent(parent);
        kidp->setShape(child);
        return true;
    }

    if (kidp->isShape()) {
        UnrootedShape shape = kidp->toShape();
        JS_ASSERT(shape != child);
        JS_ASSERT(!shape->matches(child));

        KidsHash *hash = HashChildren(shape, child);
        if (!hash) {
            JS_ReportOutOfMemory(cx);
            return false;
        }
        kidp->setHash(hash);
        child->setParent(parent);
        return true;
    }

    if (!kidp->toHash()->putNew(child, child)) {
        JS_ReportOutOfMemory(cx);
        return false;
    }

    child->setParent(parent);
    return true;
}
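/*
 * Illustrative sketch, not part of this file: insertChild() above grows a
 * parent's kid storage NULL -> single pointer -> hash, and removeChild()
 * shrinks it hash -> single pointer when only one kid remains (the real
 * KidsPointer packs the tag into low pointer bits). A stand-alone version
 * of that three-state container, with hypothetical names (Kids, Node):
 */
#if 0
#include <cassert>
#include <unordered_set>

struct Node;

class Kids {
    Node *single = nullptr;                      // SHAPE form: exactly one kid
    std::unordered_set<Node *> *hash = nullptr;  // HASH form: two or more kids

  public:
    void insert(Node *kid) {
        if (!single && !hash) {                  // NULL -> SHAPE
            single = kid;
        } else if (single) {                     // SHAPE -> HASH on second insert
            hash = new std::unordered_set<Node *>{single, kid};
            single = nullptr;
        } else {
            hash->insert(kid);
        }
    }

    void remove(Node *kid) {
        if (single) {                            // SHAPE -> NULL
            assert(single == kid);
            single = nullptr;
            return;
        }
        hash->erase(kid);
        if (hash->size() == 1) {                 // HASH -> SHAPE, as in removeChild()
            single = *hash->begin();
            delete hash;
            hash = nullptr;
        }
    }
};
#endif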
UnrootedShape
PropertyTree::getChild(JSContext *cx, Shape *parent_, uint32_t nfixed, const StackShape &child)
{
    AssertCanGC();

    {
        UnrootedShape shape = NULL;

        JS_ASSERT(parent_);

        /*
         * The property tree has extremely low fan-out below its root in
         * popular embeddings with real-world workloads. Patterns such as
         * defining closures that capture a constructor's environment as
         * getters or setters on the new object that is passed in as
         * |this| can significantly increase fan-out below the property
         * tree root -- see bug 335700 for details.
         */
        KidsPointer *kidp = &parent_->kids;
        if (kidp->isShape()) {
            UnrootedShape kid = kidp->toShape();
            if (kid->matches(child))
                shape = kid;
        } else if (kidp->isHash()) {
            if (KidsHash::Ptr p = kidp->toHash()->lookup(child))
                shape = *p;
        } else {
            /* If kidp->isNull(), we always insert. */
        }

#ifdef JSGC_INCREMENTAL
        if (shape) {
            JS::Zone *zone = shape->zone();
            if (zone->needsBarrier()) {
                /*
                 * We need a read barrier for the shape tree, since these are weak
                 * pointers.
                 */
                Shape *tmp = shape;
                MarkShapeUnbarriered(zone->barrierTracer(), &tmp, "read barrier");
                JS_ASSERT(tmp == shape);
            } else if (zone->isGCSweeping() && !shape->isMarked() &&
                       !shape->arenaHeader()->allocatedDuringIncremental)
            {
                /*
                 * The shape we've found is unreachable and due to be finalized, so
                 * remove our weak reference to it and don't use it.
                 */
                JS_ASSERT(parent_->isMarked());
                parent_->removeChild(shape);
                shape = NULL;
            }
        }
#endif

        if (shape)
            return shape;
    }

    StackShape::AutoRooter childRoot(cx, &child);
    RootedShape parent(cx, parent_);

    UnrootedShape shape = newShape(cx);
    if (!shape)
        return UnrootedShape(NULL);

    new (shape) Shape(child, nfixed);

    if (!insertChild(cx, parent, shape))
        return UnrootedShape(NULL);

    return shape;
}
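/*
 * Illustrative sketch, not part of this file: stripped of the incremental-GC
 * read-barrier and sweep handling, getChild() above is a lookup-or-create
 * over the parent's kids -- return an existing child whose descriptor
 * matches, otherwise allocate one and insert it so the next request shares
 * it. All names below (Descriptor, TreeNode, getChild) are hypothetical.
 */
#if 0
#include <memory>
#include <string>
#include <unordered_map>

struct Descriptor {
    std::string name;
    bool operator==(const Descriptor &o) const { return name == o.name; }
};

struct DescriptorHash {
    std::size_t operator()(const Descriptor &d) const {
        return std::hash<std::string>()(d.name);
    }
};

struct TreeNode {
    Descriptor desc;
    TreeNode *parent = nullptr;
    std::unordered_map<Descriptor, std::unique_ptr<TreeNode>, DescriptorHash> kids;
};

// Return the child of |parent| matching |desc|, creating it on a miss.
TreeNode *getChild(TreeNode *parent, const Descriptor &desc) {
    auto it = parent->kids.find(desc);
    if (it != parent->kids.end())
        return it->second.get();            // hit: share the existing node

    auto kid = std::make_unique<TreeNode>();
    kid->desc = desc;
    kid->parent = parent;
    TreeNode *raw = kid.get();
    parent->kids.emplace(desc, std::move(kid));
    return raw;
}
#endif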
static void
StatsCellCallback(JSRuntime *rt, void *data, void *thing, JSGCTraceKind traceKind,
                  size_t thingSize)
{
    IteratorClosure *closure = static_cast<IteratorClosure *>(data);
    RuntimeStats *rtStats = closure->rtStats;
    CompartmentStats *cStats = rtStats->currCompartmentStats;
    switch (traceKind) {
      case JSTRACE_OBJECT: {
        JSObject *obj = static_cast<JSObject *>(thing);
        if (obj->isFunction()) {
            cStats->gcHeapObjectsFunction += thingSize;
        } else if (obj->isArray()) {
            cStats->gcHeapObjectsDenseArray += thingSize;
        } else if (obj->isCrossCompartmentWrapper()) {
            cStats->gcHeapObjectsCrossCompartmentWrapper += thingSize;
        } else {
            cStats->gcHeapObjectsOrdinary += thingSize;
        }

        ObjectsExtraSizes objectsExtra;
        obj->sizeOfExcludingThis(rtStats->mallocSizeOf, &objectsExtra);
        cStats->objectsExtra.add(objectsExtra);

        // JSObject::sizeOfExcludingThis() doesn't measure objectsExtraPrivate,
        // so we do it here.
        if (ObjectPrivateVisitor *opv = closure->opv) {
            nsISupports *iface;
            if (opv->getISupports(obj, &iface) && iface) {
                cStats->objectsExtra.private_ += opv->sizeOfIncludingThis(iface);
            }
        }
        break;
      }
      case JSTRACE_STRING: {
        JSString *str = static_cast<JSString *>(thing);

        size_t strSize = str->sizeOfExcludingThis(rtStats->mallocSizeOf);

        // If we can't grow hugeStrings, let's just call this string non-huge.
        // We're probably about to OOM anyway.
        if (strSize >= HugeStringInfo::MinSize() && cStats->hugeStrings.growBy(1)) {
            cStats->gcHeapStringsNormal += thingSize;
            HugeStringInfo &info = cStats->hugeStrings.back();
            info.length = str->length();
            info.size = strSize;
            PutEscapedString(info.buffer, sizeof(info.buffer), &str->asLinear(), 0);
        } else if (str->isShort()) {
            MOZ_ASSERT(strSize == 0);
            cStats->gcHeapStringsShort += thingSize;
        } else {
            cStats->gcHeapStringsNormal += thingSize;
            cStats->stringCharsNonHuge += strSize;
        }
        break;
      }
      case JSTRACE_SHAPE: {
        UnrootedShape shape = static_cast<RawShape>(thing);
        size_t propTableSize, kidsSize;
        shape->sizeOfExcludingThis(rtStats->mallocSizeOf, &propTableSize, &kidsSize);
        if (shape->inDictionary()) {
            cStats->gcHeapShapesDict += thingSize;
            cStats->shapesExtraDictTables += propTableSize;
            JS_ASSERT(kidsSize == 0);
        } else {
            if (shape->base()->getObjectParent() == shape->compartment()->maybeGlobal()) {
                cStats->gcHeapShapesTreeGlobalParented += thingSize;
            } else {
                cStats->gcHeapShapesTreeNonGlobalParented += thingSize;
            }
            cStats->shapesExtraTreeTables += propTableSize;
            cStats->shapesExtraTreeShapeKids += kidsSize;
        }
        break;
      }
      case JSTRACE_BASE_SHAPE: {
        cStats->gcHeapShapesBase += thingSize;
        break;
      }
      case JSTRACE_SCRIPT: {
        JSScript *script = static_cast<JSScript *>(thing);
        cStats->gcHeapScripts += thingSize;
        cStats->scriptData += script->sizeOfData(rtStats->mallocSizeOf);
#ifdef JS_METHODJIT
        cStats->jaegerData += script->sizeOfJitScripts(rtStats->mallocSizeOf);
# ifdef JS_ION
        cStats->ionData += ion::MemoryUsed(script, rtStats->mallocSizeOf);
# endif
#endif

        ScriptSource *ss = script->scriptSource();
        SourceSet::AddPtr entry = closure->seenSources.lookupForAdd(ss);
        if (!entry) {
            closure->seenSources.add(entry, ss); // Not much to be done on failure.
            rtStats->runtime.scriptSources += ss->sizeOfIncludingThis(rtStats->mallocSizeOf);
        }
        break;
      }
      case JSTRACE_IONCODE: {
#ifdef JS_METHODJIT
# ifdef JS_ION
        cStats->gcHeapIonCodes += thingSize;
        // The code for a script is counted in ExecutableAllocator::sizeOfCode().
# endif
#endif
        break;
      }
      case JSTRACE_TYPE_OBJECT: {
        types::TypeObject *obj = static_cast<types::TypeObject *>(thing);
        cStats->gcHeapTypeObjects += thingSize;
        cStats->typeInference.typeObjects += obj->sizeOfExcludingThis(rtStats->mallocSizeOf);
        break;
      }
    }

    // Yes, this is a subtraction: see StatsArenaCallback() for details.
    cStats->gcHeapUnusedGcThings -= thingSize;
}
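/*
 * Illustrative sketch, not part of this file: the JSTRACE_SCRIPT case above
 * uses a "seen" set so memory shared between scripts (the ScriptSource) is
 * attributed exactly once rather than once per script. The same pattern,
 * stand-alone and with hypothetical names (Source, Script, measure):
 */
#if 0
#include <cstddef>
#include <unordered_set>
#include <vector>

struct Source { std::size_t bytes; };
struct Script { Source *source; std::size_t ownBytes; };

std::size_t measure(const std::vector<Script> &scripts) {
    std::size_t total = 0;
    std::unordered_set<const Source *> seen;
    for (const Script &s : scripts) {
        total += s.ownBytes;               // per-script data counts every time
        if (seen.insert(s.source).second)  // a shared source counts only once
            total += s.source->bytes;
    }
    return total;
}
#endif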
bool
js::GetOwnProperty(JSContext *cx, Handle<ObjectImpl*> obj, PropertyId pid_, unsigned resolveFlags,
                   PropDesc *desc)
{
    NEW_OBJECT_REPRESENTATION_ONLY();

    JS_CHECK_RECURSION(cx, return false);

    Rooted<PropertyId> pid(cx, pid_);

    if (static_cast<JSObject *>(obj.get())->isProxy()) {
        MOZ_NOT_REACHED("NYI: proxy [[GetOwnProperty]]");
        return false;
    }

    /* |shape| is always set /after/ a GC. */
    UnrootedShape shape = obj->nativeLookup(cx, pid);
    if (!shape) {
        DropUnrooted(shape);

        /* Not found: attempt to resolve it. */
        Class *clasp = obj->getClass();
        JSResolveOp resolve = clasp->resolve;
        if (resolve != JS_ResolveStub) {
            Rooted<jsid> id(cx, pid.get().asId());
            Rooted<JSObject*> robj(cx, static_cast<JSObject*>(obj.get()));
            if (clasp->flags & JSCLASS_NEW_RESOLVE) {
                Rooted<JSObject*> obj2(cx, NULL);
                JSNewResolveOp op = reinterpret_cast<JSNewResolveOp>(resolve);
                if (!op(cx, robj, id, resolveFlags, &obj2))
                    return false;
            } else {
                if (!resolve(cx, robj, id))
                    return false;
            }
        }

        /* Now look it up again. */
        shape = obj->nativeLookup(cx, pid);
        if (!shape) {
            desc->setUndefined();
            return true;
        }
    }

    if (shape->isDataDescriptor()) {
        *desc = PropDesc(obj->nativeGetSlot(shape->slot()), shape->writability(),
                         shape->enumerability(), shape->configurability());
        return true;
    }

    if (shape->isAccessorDescriptor()) {
        *desc = PropDesc(shape->getterValue(), shape->setterValue(),
                         shape->enumerability(), shape->configurability());
        return true;
    }

    MOZ_NOT_REACHED("NYI: PropertyOp-based properties");
    return false;
}
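/*
 * Illustrative sketch, not part of this file: GetOwnProperty() above follows
 * a lookup / resolve-hook / re-lookup pattern -- a miss gives the class a
 * chance to lazily define the property before the absence becomes final.
 * Stand-alone version with hypothetical names (LazyObject, getOwnProperty):
 */
#if 0
#include <functional>
#include <map>
#include <optional>
#include <string>

struct LazyObject {
    std::map<std::string, int> props;
    // Optional hook that may define |name| on demand (cf. a class resolve hook).
    std::function<void(LazyObject &, const std::string &)> resolve;
};

std::optional<int> getOwnProperty(LazyObject &obj, const std::string &name) {
    auto it = obj.props.find(name);
    if (it == obj.props.end() && obj.resolve) {
        obj.resolve(obj, name);     // let the hook define the property...
        it = obj.props.find(name);  // ...then look it up again
    }
    if (it == obj.props.end())
        return std::nullopt;        // definitively absent
    return it->second;
}
#endif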
void
js::ObjectImpl::checkShapeConsistency()
{
    static int throttle = -1;
    if (throttle < 0) {
        if (const char *var = getenv("JS_CHECK_SHAPE_THROTTLE"))
            throttle = atoi(var);
        if (throttle < 0)
            throttle = 0;
    }
    if (throttle == 0)
        return;

    MOZ_ASSERT(isNative());

    UnrootedShape shape = lastProperty();
    UnrootedShape prev = NULL;

    if (inDictionaryMode()) {
        MOZ_ASSERT(shape->hasTable());

        ShapeTable &table = shape->table();
        for (uint32_t fslot = table.freelist; fslot != SHAPE_INVALID_SLOT;
             fslot = getSlot(fslot).toPrivateUint32())
        {
            MOZ_ASSERT(fslot < slotSpan());
        }

        for (int n = throttle; --n >= 0 && shape->parent; shape = shape->parent) {
            MOZ_ASSERT_IF(lastProperty() != shape, !shape->hasTable());

            Shape **spp = table.search(shape->propid(), false);
            MOZ_ASSERT(SHAPE_FETCH(spp) == shape);
        }

        shape = lastProperty();
        for (int n = throttle; --n >= 0 && shape; shape = shape->parent) {
            MOZ_ASSERT_IF(shape->slot() != SHAPE_INVALID_SLOT, shape->slot() < slotSpan());
            if (!prev) {
                MOZ_ASSERT(lastProperty() == shape);
                MOZ_ASSERT(shape->listp == &shape_);
            } else {
                MOZ_ASSERT(shape->listp == &prev->parent);
            }
            prev = shape;
        }
    } else {
        for (int n = throttle; --n >= 0 && shape->parent; shape = shape->parent) {
            if (shape->hasTable()) {
                ShapeTable &table = shape->table();
                MOZ_ASSERT(shape->parent);
                for (Shape::Range r(shape); !r.empty(); r.popFront()) {
                    Shape **spp = table.search(r.front().propid(), false);
                    MOZ_ASSERT(SHAPE_FETCH(spp) == &r.front());
                }
            }
            if (prev) {
                MOZ_ASSERT(prev->maybeSlot() >= shape->maybeSlot());
                shape->kids.checkConsistency(prev);
            }
            prev = shape;
        }
    }
}
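/*
 * Illustrative sketch, not part of this file: the |listp| assertions above
 * check an intrusive-list invariant -- each node records the address of the
 * field that points at it (the owner's head pointer for the first node, the
 * previous node's link otherwise). A stand-alone checker, hypothetical names
 * (Node, Owner):
 */
#if 0
#include <cassert>

struct Node {
    Node *parent = nullptr;   // next link toward the root
    Node **listp = nullptr;   // address of the field that points at this node
};

struct Owner {
    Node *last = nullptr;     // head of the chain (cf. ObjectImpl::shape_)

    void checkConsistency() {
        Node *prev = nullptr;
        for (Node *n = last; n; prev = n, n = n->parent) {
            if (!prev)
                assert(n->listp == &last);          // first node: owned by head field
            else
                assert(n->listp == &prev->parent);  // later nodes: previous link
        }
    }
};
#endif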