// Derives a GetByIdStatus from a baseline JIT inline-cache stub, describing how a
// get_by_id site behaved so the DFG can decide whether to inline the access.
// Must be called with the ConcurrentJITLocker held (passed as proof of locking),
// since it reads structures concurrently via getConcurrently().
//
// locker            - proof that the concurrent JIT lock is held.
// profiledBlock     - the CodeBlock whose stub is being inspected.
// stubInfo          - the inline-cache metadata; may be null.
// uid               - the property name being accessed.
// callExitSiteData  - exit-site info used when computing getter call statuses.
//
// Returns NoInformation if the stub was never seen, a Simple status with one or
// more GetByIdVariants when the cached cases are inlineable, or a
// TakesSlowPath/MakesCalls status otherwise.
GetByIdStatus GetByIdStatus::computeForStubInfo(
    const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo,
    StringImpl* uid, CallLinkStatus::ExitSiteData callExitSiteData)
{
    // No stub, or a stub the JIT never observed executing: we know nothing.
    if (!stubInfo || !stubInfo->seen)
        return GetByIdStatus(NoInformation);

    // Pre-scan a polymorphic list (if any) to decide which slow-path flavor to
    // report on failure: MakesCalls if any cached case performs JS calls
    // (e.g. getters), plain TakesSlowPath otherwise.
    PolymorphicGetByIdList* list = 0;
    State slowPathState = TakesSlowPath;
    if (stubInfo->accessType == access_get_by_id_list) {
        list = stubInfo->u.getByIdList.list;
        for (unsigned i = 0; i < list->size(); ++i) {
            const GetByIdAccess& access = list->at(i);
            if (access.doesCalls())
                slowPathState = MakesCalls;
        }
    }

    // Finally figure out if we can derive an access strategy.
    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
    switch (stubInfo->accessType) {
    case access_unset:
        return GetByIdStatus(NoInformation);

    case access_get_by_id_self: {
        // Monomorphic self access: a single structure with the property on the
        // base object itself.
        Structure* structure = stubInfo->u.getByIdSelf.baseObjectStructure.get();
        if (structure->takesSlowPathInDFGForImpureProperty())
            return GetByIdStatus(slowPathState, true);
        unsigned attributesIgnored;
        GetByIdVariant variant;
        // getConcurrently() is safe to call off the main thread; an invalid
        // offset means the property is no longer there.
        variant.m_offset = structure->getConcurrently(uid, attributesIgnored);
        if (!isValidOffset(variant.m_offset))
            return GetByIdStatus(slowPathState, true);

        variant.m_structureSet.add(structure);
        bool didAppend = result.appendVariant(variant);
        // Appending the first variant into an empty result cannot fail.
        ASSERT_UNUSED(didAppend, didAppend);
        return result;
    }

    case access_get_by_id_list: {
        // Polymorphic access: build one GetByIdVariant per inlineable case.
        for (unsigned listIndex = 0; listIndex < list->size(); ++listIndex) {
            Structure* structure = list->at(listIndex).structure();

            ComplexGetStatus complexGetStatus = ComplexGetStatus::computeFor(
                profiledBlock, structure, list->at(listIndex).chain(),
                list->at(listIndex).chainCount(), uid);

            switch (complexGetStatus.kind()) {
            case ComplexGetStatus::ShouldSkip:
                // This case is dead or invalidated; just drop it from the result.
                continue;

            case ComplexGetStatus::TakesSlowPath:
                return GetByIdStatus(slowPathState, true);

            case ComplexGetStatus::Inlineable: {
                std::unique_ptr<CallLinkStatus> callLinkStatus;
                switch (list->at(listIndex).type()) {
                case GetByIdAccess::SimpleInline:
                case GetByIdAccess::SimpleStub: {
                    // Plain loads: no call status needed.
                    break;
                }
                case GetByIdAccess::Getter: {
                    // JS getter: extract the call-link info recorded by the
                    // accessor stub so the DFG can consider inlining the call.
                    AccessorCallJITStubRoutine* stub = static_cast<AccessorCallJITStubRoutine*>(
                        list->at(listIndex).stubRoutine());
                    callLinkStatus = std::make_unique<CallLinkStatus>(
                        CallLinkStatus::computeFor(
                            locker, profiledBlock, *stub->m_callLinkInfo, callExitSiteData));
                    break;
                }
                case GetByIdAccess::CustomGetter:
                case GetByIdAccess::WatchedStub: {
                    // FIXME: It would be totally sweet to support this at some point in the future.
                    // https://bugs.webkit.org/show_bug.cgi?id=133052
                    return GetByIdStatus(slowPathState, true);
                }
                default:
                    RELEASE_ASSERT_NOT_REACHED();
                }

                GetByIdVariant variant(
                    StructureSet(structure), complexGetStatus.offset(), complexGetStatus.chain(),
                    std::move(callLinkStatus));

                // appendVariant() fails if this variant overlaps an earlier one;
                // in that case the site is too messy to inline.
                if (!result.appendVariant(variant))
                    return GetByIdStatus(slowPathState, true);
                break;
            }
            }
        }

        return result;
    }

    default:
        return GetByIdStatus(slowPathState, true);
    }

    RELEASE_ASSERT_NOT_REACHED();
    return GetByIdStatus();
}
// Derives a PutByIdStatus from a baseline JIT inline-cache stub, describing how a
// put_by_id site behaved so the DFG can decide whether to inline the store.
// Must be called with the ConcurrentJITLocker held; structure lookups use the
// concurrency-safe getConcurrently() path.
//
// locker            - proof that the concurrent JIT lock is held.
// profiledBlock     - the CodeBlock whose stub is being inspected.
// stubInfo          - the inline-cache metadata; may be null.
// uid               - the property name being stored.
// callExitSiteData  - exit-site info used when computing setter call statuses.
//
// Returns a default (no-information) status if the stub was never seen, a Simple
// status with PutByIdVariants when inlineable, or TakesSlowPath/MakesCalls.
PutByIdStatus PutByIdStatus::computeForStubInfo(
    const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo,
    StringImpl* uid, CallLinkStatus::ExitSiteData callExitSiteData)
{
    if (!stubInfo || !stubInfo->seen)
        return PutByIdStatus();

    switch (stubInfo->accessType) {
    case access_unset:
        // If the JIT saw it but didn't optimize it, then assume that this takes slow path.
        return PutByIdStatus(TakesSlowPath);

    case access_put_by_id_replace: {
        // Monomorphic replace: overwrite an existing property on one structure.
        PropertyOffset offset =
            stubInfo->u.putByIdReplace.baseObjectStructure->getConcurrently(uid);
        if (isValidOffset(offset)) {
            return PutByIdVariant::replace(
                stubInfo->u.putByIdReplace.baseObjectStructure.get(), offset);
        }
        return PutByIdStatus(TakesSlowPath);
    }

    case access_put_by_id_transition_normal:
    case access_put_by_id_transition_direct: {
        // Monomorphic transition: adding a property moves the object from
        // previousStructure to structure, optionally guarded by a prototype chain.
        ASSERT(stubInfo->u.putByIdTransition.previousStructure->transitionWatchpointSetHasBeenInvalidated());
        PropertyOffset offset = stubInfo->u.putByIdTransition.structure->getConcurrently(uid);
        if (isValidOffset(offset)) {
            RefPtr<IntendedStructureChain> chain;
            if (stubInfo->u.putByIdTransition.chain) {
                chain = adoptRef(new IntendedStructureChain(
                    profiledBlock, stubInfo->u.putByIdTransition.previousStructure.get(),
                    stubInfo->u.putByIdTransition.chain.get()));
            }
            return PutByIdVariant::transition(
                stubInfo->u.putByIdTransition.previousStructure.get(),
                stubInfo->u.putByIdTransition.structure.get(),
                chain.get(), offset);
        }
        return PutByIdStatus(TakesSlowPath);
    }

    case access_put_by_id_list: {
        PolymorphicPutByIdList* list = stubInfo->u.putByIdList.list;

        PutByIdStatus result;
        result.m_state = Simple;

        // First pass: decide the failure flavor. If any cached case performs JS
        // calls (setters/custom setters), failures report MakesCalls.
        State slowPathState = TakesSlowPath;
        for (unsigned i = 0; i < list->size(); ++i) {
            const PutByIdAccess& access = list->at(i);

            switch (access.type()) {
            case PutByIdAccess::Setter:
            case PutByIdAccess::CustomSetter:
                slowPathState = MakesCalls;
                break;
            default:
                break;
            }
        }

        // Second pass: build one PutByIdVariant per inlineable case.
        for (unsigned i = 0; i < list->size(); ++i) {
            const PutByIdAccess& access = list->at(i);

            PutByIdVariant variant;

            switch (access.type()) {
            case PutByIdAccess::Replace: {
                Structure* structure = access.structure();
                PropertyOffset offset = structure->getConcurrently(uid);
                if (!isValidOffset(offset))
                    return PutByIdStatus(slowPathState);
                variant = PutByIdVariant::replace(structure, offset);
                break;
            }

            case PutByIdAccess::Transition: {
                PropertyOffset offset = access.newStructure()->getConcurrently(uid);
                if (!isValidOffset(offset))
                    return PutByIdStatus(slowPathState);
                RefPtr<IntendedStructureChain> chain;
                if (access.chain()) {
                    chain = adoptRef(new IntendedStructureChain(
                        profiledBlock, access.oldStructure(), access.chain()));
                    // A stale prototype chain just drops this case rather than
                    // poisoning the whole status.
                    if (!chain->isStillValid())
                        continue;
                }
                variant = PutByIdVariant::transition(
                    access.oldStructure(), access.newStructure(), chain.get(), offset);
                break;
            }

            case PutByIdAccess::Setter: {
                Structure* structure = access.structure();

                ComplexGetStatus complexGetStatus = ComplexGetStatus::computeFor(
                    profiledBlock, structure, access.chain(), access.chainCount(), uid);

                switch (complexGetStatus.kind()) {
                case ComplexGetStatus::ShouldSkip:
                    // Dead/invalidated case; note this continues the outer loop.
                    continue;

                case ComplexGetStatus::TakesSlowPath:
                    return PutByIdStatus(slowPathState);

                case ComplexGetStatus::Inlineable: {
                    // Extract the setter's call-link info so the DFG can
                    // consider inlining the setter call itself.
                    AccessorCallJITStubRoutine* stub = static_cast<AccessorCallJITStubRoutine*>(
                        access.stubRoutine());
                    std::unique_ptr<CallLinkStatus> callLinkStatus =
                        std::make_unique<CallLinkStatus>(
                            CallLinkStatus::computeFor(
                                locker, profiledBlock, *stub->m_callLinkInfo, callExitSiteData));

                    variant = PutByIdVariant::setter(
                        structure, complexGetStatus.offset(), complexGetStatus.chain(),
                        std::move(callLinkStatus));
                }
                }
                break;
            }

            case PutByIdAccess::CustomSetter:
                // Native setters are not modeled; report MakesCalls directly.
                return PutByIdStatus(MakesCalls);

            default:
                return PutByIdStatus(slowPathState);
            }

            // Overlapping variants mean the site is too messy to inline.
            if (!result.appendVariant(variant))
                return PutByIdStatus(slowPathState);
        }

        return result;
    }

    default:
        return PutByIdStatus(TakesSlowPath);
    }
}
// Newer (PolymorphicAccess-based) variant: derives a GetByIdStatus from a
// CacheType-tagged StructureStubInfo, ignoring OSR exit-site feedback (the
// caller folds that in separately). Must be called with the ConcurrentJITLocker
// held; structure lookups use the concurrency-safe getConcurrently() path.
//
// locker            - proof that the concurrent JIT lock is held.
// profiledBlock     - the CodeBlock whose stub is being inspected.
// stubInfo          - the inline-cache metadata; may be null.
// uid               - the property name being accessed.
// callExitSiteData  - exit-site info used when computing getter call statuses.
GetByIdStatus GetByIdStatus::computeForStubInfoWithoutExitSiteFeedback(
    const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo,
    UniquedStringImpl* uid, CallLinkStatus::ExitSiteData callExitSiteData)
{
    // A stub that was never even considered for caching gives us nothing.
    if (!stubInfo || !stubInfo->everConsidered)
        return GetByIdStatus(NoInformation);

    // Pre-scan the polymorphic access list (if any) to decide which slow-path
    // flavor to report on failure: MakesCalls if any cached case performs calls.
    PolymorphicAccess* list = 0;
    State slowPathState = TakesSlowPath;
    if (stubInfo->cacheType == CacheType::Stub) {
        list = stubInfo->u.stub;
        for (unsigned i = 0; i < list->size(); ++i) {
            const AccessCase& access = list->at(i);
            if (access.doesCalls())
                slowPathState = MakesCalls;
        }
    }

    // The scan above still ran so that a slow-path site with call-making cases
    // correctly reports MakesCalls rather than plain TakesSlowPath.
    if (stubInfo->tookSlowPath)
        return GetByIdStatus(slowPathState);

    // Finally figure out if we can derive an access strategy.
    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
    switch (stubInfo->cacheType) {
    case CacheType::Unset:
        return GetByIdStatus(NoInformation);

    case CacheType::GetByIdSelf: {
        // Monomorphic self access: one structure, property on the base object.
        Structure* structure = stubInfo->u.byIdSelf.baseObjectStructure.get();
        if (structure->takesSlowPathInDFGForImpureProperty())
            return GetByIdStatus(slowPathState, true);
        unsigned attributesIgnored;
        GetByIdVariant variant;
        variant.m_offset = structure->getConcurrently(uid, attributesIgnored);
        if (!isValidOffset(variant.m_offset))
            return GetByIdStatus(slowPathState, true);

        variant.m_structureSet.add(structure);
        bool didAppend = result.appendVariant(variant);
        // Appending the first variant into an empty result cannot fail.
        ASSERT_UNUSED(didAppend, didAppend);
        return result;
    }

    case CacheType::Stub: {
        // Polymorphic access: build one GetByIdVariant per inlineable case.
        for (unsigned listIndex = 0; listIndex < list->size(); ++listIndex) {
            const AccessCase& access = list->at(listIndex);
            // Proxy-mediated accesses are not modeled as variants.
            if (access.viaProxy())
                return GetByIdStatus(slowPathState, true);

            Structure* structure = access.structure();
            if (!structure) {
                // The null structure cases arise due to array.length and string.length. We have no way
                // of creating a GetByIdVariant for those, and we don't really have to since the DFG
                // handles those cases in FixupPhase using value profiling. That's a bit awkward - we
                // shouldn't have to use value profiling to discover something that the AccessCase
                // could have told us. But, it works well enough. So, our only concern here is to not
                // crash on null structure.
                return GetByIdStatus(slowPathState, true);
            }

            ComplexGetStatus complexGetStatus = ComplexGetStatus::computeFor(
                structure, access.conditionSet(), uid);

            switch (complexGetStatus.kind()) {
            case ComplexGetStatus::ShouldSkip:
                // This case is dead or invalidated; just drop it from the result.
                continue;

            case ComplexGetStatus::TakesSlowPath:
                return GetByIdStatus(slowPathState, true);

            case ComplexGetStatus::Inlineable: {
                std::unique_ptr<CallLinkStatus> callLinkStatus;
                JSFunction* intrinsicFunction = nullptr;

                switch (access.type()) {
                case AccessCase::Load: {
                    // Plain load: no call status, no intrinsic.
                    break;
                }
                case AccessCase::IntrinsicGetter: {
                    // Known built-in getter the DFG can lower to an intrinsic.
                    intrinsicFunction = access.intrinsicFunction();
                    break;
                }
                case AccessCase::Getter: {
                    // JS getter: record its call-link status so the DFG can
                    // consider inlining the call.
                    CallLinkInfo* callLinkInfo = access.callLinkInfo();
                    ASSERT(callLinkInfo);
                    callLinkStatus = std::make_unique<CallLinkStatus>(
                        CallLinkStatus::computeFor(
                            locker, profiledBlock, *callLinkInfo, callExitSiteData));
                    break;
                }
                default: {
                    // FIXME: It would be totally sweet to support more of these at some point in the
                    // future. https://bugs.webkit.org/show_bug.cgi?id=133052
                    return GetByIdStatus(slowPathState, true);
                }
                }

                GetByIdVariant variant(
                    StructureSet(structure), complexGetStatus.offset(),
                    complexGetStatus.conditionSet(), WTFMove(callLinkStatus),
                    intrinsicFunction);

                // appendVariant() fails on overlapping variants; then the site
                // is too messy to inline.
                if (!result.appendVariant(variant))
                    return GetByIdStatus(slowPathState, true);
                break;
            }
            }
        }

        return result;
    }

    default:
        return GetByIdStatus(slowPathState, true);
    }

    RELEASE_ASSERT_NOT_REACHED();
    return GetByIdStatus();
}
// Newer (PolymorphicAccess-based) variant: derives a PutByIdStatus from a
// CacheType-tagged StructureStubInfo. Must be called with the
// ConcurrentJITLocker held; structure lookups use getConcurrently().
//
// locker            - proof that the concurrent JIT lock is held.
// profiledBlock     - the CodeBlock whose stub is being inspected.
// stubInfo          - the inline-cache metadata; may be null.
// uid               - the property name being stored.
// callExitSiteData  - exit-site info used when computing setter call statuses.
PutByIdStatus PutByIdStatus::computeForStubInfo(
    const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo,
    UniquedStringImpl* uid, CallLinkStatus::ExitSiteData callExitSiteData)
{
    // A stub that was never even considered for caching gives us nothing.
    if (!stubInfo || !stubInfo->everConsidered)
        return PutByIdStatus();

    if (stubInfo->tookSlowPath)
        return PutByIdStatus(TakesSlowPath);

    switch (stubInfo->cacheType) {
    case CacheType::Unset:
        // This means that we attempted to cache but failed for some reason.
        return PutByIdStatus(TakesSlowPath);

    case CacheType::PutByIdReplace: {
        // Monomorphic replace: overwrite an existing property on one structure.
        PropertyOffset offset =
            stubInfo->u.byIdSelf.baseObjectStructure->getConcurrently(uid);
        if (isValidOffset(offset)) {
            return PutByIdVariant::replace(
                stubInfo->u.byIdSelf.baseObjectStructure.get(), offset, InferredType::Top);
        }
        return PutByIdStatus(TakesSlowPath);
    }

    case CacheType::Stub: {
        PolymorphicAccess* list = stubInfo->u.stub;

        PutByIdStatus result;
        result.m_state = Simple;

        // First pass: decide the failure flavor. If any cached case performs
        // calls, failures report MakesCalls instead of plain TakesSlowPath.
        State slowPathState = TakesSlowPath;
        for (unsigned i = 0; i < list->size(); ++i) {
            const AccessCase& access = list->at(i);
            if (access.doesCalls())
                slowPathState = MakesCalls;
        }

        // Second pass: build one PutByIdVariant per inlineable case.
        for (unsigned i = 0; i < list->size(); ++i) {
            const AccessCase& access = list->at(i);
            // Proxy-mediated stores are not modeled as variants.
            if (access.viaProxy())
                return PutByIdStatus(slowPathState);

            PutByIdVariant variant;

            switch (access.type()) {
            case AccessCase::Replace: {
                Structure* structure = access.structure();
                PropertyOffset offset = structure->getConcurrently(uid);
                if (!isValidOffset(offset))
                    return PutByIdStatus(slowPathState);
                variant = PutByIdVariant::replace(
                    structure, offset, structure->inferredTypeDescriptorFor(uid));
                break;
            }

            case AccessCase::Transition: {
                // Adding a property transitions the object to newStructure,
                // guarded by an object-property condition set.
                PropertyOffset offset = access.newStructure()->getConcurrently(uid);
                if (!isValidOffset(offset))
                    return PutByIdStatus(slowPathState);
                ObjectPropertyConditionSet conditionSet = access.conditionSet();
                if (!conditionSet.structuresEnsureValidity())
                    return PutByIdStatus(slowPathState);
                variant = PutByIdVariant::transition(
                    access.structure(), access.newStructure(), conditionSet, offset,
                    access.newStructure()->inferredTypeDescriptorFor(uid));
                break;
            }

            case AccessCase::Setter: {
                Structure* structure = access.structure();

                ComplexGetStatus complexGetStatus = ComplexGetStatus::computeFor(
                    structure, access.conditionSet(), uid);

                switch (complexGetStatus.kind()) {
                case ComplexGetStatus::ShouldSkip:
                    // Dead/invalidated case; note this continues the outer loop.
                    continue;

                case ComplexGetStatus::TakesSlowPath:
                    return PutByIdStatus(slowPathState);

                case ComplexGetStatus::Inlineable: {
                    // Record the setter's call-link status so the DFG can
                    // consider inlining the setter call itself.
                    CallLinkInfo* callLinkInfo = access.callLinkInfo();
                    ASSERT(callLinkInfo);
                    std::unique_ptr<CallLinkStatus> callLinkStatus =
                        std::make_unique<CallLinkStatus>(
                            CallLinkStatus::computeFor(
                                locker, profiledBlock, *callLinkInfo, callExitSiteData));

                    variant = PutByIdVariant::setter(
                        structure, complexGetStatus.offset(), complexGetStatus.conditionSet(),
                        WTFMove(callLinkStatus));
                }
                }
                break;
            }

            case AccessCase::CustomSetter:
                // Native setters are not modeled; report MakesCalls directly.
                return PutByIdStatus(MakesCalls);

            default:
                return PutByIdStatus(slowPathState);
            }

            // Overlapping variants mean the site is too messy to inline.
            if (!result.appendVariant(variant))
                return PutByIdStatus(slowPathState);
        }

        return result;
    }

    default:
        return PutByIdStatus(TakesSlowPath);
    }
}