TSharedPtr<ITextFormatArgumentModifier> FTextFormatArgumentModifier_PluralForm::Create(const ETextPluralType InPluralType, const FTextFormatString& InArgsString)
{
	// Parse the raw argument string into key/value pairs; malformed input yields no modifier.
	TMap<FTextFormatString, FTextFormatString> ParsedArgs;
	if (!ParseKeyValueArgs(InArgsString, ParsedArgs))
	{
		return nullptr;
	}

	// Plural forms may contain format markers, so pre-compile all the variants now so that
	// Evaluate doesn't have to (this also lets us validate the plural form strings and fail
	// if they're not correct).
	TMap<FTextFormatString, FTextFormat> CompiledForms;
	CompiledForms.Reserve(ParsedArgs.Num());

	int32 LongestFormStringLen = 0;
	bool bAnyFormUsesFormatArgs = false;
	for (const auto& KeyValuePair : ParsedArgs)
	{
		FTextFormat CompiledForm = FTextFormat::FromString(FString(KeyValuePair.Value.StringLen, KeyValuePair.Value.StringPtr));
		if (!CompiledForm.IsValid())
		{
			// Stop compiling on the first bad form; the count check below detects the failure.
			break;
		}
		LongestFormStringLen = FMath::Max(LongestFormStringLen, KeyValuePair.Value.StringLen);
		if (CompiledForm.GetExpressionType() == FTextFormat::EExpressionType::Complex)
		{
			bAnyFormUsesFormatArgs = true;
		}
		CompiledForms.Add(KeyValuePair.Key, MoveTemp(CompiledForm));
	}

	// Only succeed if every variant compiled.
	if (CompiledForms.Num() != ParsedArgs.Num())
	{
		return nullptr;
	}
	return MakeShareable(new FTextFormatArgumentModifier_PluralForm(InPluralType, CompiledForms, LongestFormStringLen, bAnyFormUsesFormatArgs));
}
bool ConvertOverlapResults(int32 NumOverlaps, PxOverlapHit* POverlapResults, const PxFilterData& QueryFilter, TArray<FOverlapResult>& OutOverlaps) { SCOPE_CYCLE_COUNTER(STAT_CollisionConvertOverlap); const int32 ExpectedSize = OutOverlaps.Num() + NumOverlaps; OutOverlaps.Reserve(ExpectedSize); bool bBlockingFound = false; if (ExpectedSize >= GNumOverlapsRequiredForTMap) { // Map from an overlap to the position in the result array (the index has one added to it so 0 can be a sentinel) TMap<FOverlapKey, int32, TInlineSetAllocator<64>> OverlapMap; OverlapMap.Reserve(ExpectedSize); // Fill in the map with existing hits for (int32 ExistingIndex = 0; ExistingIndex < OutOverlaps.Num(); ++ExistingIndex) { const FOverlapResult& ExistingOverlap = OutOverlaps[ExistingIndex]; OverlapMap.Add(FOverlapKey(ExistingOverlap.Component.Get(), ExistingOverlap.ItemIndex), ExistingIndex + 1); } for (int32 PResultIndex = 0; PResultIndex < NumOverlaps; ++PResultIndex) { FOverlapResult NewOverlap; ConvertQueryOverlap(POverlapResults[PResultIndex].shape, POverlapResults[PResultIndex].actor, NewOverlap, QueryFilter); if (NewOverlap.bBlockingHit) { bBlockingFound = true; } // Look for it in the map, newly added elements will start with 0, so we know we need to add it to the results array then (the index is stored as +1) int32& DestinationIndex = OverlapMap.FindOrAdd(FOverlapKey(NewOverlap.Component.Get(), NewOverlap.ItemIndex)); if (DestinationIndex == 0) { DestinationIndex = OutOverlaps.Add(NewOverlap) + 1; } else { FOverlapResult& ExistingOverlap = OutOverlaps[DestinationIndex - 1]; // If we had a non-blocking overlap with this component, but now we have a blocking one, use that one instead! 
if (!ExistingOverlap.bBlockingHit && NewOverlap.bBlockingHit) { ExistingOverlap = NewOverlap; } } } } else { // N^2 approach, no maps for (int32 i = 0; i < NumOverlaps; i++) { FOverlapResult NewOverlap; ConvertQueryOverlap(POverlapResults[i].shape, POverlapResults[i].actor, NewOverlap, QueryFilter); if (NewOverlap.bBlockingHit) { bBlockingFound = true; } AddUniqueOverlap(OutOverlaps, NewOverlap); } } return bBlockingFound; }
bool ConvertOverlapResults(int32 NumOverlaps, PxOverlapHit* POverlapResults, const PxFilterData& QueryFilter, TArray<FOverlapResult>& OutOverlaps) { SCOPE_CYCLE_COUNTER(STAT_CollisionConvertOverlap); OutOverlaps.Reserve(OutOverlaps.Num() + NumOverlaps); bool bBlockingFound = false; // This number was not empirically determined, just a rough rule of thumb if (OutOverlaps.Num() + NumOverlaps < 6) { // N^2 approach, no maps for (int32 i = 0; i < NumOverlaps; i++) { FOverlapResult NewOverlap; ConvertQueryOverlap(POverlapResults[i].shape, POverlapResults[i].actor, NewOverlap, QueryFilter); if (NewOverlap.bBlockingHit) { bBlockingFound = true; } AddUniqueOverlap(OutOverlaps, NewOverlap); } } else { // Map from an overlap to the position in the result array TMap<FOverlapKey, int32> OverlapMap; OverlapMap.Reserve(OutOverlaps.Num()); // Fill in the map with existing hits for (int32 ExistingIndex = 0; ExistingIndex < OutOverlaps.Num(); ++ExistingIndex) { const FOverlapResult& ExistingOverlap = OutOverlaps[ExistingIndex]; OverlapMap.Add(FOverlapKey(ExistingOverlap.Component.Get(), ExistingOverlap.ItemIndex), ExistingIndex); } for (int32 PResultIndex = 0; PResultIndex < NumOverlaps; ++PResultIndex) { FOverlapResult NewOverlap; ConvertQueryOverlap(POverlapResults[PResultIndex].shape, POverlapResults[PResultIndex].actor, NewOverlap, QueryFilter); if (NewOverlap.bBlockingHit) { bBlockingFound = true; } int32& DestinationIndex = OverlapMap.FindOrAdd(FOverlapKey(NewOverlap.Component.Get(), NewOverlap.ItemIndex)); if (DestinationIndex < OutOverlaps.Num()) { FOverlapResult& ExistingOverlap = OutOverlaps[DestinationIndex]; // If we had a non-blocking overlap with this component, but now we have a blocking one, use that one instead! if (!ExistingOverlap.bBlockingHit && NewOverlap.bBlockingHit) { ExistingOverlap = NewOverlap; } } else { DestinationIndex = OutOverlaps.Add(NewOverlap); } } } return bBlockingFound; }
bool FPackageReader::ReadAssetRegistryData(TArray<FAssetData*>& AssetDataList)
{
	check(Loader);

	// Does the package contain asset registry tags
	if (PackageFileSummary.AssetRegistryDataOffset == 0)
	{
		// No Tag Table!
		return false;
	}

	// Seek to the part of the file where the asset registry tags live
	Seek(PackageFileSummary.AssetRegistryDataOffset);

	// Determine the package name and path
	FString PackageName = FPackageName::FilenameToLongPackageName(PackageFilename);
	FString PackagePath = FPackageName::GetLongPackagePath(PackageName);
	const bool bIsMapPackage = (PackageFileSummary.PackageFlags & PKG_ContainsMap) != 0;

	// Assets do not show up in map packages unless we launch with -WorldAssets
	static const bool bUsingWorldAssets = FAssetRegistry::IsUsingWorldAssets();
	if (bIsMapPackage && !bUsingWorldAssets)
	{
		return true;
	}

	// Load the object count
	int32 ObjectCount = 0;
	*this << ObjectCount;

	// Worlds that were saved before they were marked public do not have asset data so we will synthesize it here to make sure we see all legacy umaps.
	// We will also do this for maps saved after they were marked public but no asset data was saved for some reason. A bug caused this to happen for some maps.
	if (bUsingWorldAssets && bIsMapPackage)
	{
		const bool bLegacyPackage = PackageFileSummary.GetFileVersionUE4() < VER_UE4_PUBLIC_WORLDS;
		const bool bNoMapAsset = (ObjectCount == 0);
		if (bLegacyPackage || bNoMapAsset)
		{
			FString AssetName = FPackageName::GetLongPackageAssetName(PackageName);
			// FNames are trivially copyable; passing the temporary directly replaces the
			// previous MoveTemp-on-a-temporary, which MoveTemp rejects (it requires an lvalue).
			AssetDataList.Add(new FAssetData(FName(*PackageName), FName(*PackagePath), FName(), FName(*AssetName), FName(TEXT("World")), TMap<FName, FString>(), PackageFileSummary.ChunkIDs, PackageFileSummary.PackageFlags));
		}
	}

	// UAsset files only have one object, but legacy or map packages may have more.
	for (int32 ObjectIdx = 0; ObjectIdx < ObjectCount; ++ObjectIdx)
	{
		// Per-object payload: full object path, class name, then a tag/value table.
		FString ObjectPath;
		FString ObjectClassName;
		int32 TagCount = 0;
		*this << ObjectPath;
		*this << ObjectClassName;
		*this << TagCount;

		TMap<FName, FString> TagsAndValues;
		TagsAndValues.Reserve(TagCount);
		for (int32 TagIdx = 0; TagIdx < TagCount; ++TagIdx)
		{
			FString Key;
			FString Value;
			*this << Key;
			*this << Value;
			TagsAndValues.Add(FName(*Key), Value);
		}

		// Split "Outer.AssetName" paths; a path with no '.' is just the asset name.
		FString GroupNames;
		FString AssetName;
		if (ObjectPath.Contains(TEXT("."), ESearchCase::CaseSensitive))
		{
			ObjectPath.Split(TEXT("."), &GroupNames, &AssetName, ESearchCase::CaseSensitive, ESearchDir::FromEnd);
		}
		else
		{
			AssetName = ObjectPath;
		}

		// Before worlds were RF_Public, other non-public assets were added to the asset data table in map packages.
		// Here we simply skip over them
		if (bIsMapPackage && PackageFileSummary.GetFileVersionUE4() < VER_UE4_PUBLIC_WORLDS)
		{
			if (AssetName != FPackageName::GetLongPackageAssetName(PackageName))
			{
				continue;
			}
		}

		// Create a new FAssetData for this asset and update it with the gathered data.
		// TagsAndValues is a local lvalue, so MoveTemp is valid there; the FName temporaries
		// are passed directly (moving an FName is meaningless and MoveTemp on an rvalue is invalid).
		AssetDataList.Add(new FAssetData(FName(*PackageName), FName(*PackagePath), FName(*GroupNames), FName(*AssetName), FName(*ObjectClassName), MoveTemp(TagsAndValues), PackageFileSummary.ChunkIDs, PackageFileSummary.PackageFlags));
	}

	return true;
}
// Scans this worker's data span with a rolling hash, matching windows against the cloud
// chunk inventory and emitting new chunks for unmatched data. Returns the resulting data
// structure plus per-chunk info. NOTE(review): idx and PaddedZeros are mutated inside the
// loop body as well as the for-statement — statement order here is load-bearing.
FDataScanResult FDataScannerImpl::ScanData()
{
	// Count running scanners
	FScopeCounter ScopeCounter(&NumRunningScanners);
	FStatsCollector::Accumulate(StatCreatedScanners, 1);
	FStatsCollector::Accumulate(StatRunningScanners, 1);
	// Init data
	FRollingHash<WindowSize> RollingHash;
	FChunkWriter ChunkWriter(FBuildPatchServicesModule::GetCloudDirectory(), StatsCollector);
	FDataStructure DataStructure(DataStartOffset);
	TMap<FGuid, FChunkInfo> ChunkInfoLookup;
	TArray<uint8> ChunkBuffer;
	TArray<uint8> NewChunkBuffer; // Accumulates bytes not matched by any known chunk.
	uint32 PaddedZeros = 0;       // Zeros appended past the end of Data to fill the final window.
	ChunkInfoLookup.Reserve(Data.Num() / WindowSize);
	ChunkBuffer.SetNumUninitialized(WindowSize);
	NewChunkBuffer.Reserve(WindowSize);
	// Get a copy of the chunk inventory
	TMap<uint64, TSet<FGuid>> ChunkInventory = CloudEnumeration->GetChunkInventory();
	TMap<FGuid, int64> ChunkFileSizes = CloudEnumeration->GetChunkFileSizes();
	TMap<FGuid, FSHAHash> ChunkShaHashes = CloudEnumeration->GetChunkShaHashes();
	// Loop over and process all data
	FGuid MatchedChunk;
	uint64 TempTimer; // Initialized by AccumulateTimeBegin before first read (by reference).
	uint64 CpuTimer;
	FStatsCollector::AccumulateTimeBegin(CpuTimer);
	// Continue past Data.Num() until a full window of zero padding has been processed.
	for (int32 idx = 0; (idx < Data.Num() || PaddedZeros < WindowSize) && !bShouldAbort; ++idx)
	{
		// Consume data
		const uint32 NumDataNeeded = RollingHash.GetNumDataNeeded();
		if (NumDataNeeded > 0)
		{
			FStatsScopedTimer ConsumeTimer(StatConsumeBytesTime);
			uint32 NumConsumedBytes = 0;
			if (idx < Data.Num())
			{
				NumConsumedBytes = FMath::Min<uint32>(NumDataNeeded, Data.Num() - idx);
				RollingHash.ConsumeBytes(&Data[idx], NumConsumedBytes);
				// -1 compensates for the loop's ++idx.
				idx += NumConsumedBytes - 1;
			}
			// Zero Pad?
			if (NumConsumedBytes < NumDataNeeded)
			{
				TArray<uint8> Zeros;
				Zeros.AddZeroed(NumDataNeeded - NumConsumedBytes);
				RollingHash.ConsumeBytes(Zeros.GetData(), Zeros.Num());
				PaddedZeros = Zeros.Num();
			}
			check(RollingHash.GetNumDataNeeded() == 0);
			continue;
		}
		// Window is full; only WindowSize - PaddedZeros bytes of it are real data.
		const uint64 NumDataInWindow = WindowSize - PaddedZeros;
		const uint64 WindowHash = RollingHash.GetWindowHash();
		// Try find match
		if (FindExistingChunk(ChunkInventory, ChunkShaHashes, WindowHash, RollingHash, MatchedChunk))
		{
			// Push the chunk to the structure
			DataStructure.PushKnownChunk(MatchedChunk, NumDataInWindow);
			FChunkInfo& ChunkInfo = ChunkInfoLookup.FindOrAdd(MatchedChunk);
			ChunkInfo.Hash = WindowHash;
			ChunkInfo.ShaHash = ChunkShaHashes[MatchedChunk];
			ChunkInfo.IsNew = false;
			FStatsCollector::Accumulate(StatMatchedData, NumDataInWindow);
			// Clear matched window
			RollingHash.Clear();
			// Decrement idx to include current byte in next window
			--idx;
		}
		else
		{
			// Collect unrecognized bytes
			NewChunkBuffer.Add(RollingHash.GetWindowData().Bottom());
			DataStructure.PushUnknownByte();
			// Only one real byte left in the window: pad the new chunk out to full size.
			if (NumDataInWindow == 1)
			{
				NewChunkBuffer.AddZeroed(WindowSize - NewChunkBuffer.Num());
			}
			if (NewChunkBuffer.Num() == WindowSize)
			{
				// A full chunk of unmatched bytes; check inventory once more before writing it out.
				const uint64 NewChunkHash = FRollingHash<WindowSize>::GetHashForDataSet(NewChunkBuffer.GetData());
				if (FindExistingChunk(ChunkInventory, ChunkShaHashes, NewChunkHash, NewChunkBuffer, MatchedChunk))
				{
					DataStructure.RemapCurrentChunk(MatchedChunk);
					FChunkInfo& ChunkInfo = ChunkInfoLookup.FindOrAdd(MatchedChunk);
					ChunkInfo.Hash = NewChunkHash;
					ChunkInfo.ShaHash = ChunkShaHashes[MatchedChunk];
					ChunkInfo.IsNew = false;
					FStatsCollector::Accumulate(StatMatchedData, WindowSize);
				}
				else
				{
					// Genuinely new chunk: queue it for writing and record its hashes.
					FStatsScopedTimer ChunkWriterTimer(StatChunkWriterTime);
					const FGuid& NewChunkGuid = DataStructure.GetCurrentChunkId();
					// Pause the CPU timer while queueing (writer time is tracked separately).
					FStatsCollector::AccumulateTimeEnd(StatCpuTime, CpuTimer);
					ChunkWriter.QueueChunk(NewChunkBuffer.GetData(), NewChunkGuid, NewChunkHash);
					FStatsCollector::AccumulateTimeBegin(CpuTimer);
					FChunkInfo& ChunkInfo = ChunkInfoLookup.FindOrAdd(NewChunkGuid);
					ChunkInfo.Hash = NewChunkHash;
					ChunkInfo.IsNew = true;
					FSHA1::HashBuffer(NewChunkBuffer.GetData(), NewChunkBuffer.Num(), ChunkInfo.ShaHash.Hash);
					ChunkShaHashes.Add(NewChunkGuid, ChunkInfo.ShaHash);
					FStatsCollector::Accumulate(StatExtraData, NewChunkBuffer.Num());
				}
				DataStructure.CompleteCurrentChunk();
				NewChunkBuffer.Empty(WindowSize);
			}
			// Roll byte into window
			if (idx < Data.Num())
			{
				RollingHash.RollForward(Data[idx]);
			}
			else
			{
				// Past the end of real data: feed zeros and track how many.
				RollingHash.RollForward(0);
				++PaddedZeros;
			}
		}
	}
	// Collect left-overs
	if (NewChunkBuffer.Num() > 0)
	{
		// Pad the trailing partial chunk to full size, then match-or-write as above.
		NewChunkBuffer.AddZeroed(WindowSize - NewChunkBuffer.Num());
		const uint64 NewChunkHash = FRollingHash<WindowSize>::GetHashForDataSet(NewChunkBuffer.GetData());
		if (FindExistingChunk(ChunkInventory, ChunkShaHashes, NewChunkHash, NewChunkBuffer, MatchedChunk))
		{
			// Setup chunk info for a match
			DataStructure.RemapCurrentChunk(MatchedChunk);
			FChunkInfo& ChunkInfo = ChunkInfoLookup.FindOrAdd(MatchedChunk);
			ChunkInfo.Hash = NewChunkHash;
			ChunkInfo.ShaHash = ChunkShaHashes[MatchedChunk];
			ChunkInfo.IsNew = false;
		}
		else
		{
			// Save the final chunk if no match
			FStatsScopedTimer ChunkWriterTimer(StatChunkWriterTime);
			const FGuid& NewChunkGuid = DataStructure.GetCurrentChunkId();
			FStatsCollector::AccumulateTimeEnd(StatCpuTime, CpuTimer);
			ChunkWriter.QueueChunk(NewChunkBuffer.GetData(), NewChunkGuid, NewChunkHash);
			FStatsCollector::AccumulateTimeBegin(CpuTimer);
			FChunkInfo& ChunkInfo = ChunkInfoLookup.FindOrAdd(NewChunkGuid);
			ChunkInfo.Hash = NewChunkHash;
			ChunkInfo.IsNew = true;
			FSHA1::HashBuffer(NewChunkBuffer.GetData(), NewChunkBuffer.Num(), ChunkInfo.ShaHash.Hash);
			ChunkShaHashes.Add(NewChunkGuid, ChunkInfo.ShaHash);
			FStatsCollector::Accumulate(StatExtraData, NewChunkBuffer.Num());
		}
	}
	FStatsCollector::AccumulateTimeEnd(StatCpuTime, CpuTimer);
	// Wait for the chunk writer to finish, and fill out chunk file sizes
	FStatsCollector::AccumulateTimeBegin(TempTimer);
	ChunkWriter.NoMoreChunks();
	ChunkWriter.WaitForThread();
	ChunkWriter.GetChunkFilesizes(ChunkFileSizes);
	FStatsCollector::AccumulateTimeEnd(StatChunkWriterTime, TempTimer);
	// Fill out chunk file sizes
	FStatsCollector::AccumulateTimeBegin(CpuTimer);
	for (auto& ChunkInfo : ChunkInfoLookup)
	{
		ChunkInfo.Value.ChunkFileSize = ChunkFileSizes[ChunkInfo.Key];
	}
	// Empty data to save RAM
	Data.Empty();
	FStatsCollector::AccumulateTimeEnd(StatCpuTime, CpuTimer);
	FStatsCollector::Accumulate(StatRunningScanners, -1);
	bIsComplete = true;
	return FDataScanResult(
		MoveTemp(DataStructure.GetFinalDataStructure()),
		MoveTemp(ChunkInfoLookup));
}