// Logs which text input API (IMM or TSF) is currently driving IME input, and,
// when available, the human-readable description of the active IME/profile.
// Emits a "deactivated" message when no API is active (EAPI::Unknown).
void FWindowsTextInputMethodSystem::LogActiveIMEInfo()
{
	FString APIString;
	switch(CurrentAPI)
	{
	case EAPI::IMM:
		{
			APIString = TEXT("IMM");

			// Get the description of the active IME.
			const HKL KeyboardLayout = ::GetKeyboardLayout(0);
			TArray<TCHAR> DescriptionString;
			// First call with a null buffer queries the required length in characters.
			const int32 DescriptionLen = ::ImmGetDescription(KeyboardLayout, nullptr, 0);
			DescriptionString.SetNumUninitialized(DescriptionLen + 1); // +1 for null
			::ImmGetDescription(KeyboardLayout, DescriptionString.GetData(), DescriptionLen);
			// Terminate explicitly; the buffer was allocated uninitialized.
			DescriptionString[DescriptionLen] = 0;
			if(DescriptionLen > 0)
			{
				APIString += TEXT(" (");
				APIString += DescriptionString.GetData();
				APIString += TEXT(")");
			}
		}
		break;

	case EAPI::TSF:
		{
			APIString = TEXT("TSF");

			// Query the active TSF keyboard profile; only input-processor profiles
			// (real IMEs, as opposed to plain keyboard layouts) carry a description.
			TF_INPUTPROCESSORPROFILE TSFProfile;
			if(SUCCEEDED(TSFInputProcessorProfileManager->GetActiveProfile(GUID_TFCAT_TIP_KEYBOARD, &TSFProfile)) && TSFProfile.dwProfileType == TF_PROFILETYPE_INPUTPROCESSOR)
			{
				BSTR TSFDescriptionString;
				if(SUCCEEDED(TSFInputProcessorProfiles->GetLanguageProfileDescription(TSFProfile.clsid, TSFProfile.langid, TSFProfile.guidProfile, &TSFDescriptionString)))
				{
					APIString += TEXT(" (");
					APIString += TSFDescriptionString;
					APIString += TEXT(")");
					// The BSTR is allocated by GetLanguageProfileDescription; the
					// caller owns it and must release it with SysFreeString.
					::SysFreeString(TSFDescriptionString);
				}
			}
		}
		break;

	case EAPI::Unknown:
	default:
		break;
	}

	if(APIString.IsEmpty())
	{
		UE_LOG(LogWindowsTextInputMethodSystem, Display, TEXT("IME system now deactivated."));
	}
	else
	{
		UE_LOG(LogWindowsTextInputMethodSystem, Display, TEXT("IME system now activated using %s."), *APIString);
	}
}
/**
 * Drains whatever data is currently buffered in the pipe into Output.
 * @param Output Receives the bytes read; left untouched if nothing was available,
 *               emptied if the read itself failed.
 * @return true if at least one byte was read, false otherwise.
 */
bool FPipeHandle::ReadToArray(TArray<uint8> & Output)
{
	// Ask the kernel how many bytes are ready to be read without blocking.
	int BytesAvailable = 0;
	if (ioctl(PipeDesc, FIONREAD, &BytesAvailable) != 0 || BytesAvailable <= 0)
	{
		return false;
	}

	Output.SetNumUninitialized(BytesAvailable);
	const int BytesRead = read(PipeDesc, Output.GetData(), BytesAvailable);
	if (BytesRead <= 0)
	{
		// Read failed (or EOF) — don't hand back uninitialized bytes.
		Output.Empty();
		return false;
	}

	// read() may return fewer bytes than FIONREAD reported; trim to fit.
	if (BytesRead < BytesAvailable)
	{
		Output.SetNum(BytesRead);
	}
	return true;
}
// Automatically starts when UE4 is started. // Populates the Token variable with the robot user's token. void CloudyWebAPIImpl::StartupModule() { UE_LOG(CloudyWebAPILog, Warning, TEXT("CloudyWebAPI started")); // Initialize the array with InitialArraySize SaveFileUrls.SetNumUninitialized(InitialArraySize); // BaseUrl will be updated with the correct URL BaseUrl = get_env_var(ENV_VAR_CLOUDYWEB_URL).c_str(); // Token variable will be populated with the robot user's token. AttemptAuthentication(); // Set up socket listener to receive commands from CloudyWeb //Create Socket FIPv4Endpoint Endpoint(SERVER_ENDPOINT); ListenSocket = FTcpSocketBuilder(SERVER_NAME).AsReusable().BoundToEndpoint(Endpoint).Listening(8); //Set Buffer Size int32 NewSize = 0; ListenSocket->SetReceiveBufferSize(BUFFER_SIZE, NewSize); TcpListener = new FTcpListener(*ListenSocket, CONNECTION_THREAD_TIME); TcpListener->OnConnectionAccepted().BindRaw(this, &CloudyWebAPIImpl::InputHandler); FTicker::GetCoreTicker().AddTicker(FTickerDelegate::CreateRaw(this, &CloudyWebAPIImpl::CheckConnection), CONNECTION_THREAD_TIME); // initialise class variables InputStr = ""; HasInputStrChanged = false; }
/**
 * Copies all vertex colors out of the buffer into OutColors.
 * Leaves OutColors untouched when the buffer is empty or uninitialized.
 */
void FColorVertexBuffer::GetVertexColors( TArray<FColor>& OutColors )
{
	if( VertexData != NULL && NumVertices > 0 )
	{
		// The bulk copy below assumes colors are stored contiguously with an
		// FColor-sized stride. The original sized the Memcpy by
		// NumVertices * GetStride(), which would overrun OutColors (sized in
		// FColor units) if the stride ever exceeded sizeof(FColor); assert the
		// assumption and size the copy by the destination instead.
		check( VertexData->GetStride() == sizeof(FColor) );
		OutColors.SetNumUninitialized( NumVertices );
		FMemory::Memcpy( OutColors.GetData(), VertexData->GetDataPointer(), NumVertices * sizeof(FColor) );
	}
}
// Builds (cooks) an APEX destructible asset from the mesh's editor-side
// FractureSettings and applies it back onto the mesh, preserving existing
// import settings. Returns true only when a new asset was cooked and applied.
// Compiled to a no-op returning false when APEX support is absent.
UNREALED_API bool BuildDestructibleMeshFromFractureSettings(UDestructibleMesh& DestructibleMesh, FSkeletalMeshImportData* OutData)
{
	bool Success = false;

#if WITH_APEX
	physx::NxDestructibleAsset* NewApexDestructibleAsset = NULL;

#if WITH_EDITORONLY_DATA
	if (DestructibleMesh.FractureSettings != NULL)
	{
		// Save old materials: remember what is currently assigned so user
		// overrides survive rebuilding the list from the fracture settings.
		TArray<UMaterialInterface*> OverrideMaterials;
		OverrideMaterials.SetNumUninitialized(DestructibleMesh.Materials.Num());
		for (int32 MaterialIndex = 0; MaterialIndex < DestructibleMesh.Materials.Num(); ++MaterialIndex)
		{
			OverrideMaterials[MaterialIndex] = DestructibleMesh.Materials[MaterialIndex].MaterialInterface;
		}

		// Resize the mesh's material list to match the fracture settings.
		// NOTE(review): SetNumUninitialized is only safe for trivially-constructible
		// elements; confirm the Materials element type tolerates this, otherwise
		// SetNum would be the safe call.
		DestructibleMesh.Materials.SetNumUninitialized(DestructibleMesh.FractureSettings->Materials.Num());

		for (int32 MaterialIndex = 0; MaterialIndex < DestructibleMesh.Materials.Num(); ++MaterialIndex)
		{
			if (MaterialIndex < OverrideMaterials.Num()) //if user has overriden materials use it
			{
				DestructibleMesh.Materials[MaterialIndex].MaterialInterface = OverrideMaterials[MaterialIndex];
			}
			else
			{
				// No saved override for this slot — take the fracture settings' material.
				DestructibleMesh.Materials[MaterialIndex].MaterialInterface = DestructibleMesh.FractureSettings->Materials[MaterialIndex];
			}
		}

		// Cook a new APEX destructible asset from the (possibly edited) settings.
		NxDestructibleAssetCookingDesc DestructibleAssetCookingDesc;
		DestructibleMesh.FractureSettings->BuildDestructibleAssetCookingDesc(DestructibleAssetCookingDesc);
		NewApexDestructibleAsset = DestructibleMesh.FractureSettings->CreateApexDestructibleAsset(DestructibleAssetCookingDesc);
	}
#endif // WITH_EDITORONLY_DATA

	if (NewApexDestructibleAsset != NULL)
	{
		// Apply the cooked asset onto the mesh without discarding current settings.
		Success = SetApexDestructibleAsset(DestructibleMesh, *NewApexDestructibleAsset, OutData, EDestructibleImportOptions::PreserveSettings);
	}
#endif // WITH_APEX

	return Success;
}
/**
 * Emits the four edges of the geometry's rectangle as four endpoint pairs
 * (left, top, right, bottom), eight points total, into Segments.
 */
void FCanvasSlotExtension::GetCollisionSegmentsFromGeometry(FGeometry ArrangedGeometry, TArray<FVector2D>& Segments)
{
	// Name the four corners once instead of re-deriving them per segment.
	const FVector2D TopLeft = ArrangedGeometry.Position;
	const FVector2D TopRight = ArrangedGeometry.Position + FVector2D(ArrangedGeometry.Size.X, 0);
	const FVector2D BottomLeft = ArrangedGeometry.Position + FVector2D(0, ArrangedGeometry.Size.Y);
	const FVector2D BottomRight = ArrangedGeometry.Position + ArrangedGeometry.Size;

	Segments.SetNumUninitialized(8);

	// Left Side
	Segments[0] = TopLeft;
	Segments[1] = BottomLeft;

	// Top Side
	Segments[2] = TopLeft;
	Segments[3] = TopRight;

	// Right Side
	Segments[4] = TopRight;
	Segments[5] = BottomRight;

	// Bottom Side
	Segments[6] = BottomLeft;
	Segments[7] = BottomRight;
}
/**
 * (Re)imports the animation JSON from the asset's source file, storing it
 * either zlib-compressed or as a raw string depending on build configuration,
 * then refreshes the asset's animation data.
 * @return false on a null asset, empty filename, or unreadable file.
 */
bool UCreatureAnimationAssetFactory::ImportSourceFile(UCreatureAnimationAsset *forAsset) const
{
	// Validate the pointer BEFORE calling through it. The original fetched the
	// filename via forAsset->GetCreatureFilename() first and only then checked
	// for null, which would dereference a null asset.
	if (forAsset == nullptr)
	{
		return false;
	}

	const FString &creatureFilename = forAsset->GetCreatureFilename();
	if (creatureFilename.IsEmpty())
	{
		return false;
	}

	FString readString;
	if (!FFileHelper::LoadFileToString(readString, *creatureFilename, 0))
	{
		return false;
	}

#ifdef CREATURE_USE_COMPRESS_JSON
	// Run compression routine.
	std::string saveString(TCHAR_TO_UTF8(*readString));
	forAsset->CreatureZipBinary.Reset();
	FArchiveSaveCompressedProxy Compressor = FArchiveSaveCompressedProxy(forAsset->CreatureZipBinary, ECompressionFlags::COMPRESS_ZLIB);

	// Copy the UTF-8 payload plus its NUL terminator in one bulk copy
	// (std::string::c_str() is guaranteed NUL-terminated) instead of the
	// original byte-by-byte loop.
	TArray<uint8> writeData;
	writeData.SetNumUninitialized(saveString.length() + 1);
	FMemory::Memcpy(writeData.GetData(), saveString.c_str(), saveString.length() + 1);

	Compressor << writeData;
	Compressor.Flush();
#else
	// Just use the uncompressed string.
	forAsset->CreatureRawJSONString = readString;
#endif

	forAsset->GatherAnimationData();
	return true;
}
void UMyGameInstance::TCPSocketListener() { if (!Connection) return; TArray<uint8> ReceivedData; uint32 Size; while (Connection->HasPendingData(Size)) { ReceivedData.SetNumUninitialized(FMath::Min(Size, 65507u)); //ReceivedData.Init(FMath::Min(Size, 65507u)); int32 Read = 0; Connection->Recv(ReceivedData.GetData(), ReceivedData.Num(), Read); //FInternetAddr OutAddr = new FInternetAddr(); //->GetPeerAddress(OutAddr); //Connection->GetAddress(FInternetAddr::FInternetAddr); //GEngine->AddOnScreenDebugMessage(-1, 5.f, FColor::Red, "as"); } if (ReceivedData.Num() <= 0) { //No Data Received return; } //GEngine->AddOnScreenDebugMessage(-1, 5.f, FColor::Red, StringFromBinaryArray(ReceivedData)); int length = ReceivedData.Num() / sizeof(float); const float* angles = reinterpret_cast<const float*>(ReceivedData.GetData()); pitch = -FMath::RadiansToDegrees(angles[length - 3]); yaw = FMath::RadiansToDegrees(angles[length - 4]); roll = -FMath::RadiansToDegrees(angles[length - 2]); shoot = angles[length - 1]; }
bool FWindowsPlatformProcess::ReadPipeToArray(void* ReadPipe, TArray<uint8> & Output) { uint32 BytesAvailable = 0; if (::PeekNamedPipe(ReadPipe, NULL, 0, NULL, (::DWORD*)&BytesAvailable, NULL) && (BytesAvailable > 0)) { Output.SetNumUninitialized(BytesAvailable); uint32 BytesRead = 0; if (::ReadFile(ReadPipe, Output.GetData(), BytesAvailable, (::DWORD*)&BytesRead, NULL)) { if (BytesRead < BytesAvailable) { Output.SetNum(BytesRead); } return true; } else { Output.Empty(); } } return false; }
void FAnimationRuntime::BlendLocalPosesPerBoneWeights( FCompactPose& BasePose, const TArray<FCompactPose>& BlendPoses, struct FBlendedCurve& BaseCurve, const TArray<struct FBlendedCurve>& BlendedCurves, const TArray<FPerBoneBlendWeight> & BoneBlendWeights, ECurveBlendOption::Type CurveBlendOption, /*out*/ FCompactPose& OutPose, /*out*/ struct FBlendedCurve& OutCurve) { check(BasePose.GetNumBones() == BoneBlendWeights.Num()); int32 PoseNum = BlendPoses.Num(); TArray<float> MaxPoseWeights; MaxPoseWeights.AddZeroed(PoseNum); for (FCompactPoseBoneIndex BoneIndex : BasePose.ForEachBoneIndex()) { const int32 PoseIndex = BoneBlendWeights[BoneIndex.GetInt()].SourceIndex; const FTransform& BaseAtom = BasePose[BoneIndex]; const float BlendWeight = FMath::Clamp(BoneBlendWeights[BoneIndex.GetInt()].BlendWeight, 0.f, 1.f); MaxPoseWeights[PoseIndex] = FMath::Max(MaxPoseWeights[PoseIndex], BlendWeight); if (BlendWeight < ZERO_ANIMWEIGHT_THRESH) { OutPose[BoneIndex] = BaseAtom; } else if ((1.0 - BlendWeight) < ZERO_ANIMWEIGHT_THRESH) { OutPose[BoneIndex] = BlendPoses[PoseIndex][BoneIndex]; } else // we want blend here { FTransform BlendAtom = BaseAtom; const FTransform& TargetAtom = BlendPoses[PoseIndex][BoneIndex]; BlendAtom.BlendWith(TargetAtom, BlendWeight); OutPose[BoneIndex] = BlendAtom; } } // time to blend curves // the way we blend curve per bone // is to find out max weight per that pose, and then apply that weight to the curve { TArray<const FBlendedCurve*> SourceCurves; TArray<float> SourceWegihts; SourceCurves.SetNumUninitialized(PoseNum+1); SourceWegihts.SetNumUninitialized(PoseNum+1); SourceCurves[0] = &BaseCurve; SourceWegihts[0] = 1.f; for (int32 Idx=0; Idx<PoseNum; ++Idx) { SourceCurves[Idx+1] = &BlendedCurves[Idx]; SourceWegihts[Idx+1] = MaxPoseWeights[Idx]; } BlendCurves(SourceCurves, SourceWegihts, OutCurve, CurveBlendOption); } }
// Blends BasePose against one of several BlendPoses per bone, performing the
// rotation blend in MESH space (rotations are accumulated parent-to-child)
// before converting the result back to local space. Each bone's
// FPerBoneBlendWeight selects the source pose and weight. Curves are then
// blended with each pose weighted by the maximum bone weight it received.
void FAnimationRuntime::BlendMeshPosesPerBoneWeights( struct FCompactPose& BasePose, const TArray<struct FCompactPose>& BlendPoses, struct FBlendedCurve& BaseCurve, const TArray<struct FBlendedCurve>& BlendedCurves, const TArray<FPerBoneBlendWeight>& BoneBlendWeights, ECurveBlendOption::Type CurveBlendOption, /*out*/ FCompactPose& OutPose, /*out*/ struct FBlendedCurve& OutCurve)
{
	check(BasePose.GetNumBones() == BoneBlendWeights.Num());

	const FBoneContainer& BoneContainer = BasePose.GetBoneContainer();

	// Mesh-space rotations accumulated per bone for the source pose, the
	// blended result, and the target pose respectively.
	TCustomBoneIndexArray<FQuat, FCompactPoseBoneIndex> SourceRotations;
	TCustomBoneIndexArray<FQuat, FCompactPoseBoneIndex> BlendRotations;
	TCustomBoneIndexArray<FQuat, FCompactPoseBoneIndex> TargetRotations;

	SourceRotations.AddUninitialized(BasePose.GetNumBones());
	BlendRotations.AddUninitialized(BasePose.GetNumBones());
	TargetRotations.AddUninitialized(BasePose.GetNumBones());

	int32 PoseNum = BlendPoses.Num();

	// Largest bone weight contributed by each pose — reused as curve weight.
	TArray<float> MaxPoseWeights;
	MaxPoseWeights.AddZeroed(PoseNum);

	for (FCompactPoseBoneIndex BoneIndex : BasePose.ForEachBoneIndex())
	{
		const int32 PoseIndex = BoneBlendWeights[BoneIndex.GetInt()].SourceIndex;
		const FCompactPoseBoneIndex ParentIndex = BoneContainer.GetParentBoneIndex(BoneIndex);

		FQuat SrcRotationInMesh;
		FQuat TargetRotationInMesh;

		// Compose the local rotation with the parent's mesh-space rotation.
		// NOTE: this relies on parents being visited before children so the
		// parent entries in SourceRotations/TargetRotations are already valid.
		if (ParentIndex != INDEX_NONE)
		{
			SrcRotationInMesh = SourceRotations[ParentIndex] * BasePose[BoneIndex].GetRotation();
			TargetRotationInMesh = TargetRotations[ParentIndex] * BlendPoses[PoseIndex][BoneIndex].GetRotation();
		}
		else
		{
			// Root bone: local rotation IS the mesh-space rotation.
			SrcRotationInMesh = BasePose[BoneIndex].GetRotation();
			TargetRotationInMesh = BlendPoses[PoseIndex][BoneIndex].GetRotation();
		}

		// update mesh based rotations
		SourceRotations[BoneIndex] = SrcRotationInMesh;
		TargetRotations[BoneIndex] = TargetRotationInMesh;

		// now update outer
		FTransform BaseAtom = BasePose[BoneIndex];
		FTransform TargetAtom = BlendPoses[PoseIndex][BoneIndex];
		FTransform BlendAtom;

		const float BlendWeight = FMath::Clamp(BoneBlendWeights[BoneIndex.GetInt()].BlendWeight, 0.f, 1.f);
		MaxPoseWeights[PoseIndex] = FMath::Max(MaxPoseWeights[PoseIndex], BlendWeight);

		if (BlendWeight < ZERO_ANIMWEIGHT_THRESH)
		{
			// Effectively zero weight: keep the base pose and its rotation.
			BlendAtom = BaseAtom;
			BlendRotations[BoneIndex] = SourceRotations[BoneIndex];
		}
		else if ((1.0 - BlendWeight) < ZERO_ANIMWEIGHT_THRESH)
		{
			// Effectively full weight: take the target pose wholesale.
			BlendAtom = TargetAtom;
			BlendRotations[BoneIndex] = TargetRotations[BoneIndex];
		}
		else // we want blend here
		{
			BlendAtom = BaseAtom;
			BlendAtom.BlendWith(TargetAtom, BlendWeight);

			// blend rotation in mesh space
			BlendRotations[BoneIndex] = FQuat::FastLerp(SourceRotations[BoneIndex], TargetRotations[BoneIndex], BlendWeight);

			// Fast lerp produces un-normalized quaternions, re-normalize.
			BlendRotations[BoneIndex].Normalize();
		}

		OutPose[BoneIndex] = BlendAtom;
		if (ParentIndex != INDEX_NONE)
		{
			// Convert the blended mesh-space rotation back into local space
			// relative to the blended parent rotation.
			FQuat LocalBlendQuat = BlendRotations[ParentIndex].Inverse() * BlendRotations[BoneIndex];

			// local -> mesh -> local transformations can cause loss of precision for long bone chains, we have to normalize rotation there.
			LocalBlendQuat.Normalize();
			OutPose[BoneIndex].SetRotation(LocalBlendQuat);
		}
	}

	// time to blend curves
	// the way we blend curve per bone
	// is to find out max weight per that pose, and then apply that weight to the curve
	{
		TArray<const FBlendedCurve*> SourceCurves;
		TArray<float> SourceWegihts;

		SourceCurves.SetNumUninitialized(PoseNum+1);
		SourceWegihts.SetNumUninitialized(PoseNum+1);

		SourceCurves[0] = &BaseCurve;
		SourceWegihts[0] = 1.f;

		for(int32 Idx=0; Idx<PoseNum; ++Idx)
		{
			SourceCurves[Idx+1] = &BlendedCurves[Idx];
			SourceWegihts[Idx+1] = MaxPoseWeights[Idx];
		}
		BlendCurves(SourceCurves, SourceWegihts, OutCurve, CurveBlendOption);
	}
}
// Scans this scanner's Data buffer with a rolling hash to partition it into
// WindowSize chunks, reusing existing cloud chunks when their hash (and SHA,
// via FindExistingChunk) matches, and queueing genuinely new chunks to the
// chunk writer. Returns the resulting data structure plus per-chunk info
// (hash, SHA, new/known flag, file size). Consumes Data (emptied at the end).
FDataScanResult FDataScannerImpl::ScanData()
{
	// Count running scanners
	FScopeCounter ScopeCounter(&NumRunningScanners);
	FStatsCollector::Accumulate(StatCreatedScanners, 1);
	FStatsCollector::Accumulate(StatRunningScanners, 1);

	// Init data
	FRollingHash<WindowSize> RollingHash;
	FChunkWriter ChunkWriter(FBuildPatchServicesModule::GetCloudDirectory(), StatsCollector);
	FDataStructure DataStructure(DataStartOffset);
	TMap<FGuid, FChunkInfo> ChunkInfoLookup;
	TArray<uint8> ChunkBuffer;
	TArray<uint8> NewChunkBuffer;
	uint32 PaddedZeros = 0;
	ChunkInfoLookup.Reserve(Data.Num() / WindowSize);
	ChunkBuffer.SetNumUninitialized(WindowSize);
	NewChunkBuffer.Reserve(WindowSize);

	// Get a copy of the chunk inventory
	TMap<uint64, TSet<FGuid>> ChunkInventory = CloudEnumeration->GetChunkInventory();
	TMap<FGuid, int64> ChunkFileSizes = CloudEnumeration->GetChunkFileSizes();
	TMap<FGuid, FSHAHash> ChunkShaHashes = CloudEnumeration->GetChunkShaHashes();

	// Loop over and process all data. The loop intentionally runs past the end
	// of Data, feeding zero padding, until a full window of padding has been
	// processed or an abort is requested.
	FGuid MatchedChunk;
	uint64 TempTimer;
	uint64 CpuTimer;
	FStatsCollector::AccumulateTimeBegin(CpuTimer);
	for (int32 idx = 0; (idx < Data.Num() || PaddedZeros < WindowSize) && !bShouldAbort; ++idx)
	{
		// Consume data: refill the rolling hash window when it needs bytes.
		const uint32 NumDataNeeded = RollingHash.GetNumDataNeeded();
		if (NumDataNeeded > 0)
		{
			FStatsScopedTimer ConsumeTimer(StatConsumeBytesTime);
			uint32 NumConsumedBytes = 0;
			if (idx < Data.Num())
			{
				NumConsumedBytes = FMath::Min<uint32>(NumDataNeeded, Data.Num() - idx);
				RollingHash.ConsumeBytes(&Data[idx], NumConsumedBytes);
				// idx is advanced manually here; the loop's ++idx covers the last byte.
				idx += NumConsumedBytes - 1;
			}
			// Zero Pad?
			if (NumConsumedBytes < NumDataNeeded)
			{
				TArray<uint8> Zeros;
				Zeros.AddZeroed(NumDataNeeded - NumConsumedBytes);
				RollingHash.ConsumeBytes(Zeros.GetData(), Zeros.Num());
				PaddedZeros = Zeros.Num();
			}
			check(RollingHash.GetNumDataNeeded() == 0);
			continue;
		}

		// Number of real (non-padding) bytes currently in the hash window.
		const uint64 NumDataInWindow = WindowSize - PaddedZeros;
		const uint64 WindowHash = RollingHash.GetWindowHash();

		// Try find match
		if (FindExistingChunk(ChunkInventory, ChunkShaHashes, WindowHash, RollingHash, MatchedChunk))
		{
			// Push the chunk to the structure
			DataStructure.PushKnownChunk(MatchedChunk, NumDataInWindow);
			FChunkInfo& ChunkInfo = ChunkInfoLookup.FindOrAdd(MatchedChunk);
			ChunkInfo.Hash = WindowHash;
			ChunkInfo.ShaHash = ChunkShaHashes[MatchedChunk];
			ChunkInfo.IsNew = false;
			FStatsCollector::Accumulate(StatMatchedData, NumDataInWindow);
			// Clear matched window
			RollingHash.Clear();
			// Decrement idx to include current byte in next window
			--idx;
		}
		else
		{
			// Collect unrecognized bytes: shift one byte out of the window into
			// the pending "new chunk" buffer.
			NewChunkBuffer.Add(RollingHash.GetWindowData().Bottom());
			DataStructure.PushUnknownByte();
			if (NumDataInWindow == 1)
			{
				// Only padding remains after this byte; zero-fill to a full chunk.
				NewChunkBuffer.AddZeroed(WindowSize - NewChunkBuffer.Num());
			}
			if (NewChunkBuffer.Num() == WindowSize)
			{
				// A full chunk of unmatched data accumulated; check whether it
				// matches an existing chunk as a whole before writing it as new.
				const uint64 NewChunkHash = FRollingHash<WindowSize>::GetHashForDataSet(NewChunkBuffer.GetData());
				if (FindExistingChunk(ChunkInventory, ChunkShaHashes, NewChunkHash, NewChunkBuffer, MatchedChunk))
				{
					DataStructure.RemapCurrentChunk(MatchedChunk);
					FChunkInfo& ChunkInfo = ChunkInfoLookup.FindOrAdd(MatchedChunk);
					ChunkInfo.Hash = NewChunkHash;
					ChunkInfo.ShaHash = ChunkShaHashes[MatchedChunk];
					ChunkInfo.IsNew = false;
					FStatsCollector::Accumulate(StatMatchedData, WindowSize);
				}
				else
				{
					FStatsScopedTimer ChunkWriterTimer(StatChunkWriterTime);
					const FGuid& NewChunkGuid = DataStructure.GetCurrentChunkId();
					// Pause the CPU timer while handing work to the writer.
					FStatsCollector::AccumulateTimeEnd(StatCpuTime, CpuTimer);
					ChunkWriter.QueueChunk(NewChunkBuffer.GetData(), NewChunkGuid, NewChunkHash);
					FStatsCollector::AccumulateTimeBegin(CpuTimer);
					FChunkInfo& ChunkInfo = ChunkInfoLookup.FindOrAdd(NewChunkGuid);
					ChunkInfo.Hash = NewChunkHash;
					ChunkInfo.IsNew = true;
					FSHA1::HashBuffer(NewChunkBuffer.GetData(), NewChunkBuffer.Num(), ChunkInfo.ShaHash.Hash);
					ChunkShaHashes.Add(NewChunkGuid, ChunkInfo.ShaHash);
					FStatsCollector::Accumulate(StatExtraData, NewChunkBuffer.Num());
				}
				DataStructure.CompleteCurrentChunk();
				NewChunkBuffer.Empty(WindowSize);
			}

			// Roll byte into window
			if (idx < Data.Num())
			{
				RollingHash.RollForward(Data[idx]);
			}
			else
			{
				// Past the end of real data: roll in zero padding.
				RollingHash.RollForward(0);
				++PaddedZeros;
			}
		}
	}

	// Collect left-overs: flush any trailing partial chunk.
	if (NewChunkBuffer.Num() > 0)
	{
		NewChunkBuffer.AddZeroed(WindowSize - NewChunkBuffer.Num());
		const uint64 NewChunkHash = FRollingHash<WindowSize>::GetHashForDataSet(NewChunkBuffer.GetData());
		if (FindExistingChunk(ChunkInventory, ChunkShaHashes, NewChunkHash, NewChunkBuffer, MatchedChunk))
		{
			// Setup chunk info for a match
			DataStructure.RemapCurrentChunk(MatchedChunk);
			FChunkInfo& ChunkInfo = ChunkInfoLookup.FindOrAdd(MatchedChunk);
			ChunkInfo.Hash = NewChunkHash;
			ChunkInfo.ShaHash = ChunkShaHashes[MatchedChunk];
			ChunkInfo.IsNew = false;
		}
		else
		{
			// Save the final chunk if no match
			FStatsScopedTimer ChunkWriterTimer(StatChunkWriterTime);
			const FGuid& NewChunkGuid = DataStructure.GetCurrentChunkId();
			FStatsCollector::AccumulateTimeEnd(StatCpuTime, CpuTimer);
			ChunkWriter.QueueChunk(NewChunkBuffer.GetData(), NewChunkGuid, NewChunkHash);
			FStatsCollector::AccumulateTimeBegin(CpuTimer);
			FChunkInfo& ChunkInfo = ChunkInfoLookup.FindOrAdd(NewChunkGuid);
			ChunkInfo.Hash = NewChunkHash;
			ChunkInfo.IsNew = true;
			FSHA1::HashBuffer(NewChunkBuffer.GetData(), NewChunkBuffer.Num(), ChunkInfo.ShaHash.Hash);
			ChunkShaHashes.Add(NewChunkGuid, ChunkInfo.ShaHash);
			FStatsCollector::Accumulate(StatExtraData, NewChunkBuffer.Num());
		}
	}
	FStatsCollector::AccumulateTimeEnd(StatCpuTime, CpuTimer);

	// Wait for the chunk writer to finish, and fill out chunk file sizes
	FStatsCollector::AccumulateTimeBegin(TempTimer);
	ChunkWriter.NoMoreChunks();
	ChunkWriter.WaitForThread();
	ChunkWriter.GetChunkFilesizes(ChunkFileSizes);
	FStatsCollector::AccumulateTimeEnd(StatChunkWriterTime, TempTimer);

	// Fill out chunk file sizes
	FStatsCollector::AccumulateTimeBegin(CpuTimer);
	for (auto& ChunkInfo : ChunkInfoLookup)
	{
		ChunkInfo.Value.ChunkFileSize = ChunkFileSizes[ChunkInfo.Key];
	}

	// Empty data to save RAM
	Data.Empty();
	FStatsCollector::AccumulateTimeEnd(StatCpuTime, CpuTimer);
	FStatsCollector::Accumulate(StatRunningScanners, -1);
	bIsComplete = true;
	return FDataScanResult(
		MoveTemp(DataStructure.GetFinalDataStructure()),
		MoveTemp(ChunkInfoLookup));
}