Code Example #1
bool FProfilerClientManager::CheckHashAndWrite( const FProfilerServiceFileChunk& FileChunk, const FProfilerFileChunkHeader& FileChunkHeader, FArchive* Writer )
{
#if STATS
	const int32 HashSize = 20;
	uint8 LocalHash[HashSize]={0};
	
	// Hash file chunk data. 
	FSHA1 Sha;
	Sha.Update( FileChunk.Data.GetData(), FileChunkHeader.ChunkSize );
	// Hash file chunk header.
	Sha.Update( FileChunk.Header.GetData(), FileChunk.Header.Num() );
	Sha.Final();
	Sha.GetHash( LocalHash );

	const int32 MemDiff = FMemory::Memcmp( FileChunk.ChunkHash.GetData(), LocalHash, HashSize );

	bool bResult = false;

	if( MemDiff == 0 )
	{
		// Write the data to the archive.
		Writer->Seek( FileChunkHeader.ChunkOffset );
		Writer->Serialize( (void*)FileChunk.Data.GetData(), FileChunkHeader.ChunkSize );

		bResult = true;
	}

	return bResult;
#else
	return false;
#endif
}
Code Example #2
void UShaderPlatformQualitySettings::BuildHash(EMaterialQualityLevel::Type QualityLevel, FSHAHash& OutHash) const
{
	FSHA1 Hash;

	AppendToHashState(QualityLevel, Hash);

	Hash.Final();
	Hash.GetHash(&OutHash.Hash[0]);
}
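
The examples above show the basic FSHA1 flow used throughout this page: feed bytes into the hash state with Update(), call Final(), then copy the 20-byte digest out with GetHash(). A minimal stand-alone sketch of that flow (the input buffer is illustrative, not from the engine source):

// Minimal sketch: hash an arbitrary byte buffer with FSHA1 (illustrative input data).
const uint8 SomeData[] = { 1, 2, 3, 4 };
const int32 HashSize = 20; // SHA-1 digest length (FSHA1::DigestSize in later examples)
uint8 Digest[HashSize] = { 0 };

FSHA1 Sha;
Sha.Update(SomeData, sizeof(SomeData)); // may be called repeatedly to hash data in chunks
Sha.Final();                            // finalize before reading the digest
Sha.GetHash(Digest);                    // copy the 20-byte result into Digest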
Code Example #3
File: Shader.cpp  Project: amyvmiwei/UnrealEngine4
void FShaderParameterMap::UpdateHash(FSHA1& HashState) const
{
	for(TMap<FString,FParameterAllocation>::TConstIterator ParameterIt(ParameterMap);ParameterIt;++ParameterIt)
	{
		const FString& ParamName = ParameterIt.Key();
		const FParameterAllocation& ParamValue = ParameterIt.Value();
		HashState.Update((const uint8*)*ParamName, ParamName.Len() * sizeof(TCHAR));
		HashState.Update((const uint8*)&ParamValue.BufferIndex, sizeof(ParamValue.BufferIndex));
		HashState.Update((const uint8*)&ParamValue.BaseIndex, sizeof(ParamValue.BaseIndex));
		HashState.Update((const uint8*)&ParamValue.Size, sizeof(ParamValue.Size));
	}
}
Code Example #4
File: BuildPatchUtil.cpp  Project: xiangyuan/Unreal4
uint8 FBuildPatchUtils::VerifyFile(const FString& FileToVerify, const FSHAHashData& Hash1, const FSHAHashData& Hash2, FBuildPatchFloatDelegate ProgressDelegate, FBuildPatchBoolRetDelegate ShouldPauseDelegate, double& TimeSpentPaused)
{
	uint8 ReturnValue = 0;
	FArchive* FileReader = IFileManager::Get().CreateFileReader(*FileToVerify);
	ProgressDelegate.ExecuteIfBound(0.0f);
	if (FileReader != NULL)
	{
		FSHA1 HashState;
		FSHAHashData HashValue;
		const int64 FileSize = FileReader->TotalSize();
		uint8* FileReadBuffer = new uint8[FileBufferSize];
		while (!FileReader->AtEnd() && !FBuildPatchInstallError::HasFatalError())
		{
			// Pause if necessary
			const double PrePauseTime = FPlatformTime::Seconds();
			double PostPauseTime = PrePauseTime;
			bool bShouldPause = ShouldPauseDelegate.IsBound() && ShouldPauseDelegate.Execute();
			while (bShouldPause && !FBuildPatchInstallError::HasFatalError())
			{
				FPlatformProcess::Sleep(0.1f);
				bShouldPause = ShouldPauseDelegate.Execute();
				PostPauseTime = FPlatformTime::Seconds();
			}
			// Count up pause time
			TimeSpentPaused += PostPauseTime - PrePauseTime;
			// Read file and update hash state
			const int64 SizeLeft = FileSize - FileReader->Tell();
			const uint32 ReadLen = FMath::Min< int64 >(FileBufferSize, SizeLeft);
			FileReader->Serialize(FileReadBuffer, ReadLen);
			HashState.Update(FileReadBuffer, ReadLen);
			const double FileSizeTemp = FileSize;
			const float Progress = 1.0f - ((SizeLeft - ReadLen) / FileSizeTemp);
			ProgressDelegate.ExecuteIfBound(Progress);
		}
		delete[] FileReadBuffer;
		HashState.Final();
		HashState.GetHash(HashValue.Hash);
		ReturnValue = (HashValue == Hash1) ? 1 : (HashValue == Hash2) ? 2 : 0;
		if (ReturnValue == 0)
		{
			GLog->Logf(TEXT("BuildDataGenerator: Verify failed on %s"), *FPaths::GetCleanFilename(FileToVerify));
		}
		FileReader->Close();
		delete FileReader;
	}
	else
	{
		GLog->Logf(TEXT("BuildDataGenerator: ERROR VerifyFile cannot open %s"), *FileToVerify);
	}
	ProgressDelegate.ExecuteIfBound(1.0f);
	return ReturnValue;
}
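
A hedged caller-side sketch of the return convention above. It assumes VerifyFile can be invoked as a static utility and passes default-constructed (unbound) delegates so no progress or pause callbacks fire; the path and hash variables are hypothetical:

double TimeSpentPaused = 0.0;
const uint8 Result = FBuildPatchUtils::VerifyFile(
	InstalledFilePath,              // hypothetical FString naming the file to check
	ExpectedHash, PreviousHash,     // two acceptable FSHAHashData digests (hypothetical)
	FBuildPatchFloatDelegate(),     // unbound: no progress reporting
	FBuildPatchBoolRetDelegate(),   // unbound: never pauses
	TimeSpentPaused);
// Per the implementation above: 0 = no match, 1 = matched Hash1, 2 = matched Hash2.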
Code Example #5
File: UnrealPak.cpp  Project: colwalder/unrealengine
bool CopyCompressedFileToPak(FArchive& InPak, const FString& InMountPoint, const FPakInputPair& InFile, const FCompressedFileBuffer& CompressedFile, FPakEntryPair& OutNewEntry)
{
	if (CompressedFile.TotalCompressedSize == 0)
	{
		return false;
	}

	int64 HeaderTell = InPak.Tell();
	OutNewEntry.Info.CompressionMethod = CompressedFile.FileCompressionMethod;
	OutNewEntry.Info.CompressionBlocks.AddUninitialized(CompressedFile.CompressedBlocks.Num());

	int64 TellPos = InPak.Tell() + OutNewEntry.Info.GetSerializedSize(FPakInfo::PakFile_Version_Latest);
	const TArray<FPakCompressedBlock>& Blocks = CompressedFile.CompressedBlocks;
	for (int32 BlockIndex = 0, BlockCount = CompressedFile.CompressedBlocks.Num(); BlockIndex < BlockCount; ++BlockIndex)
	{
		OutNewEntry.Info.CompressionBlocks[BlockIndex].CompressedStart = Blocks[BlockIndex].CompressedStart + TellPos;
		OutNewEntry.Info.CompressionBlocks[BlockIndex].CompressedEnd = Blocks[BlockIndex].CompressedEnd + TellPos;
	}

	if (InFile.bNeedEncryption)
	{
		FAES::EncryptData(CompressedFile.CompressedBuffer.Get(), CompressedFile.TotalCompressedSize);
	}

	// Hash the final buffer that's written
	FSHA1 Hash;
	Hash.Update(CompressedFile.CompressedBuffer.Get(), CompressedFile.TotalCompressedSize);
	Hash.Final();

	// Update file size & Hash
	OutNewEntry.Info.CompressionBlockSize = CompressedFile.FileCompressionBlockSize;
	OutNewEntry.Info.UncompressedSize = CompressedFile.OriginalSize;
	OutNewEntry.Info.Size = CompressedFile.TotalCompressedSize;
	Hash.GetHash(OutNewEntry.Info.Hash);

	//	Write the header, then the data
	OutNewEntry.Filename = InFile.Dest.Mid(InMountPoint.Len());
	OutNewEntry.Info.Offset = 0; // Don't serialize offsets here.
	OutNewEntry.Info.bEncrypted = InFile.bNeedEncryption;
	OutNewEntry.Info.Serialize(InPak,FPakInfo::PakFile_Version_Latest);
	InPak.Serialize(CompressedFile.CompressedBuffer.Get(), CompressedFile.TotalCompressedSize);

	return true;
}
Code Example #6
void FRawMeshBulkData::UseHashAsGuid(UObject* Owner)
{
	// Build the hash from the path name + the contents of the bulk data.
	FSHA1 Sha;
	TArray<TCHAR> OwnerName = Owner->GetPathName().GetCharArray();
	Sha.Update((uint8*)OwnerName.GetData(), OwnerName.Num() * OwnerName.GetTypeSize());
	if (BulkData.GetBulkDataSize() > 0)
	{
		uint8* Buffer = (uint8*)BulkData.Lock(LOCK_READ_ONLY);
		Sha.Update(Buffer, BulkData.GetBulkDataSize());
		BulkData.Unlock();
	}
	Sha.Final();

	// Retrieve the hash and use it to construct a pseudo-GUID. Use bGuidIsHash to distinguish from real guids.
	uint32 Hash[5];
	Sha.GetHash((uint8*)Hash);
	Guid = FGuid(Hash[0] ^ Hash[4], Hash[1], Hash[2], Hash[3]);
	bGuidIsHash = true;
}
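
The digest-to-GUID folding at the end of this example reads the 20-byte SHA-1 digest as five 32-bit words and reduces them to the four an FGuid holds by XOR-ing the first and last words. A minimal sketch of that step as a stand-alone helper (the function name is hypothetical, not an engine API):

// Hypothetical helper illustrating the same fold used in UseHashAsGuid above.
static FGuid GuidFromSha1Digest(const uint8* Digest /* 20 bytes */)
{
	uint32 Words[5];
	FMemory::Memcpy(Words, Digest, sizeof(Words));
	return FGuid(Words[0] ^ Words[4], Words[1], Words[2], Words[3]);
}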
Code Example #7
bool FBuildPatchFileConstructor::InsertChunkData(const FChunkPartData& ChunkPart, FArchive& DestinationFile, FSHA1& HashState)
{
	uint8* Data;
	uint8* DataStart;
	FChunkFile* ChunkFile = FBuildPatchChunkCache::Get().GetChunkFile( ChunkPart.Guid );
	if( ChunkFile != NULL && !FBuildPatchInstallError::HasFatalError() )
	{
		ChunkFile->GetDataLock( &Data, NULL );
		DataStart = &Data[ ChunkPart.Offset ];
		HashState.Update( DataStart, ChunkPart.Size );
		DestinationFile.Serialize( DataStart, ChunkPart.Size );
		ChunkFile->Dereference();
		ChunkFile->ReleaseDataLock();
		return true;
	}
	return false;
}
Code Example #8
File: BuildPatchUtil.cpp  Project: xiangyuan/Unreal4
bool FBuildPatchUtils::VerifyChunkFile( FArchive& ChunkFileData, bool bQuickCheck )
{
	const int64 FileSize = ChunkFileData.TotalSize();
	bool bSuccess = ChunkFileData.IsLoading();
	if ( !bSuccess )
	{
		GLog->Logf( TEXT( "BuildPatchServices: ERROR: VerifyChunkFile expected readonly archive" ) );
	}
	else
	{
		// Read the header
		FChunkHeader Header;
		ChunkFileData << Header;
		// Check header magic
		if ( !Header.IsValidMagic() )
		{
			bSuccess = false;
			GLog->Logf( TEXT( "BuildPatchServices: ERROR: VerifyChunkFile corrupt header" ) );
		}
		// Check Header and data size
		if ( bSuccess && ( Header.HeaderSize + Header.DataSize ) != FileSize )
		{
			bSuccess = false;
			GLog->Logf( TEXT( "BuildPatchServices: ERROR: VerifyChunkFile header info does not match file size" ) );
		}
		if( bSuccess && !bQuickCheck )
		{
			// Hashes for checking data
			FSHA1 SHAHasher;
			FSHAHashData SHAHash;
			uint64 CycPoly64Hash = 0;
			// Load the data to check
			uint8* FileReadBuffer = new uint8[ FileBufferSize ];
			int64 DataOffset = 0;
			switch ( Header.StoredAs )
			{
			case FChunkHeader::STORED_RAW:
				while( !ChunkFileData.AtEnd() )
				{
					const int64 SizeLeft = FileSize - ChunkFileData.Tell();
					const uint32 ReadLen = FMath::Min< int64 >( FileBufferSize, SizeLeft );
					ChunkFileData.Serialize( FileReadBuffer, ReadLen );
					switch ( Header.HashType )
					{
					case FChunkHeader::HASH_ROLLING:
						CycPoly64Hash = FCycPoly64Hash::GetHashForDataSet(FileReadBuffer, ReadLen, CycPoly64Hash);
						break;
					case  FChunkHeader::HASH_SHA1:
						SHAHasher.Update( FileReadBuffer, ReadLen );
						break;
					default:
						check( false ); // @TODO LSwift: Implement other storage methods!
						bSuccess = false;
						break;
					}
					DataOffset += ReadLen;
				}
				if( bSuccess )
				{
					switch ( Header.HashType )
					{
					case FChunkHeader::HASH_ROLLING:
						bSuccess = Header.RollingHash == CycPoly64Hash;
						break;
					case  FChunkHeader::HASH_SHA1:
						SHAHasher.Final();
						SHAHasher.GetHash( SHAHash.Hash );
						bSuccess = SHAHash == Header.SHAHash;
						break;
					}
					if (!bSuccess)
					{
						GLog->Logf(TEXT("BuildPatchServices: ERROR: VerifyChunkFile file hashcheck failed"));
					}
				}
				break;
			default:
				GLog->Logf( TEXT( "BuildPatchServices: ERROR: VerifyChunkFile failed, unknown storage type" ) );
				bSuccess = false;
				break;
			}
			delete[] FileReadBuffer;
		}
	}

	return bSuccess;
}
Code Example #9
void UShaderPlatformQualitySettings::AppendToHashState(EMaterialQualityLevel::Type QualityLevel, FSHA1& HashState) const
{
	const FMaterialQualityOverrides& QualityLevelOverrides = GetQualityOverrides(QualityLevel);
	HashState.Update((const uint8*)&QualityLevelOverrides, sizeof(QualityLevelOverrides));
}
Code Example #10
bool FBuildPatchFileConstructor::InsertFileData(const FChunkPartData& ChunkPart, FArchive& DestinationFile, FSHA1& HashState)
{
	bool bSuccess = false;
	bool bLogged = false;

	// Wait for the file data to be available
	while( IsFileDataAvailable( ChunkPart.Guid ) == false )
	{
		FPlatformProcess::Sleep( 0.1f );
	}

	// Read the file
	TArray<uint8> FileData;
	FChunkHeader Header;
	const FString DataFilename = GetFileDataFilename(ChunkPart.Guid);
	bSuccess = FFileHelper::LoadFileToArray(FileData, *DataFilename);
	if (!bSuccess && !bLogged)
	{
		bLogged = true;
		FBuildPatchAnalytics::RecordConstructionError(DataFilename, FPlatformMisc::GetLastError(), TEXT("File Data Missing"));
		GLog->Logf(TEXT("BuildPatchFileConstructor: ERROR: InsertFileData could not open data file %s"), *DataFilename);
	}
	// Decompress data
	bSuccess = bSuccess && FBuildPatchUtils::UncompressFileDataFile(FileData, &Header);
	if (!bSuccess && !bLogged)
	{
		bLogged = true;
		FBuildPatchAnalytics::RecordConstructionError(DataFilename, INDEX_NONE, TEXT("File Data Uncompress Fail"));
		GLog->Logf(TEXT("BuildPatchFileConstructor: ERROR: InsertFileData: could not uncompress %s"), *FPaths::GetCleanFilename(DataFilename));
	}
	// Verify integrity
	bSuccess = bSuccess && FBuildPatchUtils::VerifyChunkFile(FileData);
	if (!bSuccess && !bLogged)
	{
		bLogged = true;
		FBuildPatchAnalytics::RecordConstructionError(DataFilename, INDEX_NONE, TEXT("File Data Verify Fail"));
		GLog->Logf(TEXT("BuildPatchFileConstructor: ERROR: InsertFileData: verification failed for %s"), *FPaths::GetCleanFilename(DataFilename));
	}
	// Check correct GUID
	bSuccess = bSuccess && (!ChunkPart.Guid.IsValid() || (ChunkPart.Guid == Header.Guid));
	if (!bSuccess && !bLogged)
	{
		bLogged = true;
		FBuildPatchAnalytics::RecordConstructionError(DataFilename, INDEX_NONE, TEXT("File Data GUID Mismatch"));
		GLog->Logf(TEXT("BuildPatchFileConstructor: ERROR: InsertFileData: mismatch GUID for %s"), *FPaths::GetCleanFilename(DataFilename));
	}

	// Continue if all was fine
	if (bSuccess)
	{
		switch (Header.StoredAs)
		{
			case FChunkHeader::STORED_RAW:
			{
				// Check we are able to get the chunk part
				const int64 StartOfPartPos = Header.HeaderSize + ChunkPart.Offset;
				const int64 EndOfPartPos = StartOfPartPos + ChunkPart.Size;
				bSuccess = EndOfPartPos <= FileData.Num();
				if (bSuccess)
				{
					HashState.Update(FileData.GetData() + StartOfPartPos, ChunkPart.Size);
					DestinationFile.Serialize(FileData.GetData() + StartOfPartPos, ChunkPart.Size);
				}
				else
				{
					FBuildPatchAnalytics::RecordConstructionError(DataFilename, INDEX_NONE, TEXT("File Data Part OOB"));
					GLog->Logf(TEXT("BuildPatchFileConstructor: ERROR: InsertFileData: part out of bounds for %s"), *FPaths::GetCleanFilename(DataFilename));
				}
			}
			break;
		default:
			FBuildPatchAnalytics::RecordConstructionError(DataFilename, INDEX_NONE, TEXT("File Data Unknown Storage"));
			GLog->Logf(TEXT("BuildPatchFileConstructor: ERROR: InsertFileData: incorrect storage method %d %s"), Header.StoredAs, *FPaths::GetCleanFilename(DataFilename));
			bSuccess = false;
			break;
		}
	}

	return bSuccess;
}
Code Example #11
bool FBuildPatchFileConstructor::ConstructFileFromChunks( const FString& Filename, bool bResumeExisting )
{
	const bool bIsFileData = BuildManifest->IsFileDataManifest();
	bResumeExisting = bResumeExisting && !bIsFileData;
	bool bSuccess = true;
	FString ErrorString;
	FString NewFilename = StagingDirectory / Filename;

	// Calculate the hash as we write the data
	FSHA1 HashState;
	FSHAHashData HashValue;

	// First make sure we can get the file manifest
	const FFileManifestData* FileManifest = BuildManifest->GetFileManifest(Filename);
	bSuccess = FileManifest != nullptr;
	if( bSuccess )
	{
		if( !FileManifest->SymlinkTarget.IsEmpty() )
		{
#if PLATFORM_MAC
			bSuccess = symlink(TCHAR_TO_UTF8(*FileManifest->SymlinkTarget), TCHAR_TO_UTF8(*NewFilename)) == 0;
#else
			const bool bSymlinkNotImplemented = false;
			check(bSymlinkNotImplemented);
			bSuccess = false;
#endif
			return bSuccess;
		}

		// Check for resuming of existing file
		int64 StartPosition = 0;
		int32 StartChunkPart = 0;
		if( bResumeExisting )
		{
			// We have to read in the existing file so that the hash check can still be done.
			FArchive* NewFileReader = IFileManager::Get().CreateFileReader( *NewFilename );
			if( NewFileReader != NULL )
			{
				// Read buffer
				uint8* ReadBuffer = new uint8[ FBuildPatchData::ChunkDataSize ];
				// Reuse a certain amount of the file
				StartPosition = FMath::Max<int64>( 0, NewFileReader->TotalSize() - NUM_BYTES_RESUME_IGNORE );
				// We'll also find the correct chunkpart to start writing from
				int64 ByteCounter = 0;
				for( int32 ChunkPartIdx = StartChunkPart; ChunkPartIdx < FileManifest->FileChunkParts.Num() && !FBuildPatchInstallError::HasFatalError(); ++ChunkPartIdx )
				{
					const FChunkPartData& ChunkPart = FileManifest->FileChunkParts[ ChunkPartIdx ];
					const int64 NextBytePosition = ByteCounter + ChunkPart.Size;
					if( NextBytePosition <= StartPosition )
					{
						// Read data for hash check
						NewFileReader->Serialize( ReadBuffer, ChunkPart.Size );
						HashState.Update( ReadBuffer, ChunkPart.Size );
						// Count bytes read from file
						ByteCounter = NextBytePosition;
						// Set to resume from next chunk part
						StartChunkPart = ChunkPartIdx + 1;
						// Inform the chunk cache of the chunk part skip
						FBuildPatchChunkCache::Get().SkipChunkPart( ChunkPart );
						// Wait if paused
						BuildProgress->WaitWhilePaused();
					}
					else
					{
						// No more parts on disk
						break;
					}
				}
				// Set start position to the byte we got up to
				StartPosition = ByteCounter;
				// Clean read buffer
				delete[] ReadBuffer;
				// Close file
				NewFileReader->Close();
				delete NewFileReader;
			}
		}

		// Now we can make sure the chunk cache knows to start downloading chunks
		if( !bIsFileData && !bIsDownloadStarted && !FBuildPatchInstallError::HasFatalError() )
		{
			bIsDownloadStarted = true;
			FBuildPatchChunkCache::Get().BeginDownloads();
		}

		// Attempt to create the file
		FArchive* NewFile = IFileManager::Get().CreateFileWriter( *NewFilename, bResumeExisting ? EFileWrite::FILEWRITE_Append : 0 );
		bSuccess = NewFile != NULL;
		if( bSuccess )
		{
			// Whenever we start writing again, there's no more resuming to be done
			BuildProgress->SetStateProgress( EBuildPatchProgress::Resuming, 1.0f );

			// Seek to file write position
			NewFile->Seek( StartPosition );

			// For each chunk, load it, and place its data into the file
			for( int32 ChunkPartIdx = StartChunkPart; ChunkPartIdx < FileManifest->FileChunkParts.Num() && bSuccess && !FBuildPatchInstallError::HasFatalError(); ++ChunkPartIdx )
			{
				const FChunkPartData& ChunkPart = FileManifest->FileChunkParts[ChunkPartIdx];
				if( bIsFileData )
				{
					bSuccess = InsertFileData( ChunkPart, *NewFile, HashState );
				}
				else
				{
					bSuccess = InsertChunkData( ChunkPart, *NewFile, HashState );
				}
				if( bSuccess )
				{
					CountBytesProcessed( ChunkPart.Size );
					// Wait if paused
					BuildProgress->WaitWhilePaused();
				}
				else
				{
					ErrorString = TEXT( "Failed to construct file " );
					ErrorString += Filename;
					ErrorString += TEXT( " because of chunk " );
					ErrorString += ChunkPart.Guid.ToString();
					GWarn->Logf( TEXT( "BuildPatchFileConstructor: ERROR: %s" ), *ErrorString );
					FBuildPatchInstallError::SetFatalError( EBuildPatchInstallError::FileConstructionFail, ErrorString );
				}
			}

			// Close the file writer
			NewFile->Close();
			delete NewFile;
		}
		else
		{
			FBuildPatchAnalytics::RecordConstructionError( Filename, FPlatformMisc::GetLastError(), TEXT( "Could Not Create File" ) );
			ErrorString = TEXT( "Could not create new file " );
			ErrorString += Filename;
			GWarn->Logf( TEXT( "BuildPatchFileConstructor: ERROR: %s" ), *ErrorString );
			FBuildPatchInstallError::SetFatalError( EBuildPatchInstallError::FileConstructionFail, ErrorString );
		}
	}
	else
	{
		FBuildPatchAnalytics::RecordConstructionError( Filename, INDEX_NONE, TEXT( "Missing File Manifest" ) );
		ErrorString = TEXT( "Build manifest does not contain a file manifest for " );
		ErrorString += Filename;
		FBuildPatchInstallError::SetFatalError( EBuildPatchInstallError::FileConstructionFail, ErrorString );
	}

	// Verify the hash for the file that we created
	if( bSuccess )
	{
		HashState.Final();
		HashState.GetHash( HashValue.Hash );
		bSuccess = HashValue == FileManifest->FileHash;
		if( !bSuccess )
		{
			FBuildPatchAnalytics::RecordConstructionError( Filename, INDEX_NONE, TEXT( "Serialised Verify Fail" ) );
			ErrorString = TEXT( "Verify failed after constructing file " );
			ErrorString += Filename;
			GWarn->Logf( TEXT( "BuildDataGenerator: ERROR: %s" ), *ErrorString );
			FBuildPatchInstallError::SetFatalError( EBuildPatchInstallError::FileConstructionFail, ErrorString );
		}
	}

#if PLATFORM_MAC
	if( bSuccess && FileManifest->bIsUnixExecutable )
	{
		// Enable executable permission bit
		struct stat FileInfo;
		if (stat(TCHAR_TO_UTF8(*NewFilename), &FileInfo) == 0)
		{
			bSuccess = chmod(TCHAR_TO_UTF8(*NewFilename), FileInfo.st_mode | S_IXUSR | S_IXGRP | S_IXOTH) == 0;
		}
	}
#endif
	
	// Delete the staging file if unsuccessful by means of construction fail (i.e. keep if canceled or download issue)
	if( !bSuccess && FBuildPatchInstallError::GetErrorState() == EBuildPatchInstallError::FileConstructionFail )
	{
		IFileManager::Get().Delete( *NewFilename, false, true );
	}

	return bSuccess;
}
Code Example #12
File: ManifestBuilder.cpp  Project: ErwinT6/T6Engine
	void FManifestBuilderImpl::BuildManifest()
	{
		TMap<FGuid, FChunkInfo> ChunkInfoLookup;
		bool Running = true;
		while (Running)
		{
			FDataScannerPtr NextScanner = GetNextScanner();
			if (NextScanner.IsValid())
			{
				FDataScanResult ScanResult = NextScanner->GetResultWhenComplete();
				ChunkInfoLookup.Append(ScanResult.ChunkInfo);

				// Always reverse for now
				if (ScanResult.DataStructure.Num() > 0)
				{
					FChunkPart& ChunkPart = ScanResult.DataStructure[0];
					if (ChunkPart.DataOffset != FileBuilder.CurrentDataPos)
					{
						check(ChunkPart.DataOffset < FileBuilder.CurrentDataPos); // Missing data!

						bool FoundPosition = false;
						uint64 DataCount = 0;
						for (int32 FileIdx = 0; FileIdx < Manifest->Data->FileManifestList.Num() && !FoundPosition; ++FileIdx)
						{
							FFileManifestData& FileManifest = Manifest->Data->FileManifestList[FileIdx];
							FileManifest.Init();
							uint64 FileStartIdx = DataCount;
							uint64 FileEndIdx = FileStartIdx + FileManifest.GetFileSize();
							if (FileEndIdx > ChunkPart.DataOffset)
							{
								for (int32 ChunkIdx = 0; ChunkIdx < FileManifest.FileChunkParts.Num() && !FoundPosition; ++ChunkIdx)
								{
									FChunkPartData& ChunkPartData = FileManifest.FileChunkParts[ChunkIdx];
									uint64 ChunkPartEndIdx = DataCount + ChunkPartData.Size;
									if (ChunkPartEndIdx < ChunkPart.DataOffset)
									{
										DataCount += ChunkPartData.Size;
									}
									else if (ChunkPartEndIdx > ChunkPart.DataOffset)
									{
										ChunkPartData.Size = ChunkPart.DataOffset - DataCount;
										FileBuilder.CurrentDataPos = DataCount + ChunkPartData.Size;
										FileManifest.FileChunkParts.SetNum(ChunkIdx + 1, false);
										FileManifest.FileChunkParts.Emplace();
										Manifest->Data->FileManifestList.SetNum(FileIdx + 1, false);
										FileBuilder.FileManifest = &Manifest->Data->FileManifestList.Last();
										bool FoundFile = BuildStreamer->GetFileSpan(FileStartIdx, FileBuilder.FileSpan);
										check(FoundFile); // Incorrect positional tracking
										FoundPosition = true;
									}
									else
									{
										FileBuilder.CurrentDataPos = DataCount + ChunkPartData.Size;
										FileManifest.FileChunkParts.SetNum(ChunkIdx + 1, false);
										FileManifest.FileChunkParts.Emplace();
										Manifest->Data->FileManifestList.SetNum(FileIdx + 1, false);
										FileBuilder.FileManifest = &Manifest->Data->FileManifestList.Last();
										bool FoundFile = BuildStreamer->GetFileSpan(FileStartIdx, FileBuilder.FileSpan);
										check(FoundFile); // Incorrect positional tracking
										FoundPosition = true;
									}
								}
							}
							else if (FileEndIdx < ChunkPart.DataOffset)
							{
								DataCount += FileManifest.GetFileSize();
							}
							else
							{
								FileBuilder.FileManifest = nullptr;
								FileBuilder.CurrentDataPos = DataCount + FileManifest.GetFileSize();
								Manifest->Data->FileManifestList.SetNum(FileIdx + 1, false);
								FoundPosition = true;
							}
						}

						check(ChunkPart.DataOffset == FileBuilder.CurrentDataPos);
						check(FileBuilder.FileManifest == nullptr || FileBuilder.FileSpan.Filename == Manifest->Data->FileManifestList.Last().Filename);
					}
				}

				for (int32 idx = 0; idx < ScanResult.DataStructure.Num(); ++idx)
				{
					FChunkPart& ChunkPart = ScanResult.DataStructure[idx];
					// Starting new file?
					if (FileBuilder.FileManifest == nullptr)
					{
						Manifest->Data->FileManifestList.Emplace();
						FileBuilder.FileManifest = &Manifest->Data->FileManifestList.Last();

						bool FoundFile = BuildStreamer->GetFileSpan(FileBuilder.CurrentDataPos, FileBuilder.FileSpan);
						check(FoundFile); // Incorrect positional tracking

						FileBuilder.FileManifest->Filename = FileBuilder.FileSpan.Filename;
						FileBuilder.FileManifest->FileChunkParts.Emplace();
					}

					FChunkPartData& FileChunkPartData = FileBuilder.FileManifest->FileChunkParts.Last();
					FileChunkPartData.Guid = ChunkPart.ChunkGuid;
					FileChunkPartData.Offset = (FileBuilder.CurrentDataPos - ChunkPart.DataOffset) + ChunkPart.ChunkOffset;

					// Process data into file manifests
					int64 FileDataLeft = (FileBuilder.FileSpan.StartIdx + FileBuilder.FileSpan.Size) - FileBuilder.CurrentDataPos;
					int64 ChunkDataLeft = (ChunkPart.DataOffset + ChunkPart.PartSize) - FileBuilder.CurrentDataPos;
					check(FileDataLeft > 0);
					check(ChunkDataLeft > 0);

					if (ChunkDataLeft >= FileDataLeft)
					{
						FileBuilder.CurrentDataPos += FileDataLeft;
						FileChunkPartData.Size = FileDataLeft;
					}
					else
					{
						FileBuilder.CurrentDataPos += ChunkDataLeft;
						FileChunkPartData.Size = ChunkDataLeft;
					}

					FileDataLeft = (FileBuilder.FileSpan.StartIdx + FileBuilder.FileSpan.Size) - FileBuilder.CurrentDataPos;
					ChunkDataLeft = (ChunkPart.DataOffset + ChunkPart.PartSize) - FileBuilder.CurrentDataPos;
					check(FileDataLeft == 0 || ChunkDataLeft == 0);
					// End of file?
					if (FileDataLeft == 0)
					{
						// Fill out rest of data??
						FFileSpan FileSpan;
						bool FoundFile = BuildStreamer->GetFileSpan(FileBuilder.FileSpan.StartIdx, FileSpan);
						check(FoundFile); // Incorrect positional tracking
						check(FileSpan.Filename == FileBuilder.FileManifest->Filename);
						FMemory::Memcpy(FileBuilder.FileManifest->FileHash.Hash, FileSpan.SHAHash.Hash, FSHA1::DigestSize);
						FFileAttributes Attributes = FileAttributesMap.FindRef(FileSpan.Filename);
						FileBuilder.FileManifest->bIsUnixExecutable = Attributes.bUnixExecutable || FileSpan.IsUnixExecutable;
						FileBuilder.FileManifest->SymlinkTarget = FileSpan.SymlinkTarget;
						FileBuilder.FileManifest->bIsReadOnly = Attributes.bReadOnly;
						FileBuilder.FileManifest->bIsCompressed = Attributes.bCompressed;
						FileBuilder.FileManifest->InstallTags = Attributes.InstallTags.Array();
						FileBuilder.FileManifest->Init();
						check(FileBuilder.FileManifest->GetFileSize() == FileBuilder.FileSpan.Size);
						FileBuilder.FileManifest = nullptr;
					}
					else if (ChunkDataLeft == 0)
					{
						FileBuilder.FileManifest->FileChunkParts.Emplace();
					}

					// Continue with this chunk?
					if (ChunkDataLeft > 0)
					{
						--idx;
					}
				}
			}
			else
			{
				if (EndOfData)
				{
					Running = false;
				}
				else
				{
					CheckForWork->Wait();
					CheckForWork->Reset();
				}
			}
		}

		// Fill out chunk list from only chunks that remain referenced
		TSet<FGuid> ReferencedChunks;
		for (const auto& FileManifest : Manifest->Data->FileManifestList)
		{
			for (const auto& ChunkPart : FileManifest.FileChunkParts)
			{
				if (ReferencedChunks.Contains(ChunkPart.Guid) == false)
				{
					auto& ChunkInfo = ChunkInfoLookup[ChunkPart.Guid];
					ReferencedChunks.Add(ChunkPart.Guid);
					Manifest->Data->ChunkList.Emplace();
					auto& ChunkInfoData = Manifest->Data->ChunkList.Last();
					ChunkInfoData.Guid = ChunkPart.Guid;
					ChunkInfoData.Hash = ChunkInfo.Hash;
					FMemory::Memcpy(ChunkInfoData.ShaHash.Hash, ChunkInfo.ShaHash.Hash, FSHA1::DigestSize);
					ChunkInfoData.FileSize = ChunkInfo.ChunkFileSize;
					ChunkInfoData.GroupNumber = FCrc::MemCrc32(&ChunkPart.Guid, sizeof(FGuid)) % 100;
				}
			}
		}

		// Get empty files
		FSHA1 EmptyHasher;
		EmptyHasher.Final();
		const TArray< FString >& EmptyFileList = BuildStreamer->GetEmptyFiles();
		for (const auto& EmptyFile : EmptyFileList)
		{
			Manifest->Data->FileManifestList.Emplace();
			FFileManifestData& EmptyFileManifest = Manifest->Data->FileManifestList.Last();
			EmptyFileManifest.Filename = EmptyFile;
			EmptyHasher.GetHash(EmptyFileManifest.FileHash.Hash);
		}

		// Fill out lookups
		Manifest->InitLookups();
	}