/**
 * Computes the MD5 digest of an entire file.
 *
 * @param Filename  Path of the file to hash.
 * @param FileHash  Receives the 16-byte MD5 digest on success.
 * @return true if the file could be opened and hashed, false otherwise.
 */
bool GenerateHashForFile( FString Filename, uint8 FileHash[16])
{
	FArchive* Reader = IFileManager::Get().CreateFileReader(*Filename);
	if (Reader == NULL)
	{
		// Could not open the file; nothing written to FileHash.
		return false;
	}

	// Pull the whole file into memory in one read.
	uint64 NumBytes = Reader->TotalSize();
	uint8* Contents = new uint8[NumBytes];
	Reader->Serialize(Contents, NumBytes);

	// Done with the archive before hashing begins.
	delete Reader;
	Reader = NULL;

	// Feed the buffer through MD5 and emit the digest.
	FMD5 Hasher;
	Hasher.Update(Contents, NumBytes);
	delete[] Contents;
	Hasher.Final(FileHash);

	return true;
}
/**
 * Load a text file to an FString.
 * Supports all combination of ANSI/Unicode files and platforms.
 * @param Result string representation of the loaded file
 * @param Filename name of the file to load
 * @param VerifyFlags flags controlling the hash verification behavior ( see EHashOptions )
 * @return true if the file was read and the reader closed successfully
 */
bool FFileHelper::LoadFileToString( FString& Result, const TCHAR* Filename, uint32 VerifyFlags )
{
	FArchive* Reader = IFileManager::Get().CreateFileReader( Filename );
	if( !Reader )
	{
		// Fixed: was "return 0" — the function returns bool, so use the bool literal.
		return false;
	}

	// Read the entire file into a raw byte buffer.
	int32 Size = Reader->TotalSize();
	uint8* Ch = (uint8*)FMemory::Malloc(Size);
	Reader->Serialize( Ch, Size );
	bool Success = Reader->Close();
	delete Reader;

	// Convert the raw bytes (ANSI or Unicode) into the result string.
	BufferToString( Result, Ch, Size );

	// handle SHA verify of the file
	if( (VerifyFlags & EHashOptions::EnableVerify) && ( (VerifyFlags & EHashOptions::ErrorMissingHash) || FSHA1::GetFileSHAHash(Filename, NULL) ) )
	{
		// kick off SHA verify task. this frees the buffer on close
		FBufferReaderWithSHA Ar( Ch, Size, true, Filename, false, true );
	}
	else
	{
		// free manually since not running SHA task
		FMemory::Free(Ch);
	}

	return Success;
}
/**
 * Hashes a file on disk and compares the result against two candidate SHA hashes,
 * reporting progress and honoring pause requests while reading.
 *
 * @param FileToVerify        Full path of the file to verify.
 * @param Hash1               First acceptable SHA hash.
 * @param Hash2               Second acceptable SHA hash.
 * @param ProgressDelegate    Invoked (if bound) with progress in [0,1].
 * @param ShouldPauseDelegate Polled each iteration; while it returns true the loop sleeps.
 * @param TimeSpentPaused     Accumulates the seconds spent paused so callers can discount it.
 * @return 1 if the file matched Hash1, 2 if it matched Hash2, 0 on mismatch or open failure.
 */
uint8 FBuildPatchUtils::VerifyFile(const FString& FileToVerify, const FSHAHashData& Hash1, const FSHAHashData& Hash2, FBuildPatchFloatDelegate ProgressDelegate, FBuildPatchBoolRetDelegate ShouldPauseDelegate, double& TimeSpentPaused)
{
	uint8 ReturnValue = 0;
	FArchive* FileReader = IFileManager::Get().CreateFileReader(*FileToVerify);
	ProgressDelegate.ExecuteIfBound(0.0f);
	if (FileReader != NULL)
	{
		FSHA1 HashState;
		FSHAHashData HashValue;
		const int64 FileSize = FileReader->TotalSize();
		// Fixed-size scratch buffer; the file is hashed in FileBufferSize slices.
		uint8* FileReadBuffer = new uint8[FileBufferSize];
		while (!FileReader->AtEnd() && !FBuildPatchInstallError::HasFatalError())
		{
			// Pause if necessary
			const double PrePauseTime = FPlatformTime::Seconds();
			double PostPauseTime = PrePauseTime;
			bool bShouldPause = ShouldPauseDelegate.IsBound() && ShouldPauseDelegate.Execute();
			while (bShouldPause && !FBuildPatchInstallError::HasFatalError())
			{
				FPlatformProcess::Sleep(0.1f);
				bShouldPause = ShouldPauseDelegate.Execute();
				PostPauseTime = FPlatformTime::Seconds();
			}
			// Count up pause time
			TimeSpentPaused += PostPauseTime - PrePauseTime;
			// Read file and update hash state
			const int64 SizeLeft = FileSize - FileReader->Tell();
			const uint32 ReadLen = FMath::Min< int64 >(FileBufferSize, SizeLeft);
			FileReader->Serialize(FileReadBuffer, ReadLen);
			HashState.Update(FileReadBuffer, ReadLen);
			// Progress is computed from bytes remaining after this read.
			const double FileSizeTemp = FileSize;
			const float Progress = 1.0f - ((SizeLeft - ReadLen) / FileSizeTemp);
			ProgressDelegate.ExecuteIfBound(Progress);
		}
		delete[] FileReadBuffer;
		HashState.Final();
		HashState.GetHash(HashValue.Hash);
		// 1 = matched first hash, 2 = matched second, 0 = matched neither.
		ReturnValue = (HashValue == Hash1) ? 1 : (HashValue == Hash2) ? 2 : 0;
		if (ReturnValue == 0)
		{
			GLog->Logf(TEXT("BuildDataGenerator: Verify failed on %s"), *FPaths::GetCleanFilename(FileToVerify));
		}
		FileReader->Close();
		delete FileReader;
	}
	else
	{
		GLog->Logf(TEXT("BuildDataGenerator: ERROR VerifyFile cannot open %s"), *FileToVerify);
	}
	// Always report completion, even on failure, so bound UI can finish.
	ProgressDelegate.ExecuteIfBound(1.0f);
	return ReturnValue;
}
/**
 * Reads a file from disk into the shared JavaScript ArrayBuffer.
 * The destination buffer must already be sized to exactly match the file
 * (via FArrayBufferAccessor); a size mismatch fails without reading.
 *
 * @param Object   Unused context object (kept for the scripting binding's signature).
 * @param Filename Path of the file to read.
 * @return true if the file was fully read and the reader closed cleanly.
 */
bool UJavascriptLibrary::ReadFile(UObject* Object, FString Filename)
{
	FArchive* Reader = IFileManager::Get().CreateFileReader(*Filename);
	if (!Reader)
	{
		return false;
	}
	int32 Size = Reader->TotalSize();
	if (Size != FArrayBufferAccessor::GetSize())
	{
		// Fixed: the reader was leaked on this early-out.
		Reader->Close();
		delete Reader;
		return false;
	}
	Reader->Serialize(FArrayBufferAccessor::GetData(), Size);
	// Fixed: the reader was never deleted on the success path either.
	bool bSuccess = Reader->Close();
	delete Reader;
	return bSuccess;
}
/**
 * Load a binary file to a dynamic array.
 * @param Result   receives the raw bytes of the file (previous contents discarded)
 * @param Filename name of the file to load
 * @param Flags    FILEREAD_* flags forwarded to the file manager (FILEREAD_Silent suppresses the warning)
 * @return true if the file was read and the reader closed successfully
 */
bool FFileHelper::LoadFileToArray( TArray<uint8>& Result, const TCHAR* Filename, uint32 Flags )
{
	FArchive* Reader = IFileManager::Get().CreateFileReader( Filename, Flags );
	if( !Reader )
	{
		if (!(Flags & FILEREAD_Silent))
		{
			UE_LOG(LogStreaming,Warning,TEXT("Failed to read file '%s' error."),Filename);
		}
		// Fixed: was "return 0" — the function returns bool, so use the bool literal.
		return false;
	}
	// Size the array to the whole file and read it in one pass.
	Result.Reset();
	Result.AddUninitialized( Reader->TotalSize() );
	Reader->Serialize(Result.GetData(), Result.Num());
	bool Success = Reader->Close();
	delete Reader;
	return Success;
}
/**
 * Retrieves a cache entry: preloads its metadata, then reads the remainder
 * of the archive (everything after the metadata) into OutData.
 *
 * @param Bucket   Cache bucket to read from.
 * @param CacheKey Key identifying the entry.
 * @param OutData  Receives the entry's payload bytes (previous contents discarded).
 * @return The entry's metadata, or nullptr if no archive could be opened.
 */
FCacheEntryMetadata* FRuntimeAssetCacheBackend::GetCachedData(const FName Bucket, const TCHAR* CacheKey, TArray<uint8>& OutData)
{
	FCacheEntryMetadata* Result = nullptr;
	FArchive* Ar = CreateReadArchive(Bucket, CacheKey);
	if (!Ar)
	{
		return Result;
	}
	// Metadata sits at the front of the archive; reading it advances Tell().
	Result = PreloadMetadata(Ar);
	// Payload is whatever remains between the current position and the end.
	// NOTE(review): assumes PreloadMetadata never reads past TotalSize — a negative
	// remainder would be passed to AddUninitialized; confirm upstream guarantees.
	int64 TotalSize = Ar->TotalSize();
	int64 CurrentPosition = Ar->Tell();
	int64 NumberOfBytesToSerialize = TotalSize - CurrentPosition;
	OutData.Reset();
	OutData.AddUninitialized(NumberOfBytesToSerialize);
	Ar->Serialize(OutData.GetData(), NumberOfBytesToSerialize);
	Ar->Close();
	delete Ar;
	return Result;
}
virtual void UpdateADBPath() override { FScopeLock PathUpdateLock(&ADBPathCheckLock); TCHAR AndroidDirectory[32768] = { 0 }; FPlatformMisc::GetEnvironmentVariable(TEXT("ANDROID_HOME"), AndroidDirectory, 32768); FString ADBPath; #if PLATFORM_MAC if (AndroidDirectory[0] == 0) { // didn't find ANDROID_HOME, so parse the .bash_profile file on MAC FArchive* FileReader = IFileManager::Get().CreateFileReader(*FString([@"~/.bash_profile" stringByExpandingTildeInPath])); if (FileReader) { const int64 FileSize = FileReader->TotalSize(); ANSICHAR* AnsiContents = (ANSICHAR*)FMemory::Malloc(FileSize); FileReader->Serialize(AnsiContents, FileSize); FileReader->Close(); delete FileReader; TArray<FString> Lines; FString(ANSI_TO_TCHAR(AnsiContents)).ParseIntoArrayLines(&Lines); FMemory::Free(AnsiContents); for (int32 Index = 0; Index < Lines.Num(); Index++) { if (AndroidDirectory[0] == 0 && Lines[Index].StartsWith(TEXT("export ANDROID_HOME="))) { FString Directory; Lines[Index].Split(TEXT("="), NULL, &Directory); Directory = Directory.Replace(TEXT("\""), TEXT("")); FCString::Strcpy(AndroidDirectory, *Directory); setenv("ANDROID_HOME", TCHAR_TO_ANSI(AndroidDirectory), 1); } } } }
/**
 * Validates a chunk file archive: checks the header magic and size bookkeeping,
 * and (unless bQuickCheck) streams the payload through the hash named by the
 * header (rolling CycPoly64 or SHA1) and compares against the stored value.
 *
 * @param ChunkFileData Read archive positioned at the start of the chunk file.
 * @param bQuickCheck   When true, only header/size checks run; payload hashing is skipped.
 * @return true if every applicable check passed.
 */
bool FBuildPatchUtils::VerifyChunkFile( FArchive& ChunkFileData, bool bQuickCheck )
{
	const int64 FileSize = ChunkFileData.TotalSize();
	// A writable archive is a caller error — this routine only reads.
	bool bSuccess = ChunkFileData.IsLoading();
	if ( !bSuccess )
	{
		GLog->Logf( TEXT( "BuildPatchServices: ERROR: VerifyChunkFile expected readonly archive" ) );
	}
	else
	{
		// Read the header
		FChunkHeader Header;
		ChunkFileData << Header;
		// Check header magic
		if ( !Header.IsValidMagic() )
		{
			bSuccess = false;
			GLog->Logf( TEXT( "BuildPatchServices: ERROR: VerifyChunkFile corrupt header" ) );
		}
		// Check Header and data size
		if ( bSuccess && ( Header.HeaderSize + Header.DataSize ) != FileSize )
		{
			bSuccess = false;
			GLog->Logf( TEXT( "BuildPatchServices: ERROR: VerifyChunkFile header info does not match file size" ) );
		}
		if( bSuccess && !bQuickCheck )
		{
			// Hashes for checking data
			FSHA1 SHAHasher;
			FSHAHashData SHAHash;
			uint64 CycPoly64Hash = 0;
			// Load the data to check
			uint8* FileReadBuffer = new uint8[ FileBufferSize ];
			int64 DataOffset = 0;
			switch ( Header.StoredAs )
			{
			case FChunkHeader::STORED_RAW:
				// Stream the payload in FileBufferSize slices, feeding the
				// hasher selected by Header.HashType.
				while( !ChunkFileData.AtEnd() )
				{
					const int64 SizeLeft = FileSize - ChunkFileData.Tell();
					const uint32 ReadLen = FMath::Min< int64 >( FileBufferSize, SizeLeft );
					ChunkFileData.Serialize( FileReadBuffer, ReadLen );
					switch ( Header.HashType )
					{
					case FChunkHeader::HASH_ROLLING:
						CycPoly64Hash = FCycPoly64Hash::GetHashForDataSet(FileReadBuffer, ReadLen, CycPoly64Hash);
						break;
					case FChunkHeader::HASH_SHA1:
						SHAHasher.Update( FileReadBuffer, ReadLen );
						break;
					default:
						check( false ); // @TODO LSwift: Implement other storage methods!
						bSuccess = false;
						break;
					}
					DataOffset += ReadLen;
				}
				if( bSuccess )
				{
					// Compare the computed hash against the header's stored value.
					switch ( Header.HashType )
					{
					case FChunkHeader::HASH_ROLLING:
						bSuccess = Header.RollingHash == CycPoly64Hash;
						break;
					case FChunkHeader::HASH_SHA1:
						SHAHasher.Final();
						SHAHasher.GetHash( SHAHash.Hash );
						bSuccess = SHAHash == Header.SHAHash;
						break;
					}
					if (!bSuccess)
					{
						GLog->Logf(TEXT("BuildPatchServices: ERROR: VerifyChunkFile file hashcheck failed"));
					}
				}
				break;
			default:
				// Only raw storage is verifiable here.
				GLog->Logf( TEXT( "BuildPatchServices: ERROR: VerifyChunkFile failed, unknown storage type" ) );
				bSuccess = false;
				break;
			}
			delete[] FileReadBuffer;
		}
	}
	return bSuccess;
}
/**
 * Reconstructs a required chunk from an already-installed build on disk,
 * verifies it against the manifest's hash, and adds it to the chunk cache.
 *
 * Steps: locate a manifest that contains the chunk, enumerate which file
 * ranges of that build hold pieces of it, stitch those ranges into a
 * temporary buffer, hash-check the result, then store it as a new cache entry.
 *
 * @param ChunkGuid GUID of the chunk to recycle. Must not already be cached.
 * @return true if the chunk was reconstructed, verified, and cached.
 */
bool FBuildPatchChunkCache::RecycleChunkFromBuild( const FGuid& ChunkGuid )
{
	// Must never double acquire
	check( ChunkCache.Contains( ChunkGuid ) == false );
	// Debug leaving any files open
	bool bSuccess = true;
	// Get the app manifest that this chunk can be sourced from
	FBuildPatchAppManifestPtr ChunkSourceAppManifest = InstallationInfo.GetManifestContainingChunk(ChunkGuid);
	if (!ChunkSourceAppManifest.IsValid())
	{
		return false;
	}
	// Get the install directory for this manifest
	const FString ChunkSourceInstallDir = InstallationInfo.GetManifestInstallDir(ChunkSourceAppManifest);
	if(ChunkSourceInstallDir.Len() <= 0)
	{
		return false;
	}
	// We need to generate an inventory of all chunk parts in this build that refer to the chunk that we require
	TMap< FGuid, TArray< FFileChunkPart > > ChunkPartInventory;
	TArray< FGuid > Array;
	Array.Add( ChunkGuid );
	ChunkSourceAppManifest->EnumerateChunkPartInventory(Array, ChunkPartInventory);
	// Attempt construction of the chunk from the parts
	FArchive* BuildFileIn = NULL;
	FString BuildFileOpened;
	int64 BuildFileInSize = 0;
	// We must have a hash for this chunk or else we cant verify it
	// Prefer SHA1 from the install manifest; fall back to the rolling hash
	// from the source manifest. HashType == 0 means no hash was available.
	uint8 HashType = 0;
	uint64 ChunkHash = 0;
	FSHAHashData ChunkShaHash;
	if (InstallManifet->GetChunkShaHash(ChunkGuid, ChunkShaHash))
	{
		HashType = FChunkHeader::HASH_SHA1;
	}
	else if (ChunkSourceAppManifest->GetChunkHash(ChunkGuid, ChunkHash))
	{
		HashType = FChunkHeader::HASH_ROLLING;
	}
	TArray< FFileChunkPart >* FileChunkPartsPtr = ChunkPartInventory.Find( ChunkGuid );
	bSuccess = (FileChunkPartsPtr != NULL && HashType != 0);
	if( bSuccess )
	{
		const TArray< FFileChunkPart >& FileChunkParts = *FileChunkPartsPtr;
		// Zero-filled staging buffer the chunk is assembled into.
		TArray< uint8 > TempArray;
		TempArray.AddUninitialized( FBuildPatchData::ChunkDataSize );
		uint8* TempChunkConstruction = TempArray.GetData();
		FMemory::Memzero( TempChunkConstruction, FBuildPatchData::ChunkDataSize );
		bSuccess = FileChunkParts.Num() > 0;
		// Copy each part from its source file into the staging buffer. The
		// same reader is reused while consecutive parts come from one file.
		for( auto FileChunkPartIt = FileChunkParts.CreateConstIterator(); FileChunkPartIt && bSuccess && !FBuildPatchInstallError::HasFatalError(); ++FileChunkPartIt )
		{
			const FFileChunkPart& FileChunkPart = *FileChunkPartIt;
			FString FullFilename = ChunkSourceInstallDir / FileChunkPart.Filename;
			// Close current build file ?
			if( BuildFileIn != NULL && BuildFileOpened != FullFilename )
			{
				BuildFileIn->Close();
				delete BuildFileIn;
				BuildFileIn = NULL;
				BuildFileOpened = TEXT( "" );
				BuildFileInSize = 0;
			}
			// Open build file ?
			if( BuildFileIn == NULL )
			{
				BuildFileIn = IFileManager::Get().CreateFileReader( *FullFilename );
				bSuccess = BuildFileIn != NULL;
				if( !bSuccess )
				{
					BuildFileOpened = TEXT( "" );
					FBuildPatchAnalytics::RecordChunkCacheError( ChunkGuid, FileChunkPart.Filename, FPlatformMisc::GetLastError(), TEXT( "ChunkRecycle" ), TEXT( "Source File Missing" ) );
					GWarn->Logf( TEXT( "BuildPatchChunkConstruction: Warning: Failed to load source file for chunk. %s" ), *FullFilename );
				}
				else
				{
					BuildFileOpened = FullFilename;
					BuildFileInSize = BuildFileIn->TotalSize();
				}
			}
			// Grab the section of the chunk
			if( BuildFileIn != NULL )
			{
				// Make sure we don't attempt to read off the end of the file
				const int64 LastRequiredByte = FileChunkPart.FileOffset + FileChunkPart.ChunkPart.Size;
				if( BuildFileInSize >= LastRequiredByte )
				{
					BuildFileIn->Seek( FileChunkPart.FileOffset );
					BuildFileIn->Serialize( TempChunkConstruction + FileChunkPart.ChunkPart.Offset, FileChunkPart.ChunkPart.Size );
				}
				else
				{
					bSuccess = false;
					FBuildPatchAnalytics::RecordChunkCacheError( ChunkGuid, FileChunkPart.Filename, INDEX_NONE, TEXT( "ChunkRecycle" ), TEXT( "Source File Too Small" ) );
					GWarn->Logf( TEXT( "BuildPatchChunkConstruction: Warning: Source file too small for chunk position. %s" ), *FullFilename );
				}
			}
		}
		// Check no other fatal errors were registered in the meantime
		bSuccess = bSuccess && !FBuildPatchInstallError::HasFatalError();
		// Check chunk hash
		if( bSuccess )
		{
			FSHAHashData ShaHashCheck;
			switch (HashType)
			{
			case FChunkHeader::HASH_ROLLING:
				bSuccess = FRollingHash< FBuildPatchData::ChunkDataSize >::GetHashForDataSet(TempChunkConstruction) == ChunkHash;
				break;
			case FChunkHeader::HASH_SHA1:
				FSHA1::HashBuffer(TempChunkConstruction, FBuildPatchData::ChunkDataSize, ShaHashCheck.Hash);
				bSuccess = ShaHashCheck == ChunkShaHash;
				break;
			default:
				bSuccess = false;
			}
			if( !bSuccess )
			{
				FBuildPatchAnalytics::RecordChunkCacheError( ChunkGuid, TEXT( "" ), INDEX_NONE, TEXT( "ChunkRecycle" ), TEXT( "Chunk Hash Fail" ) );
				GWarn->Logf( TEXT( "BuildPatchChunkConstruction: Warning: Hash check failed for recycled chunk %s" ), *ChunkGuid.ToString() );
			}
		}
		// Save the chunk to cache if all went well
		if( bSuccess )
		{
			// It was added asynchronously!!
			check( ChunkCache.Contains( ChunkGuid ) == false );
			// Create the ChunkFile data structure
			FChunkFile* NewChunkFile = new FChunkFile( GetRemainingReferenceCount( ChunkGuid ), true );
			// Lock data
			FChunkHeader* ChunkHeader;
			uint8* ChunkData;
			NewChunkFile->GetDataLock( &ChunkData, &ChunkHeader );
			// Copy the data
			FMemoryReader MemReader( TempArray );
			MemReader.Serialize( ChunkData, FBuildPatchData::ChunkDataSize );
			// Setup the header
			ChunkHeader->Guid = ChunkGuid;
			ChunkHeader->StoredAs = FChunkHeader::STORED_RAW;
			ChunkHeader->DataSize = FBuildPatchData::ChunkDataSize; // This would change if compressing/encrypting
			ChunkHeader->HashType = HashType;
			ChunkHeader->RollingHash = ChunkHash;
			ChunkHeader->SHAHash = ChunkShaHash;
			// Release data
			NewChunkFile->ReleaseDataLock();
			// Count chunk
			NumChunksRecycled.Increment();
			// Add it to our cache.
			ChunkCache.Add( ChunkGuid, NewChunkFile );
		}
		// Close any open file
		if( BuildFileIn != NULL )
		{
			BuildFileIn->Close();
			delete BuildFileIn;
			BuildFileIn = NULL;
		}
	}
	return bSuccess;
}
/**
 * Loads a previously staged chunk file from the drive cache into the in-memory
 * chunk cache, validating header magic, data size, total file size, and the
 * payload hash (rolling or SHA1 per the header) before accepting it.
 *
 * @param ChunkGuid GUID of the chunk to load from the drive cache stage.
 * @return true if the chunk was read, verified, and added to the cache.
 */
bool FBuildPatchChunkCache::ReadChunkFromDriveCache( const FGuid& ChunkGuid )
{
	bool bSuccess = true;
	// Get the chunk filename
	const FString Filename = FBuildPatchUtils::GetChunkOldFilename( ChunkCacheStage, ChunkGuid );
	// Read the chunk
	FArchive* FileReader = IFileManager::Get().CreateFileReader( *Filename );
	bSuccess = FileReader != NULL;
	if( bSuccess )
	{
		// Get file size
		const int64 FileSize = FileReader->TotalSize();
		// Create the ChunkFile data structure
		// NOTE(review): NewChunkFile appears to be leaked on the failure paths
		// below — it is removed from the cache but never deleted. Confirm whether
		// FChunkFile ownership is handled elsewhere.
		FChunkFile* NewChunkFile = new FChunkFile( GetRemainingReferenceCount( ChunkGuid ), true );
		// Lock data
		FChunkHeader* ChunkHeader;
		uint8* ChunkData;
		NewChunkFile->GetDataLock( &ChunkData, &ChunkHeader );
		// Read the header
		*FileReader << *ChunkHeader;
		// Check header magic
		bSuccess = ChunkHeader->IsValidMagic();
		if ( bSuccess )
		{
			// Check the right data size
			bSuccess = ChunkHeader->DataSize == FBuildPatchData::ChunkDataSize;
			if( bSuccess )
			{
				// Check Header and data size
				bSuccess = ( ChunkHeader->HeaderSize + ChunkHeader->DataSize ) == FileSize;
				if( bSuccess )
				{
					// Read the data
					FileReader->Serialize( ChunkData, FBuildPatchData::ChunkDataSize );
					// Verify the data hash
					FSHAHashData ShaHashCheck;
					switch (ChunkHeader->HashType)
					{
					case FChunkHeader::HASH_ROLLING:
						bSuccess = ChunkHeader->RollingHash == FRollingHash< FBuildPatchData::ChunkDataSize >::GetHashForDataSet(ChunkData);
						break;
					case FChunkHeader::HASH_SHA1:
						FSHA1::HashBuffer(ChunkData, FBuildPatchData::ChunkDataSize, ShaHashCheck.Hash);
						bSuccess = ShaHashCheck == ChunkHeader->SHAHash;
						break;
					default:
						bSuccess = false;
					}
					if( !bSuccess )
					{
						FBuildPatchAnalytics::RecordChunkCacheError( ChunkGuid, Filename, INDEX_NONE, TEXT( "DriveCache" ), TEXT( "Hash Check Failed" ) );
						GLog->Logf( TEXT( "FBuildPatchChunkCache: ERROR: ReadChunkFromDriveCache chunk failed hash check %s" ), *ChunkGuid.ToString() );
					}
					else
					{
						// Count loads
						NumDriveCacheChunkLoads.Increment();
						GLog->Logf( TEXT( "FBuildPatchChunkCache: ReadChunkFromDriveCache loaded chunk %s" ), *ChunkGuid.ToString() );
					}
				}
				else
				{
					FBuildPatchAnalytics::RecordChunkCacheError( ChunkGuid, Filename, INDEX_NONE, TEXT( "DriveCache" ), TEXT( "Incorrect File Size" ) );
					GLog->Logf( TEXT( "FBuildPatchChunkCache: ERROR: ReadChunkFromDriveCache header info does not match file size %s" ), *ChunkGuid.ToString() );
				}
			}
			else
			{
				FBuildPatchAnalytics::RecordChunkCacheError( ChunkGuid, Filename, INDEX_NONE, TEXT( "DriveCache" ), TEXT( "Datasize/Hashtype Mismatch" ) );
				GLog->Logf( TEXT( "FBuildPatchChunkCache: ERROR: ReadChunkFromDriveCache mismatch datasize/hashtype combination %s" ), *ChunkGuid.ToString() );
			}
		}
		else
		{
			FBuildPatchAnalytics::RecordChunkCacheError( ChunkGuid, Filename, INDEX_NONE, TEXT( "DriveCache" ), TEXT( "Corrupt Header" ) );
			GLog->Logf( TEXT( "FBuildPatchChunkCache: ERROR: ReadChunkFromDriveCache corrupt header %s" ), *ChunkGuid.ToString() );
		}
		// Release data
		NewChunkFile->ReleaseDataLock();
		// Add the newly filled data to the cache if successful
		if( bSuccess )
		{
			ChunkCache.Add( ChunkGuid, NewChunkFile );
		}
		// If there was a problem, remove from cache and reservation
		else
		{
			ChunkCache.Remove( ChunkGuid );
		}
		// Close the file
		FileReader->Close();
		delete FileReader;
	}
	else
	{
		FBuildPatchAnalytics::RecordChunkCacheError( ChunkGuid, Filename, FPlatformMisc::GetLastError(), TEXT( "DriveCache" ), TEXT( "Open File Fail" ) );
		GLog->Logf( TEXT( "BuildPatchServices: ERROR: GetChunkData could not open chunk file %s" ), *ChunkGuid.ToString() );
	}
	return bSuccess;
}
/**
 * Installer step that handles a single "File" key/value entry: copies (and,
 * for .UZ sources, decompresses) the source file to its destination, then —
 * when the entry references a base file (Info.Ref) — applies a binary delta
 * patch to produce the final file.
 *
 * Error reporting goes through LocalizedFileError/appErrorf, which do not
 * return normally; the code after such calls assumes they abort.
 *
 * @param Key      Setup key; only TEXT("File") entries are processed.
 * @param Value    Encoded FFileInfo describing src/dest/ref/lang.
 * @param Selected Whether this component was selected for install.
 * @param Poll     Progress/cancellation callback; a false return cancels.
 */
void USetupDefinition::ProcessCopy( FString Key, FString Value, UBOOL Selected, FInstallPoll* Poll )
{
	guard(USetupDefinition::ProcessCopy);
	BYTE Buffer[4096];
	if( Selected && Key==TEXT("File") )
	{
		// Get source and dest filenames.
		FFileInfo Info(*Value);
		// Skip files tagged for a different language than the active one.
		if( Info.Lang==TEXT("") || Info.Lang==UObject::GetLanguage() )
		{
			if( Info.Dest==TEXT("") )
				Info.Dest = Info.Src;
			if( !LocateSourceFile(Info.Src) )
				LocalizedFileError( TEXT("MissingInstallerFile"), Patch ? TEXT("AdviseBadDownload") : TEXT("AdviseBadMedia"), *Info.Src );
			FString FullDest = DestPath * Info.Dest;
			FString FullSrc = Info.Ref==TEXT("") ? Info.Src : GetFullRef(*Info.Ref);
			// Ref'd files are first copied to a temp name, then patched over.
			FString FullPatch = FullDest + TEXT("_tmp");
			// Update uninstallation log.
			UninstallLogAdd( TEXT("File"), *Info.Dest, 0, 1 );
			// Make destination directory.
			if( !GFileManager->MakeDirectory( *BasePath(FullDest), 1 ) )
				LocalizedFileError( TEXT("FailedMakeDir"), TEXT("AdviseBadDest"), *FullDest );
			// Status display.
			if( !Poll->Poll(*FullDest,0,0,RunningBytes,TotalBytes) )
				DidCancel();
			// Copy SrcAr -> DestAr.
			INT CalcOldCRC = 0;
			guard(CopyFile);
			FString ThisDest = Info.Ref==TEXT("") ? FullDest : FullPatch;
			debugf( TEXT("Copying %s to %s"), *FullSrc, *ThisDest);
			FArchive* SrcAr = GFileManager->CreateFileReader( *FullSrc );
			if( !SrcAr )
				LocalizedFileError( TEXT("FailedOpenSource"), Patch ? TEXT("AdviseBadDownload") : TEXT("AdviseBadMedia"), *FullSrc );
			INT Size = SrcAr->TotalSize();
			FArchive* DestAr = GFileManager->CreateFileWriter( *ThisDest, FILEWRITE_EvenIfReadOnly );
			if( !DestAr )
				LocalizedFileError( TEXT("FailedOpenDest"), TEXT("AdviseBadDest"), *ThisDest );
			// Compressed .UZ source being written to a non-.UZ destination:
			// validate the signature and run the codec chain to decompress.
			if( FullSrc.Right(3).Caps() == TEXT(".UZ") && ThisDest.Right(3).Caps() != TEXT(".UZ"))
			{
				INT Signature;
				FString OrigFilename;
				*SrcAr << Signature;
				if( Signature != 5678 )
					LocalizedFileError( TEXT("FailedOpenSource"), TEXT("AdviseBadMedia"), *FullSrc );
				else
				{
					*SrcAr << OrigFilename;
					FCodecFull Codec;
					Codec.AddCodec(new FCodecRLE);
					Codec.AddCodec(new FCodecBWT);
					Codec.AddCodec(new FCodecMTF);
					Codec.AddCodec(new FCodecRLE);
					Codec.AddCodec(new FCodecHuffman);
					Codec.Decode( *SrcAr, *DestAr );
					if( !Poll->Poll(*FullDest,Size,Size,RunningBytes+=Size,TotalBytes) )
					{
						delete SrcAr;
						delete DestAr;
						DidCancel();
					}
				}
			}
			else
			{
				// Plain block copy, accumulating the CRC of the source when a
				// delta patch will need it for validation below.
				for( SQWORD Pos=0; Pos<Size; Pos+=sizeof(Buffer) )
				{
					INT Count = Min( Size-Pos, (SQWORD)sizeof(Buffer) );
					SrcAr->Serialize( Buffer, Count );
					if( SrcAr->IsError() )
					{
						delete SrcAr;
						delete DestAr;
						LocalizedFileError( TEXT("FailedReadingSource"), Patch ? TEXT("AdviseBadDownload") : TEXT("AdviseBadMedia"), *FullSrc );
					}
					if( Info.Ref!=TEXT("") )
					{
						CalcOldCRC = appMemCrc( Buffer, Count, CalcOldCRC );
					}
					DestAr->Serialize( Buffer, Count );
					if( DestAr->IsError() )
					{
						delete SrcAr;
						delete DestAr;
						LocalizedFileError( TEXT("FailedWritingDest"), TEXT("AdviseBadDest"), *ThisDest );
					}
					if( !Poll->Poll(*FullDest,Pos,Size,RunningBytes+=Count,TotalBytes) )
					{
						delete SrcAr;
						delete DestAr;
						DidCancel();
					}
				}
			}
			delete SrcAr;
			if( !DestAr->Close() )
				LocalizedFileError( TEXT("FailedClosingDest"), TEXT("AdviseBadDest"), *ThisDest );
			delete DestAr;
			unguard;
			// Patch SrcAr + DeltaFile -> DestAr.
			if( Info.Ref!=TEXT("") )
			{
				guard(PatchFile);
				BYTE Buffer[4096];
				// Open files.
				FString ThisSrc = FullPatch;
				FArchive* SrcAr = GFileManager->CreateFileReader( *ThisSrc );
				if( !SrcAr )
					LocalizedFileError( TEXT("FailedOpenSource"), Patch ? TEXT("AdviseBadDownload") : TEXT("AdviseBadMedia"), *ThisSrc );
				INT Size = SrcAr->TotalSize();
				FArchive* DestAr = GFileManager->CreateFileWriter(*FullDest,FILEWRITE_EvenIfReadOnly);
				if( !DestAr )
					LocalizedFileError( TEXT("FailedOpenDest"), TEXT("AdviseBadDest"), *FullDest );
				// Load delta file.
				TArray<BYTE> Delta;
				FString DeltaName = Info.Src;
				if( !appLoadFileToArray( Delta, *DeltaName ) )
					LocalizedFileError( TEXT("FailedLoadingUpdate"), TEXT("AdviseBadDownload"), *Info.Src );
				debugf( TEXT("Patching %s to %s with %s"), *ThisSrc, *FullDest, *DeltaName );
				// Decompress variables.
				INT PrevSpot=0, CountSize=0, CRC=0;
				INT Magic=0, OldSize=0, OldCRC=0, NewSize=0, NewCRC;
				FBufferReader Reader( Delta );
				Reader << Magic << OldSize << OldCRC << NewSize << NewCRC;
				// Validate.
				if( Magic!=0x92f92912 )
					appErrorf( LineFormat(LocalizeError("PatchCorrupt")), *DeltaName, LocalizeError("AdviseBadDownload") );
				if( OldSize!=Size || OldCRC!=CalcOldCRC )
					appErrorf( LocalizeError("CdFileMismatch"), *Info.Ref, *LocalProduct );
				// Delta decode it.
				// Negative index = literal run taken from the delta stream;
				// non-negative = copy of Index bytes from the base file at
				// PrevSpot + relative offset.
				INT OldCountSize=0;
				while( !Reader.AtEnd() )
				{
					INT Index;
					Reader << AR_INDEX(Index);
					if( Index<0 )
					{
						CRC = appMemCrc( &Delta(Reader.Tell()), -Index, CRC );
						DestAr->Serialize( &Delta(Reader.Tell()), -Index );
						if( DestAr->IsError() )
							LocalizedFileError( TEXT("FailedWritingDest"), TEXT("AdviseBadDest"), *FullDest );
						Reader.Seek( Reader.Tell() - Index );
						CountSize -= Index;
					}
					else
					{
						INT CopyPos;
						Reader << AR_INDEX(CopyPos);
						CopyPos += PrevSpot;
						check(CopyPos>=0);
						check(CopyPos+Index<=Size);
						SrcAr->Seek( CopyPos );
						for( INT Base=Index; Base>0; Base-=sizeof(Buffer) )
						{
							INT Move = Min(Base,(INT)sizeof(Buffer));
							SrcAr->Serialize( Buffer, Move );
							if( SrcAr->IsError() )
								// NOTE(review): both ternary arms are TEXT("AdviseBadDownload");
								// every sibling call uses Patch ? "AdviseBadDownload" : "AdviseBadMedia".
								// Looks like a copy-paste slip — confirm before changing the literal.
								LocalizedFileError( TEXT("FailedReadingSource"), Patch ? TEXT("AdviseBadDownload") : TEXT("AdviseBadDownload"), *ThisSrc );
							CRC = appMemCrc( Buffer, Move, CRC );
							DestAr->Serialize( Buffer, Move );
							if( DestAr->IsError() )
								LocalizedFileError( TEXT("FailedWritingDest"), TEXT("AdviseBadDest"), *FullDest );
						}
						CountSize += Index;
						PrevSpot = CopyPos + Index;
					}
					// Throttle progress updates to once per buffer-sized stride.
					if( ((CountSize^OldCountSize)&~(sizeof(Buffer)-1)) || Reader.AtEnd() )
					{
						if( !Poll->Poll(*FullDest,CountSize,Info.Size,RunningBytes+=(CountSize-OldCountSize),TotalBytes) )
						{
							delete SrcAr;
							delete DestAr;
							DidCancel();
						}
						OldCountSize = CountSize;
					}
				}
				// Final output must match the size and CRC recorded in the delta.
				if( NewSize!=CountSize || NewCRC!=CRC )
					appErrorf( LineFormat(LocalizeError("PatchCorrupt")), *DeltaName, LocalizeError("AdviseBadDownload") );
				delete SrcAr;
				if( !DestAr->Close() )
					LocalizedFileError( TEXT("FailedClosingDest"), TEXT("AdviseBadDest"), *FullDest );
				delete DestAr;
				// Remove the temporary pre-patch copy.
				GFileManager->Delete( *ThisSrc );
				unguard;
			}
		}
	}
	unguard;
}
/**
 * Builds one destination file in the staging directory from its manifest's
 * chunk parts, optionally resuming a partially written file, and verifies the
 * final SHA hash against the manifest.
 *
 * @param Filename        Build-relative name of the file to construct.
 * @param bResumeExisting When true (and not a file-data manifest), reuse bytes
 *                        already present in the staged file instead of rewriting them.
 * @return true if the file was fully constructed and passed hash verification.
 */
bool FBuildPatchFileConstructor::ConstructFileFromChunks( const FString& Filename, bool bResumeExisting )
{
	const bool bIsFileData = BuildManifest->IsFileDataManifest();
	// Resume is only supported for chunked manifests.
	bResumeExisting = bResumeExisting && !bIsFileData;
	bool bSuccess = true;
	FString ErrorString;
	FString NewFilename = StagingDirectory / Filename;
	// Calculate the hash as we write the data
	FSHA1 HashState;
	FSHAHashData HashValue;
	// First make sure we can get the file manifest
	const FFileManifestData* FileManifest = BuildManifest->GetFileManifest(Filename);
	bSuccess = FileManifest != nullptr;
	if( bSuccess )
	{
		// Symlink entries carry no data; create the link and return.
		if( !FileManifest->SymlinkTarget.IsEmpty() )
		{
#if PLATFORM_MAC
			bSuccess = symlink(TCHAR_TO_UTF8(*FileManifest->SymlinkTarget), TCHAR_TO_UTF8(*NewFilename)) == 0;
#else
			const bool bSymlinkNotImplemented = false;
			check(bSymlinkNotImplemented);
			bSuccess = false;
#endif
			return bSuccess;
		}
		// Check for resuming of existing file
		int64 StartPosition = 0;
		int32 StartChunkPart = 0;
		if( bResumeExisting )
		{
			// We have to read in the existing file so that the hash check can still be done.
			FArchive* NewFileReader = IFileManager::Get().CreateFileReader( *NewFilename );
			if( NewFileReader != NULL )
			{
				// Read buffer
				uint8* ReadBuffer = new uint8[ FBuildPatchData::ChunkDataSize ];
				// Reuse a certain amount of the file
				// (the last NUM_BYTES_RESUME_IGNORE bytes are always rewritten).
				StartPosition = FMath::Max<int64>( 0, NewFileReader->TotalSize() - NUM_BYTES_RESUME_IGNORE );
				// We'll also find the correct chunkpart to start writing from
				int64 ByteCounter = 0;
				for( int32 ChunkPartIdx = StartChunkPart; ChunkPartIdx < FileManifest->FileChunkParts.Num() && !FBuildPatchInstallError::HasFatalError(); ++ChunkPartIdx )
				{
					const FChunkPartData& ChunkPart = FileManifest->FileChunkParts[ ChunkPartIdx ];
					const int64 NextBytePosition = ByteCounter + ChunkPart.Size;
					// Only whole chunk parts that fit entirely before StartPosition are reused.
					if( NextBytePosition <= StartPosition )
					{
						// Read data for hash check
						NewFileReader->Serialize( ReadBuffer, ChunkPart.Size );
						HashState.Update( ReadBuffer, ChunkPart.Size );
						// Count bytes read from file
						ByteCounter = NextBytePosition;
						// Set to resume from next chunk part
						StartChunkPart = ChunkPartIdx + 1;
						// Inform the chunk cache of the chunk part skip
						FBuildPatchChunkCache::Get().SkipChunkPart( ChunkPart );
						// Wait if paused
						BuildProgress->WaitWhilePaused();
					}
					else
					{
						// No more parts on disk
						break;
					}
				}
				// Set start position to the byte we got up to
				StartPosition = ByteCounter;
				// Clean read buffer
				delete[] ReadBuffer;
				// Close file
				NewFileReader->Close();
				delete NewFileReader;
			}
		}
		// Now we can make sure the chunk cache knows to start downloading chunks
		if( !bIsFileData && !bIsDownloadStarted && !FBuildPatchInstallError::HasFatalError() )
		{
			bIsDownloadStarted = true;
			FBuildPatchChunkCache::Get().BeginDownloads();
		}
		// Attempt to create the file
		FArchive* NewFile = IFileManager::Get().CreateFileWriter( *NewFilename, bResumeExisting ? EFileWrite::FILEWRITE_Append : 0 );
		bSuccess = NewFile != NULL;
		if( bSuccess )
		{
			// Whenever we start writing again, there's no more resuming to be done
			BuildProgress->SetStateProgress( EBuildPatchProgress::Resuming, 1.0f );
			// Seek to file write position
			NewFile->Seek( StartPosition );
			// For each chunk, load it, and place it's data into the file
			for( int32 ChunkPartIdx = StartChunkPart; ChunkPartIdx < FileManifest->FileChunkParts.Num() && bSuccess && !FBuildPatchInstallError::HasFatalError(); ++ChunkPartIdx )
			{
				const FChunkPartData& ChunkPart = FileManifest->FileChunkParts[ChunkPartIdx];
				if( bIsFileData )
				{
					bSuccess = InsertFileData( ChunkPart, *NewFile, HashState );
				}
				else
				{
					bSuccess = InsertChunkData( ChunkPart, *NewFile, HashState );
				}
				if( bSuccess )
				{
					CountBytesProcessed( ChunkPart.Size );
					// Wait if paused
					BuildProgress->WaitWhilePaused();
				}
				else
				{
					ErrorString = TEXT( "Failed to construct file " );
					ErrorString += Filename;
					ErrorString += TEXT( " because of chunk " );
					ErrorString += ChunkPart.Guid.ToString();
					GWarn->Logf( TEXT( "BuildPatchFileConstructor: ERROR: %s" ), *ErrorString );
					FBuildPatchInstallError::SetFatalError( EBuildPatchInstallError::FileConstructionFail, ErrorString );
				}
			}
			// Close the file writer
			NewFile->Close();
			delete NewFile;
		}
		else
		{
			FBuildPatchAnalytics::RecordConstructionError( Filename, FPlatformMisc::GetLastError(), TEXT( "Could Not Create File" ) );
			ErrorString = TEXT( "Could not create new file " );
			ErrorString += Filename;
			GWarn->Logf( TEXT( "BuildPatchFileConstructor: ERROR: %s" ), *ErrorString );
			FBuildPatchInstallError::SetFatalError( EBuildPatchInstallError::FileConstructionFail, ErrorString );
		}
	}
	else
	{
		FBuildPatchAnalytics::RecordConstructionError( Filename, INDEX_NONE, TEXT( "Missing File Manifest" ) );
		ErrorString = TEXT( "Build manifest does not contain a file manifest for " );
		ErrorString += Filename;
		FBuildPatchInstallError::SetFatalError( EBuildPatchInstallError::FileConstructionFail, ErrorString );
	}
	// Verify the hash for the file that we created
	if( bSuccess )
	{
		HashState.Final();
		HashState.GetHash( HashValue.Hash );
		bSuccess = HashValue == FileManifest->FileHash;
		if( !bSuccess )
		{
			FBuildPatchAnalytics::RecordConstructionError( Filename, INDEX_NONE, TEXT( "Serialised Verify Fail" ) );
			ErrorString = TEXT( "Verify failed after constructing file " );
			ErrorString += Filename;
			GWarn->Logf( TEXT( "BuildDataGenerator: ERROR: %s" ), *ErrorString );
			FBuildPatchInstallError::SetFatalError( EBuildPatchInstallError::FileConstructionFail, ErrorString );
		}
	}
#if PLATFORM_MAC
	if( bSuccess && FileManifest->bIsUnixExecutable )
	{
		// Enable executable permission bit
		struct stat FileInfo;
		if (stat(TCHAR_TO_UTF8(*NewFilename), &FileInfo) == 0)
		{
			bSuccess = chmod(TCHAR_TO_UTF8(*NewFilename), FileInfo.st_mode | S_IXUSR | S_IXGRP | S_IXOTH) == 0;
		}
	}
#endif
	// Delete the staging file if unsuccessful by means of construction fail (i.e. keep if canceled or download issue)
	if( !bSuccess && FBuildPatchInstallError::GetErrorState() == EBuildPatchInstallError::FileConstructionFail )
	{
		IFileManager::Get().Delete( *NewFilename, false, true );
	}
	return bSuccess;
}
/**
 * Writes a chunk's data to disk, compressing with zlib when that makes it
 * smaller, and records the resulting file size keyed by chunk GUID.
 * If the chunk file already exists it is assumed identical (files are named
 * by GUID) and only its size is recorded.
 *
 * @param ChunkFilename Destination path for the chunk file.
 * @param ChunkFile     In-memory chunk (header + data) to serialize.
 * @param ChunkGuid     GUID of the chunk, used as the size-map key.
 * @return true if the file already existed or was written without error.
 */
const bool FChunkWriter::FQueuedChunkWriter::WriteChunkData(const FString& ChunkFilename, FChunkFile* ChunkFile, const FGuid& ChunkGuid)
{
	// Chunks are saved with GUID, so if a file already exists it will never be different.
	// Skip with return true if already exists
	if( FPaths::FileExists( ChunkFilename ) )
	{
		const int64 ChunkFilesSize = IFileManager::Get().FileSize(*ChunkFilename);
		ChunkFileSizesCS.Lock();
		ChunkFileSizes.Add(ChunkGuid, ChunkFilesSize);
		ChunkFileSizesCS.Unlock();
		return true;
	}
	FArchive* FileOut = IFileManager::Get().CreateFileWriter( *ChunkFilename );
	bool bSuccess = FileOut != NULL;
	if( bSuccess )
	{
		// Setup to handle compression
		bool bDataIsCompressed = true;
		uint8* ChunkDataSource = ChunkFile->ChunkData;
		int32 ChunkDataSourceSize = FBuildPatchData::ChunkDataSize;
		TArray< uint8 > TempCompressedData;
		TempCompressedData.Empty( ChunkDataSourceSize );
		TempCompressedData.AddUninitialized( ChunkDataSourceSize );
		int32 CompressedSize = ChunkDataSourceSize;
		// Compressed can increase in size, but the function will return as failure in that case
		// we can allow that to happen since we would not keep larger compressed data anyway.
		bDataIsCompressed = FCompression::CompressMemory(
			static_cast< ECompressionFlags >( COMPRESS_ZLIB | COMPRESS_BiasMemory ),
			TempCompressedData.GetData(),
			CompressedSize,
			ChunkFile->ChunkData,
			FBuildPatchData::ChunkDataSize );
		// If compression succeeded, set data vars
		if( bDataIsCompressed )
		{
			ChunkDataSource = TempCompressedData.GetData();
			ChunkDataSourceSize = CompressedSize;
		}
		// Setup Header
		FChunkHeader& Header = ChunkFile->ChunkHeader;
		// First serialization is a throwaway pass whose only purpose is to
		// measure the on-disk header size via Tell(); the header is rewritten
		// below with the final field values.
		*FileOut << Header;
		Header.HeaderSize = FileOut->Tell();
		Header.StoredAs = bDataIsCompressed ? FChunkHeader::STORED_COMPRESSED : FChunkHeader::STORED_RAW;
		Header.DataSize = ChunkDataSourceSize;
		Header.HashType = FChunkHeader::HASH_ROLLING;
		// Write out files
		FileOut->Seek( 0 );
		*FileOut << Header;
		FileOut->Serialize( ChunkDataSource, ChunkDataSourceSize );
		const int64 ChunkFilesSize = FileOut->TotalSize();
		FileOut->Close();
		ChunkFileSizesCS.Lock();
		ChunkFileSizes.Add(ChunkGuid, ChunkFilesSize);
		ChunkFileSizesCS.Unlock();
		// Error state is read after Close so buffered write failures are included.
		bSuccess = !FileOut->GetError();
		delete FileOut;
	}
	// Log errors
	if( !bSuccess )
	{
		GLog->Logf( TEXT( "BuildPatchServices: Error: Could not save out generated chunk file %s" ), *ChunkFilename );
	}
	return bSuccess;
}
/**
 * Copies a file block-by-block while reporting progress through the supplied
 * callback, which may cancel the operation. A partially written destination
 * is deleted on failure.
 *
 * @param InDestFile      Destination path.
 * @param InSrcFile       Source path.
 * @param ReplaceExisting Allow overwriting an existing destination.
 * @param EvenIfReadOnly  Allow overwriting a read-only destination.
 * @param Attributes      Unused here (kept for interface compatibility).
 * @param Progress        Poll callback; returning false cancels the copy.
 *                        NOTE(review): dereferenced unconditionally at the top
 *                        but null-checked later — confirm callers never pass null.
 * @return COPY_OK, COPY_Fail, or COPY_Canceled.
 */
uint32 FFileManagerGeneric::CopyWithProgress( const TCHAR* InDestFile, const TCHAR* InSrcFile, bool ReplaceExisting, bool EvenIfReadOnly, bool Attributes, FCopyProgress* Progress )
{
	uint32 Result = COPY_OK;

	// Direct file copier.
	if( Progress->Poll( 0.0 ) )
	{
		FString SrcFile = InSrcFile;
		FString DestFile = InDestFile;

		FArchive* Src = CreateFileReader( *SrcFile );
		if( !Src )
		{
			Result = COPY_Fail;
		}
		else
		{
			FArchive* Dest = CreateFileWriter( *DestFile,( ReplaceExisting ? 0 : FILEWRITE_NoReplaceExisting ) | ( EvenIfReadOnly ? FILEWRITE_EvenIfReadOnly : 0 ) );
			if( !Dest )
			{
				Result = COPY_Fail;
			}
			else
			{
				int64 Size = Src->TotalSize();
				int64 Percent = 0, NewPercent = 0;
				uint8* Buffer = new uint8[COPYBLOCKSIZE];
				// Fixed: the loop previously advanced by sizeof(Buffer) — the size
				// of the POINTER (4/8 bytes), not the allocation — so each pass
				// copied only a handful of bytes. Use COPYBLOCKSIZE explicitly.
				for( int64 Total = 0; Total < Size; Total += COPYBLOCKSIZE )
				{
					int64 Count = FMath::Min( Size - Total, (int64)COPYBLOCKSIZE );
					Src->Serialize( Buffer, Count );
					if( Src->IsError() )
					{
						Result = COPY_Fail;
						break;
					}
					Dest->Serialize( Buffer, Count );
					if( Dest->IsError() )
					{
						Result = COPY_Fail;
						break;
					}
					NewPercent = Total * 100 / Size;
					if( Progress && Percent != NewPercent && !Progress->Poll( ( float )NewPercent / 100.f ) )
					{
						Result = COPY_Canceled;
						break;
					}
					Percent = NewPercent;
				}
				delete [] Buffer;
				if( Result == COPY_OK && !Dest->Close() )
				{
					Result = COPY_Fail;
				}
				delete Dest;
				if( Result != COPY_OK )
				{
					// Don't leave a partial destination behind.
					Delete( *DestFile );
				}
			}
			if( Result == COPY_OK && !Src->Close() )
			{
				Result = COPY_Fail;
			}
			delete Src;
		}
		if( Progress && Result==COPY_OK && !Progress->Poll( 1.0 ) )
		{
			Result = COPY_Canceled;
		}
	}
	else
	{
		Result = COPY_Canceled;
	}
	return Result;
}
void UMediaPlayer::InitializePlayer() { if (URL != CurrentUrl) { // close previous player CurrentUrl = FString(); if (Player.IsValid()) { Player->Close(); Player->OnClosed().RemoveAll(this); Player->OnOpened().RemoveAll(this); Player.Reset(); } if (URL.IsEmpty()) { return; } // create new player IMediaModule* MediaModule = FModuleManager::LoadModulePtr<IMediaModule>("Media"); if (MediaModule == nullptr) { return; } Player = MediaModule->CreatePlayer(URL); if (!Player.IsValid()) { return; } Player->OnClosed().AddUObject(this, &UMediaPlayer::HandleMediaPlayerMediaClosed); Player->OnOpened().AddUObject(this, &UMediaPlayer::HandleMediaPlayerMediaOpened); // open the new media file bool OpenedSuccessfully = false; if (URL.Contains(TEXT("://"))) { OpenedSuccessfully = Player->Open(URL); } else { const FString FullUrl = FPaths::ConvertRelativePathToFull(FPaths::IsRelative(URL) ? FPaths::GameContentDir() / URL : URL); if (StreamMode == EMediaPlayerStreamModes::MASM_FromUrl) { OpenedSuccessfully = Player->Open(FullUrl); } else if (FPaths::FileExists(FullUrl)) { FArchive* FileReader = IFileManager::Get().CreateFileReader(*FullUrl); if (FileReader == nullptr) { return; } if (FileReader->TotalSize() > 0) { TArray<uint8>* FileData = new TArray<uint8>(); FileData->AddUninitialized(FileReader->TotalSize()); FileReader->Serialize(FileData->GetData(), FileReader->TotalSize()); OpenedSuccessfully = Player->Open(MakeShareable(FileData), FullUrl); } delete FileReader; } } // finish initialization if (OpenedSuccessfully) { CurrentUrl = URL; } } if (Player.IsValid()) { Player->SetLooping(Looping); } }
/**
 * Appends a new documentation excerpt (wrapping the tooltip's text content) to the given
 * UDN file, creating the file with a standard header when it does not exist yet, handling
 * source-control checkout/add, and opening the file in the user's source code editor.
 *
 * @param FileSource    path of the .udn file to create or append to
 * @param InExcerptName name used for the [EXCERPT:...] / [/EXCERPT:...] markers
 */
void SDocumentationToolTip::CreateExcerpt( FString FileSource, FString InExcerptName )
{
	FText CheckoutFailReason;
	bool bNewFile = true;
	bool bCheckoutOrAddSucceeded = true;
	if (FPaths::FileExists(FileSource))
	{
		// Check out the existing file
		bNewFile = false;
		bCheckoutOrAddSucceeded = SourceControlHelpers::CheckoutOrMarkForAdd(FileSource,
			NSLOCTEXT("SToolTip", "DocumentationSCCActionDesc", "tool tip excerpt"),
			FOnPostCheckOut(), /*out*/ CheckoutFailReason);
	}

	FArchive* FileWriter = IFileManager::Get().CreateFileWriter( *FileSource, EFileWrite::FILEWRITE_Append | EFileWrite::FILEWRITE_AllowRead | EFileWrite::FILEWRITE_EvenIfReadOnly );

	// BUGFIX: CreateFileWriter returns null when the file cannot be opened for writing
	// (e.g. locked, or read-only after a failed checkout); the original dereferenced it
	// unconditionally and crashed.
	if (FileWriter == nullptr)
	{
		return;
	}

	if (bNewFile)
	{
		// New files get the standard NoPublish UDN header before the first excerpt.
		FString UdnHeader;
		UdnHeader += "Availability:NoPublish";
		UdnHeader += LINE_TERMINATOR;
		UdnHeader += "Title:";
		UdnHeader += LINE_TERMINATOR;
		UdnHeader += "Crumbs:";
		UdnHeader += LINE_TERMINATOR;
		UdnHeader += "Description:";
		UdnHeader += LINE_TERMINATOR;

		FileWriter->Serialize( TCHAR_TO_ANSI( *UdnHeader ), UdnHeader.Len() );
	}

	// Build the [EXCERPT:<name>] ... [/EXCERPT:<name>] section around the tooltip text.
	FString NewExcerpt;
	NewExcerpt += LINE_TERMINATOR;
	NewExcerpt += "[EXCERPT:";
	NewExcerpt += InExcerptName;
	NewExcerpt += "]";
	NewExcerpt += LINE_TERMINATOR;

	NewExcerpt += TextContent.Get().ToString();
	NewExcerpt += LINE_TERMINATOR;

	NewExcerpt += "[/EXCERPT:";
	NewExcerpt += InExcerptName;
	NewExcerpt += "]";
	NewExcerpt += LINE_TERMINATOR;

	if (!bNewFile)
	{
		// Append after the existing content (clamped to non-negative just in case).
		FileWriter->Seek( FMath::Max( FileWriter->TotalSize(), (int64)0 ) );
	}

	FileWriter->Serialize( TCHAR_TO_ANSI( *NewExcerpt ), NewExcerpt.Len() );

	FileWriter->Close();
	delete FileWriter;

	if (bNewFile)
	{
		// Add the new file to source control now that it exists on disk.
		bCheckoutOrAddSucceeded = SourceControlHelpers::CheckoutOrMarkForAdd(FileSource,
			NSLOCTEXT("SToolTip", "DocumentationSCCActionDesc", "tool tip excerpt"),
			FOnPostCheckOut(), /*out*/ CheckoutFailReason);
	}

	ISourceCodeAccessModule& SourceCodeAccessModule = FModuleManager::LoadModuleChecked<ISourceCodeAccessModule>("SourceCodeAccess");
	SourceCodeAccessModule.GetAccessor().OpenFileAtLine(FileSource, 0);

	if (!bCheckoutOrAddSucceeded)
	{
		// Surface the checkout/add failure to the user as a transient notification.
		FNotificationInfo Info(CheckoutFailReason);
		Info.ExpireDuration = 3.0f;
		FSlateNotificationManager::Get().AddNotification(Info);
	}

	ReloadDocumentation();
}