void FAssetDataGatherer::SerializeCache(FArchive& Ar)
{
	double SerializeStartTime = FPlatformTime::Seconds();

	// serialize number of objects
	int32 LocalNumAssets = NewCachedAssetDataMap.Num();
	Ar << LocalNumAssets;

	if (Ar.IsSaving())
	{
		// save out by walking the TMap
		for (auto CacheIt = NewCachedAssetDataMap.CreateConstIterator(); CacheIt; ++CacheIt)
		{
			FName PackageName = CacheIt.Key();
			Ar << PackageName;
			Ar << *CacheIt.Value();
		}
	}
	else
	{
	// Pre-size the map for all cached asset data entries (avoids rehashing while adding tens of thousands of entries)
		DiskCachedAssetDataMap.Empty(LocalNumAssets);

		for (int32 AssetIndex = 0; AssetIndex < LocalNumAssets; ++AssetIndex)
		{
			// Load the name first to add the entry to the tmap below
			FName PackageName;
			Ar << PackageName;
			if (Ar.IsError())
			{
				// There was an error reading the cache. Bail out.
				break;
			}

			// Add to the cached map
			FDiskCachedAssetData& CachedAssetData = DiskCachedAssetDataMap.Add(PackageName);

			// Now load the data
			Ar << CachedAssetData;

			if (Ar.IsError())
			{
				// There was an error reading the cache. Bail out.
				break;
			}
		}

		// If there was an error loading the cache, abandon all data loaded from it so we can build a clean one.
		if (Ar.IsError())
		{
			UE_LOG(LogAssetRegistry, Error, TEXT("There was an error loading the asset registry cache. Generating a new one."));
			DiskCachedAssetDataMap.Empty();
		}
	}

	UE_LOG(LogAssetRegistry, Verbose, TEXT("Asset data gatherer serialized in %0.6f seconds"), FPlatformTime::Seconds() - SerializeStartTime);
}
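A minimal sketch of driving SerializeCache from a file archive (the wrapper function, the cache filename, and the call site's access to SerializeCache are assumptions, not part of the example above):

// Sketch only, not engine code: open a reader or writer and hand the archive to SerializeCache.
// Requires IFileManager from "HAL/FileManager.h".
void LoadOrSaveGathererCache(FAssetDataGatherer& Gatherer, const FString& CacheFilename, bool bSave)
{
	FArchive* Ar = bSave
		? IFileManager::Get().CreateFileWriter(*CacheFilename)
		: IFileManager::Get().CreateFileReader(*CacheFilename);
	if (Ar)
	{
		Gatherer.SerializeCache(*Ar);	// same entry point handles both saving and loading
		Ar->Close();
		delete Ar;
	}
}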
Example #2
void FAssetDataGatherer::SerializeCache(FArchive& Ar)
{
	double SerializeStartTime = FPlatformTime::Seconds();

	// serialize number of objects
	int32 LocalNumAssets = NewCachedAssetDataMap.Num();
	Ar << LocalNumAssets;

	if (Ar.IsSaving())
	{
		// save out by walking the TMap
		for (auto CacheIt = NewCachedAssetDataMap.CreateConstIterator(); CacheIt; ++CacheIt)
		{
			Ar << *CacheIt.Value();
		}
	}
	else
	{
		// allocate one single block for all asset data structs (to reduce tens of thousands of heap allocations)
		DiskCachedAssetDataMap.Empty(LocalNumAssets);
		DiskCachedAssetDataBuffer = new FDiskCachedAssetData[LocalNumAssets];

		for (int32 AssetIndex = 0; AssetIndex < LocalNumAssets; ++AssetIndex)
		{
			// make a new asset data object
			FDiskCachedAssetData* NewCachedAssetDataPtr = &DiskCachedAssetDataBuffer[AssetIndex];

			// load it
			Ar << *NewCachedAssetDataPtr;

			if (Ar.IsError())
			{
				// There was an error reading the cache. Bail out.
				break;
			}

			// hash it
			DiskCachedAssetDataMap.Add(NewCachedAssetDataPtr->PackageName, NewCachedAssetDataPtr);
		}

		// If there was an error loading the cache, abandon all data loaded from it so we can build a clean one.
		if (Ar.IsError())
		{
			UE_LOG(LogAssetRegistry, Error, TEXT("There was an error loading the asset registry cache. Generating a new one."));
			DiskCachedAssetDataMap.Empty();
			delete [] DiskCachedAssetDataBuffer;	// must be array delete to match the new[] above
			DiskCachedAssetDataBuffer = nullptr;
		}
	}

	UE_LOG(LogAssetRegistry, Verbose, TEXT("Asset data gatherer serialized in %0.6f seconds"), FPlatformTime::Seconds() - SerializeStartTime);
}
Example #3
bool FBuildPatchAppManifest::Serialize(FArchive& Ar)
{
	// Make sure we use the correct serialization version; this is now fixed and must never use a newer version,
	// because the property tag has changed in structure, meaning older clients would not read it correctly.
	Ar.SetUE4Ver(VER_UE4_STRUCT_GUID_IN_PROPERTY_TAG - 1);

	if (Ar.IsLoading())
	{
		DestroyData();
	}

	Data->Serialize(Ar);

	if (Ar.IsLoading())
	{
		// If we didn't load the version number, we know it was skipped when saving and therefore must be
		// the first UObject version
		if (Data->ManifestFileVersion == static_cast<uint8>(EBuildPatchAppManifestVersion::Invalid))
		{
			Data->ManifestFileVersion = EBuildPatchAppManifestVersion::StoredAsCompressedUClass;
		}

		// Setup internal lookups
		InitLookups();
	}

	return !Ar.IsError();
}
Example #4
bool FBuildPatchAppManifest::SaveToFile(const FString& Filename, bool bUseBinary)
{
	bool bSuccess = false;
	FArchive* FileOut = IFileManager::Get().CreateFileWriter(*Filename);
	if (FileOut)
	{
		if (bUseBinary)
		{
			Data->ManifestFileVersion = EBuildPatchAppManifestVersion::GetLatestVersion();
			FManifestWriter ManifestData;
			Serialize(ManifestData);
			ManifestData.Finalize();
			if (!ManifestData.IsError())
			{
				int32 DataSize = ManifestData.TotalSize();
				TArray<uint8> TempCompressed;
				TempCompressed.AddUninitialized(DataSize);
				int32 CompressedSize = DataSize;
				bool bDataIsCompressed = FCompression::CompressMemory(
					static_cast<ECompressionFlags>(COMPRESS_ZLIB | COMPRESS_BiasMemory),
					TempCompressed.GetData(),
					CompressedSize,
					ManifestData.GetBytes().GetData(),
					DataSize);
				TempCompressed.SetNum(CompressedSize);

				TArray<uint8>& FileData = bDataIsCompressed ? TempCompressed : ManifestData.GetBytes();

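				// Two-pass header write: serialize a default header first to measure its on-disk size,
				// fill in the real fields (including a hash of the payload), then seek back to the start and overwrite it.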
				FManifestFileHeader Header;
				*FileOut << Header;
				Header.HeaderSize = FileOut->Tell();
				Header.StoredAs = bDataIsCompressed ? EManifestFileHeader::STORED_COMPRESSED : EManifestFileHeader::STORED_RAW;
				Header.DataSize = DataSize;
				Header.CompressedSize = bDataIsCompressed ? CompressedSize : 0;
				FSHA1::HashBuffer(FileData.GetData(), FileData.Num(), Header.SHAHash.Hash);

				FileOut->Seek(0);
				*FileOut << Header;

				FileOut->Serialize(FileData.GetData(), FileData.Num());
				bSuccess = !FileOut->IsError();
			}
		}
		else
		{
			Data->ManifestFileVersion = EBuildPatchAppManifestVersion::GetLatestJsonVersion();
			FString JSONOutput;
			SerializeToJSON(JSONOutput);
			FTCHARToUTF8 JsonUTF8(*JSONOutput);
			FileOut->Serialize((UTF8CHAR*)JsonUTF8.Get(), JsonUTF8.Length() * sizeof(UTF8CHAR));
			bSuccess = !FileOut->IsError();
		}
		FileOut->Close();
		delete FileOut;
		FileOut = nullptr;
	}

	return bSuccess;
}
Example #5
bool FBuildPatchAppManifest::Serialize(FArchive& Ar)
{
	if (Ar.IsLoading())
	{
		DestroyData();
	}

	Data->Serialize(Ar);

	if (Ar.IsLoading())
	{
		InitLookups();
	}

	return !Ar.IsError();
}
Example #6
void SerializeChecksum(FArchive &Ar, uint32 x, bool ErrorOK)
{
	if (Ar.IsLoading() )
	{
		uint32 Magic = 0;
		Ar << Magic;
		if((!ErrorOK || !Ar.IsError()) && !ensure(Magic==x))
		{
			UE_LOG(LogCoreNet, Warning, TEXT("Checksum mismatch: read %u, expected %u"), Magic, x);
		}
		
	}
	else
	{
		uint32 Magic = x;
		Ar << Magic;
	}
}
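A short round-trip sketch (FMemoryWriter and FMemoryReader from "Serialization/MemoryWriter.h" / "Serialization/MemoryReader.h" are assumed available; the checksum value is arbitrary) showing how the ErrorOK flag tolerates a truncated stream:

// Sketch only, not engine code: write the checksum to a byte buffer, then read it back
// from a deliberately truncated copy.
TArray<uint8> Bytes;
FMemoryWriter Writer(Bytes);
SerializeChecksum(Writer, 0xABCD1234u, /*ErrorOK*/ false);	// writes the 4-byte magic

Bytes.SetNum(2);	// simulate a short / corrupt stream
FMemoryReader Reader(Bytes);
SerializeChecksum(Reader, 0xABCD1234u, /*ErrorOK*/ true);	// ErrorOK: no warning when the archive errors
check(Reader.IsError());	// FMemoryReader flags the attempted over-read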
Example #7
TSharedPtr<FVoicePacket> FOnlineVoiceSteam::SerializeRemotePacket(FArchive& Ar)
{
	TSharedPtr<FVoicePacketSteam> NewPacket = MakeShareable(new FVoicePacketSteam());
	NewPacket->Serialize(Ar);
	if (Ar.IsError() == false && NewPacket->GetBufferSize() > 0)
	{
		if (!SteamSubsystem->IsDedicated())
		{
			FUniqueNetIdMatcher PlayerMatch(*NewPacket->GetSender());
			if (MuteList.IndexOfByPredicate(PlayerMatch) == INDEX_NONE)
			{
				VoiceData.RemotePackets.Add(NewPacket);
			}
		}

		return NewPacket;
	}

	return NULL;
}
Example #8
TSharedPtr<FVoicePacket> FOnlineVoiceImpl::SerializeRemotePacket(FArchive& Ar)
{
	TSharedPtr<FVoicePacketImpl> NewPacket = MakeShareable(new FVoicePacketImpl());
	NewPacket->Serialize(Ar);
	if (Ar.IsError() == false && NewPacket->GetBufferSize() > 0)
	{
		if (!IsRunningDedicatedServer())
		{
			FUniqueNetIdMatcher PlayerMatch(*NewPacket->GetSender());
			if (MuteList.FindMatch(PlayerMatch) == INDEX_NONE)
			{
				VoiceData.RemotePackets.Add(NewPacket);
			}
		}

		return NewPacket;
	}

	return NULL;
}
Example #9
bool FBuildPatchAppManifest::Serialize(FArchive& Ar)
{
	if (Ar.IsLoading())
	{
		DestroyData();
	}

	Data->Serialize(Ar);

	if (Ar.IsLoading())
	{
		// If we didn't load the version number, we know it was skipped when saving and therefore must be
		// the first UObject version
		if (Data->ManifestFileVersion == static_cast<uint8>(EBuildPatchAppManifestVersion::Invalid))
		{
			Data->ManifestFileVersion = EBuildPatchAppManifestVersion::StoredAsCompressedUClass;
		}

		// Setup internal lookups
		InitLookups();
	}

	return !Ar.IsError();
}
Example #10
bool FGameplayAbilityTargetDataHandle::NetSerialize(FArchive& Ar, class UPackageMap* Map, bool& bOutSuccess)
{
	uint8 DataNum;
	if (Ar.IsSaving())
	{
		UE_CLOG(Data.Num() > MAX_uint8, LogAbilitySystem, Warning, TEXT("Too many TargetData sources (%d!) to net serialize. Clamping to %d"), Data.Num(), MAX_uint8);
		DataNum = FMath::Min<int32>( Data.Num(), MAX_uint8 );
	}
	Ar << DataNum;
	if (Ar.IsLoading())
	{
		Data.SetNumZeroed(DataNum);
	}

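	// Each element is written as its UScriptStruct (so the receiver knows which concrete
	// target-data type to reconstruct) followed by that struct's own net serialization.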
	for (int32 i = 0; i < DataNum && !Ar.IsError(); ++i)
	{
		UScriptStruct* ScriptStruct = Data[i].IsValid() ? Data[i]->GetScriptStruct() : NULL;
		Ar << ScriptStruct;

		if (ScriptStruct)
		{
			if (Ar.IsLoading())
			{
				// For now, just always reset/reallocate the data when loading.
				// Longer term if we want to generalize this and use it for property replication, we should support
				// only reallocating when necessary
				check(!Data[i].IsValid());

				FGameplayAbilityTargetData * NewData = (FGameplayAbilityTargetData*)FMemory::Malloc(ScriptStruct->GetCppStructOps()->GetSize());
				ScriptStruct->InitializeStruct(NewData);

				Data[i] = TSharedPtr<FGameplayAbilityTargetData>(NewData);
			}

			void* ContainerPtr = Data[i].Get();

			if (ScriptStruct->StructFlags & STRUCT_NetSerializeNative)
			{
				ScriptStruct->GetCppStructOps()->NetSerialize(Ar, Map, bOutSuccess, Data[i].Get());
			}
			else
			{
				// This won't work since UStructProperty::NetSerializeItem is deprecated.
				//	1) we have to manually crawl through the topmost struct's fields since we don't have a UStructProperty for it (just the UScriptProperty)
				//	2) if there are any UStructProperties in the topmost struct's fields, we will assert in UStructProperty::NetSerializeItem.

				ABILITY_LOG(Fatal, TEXT("FGameplayAbilityTargetDataHandle::NetSerialize called on data struct %s without a native NetSerialize"), *ScriptStruct->GetName());

				for (TFieldIterator<UProperty> It(ScriptStruct); It; ++It)
				{
					if (It->PropertyFlags & CPF_RepSkip)
					{
						continue;
					}

					void* PropertyData = It->ContainerPtrToValuePtr<void*>(ContainerPtr);

					It->NetSerializeItem(Ar, Map, PropertyData);
				}
			}
		}
	}

	//ABILITY_LOG(Warning, TEXT("FGameplayAbilityTargetDataHandle Serialized: %s"), ScriptStruct ? *ScriptStruct->GetName() : TEXT("NULL") );

	bOutSuccess = true;
	return true;
}
Example #11
// The cooked package asset registry saves information about all the cooked packages and the assets contained within,
// for stats purposes, in JSON format.
bool FChunkManifestGenerator::SaveCookedPackageAssetRegistry( const FString& SandboxCookedRegistryFilename, const bool Append )
{
	bool bSuccess = false;
	for ( const auto& Platform : Platforms )
	{
		TSet<FName> CookedPackages;

		// save the file 
		const FString CookedAssetRegistryFilename = SandboxCookedRegistryFilename.Replace(TEXT("[Platform]"), *Platform->PlatformName());

		FString JsonOutString;
		JsonWriter Json = TJsonWriterFactory<TCHAR, TPrettyJsonPrintPolicy<TCHAR> >::Create(&JsonOutString);

		Json->WriteObjectStart();
		Json->WriteArrayStart(TEXT("Packages"));

		for ( const auto& Package : AllCookedPackages )
		{
			Json->WriteObjectStart(); // unnamed package start
			const FName& PackageName = Package.Key;
			const FString& SandboxPath = Package.Value;

			CookedPackages.Add( PackageName );

			FString PlatformSandboxPath = SandboxPath.Replace(TEXT("[Platform]"), *Platform->PlatformName());
			
			FDateTime TimeStamp = IFileManager::Get().GetTimeStamp( *PlatformSandboxPath );

			Json->WriteValue( "SourcePackageName", PackageName.ToString() );
			Json->WriteValue( "CookedPackageName", PlatformSandboxPath );
			Json->WriteValue( "CookedPackageTimeStamp", TimeStamp.ToString() );
			
			Json->WriteArrayStart("AssetData");
			for (const auto& AssetData : AssetRegistryData)
			{	// Add only assets that have actually been cooked and belong to any chunk
				if (AssetData.ChunkIDs.Num() > 0 && (AssetData.PackageName == PackageName))
				{
					Json->WriteObjectStart();
					// save all of the asset's info
					Json->WriteValue(TEXT("ObjectPath"), AssetData.ObjectPath.ToString() );
					Json->WriteValue(TEXT("PackageName"), AssetData.PackageName.ToString() );
					Json->WriteValue(TEXT("PackagePath"), AssetData.PackagePath.ToString() );
					Json->WriteValue(TEXT("GroupNames"), AssetData.GroupNames.ToString() );
					Json->WriteValue(TEXT("AssetName"), AssetData.AssetName.ToString() );
					Json->WriteValue(TEXT("AssetClass"), AssetData.AssetClass.ToString() );
					Json->WriteObjectStart("TagsAndValues");
					for ( const auto& Tag : AssetData.TagsAndValues )
					{
						Json->WriteValue( Tag.Key.ToString(), Tag.Value );
					}
					Json->WriteObjectEnd(); // end tags and values object
					Json->WriteObjectEnd(); // end unnamed array object
				}
			}
			Json->WriteArrayEnd();
			Json->WriteObjectEnd(); // unnamed package
		}

		if ( Append )
		{
			FString JsonInString;
			if ( FFileHelper::LoadFileToString(JsonInString, *CookedAssetRegistryFilename) )
			{
				// load up previous package asset registry and fill in any packages which weren't recooked on this run
				JsonReader Reader = TJsonReaderFactory<TCHAR>::Create(JsonInString);
				TSharedPtr<FJsonObject> JsonObject;
				bool shouldRead = FJsonSerializer::Deserialize(Reader, JsonObject) && JsonObject.IsValid() && JsonObject->HasTypedField<EJson::Array>(TEXT("Packages"));
				if ( shouldRead )
				{
					TArray<TSharedPtr<FJsonValue>> PackageList = JsonObject->GetArrayField(TEXT("Packages"));
					for (auto PackageListIt = PackageList.CreateConstIterator(); PackageListIt && shouldRead; ++PackageListIt)
					{
						const TSharedPtr<FJsonValue>& JsonValue = *PackageListIt;
						shouldRead = JsonValue->Type == EJson::Object;
						if ( shouldRead )
						{
							const TSharedPtr<FJsonObject>& JsonPackage = JsonValue->AsObject();

							// get the package name and see if we have already written it out this run
							
							FString CookedPackageName;
							verify( JsonPackage->TryGetStringField(TEXT("SourcePackageName"), CookedPackageName) );

							const FName CookedPackageFName(*CookedPackageName);
							if ( CookedPackages.Contains(CookedPackageFName))
							{
								// don't need to process this package
								continue;
							}


							// check that the on-disk cooked file is still valid
							FString CookedPackagePath;
							check( JsonPackage->TryGetStringField( TEXT("CookedPackageName"), CookedPackagePath) );

							// if the cooked file's timestamp differs from the saved one, don't copy the information over
							FDateTime CurrentTimeStamp = IFileManager::Get().GetTimeStamp( *CookedPackagePath );

							FString SavedTimeString;
							check( JsonPackage->TryGetStringField(TEXT("CookedPackageTimeStamp"), SavedTimeString) );
							FDateTime SavedTimeStamp;
							FDateTime::Parse(SavedTimeString, SavedTimeStamp);

							if ( SavedTimeStamp != CurrentTimeStamp )
							{
								continue;
							}



							CopyJsonValueToWriter(Json, FString(), JsonValue);
							// read in all the other stuff and copy it over to the new registry
							/*Json->WriteObjectStart(); // open package

							// copy all the values over
							for ( const auto& JsonPackageValue : JsonPackage->Values)
							{
								CopyJsonValueToWriter(Json, JsonPackageValue.Key, JsonPackageValue.Value);
							}

							Json->WriteObjectEnd();*/
						}
						
					}
				}
				else
				{
					UE_LOG(LogChunkManifestGenerator, Warning, TEXT("Unable to read %s, or the JSON is in an invalid format"), *CookedAssetRegistryFilename);
				}
			}
		}


		Json->WriteArrayEnd();
		Json->WriteObjectEnd();

		if (Json->Close())
		{
			FArchive* ItemTemplatesFile = IFileManager::Get().CreateFileWriter(*CookedAssetRegistryFilename);
			if (ItemTemplatesFile)
			{
				// serialize the file contents
				TStringConversion<FTCHARToUTF8_Convert> Convert(*JsonOutString);
				ItemTemplatesFile->Serialize(const_cast<ANSICHAR*>(Convert.Get()), Convert.Length());
				ItemTemplatesFile->Close();
				if ( !ItemTemplatesFile->IsError() )
				{
					bSuccess = true;
				}
				else
				{
					UE_LOG(LogChunkManifestGenerator, Error, TEXT("Unable to write to %s"), *CookedAssetRegistryFilename);
				}
				delete ItemTemplatesFile;
			}
			else
			{
				UE_LOG(LogChunkManifestGenerator, Error, TEXT("Unable to open %s for writing."), *CookedAssetRegistryFilename);
			}
		}
		else
		{
			UE_LOG(LogChunkManifestGenerator, Error, TEXT("Error closing Json Writer"));
		}
	}
	return bSuccess;
}
Example #12
void USetupDefinition::ProcessCopy( FString Key, FString Value, UBOOL Selected, FInstallPoll* Poll )
{
	guard(USetupDefinition::ProcessCopy);
	BYTE Buffer[4096];
	if( Selected && Key==TEXT("File") )
	{
		// Get source and dest filenames.
		FFileInfo Info(*Value);
		if( Info.Lang==TEXT("") || Info.Lang==UObject::GetLanguage() )
		{
			if( Info.Dest==TEXT("") )
				Info.Dest = Info.Src;
			if( !LocateSourceFile(Info.Src) )
				LocalizedFileError( TEXT("MissingInstallerFile"), Patch ? TEXT("AdviseBadDownload") : TEXT("AdviseBadMedia"), *Info.Src );
			FString FullDest  = DestPath * Info.Dest;
			FString FullSrc   = Info.Ref==TEXT("") ? Info.Src : GetFullRef(*Info.Ref);
			FString FullPatch = FullDest + TEXT("_tmp");

			// Update uninstallation log.
			UninstallLogAdd( TEXT("File"), *Info.Dest, 0, 1 );

			// Make destination directory.
			if( !GFileManager->MakeDirectory( *BasePath(FullDest), 1 ) )
				LocalizedFileError( TEXT("FailedMakeDir"), TEXT("AdviseBadDest"), *FullDest );

			// Status display.
			if( !Poll->Poll(*FullDest,0,0,RunningBytes,TotalBytes) )
				DidCancel();

			// Copy SrcAr -> DestAr.
			INT CalcOldCRC = 0;
			guard(CopyFile);
			FString ThisDest = Info.Ref==TEXT("") ? FullDest : FullPatch;
			debugf( TEXT("Copying %s to %s"), *FullSrc, *ThisDest);
			FArchive* SrcAr = GFileManager->CreateFileReader( *FullSrc );
			if( !SrcAr )
				LocalizedFileError( TEXT("FailedOpenSource"), Patch ? TEXT("AdviseBadDownload") : TEXT("AdviseBadMedia"), *FullSrc );
			INT Size = SrcAr->TotalSize();
			FArchive* DestAr = GFileManager->CreateFileWriter( *ThisDest, FILEWRITE_EvenIfReadOnly );
			if( !DestAr )
				LocalizedFileError( TEXT("FailedOpenDest"), TEXT("AdviseBadDest"), *ThisDest );

			if( FullSrc.Right(3).Caps() == TEXT(".UZ") && ThisDest.Right(3).Caps() != TEXT(".UZ"))
			{
				INT Signature;
				FString OrigFilename;
				*SrcAr << Signature;
				if( Signature != 5678 )
					LocalizedFileError( TEXT("FailedOpenSource"), TEXT("AdviseBadMedia"), *FullSrc );
				else
				{
					*SrcAr << OrigFilename;
					FCodecFull Codec;
					Codec.AddCodec(new FCodecRLE);
					Codec.AddCodec(new FCodecBWT);
					Codec.AddCodec(new FCodecMTF);
					Codec.AddCodec(new FCodecRLE);
					Codec.AddCodec(new FCodecHuffman);
					Codec.Decode( *SrcAr, *DestAr );
					if( !Poll->Poll(*FullDest,Size,Size,RunningBytes+=Size,TotalBytes) )
					{
						delete SrcAr;
						delete DestAr;
						DidCancel();
					}
				}
			}
			else
			{
				for( SQWORD Pos=0; Pos<Size; Pos+=sizeof(Buffer) )
				{
					INT Count = Min( Size-Pos, (SQWORD)sizeof(Buffer) );
					SrcAr->Serialize( Buffer, Count );
					if( SrcAr->IsError() )
					{
						delete SrcAr;
						delete DestAr;
						LocalizedFileError( TEXT("FailedReadingSource"), Patch ? TEXT("AdviseBadDownload") : TEXT("AdviseBadMedia"), *FullSrc );
					}
					if( Info.Ref!=TEXT("") )
					{
						CalcOldCRC = appMemCrc( Buffer, Count, CalcOldCRC );
					}
					DestAr->Serialize( Buffer, Count );
					if( DestAr->IsError() )
					{
						delete SrcAr;
						delete DestAr;
						LocalizedFileError( TEXT("FailedWritingDest"), TEXT("AdviseBadDest"), *ThisDest );
					}
					if( !Poll->Poll(*FullDest,Pos,Size,RunningBytes+=Count,TotalBytes) )
					{
						delete SrcAr;
						delete DestAr;
						DidCancel();
					}
				}
			}
			delete SrcAr;
			if( !DestAr->Close() )
				LocalizedFileError( TEXT("FailedClosingDest"), TEXT("AdviseBadDest"), *ThisDest );
			delete DestAr;
			unguard;

			// Patch SrcAr + DeltaFile -> DestAr.
			if( Info.Ref!=TEXT("") )
			{
				guard(PatchFile);
				BYTE Buffer[4096];

				// Open files.
				FString ThisSrc = FullPatch;
				FArchive* SrcAr = GFileManager->CreateFileReader( *ThisSrc );
				if( !SrcAr )
					LocalizedFileError( TEXT("FailedOpenSource"), Patch ? TEXT("AdviseBadDownload") : TEXT("AdviseBadMedia"), *ThisSrc );
				INT Size = SrcAr->TotalSize();
				FArchive* DestAr = GFileManager->CreateFileWriter(*FullDest,FILEWRITE_EvenIfReadOnly);
				if( !DestAr )
					LocalizedFileError( TEXT("FailedOpenDest"), TEXT("AdviseBadDest"), *FullDest );

				// Load delta file.
				TArray<BYTE> Delta;
				FString DeltaName = Info.Src;
				if( !appLoadFileToArray( Delta, *DeltaName ) )
					LocalizedFileError( TEXT("FailedLoadingUpdate"), TEXT("AdviseBadDownload"), *Info.Src );
				debugf( TEXT("Patching %s to %s with %s"), *ThisSrc, *FullDest, *DeltaName );

				// Decompress variables.
				INT PrevSpot=0, CountSize=0, CRC=0;
				INT Magic=0, OldSize=0, OldCRC=0, NewSize=0, NewCRC;
				FBufferReader Reader( Delta );
				Reader << Magic << OldSize << OldCRC << NewSize << NewCRC;

				// Validate.
				if( Magic!=0x92f92912 )
					appErrorf( LineFormat(LocalizeError("PatchCorrupt")), *DeltaName, LocalizeError("AdviseBadDownload") );
				if( OldSize!=Size || OldCRC!=CalcOldCRC )
					appErrorf( LocalizeError("CdFileMismatch"), *Info.Ref, *LocalProduct );

				// Delta decode it.
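				// Each record is either a literal run (negative Index: copy -Index bytes straight from the delta stream)
				// or a back-reference (non-negative Index: copy Index bytes from the old file at CopyPos, encoded relative to PrevSpot).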
				INT OldCountSize=0;
				while( !Reader.AtEnd() )
				{
					INT Index;
					Reader << AR_INDEX(Index);
					if( Index<0 )
					{
						CRC = appMemCrc( &Delta(Reader.Tell()), -Index, CRC );
						DestAr->Serialize( &Delta(Reader.Tell()), -Index );
						if( DestAr->IsError() )
							LocalizedFileError( TEXT("FailedWritingDest"), TEXT("AdviseBadDest"), *FullDest );
						Reader.Seek( Reader.Tell() - Index );
						CountSize -= Index;
					}
					else
					{
						INT CopyPos;
						Reader << AR_INDEX(CopyPos);
						CopyPos += PrevSpot;
						check(CopyPos>=0);
						check(CopyPos+Index<=Size);
						SrcAr->Seek( CopyPos );
						for( INT Base=Index; Base>0; Base-=sizeof(Buffer) )
						{
							INT Move = Min(Base,(INT)sizeof(Buffer));
							SrcAr->Serialize( Buffer, Move );
							if( SrcAr->IsError() )
								LocalizedFileError( TEXT("FailedReadingSource"), Patch ? TEXT("AdviseBadDownload") : TEXT("AdviseBadMedia"), *ThisSrc );
							CRC = appMemCrc( Buffer, Move, CRC );
							DestAr->Serialize( Buffer, Move );
							if( DestAr->IsError() )
								LocalizedFileError( TEXT("FailedWritingDest"), TEXT("AdviseBadDest"), *FullDest );
						}
						CountSize += Index;
						PrevSpot = CopyPos + Index;
					}
					if( ((CountSize^OldCountSize)&~(sizeof(Buffer)-1)) || Reader.AtEnd() )
					{
						if( !Poll->Poll(*FullDest,CountSize,Info.Size,RunningBytes+=(CountSize-OldCountSize),TotalBytes) )
						{
							delete SrcAr;
							delete DestAr;
							DidCancel();
						}
						OldCountSize = CountSize;
					}
				}
				if( NewSize!=CountSize || NewCRC!=CRC )
					appErrorf( LineFormat(LocalizeError("PatchCorrupt")), *DeltaName, LocalizeError("AdviseBadDownload") );
				delete SrcAr;
				if( !DestAr->Close() )
					LocalizedFileError( TEXT("FailedClosingDest"), TEXT("AdviseBadDest"), *FullDest );
				delete DestAr;
				GFileManager->Delete( *ThisSrc );
				unguard;
			}
		}
	}
	unguard;
}
Example #13
uint32 FFileManagerGeneric::CopyWithProgress( const TCHAR* InDestFile, const TCHAR* InSrcFile, bool ReplaceExisting, bool EvenIfReadOnly, bool Attributes, FCopyProgress* Progress )
{
	uint32	Result = COPY_OK;

	// Direct file copier.
	if( Progress->Poll( 0.0 ) )
	{
		FString SrcFile		= InSrcFile;
		FString DestFile	= InDestFile;
	
		FArchive* Src = CreateFileReader( *SrcFile );
		if( !Src )
		{
			Result = COPY_Fail;
		}
		else
		{
			FArchive* Dest = CreateFileWriter( *DestFile,( ReplaceExisting ? 0 : FILEWRITE_NoReplaceExisting ) | ( EvenIfReadOnly ? FILEWRITE_EvenIfReadOnly : 0 ) );
			if( !Dest )
			{
				Result = COPY_Fail;
			}
			else
			{
				int64 Size = Src->TotalSize();
				int64 Percent = 0, NewPercent = 0;
				uint8* Buffer = new uint8[COPYBLOCKSIZE];
				// Note: Buffer is heap-allocated, so sizeof(Buffer) is just the pointer size; step by COPYBLOCKSIZE instead.
				for( int64 Total = 0; Total < Size; Total += COPYBLOCKSIZE )
				{
					int64 Count = FMath::Min( Size - Total, (int64)COPYBLOCKSIZE );
					Src->Serialize( Buffer, Count );
					if( Src->IsError() )
					{
						Result = COPY_Fail;
						break;
					}
					Dest->Serialize( Buffer, Count );
					if( Dest->IsError() )
					{
						Result = COPY_Fail;
						break;
					}
					NewPercent = Total * 100 / Size;
					if( Progress && Percent != NewPercent && !Progress->Poll( ( float )NewPercent / 100.f ) )
					{
						Result = COPY_Canceled;
						break;
					}
					Percent = NewPercent;
				}
				delete [] Buffer;
				if( Result == COPY_OK && !Dest->Close() )
				{
					Result = COPY_Fail;
				}
				delete Dest;
				if( Result != COPY_OK )
				{
					Delete( *DestFile );
				}
			}
			if( Result == COPY_OK && !Src->Close() )
			{
				Result = COPY_Fail;
			}
			delete Src;
		}
		if( Progress && Result==COPY_OK && !Progress->Poll( 1.0 ) )
		{
			Result = COPY_Canceled;
		}
	}
	else
	{
		Result = COPY_Canceled;
	}

	return Result;
}
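A caller-side sketch (the class name, log category, and paths are placeholders; FCopyProgress is the callback interface polled above, and IFileManager::Copy is how a progress object typically reaches CopyWithProgress):

// Sketch only, not engine code: a progress callback that logs percentage and never cancels.
class FLoggingCopyProgress : public FCopyProgress
{
public:
	virtual bool Poll(float Fraction) override
	{
		UE_LOG(LogTemp, Log, TEXT("Copy progress: %.0f%%"), Fraction * 100.0f);
		return true;	// returning false cancels the copy (COPY_Canceled)
	}
};

// Usage:
FLoggingCopyProgress Progress;
uint32 Result = IFileManager::Get().Copy(TEXT("D:/Dest.bin"), TEXT("D:/Src.bin"),
	/*Replace*/ true, /*EvenIfReadOnly*/ false, /*Attributes*/ false, &Progress);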