bool operator<<(FArchive& Ar, FVertexFactoryParameterRef& Ref)
{
	bool bShaderHasOutdatedParameters = false;

	Ar << Ref.VertexFactoryType;

	uint8 ShaderFrequencyByte = Ref.ShaderFrequency;
	Ar << ShaderFrequencyByte;
	if (Ar.IsLoading())
	{
		Ref.ShaderFrequency = (EShaderFrequency)ShaderFrequencyByte;
	}

	Ar << Ref.VFHash;

	if (Ar.IsLoading())
	{
		delete Ref.Parameters;
		if (Ref.VertexFactoryType)
		{
			Ref.Parameters = Ref.VertexFactoryType->CreateShaderParameters(Ref.ShaderFrequency);
		}
		else
		{
			bShaderHasOutdatedParameters = true;
			Ref.Parameters = NULL;
		}
	}

	// Need to be able to skip over parameters for no longer existing vertex factories.
	int32 SkipOffset = Ar.Tell();
	{
		FArchive::FScopeSetDebugSerializationFlags S(Ar, DSF_IgnoreDiff);
		// Write placeholder.
		Ar << SkipOffset;
	}

	if (Ref.Parameters)
	{
		Ref.Parameters->Serialize(Ar);
	}
	else if (Ar.IsLoading())
	{
		Ar.Seek(SkipOffset);
	}

	if (Ar.IsSaving())
	{
		int32 EndOffset = Ar.Tell();
		Ar.Seek(SkipOffset);
		Ar << EndOffset;
		Ar.Seek(EndOffset);
	}

	return bShaderHasOutdatedParameters;
}
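// [Illustrative sketch, not engine code] The function above uses a
// write-placeholder/back-patch skip offset so loaders can jump past parameter
// blocks for vertex factories that no longer exist. A minimal, hypothetical
// distillation of that pattern, assuming only FArchive's Tell/Seek/operator<<
// and a caller-supplied payload serializer:
void SerializeWithSkipOffset(FArchive& Ar, bool bSkipPayloadOnLoad, TFunctionRef<void(FArchive&)> SerializePayload)
{
	int32 SkipOffset = Ar.Tell();
	Ar << SkipOffset;          // placeholder when saving; the stored end offset when loading

	if (Ar.IsLoading() && bSkipPayloadOnLoad)
	{
		Ar.Seek(SkipOffset);   // jump straight past the unreadable payload
		return;
	}

	SerializePayload(Ar);

	if (Ar.IsSaving())
	{
		int32 EndOffset = Ar.Tell();
		Ar.Seek(SkipOffset);   // rewind to the placeholder
		Ar << EndOffset;       // overwrite it with the real end position
		Ar.Seek(EndOffset);    // resume writing after the payload
	}
}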
bool FBuildPatchAppManifest::SaveToFile(const FString& Filename, bool bUseBinary)
{
	bool bSuccess = false;
	FArchive* FileOut = IFileManager::Get().CreateFileWriter(*Filename);
	if (FileOut)
	{
		if (bUseBinary)
		{
			Data->ManifestFileVersion = EBuildPatchAppManifestVersion::GetLatestVersion();
			FManifestWriter ManifestData;
			Serialize(ManifestData);
			ManifestData.Finalize();
			if (!ManifestData.IsError())
			{
				int32 DataSize = ManifestData.TotalSize();
				TArray<uint8> TempCompressed;
				TempCompressed.AddUninitialized(DataSize);
				int32 CompressedSize = DataSize;
				bool bDataIsCompressed = FCompression::CompressMemory(
					static_cast<ECompressionFlags>(COMPRESS_ZLIB | COMPRESS_BiasMemory),
					TempCompressed.GetData(),
					CompressedSize,
					ManifestData.GetBytes().GetData(),
					DataSize);
				TempCompressed.SetNum(CompressedSize);
				TArray<uint8>& FileData = bDataIsCompressed ? TempCompressed : ManifestData.GetBytes();

				FManifestFileHeader Header;
				*FileOut << Header;
				Header.HeaderSize = FileOut->Tell();
				Header.StoredAs = bDataIsCompressed ? EManifestFileHeader::STORED_COMPRESSED : EManifestFileHeader::STORED_RAW;
				Header.DataSize = DataSize;
				Header.CompressedSize = bDataIsCompressed ? CompressedSize : 0;
				FSHA1::HashBuffer(FileData.GetData(), FileData.Num(), Header.SHAHash.Hash);

				FileOut->Seek(0);
				*FileOut << Header;
				FileOut->Serialize(FileData.GetData(), FileData.Num());
				bSuccess = !FileOut->IsError();
			}
		}
		else
		{
			Data->ManifestFileVersion = EBuildPatchAppManifestVersion::GetLatestJsonVersion();
			FString JSONOutput;
			SerializeToJSON(JSONOutput);
			FTCHARToUTF8 JsonUTF8(*JSONOutput);
			FileOut->Serialize((UTF8CHAR*)JsonUTF8.Get(), JsonUTF8.Length() * sizeof(UTF8CHAR));
		}
		FileOut->Close();
		delete FileOut;
		FileOut = nullptr;
	}
	return bSuccess;
}
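// [Illustrative sketch, not engine code] SaveToFile above writes the header
// twice: once as a default-constructed placeholder so Tell() reveals the
// serialized header size, then again at offset 0 once the real field values
// are known. A hypothetical FExampleHeader shows the shape of the idiom:
struct FExampleHeader
{
	int32 HeaderSize = 0;
	int32 DataSize = 0;

	friend FArchive& operator<<(FArchive& Ar, FExampleHeader& Header)
	{
		Ar << Header.HeaderSize << Header.DataSize;
		return Ar;
	}
};

void WriteHeaderThenData(FArchive& FileOut, TArray<uint8>& FileData)
{
	FExampleHeader Header;
	FileOut << Header;                          // pass 1: placeholder, measures the header
	Header.HeaderSize = (int32)FileOut.Tell();  // header size == bytes written so far
	Header.DataSize = FileData.Num();
	FileOut.Seek(0);
	FileOut << Header;                          // pass 2: rewrite with real values
	FileOut.Serialize(FileData.GetData(), FileData.Num());
}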
bool CopyCompressedFileToPak(FArchive& InPak, const FString& InMountPoint, const FPakInputPair& InFile, const FCompressedFileBuffer& CompressedFile, FPakEntryPair& OutNewEntry)
{
	if (CompressedFile.TotalCompressedSize == 0)
	{
		return false;
	}

	int64 HeaderTell = InPak.Tell();
	OutNewEntry.Info.CompressionMethod = CompressedFile.FileCompressionMethod;
	OutNewEntry.Info.CompressionBlocks.AddUninitialized(CompressedFile.CompressedBlocks.Num());

	int64 TellPos = InPak.Tell() + OutNewEntry.Info.GetSerializedSize(FPakInfo::PakFile_Version_Latest);
	const TArray<FPakCompressedBlock>& Blocks = CompressedFile.CompressedBlocks;
	for (int32 BlockIndex = 0, BlockCount = CompressedFile.CompressedBlocks.Num(); BlockIndex < BlockCount; ++BlockIndex)
	{
		OutNewEntry.Info.CompressionBlocks[BlockIndex].CompressedStart = Blocks[BlockIndex].CompressedStart + TellPos;
		OutNewEntry.Info.CompressionBlocks[BlockIndex].CompressedEnd = Blocks[BlockIndex].CompressedEnd + TellPos;
	}

	if (InFile.bNeedEncryption)
	{
		FAES::EncryptData(CompressedFile.CompressedBuffer.Get(), CompressedFile.TotalCompressedSize);
	}

	// Hash the final buffer that's written
	FSHA1 Hash;
	Hash.Update(CompressedFile.CompressedBuffer.Get(), CompressedFile.TotalCompressedSize);
	Hash.Final();

	// Update file size & hash
	OutNewEntry.Info.CompressionBlockSize = CompressedFile.FileCompressionBlockSize;
	OutNewEntry.Info.UncompressedSize = CompressedFile.OriginalSize;
	OutNewEntry.Info.Size = CompressedFile.TotalCompressedSize;
	Hash.GetHash(OutNewEntry.Info.Hash);

	// Write the header, then the data
	OutNewEntry.Filename = InFile.Dest.Mid(InMountPoint.Len());
	OutNewEntry.Info.Offset = 0; // Don't serialize offsets here.
	OutNewEntry.Info.bEncrypted = InFile.bNeedEncryption;
	OutNewEntry.Info.Serialize(InPak, FPakInfo::PakFile_Version_Latest);
	InPak.Serialize(CompressedFile.CompressedBuffer.Get(), CompressedFile.TotalCompressedSize);

	return true;
}
void SkipLazyArray(FArchive &Ar)
{
	guard(SkipLazyArray);
	assert(Ar.IsLoading);
	int pos;
	Ar << pos;
	assert(Ar.Tell() < pos);
	Ar.Seek(pos);
	unguard;
}
/**
 * Loads the data from disk into the specified memory block. This requires us still being attached to an
 * archive we can use for serialization.
 *
 * @param Dest Memory to serialize data into
 */
void FUntypedBulkData::LoadDataIntoMemory( void* Dest )
{
#if WITH_EDITOR
	checkf( AttachedAr, TEXT( "Attempted to load bulk data without an attached archive. Most likely the bulk data was loaded twice on console, which is not supported" ) );

	// Keep track of current position in file so we can restore it later.
	int64 PushedPos = AttachedAr->Tell();
	// Seek to the beginning of the bulk data in the file.
	AttachedAr->Seek( BulkDataOffsetInFile );

	SerializeBulkData( *AttachedAr, Dest );

	// Restore file pointer.
	AttachedAr->Seek( PushedPos );
#else
	bool bWasLoadedSuccessfully = false;
	if (IsInGameThread() && Linker.IsValid())
	{
		ULinkerLoad* LinkerLoad = Linker.Get();
		if ( LinkerLoad && LinkerLoad->Loader && !LinkerLoad->IsCompressed() )
		{
			FArchive* Ar = LinkerLoad;
			// keep track of current position in this archive
			int64 CurPos = Ar->Tell();
			// Seek to the beginning of the bulk data in the file.
			Ar->Seek( BulkDataOffsetInFile );
			// serialize the bulk data
			SerializeBulkData( *Ar, Dest );
			// seek back to the position the archive was at before
			Ar->Seek(CurPos);
			// note that we loaded it
			bWasLoadedSuccessfully = true;
		}
	}
	// if we weren't able to load via linker, load directly by filename
	if (!bWasLoadedSuccessfully)
	{
		// load from the specified filename when the linker has been cleared
		checkf( Filename != TEXT(""), TEXT( "Attempted to load bulk data without a proper filename." ) );

		FArchive* Ar = IFileManager::Get().CreateFileReader(*Filename, FILEREAD_Silent);
		checkf( Ar != NULL, TEXT( "Attempted to load bulk data from an invalid filename '%s'." ), *Filename );

		// Seek to the beginning of the bulk data in the file.
		Ar->Seek( BulkDataOffsetInFile );
		SerializeBulkData( *Ar, Dest );
		delete Ar;
	}
#endif // WITH_EDITOR
}
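// [Illustrative sketch, not engine code] LoadDataIntoMemory above is built on
// the push/seek/pop idiom: remember the current archive position, jump to an
// out-of-band payload, read it, then restore the position so the caller's
// serialization continues unaffected. PayloadOffset/Size are assumed inputs:
void ReadOutOfBand(FArchive& Ar, int64 PayloadOffset, void* Dest, int64 Size)
{
	const int64 PushedPos = Ar.Tell(); // remember where the caller was
	Ar.Seek(PayloadOffset);            // jump to the payload
	Ar.Serialize(Dest, Size);          // read it
	Ar.Seek(PushedPos);                // restore the caller's position
}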
uint8 FBuildPatchUtils::VerifyFile(const FString& FileToVerify, const FSHAHashData& Hash1, const FSHAHashData& Hash2, FBuildPatchFloatDelegate ProgressDelegate, FBuildPatchBoolRetDelegate ShouldPauseDelegate, double& TimeSpentPaused)
{
	uint8 ReturnValue = 0;
	FArchive* FileReader = IFileManager::Get().CreateFileReader(*FileToVerify);
	ProgressDelegate.ExecuteIfBound(0.0f);
	if (FileReader != NULL)
	{
		FSHA1 HashState;
		FSHAHashData HashValue;
		const int64 FileSize = FileReader->TotalSize();
		uint8* FileReadBuffer = new uint8[FileBufferSize];
		while (!FileReader->AtEnd() && !FBuildPatchInstallError::HasFatalError())
		{
			// Pause if necessary
			const double PrePauseTime = FPlatformTime::Seconds();
			double PostPauseTime = PrePauseTime;
			bool bShouldPause = ShouldPauseDelegate.IsBound() && ShouldPauseDelegate.Execute();
			while (bShouldPause && !FBuildPatchInstallError::HasFatalError())
			{
				FPlatformProcess::Sleep(0.1f);
				bShouldPause = ShouldPauseDelegate.Execute();
				PostPauseTime = FPlatformTime::Seconds();
			}
			// Count up pause time
			TimeSpentPaused += PostPauseTime - PrePauseTime;
			// Read file and update hash state
			const int64 SizeLeft = FileSize - FileReader->Tell();
			const uint32 ReadLen = FMath::Min< int64 >(FileBufferSize, SizeLeft);
			FileReader->Serialize(FileReadBuffer, ReadLen);
			HashState.Update(FileReadBuffer, ReadLen);
			const double FileSizeTemp = FileSize;
			const float Progress = 1.0f - ((SizeLeft - ReadLen) / FileSizeTemp);
			ProgressDelegate.ExecuteIfBound(Progress);
		}
		delete[] FileReadBuffer;
		HashState.Final();
		HashState.GetHash(HashValue.Hash);
		ReturnValue = (HashValue == Hash1) ? 1 : (HashValue == Hash2) ? 2 : 0;
		if (ReturnValue == 0)
		{
			GLog->Logf(TEXT("BuildDataGenerator: Verify failed on %s"), *FPaths::GetCleanFilename(FileToVerify));
		}
		FileReader->Close();
		delete FileReader;
	}
	else
	{
		GLog->Logf(TEXT("BuildDataGenerator: ERROR VerifyFile cannot open %s"), *FileToVerify);
	}
	ProgressDelegate.ExecuteIfBound(1.0f);
	return ReturnValue;
}
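// [Illustrative sketch, not engine code] The read loop in VerifyFile (and in
// VerifyChunkFile further below) follows one pattern: read fixed-size chunks
// until AtEnd(), clamping the final read to TotalSize() - Tell(). A
// hypothetical generalization with a caller-supplied chunk consumer:
void ForEachFileChunk(FArchive& Ar, int64 BufferSize, TFunctionRef<void(const uint8*, uint32)> Consume)
{
	TArray<uint8> Buffer;
	Buffer.AddUninitialized(BufferSize);
	const int64 FileSize = Ar.TotalSize();
	while (!Ar.AtEnd())
	{
		const int64 SizeLeft = FileSize - Ar.Tell(); // bytes not yet read
		const uint32 ReadLen = (uint32)FMath::Min<int64>(BufferSize, SizeLeft);
		Ar.Serialize(Buffer.GetData(), ReadLen);
		Consume(Buffer.GetData(), ReadLen);          // e.g. feed a hash state
	}
}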
void FShaderCache::Save(FArchive& Ar, const map<FGuid, FShader*>& InShaders)
{
	Ar << m_nPlatform;

	// serialize the global shader CRCs
	UINT NumShaderBuilderCRC = m_mapShaderBuilderCRC.size();
	Ar << NumShaderBuilderCRC;
	map<FShaderBuilder*, DWORD>::iterator it;
	for( it = m_mapShaderBuilderCRC.begin(); it != m_mapShaderBuilderCRC.end(); ++it )
	{
		FShaderBuilder* ShaderBuilder = it->first;
		Ar << ShaderBuilder;
		Ar << it->second;
	}

	// serialize the global shaders
	UINT NumShaders = InShaders.size();
	Ar << NumShaders;
	for( map<FGuid, FShader*>::const_iterator it = InShaders.begin(); it != InShaders.end(); ++it )
	{
		FShader* Shader = it->second;

		// Serialize the shader builder; on load it is used to detect whether this shader type still exists.
		FShaderBuilder* ShaderBuilder = Shader->GetShaderBuilder();
		FGuid ShaderId = Shader->GetId();
		Ar << ShaderBuilder << ShaderId;

		// Write a placeholder for now; it will record the end position of this shader's serialization.
		INT SkipOffset = Ar.Tell();
		Ar << SkipOffset;

		Shader->Serialize(Ar);

		INT EndOffset = Ar.Tell();
		Ar.Seek(SkipOffset);	// seek back to the placeholder
		Ar << EndOffset;		// record the end position of this shader
		Ar.Seek(EndOffset);		// return to the end position and continue with the next shader
	}
}
void UMeshAnimation::SerializeLineageMoves(FArchive &Ar)
{
	guard(UMeshAnimation::SerializeLineageMoves);
	if (Ar.ArVer < 123 || Ar.ArLicenseeVer < 0x19)
	{
		// standard UE2 format
		Ar << Moves;
		return;
	}
	assert(Ar.IsLoading);
	int pos, count;		// pos = global skip pos, count = data count
	Ar << pos << AR_INDEX(count);
	Moves.Empty(count);
	for (int i = 0; i < count; i++)
	{
		int localPos;
		Ar << localPos;
		MotionChunk *M = new(Moves) MotionChunk;
		Ar << *M;
		assert(Ar.Tell() == localPos);
	}
	assert(Ar.Tell() == pos);
	unguard;
}
//------------------------------------------------------------------------------
FStructScriptLoader::FStructScriptLoader(UStruct* TargetScriptContainer, FArchive& Ar)
	: BytecodeBufferSize(0)
	, SerializedScriptSize(0)
	, ScriptSerializationOffset(INDEX_NONE)
{
	if (!Ar.IsLoading())
	{
		return;
	}

	Ar << BytecodeBufferSize;
	Ar << SerializedScriptSize;

	if (SerializedScriptSize > 0)
	{
		ScriptSerializationOffset = Ar.Tell();
	}

	ClearScriptCode(TargetScriptContainer);
}
FCacheEntryMetadata* FRuntimeAssetCacheBackend::GetCachedData(const FName Bucket, const TCHAR* CacheKey, TArray<uint8>& OutData)
{
	FCacheEntryMetadata* Result = nullptr;
	FArchive* Ar = CreateReadArchive(Bucket, CacheKey);
	if (!Ar)
	{
		return Result;
	}

	Result = PreloadMetadata(Ar);

	int64 TotalSize = Ar->TotalSize();
	int64 CurrentPosition = Ar->Tell();
	int64 NumberOfBytesToSerialize = TotalSize - CurrentPosition;
	OutData.Reset();
	OutData.AddUninitialized(NumberOfBytesToSerialize);
	Ar->Serialize(OutData.GetData(), NumberOfBytesToSerialize);

	Ar->Close();
	delete Ar;
	return Result;
}
static void ReadTimeArray(FArchive &Ar, int NumKeys, TArray<float> &Times, int NumFrames)
{
	guard(ReadTimeArray);
	Times.Empty(NumKeys);
	if (NumKeys <= 1) return;

//	appPrintf("  pos=%4X keys (max=%X)[ ", Ar.Tell(), NumFrames);
	if (NumFrames < 256)
	{
		for (int k = 0; k < NumKeys; k++)
		{
			uint8 v;
			Ar << v;
			Times.Add(v);
//			if (k < 4 || k > NumKeys - 5) appPrintf(" %02X ", v);
//			else if (k == 4) appPrintf("...");
		}
	}
	else
	{
		for (int k = 0; k < NumKeys; k++)
		{
			uint16 v;
			Ar << v;
			Times.Add(v);
//			if (k < 4 || k > NumKeys - 5) appPrintf(" %04X ", v);
//			else if (k == 4) appPrintf("...");
		}
	}
//	appPrintf(" ]\n");

	// align to 4 bytes
	Ar.Seek(Align(Ar.Tell(), 4));
	unguard;
}
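// [Illustrative sketch, not engine code] The last line of ReadTimeArray shows
// how padding is consumed: Align() rounds the current position up to the next
// multiple of the alignment, and Seek() skips the padding bytes in one step.
void SkipPadding(FArchive& Ar, int Alignment)
{
	Ar.Seek(Align(Ar.Tell(), Alignment)); // e.g. Tell()=0x123, Alignment=4 -> Seek(0x124)
}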
bool FBuildPatchUtils::VerifyChunkFile( FArchive& ChunkFileData, bool bQuickCheck )
{
	const int64 FileSize = ChunkFileData.TotalSize();
	bool bSuccess = ChunkFileData.IsLoading();
	if ( !bSuccess )
	{
		GLog->Logf( TEXT( "BuildPatchServices: ERROR: VerifyChunkFile expected readonly archive" ) );
	}
	else
	{
		// Read the header
		FChunkHeader Header;
		ChunkFileData << Header;
		// Check header magic
		if ( !Header.IsValidMagic() )
		{
			bSuccess = false;
			GLog->Logf( TEXT( "BuildPatchServices: ERROR: VerifyChunkFile corrupt header" ) );
		}
		// Check header and data size
		if ( bSuccess && ( Header.HeaderSize + Header.DataSize ) != FileSize )
		{
			bSuccess = false;
			GLog->Logf( TEXT( "BuildPatchServices: ERROR: VerifyChunkFile header info does not match file size" ) );
		}
		if( bSuccess && !bQuickCheck )
		{
			// Hashes for checking data
			FSHA1 SHAHasher;
			FSHAHashData SHAHash;
			uint64 CycPoly64Hash = 0;
			// Load the data to check
			uint8* FileReadBuffer = new uint8[ FileBufferSize ];
			int64 DataOffset = 0;
			switch ( Header.StoredAs )
			{
			case FChunkHeader::STORED_RAW:
				while( !ChunkFileData.AtEnd() )
				{
					const int64 SizeLeft = FileSize - ChunkFileData.Tell();
					const uint32 ReadLen = FMath::Min< int64 >( FileBufferSize, SizeLeft );
					ChunkFileData.Serialize( FileReadBuffer, ReadLen );
					switch ( Header.HashType )
					{
					case FChunkHeader::HASH_ROLLING:
						CycPoly64Hash = FCycPoly64Hash::GetHashForDataSet(FileReadBuffer, ReadLen, CycPoly64Hash);
						break;
					case FChunkHeader::HASH_SHA1:
						SHAHasher.Update( FileReadBuffer, ReadLen );
						break;
					default:
						check( false ); // @TODO LSwift: Implement other storage methods!
						bSuccess = false;
						break;
					}
					DataOffset += ReadLen;
				}
				if( bSuccess )
				{
					switch ( Header.HashType )
					{
					case FChunkHeader::HASH_ROLLING:
						bSuccess = Header.RollingHash == CycPoly64Hash;
						break;
					case FChunkHeader::HASH_SHA1:
						SHAHasher.Final();
						SHAHasher.GetHash( SHAHash.Hash );
						bSuccess = SHAHash == Header.SHAHash;
						break;
					}
					if (!bSuccess)
					{
						GLog->Logf(TEXT("BuildPatchServices: ERROR: VerifyChunkFile file hashcheck failed"));
					}
				}
				break;
			default:
				GLog->Logf( TEXT( "BuildPatchServices: ERROR: VerifyChunkFile failed, unknown storage type" ) );
				bSuccess = false;
				break;
			}
			delete[] FileReadBuffer;
		}
	}
	return bSuccess;
}
void FBuildPatchChunkCache::ReserveChunkInventorySlotForce( const FGuid& ChunkGuid )
{
	// If already reserved, return immediately
	if( ChunkCache.HasReservation( ChunkGuid ) || ChunkCache.Contains( ChunkGuid ) )
	{
		return;
	}

	// Begin by checking if any slots can be freed
	ChunkCache.PurgeUnreferenced();

	// Try to add the reservation
	bool bReservationAccepted = ChunkCache.TryAddReservation( ChunkGuid );

	// If we couldn't reserve, we need to boot out a chunk for this required one
	if( bReservationAccepted == false )
	{
		// We create a unique ref array from the use order so that chunks not needed
		// for longer times end up nearer the bottom of the array
		TArray< FGuid > ChunkPriorityList;
		ChunkInfoLock.Lock();
		for( int32 ChunkUseOrderStackIdx = ChunkUseOrderStack.Num() - 1; ChunkUseOrderStackIdx >= 0 ; --ChunkUseOrderStackIdx )
		{
			ChunkPriorityList.AddUnique( ChunkUseOrderStack[ ChunkUseOrderStackIdx ] );
		}
		ChunkInfoLock.Unlock();

		// Starting at the bottom of the list, we look for a chunk that is contained in the cache
		for( int32 ChunkPriorityListIdx = ChunkPriorityList.Num() - 1; ChunkPriorityListIdx >= 0 && !bReservationAccepted; --ChunkPriorityListIdx )
		{
			const FGuid& LowPriChunk = ChunkPriorityList[ ChunkPriorityListIdx ];
			BuildProgress->WaitWhilePaused();
			// Check if there were any errors while paused, like canceling
			if( FBuildPatchInstallError::HasFatalError() )
			{
				return;
			}
			if( ChunkCache.Contains( LowPriChunk ) )
			{
				GWarn->Logf( TEXT( "FBuildPatchChunkCache: Booting chunk %s" ), *LowPriChunk.ToString() );

				// Save chunk to disk so we don't have to download again
				bool bSuccess = true;
				const FString NewChunkFilename = FBuildPatchUtils::GetChunkOldFilename( ChunkCacheStage, LowPriChunk );
				FChunkFile* LowPriChunkFile = ChunkCache.Get( LowPriChunk );
				FChunkHeader* LowPriChunkHeader;
				uint8* LowPriChunkData;
				LowPriChunkFile->GetDataLock( &LowPriChunkData, &LowPriChunkHeader );
				FArchive* FileOut = IFileManager::Get().CreateFileWriter( *NewChunkFilename );
				bSuccess = FileOut != NULL;
				const int32 LastError = FPlatformMisc::GetLastError();
				if( bSuccess )
				{
					// Setup header
					*FileOut << *LowPriChunkHeader;
					LowPriChunkHeader->HeaderSize = FileOut->Tell();
					LowPriChunkHeader->StoredAs = FChunkHeader::STORED_RAW;
					LowPriChunkHeader->DataSize = FBuildPatchData::ChunkDataSize; // This would change if compressing/encrypting

					// Write out file
					FileOut->Seek( 0 );
					*FileOut << *LowPriChunkHeader;
					FileOut->Serialize( LowPriChunkData, FBuildPatchData::ChunkDataSize );
					FileOut->Close();
					delete FileOut;
				}
				LowPriChunkFile->ReleaseDataLock();

				// Setup new chunk origin
				if( bSuccess )
				{
					ChunkOrigins[ LowPriChunk ] = EChunkOrigin::Harddisk;
				}
				else
				{
					// Queue download if save failed
					ChunkOrigins[ LowPriChunk ] = EChunkOrigin::Download;
					FBuildPatchDownloader::Get().AddChunkToDownload( LowPriChunk );
					FBuildPatchAnalytics::RecordChunkCacheError( ChunkGuid, NewChunkFilename, LastError, TEXT( "ChunkBooting" ), TEXT( "Chunk Save Failed" ) );
				}

				// Boot this chunk
				ChunkCache.Remove( LowPriChunk );

				// Try to get the reservation again!
				bReservationAccepted = ChunkCache.TryAddReservation( ChunkGuid );

				// Count the boot
				NumChunksCacheBooted.Increment();
			}
		}

		// We must have been able to make room
		check( bReservationAccepted );
	}
}
void UTexture::Serialize(FArchive &Ar)
{
	guard(UTexture::Serialize);
	Super::Serialize(Ar);
#if BIOSHOCK
	TRIBES_HDR(Ar, 0x2E);
	if (Ar.Game == GAME_Bioshock && t3_hdrSV >= 1)
		Ar << CachedBulkDataSize;
	if (Ar.Game == GAME_Bioshock && Format == 12)	// remap format; note: Bioshock used the 3DC name, but the real format is DXT5N
		Format = TEXF_DXT5N;
#endif // BIOSHOCK
#if SWRC
	if (Ar.Game == GAME_RepCommando)
	{
		if (Format == 14) Format = TEXF_CxV8U8;	//?? not verified
	}
#endif // SWRC
#if VANGUARD
	if (Ar.Game == GAME_Vanguard && Ar.ArVer >= 128 && Ar.ArLicenseeVer >= 25)
	{
		// has some table for fast mipmap lookups
		Ar.Seek(Ar.Tell() + 142);	// skip that table
		// serialize mips using AR_INDEX count (this game uses int for array counts in all other places)
		int Count;
		Ar << AR_INDEX(Count);
		Mips.AddDefaulted(Count);
		for (int i = 0; i < Count; i++)
			Ar << Mips[i];
		return;
	}
#endif // VANGUARD
#if AA2
	if (Ar.Game == GAME_AA2 && Ar.ArLicenseeVer >= 8)
	{
		int unk;	// always 10619
		Ar << unk;
	}
#endif // AA2
	Ar << Mips;
	if (Ar.Engine() == GAME_UE1)
	{
		// UE1
		bMasked = false;	// ignored by UE1, which uses surface.PolyFlags instead (but UE2 ignores PolyFlags ...)
		if (bHasComp)		// skip compressed mipmaps
		{
			TArray<FMipmap> CompMips;
			Ar << CompMips;
		}
	}
#if XIII
	if (Ar.Game == GAME_XIII)
	{
		if (Ar.ArLicenseeVer >= 42)
		{
			// serialize palette
			if (Format == TEXF_P8 || Format == 13)	// 13 == TEXF_P4
			{
				assert(!Palette);
				Palette = new UPalette;
				Ar << Palette->Colors;
			}
		}
		if (Ar.ArLicenseeVer >= 55)
			Ar.Seek(Ar.Tell() + 3);
	}
#endif // XIII
#if EXTEEL
	if (Ar.Game == GAME_Exteel)
	{
		// note: this property is serialized as a UObject property too
		byte MaterialType;	// enum GFMaterialType
		Ar << MaterialType;
	}
#endif // EXTEEL
	unguard;
}
static bool ReadXprFile(const CGameFileInfo *file)
{
	guard(ReadXprFile);

	FArchive *Ar = appCreateFileReader(file);

	int Tag, FileLen, DataStart, DataCount;
	*Ar << Tag << FileLen << DataStart << DataCount;
	//?? "XPR0" - xpr variant with a single object (texture) inside
	if (Tag != BYTES4('X','P','R','1'))
	{
#if XPR_DEBUG
		appPrintf("Unknown XPR tag in %s\n", file->RelativeName);
#endif
		delete Ar;
		return true;
	}
#if XPR_DEBUG
	appPrintf("Scanning %s ...\n", file->RelativeName);
#endif

	XprInfo *Info = new(xprFiles) XprInfo;
	Info->File      = file;
	Info->DataStart = DataStart;
	// read filelist
	int i;
	for (i = 0; i < DataCount; i++)
	{
		int NameOffset, DataOffset;
		*Ar << NameOffset << DataOffset;
		int savePos = Ar->Tell();
		Ar->Seek(NameOffset + 12);
		// read name
		char c, buf[256];
		int n = 0;
		while (true)
		{
			*Ar << c;
			if (n < ARRAY_COUNT(buf))
				buf[n++] = c;
			if (!c) break;
		}
		buf[ARRAY_COUNT(buf)-1] = 0;	// just in case
		// create item
		XprEntry *Entry = new(Info->Items) XprEntry;
		appStrncpyz(Entry->Name, buf, ARRAY_COUNT(Entry->Name));
		Entry->DataOffset = DataOffset + 12;
		assert(Entry->DataOffset < DataStart);
		// seek back
		Ar->Seek(savePos);
		// setup size of previous item
		if (i >= 1)
		{
			XprEntry *PrevEntry = &Info->Items[i - 1];
			PrevEntry->DataSize = Entry->DataOffset - PrevEntry->DataOffset;
		}
		// setup size of the last item
		if (i == DataCount - 1)
			Entry->DataSize = DataStart - Entry->DataOffset;
	}

	// scan data
	// data block is either embedded in this block or followed after DataStart position
	for (i = 0; i < DataCount; i++)
	{
		XprEntry *Entry = &Info->Items[i];
#if XPR_DEBUG
//		appPrintf("  %08X [%08X] %s\n", Entry->DataOffset, Entry->DataSize, Entry->Name);
#endif
		Ar->Seek(Entry->DataOffset);
		int id;
		*Ar << id;
		switch (id)
		{
		case 0x80020001:
			// header is 4 dwords + immediately followed data
			Entry->DataOffset += 4 * 4;
			Entry->DataSize   -= 4 * 4;
			break;
		case 0x00040001:
			// header is 5 dwords + external data
			{
				int pos;
				*Ar << pos;
				Entry->DataOffset = DataStart + pos;
			}
			break;
		case 0x00020001:
			// header is 4 dwords + external data
			{
				int d1, d2, pos;
				*Ar << d1 << d2 << pos;
				Entry->DataOffset = DataStart + pos;
			}
			break;
		default:
			// header is 2 dwords - offset and size + external data
			{
				int pos;
				*Ar << pos;
				Entry->DataOffset = DataStart + pos;
			}
			break;
		}
	}
	// setup sizes of blocks placed after DataStart (not embedded into file list)
	for (i = 0; i < DataCount; i++)
	{
		XprEntry *Entry = &Info->Items[i];
		if (Entry->DataOffset < DataStart)
			continue;	// embedded data
		// Entry points to a data block placed after DataStart position
		// we should find a next block
		int NextPos = FileLen;
		for (int j = i + 1; j < DataCount; j++)
		{
			XprEntry *NextEntry = &Info->Items[j];
			if (NextEntry->DataOffset < DataStart)
				continue;	// embedded data
			NextPos = NextEntry->DataOffset;
			break;
		}
		Entry->DataSize = NextPos - Entry->DataOffset;
	}
#if XPR_DEBUG
	for (i = 0; i < DataCount; i++)
	{
		XprEntry *Entry = &Info->Items[i];
		appPrintf("  %3d %08X [%08X] .. %08X  %s\n", i, Entry->DataOffset, Entry->DataSize, Entry->DataOffset + Entry->DataSize, Entry->Name);
	}
#endif

	delete Ar;
	return true;

	unguardf("%s", file->RelativeName);
}
//------------------------------------------------------------------------------
bool FStructScriptLoader::LoadStructWithScript(UStruct* DestScriptContainer, FArchive& Ar, bool bAllowDeferredSerialization)
{
	if (!Ar.IsLoading() || !IsPrimed() || GIsDuplicatingClassForReinstancing)
	{
		return false;
	}

	bool const bIsLinkerLoader = Ar.IsPersistent() && (Ar.GetLinker() != nullptr);
	int32 const ScriptEndOffset = ScriptSerializationOffset + SerializedScriptSize;

	// to help us move development forward (and not have to support ancient
	// script code), we define a minimum script version
	bool bSkipScriptSerialization = (Ar.UE4Ver() < VER_MIN_SCRIPTVM_UE4) || (Ar.LicenseeUE4Ver() < VER_MIN_SCRIPTVM_LICENSEEUE4);
#if WITH_EDITOR
	static const FBoolConfigValueHelper SkipByteCodeHelper(TEXT("StructSerialization"), TEXT("SkipByteCodeSerialization"));
	// in editor builds, we're going to regenerate the bytecode anyways, so it
	// is a waste of cycles to try and serialize it in
	bSkipScriptSerialization |= (bool)SkipByteCodeHelper;
#endif // WITH_EDITOR
	bSkipScriptSerialization &= bIsLinkerLoader; // to keep consistent with old UStruct::Serialize() functionality

	if (bSkipScriptSerialization)
	{
		int32 TrackedBufferSize = BytecodeBufferSize;
		BytecodeBufferSize = 0; // temporarily clear so that ClearScriptCode() doesn't leave Class->Script with anything allocated
		ClearScriptCode(DestScriptContainer);
		BytecodeBufferSize = TrackedBufferSize;

		// we have to at least move the archiver forward, so it is positioned
		// where it expects to be (as if we read in the script)
		Ar.Seek(ScriptEndOffset);
		return false;
	}

	bAllowDeferredSerialization &= bIsLinkerLoader;
	if (bAllowDeferredSerialization && ShouldDeferScriptSerialization(Ar))
	{
		ULinkerLoad* Linker = CastChecked<ULinkerLoad>(Ar.GetLinker());
		FDeferredScriptTracker::Get().AddDeferredScriptObject(Linker, DestScriptContainer, *this);

		// we have to at least move the archiver forward, so it is positioned
		// where it expects to be (as if we read in the script)
		Ar.Seek(ScriptEndOffset);
		return false;
	}

	Ar.Seek(ScriptSerializationOffset);
	if (bIsLinkerLoader)
	{
		ULinkerLoad* LinkerLoad = CastChecked<ULinkerLoad>(Ar.GetLinker());

		TArray<uint8> ShaScriptBuffer;
		ShaScriptBuffer.AddUninitialized(SerializedScriptSize);

		Ar.Serialize(ShaScriptBuffer.GetData(), SerializedScriptSize);
		ensure(ScriptEndOffset == Ar.Tell());
		LinkerLoad->UpdateScriptSHAKey(ShaScriptBuffer);

		Ar.Seek(ScriptSerializationOffset);
	}

	DestScriptContainer->Script.Empty(BytecodeBufferSize);
	DestScriptContainer->Script.AddUninitialized(BytecodeBufferSize);

	int32 BytecodeIndex = 0;
	while (BytecodeIndex < BytecodeBufferSize)
	{
		DestScriptContainer->SerializeExpr(BytecodeIndex, Ar);
	}
	ensure(ScriptEndOffset == Ar.Tell());
	checkf(BytecodeIndex == BytecodeBufferSize, TEXT("'%s' script expression-count mismatch; Expected: %i, Got: %i"),
		*DestScriptContainer->GetName(), BytecodeBufferSize, BytecodeIndex);

	if (!GUObjectArray.IsDisregardForGC(DestScriptContainer))
	{
		DestScriptContainer->ScriptObjectReferences.Empty();
		FArchiveScriptReferenceCollector ObjRefCollector(DestScriptContainer->ScriptObjectReferences);

		BytecodeIndex = 0;
		while (BytecodeIndex < BytecodeBufferSize)
		{
			DestScriptContainer->SerializeExpr(BytecodeIndex, ObjRefCollector);
		}
	}

	// success! (we filled the target with serialized script code)
	return true;
}
static bool SaveXMASound(const UObject *Obj, void *Data, int DataSize, const char *DefExt)
{
	// check that the data is large enough to hold the header
	if (DataSize < 16)
	{
		appPrintf("ERROR: %s'%s': empty data\n", Obj->GetClassName(), Obj->Name);
		return false;
	}

	FMemReader Reader(Data, DataSize);
	Reader.ReverseBytes = true;
	FXmaInfoHeader Hdr;
	Reader << Hdr;
	int ComputedDataSize = Reader.Tell() + Hdr.WaveFormatLength + Hdr.SeekTableSize + Hdr.CompressedDataSize;
	if (ComputedDataSize != DataSize)
	{
		if (ComputedDataSize > DataSize)
		{
			// computed size does not fit into the buffer
			appPrintf("ERROR: %s'%s': wrong data\n", Obj->GetClassName(), Obj->Name);
			return false;
		}
		appPrintf("WARNING: %s'%s': wrong data\n", Obj->GetClassName(), Obj->Name);
	}

	// +4 bytes - RIFF "WAVE" id
	// +8 bytes - fmt or XMA2 chunk header
	// +8 bytes - data chunk header
	int ResultFileSize = Hdr.WaveFormatLength + /*??Hdr.SeekTableSize+*/ Hdr.CompressedDataSize + (4+8+8);

	FArchive *Ar;
	if (Hdr.WaveFormatLength == 0x34)		// sizeof(XMA2WAVEFORMATEX)
	{
		Ar = CreateExportArchive(Obj, "%s.%s", Obj->Name, DefExt);
		if (!Ar) return false;
		WriteRiffHeader(*Ar, ResultFileSize);
		WriteRiffChunk(*Ar, "fmt ", Hdr.WaveFormatLength);
		XMA2WAVEFORMATEX fmt;
		// read with conversion from big-endian to little-endian
		Reader << fmt;
		// write in little-endian format
		(*Ar) << fmt;
	}
	else if (Hdr.WaveFormatLength == 0x2C)	// sizeof(XMA2WAVEFORMAT)
	{
		Ar = CreateExportArchive(Obj, "%s.%s", Obj->Name, DefExt);
		if (!Ar) return false;
		WriteRiffHeader(*Ar, ResultFileSize);
		WriteRiffChunk(*Ar, "XMA2", Hdr.WaveFormatLength);
		// XMA2WAVEFORMAT should be stored in big-endian format, so no byte swapping is performed
		Ar->Serialize((byte*)Data + Reader.Tell(), Hdr.WaveFormatLength);
		Reader.Seek(Reader.Tell() + Hdr.WaveFormatLength);	// skip WAVEFORMAT
	}
	else
	{
		appPrintf("ERROR: %s'%s': unknown XBox360 WAVEFORMAT - %X bytes\n", Obj->GetClassName(), Obj->Name, Hdr.WaveFormatLength);
		return false;
	}
	//?? create "seek chunk"
	// write data chunk
	WriteRiffChunk(*Ar, "data", Hdr.CompressedDataSize);
	Ar->Serialize((byte*)Data + Reader.Tell() + Hdr.SeekTableSize, Hdr.CompressedDataSize);

	// verify ResultFileSize - it should equal the file length minus the 8-byte RIFF header
	assert(Ar->Tell() == ResultFileSize + 8);

	delete Ar;
	return true;
}
void FShaderCache::Load(FArchive& Ar)
{
	Ar << m_nPlatform;

	UINT NumShaderBuilderCRC = 0;
	Ar << NumShaderBuilderCRC;
	for(UINT IndexBuilder = 0; IndexBuilder < NumShaderBuilderCRC; ++IndexBuilder)
	{
		FShaderBuilder* ShaderBuilder = NULL;
		Ar << ShaderBuilder;
		DWORD CRC = 0;
		Ar << CRC;
		if( ShaderBuilder )
		{
			m_mapShaderBuilderCRC[ShaderBuilder] = CRC;
		}
	}

	// serialize the global shaders
	UINT NumShaders = 0;
	UINT NumDesertedShaders = 0;
	UINT NumRedundantShaders = 0;
	vector<FString> OutdatedShaderBuilders;
	Ar << NumShaders;
	for(UINT IndexShader = 0; IndexShader < NumShaders; ++IndexShader)
	{
		FShaderBuilder* ShaderBuilder = NULL;
		FGuid ShaderId;
		Ar << ShaderBuilder << ShaderId;

		INT SkipOffset = 0;
		Ar << SkipOffset;

		if( !ShaderBuilder )
		{
			++NumDesertedShaders;
			Ar.Seek(SkipOffset);	// this shader builder doesn't exist any more, skip the shader
		}
		else
		{
			DWORD CurrentCRC = 0;
			DWORD SavedCRC = 0;

			// Compare the current CRC against the saved one to detect whether the source has changed
			CurrentCRC = ShaderBuilder->GetSourceCRC();
			map<FShaderBuilder*, DWORD>::const_iterator it = m_mapShaderBuilderCRC.find(ShaderBuilder);
			if( it != m_mapShaderBuilderCRC.end() )
			{
				SavedCRC = it->second;
			}

			FShader* Shader = ShaderBuilder->FindShaderById(ShaderId);
			if( Shader )
			{
				++NumRedundantShaders;
				Ar.Seek(SkipOffset);	// already exists, skip it
			}
			else if( SavedCRC != CurrentCRC )
			{
				++NumDesertedShaders;
				Ar.Seek(SkipOffset);
				if( SavedCRC != 0 )		// the shader builder exists, but its source has changed
				{
					OutdatedShaderBuilders.push_back(ShaderBuilder->GetShaderName());
				}
			}
			else
			{
				// the shader is compatible, create it
				Shader = ShaderBuilder->ConstructSerialization();
				UBOOL bShaderHasOutdatedParameters = Shader->Serialize(Ar);
				if( bShaderHasOutdatedParameters )
				{
					ShaderBuilder->UnregisterShader(Shader);
					delete Shader;
				}
				check(Ar.Tell() == SkipOffset);
			}
		}
	}

	if( OutdatedShaderBuilders.size() > 0 )
	{
		debugf(TEXT("Skip %d outdated FShaderBuilder"), OutdatedShaderBuilders.size());
		for(UINT IndexBuilder = 0; IndexBuilder < OutdatedShaderBuilders.size(); ++IndexBuilder)
		{
			debugf(TEXT("    %s"), OutdatedShaderBuilders.at(IndexBuilder).c_str());
		}
	}
	if( NumShaders > 0 )
	{
		debugf(TEXT("Loaded %d shaders (%d deserted, %d redundant)"), NumShaders, NumDesertedShaders, NumRedundantShaders);
	}
}
void SkipFixedArray(FArchive &Ar, int ItemSize)
{
	TArray<DummyItem> DummyArray;
	Ar << DummyArray;
	Ar.Seek(Ar.Tell() + DummyArray.Num() * ItemSize);
}
void UArrayProperty::SerializeItem( FArchive& Ar, void* Value, void const* Defaults ) const
{
	checkSlow(Inner);

	// Ensure that the Inner itself has been loaded before calling SerializeItem() on it
	Ar.Preload(Inner);

	FScriptArrayHelper ArrayHelper(this, Value);
	int32 n = ArrayHelper.Num();
	Ar << n;
	if( Ar.IsLoading() )
	{
		// If using a custom property list, don't empty the array on load. Not all indices may have been serialized, so we need to preserve existing values at those slots.
		if (Ar.ArUseCustomPropertyList)
		{
			const int32 OldNum = ArrayHelper.Num();
			if (n > OldNum)
			{
				ArrayHelper.AddValues(n - OldNum);
			}
			else if (n < OldNum)
			{
				ArrayHelper.RemoveValues(n, OldNum - n);
			}
		}
		else
		{
			ArrayHelper.EmptyAndAddValues(n);
		}
	}
	ArrayHelper.CountBytes( Ar );

	// Serialize a PropertyTag for the inner property of this array, allows us to validate the inner struct to see if it has changed
	FPropertyTag InnerTag(Ar, Inner, 0, (uint8*)Value, (uint8*)Defaults);
	if (Ar.UE4Ver() >= VER_UE4_INNER_ARRAY_TAG_INFO && InnerTag.Type == NAME_StructProperty)
	{
		if (Ar.IsSaving())
		{
			Ar << InnerTag;
		}
		else if (Ar.IsLoading())
		{
			Ar << InnerTag;

			auto CanSerializeFromStructWithDifferentName = [](const FArchive& InAr, const FPropertyTag& PropertyTag, const UStructProperty* StructProperty)
			{
				return PropertyTag.StructGuid.IsValid()
					&& StructProperty
					&& StructProperty->Struct
					&& (PropertyTag.StructGuid == StructProperty->Struct->GetCustomGuid());
			};

			// Check if the Inner property can successfully serialize, the type may have changed
			UStructProperty* StructProperty = CastChecked<UStructProperty>(Inner);
			// if check redirector to make sure if the name has changed
			FName* NewName = FLinkerLoad::StructNameRedirects.Find(InnerTag.StructName);
			FName StructName = CastChecked<UStructProperty>(StructProperty)->Struct->GetFName();
			if (NewName != nullptr && *NewName == StructName)
			{
				InnerTag.StructName = *NewName;
			}

			if (InnerTag.StructName != StructProperty->Struct->GetFName()
				&& !CanSerializeFromStructWithDifferentName(Ar, InnerTag, StructProperty))
			{
				UE_LOG(LogClass, Warning, TEXT("Property %s of %s has a struct type mismatch (tag %s != prop %s) in package: %s. If that struct got renamed, add an entry to ActiveStructRedirects."),
					*InnerTag.Name.ToString(), *GetName(), *InnerTag.StructName.ToString(), *CastChecked<UStructProperty>(Inner)->Struct->GetName(), *Ar.GetArchiveName());

#if WITH_EDITOR
				// Ensure the structure is initialized
				for (int32 i = 0; i < n; i++)
				{
					StructProperty->Struct->InitializeDefaultValue(ArrayHelper.GetRawPtr(i));
				}
#endif // WITH_EDITOR

				// Skip the property
				const int64 StartOfProperty = Ar.Tell();
				const int64 RemainingSize = InnerTag.Size - (Ar.Tell() - StartOfProperty);
				uint8 B;
				for (int64 i = 0; i < RemainingSize; i++)
				{
					Ar << B;
				}
				return;
			}
		}
	}

	// need to know how much data this call to SerializeItem consumes, so mark where we are
	int32 DataOffset = Ar.Tell();

	// If we're using a custom property list, first serialize any explicit indices
	int32 i = 0;
	bool bSerializeRemainingItems = true;
	bool bUsingCustomPropertyList = Ar.ArUseCustomPropertyList;
	if (bUsingCustomPropertyList && Ar.ArCustomPropertyList != nullptr)
	{
		// Initially we only serialize indices that are explicitly specified (in order)
		bSerializeRemainingItems = false;

		const FCustomPropertyListNode* CustomPropertyList = Ar.ArCustomPropertyList;
		const FCustomPropertyListNode* PropertyNode = CustomPropertyList;
		while (PropertyNode && i < n && !bSerializeRemainingItems)
		{
			if (PropertyNode->Property != Inner)
			{
				// A null property value signals that we should serialize the remaining array values in full starting at this index
				if (PropertyNode->Property == nullptr)
				{
					i = PropertyNode->ArrayIndex;
				}

				bSerializeRemainingItems = true;
			}
			else
			{
				// Set a temporary node to represent the item
				FCustomPropertyListNode ItemNode = *PropertyNode;
				ItemNode.ArrayIndex = 0;
				ItemNode.PropertyListNext = nullptr;
				Ar.ArCustomPropertyList = &ItemNode;

				// Serialize the item at this array index
				i = PropertyNode->ArrayIndex;
				Inner->SerializeItem(Ar, ArrayHelper.GetRawPtr(i));
				PropertyNode = PropertyNode->PropertyListNext;

				// Restore the current property list
				Ar.ArCustomPropertyList = CustomPropertyList;
			}
		}
	}

	if (bSerializeRemainingItems)
	{
		// Temporarily suspend the custom property list (as we need these items to be serialized in full)
		Ar.ArUseCustomPropertyList = false;

		// Serialize each item until we get to the end of the array
		while (i < n)
		{
			Inner->SerializeItem(Ar, ArrayHelper.GetRawPtr(i++));
		}

		// Restore use of the custom property list (if it was previously enabled)
		Ar.ArUseCustomPropertyList = bUsingCustomPropertyList;
	}

	if (Ar.UE4Ver() >= VER_UE4_INNER_ARRAY_TAG_INFO && Ar.IsSaving() && InnerTag.Type == NAME_StructProperty)
	{
		// set the tag's size
		InnerTag.Size = Ar.Tell() - DataOffset;

		if (InnerTag.Size > 0)
		{
			// mark our current location
			DataOffset = Ar.Tell();

			// go back and re-serialize the size now that we know it
			Ar.Seek(InnerTag.SizeOffset);
			Ar << InnerTag.Size;

			// return to the current location
			Ar.Seek(DataOffset);
		}
	}
}
const bool FChunkWriter::FQueuedChunkWriter::WriteChunkData(const FString& ChunkFilename, FChunkFile* ChunkFile, const FGuid& ChunkGuid)
{
	// Chunks are saved with GUID, so if a file already exists it will never be different.
	// Skip with return true if already exists
	if( FPaths::FileExists( ChunkFilename ) )
	{
		const int64 ChunkFilesSize = IFileManager::Get().FileSize(*ChunkFilename);
		ChunkFileSizesCS.Lock();
		ChunkFileSizes.Add(ChunkGuid, ChunkFilesSize);
		ChunkFileSizesCS.Unlock();
		return true;
	}
	FArchive* FileOut = IFileManager::Get().CreateFileWriter( *ChunkFilename );
	bool bSuccess = FileOut != NULL;
	if( bSuccess )
	{
		// Setup to handle compression
		bool bDataIsCompressed = true;
		uint8* ChunkDataSource = ChunkFile->ChunkData;
		int32 ChunkDataSourceSize = FBuildPatchData::ChunkDataSize;
		TArray< uint8 > TempCompressedData;
		TempCompressedData.Empty( ChunkDataSourceSize );
		TempCompressedData.AddUninitialized( ChunkDataSourceSize );
		int32 CompressedSize = ChunkDataSourceSize;

		// Compression can increase the size, in which case the function returns failure;
		// we can allow that to happen since we would not keep larger compressed data anyway.
		bDataIsCompressed = FCompression::CompressMemory(
			static_cast< ECompressionFlags >( COMPRESS_ZLIB | COMPRESS_BiasMemory ),
			TempCompressedData.GetData(),
			CompressedSize,
			ChunkFile->ChunkData,
			FBuildPatchData::ChunkDataSize );

		// If compression succeeded, set data vars
		if( bDataIsCompressed )
		{
			ChunkDataSource = TempCompressedData.GetData();
			ChunkDataSourceSize = CompressedSize;
		}

		// Setup header
		FChunkHeader& Header = ChunkFile->ChunkHeader;
		*FileOut << Header;
		Header.HeaderSize = FileOut->Tell();
		Header.StoredAs = bDataIsCompressed ? FChunkHeader::STORED_COMPRESSED : FChunkHeader::STORED_RAW;
		Header.DataSize = ChunkDataSourceSize;
		Header.HashType = FChunkHeader::HASH_ROLLING;

		// Write out files
		FileOut->Seek( 0 );
		*FileOut << Header;
		FileOut->Serialize( ChunkDataSource, ChunkDataSourceSize );
		const int64 ChunkFilesSize = FileOut->TotalSize();
		FileOut->Close();
		ChunkFileSizesCS.Lock();
		ChunkFileSizes.Add(ChunkGuid, ChunkFilesSize);
		ChunkFileSizesCS.Unlock();
		bSuccess = !FileOut->GetError();
		delete FileOut;
	}
	// Log errors
	if( !bSuccess )
	{
		GLog->Logf( TEXT( "BuildPatchServices: Error: Could not save out generated chunk file %s" ), *ChunkFilename );
	}
	return bSuccess;
}
void UNavCollision::Serialize(FArchive& Ar)
{
	Super::Serialize(Ar);

	const int32 VerInitial = 1;
	const int32 VerAreaClass = 2;
	const int32 VerConvexTransforms = 3;
	const int32 VerLatest = VerConvexTransforms;

	// use magic number to determine if serialized stream has version :/
	const int32 MagicNum = 0xA237F237;
	int64 StreamStartPos = Ar.Tell();

	int32 Version = VerLatest;
	int32 MyMagicNum = MagicNum;
	Ar << MyMagicNum;

	if (MyMagicNum != MagicNum)
	{
		Version = VerInitial;
		Ar.Seek(StreamStartPos);
	}
	else
	{
		Ar << Version;
	}

	// loading a dummy GUID to have serialization not break on
	// packages serialized before switching over UNavCollision to
	// use BodySetup's guid rather than its own one
	// motivation: not creating a new engine version
	// @NOTE could be addressed during next engine version bump
	FGuid Guid;
	Ar << Guid;

	bool bCooked = Ar.IsCooking();
	Ar << bCooked;

	if (FPlatformProperties::RequiresCookedData() && !bCooked && Ar.IsLoading())
	{
		UE_LOG(LogNavigation, Fatal, TEXT("This platform requires cooked packages, and NavCollision data was not cooked into %s."), *GetFullName());
	}

	if (bCooked && ShouldUseConvexCollision())
	{
		if (Ar.IsCooking())
		{
			FName Format = NAVCOLLISION_FORMAT;
			GetCookedData(Format); // Get the data from the DDC or build it

			TArray<FName> ActualFormatsToSave;
			ActualFormatsToSave.Add(Format);
			CookedFormatData.Serialize(Ar, this, &ActualFormatsToSave);
		}
		else
		{
			CookedFormatData.Serialize(Ar, this);
		}
	}

	if (Version >= VerAreaClass)
	{
		Ar << AreaClass;
	}

	if (Version < VerConvexTransforms && Ar.IsLoading() && GIsEditor)
	{
		bForceGeometryRebuild = true;
	}
}
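// [Illustrative sketch, not engine code] UNavCollision::Serialize above probes
// for a version marker: read a magic number, and if it doesn't match, rewind
// with Seek() so the same bytes can be re-read as the older, unversioned
// layout. A hypothetical standalone form of that probe:
int32 ReadOptionalVersion(FArchive& Ar, const int32 MagicNum, int32 UnversionedVer, int32 LatestVer)
{
	const int64 StreamStartPos = Ar.Tell();
	int32 Version = LatestVer;
	int32 MyMagicNum = MagicNum;
	Ar << MyMagicNum;
	if (MyMagicNum != MagicNum)
	{
		Ar.Seek(StreamStartPos); // no marker: rewind and treat the data as unversioned
		return UnversionedVer;
	}
	Ar << Version;               // marker present: an explicit version follows
	return Version;
}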
void USkeletalMesh::Serialize(FArchive &Ar)
{
	guard(USkeletalMesh::Serialize);

	assert(Ar.Game < GAME_UE3);

#if UNREAL1
	if (Ar.Engine() == GAME_UE1)
	{
		SerializeSkelMesh1(Ar);
		return;
	}
#endif
#if BIOSHOCK
	if (Ar.Game == GAME_Bioshock)
	{
		SerializeBioshockMesh(Ar);
		return;
	}
#endif

	Super::Serialize(Ar);
#if SPLINTER_CELL
	if (Ar.Game == GAME_SplinterCell)
	{
		SerializeSCell(Ar);
		return;
	}
#endif // SPLINTER_CELL
#if TRIBES3
	TRIBES_HDR(Ar, 4);
#endif
	Ar << Points2;
#if BATTLE_TERR
	if (Ar.Game == GAME_BattleTerr && Ar.ArVer >= 134)
	{
		TArray<FVector> Points3;
		Ar << Points3;
	}
#endif // BATTLE_TERR
	Ar << RefSkeleton;
#if DEBUG_SKELMESH
	appPrintf("RefSkeleton: %d bones\n", RefSkeleton.Num());
	for (int i1 = 0; i1 < RefSkeleton.Num(); i1++)
		appPrintf("  [%d] n=%s p=%d\n", i1, *RefSkeleton[i1].Name, RefSkeleton[i1].ParentIndex);
#endif // DEBUG_SKELMESH
#if SWRC
	if (Ar.Game == GAME_RepCommando && Ar.ArVer >= 142)
	{
		for (int i = 0; i < RefSkeleton.Num(); i++)
		{
			FMeshBone &B = RefSkeleton[i];
			B.BonePos.Orientation.X *= -1;
			B.BonePos.Orientation.Y *= -1;
			B.BonePos.Orientation.Z *= -1;
		}
	}
	if (Ar.Game == GAME_RepCommando && Version >= 5)
	{
		TArray<FMeshAnimLinkSWRC> Anims;
		Ar << Anims;
		if (Anims.Num() >= 1) Animation = Anims[0].Anim;
	}
	else
#endif // SWRC
		Ar << Animation;
#if AA2
	if (Ar.Game == GAME_AA2 && Ar.ArLicenseeVer >= 22)
	{
		TArray<UObject*> unk230;
		Ar << unk230;
	}
#endif // AA2
	Ar << SkeletalDepth << WeightIndices << BoneInfluences;
#if SWRC
	if (Ar.Game == GAME_RepCommando && Ar.ArVer >= 140)
	{
		TArray<FAttachSocketSWRC> Sockets;
		Ar << Sockets;	//?? convert
	}
	else
#endif // SWRC
	{
		Ar << AttachAliases << AttachBoneNames << AttachCoords;
	}
	if (Version <= 1)
	{
//		appNotify("SkeletalMesh of version %d\n", Version);
		TArray<FLODMeshSection> tmp1, tmp2;
		TArray<word> tmp3;
		Ar << tmp1 << tmp2 << tmp3;
		// copy and convert data from old mesh format
		UpgradeMesh();
	}
	else
	{
#if UC2
		if (Ar.Engine() == GAME_UE2X && Ar.ArVer >= 136)
		{
			int f338;
			Ar << f338;
		}
#endif // UC2
#if SWRC
		if (Ar.Game == GAME_RepCommando)
		{
			int f1C4;
			if (Version >= 6) Ar << f1C4;
			Ar << LODModels;
			if (Version < 5) Ar << f224;
			Ar << Points << Wedges << Triangles << VertInfluences;
			Ar << CollapseWedge << f1C8;
			goto skip_remaining;
		}
#endif // SWRC
#if EOS
		if (Ar.Game == GAME_EOS)
		{
			int unk1;
			UObject* unk2;
			UObject* unk3;
			if (Version >= 6) Ar << unk1 << unk2;
			if (Version >= 7) Ar << unk3;
			Ar << LODModels;
			goto skip_remaining;
		}
#endif // EOS
#if 0
		// Shui Hu Q Zhuan 2 Online
		if (Ar.ArVer == 126 && Ar.ArLicenseeVer == 1)
		{
			// skip LOD models
			int Num;
			Ar << AR_INDEX(Num);
			for (int i = 0; i < Num; i++)
			{
				int Pos;
				Ar << Pos;
				Ar.Seek(Ar.Tell() + Pos - 4);
			}
			goto after_lods;
		}
#endif
		Ar << LODModels;
	after_lods:
		Ar << f224 << Points;
#if BATTLE_TERR
		if (Ar.Game == GAME_BattleTerr && Ar.ArVer >= 134)
		{
			TLazyArray<int> unk15C;
			Ar << unk15C;
		}
#endif // BATTLE_TERR
		Ar << Wedges << Triangles << VertInfluences;
		Ar << CollapseWedge << f1C8;
	}

#if TRIBES3
	if ((Ar.Game == GAME_Tribes3 || Ar.Game == GAME_Swat4) && t3_hdrSV >= 3)
	{
#if 0
		// it looks like the format of the following data was changed since
		// the data was prepared, and the game executable does not load these
		// LazyArrays (otherwise an error should occur) -- so we are
		// simply skipping these arrays
		TLazyArray<FT3Unk1> unk1;
		TLazyArray<FMeshWedge> unk2;
		TLazyArray<word> unk3;
		Ar << unk1 << unk2 << unk3;
#else
		SkipLazyArray(Ar);
		SkipLazyArray(Ar);
		SkipLazyArray(Ar);
#endif
		// nothing interesting below ...
		goto skip_remaining;
	}
#endif // TRIBES3
#if BATTLE_TERR
	if (Ar.Game == GAME_BattleTerr) goto skip_remaining;
#endif
#if UC2
	if (Ar.Engine() == GAME_UE2X) goto skip_remaining;
#endif

#if LINEAGE2
	if (Ar.Game == GAME_Lineage2)
	{
		int unk1, unk3, unk4;
		TArray<float> unk2;
		if (Ar.ArVer >= 118 && Ar.ArLicenseeVer >= 3)
			Ar << unk1;
		if (Ar.ArVer >= 123 && Ar.ArLicenseeVer >= 0x12)
			Ar << unk2;
		if (Ar.ArVer >= 120)
			Ar << unk3;		// AuthKey ?
		if (Ar.ArLicenseeVer >= 0x23)
			Ar << unk4;
		ConvertMesh();
		return;
	}
#endif // LINEAGE2

	if (Ar.ArVer >= 120)
	{
		Ar << AuthKey;
	}

#if LOCO
	if (Ar.Game == GAME_Loco) goto skip_remaining;	// Loco codepath is similar to UT2004, but sometimes has different version switches
#endif

#if UT2
	if (Ar.Game == GAME_UT2)
	{
		// UT2004 has a branched version of UE2, which is slightly different
		// from the generic UE2 used in all other UE2 games.
		if (Ar.ArVer >= 122)
			Ar << KarmaProps << BoundingSpheres << BoundingBoxes << f32C;
		if (Ar.ArVer >= 127)
			Ar << CollisionMesh;
		ConvertMesh();
		return;
	}
#endif // UT2

	// generic UE2 code
	if (Ar.ArVer >= 124)
		Ar << KarmaProps << BoundingSpheres << BoundingBoxes;
	if (Ar.ArVer >= 125)
		Ar << f32C;

#if XIII
	if (Ar.Game == GAME_XIII) goto skip_remaining;
#endif
#if RAGNAROK2
	if (Ar.Game == GAME_Ragnarok2 && Ar.ArVer >= 131)
	{
		float unk1, unk2;
		Ar << unk1 << unk2;
	}
#endif // RAGNAROK2

	if (Ar.ArLicenseeVer && (Ar.Tell() != Ar.GetStopper()))
	{
		appPrintf("Serializing SkeletalMesh'%s' of unknown game: %d unreal bytes\n", Name, Ar.GetStopper() - Ar.Tell());
	skip_remaining:
		DROP_REMAINING_DATA(Ar);
	}

	ConvertMesh();

	unguard;
}
/**
 * Serialize function used to serialize this bulk data structure.
 *
 * @param Ar	Archive to serialize with
 * @param Owner	Object owning the bulk data
 * @param Idx	Index of bulk data item being serialized
 */
void FUntypedBulkData::Serialize( FArchive& Ar, UObject* Owner, int32 Idx )
{
	check( LockStatus == LOCKSTATUS_Unlocked );

	if(Ar.IsTransacting())
	{
		// Special case for transacting bulk data arrays.

		// constructing the object during load will save it to the transaction buffer. If it tries to load the bulk data now it will try to break it.
		bool bActuallySave = Ar.IsSaving() && (!Owner || !Owner->HasAnyFlags(RF_NeedLoad));

		Ar << bActuallySave;

		if (bActuallySave)
		{
			if(Ar.IsLoading())
			{
				// Flags for bulk data.
				Ar << BulkDataFlags;
				// Number of elements in array.
				Ar << ElementCount;
				// Allocate bulk data.
				check(bShouldFreeOnEmpty);
				BulkData = FMemory::Realloc( BulkData, GetBulkDataSize() );
				// Deserialize bulk data.
				SerializeBulkData( Ar, BulkData );
			}
			else if(Ar.IsSaving())
			{
				// Flags for bulk data.
				Ar << BulkDataFlags;
				// Number of elements in array.
				Ar << ElementCount;
				// Don't attempt to load or serialize BulkData if the current size is 0.
				// This could be a newly constructed BulkData that has not yet been loaded,
				// and allocating 0 bytes now will cause a crash when we load.
				if (GetBulkDataSize() > 0)
				{
					// Make sure bulk data is loaded.
					MakeSureBulkDataIsLoaded();
					// Serialize bulk data.
					SerializeBulkData(Ar, BulkData);
				}
			}
		}
	}
	else if( Ar.IsPersistent() && !Ar.IsObjectReferenceCollector() && !Ar.ShouldSkipBulkData() )
	{
#if TRACK_BULKDATA_USE
		FThreadSafeBulkDataToObjectMap::Get().Add( this, Owner );
#endif
		// Offset where the bulkdata flags are stored
		int64 SavedBulkDataFlagsPos = Ar.Tell();
		Ar << BulkDataFlags;

		// Number of elements in array.
		Ar << ElementCount;

		// We're loading from the persistent archive.
		if( Ar.IsLoading() )
		{
			Filename = TEXT("");

			// @todo when Landscape (and others?) only Lock/Unlock once, we can enable this
			if (false) // FPlatformProperties::RequiresCookedData())
			{
				// Bulk data that is being serialized via seekfree loading is single use only. This allows us
				// to free the memory as e.g. the bulk data won't be attached to an archive in the case of
				// seek free loading.
				BulkDataFlags |= BULKDATA_SingleUse;
			}

			// Size on disk, which in the case of compression is != GetBulkDataSize()
			Ar << BulkDataSizeOnDisk;
			Ar << BulkDataOffsetInFile;

			// fix up the file offset
			if (Owner != NULL && Owner->GetLinker())
			{
				BulkDataOffsetInFile += Owner->GetLinker()->Summary.BulkDataStartOffset;
			}

			// determine whether the payload is stored inline or at the end of the file
			bool bPayloadInline = !(BulkDataFlags&BULKDATA_PayloadAtEndOfFile);

//			check( (bPayloadInline && BulkDataOffsetInFile == Ar.Tell()) ||
//				   (!bPayloadInline && BulkDataOffsetInFile > Ar.Tell()));

			// We're allowing deferred serialization.
			if( Ar.IsAllowingLazyLoading() && Owner != NULL)
			{
				Linker = Owner->GetLinker();
#if WITH_EDITOR
				check(Linker);
				Ar.AttachBulkData( Owner, this );
				AttachedAr = &Ar;
#else
				check(Linker.IsValid());
				Filename = Linker->Filename;
#endif // WITH_EDITOR
				// only skip over payload, if it's stored inline
				if (bPayloadInline)
				{
					Ar.Seek( Ar.Tell() + BulkDataSizeOnDisk );
				}
			}
			// Serialize the bulk data right away.
			else
			{
				// memory for bulk data can come from preallocated GPU-accessible resource memory or default to system memory
				BulkData = GetBulkDataResourceMemory(Owner,Idx);
				if( !BulkData )
				{
					BulkData = FMemory::Realloc( BulkData, GetBulkDataSize() );
				}

				if (bPayloadInline)
				{
					// if the payload is stored inline, just serialize it
					SerializeBulkData( Ar, BulkData );
				}
				else
				{
					// if the payload is NOT stored inline ...

					// store the current file offset
					int64 CurOffset = Ar.Tell();
					// seek to the location in the file where the payload is stored
					Ar.Seek(BulkDataOffsetInFile);
					// serialize the payload
					SerializeBulkData( Ar, BulkData );
					// seek back to the location we came from
					Ar.Seek(CurOffset);
				}
			}
		}
		// We're saving to the persistent archive.
		else if( Ar.IsSaving() )
		{
			// check if we save the package compressed
			UPackage* Pkg = Owner ? dynamic_cast<UPackage*>(Owner->GetOutermost()) : nullptr;
			if (Pkg && !!(Pkg->PackageFlags & PKG_StoreCompressed) )
			{
				ECompressionFlags BaseCompressionMethod = COMPRESS_Default;
				if (Ar.IsCooking())
				{
					BaseCompressionMethod = Ar.CookingTarget()->GetBaseCompressionMethod();
				}
				StoreCompressedOnDisk(BaseCompressionMethod);
			}

			// Remove single element serialization requirement before saving out bulk data flags.
			BulkDataFlags &= ~BULKDATA_ForceSingleElementSerialization;

			// Make sure bulk data is loaded.
			MakeSureBulkDataIsLoaded();

			// Only serialize status information if wanted.
			int64 SavedBulkDataSizeOnDiskPos	= INDEX_NONE;
			int64 SavedBulkDataOffsetInFilePos	= INDEX_NONE;

			// Keep track of position where we are going to serialize placeholder BulkDataSizeOnDisk.
			SavedBulkDataSizeOnDiskPos = Ar.Tell();
			BulkDataSizeOnDisk = INDEX_NONE;
			// And serialize the placeholder which is going to be overwritten later.
			Ar << BulkDataSizeOnDisk;

			// Keep track of position where we are going to serialize placeholder BulkDataOffsetInFile.
			SavedBulkDataOffsetInFilePos = Ar.Tell();
			BulkDataOffsetInFile = INDEX_NONE;
			// And serialize the placeholder which is going to be overwritten later.
			Ar << BulkDataOffsetInFile;

			// try to get the linkersave object
			ULinkerSave* LinkerSave = dynamic_cast<ULinkerSave*>(Ar.GetLinker());

			// determine whether we are going to store the payload inline or not.
			bool bStoreInline = !!(BulkDataFlags&BULKDATA_ForceInlinePayload) || LinkerSave == NULL;

			if (!bStoreInline)
			{
				// set the flag indicating where the payload is stored
				BulkDataFlags |= BULKDATA_PayloadAtEndOfFile;

				// with no LinkerSave we have to store the data inline
				check(LinkerSave != NULL);

				// add the bulkdata storage info object to the linkersave
				int32 Index = LinkerSave->BulkDataToAppend.AddZeroed(1);
				ULinkerSave::FBulkDataStorageInfo& BulkStore = LinkerSave->BulkDataToAppend[Index];

				BulkStore.BulkDataOffsetInFilePos = SavedBulkDataOffsetInFilePos;
				BulkStore.BulkDataSizeOnDiskPos = SavedBulkDataSizeOnDiskPos;
				BulkStore.BulkData = this;

				// Serialize bulk data into the storage info
				BulkDataSizeOnDisk = -1;
			}
			else
			{
				// set the flag indicating where the payload is stored
				BulkDataFlags &= ~BULKDATA_PayloadAtEndOfFile;

				int64 SavedBulkDataStartPos = Ar.Tell();

				// Serialize bulk data.
				SerializeBulkData( Ar, BulkData );
				// store the payload endpos
				int64 SavedBulkDataEndPos = Ar.Tell();

				checkf(SavedBulkDataStartPos >= 0 && SavedBulkDataEndPos >= 0,
					   TEXT("Bad archive positions for bulkdata. StartPos=%d EndPos=%d"),
					   SavedBulkDataStartPos, SavedBulkDataEndPos);

				BulkDataSizeOnDisk		= SavedBulkDataEndPos - SavedBulkDataStartPos;
				BulkDataOffsetInFile	= SavedBulkDataStartPos;
			}

			// store current file offset before seeking back
			int64 CurrentFileOffset = Ar.Tell();

			// Seek back and overwrite the flags
			Ar.Seek(SavedBulkDataFlagsPos);
			Ar << BulkDataFlags;

			// Seek back and overwrite placeholder for BulkDataSizeOnDisk
			Ar.Seek( SavedBulkDataSizeOnDiskPos );
			Ar << BulkDataSizeOnDisk;

			// Seek back and overwrite placeholder for BulkDataOffsetInFile
			Ar.Seek( SavedBulkDataOffsetInFilePos );
			Ar << BulkDataOffsetInFile;

			// Seek to the end of written data so we don't clobber any data in subsequent write operations
			Ar.Seek(CurrentFileOffset);
		}
	}
}