bool FBuildPatchAppManifest::SaveToFile(const FString& Filename, bool bUseBinary)
{
    bool bSuccess = false;
    FArchive* FileOut = IFileManager::Get().CreateFileWriter(*Filename);
    if (FileOut)
    {
        if (bUseBinary)
        {
            Data->ManifestFileVersion = EBuildPatchAppManifestVersion::GetLatestVersion();
            FManifestWriter ManifestData;
            Serialize(ManifestData);
            ManifestData.Finalize();
            if (!ManifestData.IsError())
            {
                int32 DataSize = ManifestData.TotalSize();
                TArray<uint8> TempCompressed;
                TempCompressed.AddUninitialized(DataSize);
                int32 CompressedSize = DataSize;
                bool bDataIsCompressed = FCompression::CompressMemory(
                    static_cast<ECompressionFlags>(COMPRESS_ZLIB | COMPRESS_BiasMemory),
                    TempCompressed.GetData(),
                    CompressedSize,
                    ManifestData.GetBytes().GetData(),
                    DataSize);
                TempCompressed.SetNum(CompressedSize);
                TArray<uint8>& FileData = bDataIsCompressed ? TempCompressed : ManifestData.GetBytes();

                // Write a header stub first to reserve space, then seek back and
                // rewrite it once the hash and sizes are known.
                FManifestFileHeader Header;
                *FileOut << Header;
                Header.HeaderSize = FileOut->Tell();
                Header.StoredAs = bDataIsCompressed ? EManifestFileHeader::STORED_COMPRESSED : EManifestFileHeader::STORED_RAW;
                Header.DataSize = DataSize;
                Header.CompressedSize = bDataIsCompressed ? CompressedSize : 0;
                FSHA1::HashBuffer(FileData.GetData(), FileData.Num(), Header.SHAHash.Hash);
                FileOut->Seek(0);
                *FileOut << Header;
                FileOut->Serialize(FileData.GetData(), FileData.Num());
                bSuccess = !FileOut->IsError();
            }
        }
        else
        {
            Data->ManifestFileVersion = EBuildPatchAppManifestVersion::GetLatestJsonVersion();
            FString JSONOutput;
            SerializeToJSON(JSONOutput);
            FTCHARToUTF8 JsonUTF8(*JSONOutput);
            FileOut->Serialize((UTF8CHAR*)JsonUTF8.Get(), JsonUTF8.Length() * sizeof(UTF8CHAR));
            bSuccess = !FileOut->IsError();
        }
        FileOut->Close();
        delete FileOut;
        FileOut = nullptr;
    }
    return bSuccess;
}
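// The function above (and several snippets below) uses the same write-time trick:
// serialize a placeholder header, note the offset, write the payload, then Seek()
// back and rewrite the header once the sizes and hash are known. A minimal sketch
// of that pattern, assuming a hypothetical FHeader type and any FArchive writer
// (WriteWithPatchedHeader is illustrative and not part of the code above):
struct FHeader
{
    int64 DataSize = 0;
    friend FArchive& operator<<(FArchive& Ar, FHeader& H)
    {
        Ar << H.DataSize;
        return Ar;
    }
};

void WriteWithPatchedHeader(FArchive& Ar, const TArray<uint8>& Payload)
{
    FHeader Header;
    const int64 HeaderPos = Ar.Tell();
    Ar << Header;                                          // reserve space with a stub
    Ar.Serialize(const_cast<uint8*>(Payload.GetData()), Payload.Num());
    const int64 EndPos = Ar.Tell();
    Header.DataSize = Payload.Num();
    Ar.Seek(HeaderPos);
    Ar << Header;                                          // overwrite the stub in place
    Ar.Seek(EndPos);                                       // restore position for later writes
}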
void SkipLazyArray(FArchive &Ar)
{
    guard(SkipLazyArray);
    assert(Ar.IsLoading);
    int pos;
    Ar << pos;
    assert(Ar.Tell() < pos);
    Ar.Seek(pos);
    unguard;
}
static byte *FindBioTexture(const UTexture *Tex)
{
    int needSize = Tex->CachedBulkDataSize & 0xFFFFFFFF;
#if DEBUG_BIO_BULK
    appPrintf("Search for ... %s (size=%X)\n", Tex->Name, needSize);
#endif
    BioReadBulkCatalog();
    for (int i = 0; i < bioCatalog.Num(); i++)
    {
        BioBulkCatalog &Cat = bioCatalog[i];
        for (int j = 0; j < Cat.Files.Num(); j++)
        {
            const BioBulkCatalogFile &File = Cat.Files[j];
            for (int k = 0; k < File.Items.Num(); k++)
            {
                const BioBulkCatalogItem &Item = File.Items[k];
                if (!strcmp(Tex->Name, Item.ObjectName))
                {
                    if (abs(needSize - Item.DataSize) > 0x4000)    // differs in 16k
                    {
#if DEBUG_BIO_BULK
                        appPrintf("... Found %s in %s with wrong BulkDataSize %X (need %X)\n",
                            Tex->Name, *File.Filename, Item.DataSize, needSize);
#endif
                        continue;
                    }
#if DEBUG_BIO_BULK
                    appPrintf("... Found %s in %s at %X size %X (%dx%d fmt=%d bpp=%g strip:%d mips:%d)\n",
                        Tex->Name, *File.Filename, Item.DataOffset, Item.DataSize,
                        Tex->USize, Tex->VSize, Tex->Format,
                        (float)Item.DataSize / (Tex->USize * Tex->VSize),
                        Tex->HasBeenStripped, Tex->StrippedNumMips);
#endif
                    // found
                    const CGameFileInfo *bulkFile = appFindGameFile(File.Filename);
                    if (!bulkFile)
                    {
                        // no bulk file
                        appPrintf("Decompressing %s: %s is missing\n", Tex->Name, *File.Filename);
                        return NULL;
                    }

                    appPrintf("Reading %s mip level %d (%dx%d) from %s\n",
                        Tex->Name, 0, Tex->USize, Tex->VSize, bulkFile->RelativeName);
                    FArchive *Reader = appCreateFileReader(bulkFile);
                    Reader->Seek(Item.DataOffset);
                    byte *buf = (byte*)appMalloc(max(Item.DataSize, needSize));
                    Reader->Serialize(buf, Item.DataSize);
                    delete Reader;
                    return buf;
                }
            }
        }
    }
#if DEBUG_BIO_BULK
    appPrintf("... Bulk for %s was not found\n", Tex->Name);
#endif
    return NULL;
}
void FShaderCache::Save(FArchive& Ar, const map<FGuid, FShader*>& InShaders)
{
    Ar << m_nPlatform;

    // serialize the global shader CRCs
    UINT NumShaderBuilderCRC = m_mapShaderBuilderCRC.size();
    Ar << NumShaderBuilderCRC;
    map<FShaderBuilder*, DWORD>::iterator it;
    for( it = m_mapShaderBuilderCRC.begin(); it != m_mapShaderBuilderCRC.end(); ++it )
    {
        FShaderBuilder* ShaderBuilder = it->first;
        Ar << ShaderBuilder;
        Ar << it->second;
    }

    // serialize the global shaders
    UINT NumShaders = InShaders.size();
    Ar << NumShaders;
    for( map<FGuid, FShader*>::const_iterator it = InShaders.begin(); it != InShaders.end(); ++it )
    {
        FShader* Shader = it->second;

        // Serialize the shader builder; at load time it is used to detect whether this shader type still exists
        FShaderBuilder* ShaderBuilder = Shader->GetShaderBuilder();
        FGuid ShaderId = Shader->GetId();
        Ar << ShaderBuilder << ShaderId;

        // Write a placeholder for now... it should hold the end position of this shader's serialized data
        INT SkipOffset = Ar.Tell();
        Ar << SkipOffset;

        Shader->Serialize(Ar);

        INT EndOffset = Ar.Tell();
        Ar.Seek(SkipOffset);    // seek back to the placeholder position
        Ar << EndOffset;        // record the end position of this shader
        Ar.Seek(EndOffset);     // seek to the end position and continue with the next shader
    }
}
bool UncompressCopyFile(FArchive& Dest, FArchive& Source, const FPakEntry& Entry, uint8*& PersistentBuffer, int64& BufferSize)
{
    if (Entry.UncompressedSize == 0)
    {
        return false;
    }

    int64 WorkingSize = Entry.CompressionBlockSize;
    int32 MaxCompressionBlockSize = FCompression::CompressMemoryBound((ECompressionFlags)Entry.CompressionMethod, WorkingSize);
    WorkingSize += MaxCompressionBlockSize;
    if (BufferSize < WorkingSize)
    {
        PersistentBuffer = (uint8*)FMemory::Realloc(PersistentBuffer, WorkingSize);
        BufferSize = WorkingSize;
    }

    uint8* UncompressedBuffer = PersistentBuffer + MaxCompressionBlockSize;

    for (uint32 BlockIndex=0, BlockIndexNum=Entry.CompressionBlocks.Num(); BlockIndex < BlockIndexNum; ++BlockIndex)
    {
        uint32 CompressedBlockSize = Entry.CompressionBlocks[BlockIndex].CompressedEnd - Entry.CompressionBlocks[BlockIndex].CompressedStart;
        uint32 UncompressedBlockSize = (uint32)FMath::Min<int64>(Entry.UncompressedSize - Entry.CompressionBlockSize*BlockIndex, Entry.CompressionBlockSize);
        Source.Seek(Entry.CompressionBlocks[BlockIndex].CompressedStart);
        uint32 SizeToRead = Entry.bEncrypted ? Align(CompressedBlockSize, FAES::AESBlockSize) : CompressedBlockSize;
        Source.Serialize(PersistentBuffer, SizeToRead);

        if (Entry.bEncrypted)
        {
            FAES::DecryptData(PersistentBuffer, SizeToRead);
        }

        if (!FCompression::UncompressMemory((ECompressionFlags)Entry.CompressionMethod, UncompressedBuffer, UncompressedBlockSize, PersistentBuffer, CompressedBlockSize))
        {
            return false;
        }
        Dest.Serialize(UncompressedBuffer, UncompressedBlockSize);
    }

    return true;
}
static void ReadTimeArray(FArchive &Ar, int NumKeys, TArray<float> &Times, int NumFrames)
{
    guard(ReadTimeArray);

    Times.Empty(NumKeys);
    if (NumKeys <= 1) return;

//  appPrintf("  pos=%4X keys (max=%X)[ ", Ar.Tell(), NumFrames);
    if (NumFrames < 256)
    {
        for (int k = 0; k < NumKeys; k++)
        {
            uint8 v;
            Ar << v;
            Times.Add(v);
//          if (k < 4 || k > NumKeys - 5) appPrintf(" %02X ", v);
//          else if (k == 4) appPrintf("...");
        }
    }
    else
    {
        for (int k = 0; k < NumKeys; k++)
        {
            uint16 v;
            Ar << v;
            Times.Add(v);
//          if (k < 4 || k > NumKeys - 5) appPrintf(" %04X ", v);
//          else if (k == 4) appPrintf("...");
        }
    }
//  appPrintf(" ]\n");

    // align to 4 bytes
    Ar.Seek(Align(Ar.Tell(), 4));

    unguard;
}
//------------------------------------------------------------------------------
bool FStructScriptLoader::LoadStructWithScript(UStruct* DestScriptContainer, FArchive& Ar, bool bAllowDeferredSerialization)
{
    if (!Ar.IsLoading() || !IsPrimed() || GIsDuplicatingClassForReinstancing)
    {
        return false;
    }

    bool const bIsLinkerLoader = Ar.IsPersistent() && (Ar.GetLinker() != nullptr);
    int32 const ScriptEndOffset = ScriptSerializationOffset + SerializedScriptSize;

    // to help us move development forward (and not have to support ancient
    // script code), we define a minimum script version
    bool bSkipScriptSerialization = (Ar.UE4Ver() < VER_MIN_SCRIPTVM_UE4) || (Ar.LicenseeUE4Ver() < VER_MIN_SCRIPTVM_LICENSEEUE4);
#if WITH_EDITOR
    static const FBoolConfigValueHelper SkipByteCodeHelper(TEXT("StructSerialization"), TEXT("SkipByteCodeSerialization"));
    // in editor builds, we're going to regenerate the bytecode anyways, so it
    // is a waste of cycles to try and serialize it in
    bSkipScriptSerialization |= (bool)SkipByteCodeHelper;
#endif // WITH_EDITOR
    bSkipScriptSerialization &= bIsLinkerLoader; // to keep consistent with old UStruct::Serialize() functionality

    if (bSkipScriptSerialization)
    {
        int32 TrackedBufferSize = BytecodeBufferSize;
        BytecodeBufferSize = 0; // temporarily clear so that ClearScriptCode() doesn't leave Class->Script with anything allocated
        ClearScriptCode(DestScriptContainer);
        BytecodeBufferSize = TrackedBufferSize;

        // we have to at least move the archiver forward, so it is positioned
        // where it expects to be (as if we read in the script)
        Ar.Seek(ScriptEndOffset);
        return false;
    }

    bAllowDeferredSerialization &= bIsLinkerLoader;
    if (bAllowDeferredSerialization && ShouldDeferScriptSerialization(Ar))
    {
        ULinkerLoad* Linker = CastChecked<ULinkerLoad>(Ar.GetLinker());
        FDeferredScriptTracker::Get().AddDeferredScriptObject(Linker, DestScriptContainer, *this);

        // we have to at least move the archiver forward, so it is positioned
        // where it expects to be (as if we read in the script)
        Ar.Seek(ScriptEndOffset);
        return false;
    }

    Ar.Seek(ScriptSerializationOffset);
    if (bIsLinkerLoader)
    {
        ULinkerLoad* LinkerLoad = CastChecked<ULinkerLoad>(Ar.GetLinker());

        TArray<uint8> ShaScriptBuffer;
        ShaScriptBuffer.AddUninitialized(SerializedScriptSize);

        Ar.Serialize(ShaScriptBuffer.GetData(), SerializedScriptSize);
        ensure(ScriptEndOffset == Ar.Tell());
        LinkerLoad->UpdateScriptSHAKey(ShaScriptBuffer);

        Ar.Seek(ScriptSerializationOffset);
    }

    DestScriptContainer->Script.Empty(BytecodeBufferSize);
    DestScriptContainer->Script.AddUninitialized(BytecodeBufferSize);

    int32 BytecodeIndex = 0;
    while (BytecodeIndex < BytecodeBufferSize)
    {
        DestScriptContainer->SerializeExpr(BytecodeIndex, Ar);
    }

    ensure(ScriptEndOffset == Ar.Tell());
    checkf(BytecodeIndex == BytecodeBufferSize, TEXT("'%s' script expression-count mismatch; Expected: %i, Got: %i"),
        *DestScriptContainer->GetName(), BytecodeBufferSize, BytecodeIndex);

    if (!GUObjectArray.IsDisregardForGC(DestScriptContainer))
    {
        DestScriptContainer->ScriptObjectReferences.Empty();
        FArchiveScriptReferenceCollector ObjRefCollector(DestScriptContainer->ScriptObjectReferences);

        BytecodeIndex = 0;
        while (BytecodeIndex < BytecodeBufferSize)
        {
            DestScriptContainer->SerializeExpr(BytecodeIndex, ObjRefCollector);
        }
    }

    // success! (we filled the target with serialized script code)
    return true;
}
void UTexture::Serialize(FArchive &Ar)
{
    guard(UTexture::Serialize);
    Super::Serialize(Ar);
#if BIOSHOCK
    TRIBES_HDR(Ar, 0x2E);
    if (Ar.Game == GAME_Bioshock && t3_hdrSV >= 1)
        Ar << CachedBulkDataSize;
    if (Ar.Game == GAME_Bioshock && Format == 12)
        Format = TEXF_DXT5N;    // remap format; note: Bioshock used the 3DC name, but the real format is DXT5N
#endif // BIOSHOCK
#if SWRC
    if (Ar.Game == GAME_RepCommando)
    {
        if (Format == 14) Format = TEXF_CxV8U8;    //?? not verified
    }
#endif // SWRC
#if VANGUARD
    if (Ar.Game == GAME_Vanguard && Ar.ArVer >= 128 && Ar.ArLicenseeVer >= 25)
    {
        // has some table for fast mipmap lookups
        Ar.Seek(Ar.Tell() + 142);    // skip that table
        // serialize mips using AR_INDEX count (this game uses int for array counts in all other places)
        int Count;
        Ar << AR_INDEX(Count);
        Mips.AddDefaulted(Count);
        for (int i = 0; i < Count; i++)
            Ar << Mips[i];
        return;
    }
#endif // VANGUARD
#if AA2
    if (Ar.Game == GAME_AA2 && Ar.ArLicenseeVer >= 8)
    {
        int unk;    // always 10619
        Ar << unk;
    }
#endif // AA2
    Ar << Mips;
    if (Ar.Engine() == GAME_UE1)
    {
        // UE1
        bMasked = false;    // ignored by UE1; surface PolyFlags are used instead (but UE2 ignores PolyFlags ...)
        if (bHasComp)       // skip compressed mipmaps
        {
            TArray<FMipmap> CompMips;
            Ar << CompMips;
        }
    }
#if XIII
    if (Ar.Game == GAME_XIII)
    {
        if (Ar.ArLicenseeVer >= 42)
        {
            // serialize palette
            if (Format == TEXF_P8 || Format == 13)    // 13 == TEXF_P4
            {
                assert(!Palette);
                Palette = new UPalette;
                Ar << Palette->Colors;
            }
        }
        if (Ar.ArLicenseeVer >= 55)
            Ar.Seek(Ar.Tell() + 3);
    }
#endif // XIII
#if EXTEEL
    if (Ar.Game == GAME_Exteel)
    {
        // note: this property is serialized as UObject's property too
        byte MaterialType;    // enum GFMaterialType
        Ar << MaterialType;
    }
#endif // EXTEEL
    unguard;
}
static bool ReadXprFile(const CGameFileInfo *file)
{
    guard(ReadXprFile);

    FArchive *Ar = appCreateFileReader(file);

    int Tag, FileLen, DataStart, DataCount;
    *Ar << Tag << FileLen << DataStart << DataCount;
    //?? "XPR0" - xpr variant with a single object (texture) inside
    if (Tag != BYTES4('X','P','R','1'))
    {
#if XPR_DEBUG
        appPrintf("Unknown XPR tag in %s\n", file->RelativeName);
#endif
        delete Ar;
        return true;
    }
#if XPR_DEBUG
    appPrintf("Scanning %s ...\n", file->RelativeName);
#endif

    XprInfo *Info = new(xprFiles) XprInfo;
    Info->File      = file;
    Info->DataStart = DataStart;
    // read filelist
    int i;
    for (i = 0; i < DataCount; i++)
    {
        int NameOffset, DataOffset;
        *Ar << NameOffset << DataOffset;
        int savePos = Ar->Tell();
        Ar->Seek(NameOffset + 12);
        // read name
        char c, buf[256];
        int n = 0;
        while (true)
        {
            *Ar << c;
            if (n < ARRAY_COUNT(buf))
                buf[n++] = c;
            if (!c) break;
        }
        buf[ARRAY_COUNT(buf)-1] = 0;    // just in case
        // create item
        XprEntry *Entry = new(Info->Items) XprEntry;
        appStrncpyz(Entry->Name, buf, ARRAY_COUNT(Entry->Name));
        Entry->DataOffset = DataOffset + 12;
        assert(Entry->DataOffset < DataStart);
        // seek back
        Ar->Seek(savePos);
        // setup size of previous item
        if (i >= 1)
        {
            XprEntry *PrevEntry = &Info->Items[i - 1];
            PrevEntry->DataSize = Entry->DataOffset - PrevEntry->DataOffset;
        }
        // setup size of the last item
        if (i == DataCount - 1)
            Entry->DataSize = DataStart - Entry->DataOffset;
    }
    // scan data
    // data block is either embedded in this block or followed after DataStart position
    for (i = 0; i < DataCount; i++)
    {
        XprEntry *Entry = &Info->Items[i];
#if XPR_DEBUG
//      appPrintf("  %08X [%08X] %s\n", Entry->DataOffset, Entry->DataSize, Entry->Name);
#endif
        Ar->Seek(Entry->DataOffset);
        int id;
        *Ar << id;
        switch (id)
        {
        case 0x80020001:
            // header is 4 dwords + immediately followed data
            Entry->DataOffset += 4 * 4;
            Entry->DataSize   -= 4 * 4;
            break;
        case 0x00040001:
            // header is 5 dwords + external data
            {
                int pos;
                *Ar << pos;
                Entry->DataOffset = DataStart + pos;
            }
            break;
        case 0x00020001:
            // header is 4 dwords + external data
            {
                int d1, d2, pos;
                *Ar << d1 << d2 << pos;
                Entry->DataOffset = DataStart + pos;
            }
            break;
        default:
            // header is 2 dwords - offset and size + external data
            {
                int pos;
                *Ar << pos;
                Entry->DataOffset = DataStart + pos;
            }
            break;
        }
    }
    // setup sizes of blocks placed after DataStart (not embedded into file list)
    for (i = 0; i < DataCount; i++)
    {
        XprEntry *Entry = &Info->Items[i];
        if (Entry->DataOffset < DataStart)
            continue;    // embedded data
        // Entry points to a data block placed after DataStart position
        // we should find a next block
        int NextPos = FileLen;
        for (int j = i + 1; j < DataCount; j++)
        {
            XprEntry *NextEntry = &Info->Items[j];
            if (NextEntry->DataOffset < DataStart)
                continue;    // embedded data
            NextPos = NextEntry->DataOffset;
            break;
        }
        Entry->DataSize = NextPos - Entry->DataOffset;
    }
#if XPR_DEBUG
    for (i = 0; i < DataCount; i++)
    {
        XprEntry *Entry = &Info->Items[i];
        appPrintf(" %3d %08X [%08X] .. %08X %s\n", i, Entry->DataOffset, Entry->DataSize,
            Entry->DataOffset + Entry->DataSize, Entry->Name);
    }
#endif

    delete Ar;
    return true;

    unguardf("%s", file->RelativeName);
}
/**
 * Serialize function used to serialize this bulk data structure.
 *
 * @param Ar    Archive to serialize with
 * @param Owner Object owning the bulk data
 * @param Idx   Index of bulk data item being serialized
 */
void FUntypedBulkData::Serialize( FArchive& Ar, UObject* Owner, int32 Idx )
{
    check( LockStatus == LOCKSTATUS_Unlocked );

    if (Ar.IsTransacting())
    {
        // Special case for transacting bulk data arrays.

        // Constructing the object during load will save it to the transaction buffer.
        // If it tries to load the bulk data now it will try to break it.
        bool bActuallySave = Ar.IsSaving() && (!Owner || !Owner->HasAnyFlags(RF_NeedLoad));

        Ar << bActuallySave;

        if (bActuallySave)
        {
            if (Ar.IsLoading())
            {
                // Flags for bulk data.
                Ar << BulkDataFlags;
                // Number of elements in array.
                Ar << ElementCount;
                // Allocate bulk data.
                check(bShouldFreeOnEmpty);
                BulkData = FMemory::Realloc( BulkData, GetBulkDataSize() );
                // Deserialize bulk data.
                SerializeBulkData( Ar, BulkData );
            }
            else if (Ar.IsSaving())
            {
                // Flags for bulk data.
                Ar << BulkDataFlags;
                // Number of elements in array.
                Ar << ElementCount;

                // Don't attempt to load or serialize BulkData if the current size is 0.
                // This could be a newly constructed BulkData that has not yet been loaded,
                // and allocating 0 bytes now will cause a crash when we load.
                if (GetBulkDataSize() > 0)
                {
                    // Make sure bulk data is loaded.
                    MakeSureBulkDataIsLoaded();
                    // Serialize bulk data.
                    SerializeBulkData(Ar, BulkData);
                }
            }
        }
    }
    else if ( Ar.IsPersistent() && !Ar.IsObjectReferenceCollector() && !Ar.ShouldSkipBulkData() )
    {
#if TRACK_BULKDATA_USE
        FThreadSafeBulkDataToObjectMap::Get().Add( this, Owner );
#endif
        // Offset where the bulkdata flags are stored
        int64 SavedBulkDataFlagsPos = Ar.Tell();
        Ar << BulkDataFlags;

        // Number of elements in array.
        Ar << ElementCount;

        // We're loading from the persistent archive.
        if (Ar.IsLoading())
        {
            Filename = TEXT("");

            // @todo when Landscape (and others?) only Lock/Unlock once, we can enable this
            if (false) // FPlatformProperties::RequiresCookedData())
            {
                // Bulk data that is being serialized via seekfree loading is single use only. This allows us
                // to free the memory as e.g. the bulk data won't be attached to an archive in the case of
                // seek free loading.
                BulkDataFlags |= BULKDATA_SingleUse;
            }

            // Size on disk, which in the case of compression is != GetBulkDataSize()
            Ar << BulkDataSizeOnDisk;
            Ar << BulkDataOffsetInFile;

            // fix up the file offset
            if (Owner != NULL && Owner->GetLinker())
            {
                BulkDataOffsetInFile += Owner->GetLinker()->Summary.BulkDataStartOffset;
            }

            // determine whether the payload is stored inline or at the end of the file
            bool bPayloadInline = !(BulkDataFlags & BULKDATA_PayloadAtEndOfFile);

//          check( (bPayloadInline && BulkDataOffsetInFile == Ar.Tell()) ||
//                 (!bPayloadInline && BulkDataOffsetInFile > Ar.Tell()));

            // We're allowing deferred serialization.
            if (Ar.IsAllowingLazyLoading() && Owner != NULL)
            {
                Linker = Owner->GetLinker();
#if WITH_EDITOR
                check(Linker);
                Ar.AttachBulkData( Owner, this );
                AttachedAr = &Ar;
#else
                check(Linker.IsValid());
                Filename = Linker->Filename;
#endif // WITH_EDITOR
                // only skip over payload, if it's stored inline
                if (bPayloadInline)
                {
                    Ar.Seek( Ar.Tell() + BulkDataSizeOnDisk );
                }
            }
            // Serialize the bulk data right away.
            else
            {
                // memory for bulk data can come from preallocated GPU-accessible resource memory or default to system memory
                BulkData = GetBulkDataResourceMemory(Owner, Idx);
                if (!BulkData)
                {
                    BulkData = FMemory::Realloc( BulkData, GetBulkDataSize() );
                }

                if (bPayloadInline)
                {
                    // if the payload is stored inline, just serialize it
                    SerializeBulkData( Ar, BulkData );
                }
                else
                {
                    // if the payload is NOT stored inline ...

                    // store the current file offset
                    int64 CurOffset = Ar.Tell();
                    // seek to the location in the file where the payload is stored
                    Ar.Seek(BulkDataOffsetInFile);
                    // serialize the payload
                    SerializeBulkData( Ar, BulkData );
                    // seek to the location we came from
                    Ar.Seek(CurOffset);
                }
            }
        }
        // We're saving to the persistent archive.
        else if (Ar.IsSaving())
        {
            // check if we save the package compressed
            UPackage* Pkg = Owner ? dynamic_cast<UPackage*>(Owner->GetOutermost()) : nullptr;
            if (Pkg && !!(Pkg->PackageFlags & PKG_StoreCompressed))
            {
                ECompressionFlags BaseCompressionMethod = COMPRESS_Default;
                if (Ar.IsCooking())
                {
                    BaseCompressionMethod = Ar.CookingTarget()->GetBaseCompressionMethod();
                }
                StoreCompressedOnDisk(BaseCompressionMethod);
            }

            // Remove single element serialization requirement before saving out bulk data flags.
            BulkDataFlags &= ~BULKDATA_ForceSingleElementSerialization;

            // Make sure bulk data is loaded.
            MakeSureBulkDataIsLoaded();

            // Only serialize status information if wanted.
            int64 SavedBulkDataSizeOnDiskPos   = INDEX_NONE;
            int64 SavedBulkDataOffsetInFilePos = INDEX_NONE;

            // Keep track of position we are going to serialize placeholder BulkDataSizeOnDisk.
            SavedBulkDataSizeOnDiskPos = Ar.Tell();
            BulkDataSizeOnDisk = INDEX_NONE;
            // And serialize the placeholder which is going to be overwritten later.
            Ar << BulkDataSizeOnDisk;

            // Keep track of position we are going to serialize placeholder BulkDataOffsetInFile.
            SavedBulkDataOffsetInFilePos = Ar.Tell();
            BulkDataOffsetInFile = INDEX_NONE;
            // And serialize the placeholder which is going to be overwritten later.
            Ar << BulkDataOffsetInFile;

            // try to get the linkersave object
            ULinkerSave* LinkerSave = dynamic_cast<ULinkerSave*>(Ar.GetLinker());

            // determine whether we are going to store the payload inline or not.
            bool bStoreInline = !!(BulkDataFlags & BULKDATA_ForceInlinePayload) || LinkerSave == NULL;

            if (!bStoreInline)
            {
                // set the flag indicating where the payload is stored
                BulkDataFlags |= BULKDATA_PayloadAtEndOfFile;

                // with no LinkerSave we have to store the data inline
                check(LinkerSave != NULL);

                // add the bulkdata storage info object to the linkersave
                int32 Index = LinkerSave->BulkDataToAppend.AddZeroed(1);
                ULinkerSave::FBulkDataStorageInfo& BulkStore = LinkerSave->BulkDataToAppend[Index];

                BulkStore.BulkDataOffsetInFilePos = SavedBulkDataOffsetInFilePos;
                BulkStore.BulkDataSizeOnDiskPos = SavedBulkDataSizeOnDiskPos;
                BulkStore.BulkData = this;

                // Serialize bulk data into the storage info
                BulkDataSizeOnDisk = -1;
            }
            else
            {
                // set the flag indicating where the payload is stored
                BulkDataFlags &= ~BULKDATA_PayloadAtEndOfFile;

                int64 SavedBulkDataStartPos = Ar.Tell();

                // Serialize bulk data.
                SerializeBulkData( Ar, BulkData );

                // store the payload endpos
                int64 SavedBulkDataEndPos = Ar.Tell();

                checkf(SavedBulkDataStartPos >= 0 && SavedBulkDataEndPos >= 0,
                    TEXT("Bad archive positions for bulkdata. StartPos=%d EndPos=%d"),
                    SavedBulkDataStartPos, SavedBulkDataEndPos);

                BulkDataSizeOnDisk   = SavedBulkDataEndPos - SavedBulkDataStartPos;
                BulkDataOffsetInFile = SavedBulkDataStartPos;
            }

            // store current file offset before seeking back
            int64 CurrentFileOffset = Ar.Tell();

            // Seek back and overwrite the flags
            Ar.Seek(SavedBulkDataFlagsPos);
            Ar << BulkDataFlags;

            // Seek back and overwrite placeholder for BulkDataSizeOnDisk
            Ar.Seek( SavedBulkDataSizeOnDiskPos );
            Ar << BulkDataSizeOnDisk;

            // Seek back and overwrite placeholder for BulkDataOffsetInFile
            Ar.Seek( SavedBulkDataOffsetInFilePos );
            Ar << BulkDataOffsetInFile;

            // Seek to the end of written data so we don't clobber any data in subsequent write operations
            Ar.Seek(CurrentFileOffset);
        }
    }
}
void Serialize(int64 DesiredPosition, void* V, int64 Length)
{
    const int32 CompressionBlockSize = PakEntry.CompressionBlockSize;
    uint32 CompressionBlockIndex = DesiredPosition / CompressionBlockSize;
    uint8* WorkingBuffers[2];
    int64 DirectCopyStart = DesiredPosition % PakEntry.CompressionBlockSize;
    FAsyncTask<FPakUncompressTask> UncompressTask;
    FCompressionScratchBuffers& ScratchSpace = FCompressionScratchBuffers::Get();
    bool bStartedUncompress = false;

    int64 WorkingBufferRequiredSize = FCompression::CompressMemoryBound((ECompressionFlags)PakEntry.CompressionMethod, CompressionBlockSize);
    WorkingBufferRequiredSize = EncryptionPolicy::AlignReadRequest(WorkingBufferRequiredSize);
    ScratchSpace.EnsureBufferSpace(CompressionBlockSize, WorkingBufferRequiredSize * 2);
    WorkingBuffers[0] = ScratchSpace.ScratchBuffer;
    WorkingBuffers[1] = ScratchSpace.ScratchBuffer + WorkingBufferRequiredSize;

    while (Length > 0)
    {
        const FPakCompressedBlock& Block = PakEntry.CompressionBlocks[CompressionBlockIndex];
        int64 Pos = CompressionBlockIndex * CompressionBlockSize;
        int64 CompressedBlockSize = Block.CompressedEnd - Block.CompressedStart;
        int64 UncompressedBlockSize = FMath::Min<int64>(PakEntry.UncompressedSize - Pos, PakEntry.CompressionBlockSize);
        int64 ReadSize = EncryptionPolicy::AlignReadRequest(CompressedBlockSize);
        int64 WriteSize = FMath::Min<int64>(UncompressedBlockSize - DirectCopyStart, Length);
        PakReader->Seek(Block.CompressedStart);
        PakReader->Serialize(WorkingBuffers[CompressionBlockIndex & 1], ReadSize);
        if (bStartedUncompress)
        {
            UncompressTask.EnsureCompletion();
            bStartedUncompress = false;
        }

        FPakUncompressTask& TaskDetails = UncompressTask.GetTask();
        if (DirectCopyStart == 0 && Length >= CompressionBlockSize)
        {
            // Block can be decompressed directly into output buffer
            TaskDetails.Flags = (ECompressionFlags)PakEntry.CompressionMethod;
            TaskDetails.UncompressedBuffer = (uint8*)V;
            TaskDetails.UncompressedSize = UncompressedBlockSize;
            TaskDetails.CompressedBuffer = WorkingBuffers[CompressionBlockIndex & 1];
            TaskDetails.CompressedSize = CompressedBlockSize;
            TaskDetails.CopyOut = nullptr;
        }
        else
        {
            // Block needs to be copied from a working buffer
            TaskDetails.Flags = (ECompressionFlags)PakEntry.CompressionMethod;
            TaskDetails.UncompressedBuffer = (uint8*)ScratchSpace.TempBuffer;
            TaskDetails.UncompressedSize = UncompressedBlockSize;
            TaskDetails.CompressedBuffer = WorkingBuffers[CompressionBlockIndex & 1];
            TaskDetails.CompressedSize = CompressedBlockSize;
            TaskDetails.CopyOut = V;
            TaskDetails.CopyOffset = DirectCopyStart;
            TaskDetails.CopyLength = WriteSize;
        }

        if (Length == WriteSize)
        {
            UncompressTask.StartSynchronousTask();
        }
        else
        {
            UncompressTask.StartBackgroundTask();
        }
        bStartedUncompress = true;
        V = (void*)((uint8*)V + WriteSize);
        Length -= WriteSize;
        DirectCopyStart = 0;
        ++CompressionBlockIndex;
    }

    if (bStartedUncompress)
    {
        UncompressTask.EnsureCompletion();
    }
}
void USetupDefinition::ProcessCopy( FString Key, FString Value, UBOOL Selected, FInstallPoll* Poll )
{
    guard(USetupDefinition::ProcessCopy);
    BYTE Buffer[4096];
    if( Selected && Key==TEXT("File") )
    {
        // Get source and dest filenames.
        FFileInfo Info(*Value);
        if( Info.Lang==TEXT("") || Info.Lang==UObject::GetLanguage() )
        {
            if( Info.Dest==TEXT("") )
                Info.Dest = Info.Src;
            if( !LocateSourceFile(Info.Src) )
                LocalizedFileError( TEXT("MissingInstallerFile"), Patch ? TEXT("AdviseBadDownload") : TEXT("AdviseBadMedia"), *Info.Src );
            FString FullDest  = DestPath * Info.Dest;
            FString FullSrc   = Info.Ref==TEXT("") ? Info.Src : GetFullRef(*Info.Ref);
            FString FullPatch = FullDest + TEXT("_tmp");

            // Update uninstallation log.
            UninstallLogAdd( TEXT("File"), *Info.Dest, 0, 1 );

            // Make destination directory.
            if( !GFileManager->MakeDirectory( *BasePath(FullDest), 1 ) )
                LocalizedFileError( TEXT("FailedMakeDir"), TEXT("AdviseBadDest"), *FullDest );

            // Status display.
            if( !Poll->Poll(*FullDest,0,0,RunningBytes,TotalBytes) )
                DidCancel();

            // Copy SrcAr -> DestAr.
            INT CalcOldCRC = 0;
            guard(CopyFile);
            FString ThisDest = Info.Ref==TEXT("") ? FullDest : FullPatch;
            debugf( TEXT("Copying %s to %s"), *FullSrc, *ThisDest);
            FArchive* SrcAr = GFileManager->CreateFileReader( *FullSrc );
            if( !SrcAr )
                LocalizedFileError( TEXT("FailedOpenSource"), Patch ? TEXT("AdviseBadDownload") : TEXT("AdviseBadMedia"), *FullSrc );
            INT Size = SrcAr->TotalSize();
            FArchive* DestAr = GFileManager->CreateFileWriter( *ThisDest, FILEWRITE_EvenIfReadOnly );
            if( !DestAr )
                LocalizedFileError( TEXT("FailedOpenDest"), TEXT("AdviseBadDest"), *ThisDest );
            if( FullSrc.Right(3).Caps() == TEXT(".UZ") && ThisDest.Right(3).Caps() != TEXT(".UZ") )
            {
                INT Signature;
                FString OrigFilename;
                *SrcAr << Signature;
                if( Signature != 5678 )
                    LocalizedFileError( TEXT("FailedOpenSource"), TEXT("AdviseBadMedia"), *FullSrc );
                else
                {
                    *SrcAr << OrigFilename;
                    FCodecFull Codec;
                    Codec.AddCodec(new FCodecRLE);
                    Codec.AddCodec(new FCodecBWT);
                    Codec.AddCodec(new FCodecMTF);
                    Codec.AddCodec(new FCodecRLE);
                    Codec.AddCodec(new FCodecHuffman);
                    Codec.Decode( *SrcAr, *DestAr );
                    if( !Poll->Poll(*FullDest,Size,Size,RunningBytes+=Size,TotalBytes) )
                    {
                        delete SrcAr;
                        delete DestAr;
                        DidCancel();
                    }
                }
            }
            else
            {
                for( SQWORD Pos=0; Pos<Size; Pos+=sizeof(Buffer) )
                {
                    INT Count = Min( Size-Pos, (SQWORD)sizeof(Buffer) );
                    SrcAr->Serialize( Buffer, Count );
                    if( SrcAr->IsError() )
                    {
                        delete SrcAr;
                        delete DestAr;
                        LocalizedFileError( TEXT("FailedReadingSource"), Patch ? TEXT("AdviseBadDownload") : TEXT("AdviseBadMedia"), *FullSrc );
                    }
                    if( Info.Ref!=TEXT("") )
                    {
                        CalcOldCRC = appMemCrc( Buffer, Count, CalcOldCRC );
                    }
                    DestAr->Serialize( Buffer, Count );
                    if( DestAr->IsError() )
                    {
                        delete SrcAr;
                        delete DestAr;
                        LocalizedFileError( TEXT("FailedWritingDest"), TEXT("AdviseBadDest"), *ThisDest );
                    }
                    if( !Poll->Poll(*FullDest,Pos,Size,RunningBytes+=Count,TotalBytes) )
                    {
                        delete SrcAr;
                        delete DestAr;
                        DidCancel();
                    }
                }
            }
            delete SrcAr;
            if( !DestAr->Close() )
                LocalizedFileError( TEXT("FailedClosingDest"), TEXT("AdviseBadDest"), *ThisDest );
            delete DestAr;
            unguard;

            // Patch SrcAr + DeltaFile -> DestAr.
            if( Info.Ref!=TEXT("") )
            {
                guard(PatchFile);
                BYTE Buffer[4096];

                // Open files.
                FString ThisSrc = FullPatch;
                FArchive* SrcAr = GFileManager->CreateFileReader( *ThisSrc );
                if( !SrcAr )
                    LocalizedFileError( TEXT("FailedOpenSource"), Patch ? TEXT("AdviseBadDownload") : TEXT("AdviseBadMedia"), *ThisSrc );
                INT Size = SrcAr->TotalSize();
                FArchive* DestAr = GFileManager->CreateFileWriter(*FullDest,FILEWRITE_EvenIfReadOnly);
                if( !DestAr )
                    LocalizedFileError( TEXT("FailedOpenDest"), TEXT("AdviseBadDest"), *FullDest );

                // Load delta file.
                TArray<BYTE> Delta;
                FString DeltaName = Info.Src;
                if( !appLoadFileToArray( Delta, *DeltaName ) )
                    LocalizedFileError( TEXT("FailedLoadingUpdate"), TEXT("AdviseBadDownload"), *Info.Src );
                debugf( TEXT("Patching %s to %s with %s"), *ThisSrc, *FullDest, *DeltaName );

                // Decompress variables.
                INT PrevSpot=0, CountSize=0, CRC=0;
                INT Magic=0, OldSize=0, OldCRC=0, NewSize=0, NewCRC;
                FBufferReader Reader( Delta );
                Reader << Magic << OldSize << OldCRC << NewSize << NewCRC;

                // Validate.
                if( Magic!=0x92f92912 )
                    appErrorf( LineFormat(LocalizeError("PatchCorrupt")), *DeltaName, LocalizeError("AdviseBadDownload") );
                if( OldSize!=Size || OldCRC!=CalcOldCRC )
                    appErrorf( LocalizeError("CdFileMismatch"), *Info.Ref, *LocalProduct );

                // Delta decode it.
                INT OldCountSize=0;
                while( !Reader.AtEnd() )
                {
                    INT Index;
                    Reader << AR_INDEX(Index);
                    if( Index<0 )
                    {
                        CRC = appMemCrc( &Delta(Reader.Tell()), -Index, CRC );
                        DestAr->Serialize( &Delta(Reader.Tell()), -Index );
                        if( DestAr->IsError() )
                            LocalizedFileError( TEXT("FailedWritingDest"), TEXT("AdviseBadDest"), *FullDest );
                        Reader.Seek( Reader.Tell() - Index );
                        CountSize -= Index;
                    }
                    else
                    {
                        INT CopyPos;
                        Reader << AR_INDEX(CopyPos);
                        CopyPos += PrevSpot;
                        check(CopyPos>=0);
                        check(CopyPos+Index<=Size);
                        SrcAr->Seek( CopyPos );
                        for( INT Base=Index; Base>0; Base-=sizeof(Buffer) )
                        {
                            INT Move = Min(Base,(INT)sizeof(Buffer));
                            SrcAr->Serialize( Buffer, Move );
                            if( SrcAr->IsError() )
                                LocalizedFileError( TEXT("FailedReadingSource"), Patch ? TEXT("AdviseBadDownload") : TEXT("AdviseBadMedia"), *ThisSrc );
                            CRC = appMemCrc( Buffer, Move, CRC );
                            DestAr->Serialize( Buffer, Move );
                            if( DestAr->IsError() )
                                LocalizedFileError( TEXT("FailedWritingDest"), TEXT("AdviseBadDest"), *FullDest );
                        }
                        CountSize += Index;
                        PrevSpot = CopyPos + Index;
                    }
                    if( ((CountSize^OldCountSize)&~(sizeof(Buffer)-1)) || Reader.AtEnd() )
                    {
                        if( !Poll->Poll(*FullDest,CountSize,Info.Size,RunningBytes+=(CountSize-OldCountSize),TotalBytes) )
                        {
                            delete SrcAr;
                            delete DestAr;
                            DidCancel();
                        }
                        OldCountSize = CountSize;
                    }
                }
                if( NewSize!=CountSize || NewCRC!=CRC )
                    appErrorf( LineFormat(LocalizeError("PatchCorrupt")), *DeltaName, LocalizeError("AdviseBadDownload") );
                delete SrcAr;
                if( !DestAr->Close() )
                    LocalizedFileError( TEXT("FailedClosingDest"), TEXT("AdviseBadDest"), *FullDest );
                delete DestAr;
                GFileManager->Delete( *ThisSrc );
                unguard;
            }
        }
    }
    unguard;
}
void FShaderCache::Load(FArchive& Ar)
{
    Ar << m_nPlatform;

    UINT NumShaderBuilderCRC = 0;
    Ar << NumShaderBuilderCRC;
    for(UINT IndexBuilder = 0; IndexBuilder < NumShaderBuilderCRC; ++IndexBuilder)
    {
        FShaderBuilder* ShaderBuilder = NULL;
        Ar << ShaderBuilder;
        DWORD CRC = 0;
        Ar << CRC;

        if( ShaderBuilder )
        {
            m_mapShaderBuilderCRC[ShaderBuilder] = CRC;
        }
    }

    // serialize the global shaders
    UINT NumShaders = 0;
    UINT NumDesertedShaders = 0;
    UINT NumRedundantShaders = 0;
    vector<FString> OutdatedShaderBuilders;
    Ar << NumShaders;
    for(UINT IndexShader = 0; IndexShader < NumShaders; ++IndexShader)
    {
        FShaderBuilder* ShaderBuilder = NULL;
        FGuid ShaderId;
        Ar << ShaderBuilder << ShaderId;

        INT SkipOffset = 0;
        Ar << SkipOffset;

        if( !ShaderBuilder )
        {
            ++NumDesertedShaders;
            Ar.Seek(SkipOffset);    // this shader builder doesn't exist any more, skip the shader
        }
        else
        {
            DWORD CurrentCRC = 0;
            DWORD SavedCRC = 0;

            // Compare the current CRC against the saved one to detect whether the source has changed
            CurrentCRC = ShaderBuilder->GetSourceCRC();
            map<FShaderBuilder*, DWORD>::const_iterator it = m_mapShaderBuilderCRC.find(ShaderBuilder);
            if( it != m_mapShaderBuilderCRC.end() )
            {
                SavedCRC = it->second;
            }

            FShader* Shader = ShaderBuilder->FindShaderById(ShaderId);
            if( Shader )
            {
                ++NumRedundantShaders;
                Ar.Seek(SkipOffset);    // already exists, skip it
            }
            else if( SavedCRC != CurrentCRC )
            {
                ++NumDesertedShaders;
                Ar.Seek(SkipOffset);
                if( SavedCRC != 0 )     // the shader builder exists, but its source has changed
                {
                    OutdatedShaderBuilders.push_back(ShaderBuilder->GetShaderName());
                }
            }
            else
            {
                // the shader is compatible, create it
                Shader = ShaderBuilder->ConstructSerialization();
                UBOOL bShaderHasOutdatedParameters = Shader->Serialize(Ar);
                if( bShaderHasOutdatedParameters )
                {
                    ShaderBuilder->UnregisterShader(Shader);
                    delete Shader;
                }
                check(Ar.Tell() == SkipOffset);
            }
        }
    }

    if( OutdatedShaderBuilders.size() > 0 )
    {
        debugf(TEXT("Skip %d outdated FShaderBuilder"), OutdatedShaderBuilders.size());
        for(UINT IndexBuilder = 0; IndexBuilder < OutdatedShaderBuilders.size(); ++IndexBuilder)
        {
            debugf(TEXT(" %s"), OutdatedShaderBuilders.at(IndexBuilder).c_str());
        }
    }

    if( NumShaders > 0 )
    {
        debugf(TEXT("Loaded %d shaders (%d deserted, %d redundant)"), NumShaders, NumDesertedShaders, NumRedundantShaders);
    }
}
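// FShaderCache::Save and FShaderCache::Load above are the two halves of a
// skip-offset scheme: the writer reserves an INT, serializes the record, then
// seeks back and patches in the end position; the reader can then Seek() past
// any record it cannot (or need not) deserialize. A minimal sketch of the same
// idea with hypothetical names (FRecord, WriteRecord and ReadRecord are
// illustrative, not part of the cache code above):
struct FRecord
{
    INT Value;
    void Serialize(FArchive& Ar) { Ar << Value; }
};

void WriteRecord(FArchive& Ar, FRecord& Record)
{
    INT SkipOffset = Ar.Tell();
    Ar << SkipOffset;              // placeholder, patched below
    Record.Serialize(Ar);
    INT EndOffset = Ar.Tell();
    Ar.Seek(SkipOffset);
    Ar << EndOffset;               // patch in the real end position
    Ar.Seek(EndOffset);
}

void ReadRecord(FArchive& Ar, FRecord* Record)
{
    INT SkipOffset = 0;
    Ar << SkipOffset;
    if (!Record)
    {
        Ar.Seek(SkipOffset);       // unknown or unwanted record: skip it wholesale
        return;
    }
    Record->Serialize(Ar);
    check(Ar.Tell() == SkipOffset);
}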
void FProfilerClientManager::LoadCapture( const FString& DataFilepath, const FGuid& ProfileId )
{
#if STATS
    // start an async load
    LoadConnection = &Connections.FindOrAdd(ProfileId);
    LoadConnection->InstanceId = ProfileId;
    LoadConnection->MetaData.CriticalSection = &LoadConnection->CriticalSection;
    LoadConnection->MetaData.SecondsPerCycle = FPlatformTime::GetSecondsPerCycle(); // fix this by adding a message which specifies this

    const int64 Size = IFileManager::Get().FileSize( *DataFilepath );
    if( Size < 4 )
    {
        UE_LOG( LogProfile, Error, TEXT( "Could not open: %s" ), *DataFilepath );
        return;
    }

#if PROFILER_THREADED_LOAD
    FArchive* FileReader = IFileManager::Get().CreateFileReader(*DataFilepath);
#else
    FileReader = IFileManager::Get().CreateFileReader( *DataFilepath );
#endif

    if( !FileReader )
    {
        UE_LOG( LogProfile, Error, TEXT( "Could not open: %s" ), *DataFilepath );
        return;
    }

    if( !LoadConnection->Stream.ReadHeader( *FileReader ) )
    {
        UE_LOG( LogProfile, Error, TEXT( "Could not open, bad magic: %s" ), *DataFilepath );
        delete FileReader;
        return;
    }

    // This shouldn't happen.
    if( LoadConnection->Stream.Header.bRawStatsFile )
    {
        delete FileReader;
        return;
    }

    const bool bIsFinalized = LoadConnection->Stream.Header.IsFinalized();
    if( bIsFinalized )
    {
        // Read metadata.
        TArray<FStatMessage> MetadataMessages;
        LoadConnection->Stream.ReadFNamesAndMetadataMessages( *FileReader, MetadataMessages );
        LoadConnection->CurrentThreadState.ProcessMetaDataOnly( MetadataMessages );

        // Read frames offsets.
        LoadConnection->Stream.ReadFramesOffsets( *FileReader );

        FileReader->Seek( LoadConnection->Stream.FramesInfo[0].FrameFileOffset );
    }

    if( LoadConnection->Stream.Header.HasCompressedData() )
    {
        UE_CLOG( !bIsFinalized, LogProfile, Fatal, TEXT( "Compressed stats file has to be finalized" ) );
    }

#if PROFILER_THREADED_LOAD
    LoadTask = new FAsyncTask<FAsyncReadWorker>(LoadConnection, FileReader);
    LoadTask->StartBackgroundTask();
#endif

    RetryTime = 0.05f;
    TickDelegateHandle = FTicker::GetCoreTicker().AddTicker(TickDelegate, RetryTime);
    ProfilerLoadStartedDelegate.Broadcast(ProfileId);
#endif
}
void UNavCollision::Serialize(FArchive& Ar)
{
    Super::Serialize(Ar);

    const int32 VerInitial = 1;
    const int32 VerAreaClass = 2;
    const int32 VerConvexTransforms = 3;
    const int32 VerLatest = VerConvexTransforms;

    // use magic number to determine if serialized stream has version :/
    const int32 MagicNum = 0xA237F237;
    int64 StreamStartPos = Ar.Tell();

    int32 Version = VerLatest;
    int32 MyMagicNum = MagicNum;
    Ar << MyMagicNum;

    if (MyMagicNum != MagicNum)
    {
        Version = VerInitial;
        Ar.Seek(StreamStartPos);
    }
    else
    {
        Ar << Version;
    }

    // loading a dummy GUID to have serialization not break on
    // packages serialized before switching over UNavCollision to
    // use BodySetup's guid rather than its own one
    // motivation: not creating a new engine version
    // @NOTE could be addressed during next engine version bump
    FGuid Guid;
    Ar << Guid;

    bool bCooked = Ar.IsCooking();
    Ar << bCooked;

    if (FPlatformProperties::RequiresCookedData() && !bCooked && Ar.IsLoading())
    {
        UE_LOG(LogNavigation, Fatal, TEXT("This platform requires cooked packages, and NavCollision data was not cooked into %s."), *GetFullName());
    }

    if (bCooked && ShouldUseConvexCollision())
    {
        if (Ar.IsCooking())
        {
            FName Format = NAVCOLLISION_FORMAT;
            GetCookedData(Format); // Get the data from the DDC or build it
            TArray<FName> ActualFormatsToSave;
            ActualFormatsToSave.Add(Format);
            CookedFormatData.Serialize(Ar, this, &ActualFormatsToSave);
        }
        else
        {
            CookedFormatData.Serialize(Ar, this);
        }
    }

    if (Version >= VerAreaClass)
    {
        Ar << AreaClass;
    }

    if (Version < VerConvexTransforms && Ar.IsLoading() && GIsEditor)
    {
        bForceGeometryRebuild = true;
    }
}
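// The version-sniffing trick above generalizes to any "peek and rewind" read:
// remember Tell(), read a sentinel, and Seek() back if it doesn't match, so
// older streams that lack the field still parse. A minimal load-side sketch
// (ReadOptionalVersion is illustrative; the sentinel value is reused from the
// function above purely as an example):
bool ReadOptionalVersion(FArchive& Ar, int32& OutVersion)
{
    const int32 MagicNum = 0xA237F237;    // sentinel marking a versioned stream
    const int64 StartPos = Ar.Tell();
    int32 MaybeMagic = 0;
    Ar << MaybeMagic;
    if (MaybeMagic != MagicNum)
    {
        Ar.Seek(StartPos);                // old stream: rewind and assume the initial version
        OutVersion = 1;
        return false;
    }
    Ar << OutVersion;                     // versioned stream: the real version follows the magic
    return true;
}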
bool FFileTests::RunTest( const FString& Parameters )
{
    // Disabled for now, pending changes to make all platforms consistent.
    return true;

    const FString TempFilename = FPaths::EngineSavedDir() / FGuid::NewGuid().ToString();
    TArray<uint8> TestData;
    TArray<uint8> ReadData;
    uint8 One = 1;
    FArchive* TestFile = nullptr;

    // We will test the platform abstraction class, IFileManager
    IFileManager& FileManager = IFileManager::Get();

    // Ensure we always start without the test file existing
    FileManager.Delete(*TempFilename);
    check(FileManager.FileExists(*TempFilename) == false);

    // Check a new file can be created
    TestData.AddZeroed(64);
    TestFile = FileManager.CreateFileWriter(*TempFilename, 0);
    check(TestFile != nullptr);
    TestFile->Serialize(TestData.GetData(), TestData.Num());
    delete TestFile;

    // Confirm same data
    check(FFileHelper::LoadFileToArray(ReadData, *TempFilename));
    if (ReadData != TestData)
    {
        return false;
    }

    // Using the append flag should open the file, and writing data immediately should append to the end.
    // We should also be capable of seeking and writing.
    TestData.Add(One);
    TestData[10] = One;
    TestFile = FileManager.CreateFileWriter(*TempFilename, EFileWrite::FILEWRITE_Append);
    check(TestFile != nullptr);
    TestFile->Serialize(&One, 1);
    TestFile->Seek(10);
    TestFile->Serialize(&One, 1);
    delete TestFile;

    // Confirm same data
    check(FFileHelper::LoadFileToArray(ReadData, *TempFilename));
    if (ReadData != TestData)
    {
        return false;
    }

    // No flags should clobber the existing file
    TestData.Empty();
    TestData.Add(One);
    TestFile = FileManager.CreateFileWriter(*TempFilename, 0);
    check(TestFile != nullptr);
    TestFile->Serialize(&One, 1);
    delete TestFile;

    // Confirm same data
    check(FFileHelper::LoadFileToArray(ReadData, *TempFilename));
    if (ReadData != TestData)
    {
        return false;
    }

    // Delete temp file
    FileManager.Delete(*TempFilename);
    return true;
}
bool FBuildPatchFileConstructor::ConstructFileFromChunks( const FString& Filename, bool bResumeExisting )
{
    const bool bIsFileData = BuildManifest->IsFileDataManifest();
    bResumeExisting = bResumeExisting && !bIsFileData;
    bool bSuccess = true;
    FString ErrorString;
    FString NewFilename = StagingDirectory / Filename;

    // Calculate the hash as we write the data
    FSHA1 HashState;
    FSHAHashData HashValue;

    // First make sure we can get the file manifest
    const FFileManifestData* FileManifest = BuildManifest->GetFileManifest(Filename);
    bSuccess = FileManifest != nullptr;
    if( bSuccess )
    {
        if( !FileManifest->SymlinkTarget.IsEmpty() )
        {
#if PLATFORM_MAC
            bSuccess = symlink(TCHAR_TO_UTF8(*FileManifest->SymlinkTarget), TCHAR_TO_UTF8(*NewFilename)) == 0;
#else
            const bool bSymlinkNotImplemented = false;
            check(bSymlinkNotImplemented);
            bSuccess = false;
#endif
            return bSuccess;
        }

        // Check for resuming of existing file
        int64 StartPosition = 0;
        int32 StartChunkPart = 0;
        if( bResumeExisting )
        {
            // We have to read in the existing file so that the hash check can still be done.
            FArchive* NewFileReader = IFileManager::Get().CreateFileReader( *NewFilename );
            if( NewFileReader != NULL )
            {
                // Read buffer
                uint8* ReadBuffer = new uint8[ FBuildPatchData::ChunkDataSize ];

                // Reuse a certain amount of the file
                StartPosition = FMath::Max<int64>( 0, NewFileReader->TotalSize() - NUM_BYTES_RESUME_IGNORE );

                // We'll also find the correct chunkpart to start writing from
                int64 ByteCounter = 0;
                for( int32 ChunkPartIdx = StartChunkPart; ChunkPartIdx < FileManifest->FileChunkParts.Num() && !FBuildPatchInstallError::HasFatalError(); ++ChunkPartIdx )
                {
                    const FChunkPartData& ChunkPart = FileManifest->FileChunkParts[ ChunkPartIdx ];
                    const int64 NextBytePosition = ByteCounter + ChunkPart.Size;
                    if( NextBytePosition <= StartPosition )
                    {
                        // Read data for hash check
                        NewFileReader->Serialize( ReadBuffer, ChunkPart.Size );
                        HashState.Update( ReadBuffer, ChunkPart.Size );

                        // Count bytes read from file
                        ByteCounter = NextBytePosition;

                        // Set to resume from next chunk part
                        StartChunkPart = ChunkPartIdx + 1;

                        // Inform the chunk cache of the chunk part skip
                        FBuildPatchChunkCache::Get().SkipChunkPart( ChunkPart );

                        // Wait if paused
                        BuildProgress->WaitWhilePaused();
                    }
                    else
                    {
                        // No more parts on disk
                        break;
                    }
                }

                // Set start position to the byte we got up to
                StartPosition = ByteCounter;

                // Clean read buffer
                delete[] ReadBuffer;

                // Close file
                NewFileReader->Close();
                delete NewFileReader;
            }
        }

        // Now we can make sure the chunk cache knows to start downloading chunks
        if( !bIsFileData && !bIsDownloadStarted && !FBuildPatchInstallError::HasFatalError() )
        {
            bIsDownloadStarted = true;
            FBuildPatchChunkCache::Get().BeginDownloads();
        }

        // Attempt to create the file
        FArchive* NewFile = IFileManager::Get().CreateFileWriter( *NewFilename, bResumeExisting ? EFileWrite::FILEWRITE_Append : 0 );
        bSuccess = NewFile != NULL;
        if( bSuccess )
        {
            // Whenever we start writing again, there's no more resuming to be done
            BuildProgress->SetStateProgress( EBuildPatchProgress::Resuming, 1.0f );

            // Seek to the file write position
            NewFile->Seek( StartPosition );

            // For each chunk, load it and place its data into the file
            for( int32 ChunkPartIdx = StartChunkPart; ChunkPartIdx < FileManifest->FileChunkParts.Num() && bSuccess && !FBuildPatchInstallError::HasFatalError(); ++ChunkPartIdx )
            {
                const FChunkPartData& ChunkPart = FileManifest->FileChunkParts[ChunkPartIdx];
                if( bIsFileData )
                {
                    bSuccess = InsertFileData( ChunkPart, *NewFile, HashState );
                }
                else
                {
                    bSuccess = InsertChunkData( ChunkPart, *NewFile, HashState );
                }
                if( bSuccess )
                {
                    CountBytesProcessed( ChunkPart.Size );
                    // Wait if paused
                    BuildProgress->WaitWhilePaused();
                }
                else
                {
                    ErrorString = TEXT( "Failed to construct file " );
                    ErrorString += Filename;
                    ErrorString += TEXT( " because of chunk " );
                    ErrorString += ChunkPart.Guid.ToString();
                    GWarn->Logf( TEXT( "BuildPatchFileConstructor: ERROR: %s" ), *ErrorString );
                    FBuildPatchInstallError::SetFatalError( EBuildPatchInstallError::FileConstructionFail, ErrorString );
                }
            }

            // Close the file writer
            NewFile->Close();
            delete NewFile;
        }
        else
        {
            FBuildPatchAnalytics::RecordConstructionError( Filename, FPlatformMisc::GetLastError(), TEXT( "Could Not Create File" ) );
            ErrorString = TEXT( "Could not create new file " );
            ErrorString += Filename;
            GWarn->Logf( TEXT( "BuildPatchFileConstructor: ERROR: %s" ), *ErrorString );
            FBuildPatchInstallError::SetFatalError( EBuildPatchInstallError::FileConstructionFail, ErrorString );
        }
    }
    else
    {
        FBuildPatchAnalytics::RecordConstructionError( Filename, INDEX_NONE, TEXT( "Missing File Manifest" ) );
        ErrorString = TEXT( "Build manifest does not contain a file manifest for " );
        ErrorString += Filename;
        FBuildPatchInstallError::SetFatalError( EBuildPatchInstallError::FileConstructionFail, ErrorString );
    }

    // Verify the hash for the file that we created
    if( bSuccess )
    {
        HashState.Final();
        HashState.GetHash( HashValue.Hash );
        bSuccess = HashValue == FileManifest->FileHash;
        if( !bSuccess )
        {
            FBuildPatchAnalytics::RecordConstructionError( Filename, INDEX_NONE, TEXT( "Serialised Verify Fail" ) );
            ErrorString = TEXT( "Verify failed after constructing file " );
            ErrorString += Filename;
            GWarn->Logf( TEXT( "BuildDataGenerator: ERROR: %s" ), *ErrorString );
            FBuildPatchInstallError::SetFatalError( EBuildPatchInstallError::FileConstructionFail, ErrorString );
        }
    }

#if PLATFORM_MAC
    if( bSuccess && FileManifest->bIsUnixExecutable )
    {
        // Enable executable permission bit
        struct stat FileInfo;
        if (stat(TCHAR_TO_UTF8(*NewFilename), &FileInfo) == 0)
        {
            bSuccess = chmod(TCHAR_TO_UTF8(*NewFilename), FileInfo.st_mode | S_IXUSR | S_IXGRP | S_IXOTH) == 0;
        }
    }
#endif

    // Delete the staging file if unsuccessful by means of construction fail (i.e. keep if canceled or download issue)
    if( !bSuccess && FBuildPatchInstallError::GetErrorState() == EBuildPatchInstallError::FileConstructionFail )
    {
        IFileManager::Get().Delete( *NewFilename, false, true );
    }

    return bSuccess;
}
const bool FChunkWriter::FQueuedChunkWriter::WriteChunkData(const FString& ChunkFilename, FChunkFile* ChunkFile, const FGuid& ChunkGuid)
{
    // Chunks are saved with GUID, so if a file already exists it will never be different.
    // Skip with return true if already exists
    if( FPaths::FileExists( ChunkFilename ) )
    {
        const int64 ChunkFilesSize = IFileManager::Get().FileSize(*ChunkFilename);
        ChunkFileSizesCS.Lock();
        ChunkFileSizes.Add(ChunkGuid, ChunkFilesSize);
        ChunkFileSizesCS.Unlock();
        return true;
    }
    FArchive* FileOut = IFileManager::Get().CreateFileWriter( *ChunkFilename );
    bool bSuccess = FileOut != NULL;
    if( bSuccess )
    {
        // Setup to handle compression
        bool bDataIsCompressed = true;
        uint8* ChunkDataSource = ChunkFile->ChunkData;
        int32 ChunkDataSourceSize = FBuildPatchData::ChunkDataSize;
        TArray< uint8 > TempCompressedData;
        TempCompressedData.Empty( ChunkDataSourceSize );
        TempCompressedData.AddUninitialized( ChunkDataSourceSize );
        int32 CompressedSize = ChunkDataSourceSize;

        // Compression can increase the size, but the function returns failure in that case;
        // we can allow that to happen since we would not keep larger compressed data anyway.
        bDataIsCompressed = FCompression::CompressMemory(
            static_cast< ECompressionFlags >( COMPRESS_ZLIB | COMPRESS_BiasMemory ),
            TempCompressedData.GetData(),
            CompressedSize,
            ChunkFile->ChunkData,
            FBuildPatchData::ChunkDataSize );

        // If compression succeeded, set data vars
        if( bDataIsCompressed )
        {
            ChunkDataSource = TempCompressedData.GetData();
            ChunkDataSourceSize = CompressedSize;
        }

        // Setup Header
        FChunkHeader& Header = ChunkFile->ChunkHeader;
        *FileOut << Header;
        Header.HeaderSize = FileOut->Tell();
        Header.StoredAs = bDataIsCompressed ? FChunkHeader::STORED_COMPRESSED : FChunkHeader::STORED_RAW;
        Header.DataSize = ChunkDataSourceSize;
        Header.HashType = FChunkHeader::HASH_ROLLING;

        // Write out files
        FileOut->Seek( 0 );
        *FileOut << Header;
        FileOut->Serialize( ChunkDataSource, ChunkDataSourceSize );
        const int64 ChunkFilesSize = FileOut->TotalSize();
        FileOut->Close();
        ChunkFileSizesCS.Lock();
        ChunkFileSizes.Add(ChunkGuid, ChunkFilesSize);
        ChunkFileSizesCS.Unlock();
        bSuccess = !FileOut->GetError();
        delete FileOut;
    }
    // Log errors
    if( !bSuccess )
    {
        GLog->Logf( TEXT( "BuildPatchServices: Error: Could not save out generated chunk file %s" ), *ChunkFilename );
    }
    return bSuccess;
}
bool FBuildPatchChunkCache::RecycleChunkFromBuild( const FGuid& ChunkGuid )
{
    // Must never double acquire
    check( ChunkCache.Contains( ChunkGuid ) == false );
    // Debug leaving any files open
    bool bSuccess = true;

    // Get the app manifest that this chunk can be sourced from
    FBuildPatchAppManifestPtr ChunkSourceAppManifest = InstallationInfo.GetManifestContainingChunk(ChunkGuid);
    if (!ChunkSourceAppManifest.IsValid())
    {
        return false;
    }

    // Get the install directory for this manifest
    const FString ChunkSourceInstallDir = InstallationInfo.GetManifestInstallDir(ChunkSourceAppManifest);
    if (ChunkSourceInstallDir.Len() <= 0)
    {
        return false;
    }

    // We need to generate an inventory of all chunk parts in this build that refer to the chunk that we require
    TMap< FGuid, TArray< FFileChunkPart > > ChunkPartInventory;
    TArray< FGuid > Array;
    Array.Add( ChunkGuid );
    ChunkSourceAppManifest->EnumerateChunkPartInventory(Array, ChunkPartInventory);

    // Attempt construction of the chunk from the parts
    FArchive* BuildFileIn = NULL;
    FString BuildFileOpened;
    int64 BuildFileInSize = 0;

    // We must have a hash for this chunk or else we cannot verify it
    uint8 HashType = 0;
    uint64 ChunkHash = 0;
    FSHAHashData ChunkShaHash;
    if (InstallManifet->GetChunkShaHash(ChunkGuid, ChunkShaHash))
    {
        HashType = FChunkHeader::HASH_SHA1;
    }
    else if (ChunkSourceAppManifest->GetChunkHash(ChunkGuid, ChunkHash))
    {
        HashType = FChunkHeader::HASH_ROLLING;
    }
    TArray< FFileChunkPart >* FileChunkPartsPtr = ChunkPartInventory.Find( ChunkGuid );
    bSuccess = (FileChunkPartsPtr != NULL && HashType != 0);
    if( bSuccess )
    {
        const TArray< FFileChunkPart >& FileChunkParts = *FileChunkPartsPtr;
        TArray< uint8 > TempArray;
        TempArray.AddUninitialized( FBuildPatchData::ChunkDataSize );
        uint8* TempChunkConstruction = TempArray.GetData();
        FMemory::Memzero( TempChunkConstruction, FBuildPatchData::ChunkDataSize );
        bSuccess = FileChunkParts.Num() > 0;
        for( auto FileChunkPartIt = FileChunkParts.CreateConstIterator(); FileChunkPartIt && bSuccess && !FBuildPatchInstallError::HasFatalError(); ++FileChunkPartIt )
        {
            const FFileChunkPart& FileChunkPart = *FileChunkPartIt;
            FString FullFilename = ChunkSourceInstallDir / FileChunkPart.Filename;

            // Close current build file?
            if( BuildFileIn != NULL && BuildFileOpened != FullFilename )
            {
                BuildFileIn->Close();
                delete BuildFileIn;
                BuildFileIn = NULL;
                BuildFileOpened = TEXT( "" );
                BuildFileInSize = 0;
            }

            // Open build file?
            if( BuildFileIn == NULL )
            {
                BuildFileIn = IFileManager::Get().CreateFileReader( *FullFilename );
                bSuccess = BuildFileIn != NULL;
                if( !bSuccess )
                {
                    BuildFileOpened = TEXT( "" );
                    FBuildPatchAnalytics::RecordChunkCacheError( ChunkGuid, FileChunkPart.Filename, FPlatformMisc::GetLastError(), TEXT( "ChunkRecycle" ), TEXT( "Source File Missing" ) );
                    GWarn->Logf( TEXT( "BuildPatchChunkConstruction: Warning: Failed to load source file for chunk. %s" ), *FullFilename );
                }
                else
                {
                    BuildFileOpened = FullFilename;
                    BuildFileInSize = BuildFileIn->TotalSize();
                }
            }

            // Grab the section of the chunk
            if( BuildFileIn != NULL )
            {
                // Make sure we don't attempt to read off the end of the file
                const int64 LastRequiredByte = FileChunkPart.FileOffset + FileChunkPart.ChunkPart.Size;
                if( BuildFileInSize >= LastRequiredByte )
                {
                    BuildFileIn->Seek( FileChunkPart.FileOffset );
                    BuildFileIn->Serialize( TempChunkConstruction + FileChunkPart.ChunkPart.Offset, FileChunkPart.ChunkPart.Size );
                }
                else
                {
                    bSuccess = false;
                    FBuildPatchAnalytics::RecordChunkCacheError( ChunkGuid, FileChunkPart.Filename, INDEX_NONE, TEXT( "ChunkRecycle" ), TEXT( "Source File Too Small" ) );
                    GWarn->Logf( TEXT( "BuildPatchChunkConstruction: Warning: Source file too small for chunk position. %s" ), *FullFilename );
                }
            }
        }

        // Check no other fatal errors were registered in the meantime
        bSuccess = bSuccess && !FBuildPatchInstallError::HasFatalError();

        // Check chunk hash
        if( bSuccess )
        {
            FSHAHashData ShaHashCheck;
            switch (HashType)
            {
            case FChunkHeader::HASH_ROLLING:
                bSuccess = FRollingHash< FBuildPatchData::ChunkDataSize >::GetHashForDataSet(TempChunkConstruction) == ChunkHash;
                break;
            case FChunkHeader::HASH_SHA1:
                FSHA1::HashBuffer(TempChunkConstruction, FBuildPatchData::ChunkDataSize, ShaHashCheck.Hash);
                bSuccess = ShaHashCheck == ChunkShaHash;
                break;
            default:
                bSuccess = false;
            }
            if( !bSuccess )
            {
                FBuildPatchAnalytics::RecordChunkCacheError( ChunkGuid, TEXT( "" ), INDEX_NONE, TEXT( "ChunkRecycle" ), TEXT( "Chunk Hash Fail" ) );
                GWarn->Logf( TEXT( "BuildPatchChunkConstruction: Warning: Hash check failed for recycled chunk %s" ), *ChunkGuid.ToString() );
            }
        }

        // Save the chunk to cache if all went well
        if( bSuccess )
        {
            // It was added asynchronously!!
            check( ChunkCache.Contains( ChunkGuid ) == false );

            // Create the ChunkFile data structure
            FChunkFile* NewChunkFile = new FChunkFile( GetRemainingReferenceCount( ChunkGuid ), true );

            // Lock data
            FChunkHeader* ChunkHeader;
            uint8* ChunkData;
            NewChunkFile->GetDataLock( &ChunkData, &ChunkHeader );

            // Copy the data
            FMemoryReader MemReader( TempArray );
            MemReader.Serialize( ChunkData, FBuildPatchData::ChunkDataSize );

            // Setup the header
            ChunkHeader->Guid = ChunkGuid;
            ChunkHeader->StoredAs = FChunkHeader::STORED_RAW;
            ChunkHeader->DataSize = FBuildPatchData::ChunkDataSize; // This would change if compressing/encrypting
            ChunkHeader->HashType = HashType;
            ChunkHeader->RollingHash = ChunkHash;
            ChunkHeader->SHAHash = ChunkShaHash;

            // Release data
            NewChunkFile->ReleaseDataLock();

            // Count chunk
            NumChunksRecycled.Increment();

            // Add it to our cache.
            ChunkCache.Add( ChunkGuid, NewChunkFile );
        }

        // Close any open file
        if( BuildFileIn != NULL )
        {
            BuildFileIn->Close();
            delete BuildFileIn;
            BuildFileIn = NULL;
        }
    }

    return bSuccess;
}
void SDocumentationToolTip::CreateExcerpt( FString FileSource, FString InExcerptName )
{
    FText CheckoutFailReason;
    bool bNewFile = true;
    bool bCheckoutOrAddSucceeded = true;
    if (FPaths::FileExists(FileSource))
    {
        // Check out the existing file
        bNewFile = false;
        bCheckoutOrAddSucceeded = SourceControlHelpers::CheckoutOrMarkForAdd(FileSource,
            NSLOCTEXT("SToolTip", "DocumentationSCCActionDesc", "tool tip excerpt"),
            FOnPostCheckOut(), /*out*/ CheckoutFailReason);
    }

    FArchive* FileWriter = IFileManager::Get().CreateFileWriter(
        *FileSource,
        EFileWrite::FILEWRITE_Append | EFileWrite::FILEWRITE_AllowRead | EFileWrite::FILEWRITE_EvenIfReadOnly );

    if (bNewFile)
    {
        FString UdnHeader;
        UdnHeader += "Availability:NoPublish";
        UdnHeader += LINE_TERMINATOR;
        UdnHeader += "Title:";
        UdnHeader += LINE_TERMINATOR;
        UdnHeader += "Crumbs:";
        UdnHeader += LINE_TERMINATOR;
        UdnHeader += "Description:";
        UdnHeader += LINE_TERMINATOR;

        FileWriter->Serialize( TCHAR_TO_ANSI( *UdnHeader ), UdnHeader.Len() );
    }

    FString NewExcerpt;
    NewExcerpt += LINE_TERMINATOR;
    NewExcerpt += "[EXCERPT:";
    NewExcerpt += InExcerptName;
    NewExcerpt += "]";
    NewExcerpt += LINE_TERMINATOR;

    NewExcerpt += TextContent.Get().ToString();
    NewExcerpt += LINE_TERMINATOR;

    NewExcerpt += "[/EXCERPT:";
    NewExcerpt += InExcerptName;
    NewExcerpt += "]";
    NewExcerpt += LINE_TERMINATOR;

    if (!bNewFile)
    {
        FileWriter->Seek( FMath::Max( FileWriter->TotalSize(), (int64)0 ) );
    }

    FileWriter->Serialize( TCHAR_TO_ANSI( *NewExcerpt ), NewExcerpt.Len() );

    FileWriter->Close();
    delete FileWriter;

    if (bNewFile)
    {
        // Add the new file
        bCheckoutOrAddSucceeded = SourceControlHelpers::CheckoutOrMarkForAdd(FileSource,
            NSLOCTEXT("SToolTip", "DocumentationSCCActionDesc", "tool tip excerpt"),
            FOnPostCheckOut(), /*out*/ CheckoutFailReason);
    }

    ISourceCodeAccessModule& SourceCodeAccessModule = FModuleManager::LoadModuleChecked<ISourceCodeAccessModule>("SourceCodeAccess");
    SourceCodeAccessModule.GetAccessor().OpenFileAtLine(FileSource, 0);

    if (!bCheckoutOrAddSucceeded)
    {
        FNotificationInfo Info(CheckoutFailReason);
        Info.ExpireDuration = 3.0f;
        FSlateNotificationManager::Get().AddNotification(Info);
    }

    ReloadDocumentation();
}
void FBuildPatchChunkCache::ReserveChunkInventorySlotForce( const FGuid& ChunkGuid )
{
    // If already reserved, return immediately
    if( ChunkCache.HasReservation( ChunkGuid ) || ChunkCache.Contains( ChunkGuid ) )
    {
        return;
    }

    // Begin by checking if any slots can be freed
    ChunkCache.PurgeUnreferenced();

    // Try to add the reservation
    bool bReservationAccepted = ChunkCache.TryAddReservation( ChunkGuid );

    // If we couldn't reserve, we need to boot out a chunk for this required one
    if( bReservationAccepted == false )
    {
        // We create a unique ref array from the use order so that chunks not needed
        // for longer times end up nearer the bottom of the array
        TArray< FGuid > ChunkPriorityList;
        ChunkInfoLock.Lock();
        for( int32 ChunkUseOrderStackIdx = ChunkUseOrderStack.Num() - 1; ChunkUseOrderStackIdx >= 0; --ChunkUseOrderStackIdx )
        {
            ChunkPriorityList.AddUnique( ChunkUseOrderStack[ ChunkUseOrderStackIdx ] );
        }
        ChunkInfoLock.Unlock();

        // Starting at the bottom of the list, we look for a chunk that is contained in the cache
        for( int32 ChunkPriorityListIdx = ChunkPriorityList.Num() - 1; ChunkPriorityListIdx >= 0 && !bReservationAccepted; --ChunkPriorityListIdx )
        {
            const FGuid& LowPriChunk = ChunkPriorityList[ ChunkPriorityListIdx ];
            BuildProgress->WaitWhilePaused();
            // Check if there were any errors while paused, like canceling
            if( FBuildPatchInstallError::HasFatalError() )
            {
                return;
            }
            if( ChunkCache.Contains( LowPriChunk ) )
            {
                GWarn->Logf( TEXT( "FBuildPatchChunkCache: Booting chunk %s" ), *LowPriChunk.ToString() );

                // Save chunk to disk so we don't have to download again
                bool bSuccess = true;
                const FString NewChunkFilename = FBuildPatchUtils::GetChunkOldFilename( ChunkCacheStage, LowPriChunk );
                FChunkFile* LowPriChunkFile = ChunkCache.Get( LowPriChunk );
                FChunkHeader* LowPriChunkHeader;
                uint8* LowPriChunkData;
                LowPriChunkFile->GetDataLock( &LowPriChunkData, &LowPriChunkHeader );
                FArchive* FileOut = IFileManager::Get().CreateFileWriter( *NewChunkFilename );
                bSuccess = FileOut != NULL;
                const int32 LastError = FPlatformMisc::GetLastError();
                if( bSuccess )
                {
                    // Setup Header
                    *FileOut << *LowPriChunkHeader;
                    LowPriChunkHeader->HeaderSize = FileOut->Tell();
                    LowPriChunkHeader->StoredAs = FChunkHeader::STORED_RAW;
                    LowPriChunkHeader->DataSize = FBuildPatchData::ChunkDataSize; // This would change if compressing/encrypting

                    // Write out file
                    FileOut->Seek( 0 );
                    *FileOut << *LowPriChunkHeader;
                    FileOut->Serialize( LowPriChunkData, FBuildPatchData::ChunkDataSize );
                    FileOut->Close();
                    delete FileOut;
                }
                LowPriChunkFile->ReleaseDataLock();

                // Setup new chunk origin
                if( bSuccess )
                {
                    ChunkOrigins[ LowPriChunk ] = EChunkOrigin::Harddisk;
                }
                else
                {
                    // Queue download if save failed
                    ChunkOrigins[ LowPriChunk ] = EChunkOrigin::Download;
                    FBuildPatchDownloader::Get().AddChunkToDownload( LowPriChunk );
                    FBuildPatchAnalytics::RecordChunkCacheError( ChunkGuid, NewChunkFilename, LastError, TEXT( "ChunkBooting" ), TEXT( "Chunk Save Failed" ) );
                }

                // Boot this chunk
                ChunkCache.Remove( LowPriChunk );

                // Try to get the reservation again!
                bReservationAccepted = ChunkCache.TryAddReservation( ChunkGuid );

                // Count the boot
                NumChunksCacheBooted.Increment();
            }
        }

        // We must have been able to make room
        check( bReservationAccepted );
    }
}
void UArrayProperty::SerializeItem( FArchive& Ar, void* Value, void const* Defaults ) const
{
    checkSlow(Inner);

    // Ensure that the Inner itself has been loaded before calling SerializeItem() on it
    Ar.Preload(Inner);

    FScriptArrayHelper ArrayHelper(this, Value);
    int32 n = ArrayHelper.Num();
    Ar << n;
    if( Ar.IsLoading() )
    {
        // If using a custom property list, don't empty the array on load. Not all indices
        // may have been serialized, so we need to preserve existing values at those slots.
        if (Ar.ArUseCustomPropertyList)
        {
            const int32 OldNum = ArrayHelper.Num();
            if (n > OldNum)
            {
                ArrayHelper.AddValues(n - OldNum);
            }
            else if (n < OldNum)
            {
                ArrayHelper.RemoveValues(n, OldNum - n);
            }
        }
        else
        {
            ArrayHelper.EmptyAndAddValues(n);
        }
    }
    ArrayHelper.CountBytes( Ar );

    // Serialize a PropertyTag for the inner property of this array; allows us to validate the inner struct to see if it has changed
    FPropertyTag InnerTag(Ar, Inner, 0, (uint8*)Value, (uint8*)Defaults);
    if (Ar.UE4Ver() >= VER_UE4_INNER_ARRAY_TAG_INFO && InnerTag.Type == NAME_StructProperty)
    {
        if (Ar.IsSaving())
        {
            Ar << InnerTag;
        }
        else if (Ar.IsLoading())
        {
            Ar << InnerTag;

            auto CanSerializeFromStructWithDifferentName = [](const FArchive& InAr, const FPropertyTag& PropertyTag, const UStructProperty* StructProperty)
            {
                return PropertyTag.StructGuid.IsValid()
                    && StructProperty && StructProperty->Struct
                    && (PropertyTag.StructGuid == StructProperty->Struct->GetCustomGuid());
            };

            // Check if the Inner property can successfully serialize; the type may have changed
            UStructProperty* StructProperty = CastChecked<UStructProperty>(Inner);
            // Check the redirector to see if the struct name has changed
            FName* NewName = FLinkerLoad::StructNameRedirects.Find(InnerTag.StructName);
            FName StructName = StructProperty->Struct->GetFName();
            if (NewName != nullptr && *NewName == StructName)
            {
                InnerTag.StructName = *NewName;
            }

            if (InnerTag.StructName != StructProperty->Struct->GetFName()
                && !CanSerializeFromStructWithDifferentName(Ar, InnerTag, StructProperty))
            {
                UE_LOG(LogClass, Warning, TEXT("Property %s of %s has a struct type mismatch (tag %s != prop %s) in package: %s. If that struct got renamed, add an entry to ActiveStructRedirects."),
                    *InnerTag.Name.ToString(), *GetName(), *InnerTag.StructName.ToString(), *StructProperty->Struct->GetName(), *Ar.GetArchiveName());

#if WITH_EDITOR
                // Ensure the structure is initialized
                for (int32 i = 0; i < n; i++)
                {
                    StructProperty->Struct->InitializeDefaultValue(ArrayHelper.GetRawPtr(i));
                }
#endif // WITH_EDITOR

                // Skip the property: Tell() has not advanced since StartOfProperty,
                // so RemainingSize is the full tag size here
                const int64 StartOfProperty = Ar.Tell();
                const int64 RemainingSize = InnerTag.Size - (Ar.Tell() - StartOfProperty);
                uint8 B;
                for (int64 i = 0; i < RemainingSize; i++)
                {
                    Ar << B;
                }
                return;
            }
        }
    }

    // need to know how much data this call to SerializeItem consumes, so mark where we are
    int32 DataOffset = Ar.Tell();

    // If we're using a custom property list, first serialize any explicit indices
    int32 i = 0;
    bool bSerializeRemainingItems = true;
    bool bUsingCustomPropertyList = Ar.ArUseCustomPropertyList;
    if (bUsingCustomPropertyList && Ar.ArCustomPropertyList != nullptr)
    {
        // Initially we only serialize indices that are explicitly specified (in order)
        bSerializeRemainingItems = false;

        const FCustomPropertyListNode* CustomPropertyList = Ar.ArCustomPropertyList;
        const FCustomPropertyListNode* PropertyNode = CustomPropertyList;
        while (PropertyNode && i < n && !bSerializeRemainingItems)
        {
            if (PropertyNode->Property != Inner)
            {
                // A null property value signals that we should serialize the remaining array values in full starting at this index
                if (PropertyNode->Property == nullptr)
                {
                    i = PropertyNode->ArrayIndex;
                }
                bSerializeRemainingItems = true;
            }
            else
            {
                // Set a temporary node to represent the item
                FCustomPropertyListNode ItemNode = *PropertyNode;
                ItemNode.ArrayIndex = 0;
                ItemNode.PropertyListNext = nullptr;
                Ar.ArCustomPropertyList = &ItemNode;

                // Serialize the item at this array index
                i = PropertyNode->ArrayIndex;
                Inner->SerializeItem(Ar, ArrayHelper.GetRawPtr(i));
                PropertyNode = PropertyNode->PropertyListNext;

                // Restore the current property list
                Ar.ArCustomPropertyList = CustomPropertyList;
            }
        }
    }

    if (bSerializeRemainingItems)
    {
        // Temporarily suspend the custom property list (as we need these items to be serialized in full)
        Ar.ArUseCustomPropertyList = false;

        // Serialize each item until we get to the end of the array
        while (i < n)
        {
            Inner->SerializeItem(Ar, ArrayHelper.GetRawPtr(i++));
        }

        // Restore use of the custom property list (if it was previously enabled)
        Ar.ArUseCustomPropertyList = bUsingCustomPropertyList;
    }

    if (Ar.UE4Ver() >= VER_UE4_INNER_ARRAY_TAG_INFO && Ar.IsSaving() && InnerTag.Type == NAME_StructProperty)
    {
        // set the tag's size
        InnerTag.Size = Ar.Tell() - DataOffset;

        if (InnerTag.Size > 0)
        {
            // mark our current location
            DataOffset = Ar.Tell();

            // go back and re-serialize the size now that we know it
            Ar.Seek(InnerTag.SizeOffset);
            Ar << InnerTag.Size;

            // return to the current location
            Ar.Seek(DataOffset);
        }
    }
}
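// SerializeItem above cannot know InnerTag.Size until after the items are
// written, so it records the offset, serializes, then seeks back to patch the
// size field. The same Tell/Seek back-patching idiom in isolation
// (WriteSizedRecord is a hypothetical helper, not engine code):
template <typename PayloadFuncType>
static void WriteSizedRecord(FArchive& Ar, PayloadFuncType SerializePayload)
{
    int64 SizeOffset = Ar.Tell();
    int64 RecordSize = 0;
    Ar << RecordSize;                  // placeholder size

    int64 BodyStart = Ar.Tell();
    SerializePayload(Ar);              // write the record body
    int64 BodyEnd = Ar.Tell();

    RecordSize = BodyEnd - BodyStart;  // now the real size is known
    Ar.Seek(SizeOffset);
    Ar << RecordSize;                  // patch the placeholder
    Ar.Seek(BodyEnd);                  // resume writing after the record
}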
void USkeletalMesh::Serialize(FArchive &Ar)
{
    guard(USkeletalMesh::Serialize);

    assert(Ar.Game < GAME_UE3);

#if UNREAL1
    if (Ar.Engine() == GAME_UE1)
    {
        SerializeSkelMesh1(Ar);
        return;
    }
#endif
#if BIOSHOCK
    if (Ar.Game == GAME_Bioshock)
    {
        SerializeBioshockMesh(Ar);
        return;
    }
#endif

    Super::Serialize(Ar);
#if SPLINTER_CELL
    if (Ar.Game == GAME_SplinterCell)
    {
        SerializeSCell(Ar);
        return;
    }
#endif // SPLINTER_CELL
#if TRIBES3
    TRIBES_HDR(Ar, 4);
#endif
    Ar << Points2;
#if BATTLE_TERR
    if (Ar.Game == GAME_BattleTerr && Ar.ArVer >= 134)
    {
        TArray<FVector> Points3;
        Ar << Points3;
    }
#endif // BATTLE_TERR
    Ar << RefSkeleton;
#if DEBUG_SKELMESH
    appPrintf("RefSkeleton: %d bones\n", RefSkeleton.Num());
    for (int i1 = 0; i1 < RefSkeleton.Num(); i1++)
        appPrintf(" [%d] n=%s p=%d\n", i1, *RefSkeleton[i1].Name, RefSkeleton[i1].ParentIndex);
#endif // DEBUG_SKELMESH
#if SWRC
    if (Ar.Game == GAME_RepCommando && Ar.ArVer >= 142)
    {
        for (int i = 0; i < RefSkeleton.Num(); i++)
        {
            FMeshBone &B = RefSkeleton[i];
            B.BonePos.Orientation.X *= -1;
            B.BonePos.Orientation.Y *= -1;
            B.BonePos.Orientation.Z *= -1;
        }
    }
    if (Ar.Game == GAME_RepCommando && Version >= 5)
    {
        TArray<FMeshAnimLinkSWRC> Anims;
        Ar << Anims;
        if (Anims.Num() >= 1) Animation = Anims[0].Anim;
    }
    else
#endif // SWRC
        Ar << Animation;
#if AA2
    if (Ar.Game == GAME_AA2 && Ar.ArLicenseeVer >= 22)
    {
        TArray<UObject*> unk230;
        Ar << unk230;
    }
#endif // AA2
    Ar << SkeletalDepth << WeightIndices << BoneInfluences;
#if SWRC
    if (Ar.Game == GAME_RepCommando && Ar.ArVer >= 140)
    {
        TArray<FAttachSocketSWRC> Sockets;
        Ar << Sockets; //?? convert
    }
    else
#endif // SWRC
    {
        Ar << AttachAliases << AttachBoneNames << AttachCoords;
    }
    if (Version <= 1)
    {
//      appNotify("SkeletalMesh of version %d\n", Version);
        TArray<FLODMeshSection> tmp1, tmp2;
        TArray<word> tmp3;
        Ar << tmp1 << tmp2 << tmp3;
        // copy and convert data from old mesh format
        UpgradeMesh();
    }
    else
    {
#if UC2
        if (Ar.Engine() == GAME_UE2X && Ar.ArVer >= 136)
        {
            int f338;
            Ar << f338;
        }
#endif // UC2
#if SWRC
        if (Ar.Game == GAME_RepCommando)
        {
            int f1C4;
            if (Version >= 6) Ar << f1C4;
            Ar << LODModels;
            if (Version < 5) Ar << f224;
            Ar << Points << Wedges << Triangles << VertInfluences;
            Ar << CollapseWedge << f1C8;
            goto skip_remaining;
        }
#endif // SWRC
#if EOS
        if (Ar.Game == GAME_EOS)
        {
            int unk1;
            UObject* unk2;
            UObject* unk3;
            if (Version >= 6) Ar << unk1 << unk2;
            if (Version >= 7) Ar << unk3;
            Ar << LODModels;
            goto skip_remaining;
        }
#endif // EOS
#if 0
        // Shui Hu Q Zhuan 2 Online
        if (Ar.ArVer == 126 && Ar.ArLicenseeVer == 1)
        {
            // skip LOD models
            int Num;
            Ar << AR_INDEX(Num);
            for (int i = 0; i < Num; i++)
            {
                int Pos;
                Ar << Pos;
                Ar.Seek(Ar.Tell() + Pos - 4);
            }
            goto after_lods;
        }
#endif
        Ar << LODModels;
    after_lods:
        Ar << f224 << Points;
#if BATTLE_TERR
        if (Ar.Game == GAME_BattleTerr && Ar.ArVer >= 134)
        {
            TLazyArray<int> unk15C;
            Ar << unk15C;
        }
#endif // BATTLE_TERR
        Ar << Wedges << Triangles << VertInfluences;
        Ar << CollapseWedge << f1C8;
    }

#if TRIBES3
    if ((Ar.Game == GAME_Tribes3 || Ar.Game == GAME_Swat4) && t3_hdrSV >= 3)
    {
#if 0
        // It looks like the format of the following data was changed since the
        // data was prepared, and the game executable does not load these
        // LazyArrays (otherwise an error would occur) -- so we simply skip
        // these arrays.
        TLazyArray<FT3Unk1>    unk1;
        TLazyArray<FMeshWedge> unk2;
        TLazyArray<word>       unk3;
        Ar << unk1 << unk2 << unk3;
#else
        SkipLazyArray(Ar);
        SkipLazyArray(Ar);
        SkipLazyArray(Ar);
#endif
        // nothing interesting below ...
        goto skip_remaining;
    }
#endif // TRIBES3
#if BATTLE_TERR
    if (Ar.Game == GAME_BattleTerr) goto skip_remaining;
#endif
#if UC2
    if (Ar.Engine() == GAME_UE2X) goto skip_remaining;
#endif

#if LINEAGE2
    if (Ar.Game == GAME_Lineage2)
    {
        int unk1, unk3, unk4;
        TArray<float> unk2;
        if (Ar.ArVer >= 118 && Ar.ArLicenseeVer >= 3) Ar << unk1;
        if (Ar.ArVer >= 123 && Ar.ArLicenseeVer >= 0x12) Ar << unk2;
        if (Ar.ArVer >= 120) Ar << unk3; // AuthKey ?
        if (Ar.ArLicenseeVer >= 0x23) Ar << unk4;
        ConvertMesh();
        return;
    }
#endif // LINEAGE2

    if (Ar.ArVer >= 120)
    {
        Ar << AuthKey;
    }

#if LOCO
    if (Ar.Game == GAME_Loco) goto skip_remaining; // Loco codepath is similar to UT2004, but sometimes has different version switches
#endif

#if UT2
    if (Ar.Game == GAME_UT2)
    {
        // UT2004 has a branched version of UE2 which differs slightly from the
        // generic UE2 used in all other UE2 games.
        if (Ar.ArVer >= 122) Ar << KarmaProps << BoundingSpheres << BoundingBoxes << f32C;
        if (Ar.ArVer >= 127) Ar << CollisionMesh;
        ConvertMesh();
        return;
    }
#endif // UT2

    // generic UE2 code
    if (Ar.ArVer >= 124) Ar << KarmaProps << BoundingSpheres << BoundingBoxes;
    if (Ar.ArVer >= 125) Ar << f32C;

#if XIII
    if (Ar.Game == GAME_XIII) goto skip_remaining;
#endif
#if RAGNAROK2
    if (Ar.Game == GAME_Ragnarok2 && Ar.ArVer >= 131)
    {
        float unk1, unk2;
        Ar << unk1 << unk2;
    }
#endif // RAGNAROK2

    if (Ar.ArLicenseeVer && (Ar.Tell() != Ar.GetStopper()))
    {
        appPrintf("Serializing SkeletalMesh'%s' of unknown game: %d unread bytes\n", Name, Ar.GetStopper() - Ar.Tell());
    skip_remaining:
        DROP_REMAINING_DATA(Ar);
    }

    ConvertMesh();

    unguard;
}
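// DROP_REMAINING_DATA above simply jumps the archive to the object's end
// offset. A sketch of that skip, assuming the umodel-style FArchive where
// GetStopper() returns the serialized object's end position (SkipRemainingData
// is a hypothetical name for what the macro does):
static void SkipRemainingData(FArchive &Ar)
{
    int Remaining = Ar.GetStopper() - Ar.Tell();
    if (Remaining > 0)
        Ar.Seek(Ar.GetStopper()); // skip everything we do not know how to parse
}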
void SkipFixedArray(FArchive &Ar, int ItemSize)
{
    // DummyItem's serializer reads nothing, so this consumes only the array count
    TArray<DummyItem> DummyArray;
    Ar << DummyArray;
    // Skip the item data itself: Num items of a known fixed size each
    Ar.Seek(Ar.Tell() + DummyArray.Num() * ItemSize);
}
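// SkipFixedArray relies on an item type whose serializer is a no-op, so that
// "Ar << DummyArray" reads only the element count. A sketch of that helper
// type, assuming umodel's TArray serialization conventions:
struct DummyItem
{
    friend FArchive& operator<<(FArchive &Ar, DummyItem&)
    {
        return Ar; // intentionally reads/writes nothing
    }
};

// Example call site (hypothetical): skip a serialized array of 16-byte items
// whose layout we never parse:
//    SkipFixedArray(Ar, 16);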