void FLazyObjectPtr::PossiblySerializeObjectGuid(UObject *Object, FArchive& Ar)
{
	// Saves/loads the annotation-based unique GUID for Object. A leading bool
	// records whether a GUID exists, so objects without one cost a single byte.
	if (Ar.IsSaving() || Ar.IsCountingMemory())
	{
		FUniqueObjectGuid Guid = GuidAnnotation.GetAnnotation(Object);
		bool HasGuid = Guid.IsValid();
		Ar << HasGuid;
		if (HasGuid)
		{
			if (Ar.GetPortFlags() & PPF_DuplicateForPIE)
			{
				// When duplicating for PIE, remap through a per-PIE-instance table so
				// each PIE world gets stable but distinct GUIDs for the same source.
				check(GPlayInEditorID != -1);
				FGuid &FoundGuid = PIEGuidMap[GPlayInEditorID % MAX_PIE_INSTANCES].FindOrAdd(Guid.GetGuid());
				if (!FoundGuid.IsValid())
				{
					// First time this GUID is seen for this PIE instance: mint a new one.
					Guid = FoundGuid = FGuid::NewGuid();
				}
				else
				{
					Guid = FoundGuid;
				}
			}
			Ar << Guid;
		}
	}
	else if (Ar.IsLoading())
	{
		bool HasGuid = false;
		Ar << HasGuid;
		if (HasGuid)
		{
			FUniqueObjectGuid Guid;
			Ar << Guid;
			// Don't try and resolve GUIDs when loading a package for diff'ing
			const UPackage* Package = Object->GetOutermost();
			bool bLoadedForDiff = (Package && (Package->PackageFlags & PKG_ForDiffing));
			// Plain duplication (PPF_Duplicate without PPF_DuplicateForPIE) skips
			// registration so a duplicate does not steal the source object's GUID.
			if (!bLoadedForDiff && (!(Ar.GetPortFlags() & PPF_Duplicate) || (Ar.GetPortFlags() & PPF_DuplicateForPIE)))
			{
				check(!Guid.IsDefault());
				UObject* OtherObject = Guid.ResolveObject();
				if (OtherObject != Object) // on undo/redo, the object (potentially) already exists
				{
					if (OtherObject != NULL)
					{
						UE_CLOG(!((FApp::IsGame() || GIsPlayInEditorWorld) && Package && Package->ContainsMap()), LogUObjectGlobals, Warning, TEXT("Guid is in use by %s and %s, which should never happen in the editor but could happen at runtime with duplicate level loading or PIE"), *OtherObject->GetFullName(), !!Object ? *Object->GetFullName() : TEXT("NULL"));
						// This guid is in use, which should never happen in the editor but could happen at runtime with duplicate level loading or PIE. If so give it a new GUID to avoid crashing
						Guid = FGuid::NewGuid();
					}
					// Register the (possibly replaced) GUID and invalidate cached tags
					// so stale lookups are refreshed.
					GuidAnnotation.AddAnnotation(Object, Guid);
					FUniqueObjectGuid::InvalidateTag();
				}
			}
		}
	}
}
void FStreamedAudioChunk::Serialize(FArchive& Ar, UObject* Owner, int32 ChunkIndex)
{
	// Record whether this archive carries cooked content. The flag itself is
	// serialized, so on load it reflects how the data was originally written.
	bool bIsCookedData = Ar.IsCooking();
	Ar << bIsCookedData;

	// Chunk payload followed by its size.
	BulkData.Serialize(Ar, Owner, ChunkIndex);
	Ar << DataSize;

#if WITH_EDITORONLY_DATA
	// Only uncooked (editor) data keeps the derived-data key alongside the chunk.
	if (!bIsCookedData)
	{
		Ar << DerivedDataKey;
	}
#endif // #if WITH_EDITORONLY_DATA
}
void FGraphReference::PostSerialize(const FArchive& Ar)
{
#if WITH_EDITORONLY_DATA
	// Because the macro instance could have been saved with a GUID that was
	// allocated but never actually saved into the macro graph, refresh the GUID
	// from the graph so it is guaranteed up to date.
	const bool bHasReferenceGuids = (Ar.UE4Ver() >= VER_UE4_K2NODE_REFERENCEGUIDS);
	if (bHasReferenceGuids && MacroGraph)
	{
		GraphGuid = MacroGraph->GraphGuid;
	}
#endif
}
void FUniformExpressionSet::Serialize(FArchive& Ar)
{
	// Serializes every uniform expression array in a fixed order; save and load
	// must traverse these in exactly this sequence.
	Ar << UniformVectorExpressions;
	Ar << UniformScalarExpressions;
	Ar << Uniform2DTextureExpressions;
	Ar << UniformCubeTextureExpressions;
	Ar << ParameterCollections;

	// Recreate the uniform buffer struct after loading.
	if(Ar.IsLoading())
	{
		CreateBufferStruct();
	}
}
void FTextHistory_AsDateTime::Serialize(FArchive& Ar)
{
	// When saving, emit the history-type tag first so the loader can dispatch.
	if(Ar.IsSaving())
	{
		int8 HistoryTypeValue = (int8)ETextHistoryType::AsDateTime;
		Ar << HistoryTypeValue;
	}

	Ar << SourceDateTime;

	// The style enums are persisted as int8 and converted back after the
	// round-trip so the on-disk width is fixed.
	int8 DateStyleValue = (int8)DateStyle;
	Ar << DateStyleValue;
	DateStyle = (EDateTimeStyle::Type)DateStyleValue;

	int8 TimeStyleValue = (int8)TimeStyle;
	Ar << TimeStyleValue;
	TimeStyle = (EDateTimeStyle::Type)TimeStyleValue;

	Ar << TimeZone;

	if(Ar.IsSaving())
	{
		// An unset culture is stored as the empty string.
		FString CultureName = TargetCulture.IsValid()? TargetCulture->GetName() : FString();
		Ar << CultureName;
	}
	else if(Ar.IsLoading())
	{
		FString CultureName;
		Ar << CultureName;
		if(!CultureName.IsEmpty())
		{
			TargetCulture = FInternationalization::Get().GetCulture(CultureName);
		}
	}
}
void UMeshAnimation::SerializeLineageMoves(FArchive &Ar)
{
	guard(UMeshAnimation::SerializeLineageMoves);
	// Lineage II uses a custom Moves layout from ArVer 123 / licensee 0x19
	// onwards; anything older is the standard UE2 array format.
	if (Ar.ArVer < 123 || Ar.ArLicenseeVer < 0x19)
	{
		// standard UE2 format
		Ar << Moves;
		return;
	}
	// Only reading of the Lineage-specific layout is supported here.
	assert(Ar.IsLoading);
	int pos, count; // pos = global skip pos, count = data count
	Ar << pos << AR_INDEX(count);
	Moves.Empty(count);
	for (int i = 0; i < count; i++)
	{
		// Each chunk is prefixed with the absolute archive position it should
		// end at, which is used as a consistency check after reading it.
		int localPos;
		Ar << localPos;
		MotionChunk *M = new(Moves) MotionChunk;
		Ar << *M;
		assert(Ar.Tell() == localPos);
	}
	// The leading skip position must match where the whole table ended.
	assert(Ar.Tell() == pos);
	unguard;
}
void FRawCurveTracks::Serialize(FArchive& Ar)
{
	// @TODO: If we're about to serialize vector curve, add here
	// Float curves only exist in archives new enough to carry smart names.
	const bool bHasSmartNames = (Ar.UE4Ver() >= VER_UE4_SKELETON_ADD_SMARTNAMES);
	if (bHasSmartNames)
	{
		for (FFloatCurve& FloatCurve : FloatCurves)
		{
			FloatCurve.Serialize(Ar);
		}
	}

#if WITH_EDITORONLY_DATA
	// Transform curves are editor-only and are stripped from cooked archives.
	if (!Ar.IsCooking() && Ar.UE4Ver() >= VER_UE4_ANIMATION_ADD_TRACKCURVES)
	{
		for (FTransformCurve& TransformCurve : TransformCurves)
		{
			TransformCurve.Serialize(Ar);
		}
	}
#endif // WITH_EDITORONLY_DATA
}
bool FBuildPatchAppManifest::Serialize(FArchive& Ar)
{
	const bool bIsLoading = Ar.IsLoading();

	// Loading replaces all existing manifest data.
	if (bIsLoading)
	{
		DestroyData();
	}

	Data->Serialize(Ar);

	if (bIsLoading)
	{
		// A still-invalid version number after load means the version field was
		// skipped on save, which identifies the first UObject manifest version.
		if (Data->ManifestFileVersion == static_cast<uint8>(EBuildPatchAppManifestVersion::Invalid))
		{
			Data->ManifestFileVersion = EBuildPatchAppManifestVersion::StoredAsCompressedUClass;
		}
		// Setup internal lookups
		InitLookups();
	}

	return !Ar.IsError();
}
FArchive* FFileManagerWindows::InternalCreateFileWriter( const TCHAR* Filename, DWORD Flags, FOutputDevice* Error )
{
	// Opens Filename for writing according to the FILEWRITE_* flags and wraps the
	// Win32 handle in an FArchiveFileWriterWindows. Returns NULL on failure
	// (after a fatal appErrorf when FILEWRITE_NoFail is set).
	INT StatsHandle = FILE_IO_STATS_GET_HANDLE( Filename );
	SCOPED_FILE_IO_WRITE_OPEN_STATS( StatsHandle );

	// Make sure the destination directory exists before creating the file.
	MakeDirectory(*FFilename(Filename).GetPath(), TRUE);

	// If the file already exists and clobbering read-only files is allowed,
	// clear its attributes so CreateFileW can overwrite it.
	// NOTE(review): SetFileAttributesW with 0 is not a documented attribute
	// value (FILE_ATTRIBUTE_NORMAL is the usual choice) — confirm intent.
	if( (GFileManager->FileSize (Filename) >= 0) && (Flags & FILEWRITE_EvenIfReadOnly) )
	{
		SetFileAttributesW(Filename, 0);
	}
	DWORD Access = GENERIC_WRITE;
	DWORD WinFlags = (Flags & FILEWRITE_AllowRead) ? FILE_SHARE_READ : 0;
	// Append reuses an existing file, NoReplaceExisting fails if one exists,
	// otherwise create/truncate.
	DWORD Create = (Flags & FILEWRITE_Append) ? OPEN_ALWAYS : (Flags & FILEWRITE_NoReplaceExisting) ? CREATE_NEW : CREATE_ALWAYS;
	HANDLE Handle = CreateFileW( Filename, Access, WinFlags, NULL, Create, FILE_ATTRIBUTE_NORMAL, NULL );
	INT Pos = 0;
	if( Handle==INVALID_HANDLE_VALUE )
	{
		if( Flags & FILEWRITE_NoFail )
		{
			const DWORD LastError = GetLastError();
			appErrorf( TEXT("Failed to create file: %s, GetLastError %u"), Filename, LastError );
		}
		return NULL;
	}
	if( Flags & FILEWRITE_Append )
	{
		// Start the archive position at the current end of file.
		// NOTE(review): a failed SetFilePointer (INVALID_SET_FILE_POINTER) is
		// not checked here.
		Pos = SetFilePointer( Handle, 0, NULL, FILE_END );
	}
	FArchive* retArch = new FArchiveFileWriterWindows(Handle,StatsHandle,Filename,Error,Pos);
	if( retArch && (Flags & FILEWRITE_SaveGame) )
	{
		retArch->SetIsSaveGame( TRUE );
	}
	return retArch;
}
void DBaseDecal::SerializeChain (FArchive &arc, DBaseDecal **first)
{
	// Serializes a linked chain of wall decals. Saving writes a count followed by
	// each decal in chain order; loading reads them back and re-links the
	// WallPrev/WallNext pointers as it goes.
	DWORD numInChain;
	DBaseDecal *fresh;
	DBaseDecal **firstptr = first;

	if (arc.IsLoading ())
	{
		numInChain = arc.ReadCount ();
		while (numInChain--)
		{
			arc << fresh;
			// Splice the freshly loaded decal onto the tail of the chain and
			// advance the tail pointer to its WallNext slot.
			*firstptr = fresh;
			fresh->WallPrev = firstptr;
			firstptr = &fresh->WallNext;
		}
	}
	else
	{
		// First pass: walk the chain to count entries for the reader.
		numInChain = 0;
		fresh = *firstptr;
		while (fresh != NULL)
		{
			fresh = fresh->WallNext;
			++numInChain;
		}
		arc.WriteCount (numInChain);
		// Second pass: write each decal in chain order.
		fresh = *firstptr;
		while (numInChain--)
		{
			arc << fresh;
			fresh = fresh->WallNext;
		}
	}
}
FLocMetadataValueArray::FLocMetadataValueArray( FArchive& Archive )
{
	// This constructor only deserializes; writing goes through a different path.
	check(Archive.IsLoading());

	int32 NumElements;
	Archive << NumElements;

	// Size the array first, then let the serializer allocate each value and
	// wrap the resulting raw pointer in a shared pointer.
	Value.SetNum(NumElements);
	for (TSharedPtr<FLocMetadataValue>& ArrayEntry : Value)
	{
		FLocMetadataValue* RawValue = ArrayEntry.Get();
		SerializeLocMetadataValue(Archive, RawValue);
		ArrayEntry = MakeShareable(RawValue);
	}
}
//========================================================================== // // // //========================================================================== void ADynamicLight::Serialize(FArchive &arc) { Super::Serialize (arc); arc << lightflags << lighttype; arc << m_tickCount << m_currentRadius; arc << m_Radius[0] << m_Radius[1]; if (lighttype == PulseLight) arc << m_lastUpdate << m_cycler; if (arc.IsLoading()) { // The default constructor which is used for creating objects before deserialization will not set this variable. // It needs to be true for all placed lights. visibletoplayer = true; LinkLight(); } }
/**
 * Loads the data from disk into the specified memory block. This requires us still being attached to an
 * archive we can use for serialization.
 *
 * @param Dest Memory to serialize data into
 */
void FUntypedBulkData::LoadDataIntoMemory( void* Dest )
{
#if WITH_EDITOR
	checkf( AttachedAr, TEXT( "Attempted to load bulk data without an attached archive. Most likely the bulk data was loaded twice on console, which is not supported" ) );

	// Keep track of current position in file so we can restore it later.
	int64 PushedPos = AttachedAr->Tell();
	// Seek to the beginning of the bulk data in the file.
	AttachedAr->Seek( BulkDataOffsetInFile );

	SerializeBulkData( *AttachedAr, Dest );

	// Restore file pointer.
	AttachedAr->Seek( PushedPos );
#else
	bool bWasLoadedSuccessfully = false;
	// Fast path: reuse the linker's loader when we are on the game thread and the
	// package is not compressed (seeking into compressed data is not possible).
	if (IsInGameThread() && Linker.IsValid())
	{
		ULinkerLoad* LinkerLoad = Linker.Get();
		if ( LinkerLoad && LinkerLoad->Loader && !LinkerLoad->IsCompressed() )
		{
			FArchive* Ar = LinkerLoad;
			// keep track of current position in this archive
			int64 CurPos = Ar->Tell();

			// Seek to the beginning of the bulk data in the file.
			Ar->Seek( BulkDataOffsetInFile );

			// serialize the bulk data
			SerializeBulkData( *Ar, Dest );

			// seek back to the position the archive was before
			Ar->Seek(CurPos);

			// note that we loaded it
			bWasLoadedSuccessfully = true;
		}
	}
	// if we weren't able to load via linker, load directly by filename
	if (!bWasLoadedSuccessfully)
	{
		// load from the specified filename when the linker has been cleared
		checkf( Filename != TEXT(""), TEXT( "Attempted to load bulk data without a proper filename." ) );

		FArchive* Ar = IFileManager::Get().CreateFileReader(*Filename, FILEREAD_Silent);
		checkf( Ar != NULL, TEXT( "Attempted to load bulk data from an invalid filename '%s'." ), *Filename );

		// Seek to the beginning of the bulk data in the file.
		Ar->Seek( BulkDataOffsetInFile );
		SerializeBulkData( *Ar, Dest );
		delete Ar;
	}
#endif // WITH_EDITOR
}
void FGameplayDebuggerCategory_AI::FRepDataPath::Serialize(FArchive& Ar)
{
	// Explicit element count so the receiving side can size the array before
	// the per-element serialization below.
	int32 CorridorCount = PathCorridor.Num();
	Ar << CorridorCount;
	if (Ar.IsLoading())
	{
		PathCorridor.SetNum(CorridorCount);
	}

	for (auto& CorridorPoly : PathCorridor)
	{
		Ar << CorridorPoly.Points;
		Ar << CorridorPoly.Color;
	}

	Ar << PathPoints;
}
void FStreamedAudioChunk::Serialize(FArchive& Ar, UObject* Owner, int32 ChunkIndex)
{
	DECLARE_SCOPE_CYCLE_COUNTER( TEXT("FStreamedAudioChunk::Serialize"), STAT_StreamedAudioChunk_Serialize, STATGROUP_LoadTime );

	// Persist the cooked flag itself; on load it reflects how the data was
	// originally written rather than the current archive's state.
	bool bCookedArchive = Ar.IsCooking();
	Ar << bCookedArchive;

	// Chunk payload followed by its size.
	BulkData.Serialize(Ar, Owner, ChunkIndex);
	Ar << DataSize;

#if WITH_EDITORONLY_DATA
	// The derived-data key only exists for uncooked editor data.
	if (!bCookedArchive)
	{
		Ar << DerivedDataKey;
	}
#endif // #if WITH_EDITORONLY_DATA
}
void FShaderResource::Serialize(FArchive& Ar)
{
	// Shader bytecode and metadata, serialized in a fixed order.
	Ar << SpecificType;
	Ar << Target;
	Ar << Code;
	Ar << OutputHash;
	Ar << NumInstructions;
	Ar << NumTextureSamplers;

	if (Ar.IsLoading())
	{
		// Account for the freshly loaded bytecode in the per-frequency shader
		// memory stats and feed the shader cache.
		INC_DWORD_STAT_BY_FName(GetMemoryStatType((EShaderFrequency)Target.Frequency).GetName(), (int64)Code.Num());
		INC_DWORD_STAT_BY(STAT_Shaders_ShaderResourceMemory, GetSizeBytes());
		FShaderCache::LogShader((EShaderPlatform)Target.Platform, (EShaderFrequency)Target.Frequency, OutputHash, Code);
	}
}
bool FBuildPatchFileConstructor::InsertChunkData(const FChunkPartData& ChunkPart, FArchive& DestinationFile, FSHA1& HashState) { uint8* Data; uint8* DataStart; FChunkFile* ChunkFile = FBuildPatchChunkCache::Get().GetChunkFile( ChunkPart.Guid ); if( ChunkFile != NULL && !FBuildPatchInstallError::HasFatalError() ) { ChunkFile->GetDataLock( &Data, NULL ); DataStart = &Data[ ChunkPart.Offset ]; HashState.Update( DataStart, ChunkPart.Size ); DestinationFile.Serialize( DataStart, ChunkPart.Size ); ChunkFile->Dereference(); ChunkFile->ReleaseDataLock(); return true; } return false; }
void P_SerializePolyobjs (FArchive &arc)
{
	// Saves/restores polyobject state: a marker, the polyobj count, then each
	// polyobj's tag, angle, start spot and interpolation. Loading validates the
	// marker, count and tags against the current level before replaying the
	// rotation and movement deltas.
	int i;
	FPolyObj *po;

	if (arc.IsStoring ())
	{
		int seg = ASEG_POLYOBJS;
		arc << seg << po_NumPolyobjs;
		for(i = 0, po = polyobjs; i < po_NumPolyobjs; i++, po++)
		{
			arc << po->tag << po->angle << po->StartSpot.x << po->StartSpot.y << po->interpolation;
		}
	}
	else
	{
		int data;
		angle_t angle;
		fixed_t deltaX, deltaY;

		arc << data;
		if (data != ASEG_POLYOBJS)
			I_Error ("Polyobject marker missing");
		arc << data;
		if (data != po_NumPolyobjs)
		{
			I_Error ("UnarchivePolyobjs: Bad polyobj count");
		}
		for (i = 0, po = polyobjs; i < po_NumPolyobjs; i++, po++)
		{
			arc << data;
			if (data != po->tag)
			{
				I_Error ("UnarchivePolyobjs: Invalid polyobj tag");
			}
			arc << angle;
			po->RotatePolyobj (angle);
			arc << deltaX << deltaY << po->interpolation;
			// The saved values are absolute start spots; convert them to deltas
			// from the level's current spawn position before moving.
			deltaX -= po->StartSpot.x;
			deltaY -= po->StartSpot.y;
			po->MovePolyobj (deltaX, deltaY, true);
		}
	}
}
void DEarthquake::Serialize (FArchive &arc)
{
	// Persist the quake's spot, intensity, countdown and effect boxes.
	// Fix: serialize the base class first. The other Serialize overrides in this
	// codebase (DLightningThinker, ADynamicLight) call Super::Serialize before
	// their own members; omitting it here drops the thinker's base-class state
	// from savegames.
	Super::Serialize (arc);

	int i;

	if (arc.IsStoring ())
	{
		arc << m_Spot << m_Intensity << m_Countdown;
		for (i = 0; i < 4; i++)
			arc << m_TremorBox[i] << m_DamageBox[i];
	}
	else
	{
		arc >> m_Spot >> m_Intensity >> m_Countdown;
		for (i = 0; i < 4; i++)
			arc >> m_TremorBox[i] >> m_DamageBox[i];
	}
}
void P_SerializePolyobjs (FArchive &arc)
{
	// Hexen-style polyobject save/restore: marker, count, then each polyobj's
	// tag, angle and 3-component start spot. Loading validates marker, count and
	// tags before replaying rotation and movement.
	int i;
	polyobj_t *po;

	if (arc.IsStoring ())
	{
		arc << (int)ASEG_POLYOBJS << po_NumPolyobjs;
		for(i = 0, po = polyobjs; i < po_NumPolyobjs; i++, po++)
		{
			arc << po->tag << po->angle << po->startSpot[0] << po->startSpot[1] << po->startSpot[2];
		}
	}
	else
	{
		int data;
		angle_t angle;
		fixed_t deltaX, deltaY, deltaZ;

		arc >> data;
		if (data != ASEG_POLYOBJS)
			I_Error ("Polyobject marker missing");
		arc >> data;
		if (data != po_NumPolyobjs)
		{
			I_Error ("UnarchivePolyobjs: Bad polyobj count");
		}
		for (i = 0, po = polyobjs; i < po_NumPolyobjs; i++, po++)
		{
			arc >> data;
			if (data != po->tag)
			{
				I_Error ("UnarchivePolyobjs: Invalid polyobj tag");
			}
			arc >> angle;
			PO_RotatePolyobj (po->tag, angle);
			arc >> deltaX >> deltaY >> deltaZ;
			// Saved values are absolute start spots; convert to deltas from the
			// current level's spawn spot before moving.
			deltaX -= po->startSpot[0];
			deltaY -= po->startSpot[1];
			deltaZ -= po->startSpot[2];
			// NOTE(review): deltaZ is computed but PO_MovePolyobj only takes X/Y.
			PO_MovePolyobj (po->tag, deltaX, deltaY);
		}
	}
}
uint8 FBuildPatchUtils::VerifyFile(const FString& FileToVerify, const FSHAHashData& Hash1, const FSHAHashData& Hash2, FBuildPatchFloatDelegate ProgressDelegate, FBuildPatchBoolRetDelegate ShouldPauseDelegate, double& TimeSpentPaused)
{
	// Streams the file through SHA1 and compares the digest to two accepted
	// hashes. Returns 1 on a Hash1 match, 2 on a Hash2 match, 0 on mismatch or
	// when the file could not be opened. Honors the pause delegate between reads
	// and accumulates the time spent paused into TimeSpentPaused.
	uint8 ReturnValue = 0;
	FArchive* FileReader = IFileManager::Get().CreateFileReader(*FileToVerify);
	ProgressDelegate.ExecuteIfBound(0.0f);
	if (FileReader != NULL)
	{
		FSHA1 HashState;
		FSHAHashData HashValue;
		const int64 FileSize = FileReader->TotalSize();
		uint8* FileReadBuffer = new uint8[FileBufferSize];
		while (!FileReader->AtEnd() && !FBuildPatchInstallError::HasFatalError())
		{
			// Pause if necessary
			const double PrePauseTime = FPlatformTime::Seconds();
			double PostPauseTime = PrePauseTime;
			bool bShouldPause = ShouldPauseDelegate.IsBound() && ShouldPauseDelegate.Execute();
			while (bShouldPause && !FBuildPatchInstallError::HasFatalError())
			{
				FPlatformProcess::Sleep(0.1f);
				bShouldPause = ShouldPauseDelegate.Execute();
				PostPauseTime = FPlatformTime::Seconds();
			}
			// Count up pause time
			TimeSpentPaused += PostPauseTime - PrePauseTime;
			// Read file and update hash state
			const int64 SizeLeft = FileSize - FileReader->Tell();
			const uint32 ReadLen = FMath::Min< int64 >(FileBufferSize, SizeLeft);
			FileReader->Serialize(FileReadBuffer, ReadLen);
			HashState.Update(FileReadBuffer, ReadLen);
			// Report progress as the fraction of bytes hashed so far.
			const double FileSizeTemp = FileSize;
			const float Progress = 1.0f - ((SizeLeft - ReadLen) / FileSizeTemp);
			ProgressDelegate.ExecuteIfBound(Progress);
		}
		delete[] FileReadBuffer;
		HashState.Final();
		HashState.GetHash(HashValue.Hash);
		// 1 = matched Hash1, 2 = matched Hash2, 0 = no match.
		ReturnValue = (HashValue == Hash1) ? 1 : (HashValue == Hash2) ? 2 : 0;
		if (ReturnValue == 0)
		{
			GLog->Logf(TEXT("BuildDataGenerator: Verify failed on %s"), *FPaths::GetCleanFilename(FileToVerify));
		}
		FileReader->Close();
		delete FileReader;
	}
	else
	{
		GLog->Logf(TEXT("BuildDataGenerator: ERROR VerifyFile cannot open %s"), *FileToVerify);
	}
	ProgressDelegate.ExecuteIfBound(1.0f);
	return ReturnValue;
}
void FStreamedAudioPlatformData::Serialize(FArchive& Ar, USoundWave* Owner)
{
	Ar << NumChunks;
	Ar << AudioFormat;

	if (Ar.IsLoading())
	{
		// Rebuild the chunk array with default-constructed entries before the
		// per-chunk serialization below fills them in.
		Chunks.Empty(NumChunks);
		for (int32 Index = 0; Index < NumChunks; ++Index)
		{
			new(Chunks) FStreamedAudioChunk();
		}
	}

	// Serialize each chunk in order for both save and load.
	for (int32 Index = 0; Index < NumChunks; ++Index)
	{
		Chunks[Index].Serialize(Ar, Owner, Index);
	}
}
bool FStringAssetReference::SerializeFromMismatchedTag(struct FPropertyTag const& Tag, FArchive& Ar) { struct UObjectTypePolicy { typedef UObject Type; static const FName FORCEINLINE GetTypeName() { return NAME_ObjectProperty; } }; FString Path = ToString(); bool bReturn = SerializeFromMismatchedTagTemplate<UObjectTypePolicy>(Path, Tag, Ar); if (Ar.IsLoading()) { SetPath(MoveTemp(Path)); } return bReturn; }
void DLightningThinker::Serialize (FArchive &arc)
{
	// Lightning flash state plus the per-sector light-level snapshot. Older save
	// versions stored obsolete per-sector data which must be read and discarded
	// to keep the archive position consistent.
	int i;
	short *lights;

	Super::Serialize (arc);
	arc << Stopped << NextLightningFlash << LightningFlashCount;
	if (SaveVersion < 3243)
	{
		// Do nothing with old savegames and just keep whatever the constructor made
		// but read the obsolete data from the savegame
		for (i = (numsectors + (numsectors+7)/8); i > 0; --i)
		{
			// Pre-3223 saves stored bytes, later obsolete saves stored shorts.
			if (SaveVersion < 3223)
			{
				BYTE bytelight;
				arc << bytelight;
			}
			else
			{
				short shortlight;
				arc << shortlight;
			}
		}
		return;
	}
	if (arc.IsLoading ())
	{
		// Reallocate the snapshot to match the current level's sector count.
		if (LightningLightLevels != NULL)
		{
			delete[] LightningLightLevels;
		}
		LightningLightLevels = new short[numsectors];
	}
	lights = LightningLightLevels;
	for (i = numsectors; i > 0; ++lights, --i)
	{
		arc << *lights;
	}
}
/**
* Serializer
*
* @param Ar				Archive to serialize with
* @param bNeedsCPUAccess	Whether the elements need to be accessed by the CPU
*/
void FPositionVertexBuffer::Serialize( FArchive& Ar, bool bNeedsCPUAccess )
{
	Ar << Stride << NumVertices;

	if(Ar.IsLoading())
	{
		// Allocate the vertex data storage type.
		AllocateData( bNeedsCPUAccess );
	}

	if(VertexData != NULL)
	{
		// Serialize the vertex data.
		VertexData->Serialize(Ar);

		// Make a copy of the vertex data pointer.
		Data = VertexData->GetDataPointer();
	}
}
// Serialize or unserialize the state of the level depending on the state of // the first parameter. Second parameter is true if you need to deal with hub // playerstate. Third parameter is true if you want to handle playerstate // yourself (map resets), just make sure you set it the same for both // serialization and unserialization. void G_SerializeLevel(FArchive &arc, bool hubLoad, bool noStorePlayers) { if (arc.IsStoring ()) { unsigned int playernum = players.size(); arc << level.flags << level.fadeto << level.found_secrets << level.found_items << level.killed_monsters << level.gravity << level.aircontrol; G_AirControlChanged(); for (int i = 0; i < NUM_MAPVARS; i++) arc << level.vars[i]; if (!noStorePlayers) arc << playernum; } else { unsigned int playernum; arc >> level.flags >> level.fadeto >> level.found_secrets >> level.found_items >> level.killed_monsters >> level.gravity >> level.aircontrol; G_AirControlChanged(); for (int i = 0; i < NUM_MAPVARS; i++) arc >> level.vars[i]; if (!noStorePlayers) { arc >> playernum; players.resize(playernum); } }
TSharedPtr<FVoicePacket> FOnlineVoiceSteam::SerializeRemotePacket(FArchive& Ar)
{
	// Reconstruct an incoming Steam voice packet from the archive.
	TSharedPtr<FVoicePacketSteam> IncomingPacket = MakeShareable(new FVoicePacketSteam());
	IncomingPacket->Serialize(Ar);

	// Reject malformed or empty packets.
	const bool bPacketOk = Ar.IsError() == false && IncomingPacket->GetBufferSize() > 0;
	if (!bPacketOk)
	{
		return NULL;
	}

	// Dedicated servers never play audio; clients queue the packet unless the
	// sender is on the mute list.
	if (!SteamSubsystem->IsDedicated())
	{
		FUniqueNetIdMatcher PlayerMatch(*IncomingPacket->GetSender());
		if (MuteList.IndexOfByPredicate(PlayerMatch) == INDEX_NONE)
		{
			VoiceData.RemotePackets.Add(IncomingPacket);
		}
	}
	return IncomingPacket;
}
TSharedPtr<FVoicePacket> FOnlineVoiceImpl::SerializeRemotePacket(FArchive& Ar)
{
	// Reconstruct an incoming voice packet from the archive.
	TSharedPtr<FVoicePacketImpl> IncomingPacket = MakeShareable(new FVoicePacketImpl());
	IncomingPacket->Serialize(Ar);

	// Reject malformed or empty packets.
	const bool bPacketOk = Ar.IsError() == false && IncomingPacket->GetBufferSize() > 0;
	if (!bPacketOk)
	{
		return NULL;
	}

	// Dedicated servers never play audio; clients queue the packet unless the
	// sender is on the mute list.
	if (!IsRunningDedicatedServer())
	{
		FUniqueNetIdMatcher PlayerMatch(*IncomingPacket->GetSender());
		if (MuteList.FindMatch(PlayerMatch) == INDEX_NONE)
		{
			VoiceData.RemotePackets.Add(IncomingPacket);
		}
	}
	return IncomingPacket;
}
static void ExportMaterial(UUnrealMaterial* Mat, FArchive& Ar, int index, bool bLast)
{
	// Emit one glTF material entry. Materials without an object get a generated
	// "dummy_material_N" name, and every entry gets a per-index debug color.
	char fallbackName[64];
	appSprintf(ARRAY_ARG(fallbackName), "dummy_material_%d", index);

	CVec3 DebugColor = GetMaterialDebugColor(index);

	// The trailing "%s" holds the comma separator, omitted for the last entry.
	Ar.Printf(
		" {\n"
		" \"name\" : \"%s\",\n"
		" \"pbrMetallicRoughness\" : {\n"
		" \"baseColorFactor\" : [ %g, %g, %g, 1.0 ],\n"
		" \"metallicFactor\" : 0.1,\n"
		" \"roughnessFactor\" : 0.5\n"
		" }\n"
		" }%s\n",
		Mat ? Mat->Name : fallbackName,
		DebugColor[0], DebugColor[1], DebugColor[2],
		bLast ? "" : ","
	);
}
int32 FEnumEditorUtils::ResolveEnumerator(const UEnum* Enum, FArchive& Ar, int32 EnumeratorIndex)
{
	// Remaps an enumerator index through the resolver archive's old-name table
	// after an enum has been edited.
	check(Ar.UseToResolveEnumerators());
	const FArchiveEnumeratorResolver* Resolver = (FArchiveEnumeratorResolver*)(&Ar);

	// A different enum than the one being remapped keeps its stored index.
	if (Enum != Resolver->Enum)
	{
		return EnumeratorIndex;
	}

	const auto& OldNames = Resolver->OldNames;
	if (EnumeratorIndex < OldNames.Num())
	{
		// Look up where the old enumerator name lives in the edited enum.
		const FName OldEnumeratorName = OldNames[EnumeratorIndex].Key;
		const int32 NewEnumIndex = Enum->FindEnumIndex(OldEnumeratorName);
		if (INDEX_NONE != NewEnumIndex)
		{
			return NewEnumIndex;
		}
	}

	// The old name no longer exists; fall back to the last entry.
	return (Enum->NumEnums() - 1);
}