Example #1
void FHierarchicalLODBuilder::FindMST() 
{
	SCOPE_LOG_TIME(TEXT("STAT_HLOD_FindMST"), nullptr);
	if (Clusters.Num() > 0)
	{
		// now sort edges in order of weight
		struct FCompareCluster
		{
			FORCEINLINE bool operator()(const FLODCluster& A, const FLODCluster& B) const
			{
				return (A.GetCost() < B.GetCost());
			}
		};

		Clusters.HeapSort(FCompareCluster());
	}
}
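These examples exercise the SCOPE_LOG_TIME family of macros from Unreal Engine's stats utilities, which log the wall-clock time spent in a scope when it exits. A minimal, self-contained sketch of the basic pattern, assuming the macros come from "Stats/StatsMisc.h" as in stock UE4 (the function name below is hypothetical):

#include "Stats/StatsMisc.h" // assumed location of FScopeLogTime and the SCOPE_LOG_TIME macros

void DoExpensiveWork()
{
	// Logs the time spent in this scope when it exits. The second argument is an
	// optional cumulative accumulator; nullptr skips it, matching the examples on this page.
	SCOPE_LOG_TIME(TEXT("DoExpensiveWork"), nullptr);

	// ... expensive work here ...
}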
Example #2
void FHierarchicalLODBuilder::BuildClusters(ULevel* InLevel, const bool bCreateMeshes)
{	
	SCOPE_LOG_TIME(TEXT("STAT_HLOD_BuildClusters"), nullptr);

	// Stack memory is used within the scope of this function,
	// so we need this mark
	FMemMark Mark(FMemStack::Get());

	// Existing LOD actors and their sub-object assets still have to be deleted, in case they were built before and are no longer wanted
	TArray<UObject*> AssetsToDelete;
	for (int32 ActorId=InLevel->Actors.Num()-1; ActorId >= 0; --ActorId)
	{
		ALODActor* LodActor = Cast<ALODActor>(InLevel->Actors[ActorId]);
		if (LodActor)
		{
			for (auto& Asset: LodActor->SubObjects)
			{
				// @TODO: This is not a permanent fix
				if (Asset)
				{
					AssetsToDelete.Add(Asset);
				}
			}
			World->DestroyActor(LodActor);
		}
	}

	ULevel::BuildStreamingData(InLevel->OwningWorld, InLevel);

	for (auto& Asset : AssetsToDelete)
	{
		Asset->MarkPendingKill();
		ObjectTools::DeleteSingleObject(Asset, false);
	}
	
	// garbage collect
	CollectGarbage( GARBAGE_COLLECTION_KEEPFLAGS, true );

	// only build if it's enabled
	if(InLevel->GetWorld()->GetWorldSettings()->bEnableHierarchicalLODSystem && InLevel->GetWorld()->GetWorldSettings()->HierarchicalLODSetup.Num() != 0)
	{
		// Handle HierarchicalLOD volumes first
		HandleHLODVolumes(InLevel);

		AWorldSettings* WorldSetting = InLevel->GetWorld()->GetWorldSettings();		
		const int32 TotalNumLOD = InLevel->GetWorld()->GetWorldSettings()->HierarchicalLODSetup.Num();
		for(int32 LODId=0; LODId<TotalNumLOD; ++LODId)
		{
			
			// We use meters for bounds; otherwise it's very easy to overflow and run into problems with the filling ratio
			// because the bounds are too large
			const float DesiredBoundRadius = WorldSetting->HierarchicalLODSetup[LODId].DesiredBoundRadius * CM_TO_METER;
			const float DesiredFillingRatio = WorldSetting->HierarchicalLODSetup[LODId].DesiredFillingPercentage * 0.01f;
			ensure(DesiredFillingRatio!=0.f);
			const float HighestCost = FMath::Pow(DesiredBoundRadius, 3) / (DesiredFillingRatio);
			const int32 MinNumActors = WorldSetting->HierarchicalLODSetup[LODId].MinNumberOfActorsToBuild;
			check (MinNumActors > 0);
			// Test parameter for culling additions to the array: initialization can produce too many elements, so we cull.
			// The risk is that culling can create a disconnected tree; the assumption is that if the merge cost is too high,
			// the merge isn't worth doing anyway
			static int32 CullMultiplier=1;

			// Scoped so that initialization progress can be reported
			{
				FString LevelName = FPackageName::GetShortName(InLevel->GetOutermost()->GetName());
				FFormatNamedArguments Arguments;
				Arguments.Add(TEXT("LODIndex"), FText::AsNumber(LODId+1));
				Arguments.Add(TEXT("LevelName"), FText::FromString(LevelName));

				FScopedSlowTask SlowTask(100, FText::Format(LOCTEXT("HierarchicalLOD_InitializeCluster", "Initializing Clusters for LOD {LODIndex} of {LevelName}..."), Arguments));
				SlowTask.MakeDialog();

				// initialize Clusters
				InitializeClusters(InLevel, LODId, HighestCost*CullMultiplier);

				// Move halfway - this could be more granular, but for now coarse progress is enough
				SlowTask.EnterProgressFrame(50);

				// now we have all pairs of nodes
				FindMST();
			}

			// now merge the clusters and build the LOD actors
			MergeClustersAndBuildActors(InLevel, LODId, HighestCost, MinNumActors, bCreateMeshes);
		}
	}
	else
	{
		// Fire map check warnings if HLOD System is not enabled
		FMessageLog MapCheck("MapCheck");
		MapCheck.Warning()
			->AddToken(FUObjectToken::Create(InLevel->GetWorld()->GetWorldSettings()))
			->AddToken(FTextToken::Create(LOCTEXT("MapCheck_Message_HLODSystemNotEnabled", "Hierarchical LOD System is disabled or no HLOD level settings available, unable to build LOD actors.")))
			->AddToken(FMapErrorToken::Create(FMapErrors::HLODSystemNotEnabled));
	}

	// Clear Clusters. They use stack memory, which is no longer valid after this point
	Clusters.Empty();
	Clusters.Shrink();
}
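BuildClusters drives the editor progress dialog with FScopedSlowTask: construct it with the total amount of work, call MakeDialog, then call EnterProgressFrame as work completes. A minimal sketch of just that pattern, assuming FScopedSlowTask lives in "Misc/ScopedSlowTask.h" (the function and text below are hypothetical):

#include "Misc/ScopedSlowTask.h" // assumed header for FScopedSlowTask

void RunSteps(int32 NumSteps)
{
	// Total work is NumSteps; each EnterProgressFrame() advances the bar by one unit.
	FScopedSlowTask SlowTask(NumSteps, NSLOCTEXT("Example", "RunningSteps", "Running steps..."));
	SlowTask.MakeDialog();

	for (int32 Step = 0; Step < NumSteps; ++Step)
	{
		SlowTask.EnterProgressFrame();
		// ... per-step work ...
	}
}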
Example #3
void FHierarchicalLODBuilder::MergeClustersAndBuildActors(ULevel* InLevel, const int32 LODIdx, float HighestCost, int32 MinNumActors, const bool bCreateMeshes)
{	
	if (Clusters.Num() > 0)
	{
		FString LevelName = FPackageName::GetShortName(InLevel->GetOutermost()->GetName());
		FFormatNamedArguments Arguments;
		Arguments.Add(TEXT("LODIndex"), FText::AsNumber(LODIdx+1));
		Arguments.Add(TEXT("LevelName"), FText::FromString(LevelName));
		// merge clusters first
		{
			SCOPE_LOG_TIME(TEXT("HLOD_MergeClusters"), nullptr);
			static int32 TotalIteration=3;
			const int32 TotalCluster = Clusters.Num();

			FScopedSlowTask SlowTask(TotalIteration*TotalCluster, FText::Format( LOCTEXT("HierarchicalLOD_BuildClusters", "Building Clusters for LOD {LODIndex} of {LevelName}..."), Arguments) );
			SlowTask.MakeDialog();

			for(int32 Iteration=0; Iteration<TotalIteration; ++Iteration)
			{
				// now we have minimum Clusters
				for(int32 ClusterId=0; ClusterId < TotalCluster; ++ClusterId)
				{
					auto& Cluster = Clusters[ClusterId];
					UE_LOG(LogLODGenerator, Verbose, TEXT("%d. %0.2f {%s}"), ClusterId+1, Cluster.GetCost(), *Cluster.ToString());

					// progress bar update
					SlowTask.EnterProgressFrame();

					if(Cluster.IsValid())
					{
						for(int32 MergedClusterId=0; MergedClusterId < ClusterId; ++MergedClusterId)
						{
							// compare with previous clusters
							auto& MergedCluster = Clusters[MergedClusterId];
							// if it's valid and contains any of this cluster's actors, check the merge cost
							if(MergedCluster.IsValid())
							{
								if(MergedCluster.Contains(Cluster))
								{
									// merge the whole clusters and evaluate the combined cost
									FLODCluster NewCluster = Cluster + MergedCluster;
									float MergeCost = NewCluster.GetCost();

									// merge two clusters
									if(MergeCost <= HighestCost)
									{
										UE_LOG(LogLODGenerator, Log, TEXT("Merging of Cluster (%d) and (%d) with merge cost (%0.2f) "), ClusterId+1, MergedClusterId+1, MergeCost);

										MergedCluster = NewCluster;
										// now this cluster is invalid
										Cluster.Invalidate();
										break;
									}
									else
									{
										Cluster -= MergedCluster;
									}
								}
							}
						}

						UE_LOG(LogLODGenerator, Verbose, TEXT("Processed(%s): %0.2f {%s}"), Cluster.IsValid()? TEXT("Valid"):TEXT("Invalid"), Cluster.GetCost(), *Cluster.ToString());
					}
				}
			}
		}


		if (LODIdx == 0)
		{
			for (auto& Cluster : HLODVolumeClusters)
			{
				Clusters.Add(Cluster.Value);
			}
		}
		

		// Debug flag so that the clustered data can be inspected on its own, since there is no visualization in the editor yet
		//static bool bBuildActor=true;

		//if (bBuildActors)
		{
			SCOPE_LOG_TIME(TEXT("HLOD_BuildActors"), nullptr);
			// print data
			int32 TotalValidCluster=0;
			for(auto& Cluster: Clusters)
			{
				if(Cluster.IsValid())
				{
					++TotalValidCluster;
				}
			}

			FScopedSlowTask SlowTask(TotalValidCluster, FText::Format( LOCTEXT("HierarchicalLOD_MergeActors", "Merging Actors for LOD {LODIndex} of {LevelName}..."), Arguments) );
			SlowTask.MakeDialog();

			for(auto& Cluster: Clusters)
			{
				if(Cluster.IsValid())
				{
					SlowTask.EnterProgressFrame();

					if (Cluster.Actors.Num() >= MinNumActors)
					{
						Cluster.BuildActor(InLevel, LODIdx, bCreateMeshes);
					}
				}
			}
		}
	}
}
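LogLODGenerator above is a project-defined log category, written at Verbose for per-cluster detail and at Log for the merge events, so the noisy output can be filtered. A minimal sketch of declaring and using such a category (the category and variable names here are hypothetical):

// In a shared header:
DECLARE_LOG_CATEGORY_EXTERN(LogMyHLODTool, Log, All);

// In exactly one .cpp file:
DEFINE_LOG_CATEGORY(LogMyHLODTool);

// Verbose messages are filtered out at the default verbosity; raise the category's verbosity to see them.
UE_LOG(LogMyHLODTool, Verbose, TEXT("Cluster %d cost %0.2f"), ClusterId + 1, Cost);
UE_LOG(LogMyHLODTool, Log, TEXT("Merged clusters %d and %d"), ClusterIdA + 1, ClusterIdB + 1);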
Example #4
void FHierarchicalLODBuilder::InitializeClusters(ULevel* InLevel, const int32 LODIdx, float CullCost)
{
	SCOPE_LOG_TIME(TEXT("STAT_HLOD_InitializeClusters"), nullptr);
	if (InLevel->Actors.Num() > 0)
	{
		if (LODIdx == 0)
		{
			Clusters.Empty();

			TArray<AActor*> GenerationActors;			
			for (int32 ActorId = 0; ActorId < InLevel->Actors.Num(); ++ActorId)
			{
				AActor* Actor = InLevel->Actors[ActorId];
				if (ShouldGenerateCluster(Actor))
				{
					// Check whether or not this actor falls within a HierarchicalLODVolume; if so, add it to the Volume's cluster and exclude it from the normal process
					bool bAdded = false;
					for (auto& Cluster : HLODVolumeClusters)
					{
						if (Cluster.Key->EncompassesPoint(Actor->GetActorLocation(), 0.0f, nullptr))
						{
							FLODCluster ActorCluster(Actor);
							Cluster.Value += ActorCluster;
							bAdded = true;
							break;
						}
					}

					if (!bAdded)
					{
						GenerationActors.Add(Actor);
					}
				}
			}
			
			// Create clusters using actor pairs
			for (int32 ActorId = 0; ActorId<GenerationActors.Num(); ++ActorId)
			{
				AActor* Actor1 = GenerationActors[ActorId];

				for (int32 SubActorId = ActorId + 1; SubActorId<GenerationActors.Num(); ++SubActorId)
				{
					AActor* Actor2 = GenerationActors[SubActorId];

					FLODCluster NewClusterCandidate = FLODCluster(Actor1, Actor2);
					float NewClusterCost = NewClusterCandidate.GetCost();

					if ( NewClusterCost <= CullCost)
					{
						Clusters.Add(NewClusterCandidate);
					}
				}
			}
		}
		else // at this point we only care about LODActors
		{
			Clusters.Empty();

			// we filter the LOD index first
			TArray<AActor*> Actors;
			for(int32 ActorId=0; ActorId<InLevel->Actors.Num(); ++ActorId)
			{
				AActor* Actor = (InLevel->Actors[ActorId]);

				if (Actor)
				{
					if (Actor->IsA(ALODActor::StaticClass()))
					{
						ALODActor* LODActor = CastChecked<ALODActor>(Actor);
						if (LODActor->LODLevel == LODIdx)
						{
							Actors.Add(Actor);
						}
					}
					else if (ShouldGenerateCluster(Actor))
					{
						Actors.Add(Actor);
					}
				}				
			}
			
			// First we generate a graph of paired nodes.
			// This is very expensive when there are many actors, so it will need optimizing later @todo
			for(int32 ActorId=0; ActorId<Actors.Num(); ++ActorId)
			{
				AActor* Actor1 = (Actors[ActorId]);
				for(int32 SubActorId=ActorId+1; SubActorId<Actors.Num(); ++SubActorId)
				{
					AActor* Actor2 = Actors[SubActorId];

					// create new cluster
					FLODCluster NewClusterCandidate = FLODCluster(Actor1, Actor2);
					Clusters.Add(NewClusterCandidate);
				}
			}

			// Shrink after adding actors. LOD 0 has lots of actors, while subsequent LODs tend to have far fewer,
			// so this should save a fair amount of memory.
			Clusters.Shrink();
		}
	}
}
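For reference, the nested pair loops in InitializeClusters build every unordered actor pair, N*(N-1)/2 candidate clusters for N eligible actors; with 2,000 actors that is already 1,999,000 candidates, which is why the comment above flags the step as expensive and why, for LOD 0, candidates whose cost exceeds CullCost are skipped.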
Example #5
void FRawProfilerSession::PrepareLoading()
{
	SCOPE_LOG_TIME_FUNC();

	const FString Filepath = DataFilepath + FStatConstants::StatsFileRawExtension;
	const int64 Size = IFileManager::Get().FileSize( *Filepath );
	if( Size < 4 )
	{
		UE_LOG( LogStats, Error, TEXT( "Could not open: %s" ), *Filepath );
		return;
	}
	TAutoPtr<FArchive> FileReader( IFileManager::Get().CreateFileReader( *Filepath ) );
	if( !FileReader )
	{
		UE_LOG( LogStats, Error, TEXT( "Could not open: %s" ), *Filepath );
		return;
	}

	if( !Stream.ReadHeader( *FileReader ) )
	{
		UE_LOG( LogStats, Error, TEXT( "Could not open, bad magic: %s" ), *Filepath );
		return;
	}

	const bool bIsFinalized = Stream.Header.IsFinalized();
	check( bIsFinalized );
	check( Stream.Header.Version == EStatMagicWithHeader::VERSION_5 );
	StatsThreadStats.MarkAsLoaded();

	TArray<FStatMessage> Messages;
	if( Stream.Header.bRawStatsFile )
	{
		// Read metadata.
		TArray<FStatMessage> MetadataMessages;
		Stream.ReadFNamesAndMetadataMessages( *FileReader, MetadataMessages );
		StatsThreadStats.ProcessMetaDataOnly( MetadataMessages );

		const FName F00245 = FName(245, 245, 0);
		
		const FName F11602 = FName(11602, 11602, 0);
		const FName F06394 = FName(6394, 6394, 0);

		const int64 CurrentFilePos = FileReader->Tell();

		// Update profiler's metadata.
		StatMetaData->UpdateFromStatsState( StatsThreadStats );
		const uint32 GameThreadID = GetMetaData()->GetGameThreadID();

		// Read frames offsets.
		Stream.ReadFramesOffsets( *FileReader );

		// Buffer used to store the compressed and decompressed data.
		TArray<uint8> SrcArray;
		TArray<uint8> DestArray;
		const bool bHasCompressedData = Stream.Header.HasCompressedData();
		check(bHasCompressedData);

		TMap<int64, FStatPacketArray> CombinedHistory;
		int64 TotalPacketSize = 0;
		int64 MaximumPacketSize = 0;
		// Read all packets sequentially, forced by the memory profiler which is now a part of the raw stats.
		// !!CAUTION!! The frame number in the raw stats is pointless, because it is time based, not frame based.
		// Background threads usually execute time-consuming operations, so the frame number won't be valid.
		// Data needs to be combined by thread and time, not by frame number.
		{
			int64 FrameOffset0 = Stream.FramesInfo[0].FrameFileOffset;
			FileReader->Seek( FrameOffset0 );

			const int64 FileSize = FileReader->TotalSize();

			while( FileReader->Tell() < FileSize )
			{
				// Read the compressed data.
				FCompressedStatsData UncompressedData( SrcArray, DestArray );
				*FileReader << UncompressedData;
				if( UncompressedData.HasReachedEndOfCompressedData() )
				{
					break;
				}

				FMemoryReader MemoryReader( DestArray, true );

				FStatPacket* StatPacket = new FStatPacket();
				Stream.ReadStatPacket( MemoryReader, *StatPacket );
				
				const int64 FrameNum = StatPacket->Frame;
				FStatPacketArray& Frame = CombinedHistory.FindOrAdd(FrameNum);
			
				// Check if we need to combine packets from the same thread.
				FStatPacket** CombinedPacket = Frame.Packets.FindByPredicate([&](FStatPacket* Item) -> bool
				{
					return Item->ThreadId == StatPacket->ThreadId;
				});
				
				if( CombinedPacket )
				{
					(*CombinedPacket)->StatMessages += StatPacket->StatMessages;
				}
				else
				{
					Frame.Packets.Add(StatPacket);
				}

				const int64 CurrentPos = FileReader->Tell();
				const int32 PctPos = int32(100.0f*CurrentPos/FileSize);

				UE_LOG( LogStats, Log, TEXT( "%3i Processing FStatPacket: Frame %5i for thread %5i with %6i messages (%.1f MB)" ), 
					PctPos, 
					StatPacket->Frame, 
					StatPacket->ThreadId, 
					StatPacket->StatMessages.Num(), 
					StatPacket->StatMessages.GetAllocatedSize()/1024.0f/1024.0f );

				const int64 PacketSize = StatPacket->StatMessages.GetAllocatedSize();
				TotalPacketSize += PacketSize;
				MaximumPacketSize = FMath::Max( MaximumPacketSize, PacketSize );
			}
		}

		UE_LOG( LogStats, Log, TEXT( "TotalPacketSize: %.1f MB, Max: %.1f MB" ), 
			TotalPacketSize/1024.0f/1024.0f, 
			MaximumPacketSize/1024.0f/1024.0f );

		TArray<int64> Frames;
		CombinedHistory.GenerateKeyArray(Frames);
		Frames.Sort();
		const int64 MiddleFrame = Frames[Frames.Num()/2];


		// Remove all frames without the game thread messages.
		for (int32 FrameIndex = 0; FrameIndex < Frames.Num(); ++FrameIndex)
		{
			const int64 TargetFrame = Frames[FrameIndex];
			const FStatPacketArray& Frame = CombinedHistory.FindChecked( TargetFrame );

			const double GameThreadTimeMS = GetMetaData()->ConvertCyclesToMS( GetFastThreadFrameTimeInternal( Frame, EThreadType::Game ) );

			if (GameThreadTimeMS == 0.0f)
			{
				CombinedHistory.Remove( TargetFrame );
				Frames.RemoveAt( FrameIndex );
				FrameIndex--;
			}
		}
		
	
		StatMetaData->SecondsPerCycle = GetSecondsPerCycle( CombinedHistory.FindChecked(MiddleFrame) );
		check( StatMetaData->GetSecondsPerCycle() > 0.0 );

		//const int32 FirstGameThreadFrame = FindFirstFrameWithGameThread( CombinedHistory, Frames );

		// Prepare profiler frame.
		{
			SCOPE_LOG_TIME( TEXT( "Preparing profiler frames" ), nullptr );

			// Prepare profiler frames.
			double ElapsedTimeMS = 0;

			for( int32 FrameIndex = 0; FrameIndex < Frames.Num(); ++FrameIndex )
			{
				const int64 TargetFrame = Frames[FrameIndex];
				const FStatPacketArray& Frame = CombinedHistory.FindChecked(TargetFrame);

				const double GameThreadTimeMS = GetMetaData()->ConvertCyclesToMS( GetFastThreadFrameTimeInternal(Frame,EThreadType::Game) );

				if( GameThreadTimeMS == 0.0f )
				{
					continue;
				}

				const double RenderThreadTimeMS = GetMetaData()->ConvertCyclesToMS( GetFastThreadFrameTimeInternal(Frame,EThreadType::Renderer) );

				// Update mini-view, convert from cycles to ms.
				TMap<uint32, float> ThreadTimesMS;
				ThreadTimesMS.Add( GameThreadID, GameThreadTimeMS );
				ThreadTimesMS.Add( GetMetaData()->GetRenderThreadID()[0], RenderThreadTimeMS );

				// Pass the reference to the stats' metadata.
				OnAddThreadTime.ExecuteIfBound( FrameIndex, ThreadTimesMS, StatMetaData );

				// Create a new profiler frame and add it to the stream.
				ElapsedTimeMS += GameThreadTimeMS;
				FProfilerFrame* ProfilerFrame = new FProfilerFrame( TargetFrame, GameThreadTimeMS, ElapsedTimeMS );
				ProfilerFrame->ThreadTimesMS = ThreadTimesMS;
				ProfilerStream.AddProfilerFrame( TargetFrame, ProfilerFrame );
			}
		}
	
		// Process the raw stats data.
		{
			SCOPE_LOG_TIME( TEXT( "Processing the raw stats" ), nullptr );

			double CycleCounterAdjustmentMS = 0.0f;

			// Read the raw stats messages.
			for( int32 FrameIndex = 0; FrameIndex < Frames.Num()-1; ++FrameIndex )
			{
				const int64 TargetFrame = Frames[FrameIndex];
				const FStatPacketArray& Frame = CombinedHistory.FindChecked(TargetFrame);

				FProfilerFrame* ProfilerFrame = ProfilerStream.GetProfilerFrame( FrameIndex );

				UE_CLOG( FrameIndex % 8 == 0, LogStats, Log, TEXT( "Processing raw stats frame: %4i/%4i" ), FrameIndex, Frames.Num() );

				ProcessStatPacketArray( Frame, *ProfilerFrame, FrameIndex ); // or ProfilerFrame->TargetFrame

				// Find the first cycle counter for the game thread.
				if( CycleCounterAdjustmentMS == 0.0f )
				{
					CycleCounterAdjustmentMS = ProfilerFrame->Root->CycleCounterStartTimeMS;
				}

				// Update thread time and mark profiler frame as valid and ready for use.
				ProfilerFrame->MarkAsValid();
			}

			// Adjust all profiler frames.
			ProfilerStream.AdjustCycleCounters( CycleCounterAdjustmentMS );
		}
	}

	const int64 AllocatedSize = ProfilerStream.GetAllocatedSize();

	// We have the whole metadata and basic information about the raw stats file, start ticking the profiler session.
	//OnTickHandle = FTicker::GetCoreTicker().AddTicker( OnTick, 0.25f );

#if	0
	if( SessionType == EProfilerSessionTypes::OfflineRaw )
	{
		// Broadcast that a capture file has been fully processed.
		OnCaptureFileProcessed.ExecuteIfBound( GetInstanceID() );
	}
#endif // 0
}
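PrepareLoading combines SCOPE_LOG_TIME_FUNC() for whole-function timing with named SCOPE_LOG_TIME scopes for individual phases, so the total and the per-phase durations both appear in the log. A minimal sketch of that nesting, with hypothetical names:

#include "Stats/StatsMisc.h" // assumed location of the SCOPE_LOG_TIME macros, as above

void LoadProfileData()
{
	SCOPE_LOG_TIME_FUNC(); // logs the total time spent in LoadProfileData() when it returns

	{
		SCOPE_LOG_TIME(TEXT("LoadProfileData - ReadHeader"), nullptr); // times this phase on its own
		// ... read and validate the header ...
	}

	{
		SCOPE_LOG_TIME(TEXT("LoadProfileData - ProcessData"), nullptr);
		// ... process the payload ...
	}
}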