Example #1
0
void FFoliageInstanceBaseCache::CompactInstanceBaseCache(AInstancedFoliageActor* IFA)
{
	UWorld* World = IFA->GetWorld();
	if (!World || World->IsGameWorld())
	{
		return;
	}

	FFoliageInstanceBaseCache& Cache = IFA->InstanceBaseCache;
	
	TSet<FFoliageInstanceBaseId> BasesInUse;
	for (auto& MeshPair : IFA->FoliageMeshes)
	{
		// Use distinct names so the inner loop variable does not shadow the outer one
		for (const auto& ComponentPair : MeshPair.Value->ComponentHash)
		{
			if (ComponentPair.Key != FFoliageInstanceBaseCache::InvalidBaseId)
			{
				BasesInUse.Add(ComponentPair.Key);
			}
		}
	}
	
	// Look for any removed maps
	TSet<FFoliageInstanceBasePtr> InvalidBasePtrs;
	for (auto& Pair : Cache.InstanceBaseLevelMap)
	{
		const auto& WorldAsset = Pair.Key;
		
		bool bExists = (WorldAsset == World);
		// Check sub-levels
		if (!bExists)
		{
			const FName PackageName = FName(*FPackageName::ObjectPathToPackageName(WorldAsset.ToStringReference().ToString()));
			if (World->WorldComposition)
			{
				bExists = World->WorldComposition->DoesTileExists(PackageName);
			}
			else
			{
				bExists = (World->GetLevelStreamingForPackageName(PackageName) != nullptr);
			}
		}

		if (!bExists)
		{
			InvalidBasePtrs.Append(Pair.Value);
			Cache.InstanceBaseLevelMap.Remove(Pair.Key);
		}
		else
		{
			// Remove dead links
			for (int32 i = Pair.Value.Num()-1; i >= 0; --i)
			{
				// Base needs to be removed if it's not in use by existing instances or component was removed
				if (Pair.Value[i].IsNull() || !BasesInUse.Contains(Cache.GetInstanceBaseId(Pair.Value[i])))
				{
					InvalidBasePtrs.Add(Pair.Value[i]);
					Pair.Value.RemoveAt(i);
				}
			}

			if (Pair.Value.Num() == 0)
			{
				Cache.InstanceBaseLevelMap.Remove(Pair.Key);
			}
		}
	}
	
	TSet<FFoliageInstanceBaseId> InvalidBaseIds;
	Cache.InstanceBaseInvMap.Empty();
	// Look for any removed base components
	for (const auto& Pair : Cache.InstanceBaseMap)
	{
		const FFoliageInstanceBaseInfo& BaseInfo = Pair.Value;
		if (InvalidBasePtrs.Contains(BaseInfo.BasePtr))
		{
			InvalidBaseIds.Add(Pair.Key);
			Cache.InstanceBaseMap.Remove(Pair.Key);
		}
		else
		{
			// Regenerate inverse map
			check(!Cache.InstanceBaseInvMap.Contains(BaseInfo.BasePtr));
			Cache.InstanceBaseInvMap.Add(BaseInfo.BasePtr, Pair.Key);
		}
	}

	if (InvalidBaseIds.Num())
	{
		for (auto& Pair : IFA->FoliageMeshes)
		{
			auto& MeshInfo = Pair.Value;
			MeshInfo->ComponentHash.Empty();
			int32 InstanceIdx = 0;
			
			for (FFoliageInstance& Instance : MeshInfo->Instances)
			{
				if (InvalidBaseIds.Contains(Instance.BaseId))
				{
					Instance.BaseId = FFoliageInstanceBaseCache::InvalidBaseId;
				}

				MeshInfo->ComponentHash.FindOrAdd(Instance.BaseId).Add(InstanceIdx);
				InstanceIdx++;
			}
		}

		Cache.InstanceBaseMap.Compact();
		Cache.InstanceBaseLevelMap.Compact();
	}
}
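A minimal standalone sketch of the same compaction idea using only the standard library (the container and type names below are illustrative, not part of the engine API): collect the identifiers that are still in use into a set, then erase map entries whose keys no longer appear in that set.

#include <cstdint>
#include <map>
#include <set>
#include <vector>

using BaseId = std::uint32_t;

// Drop every entry whose id is not referenced anymore; erase via the
// iterator returned by map::erase so iteration stays valid.
void CompactCache(std::map<BaseId, std::vector<int>>& Cache, const std::set<BaseId>& IdsInUse)
{
    for (auto It = Cache.begin(); It != Cache.end(); )
    {
        if (IdsInUse.count(It->first) == 0)
        {
            It = Cache.erase(It);   // erase returns the next valid iterator
        }
        else
        {
            ++It;
        }
    }
}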
TOptional<EItemDropZone> FSequencerFolderNode::CanDrop( FSequencerDisplayNodeDragDropOp& DragDropOp, EItemDropZone ItemDropZone ) const
{
	DragDropOp.ResetToDefaultToolTip();

	if ( ItemDropZone == EItemDropZone::AboveItem )
	{
		if ( GetParent().IsValid() )
		{
			// When dropping above, only allow it for root level nodes.
			return TOptional<EItemDropZone>();
		}
		else
		{
			// Make sure there are no folder name collisions with the root folders
			UMovieScene* FocusedMovieScene = GetParentTree().GetSequencer().GetFocusedMovieSceneSequence()->GetMovieScene();
			TSet<FName> RootFolderNames;
			for ( UMovieSceneFolder* RootFolder : FocusedMovieScene->GetRootFolders() )
			{
				RootFolderNames.Add( RootFolder->GetFolderName() );
			}

			for ( TSharedRef<FSequencerDisplayNode> DraggedNode : DragDropOp.GetDraggedNodes() )
			{
				if ( DraggedNode->GetType() == ESequencerNode::Folder )
				{
					TSharedRef<FSequencerFolderNode> DraggedFolder = StaticCastSharedRef<FSequencerFolderNode>( DraggedNode );
					if ( RootFolderNames.Contains( DraggedFolder->GetFolder().GetFolderName() ) )
					{
						DragDropOp.CurrentHoverText = FText::Format(
							NSLOCTEXT( "SeqeuencerFolderNode", "DuplicateRootFolderDragErrorFormat", "Root folder with name '{0}' already exists." ),
							FText::FromName( DraggedFolder->GetFolder().GetFolderName() ) );
						return TOptional<EItemDropZone>();
					}
				}
			}
		}
		return TOptional<EItemDropZone>( EItemDropZone::AboveItem );
	}
	else
	{
		// When dropping onto, don't allow dropping into the same folder, don't allow dropping
		// parents into children, and don't allow duplicate folder names.
		TSet<FName> ChildFolderNames;
		for ( UMovieSceneFolder* ChildFolder : GetFolder().GetChildFolders() )
		{
			ChildFolderNames.Add( ChildFolder->GetFolderName() );
		}

		for ( TSharedRef<FSequencerDisplayNode> DraggedNode : DragDropOp.GetDraggedNodes() )
		{
			TSharedPtr<FSequencerDisplayNode> ParentSeqNode = DraggedNode->GetParent();
			if ( ParentSeqNode.IsValid() && ParentSeqNode.Get() == this )
			{
				DragDropOp.CurrentHoverText = NSLOCTEXT( "SeqeuencerFolderNode", "SameParentDragError", "Can't drag a node onto the same parent." );
				return TOptional<EItemDropZone>();
			}

			if ( DraggedNode->GetType() == ESequencerNode::Folder )
			{
				TSharedRef<FSequencerFolderNode> DraggedFolder = StaticCastSharedRef<FSequencerFolderNode>( DraggedNode );
				if ( ChildFolderNames.Contains( DraggedFolder->GetFolder().GetFolderName() ) )
				{
					DragDropOp.CurrentHoverText = FText::Format(
						NSLOCTEXT( "SeqeuencerFolderNode", "DuplicateChildFolderDragErrorFormat", "Folder with name '{0}' already exists." ),
						FText::FromName( DraggedFolder->GetFolder().GetFolderName() ) );
					return TOptional<EItemDropZone>();
				}
			}
		}
		TSharedPtr<FSequencerDisplayNode> CurrentNode = SharedThis( ( FSequencerDisplayNode* )this );
		while ( CurrentNode.IsValid() )
		{
			if ( DragDropOp.GetDraggedNodes().Contains( CurrentNode ) )
			{
				DragDropOp.CurrentHoverText = NSLOCTEXT( "SeqeuencerFolderNode", "ParentIntoChildDragError", "Can't drag a parent node into one of its children." );
				return TOptional<EItemDropZone>();
			}
			CurrentNode = CurrentNode->GetParent();
		}
		return TOptional<EItemDropZone>( EItemDropZone::OntoItem );
	}
}
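The "parent dragged into one of its children" check at the end of CanDrop amounts to walking the ancestor chain of the drop target and rejecting the drop if any ancestor is among the dragged nodes. A hedged standard-C++ sketch of that walk (the Node type and function name are invented for illustration):

#include <memory>
#include <unordered_set>

struct Node
{
    std::weak_ptr<Node> Parent;
};

// Returns true if Target or any of its ancestors is in DraggedNodes,
// i.e. the drop would move a node into its own subtree.
bool IsDropIntoOwnSubtree(std::shared_ptr<Node> Target,
                          const std::unordered_set<Node*>& DraggedNodes)
{
    for (std::shared_ptr<Node> Current = Target; Current; Current = Current->Parent.lock())
    {
        if (DraggedNodes.count(Current.get()) != 0)
        {
            return true;
        }
    }
    return false;
}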
Example #3
0
void UK2Node::AutowireNewNode(UEdGraphPin* FromPin)
{
	const UEdGraphSchema_K2* K2Schema = CastChecked<UEdGraphSchema_K2>(GetSchema());
	
	// Do some auto-connection
	if (FromPin != NULL)
	{
		TSet<UEdGraphNode*> NodeList;

		// Sometimes we don't find an ideal connection, but we want to exhaust
		// all our options first... this stores a secondary, less-ideal pin to connect to if nothing better is found
		UEdGraphPin* BackupConnection = NULL;
		// If not dragging an exec pin, auto-connect from dragged pin to first compatible pin on the new node
		for (int32 i=0; i<Pins.Num(); i++)
		{
			UEdGraphPin* Pin = Pins[i];
			check(Pin);

			ECanCreateConnectionResponse ConnectResponse = K2Schema->CanCreateConnection(FromPin, Pin).Response;
			if (ConnectResponse == ECanCreateConnectionResponse::CONNECT_RESPONSE_MAKE)
			{
				if (K2Schema->TryCreateConnection(FromPin, Pin))
				{
					NodeList.Add(FromPin->GetOwningNode());
					NodeList.Add(this);
				}

				// null out the backup connection (so we don't attempt to make it 
				// once we exit the loop... we successfully made this connection!)
				BackupConnection = NULL;
				break;
			}
			else if((FromPin->PinType.PinCategory == K2Schema->PC_Exec) && (ConnectResponse == ECanCreateConnectionResponse::CONNECT_RESPONSE_BREAK_OTHERS_A))
			{
				InsertNewNode(FromPin, Pin, NodeList);

				// null out the backup connection (so we don't attempt to make it 
				// once we exit the loop... we successfully made this connection!)
				BackupConnection = NULL;
				break;
			}
			else if ((BackupConnection == NULL) && (ConnectResponse == ECanCreateConnectionResponse::CONNECT_RESPONSE_MAKE_WITH_CONVERSION_NODE))
			{
				// save this off, in-case we don't make any connection at all
				BackupConnection = Pin;
			}
		}

		// if we didn't find an ideal connection, then lets connect this pin to 
		// the BackupConnection (something, like a connection that requires a conversion node, etc.)
		if ((BackupConnection != NULL) && K2Schema->TryCreateConnection(FromPin, BackupConnection))
		{
			NodeList.Add(FromPin->GetOwningNode());
			NodeList.Add(this);
		}

		// If we were not dragging an exec pin, but it was an output pin, try and connect the Then and Execute pins
		if ((FromPin->PinType.PinCategory != K2Schema->PC_Exec  && FromPin->Direction == EGPD_Output))
		{
			UEdGraphNode* FromPinNode = FromPin->GetOwningNode();
			UEdGraphPin* FromThenPin = FromPinNode->FindPin(K2Schema->PN_Then);

			UEdGraphPin* ToExecutePin = FindPin(K2Schema->PN_Execute);

			if ((FromThenPin != NULL) && (FromThenPin->LinkedTo.Num() == 0) && (ToExecutePin != NULL) && K2Schema->ArePinsCompatible(FromThenPin, ToExecutePin, NULL))
			{
				if (K2Schema->TryCreateConnection(FromThenPin, ToExecutePin))
				{
					NodeList.Add(FromPinNode);
					NodeList.Add(this);
				}
			}
		}

		// Send all nodes that received a new pin connection a notification
		for (auto It = NodeList.CreateConstIterator(); It; ++It)
		{
			UEdGraphNode* Node = (*It);
			Node->NodeConnectionListChanged();
		}
	}
}
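AutowireNewNode uses a "best match, otherwise remembered fallback" scan: the first pin that connects directly wins and clears the fallback, while a pin that would only connect through a conversion is merely remembered in case nothing better shows up. A small illustrative sketch of that pattern outside the engine (the enum and function are made up):

#include <vector>

enum class Match { None, Direct, WithConversion };

// Returns the index of the first directly matching candidate, or the first
// candidate that matches with conversion if no direct match exists, or -1.
int PickConnection(const std::vector<Match>& Candidates)
{
    int Backup = -1;
    for (int i = 0; i < static_cast<int>(Candidates.size()); ++i)
    {
        if (Candidates[i] == Match::Direct)
        {
            return i;               // ideal match: no fallback needed
        }
        if (Backup == -1 && Candidates[i] == Match::WithConversion)
        {
            Backup = i;             // remember a less-ideal option
        }
    }
    return Backup;                  // may still be -1 if nothing fits
}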
uint32 FAssetDataDiscovery::Run()
{
	double DiscoverStartTime = FPlatformTime::Seconds();
	int32 NumDiscoveredFiles = 0;

	FString LocalFilenamePathToPrioritize;

	TSet<FString> LocalDiscoveredPathsSet;
	TArray<FString> LocalDiscoveredDirectories;

	TArray<FDiscoveredPackageFile> LocalPriorityFilesToSearch;
	TArray<FDiscoveredPackageFile> LocalNonPriorityFilesToSearch;

	// This set contains the folders that we should hide by default unless they contain assets
	TSet<FString> PathsToHideIfEmpty;
	PathsToHideIfEmpty.Add(TEXT("/Game/Collections"));

	auto FlushLocalResultsIfRequired = [&]()
	{
		if (LocalPriorityFilesToSearch.Num() > 0 || LocalNonPriorityFilesToSearch.Num() > 0 || LocalDiscoveredPathsSet.Num() > 0)
		{
			TArray<FString> LocalDiscoveredPathsArray = LocalDiscoveredPathsSet.Array();

			{
				FScopeLock CritSectionLock(&WorkerThreadCriticalSection);

				// Place all the discovered files into the files to search list
				DiscoveredPaths.Append(MoveTemp(LocalDiscoveredPathsArray));

				PriorityDiscoveredFiles.Append(MoveTemp(LocalPriorityFilesToSearch));
				NonPriorityDiscoveredFiles.Append(MoveTemp(LocalNonPriorityFilesToSearch));
			}
		}

		LocalDiscoveredPathsSet.Reset();

		LocalPriorityFilesToSearch.Reset();
		LocalNonPriorityFilesToSearch.Reset();
	};

	auto IsPriorityFile = [&](const FString& InPackageFilename) -> bool
	{
		return !bIsSynchronous && !LocalFilenamePathToPrioritize.IsEmpty() && InPackageFilename.StartsWith(LocalFilenamePathToPrioritize);
	};

	auto OnIterateDirectoryItem = [&](const TCHAR* InPackageFilename, const FFileStatData& InPackageStatData) -> bool
	{
		if (StopTaskCounter.GetValue() != 0)
		{
			// Requested to stop - break out of the directory iteration
			return false;
		}

		const FString PackageFilenameStr = InPackageFilename;

		if (InPackageStatData.bIsDirectory)
		{
			LocalDiscoveredDirectories.Add(PackageFilenameStr / TEXT(""));

			FString PackagePath;
			if (FPackageName::TryConvertFilenameToLongPackageName(PackageFilenameStr, PackagePath) && !PathsToHideIfEmpty.Contains(PackagePath))
			{
				LocalDiscoveredPathsSet.Add(PackagePath);
			}
		}
		else if (FPackageName::IsPackageFilename(PackageFilenameStr))
		{
			if (IsValidPackageFileToRead(PackageFilenameStr))
			{
				const FString LongPackageNameStr = FPackageName::FilenameToLongPackageName(PackageFilenameStr);

				if (IsPriorityFile(PackageFilenameStr))
				{
					LocalPriorityFilesToSearch.Add(FDiscoveredPackageFile(PackageFilenameStr, InPackageStatData.ModificationTime));
				}
				else
				{
					LocalNonPriorityFilesToSearch.Add(FDiscoveredPackageFile(PackageFilenameStr, InPackageStatData.ModificationTime));
				}

				LocalDiscoveredPathsSet.Add(FPackageName::GetLongPackagePath(LongPackageNameStr));

				++NumDiscoveredFiles;

				// Flush the data if we've processed enough
				if (!bIsSynchronous && (LocalPriorityFilesToSearch.Num() + LocalNonPriorityFilesToSearch.Num()) >= AssetDataGathererConstants::MaxFilesToDiscoverBeforeFlush)
				{
					FlushLocalResultsIfRequired();
				}
			}
		}

		return true;
	};

	bool bIsIdle = true;

	while (StopTaskCounter.GetValue() == 0)
	{
		FString LocalDirectoryToSearch;
		{
			FScopeLock CritSectionLock(&WorkerThreadCriticalSection);

			if (DirectoriesToSearch.Num() > 0)
			{
				bIsDiscoveringFiles = true;

				LocalFilenamePathToPrioritize = FilenamePathToPrioritize;

				// Pop off the first path to search
				LocalDirectoryToSearch = DirectoriesToSearch[0];
				DirectoriesToSearch.RemoveAt(0, 1, false);
			}
		}

		if (LocalDirectoryToSearch.Len() > 0)
		{
			if (bIsIdle)
			{
				bIsIdle = false;

				// About to start work - reset these
				DiscoverStartTime = FPlatformTime::Seconds();
				NumDiscoveredFiles = 0;
			}

			// Iterate the current search directory
			FLambdaDirectoryStatVisitor Visitor(OnIterateDirectoryItem);
			IFileManager::Get().IterateDirectoryStat(*LocalDirectoryToSearch, Visitor);

			{
				FScopeLock CritSectionLock(&WorkerThreadCriticalSection);

				// Push back any newly discovered sub-directories
				if (LocalDiscoveredDirectories.Num() > 0)
				{
					// Use LocalDiscoveredDirectories as scratch space, then move it back out - this puts the directories we just 
					// discovered at the start of the list for the next iteration, which can help with disk locality
					LocalDiscoveredDirectories.Append(MoveTemp(DirectoriesToSearch));
					DirectoriesToSearch = MoveTemp(LocalDiscoveredDirectories);
				}
				LocalDiscoveredDirectories.Reset();

				if (!bIsSynchronous)
				{
					FlushLocalResultsIfRequired();
					SortPathsByPriority(1);
				}
			}
		}
		else
		{
			if (!bIsIdle)
			{
				bIsIdle = true;

				{
					FScopeLock CritSectionLock(&WorkerThreadCriticalSection);
					bIsDiscoveringFiles = false;
				}

				UE_LOG(LogAssetRegistry, Verbose, TEXT("Discovery took %0.6f seconds and found %d files to process"), FPlatformTime::Seconds() - DiscoverStartTime, NumDiscoveredFiles);
			}

			// Ran out of things to do... if we have any pending results, flush those now
			FlushLocalResultsIfRequired();

			if (bIsSynchronous)
			{
				// This is synchronous. Since our work is done, we should safely exit
				Stop();
			}
			else
			{
				// No work to do. Sleep for a little and try again later.
				FPlatformProcess::Sleep(0.1);
			}
		}
	}

	return 0;
}
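The discovery loop batches results locally and only takes the critical section when it flushes, which keeps lock contention low. A minimal sketch of that pattern with the standard library (the names below are illustrative, not the engine's types):

#include <mutex>
#include <string>
#include <vector>

std::mutex ResultsMutex;
std::vector<std::string> SharedResults;          // consumed by another thread

// Append locally gathered results to the shared list in one short
// critical section, then reuse the local buffer.
void FlushLocalResults(std::vector<std::string>& LocalResults)
{
    if (LocalResults.empty())
    {
        return;
    }
    {
        std::lock_guard<std::mutex> Lock(ResultsMutex);
        SharedResults.insert(SharedResults.end(),
                             std::make_move_iterator(LocalResults.begin()),
                             std::make_move_iterator(LocalResults.end()));
    }
    LocalResults.clear();
}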
Example #5
0
void UK2Node_Composite::PostPasteNode()
{
	Super::PostPasteNode();

	//@TODO: Should verify that each node in the composite can be pasted into this new graph successfully (CanPasteHere)

	if (BoundGraph != NULL)
	{
		UEdGraph* ParentGraph = CastChecked<UEdGraph>(GetOuter());
		ensure(BoundGraph != ParentGraph);

		// Update the InputSinkNode / OutputSourceNode pointers to point to the new graph
		TSet<UEdGraphNode*> BoundaryNodes;
		for (int32 NodeIndex = 0; NodeIndex < BoundGraph->Nodes.Num(); ++NodeIndex)
		{
			UEdGraphNode* Node = BoundGraph->Nodes[NodeIndex];
			
			// Remove this node if only one instance of it may exist in the Blueprint
			if(UK2Node_Event* Event = Cast<UK2Node_Event>(Node))
			{
				UBlueprint* BP = FBlueprintEditorUtils::FindBlueprintForGraphChecked(BoundGraph);
				if(FBlueprintEditorUtils::FindOverrideForFunction(BP, Event->EventReference.GetMemberParentClass(Event->GetBlueprintClassFromNode()), Event->EventReference.GetMemberName()))
				{
					FBlueprintEditorUtils::RemoveNode(BP, Node, true);
					NodeIndex--;
					continue;
				}
			}
			
			BoundaryNodes.Add(Node);

			if (Node->GetClass() == UK2Node_Tunnel::StaticClass())
			{
				// Exactly a tunnel node, should be the entrance or exit node
				UK2Node_Tunnel* Tunnel = CastChecked<UK2Node_Tunnel>(Node);

				if (Tunnel->bCanHaveInputs && !Tunnel->bCanHaveOutputs)
				{
					OutputSourceNode = Tunnel;
					Tunnel->InputSinkNode = this;
				}
				else if (Tunnel->bCanHaveOutputs && !Tunnel->bCanHaveInputs)
				{
					InputSinkNode = Tunnel;
					Tunnel->OutputSourceNode = this;
				}
				else
				{
					ensureMsgf(false, *LOCTEXT("UnexpectedTunnelNode", "Unexpected tunnel node '%s' in cloned graph '%s' (both I/O or neither)").ToString(), *Tunnel->GetName(), *GetName());
				}
			}
		}

		RenameBoundGraphCloseToName(BoundGraph->GetName());
		ensure(BoundGraph->SubGraphs.Find(ParentGraph) == INDEX_NONE);

		//Nested composites will already be in the SubGraph array
		if(ParentGraph->SubGraphs.Find(BoundGraph) == INDEX_NONE)
		{
			ParentGraph->SubGraphs.Add(BoundGraph);
		}

		FEdGraphUtilities::PostProcessPastedNodes(BoundaryNodes);
	}
}
Example #6
0
	void FManifestBuilderImpl::BuildManifest()
	{
		TMap<FGuid, FChunkInfo> ChunkInfoLookup;
		bool Running = true;
		while (Running)
		{
			FDataScannerPtr NextScanner = GetNextScanner();
			if (NextScanner.IsValid())
			{
				FDataScanResult ScanResult = NextScanner->GetResultWhenComplete();
				ChunkInfoLookup.Append(ScanResult.ChunkInfo);

				// Always reverse for now
				if (ScanResult.DataStructure.Num() > 0)
				{
					FChunkPart& ChunkPart = ScanResult.DataStructure[0];
					if (ChunkPart.DataOffset != FileBuilder.CurrentDataPos)
					{
						check(ChunkPart.DataOffset < FileBuilder.CurrentDataPos); // Missing data!

						bool FoundPosition = false;
						uint64 DataCount = 0;
						for (int32 FileIdx = 0; FileIdx < Manifest->Data->FileManifestList.Num() && !FoundPosition; ++FileIdx)
						{
							FFileManifestData& FileManifest = Manifest->Data->FileManifestList[FileIdx];
							FileManifest.Init();
							uint64 FileStartIdx = DataCount;
							uint64 FileEndIdx = FileStartIdx + FileManifest.GetFileSize();
							if (FileEndIdx > ChunkPart.DataOffset)
							{
								for (int32 ChunkIdx = 0; ChunkIdx < FileManifest.FileChunkParts.Num() && !FoundPosition; ++ChunkIdx)
								{
									FChunkPartData& ChunkPartData = FileManifest.FileChunkParts[ChunkIdx];
									uint64 ChunkPartEndIdx = DataCount + ChunkPartData.Size;
									if (ChunkPartEndIdx < ChunkPart.DataOffset)
									{
										DataCount += ChunkPartData.Size;
									}
									else if (ChunkPartEndIdx > ChunkPart.DataOffset)
									{
										ChunkPartData.Size = ChunkPart.DataOffset - DataCount;
										FileBuilder.CurrentDataPos = DataCount + ChunkPartData.Size;
										FileManifest.FileChunkParts.SetNum(ChunkIdx + 1, false);
										FileManifest.FileChunkParts.Emplace();
										Manifest->Data->FileManifestList.SetNum(FileIdx + 1, false);
										FileBuilder.FileManifest = &Manifest->Data->FileManifestList.Last();
										bool FoundFile = BuildStreamer->GetFileSpan(FileStartIdx, FileBuilder.FileSpan);
										check(FoundFile); // Incorrect positional tracking
										FoundPosition = true;
									}
									else
									{
										FileBuilder.CurrentDataPos = DataCount + ChunkPartData.Size;
										FileManifest.FileChunkParts.SetNum(ChunkIdx + 1, false);
										FileManifest.FileChunkParts.Emplace();
										Manifest->Data->FileManifestList.SetNum(FileIdx + 1, false);
										FileBuilder.FileManifest = &Manifest->Data->FileManifestList.Last();
										bool FoundFile = BuildStreamer->GetFileSpan(FileStartIdx, FileBuilder.FileSpan);
										check(FoundFile); // Incorrect positional tracking
										FoundPosition = true;
									}
								}
							}
							else if (FileEndIdx < ChunkPart.DataOffset)
							{
								DataCount += FileManifest.GetFileSize();
							}
							else
							{
								FileBuilder.FileManifest = nullptr;
								FileBuilder.CurrentDataPos = DataCount + FileManifest.GetFileSize();
								Manifest->Data->FileManifestList.SetNum(FileIdx + 1, false);
								FoundPosition = true;
							}
						}

						check(ChunkPart.DataOffset == FileBuilder.CurrentDataPos);
						check(FileBuilder.FileManifest == nullptr || FileBuilder.FileSpan.Filename == Manifest->Data->FileManifestList.Last().Filename);
					}
				}

				for (int32 idx = 0; idx < ScanResult.DataStructure.Num(); ++idx)
				{
					FChunkPart& ChunkPart = ScanResult.DataStructure[idx];
					// Starting new file?
					if (FileBuilder.FileManifest == nullptr)
					{
						Manifest->Data->FileManifestList.Emplace();
						FileBuilder.FileManifest = &Manifest->Data->FileManifestList.Last();

						bool FoundFile = BuildStreamer->GetFileSpan(FileBuilder.CurrentDataPos, FileBuilder.FileSpan);
						check(FoundFile); // Incorrect positional tracking

						FileBuilder.FileManifest->Filename = FileBuilder.FileSpan.Filename;
						FileBuilder.FileManifest->FileChunkParts.Emplace();
					}

					FChunkPartData& FileChunkPartData = FileBuilder.FileManifest->FileChunkParts.Last();
					FileChunkPartData.Guid = ChunkPart.ChunkGuid;
					FileChunkPartData.Offset = (FileBuilder.CurrentDataPos - ChunkPart.DataOffset) + ChunkPart.ChunkOffset;

					// Process data into file manifests
					int64 FileDataLeft = (FileBuilder.FileSpan.StartIdx + FileBuilder.FileSpan.Size) - FileBuilder.CurrentDataPos;
					int64 ChunkDataLeft = (ChunkPart.DataOffset + ChunkPart.PartSize) - FileBuilder.CurrentDataPos;
					check(FileDataLeft > 0);
					check(ChunkDataLeft > 0);

					if (ChunkDataLeft >= FileDataLeft)
					{
						FileBuilder.CurrentDataPos += FileDataLeft;
						FileChunkPartData.Size = FileDataLeft;
					}
					else
					{
						FileBuilder.CurrentDataPos += ChunkDataLeft;
						FileChunkPartData.Size = ChunkDataLeft;
					}

					FileDataLeft = (FileBuilder.FileSpan.StartIdx + FileBuilder.FileSpan.Size) - FileBuilder.CurrentDataPos;
					ChunkDataLeft = (ChunkPart.DataOffset + ChunkPart.PartSize) - FileBuilder.CurrentDataPos;
					check(FileDataLeft == 0 || ChunkDataLeft == 0);
					// End of file?
					if (FileDataLeft == 0)
					{
						// Fill out rest of data??
						FFileSpan FileSpan;
						bool FoundFile = BuildStreamer->GetFileSpan(FileBuilder.FileSpan.StartIdx, FileSpan);
						check(FoundFile); // Incorrect positional tracking
						check(FileSpan.Filename == FileBuilder.FileManifest->Filename);
						FMemory::Memcpy(FileBuilder.FileManifest->FileHash.Hash, FileSpan.SHAHash.Hash, FSHA1::DigestSize);
						FFileAttributes Attributes = FileAttributesMap.FindRef(FileSpan.Filename);
						FileBuilder.FileManifest->bIsUnixExecutable = Attributes.bUnixExecutable || FileSpan.IsUnixExecutable;
						FileBuilder.FileManifest->SymlinkTarget = FileSpan.SymlinkTarget;
						FileBuilder.FileManifest->bIsReadOnly = Attributes.bReadOnly;
						FileBuilder.FileManifest->bIsCompressed = Attributes.bCompressed;
						FileBuilder.FileManifest->InstallTags = Attributes.InstallTags.Array();
						FileBuilder.FileManifest->Init();
						check(FileBuilder.FileManifest->GetFileSize() == FileBuilder.FileSpan.Size);
						FileBuilder.FileManifest = nullptr;
					}
					else if (ChunkDataLeft == 0)
					{
						FileBuilder.FileManifest->FileChunkParts.Emplace();
					}

					// Continue with this chunk?
					if (ChunkDataLeft > 0)
					{
						--idx;
					}
				}
			}
			else
			{
				if (EndOfData)
				{
					Running = false;
				}
				else
				{
					CheckForWork->Wait();
					CheckForWork->Reset();
				}
			}
		}

		// Fill out the chunk list using only chunks that are still referenced
		TSet<FGuid> ReferencedChunks;
		for (const auto& FileManifest : Manifest->Data->FileManifestList)
		{
			for (const auto& ChunkPart : FileManifest.FileChunkParts)
			{
				if (ReferencedChunks.Contains(ChunkPart.Guid) == false)
				{
					auto& ChunkInfo = ChunkInfoLookup[ChunkPart.Guid];
					ReferencedChunks.Add(ChunkPart.Guid);
					Manifest->Data->ChunkList.Emplace();
					auto& ChunkInfoData = Manifest->Data->ChunkList.Last();
					ChunkInfoData.Guid = ChunkPart.Guid;
					ChunkInfoData.Hash = ChunkInfo.Hash;
					FMemory::Memcpy(ChunkInfoData.ShaHash.Hash, ChunkInfo.ShaHash.Hash, FSHA1::DigestSize);
					ChunkInfoData.FileSize = ChunkInfo.ChunkFileSize;
					ChunkInfoData.GroupNumber = FCrc::MemCrc32(&ChunkPart.Guid, sizeof(FGuid)) % 100;
				}
			}
		}

		// Get empty files
		FSHA1 EmptyHasher;
		EmptyHasher.Final();
		const TArray< FString >& EmptyFileList = BuildStreamer->GetEmptyFiles();
		for (const auto& EmptyFile : EmptyFileList)
		{
			Manifest->Data->FileManifestList.Emplace();
			FFileManifestData& EmptyFileManifest = Manifest->Data->FileManifestList.Last();
			EmptyFileManifest.Filename = EmptyFile;
			EmptyHasher.GetHash(EmptyFileManifest.FileHash.Hash);
		}

		// Fill out lookups
		Manifest->InitLookups();
	}
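The final chunk list above is built by walking every file's chunk parts and emitting each referenced chunk exactly once, using a set of already-seen GUIDs as the guard. A compact standard-library sketch of that dedup-while-building step (the types here are illustrative):

#include <string>
#include <unordered_set>
#include <vector>

struct ChunkRef  { std::string Guid; };
struct FileEntry { std::vector<ChunkRef> ChunkParts; };

// Collect every chunk GUID that is referenced by at least one file,
// preserving first-seen order and skipping duplicates.
std::vector<std::string> CollectReferencedChunks(const std::vector<FileEntry>& Files)
{
    std::unordered_set<std::string> Seen;
    std::vector<std::string> Result;
    for (const FileEntry& File : Files)
    {
        for (const ChunkRef& Part : File.ChunkParts)
        {
            if (Seen.insert(Part.Guid).second)   // true only for newly seen GUIDs
            {
                Result.push_back(Part.Guid);
            }
        }
    }
    return Result;
}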
void FSequencerObjectBindingNode::HandleAddTrackSubMenuNew(FMenuBuilder& AddTrackMenuBuilder, TArray<TArray<UProperty*> > KeyablePropertyPaths)
{
	// [PostProcessSettings] [Bloom1Tint] [X]
	// [PostProcessSettings] [Bloom1Tint] [Y]
	// [PostProcessSettings] [ColorGrading]

	// Create property menu data based on keyable property paths
	TSet<UProperty*> PropertiesTraversed;
	TArray<PropertyMenuData> KeyablePropertyMenuData;
	for (auto KeyablePropertyPath : KeyablePropertyPaths)
	{		
		PropertyMenuData KeyableMenuData;
		KeyableMenuData.PropertyPath = KeyablePropertyPath;

		// If the path is greater than 1, keep track of the actual properties (not channels) and only add these properties once since we can't do single channel keying of a property yet.
		if (KeyablePropertyPath.Num() > 1) //@todo
		{
			if (PropertiesTraversed.Find(KeyablePropertyPath[1]) != nullptr)
			{
				continue;
			}

			KeyableMenuData.MenuName = FObjectEditorUtils::GetCategoryFName(KeyablePropertyPath[1]).ToString();
			PropertiesTraversed.Add(KeyablePropertyPath[1]);
		}
		else
		{
			// No sub menus items, so skip
			continue; 
		}
		KeyablePropertyMenuData.Add(KeyableMenuData);
	}

	// Sort on the menu name
	KeyablePropertyMenuData.Sort([](const PropertyMenuData& A, const PropertyMenuData& B)
	{
		int32 CompareResult = A.MenuName.Compare(B.MenuName);
		return CompareResult < 0;
	});

	// Add menu items
	for (int32 MenuDataIndex = 0; MenuDataIndex < KeyablePropertyMenuData.Num(); )
	{
		TArray<TArray<UProperty*> > KeyableSubMenuPropertyPaths;
		KeyableSubMenuPropertyPaths.Add(KeyablePropertyMenuData[MenuDataIndex].PropertyPath);

		for (; MenuDataIndex < KeyablePropertyMenuData.Num()-1; )
		{
			if (KeyablePropertyMenuData[MenuDataIndex].MenuName == KeyablePropertyMenuData[MenuDataIndex+1].MenuName)
			{
				++MenuDataIndex;
				KeyableSubMenuPropertyPaths.Add(KeyablePropertyMenuData[MenuDataIndex].PropertyPath);
			}
			else
			{
				break;
			}
		}

		const int32 PropertyNameIndexStart = 1; // Strip off the struct property name
		const int32 PropertyNameIndexEnd = 2; // Stop at the property name, don't descend into the channels

		AddTrackMenuBuilder.AddSubMenu(
			FText::FromString(KeyablePropertyMenuData[MenuDataIndex].MenuName),
			FText::GetEmpty(), 
			FNewMenuDelegate::CreateSP(this, &FSequencerObjectBindingNode::AddPropertyMenuItems, KeyableSubMenuPropertyPaths, PropertyNameIndexStart, PropertyNameIndexEnd));

		++MenuDataIndex;
	}
}
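After sorting, the menu builder groups adjacent entries that share a MenuName into one submenu. The grouping step on its own looks roughly like this standard-C++ sketch (the struct and function names are invented):

#include <string>
#include <vector>

struct MenuItem { std::string MenuName; int PayloadId; };

// Split a list that is already sorted by MenuName into runs of equal names.
std::vector<std::vector<MenuItem>> GroupByMenuName(const std::vector<MenuItem>& Sorted)
{
    std::vector<std::vector<MenuItem>> Groups;
    for (std::size_t i = 0; i < Sorted.size(); )
    {
        std::vector<MenuItem> Group;
        Group.push_back(Sorted[i]);
        std::size_t j = i + 1;
        while (j < Sorted.size() && Sorted[j].MenuName == Sorted[i].MenuName)
        {
            Group.push_back(Sorted[j]);
            ++j;
        }
        Groups.push_back(std::move(Group));
        i = j;
    }
    return Groups;
}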
void UEnvironmentQueryGraph::SpawnMissingNodes()
{
	UEnvQuery* QueryOwner = Cast<UEnvQuery>(GetOuter());
	if (QueryOwner == nullptr)
	{
		return;
	}

	TSet<UEnvQueryTest*> ExistingTests;
	TSet<UEnvQueryOption*> ExistingNodes;
	TArray<UEnvQueryOption*> OptionsCopy = QueryOwner->GetOptions();

	UAIGraphNode* MyRootNode = nullptr;
	for (int32 Idx = 0; Idx < Nodes.Num(); Idx++)
	{
		UEnvironmentQueryGraphNode* MyNode = Cast<UEnvironmentQueryGraphNode>(Nodes[Idx]);
		UEnvQueryOption* OptionInstance = MyNode ? Cast<UEnvQueryOption>(MyNode->NodeInstance) : nullptr;
		if (OptionInstance && OptionInstance->Generator)
		{
			ExistingNodes.Add(OptionInstance);

			ExistingTests.Empty(ExistingTests.Num());
			for (int32 SubIdx = 0; SubIdx < MyNode->SubNodes.Num(); SubIdx++)
			{
				UEnvironmentQueryGraphNode* MySubNode = Cast<UEnvironmentQueryGraphNode>(MyNode->SubNodes[SubIdx]);
				UEnvQueryTest* TestInstance = MySubNode ? Cast<UEnvQueryTest>(MySubNode->NodeInstance) : nullptr;
				if (TestInstance)
				{
					ExistingTests.Add(TestInstance);
				}
				else
				{
					MyNode->RemoveSubNode(MySubNode);
					SubIdx--;
				}
			}

			SpawnMissingSubNodes(OptionInstance, ExistingTests, MyNode);
		}

		UEnvironmentQueryGraphNode_Root* RootNode = Cast<UEnvironmentQueryGraphNode_Root>(Nodes[Idx]);
		if (RootNode)
		{
			MyRootNode = RootNode;
		}
	}

	UEdGraphPin* RootOutPin = MyRootNode ? FindGraphNodePin(MyRootNode, EGPD_Output) : nullptr;
	ExistingTests.Empty(0);

	for (int32 Idx = 0; Idx < OptionsCopy.Num(); Idx++)
	{
		UEnvQueryOption* OptionInstance = OptionsCopy[Idx];
		if (ExistingNodes.Contains(OptionInstance) || OptionInstance == nullptr || OptionInstance->Generator == nullptr)
		{
			continue;
		}

		FGraphNodeCreator<UEnvironmentQueryGraphNode_Option> NodeBuilder(*this);
		UEnvironmentQueryGraphNode_Option* MyNode = NodeBuilder.CreateNode();
		UAIGraphNode::UpdateNodeClassDataFrom(OptionInstance->Generator->GetClass(), MyNode->ClassData);
		MyNode->ErrorMessage = MyNode->ClassData.GetDeprecatedMessage();
		NodeBuilder.Finalize();

		if (MyRootNode)
		{
			MyNode->NodePosX = MyRootNode->NodePosX + (Idx * 300);
			MyNode->NodePosY = MyRootNode->NodePosY + 100;
		}

		MyNode->NodeInstance = OptionInstance;
		SpawnMissingSubNodes(OptionInstance, ExistingTests, MyNode);

		UEdGraphPin* SpawnedInPin = FindGraphNodePin(MyNode, EGPD_Input);
		if (RootOutPin && SpawnedInPin)
		{
			RootOutPin->MakeLinkTo(SpawnedInPin);
		}
	}
}
void FHotReloadClassReinstancer::SerializeCDOProperties(UObject* InObject, FHotReloadClassReinstancer::FCDOPropertyData& OutData)
{
	// Creates mem-comparable CDO data
	class FCDOWriter : public FMemoryWriter
	{
		/** Objects already visited by this archive */
		TSet<UObject*>& VisitedObjects;
		/** Output property data */
		FCDOPropertyData& PropertyData;
		/** Current subobject being serialized */
		FName SubobjectName;

	public:
		/** Serializes all script properties of the provided DefaultObject */
		FCDOWriter(FCDOPropertyData& InOutData, UObject* DefaultObject, TSet<UObject*>& InVisitedObjects, FName InSubobjectName = NAME_None)
			: FMemoryWriter(InOutData.Bytes, /* bIsPersistent = */ false, /* bSetOffset = */ true)
			, VisitedObjects(InVisitedObjects)
			, PropertyData(InOutData)
			, SubobjectName(InSubobjectName)
		{
			// Disable delta serialization, we want to serialize everything
			ArNoDelta = true;
			DefaultObject->SerializeScriptProperties(*this);
		}
		virtual void Serialize(void* Data, int64 Num) override
		{
			// Collect serialized properties so we can later update their values on instances if they change
			auto SerializedProperty = GetSerializedProperty();
			if (SerializedProperty != nullptr)
			{
				FCDOProperty& PropertyInfo = PropertyData.Properties.FindOrAdd(SerializedProperty->GetFName());
				if (PropertyInfo.Property == nullptr)
				{
					PropertyInfo.Property = SerializedProperty;
					PropertyInfo.SubobjectName = SubobjectName;
					PropertyInfo.SerializedValueOffset = Tell();
					PropertyInfo.SerializedValueSize = Num;
					PropertyData.Properties.Add(SerializedProperty->GetFName(), PropertyInfo);
				}
				else
				{
					PropertyInfo.SerializedValueSize += Num;
				}
			}
			FMemoryWriter::Serialize(Data, Num);
		}
		/** Serializes an object. Only name and class for normal references, deep serialization for DSOs */
		virtual FArchive& operator<<(class UObject*& InObj) override
		{
			FArchive& Ar = *this;
			if (InObj)
			{
				FName ClassName = InObj->GetClass()->GetFName();
				FName ObjectName = InObj->GetFName();
				Ar << ClassName;
				Ar << ObjectName;
				if (!VisitedObjects.Contains(InObj))
				{
					VisitedObjects.Add(InObj);
					if (Ar.GetSerializedProperty() && Ar.GetSerializedProperty()->ContainsInstancedObjectProperty())
					{
						// Serialize all DSO properties too					
						FCDOWriter DefaultSubobjectWriter(PropertyData, InObj, VisitedObjects, InObj->GetFName());
						Seek(PropertyData.Bytes.Num());
					}
				}
			}
			else
			{
				FName UnusedName = NAME_None;
				Ar << UnusedName;
				Ar << UnusedName;
			}

			return *this;
		}
		/** Serializes an FName as its index and number */
		virtual FArchive& operator<<(FName& InName) override
		{
			FArchive& Ar = *this;
			NAME_INDEX ComparisonIndex = InName.GetComparisonIndex();
			NAME_INDEX DisplayIndex = InName.GetDisplayIndex();
			int32 Number = InName.GetNumber();
			Ar << ComparisonIndex;
			Ar << DisplayIndex;
			Ar << Number;
			return Ar;
		}
		virtual FArchive& operator<<(FLazyObjectPtr& LazyObjectPtr) override
		{
			FArchive& Ar = *this;
			auto UniqueID = LazyObjectPtr.GetUniqueID();
			Ar << UniqueID;
			return *this;
		}
		virtual FArchive& operator<<(FAssetPtr& AssetPtr) override
		{
			FArchive& Ar = *this;
			auto UniqueID = AssetPtr.GetUniqueID();
			Ar << UniqueID;
			return Ar;
		}
		virtual FArchive& operator<<(FStringAssetReference& Value) override
		{
			FArchive& Ar = *this;

			FString Path = Value.ToString();

			Ar << Path;

			if (IsLoading())
			{
				Value.SetPath(MoveTemp(Path));
			}

			return Ar;
		}
		/** Archive name, for debugging */
		virtual FString GetArchiveName() const override { return TEXT("FCDOWriter"); }
	};
	TSet<UObject*> VisitedObjects;
	VisitedObjects.Add(InObject);
	FCDOWriter Ar(OutData, InObject, VisitedObjects);
}
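SerializeCDOProperties exists so two class default objects can be compared by their serialized bytes rather than field by field. A toy standard-C++ sketch of the "serialize into a buffer, then memcmp the buffers" idea (the Serialize helper here is a stand-in, not the engine archive):

#include <cstdint>
#include <cstring>
#include <vector>

struct Sample { std::int32_t A; float B; };

// Append a trivially copyable value to a byte buffer.
template <typename T>
void Serialize(std::vector<unsigned char>& Out, const T& Value)
{
    const unsigned char* Bytes = reinterpret_cast<const unsigned char*>(&Value);
    Out.insert(Out.end(), Bytes, Bytes + sizeof(T));
}

// Two objects compare equal if their serialized byte streams match.
bool DefaultsMatch(const Sample& Lhs, const Sample& Rhs)
{
    std::vector<unsigned char> LhsBytes, RhsBytes;
    Serialize(LhsBytes, Lhs.A); Serialize(LhsBytes, Lhs.B);
    Serialize(RhsBytes, Rhs.A); Serialize(RhsBytes, Rhs.B);
    return LhsBytes.size() == RhsBytes.size()
        && std::memcmp(LhsBytes.data(), RhsBytes.data(), LhsBytes.size()) == 0;
}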
Example #10
0
	/** Add a new statistic to the internal map (or update an existing one) from the supplied component */
	UPrimitiveStats* Add(UPrimitiveComponent* InPrimitiveComponent, EPrimitiveObjectSets InObjectSet)
	{
		// Objects in transient package or transient objects are not part of level.
		if( InPrimitiveComponent->GetOutermost() == GetTransientPackage() || InPrimitiveComponent->HasAnyFlags( RF_Transient ) )
		{
			return NULL;
		}

		// Owned by a default object? Not part of a level either.
		if(InPrimitiveComponent->GetOuter() && InPrimitiveComponent->GetOuter()->IsDefaultSubobject() )
		{
			return NULL;
		}

		UStaticMeshComponent*	StaticMeshComponent		= Cast<UStaticMeshComponent>(InPrimitiveComponent);
		UModelComponent*		ModelComponent			= Cast<UModelComponent>(InPrimitiveComponent);
		USkeletalMeshComponent*	SkeletalMeshComponent	= Cast<USkeletalMeshComponent>(InPrimitiveComponent);
		ULandscapeComponent*	LandscapeComponent		= Cast<ULandscapeComponent>(InPrimitiveComponent);
		UObject*				Resource				= NULL;
		AActor*					ActorOuter				= Cast<AActor>(InPrimitiveComponent->GetOuter());

		int32 VertexColorMem		= 0;
		int32 InstVertexColorMem	= 0;
		// Calculate number of direct and other lights relevant to this component.
		int32 LightsLMCount			= 0;
		int32 LightsOtherCount		= 0;
		bool bUsesOnlyUnlitMaterials = InPrimitiveComponent->UsesOnlyUnlitMaterials();

		// The static mesh is a static mesh component's resource.
		if( StaticMeshComponent )
		{
			UStaticMesh* Mesh = StaticMeshComponent->StaticMesh;
			Resource = Mesh;

			// Calculate vertex color memory on the actual mesh.
			if( Mesh && Mesh->RenderData )
			{
				// Accumulate memory for each LOD
				for( int32 LODIndex = 0; LODIndex < Mesh->RenderData->LODResources.Num(); ++LODIndex )
				{
					VertexColorMem += Mesh->RenderData->LODResources[LODIndex].ColorVertexBuffer.GetAllocatedSize();
				}
			}

			// Calculate instanced vertex color memory used on the component.
			for( int32 LODIndex = 0; LODIndex < StaticMeshComponent->LODData.Num(); ++LODIndex )
			{
				// Accumulate memory for each LOD
				const FStaticMeshComponentLODInfo& LODInfo = StaticMeshComponent->LODData[ LODIndex ];
				if( LODInfo.OverrideVertexColors )
				{
					InstVertexColorMem += LODInfo.OverrideVertexColors->GetAllocatedSize();	
				}
			}
			// Calculate the number of lightmap and shadow map lights
			if( !bUsesOnlyUnlitMaterials )
			{
				if( StaticMeshComponent->LODData.Num() > 0 )
				{
					FStaticMeshComponentLODInfo& ComponentLODInfo = StaticMeshComponent->LODData[0];
					if( ComponentLODInfo.LightMap )
					{
						LightsLMCount = ComponentLODInfo.LightMap->LightGuids.Num();
					}
				}
			}
		}
		// A model component is its own resource.
		else if( ModelComponent )			
		{
			// Make sure model component is referenced by level.
			ULevel* Level = CastChecked<ULevel>(ModelComponent->GetOuter());
			if( Level->ModelComponents.Find( ModelComponent ) != INDEX_NONE )
			{
				Resource = ModelComponent->GetModel();

				// Calculate the number of lightmap and shadow map lights
				if( !bUsesOnlyUnlitMaterials )
				{
					const TIndirectArray<FModelElement> Elements = ModelComponent->GetElements();
					if( Elements.Num() > 0 )
					{
						if( Elements[0].LightMap )
						{
							LightsLMCount = Elements[0].LightMap->LightGuids.Num();
						}
					}
				}
			}
		}
		// The skeletal mesh of a skeletal mesh component is its resource.
		else if( SkeletalMeshComponent )
		{
			USkeletalMesh* Mesh = SkeletalMeshComponent->SkeletalMesh;
			Resource = Mesh;
			// Calculate vertex color usage for skeletal meshes
			if( Mesh )
			{
				FSkeletalMeshResource* SkelMeshResource = Mesh->GetResourceForRendering();
				for( int32 LODIndex = 0; LODIndex < SkelMeshResource->LODModels.Num(); ++LODIndex )
				{
					const FStaticLODModel& LODModel = SkelMeshResource->LODModels[ LODIndex ];
					VertexColorMem += LODModel.ColorVertexBuffer.GetVertexDataSize();
				}
			}
		}
		// The landscape of a landscape component is its resource.
		else if (LandscapeComponent)
		{
			Resource = LandscapeComponent->GetLandscapeProxy();
			if (LandscapeComponent->LightMap)
			{
				LightsLMCount = LandscapeComponent->LightMap->LightGuids.Num();
			}
		}

		UWorld* World = InPrimitiveComponent->GetWorld();
	//	check(World); // @todo: re-instate this check once the GWorld migration has completed
		// Whether we should skip the actor: skip if the actor has no outer, or if we are only showing selected actors and the actor isn't selected
		const bool bShouldSkip = World == NULL || ActorOuter == NULL || (ActorOuter != NULL && InObjectSet == PrimitiveObjectSets_SelectedObjects && ActorOuter->IsSelected() == false );
		// Don't care about components without a resource.
		if(	Resource 
			// Require actor association for selection and to disregard mesh emitter components. The exception being model components.
			&&	(!bShouldSkip || (ModelComponent && InObjectSet != PrimitiveObjectSets_SelectedObjects ) )
			// Only list primitives in visible levels
			&&	IsInVisibleLevel( InPrimitiveComponent, World ) 
			// Don't list pending kill components.
			&&	!InPrimitiveComponent->IsPendingKill() )
		{
			// Retrieve relevant lights.
			TArray<const ULightComponent*> RelevantLights;
			World->Scene->GetRelevantLights( InPrimitiveComponent, &RelevantLights );

			// Only look for relevant lights if we aren't unlit.
			if( !bUsesOnlyUnlitMaterials )
			{
				// Lightmap and shadow map lights are calculated above, per component type, infer the "other" light count here
				LightsOtherCount = RelevantLights.Num() >= LightsLMCount ? RelevantLights.Num() - LightsLMCount : 0;
			}

			// Figure out memory used by light and shadow maps and light/ shadow map resolution.
			int32 LightMapWidth			= 0;
			int32 LightMapHeight		= 0;
			InPrimitiveComponent->GetLightMapResolution( LightMapWidth, LightMapHeight );
			int32 LMSMResolution		= FMath::Sqrt( LightMapHeight * LightMapWidth );
			int32 LightMapData			= 0;
			int32 LegacyShadowMapData	= 0;
			InPrimitiveComponent->GetLightAndShadowMapMemoryUsage( LightMapData, LegacyShadowMapData );

			// Check whether we already have an entry for the associated static mesh.
			UPrimitiveStats** StatsEntryPtr = ResourceToStatsMap.Find( Resource );
			if( StatsEntryPtr )
			{
				check(*StatsEntryPtr);
				UPrimitiveStats* StatsEntry = *StatsEntryPtr;

				// We do. Update existing entry.
				StatsEntry->Count++;
				StatsEntry->Actors.AddUnique(ActorOuter);
				StatsEntry->RadiusMin		= FMath::Min( StatsEntry->RadiusMin, InPrimitiveComponent->Bounds.SphereRadius );
				StatsEntry->RadiusMax		= FMath::Max( StatsEntry->RadiusMax, InPrimitiveComponent->Bounds.SphereRadius );
				StatsEntry->RadiusAvg		+= InPrimitiveComponent->Bounds.SphereRadius;
				StatsEntry->LightsLM		+= LightsLMCount;
				StatsEntry->LightsOther		+= LightsOtherCount;
				StatsEntry->LightMapData	+= (float)LightMapData / 1024.0f;
				StatsEntry->LMSMResolution	+= LMSMResolution;
				StatsEntry->UpdateNames();

				if ( !ModelComponent && !LandscapeComponent )
				{
					// Count instanced sections
					StatsEntry->InstSections += StatsEntry->Sections;
					StatsEntry->InstTriangles += StatsEntry->Triangles;
				}

				// ... in the case of a model component (aka BSP).
				if( ModelComponent )
				{
					// If Count represents the Model itself, we do NOT want to increment it now.
					StatsEntry->Count--;

					for (const auto& Element : ModelComponent->GetElements())
					{
						StatsEntry->Triangles += Element.NumTriangles;
						StatsEntry->Sections++;
					}

					StatsEntry->InstSections = StatsEntry->Sections;
					StatsEntry->InstTriangles = StatsEntry->Triangles;
				}
				else if( StaticMeshComponent )
				{
					// This stat is used by multiple components so accumulate instanced vertex color memory.
					StatsEntry->InstVertexColorMem += InstVertexColorMem;
				}
				else if (LandscapeComponent)
				{
					// If Count represents the Landscape itself, we do NOT want to increment it now.
					StatsEntry->Count--;
				}
			}
			else
			{
				// We don't. Create new base entry.
				UPrimitiveStats* NewStatsEntry = NewObject<UPrimitiveStats>();
				NewStatsEntry->AddToRoot();
				NewStatsEntry->Object			= Resource;
				NewStatsEntry->Actors.AddUnique(ActorOuter);
				NewStatsEntry->Count			= 1;
				NewStatsEntry->Triangles		= 0;
				NewStatsEntry->InstTriangles	= 0;
				NewStatsEntry->ResourceSize		= (float)(FArchiveCountMem(Resource).GetNum() + Resource->GetResourceSize(EResourceSizeMode::Exclusive)) / 1024.0f;
				NewStatsEntry->Sections			= 0;
				NewStatsEntry->InstSections = 0;
				NewStatsEntry->RadiusMin		= InPrimitiveComponent->Bounds.SphereRadius;
				NewStatsEntry->RadiusAvg		= InPrimitiveComponent->Bounds.SphereRadius;
				NewStatsEntry->RadiusMax		= InPrimitiveComponent->Bounds.SphereRadius;
				NewStatsEntry->LightsLM			= LightsLMCount;
				NewStatsEntry->LightsOther		= (float)LightsOtherCount;
				NewStatsEntry->LightMapData		= (float)LightMapData / 1024.0f;
				NewStatsEntry->LMSMResolution	= LMSMResolution;
				NewStatsEntry->VertexColorMem	= (float)VertexColorMem / 1024.0f;
				NewStatsEntry->InstVertexColorMem = (float)InstVertexColorMem / 1024.0f;
				NewStatsEntry->UpdateNames();

				// Fix up triangle and section count...

				// ... in the case of a static mesh component.
				if( StaticMeshComponent )
				{
					UStaticMesh* StaticMesh = StaticMeshComponent->StaticMesh;
					if( StaticMesh && StaticMesh->RenderData )
					{
						for( int32 SectionIndex=0; SectionIndex<StaticMesh->RenderData->LODResources[0].Sections.Num(); SectionIndex++ )
						{
							const FStaticMeshSection& StaticMeshSection = StaticMesh->RenderData->LODResources[0].Sections[SectionIndex];
							NewStatsEntry->Triangles	+= StaticMeshSection.NumTriangles;
							NewStatsEntry->Sections++;
						}
					}
				}
				// ... in the case of a model component (aka BSP).
				else if( ModelComponent )
				{
					TIndirectArray<FModelElement> Elements = ModelComponent->GetElements();
					for( int32 ElementIndex=0; ElementIndex<Elements.Num(); ElementIndex++ )
					{
						const FModelElement& Element = Elements[ElementIndex];
						NewStatsEntry->Triangles += Element.NumTriangles;
						NewStatsEntry->Sections++;
					}

				}
				// ... in the case of skeletal mesh component.
				else if( SkeletalMeshComponent )
				{
					USkeletalMesh* SkeletalMesh = SkeletalMeshComponent->SkeletalMesh;
					if( SkeletalMesh )
					{
						FSkeletalMeshResource* SkelMeshResource = SkeletalMesh->GetResourceForRendering();
						if (SkelMeshResource->LODModels.Num())
						{
							const FStaticLODModel& BaseLOD = SkelMeshResource->LODModels[0];
							for( int32 SectionIndex=0; SectionIndex<BaseLOD.Sections.Num(); SectionIndex++ )
							{
								const FSkelMeshSection& Section = BaseLOD.Sections[SectionIndex];
								NewStatsEntry->Triangles += Section.NumTriangles;
								NewStatsEntry->Sections++;
							}
						}
					}
				}
				else if (LandscapeComponent)
				{
					TSet<UTexture2D*> UniqueTextures;
					for (auto ItComponents = LandscapeComponent->GetLandscapeProxy()->LandscapeComponents.CreateConstIterator(); ItComponents; ++ItComponents)
					{
						const ULandscapeComponent* CurrentComponent = *ItComponents;

						// count triangles and sections in the landscape
						NewStatsEntry->Triangles += FMath::Square(CurrentComponent->ComponentSizeQuads) * 2;
						NewStatsEntry->Sections += FMath::Square(CurrentComponent->NumSubsections);

						// count resource usage of landscape
						bool bNotUnique = false;
						UniqueTextures.Add(CurrentComponent->HeightmapTexture, &bNotUnique);
						if (!bNotUnique)
						{
							NewStatsEntry->ResourceSize += CurrentComponent->HeightmapTexture->GetResourceSize(EResourceSizeMode::Exclusive);
						}
						if (CurrentComponent->XYOffsetmapTexture)
						{
							UniqueTextures.Add(CurrentComponent->XYOffsetmapTexture, &bNotUnique);
							if (!bNotUnique)
							{
								NewStatsEntry->ResourceSize += CurrentComponent->XYOffsetmapTexture->GetResourceSize(EResourceSizeMode::Exclusive);
							}
						}

						for (auto ItWeightmaps = CurrentComponent->WeightmapTextures.CreateConstIterator(); ItWeightmaps; ++ItWeightmaps)
						{
							UniqueTextures.Add((*ItWeightmaps), &bNotUnique);
							if (!bNotUnique)
							{
								NewStatsEntry->ResourceSize += (*ItWeightmaps)->GetResourceSize(EResourceSizeMode::Exclusive);
							}
						}
					}
				}

				NewStatsEntry->InstTriangles = NewStatsEntry->Triangles;
				NewStatsEntry->InstSections = NewStatsEntry->Sections;

				// Add to map.
				ResourceToStatsMap.Add( Resource, NewStatsEntry );

				return NewStatsEntry;
			}
		}

		return NULL;
	}
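The Add method above follows a classic accumulate-into-a-map pattern: look up the stats entry for a resource, update it if it exists, otherwise create and insert a fresh one. A minimal sketch of that pattern with the standard library (the types and field names are illustrative):

#include <map>
#include <string>

struct Stats { int Count = 0; double RadiusMax = 0.0; };

// Find or create the stats entry for Resource and fold one component's
// values into it.
Stats& AddComponent(std::map<std::string, Stats>& ResourceToStats,
                    const std::string& Resource, double Radius)
{
    Stats& Entry = ResourceToStats[Resource];   // default-constructed on first use
    ++Entry.Count;
    if (Radius > Entry.RadiusMax)
    {
        Entry.RadiusMax = Radius;
    }
    return Entry;
}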
Example #11
0
FShadowMap2D* FShadowMap2D::AllocateInstancedShadowMap(UInstancedStaticMeshComponent* Component, TArray<TMap<ULightComponent*, TUniquePtr<FShadowMapData2D>>> InstancedShadowMapData,
	const FBoxSphereBounds& Bounds, ELightMapPaddingType InPaddingType, EShadowMapFlags InShadowmapFlags)
{
#if WITH_EDITOR
	check(InstancedShadowMapData.Num() > 0);

	// Verify all instance shadowmaps are the same size, and build complete list of shadow lights
	int32 SizeX = -1;
	int32 SizeY = -1;
	TSet<ULightComponent*> AllLights;
	for (auto& ShadowMapData : InstancedShadowMapData)
	{
		for (const auto& ShadowDataPair : ShadowMapData)
		{
			if (SizeX == -1)
			{
				SizeX = ShadowDataPair.Value->GetSizeX();
				SizeY = ShadowDataPair.Value->GetSizeY();
			}
			else
			{
				check(ShadowDataPair.Value->GetSizeX() == SizeX);
				check(ShadowDataPair.Value->GetSizeY() == SizeY);
			}
			AllLights.Add(ShadowDataPair.Key);
		}
	}

	check(SizeX != -1 && SizeY != -1); // No valid shadowmaps

	TArray<FGuid> LightGuids;
	LightGuids.Reserve(AllLights.Num());
	for (ULightComponent* Light : AllLights)
	{
		LightGuids.Add(Light->LightGuid);
	}

	// Unify all the shadow map data to contain the same lights in the same order
	for (auto& ShadowMapData : InstancedShadowMapData)
	{
		for (ULightComponent* Light : AllLights)
		{
			if (!ShadowMapData.Contains(Light))
			{
				ShadowMapData.Add(Light, MakeUnique<FQuantizedShadowSignedDistanceFieldData2D>(SizeX, SizeY));
			}
		}
	}

	FShadowMapAllocationGroup AllocationGroup;
	AllocationGroup.TextureOuter = Component->GetOutermost();
	AllocationGroup.ShadowmapFlags = InShadowmapFlags;
	AllocationGroup.Bounds = Bounds;
	if (!GAllowStreamingLightmaps)
	{
		AllocationGroup.ShadowmapFlags = EShadowMapFlags(AllocationGroup.ShadowmapFlags & ~SMF_Streamed);
	}

	FShadowMap2D* BaseShadowmap = nullptr;

	for (int32 InstanceIndex = 0; InstanceIndex < InstancedShadowMapData.Num(); ++InstanceIndex)
	{
		auto& ShadowMapData = InstancedShadowMapData[InstanceIndex];
		check(ShadowMapData.Num() > 0);

		// Create a new shadow-map.
		FShadowMap2D* ShadowMap = new FShadowMap2D(LightGuids);

		if (InstanceIndex == 0)
		{
			BaseShadowmap = ShadowMap;
		}

		// Add a pending allocation for this shadow-map.
		TUniquePtr<FShadowMapAllocation> Allocation = MakeUnique<FShadowMapAllocation>();
		Allocation->PaddingType = InPaddingType;
		Allocation->ShadowMap = ShadowMap;
		Allocation->TotalSizeX = SizeX;
		Allocation->TotalSizeY = SizeY;
		Allocation->MappedRect = FIntRect(0, 0, SizeX, SizeY);
		Allocation->Primitive = Component;
		Allocation->InstanceIndex = InstanceIndex;

		for (auto& ShadowDataPair : ShadowMapData)
		{
			auto& RawData = ShadowDataPair.Value;
			auto& DistanceFieldShadowData = Allocation->ShadowMapData.Add(ShadowDataPair.Key, TArray<FQuantizedSignedDistanceFieldShadowSample>());

			switch (RawData->GetType())
			{
			case FShadowMapData2D::SHADOW_SIGNED_DISTANCE_FIELD_DATA:
			case FShadowMapData2D::SHADOW_SIGNED_DISTANCE_FIELD_DATA_QUANTIZED:
				// If the data is already quantized, this will just copy the data
				RawData->Quantize(DistanceFieldShadowData);
				break;
			default:
				check(0);
			}

			RawData.Reset();

			// Track the size of pending light-maps.
			PendingShadowMapSize += Allocation->TotalSizeX * Allocation->TotalSizeY;
		}

		// Assumes bAlignByFour
		AllocationGroup.TotalTexels += ((Allocation->MappedRect.Width() + 3) & ~3) * ((Allocation->MappedRect.Height() + 3) & ~3);

		AllocationGroup.Allocations.Add(MoveTemp(Allocation));
	}

	PendingShadowMaps.Add(MoveTemp(AllocationGroup));

	return BaseShadowmap;
#else
	return nullptr;
#endif
}
void UMaterialParameterCollection::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent)
{
	// If the array counts have changed, an element has been added or removed and we need to update the uniform buffer layout,
	// which also requires recompiling any referencing materials
	if (ScalarParameters.Num() != PreviousScalarParameters.Num()
		|| VectorParameters.Num() != PreviousVectorParameters.Num())
	{
		// Limit the count of parameters to fit within uniform buffer limits
		const uint32 MaxScalarParameters = 1024;

		if (ScalarParameters.Num() > MaxScalarParameters)
		{
			ScalarParameters.RemoveAt(MaxScalarParameters, ScalarParameters.Num() - MaxScalarParameters);
		}

		const uint32 MaxVectorParameters = 1024;

		if (VectorParameters.Num() > MaxVectorParameters)
		{
			VectorParameters.RemoveAt(MaxVectorParameters, VectorParameters.Num() - MaxVectorParameters);
		}

		// Generate a new Id so that unloaded materials that reference this collection will update correctly on load
		StateId = FGuid::NewGuid();

		// Update the uniform buffer layout
		CreateBufferStruct();

		// Recreate each instance of this collection
		for (TObjectIterator<UWorld> It; It; ++It)
		{
			UWorld* CurrentWorld = *It;
			CurrentWorld->AddParameterCollectionInstance(this, false);
		}

		// Build set of changed parameter names
		TSet<FName> ParameterNames;
		for (const FCollectionVectorParameter& Param : PreviousVectorParameters)
		{
			ParameterNames.Add(Param.ParameterName);
		}

		for (const FCollectionScalarParameter& Param : PreviousScalarParameters)
		{
			ParameterNames.Add(Param.ParameterName);
		}

		for (const FCollectionVectorParameter& Param : VectorParameters)
		{
			ParameterNames.Remove(Param.ParameterName);
		}

		for (const FCollectionScalarParameter& Param : ScalarParameters)
		{
			ParameterNames.Remove(Param.ParameterName);
		}

		// Create a material update context so we can safely update materials using this parameter collection.
		{
			FMaterialUpdateContext UpdateContext;

			// Go through all materials in memory and recompile them if they use this material parameter collection
			for (TObjectIterator<UMaterial> It; It; ++It)
			{
				UMaterial* CurrentMaterial = *It;

				bool bRecompile = false;

				// Preview materials often use expressions for rendering that are not in their Expressions array, 
				// And therefore their MaterialParameterCollectionInfos are not up to date.
				if (CurrentMaterial->bIsPreviewMaterial)
				{
					bRecompile = true;
				}
				else
				{
					for (int32 FunctionIndex = 0; FunctionIndex < CurrentMaterial->MaterialParameterCollectionInfos.Num() && !bRecompile; FunctionIndex++)
					{
						if (CurrentMaterial->MaterialParameterCollectionInfos[FunctionIndex].ParameterCollection == this)
						{
							TArray<UMaterialExpressionCollectionParameter*> CollectionParameters;
							CurrentMaterial->GetAllExpressionsInMaterialAndFunctionsOfType(CollectionParameters);
							for (UMaterialExpressionCollectionParameter* CollectionParameter : CollectionParameters)
							{
								if (ParameterNames.Contains(CollectionParameter->ParameterName))
								{
									bRecompile = true;
									break;
								}
							}
						}
					}
				}

				if (bRecompile)
				{
					UpdateContext.AddMaterial(CurrentMaterial);

					// Propagate the change to this material
					CurrentMaterial->PreEditChange(NULL);
					CurrentMaterial->PostEditChange();
					CurrentMaterial->MarkPackageDirty();
				}
			}
		}
	}

	// Update each world's scene with the new instance, and update each instance's uniform buffer to reflect the changes made by the user
	for (TObjectIterator<UWorld> It; It; ++It)
	{
		UWorld* CurrentWorld = *It;
		CurrentWorld->UpdateParameterCollectionInstances(true);
	}

	PreviousScalarParameters.Empty();
	PreviousVectorParameters.Empty();

	Super::PostEditChangeProperty(PropertyChangedEvent);
}
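The "changed parameter names" set above is effectively a set difference: start from the previous parameter names and remove everything that still exists. A small standard-C++ sketch of the same computation (the function and parameter names are illustrative):

#include <string>
#include <unordered_set>
#include <vector>

// Names that were present before but are gone (or renamed) now.
std::unordered_set<std::string> RemovedNames(const std::vector<std::string>& Previous,
                                             const std::vector<std::string>& Current)
{
    std::unordered_set<std::string> Removed(Previous.begin(), Previous.end());
    for (const std::string& Name : Current)
    {
        Removed.erase(Name);
    }
    return Removed;
}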
// The cooked package asset registry saves information about all the cooked packages, and the assets they contain, for stats purposes.
// The data is written in JSON format.
bool FChunkManifestGenerator::SaveCookedPackageAssetRegistry( const FString& SandboxCookedRegistryFilename, const bool Append )
{
    bool bSuccess = false;
    for ( const auto& Platform : Platforms )
    {
        TSet<FName> CookedPackages;

        // save the file
        const FString CookedAssetRegistryFilename = SandboxCookedRegistryFilename.Replace(TEXT("[Platform]"), *Platform->PlatformName());

        FString JsonOutString;
        JsonWriter Json = TJsonWriterFactory<TCHAR, TPrettyJsonPrintPolicy<TCHAR> >::Create(&JsonOutString);

        Json->WriteObjectStart();
        Json->WriteArrayStart(TEXT("Packages"));

        for ( const auto& Package : AllCookedPackages )
        {
            Json->WriteObjectStart(); // unnamed package start
            const FName& PackageName = Package.Key;
            const FString& SandboxPath = Package.Value;

            CookedPackages.Add( PackageName );

            FString PlatformSandboxPath = SandboxPath.Replace(TEXT("[Platform]"), *Platform->PlatformName());

            FDateTime TimeStamp = IFileManager::Get().GetTimeStamp( *PlatformSandboxPath );

            Json->WriteValue( "SourcePackageName", PackageName.ToString() );
            Json->WriteValue( "CookedPackageName", PlatformSandboxPath );
            Json->WriteValue( "CookedPackageTimeStamp", TimeStamp.ToString() );

            Json->WriteArrayStart("AssetData");
            for (const auto& AssetData : AssetRegistryData)
            {   // Add only assets that have actually been cooked and belong to any chunk
                if (AssetData.ChunkIDs.Num() > 0 && (AssetData.PackageName == PackageName))
                {
                    Json->WriteObjectStart();
                    // save all their infos
                    Json->WriteValue(TEXT("ObjectPath"), AssetData.ObjectPath.ToString() );
                    Json->WriteValue(TEXT("PackageName"), AssetData.PackageName.ToString() );
                    Json->WriteValue(TEXT("PackagePath"), AssetData.PackagePath.ToString() );
                    Json->WriteValue(TEXT("GroupNames"), AssetData.GroupNames.ToString() );
                    Json->WriteValue(TEXT("AssetName"), AssetData.AssetName.ToString() );
                    Json->WriteValue(TEXT("AssetClass"), AssetData.AssetClass.ToString() );
                    Json->WriteObjectStart("TagsAndValues");
                    for ( const auto& Tag : AssetData.TagsAndValues )
                    {
                        Json->WriteValue( Tag.Key.ToString(), Tag.Value );
                    }
                    Json->WriteObjectEnd(); // end tags and values object
                    Json->WriteObjectEnd(); // end unnamed array object
                }
            }
            Json->WriteArrayEnd();
            Json->WriteObjectEnd(); // unnamed package
        }

        if ( Append )
        {
            FString JsonInString;
            if ( FFileHelper::LoadFileToString(JsonInString, *CookedAssetRegistryFilename) )
            {
                // load up previous package asset registry and fill in any packages which weren't recooked on this run
                JsonReader Reader = TJsonReaderFactory<TCHAR>::Create(JsonInString);
                TSharedPtr<FJsonObject> JsonObject;
                bool shouldRead = FJsonSerializer::Deserialize(Reader, JsonObject) && JsonObject.IsValid() && JsonObject->HasTypedField<EJson::Array>(TEXT("Packages"));
                if ( shouldRead )
                {
                    TArray<TSharedPtr<FJsonValue>> PackageList = JsonObject->GetArrayField(TEXT("Packages"));
                    for (auto PackageListIt = PackageList.CreateConstIterator(); PackageListIt && shouldRead; ++PackageListIt)
                    {
                        const TSharedPtr<FJsonValue>& JsonValue = *PackageListIt;
                        shouldRead = JsonValue->Type == EJson::Object;
                        if ( shouldRead )
                        {
                            const TSharedPtr<FJsonObject>& JsonPackage = JsonValue->AsObject();

                            // get the package name and see if we have already written it out this run

                            FString CookedPackageName;
                            verify( JsonPackage->TryGetStringField(TEXT("SourcePackageName"), CookedPackageName) );

                            const FName CookedPackageFName(*CookedPackageName);
                            if ( CookedPackages.Contains(CookedPackageFName))
                            {
                                // don't need to process this package
                                continue;
                            }


                            // check that the on disk version is still valid
                            FString SourcePackageName;
                            check( JsonPackage->TryGetStringField( TEXT("SourcePackageName"), SourcePackageName) );

                            // if our timestamp is different then don't copy the information over
                            FDateTime CurrentTimeStamp = IFileManager::Get().GetTimeStamp( *CookedPackageName );

                            FString SavedTimeString;
                            check( JsonPackage->TryGetStringField(TEXT("CookedPackageTimeStamp"), SavedTimeString) );
                            FDateTime SavedTimeStamp;
                            FDateTime::Parse(SavedTimeString, SavedTimeStamp);

                            if ( SavedTimeStamp != CurrentTimeStamp )
                            {
                                continue;
                            }



                            // Copy the previously saved package entry over to the new registry unchanged
                            CopyJsonValueToWriter(Json, FString(), JsonValue);
                            /*Json->WriteObjectStart(); // open package

                            // copy all the values over
                            for ( const auto& JsonPackageValue : JsonPackage->Values)
                            {
                            	CopyJsonValueToWriter(Json, JsonPackageValue.Key, JsonPackageValue.Value);
                            }

                            Json->WriteObjectEnd();*/
                        }

                    }
                }
                else
                {
                    UE_LOG(LogChunkManifestGenerator, Warning, TEXT("Unable to read %s, or the JSON is in an invalid format"), *CookedAssetRegistryFilename);
                }
            }
        }


        Json->WriteArrayEnd();
        Json->WriteObjectEnd();

        if (Json->Close())
        {
            FArchive* ItemTemplatesFile = IFileManager::Get().CreateFileWriter(*CookedAssetRegistryFilename);
            if (ItemTemplatesFile)
            {
                // serialize the file contents
                TStringConversion<FTCHARToUTF8_Convert> Convert(*JsonOutString);
                ItemTemplatesFile->Serialize(const_cast<ANSICHAR*>(Convert.Get()), Convert.Length());
                ItemTemplatesFile->Close();
                if ( !ItemTemplatesFile->IsError() )
                {
                    bSuccess = true;
                }
                else
                {
                    UE_LOG(LogChunkManifestGenerator, Error, TEXT("Unable to write to %s"), *CookedAssetRegistryFilename);
                }
                delete ItemTemplatesFile;
            }
            else
            {
                UE_LOG(LogChunkManifestGenerator, Error, TEXT("Unable to open %s for writing."), *CookedAssetRegistryFilename);
            }
        }
        else
        {
            UE_LOG(LogChunkManifestGenerator, Error, TEXT("Error closing Json Writer"));
        }
    }
    return bSuccess;
}
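The append path above keeps every entry written during this cook and only carries an old entry forward when the package was not re-cooked and its recorded timestamp still matches the file on disk. Below is a minimal standard-C++ sketch of roughly that merge rule, not engine code; the type and function names are invented for illustration.

#include <string>
#include <unordered_map>

struct RegistryEntry
{
    std::string CookedPath;    // where the cooked file was written
    long long   TimeStamp = 0; // stand-in for the on-disk file time
};

// Merge OldRegistry into NewRegistry: entries cooked this run stay as written; old entries
// are copied over only if they were not re-cooked and their saved timestamp still matches.
void MergeCookedRegistries(std::unordered_map<std::string, RegistryEntry>& NewRegistry,
                           const std::unordered_map<std::string, RegistryEntry>& OldRegistry,
                           long long (*GetTimeStampOnDisk)(const std::string& PackageName))
{
    for (const auto& [PackageName, OldEntry] : OldRegistry)
    {
        if (NewRegistry.count(PackageName) != 0)
        {
            continue; // already written out during this run
        }
        if (GetTimeStampOnDisk(PackageName) != OldEntry.TimeStamp)
        {
            continue; // the on-disk file changed since the old registry was saved
        }
        NewRegistry.emplace(PackageName, OldEntry);
    }
}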
Example #14
void UTexture::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent)
{
	Super::PostEditChangeProperty(PropertyChangedEvent);

	SetLightingGuid();

	// Determine whether any property has changed that requires recompressing the texture or notifying materials.
	bool RequiresNotifyMaterials = false;
	bool DeferCompressionWasEnabled = false;

	UProperty* PropertyThatChanged = PropertyChangedEvent.Property;
	if( PropertyThatChanged )
	{
		static const FName CompressionSettingsName("CompressionSettings");
		static const FName LODGroupName("LODGroup");
		static const FName DeferCompressionName("DeferCompression");
#if WITH_EDITORONLY_DATA
		static const FName MaxTextureSizeName("MaxTextureSize");
#endif // #if WITH_EDITORONLY_DATA

		const FName PropertyName = PropertyThatChanged->GetFName();
		if (PropertyName == CompressionSettingsName || PropertyName == LODGroupName)
		{
			RequiresNotifyMaterials = true;
		}
		else if (PropertyName == DeferCompressionName)
		{
			DeferCompressionWasEnabled = DeferCompression;
		}
#if WITH_EDITORONLY_DATA
		else if (PropertyName == MaxTextureSizeName)
		{
			if (MaxTextureSize <= 0)
			{
				MaxTextureSize = 0;
			}
			else
			{
				MaxTextureSize = FMath::Min<int32>(FMath::RoundUpToPowerOfTwo(MaxTextureSize), GetMaximumDimension());
			}
		}
#endif // #if WITH_EDITORONLY_DATA

		bool bPreventSRGB = (CompressionSettings == TC_Alpha || CompressionSettings == TC_Normalmap || CompressionSettings == TC_Masks || CompressionSettings == TC_HDR || CompressionSettings == TC_HDR_Compressed);
		if(bPreventSRGB && SRGB == true)
		{
			SRGB = false;
		}
	}
	else
	{
		FMaterialUpdateContext UpdateContext;
		// Update any material that uses this texture
		TSet<UMaterial*> BaseMaterialsThatUseThisTexture;
		for (TObjectIterator<UMaterialInterface> It; It; ++It)
		{
			UMaterialInterface* MaterialInterface = *It;
			if (DoesMaterialUseTexture(MaterialInterface, this))
			{
				UMaterial *Material = MaterialInterface->GetMaterial();
				bool MaterialAlreadyCompute = false;
				BaseMaterialsThatUseThisTexture.Add(Material, &MaterialAlreadyCompute);
				if (!MaterialAlreadyCompute)
				{
					UpdateContext.AddMaterial(Material);
					if (Material->IsTextureForceRecompileCacheRessource(this))
					{
						Material->UpdateMaterialShaderCacheAndTextureReferences();
					}
				}
			}
		}
		// If the DDC key was different, the material has already been recompiled above,
		// so there is no need to notify materials again.
		RequiresNotifyMaterials = false;
	}

	NumCinematicMipLevels = FMath::Max<int32>( NumCinematicMipLevels, 0 );

	// Don't update the texture resource if we've turned "DeferCompression" on, as this 
	// would cause it to immediately update as an uncompressed texture
	if( !DeferCompressionWasEnabled && (PropertyChangedEvent.ChangeType & EPropertyChangeType::Interactive) == 0 )
	{
		// Update the texture resource. This will recache derived data if necessary
		// which may involve recompressing the texture.
		UpdateResource();
	}

	// Notify any loaded material instances if we changed our compression format
	if (RequiresNotifyMaterials)
	{
		TArray<UMaterialInterface*> MaterialsThatUseThisTexture;

		// Create a material update context to safely update materials.
		{
			FMaterialUpdateContext UpdateContext;

			// Notify any material that uses this texture
			TSet<UMaterial*> BaseMaterialsThatUseThisTexture;
			for (TObjectIterator<UMaterialInterface> It; It; ++It)
			{
				UMaterialInterface* MaterialInterface = *It;
				if (DoesMaterialUseTexture(MaterialInterface,this))
				{
					MaterialsThatUseThisTexture.Add(MaterialInterface);

					// This is a bit tricky. We want to make sure all materials using this texture are
					// updated. Materials are always updated. Material instances may also have to be
					// updated and if they have static permutations their children must be updated
					// whether they use the texture or not! The safe thing to do is to add the instance's
					// base material to the update context causing all materials in the tree to update.
					BaseMaterialsThatUseThisTexture.Add(MaterialInterface->GetMaterial());
				}
			}

			// Go ahead and update any base materials that need to be.
			for (TSet<UMaterial*>::TConstIterator It(BaseMaterialsThatUseThisTexture); It; ++It)
			{
				UpdateContext.AddMaterial(*It);
				(*It)->PostEditChange();
			}
		}

		// Now that all materials and instances have updated send necessary callbacks.
		for (int32 i = 0; i < MaterialsThatUseThisTexture.Num(); ++i)
		{
			FEditorSupportDelegates::MaterialTextureSettingsChanged.Broadcast(MaterialsThatUseThisTexture[i]);
		}
	}
		
#if WITH_EDITORONLY_DATA
	// any texture that is referencing this texture as AssociatedNormalMap needs to be informed
	{
		TArray<UTexture*> TexturesThatUseThisTexture;

		for (TObjectIterator<UTexture> It; It; ++It)
		{
			UTexture* Tex = *It;

			if(Tex != this && Tex->CompositeTexture == this && Tex->CompositeTextureMode != CTM_Disabled)
			{
				TexturesThatUseThisTexture.Add(Tex);
			}
		}
		for (int32 i = 0; i < TexturesThatUseThisTexture.Num(); ++i)
		{
			TexturesThatUseThisTexture[i]->PostEditChange();
		}
	}
#endif
}
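The material-update paths above hinge on de-duplicating base materials before doing any expensive per-material work, since many material instances can share one base. A short standard-C++ analog of that pattern follows; Material and MaterialInstance here are placeholder types, not engine classes.

#include <unordered_set>
#include <vector>

struct Material {};
struct MaterialInstance { Material* Base = nullptr; };

// Collect each base material exactly once, however many instances point at it,
// so a costly update can run once per base rather than once per instance.
std::unordered_set<Material*> CollectBaseMaterials(const std::vector<MaterialInstance*>& Instances)
{
    std::unordered_set<Material*> BaseMaterials;
    for (const MaterialInstance* Instance : Instances)
    {
        if (Instance != nullptr && Instance->Base != nullptr)
        {
            BaseMaterials.insert(Instance->Base); // set insertion silently ignores duplicates
        }
    }
    return BaseMaterials;
}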
Example #15
// @TODO LSwift: Perhaps replace FromBlob and ToBlob usage with hexadecimal notation instead
bool FBuildPatchAppManifest::DeserializeFromJSON( const FString& JSONInput )
{
	bool bSuccess = true;
	TSharedPtr<FJsonObject> JSONManifestObject;
	TSharedRef<TJsonReader<TCHAR>> Reader = TJsonReaderFactory<TCHAR>::Create(JSONInput);

	// Clear current data
	DestroyData();

	// Attempt to deserialize JSON
	if (!FJsonSerializer::Deserialize(Reader, JSONManifestObject) || !JSONManifestObject.IsValid())
	{
		return false;
	}

	// Store a list of all data GUID for later use
	TSet<FGuid> AllDataGuids;

	// Get the values map
	TMap<FString, TSharedPtr<FJsonValue>>& JsonValueMap = JSONManifestObject->Values;

	// Manifest version did not always exist
	int32 ManifestFileVersionInt = 0;
	TSharedPtr<FJsonValue> JsonManifestFileVersion = JsonValueMap.FindRef(TEXT("ManifestFileVersion"));
	if (JsonManifestFileVersion.IsValid() && FromStringBlob(JsonManifestFileVersion->AsString(), ManifestFileVersionInt))
	{
		Data->ManifestFileVersion = static_cast<EBuildPatchAppManifestVersion::Type>(ManifestFileVersionInt);
	}
	else
	{
		// Otherwise, presume the version just before we started storing the version number
		Data->ManifestFileVersion = static_cast<EBuildPatchAppManifestVersion::Type>(EBuildPatchAppManifestVersion::StartStoringVersion - 1);
	}

	// Get the app and version strings
	TSharedPtr< FJsonValue > JsonAppID = JsonValueMap.FindRef( TEXT("AppID") );
	TSharedPtr< FJsonValue > JsonAppNameString = JsonValueMap.FindRef( TEXT("AppNameString") );
	TSharedPtr< FJsonValue > JsonBuildVersionString = JsonValueMap.FindRef( TEXT("BuildVersionString") );
	TSharedPtr< FJsonValue > JsonLaunchExe = JsonValueMap.FindRef( TEXT("LaunchExeString") );
	TSharedPtr< FJsonValue > JsonLaunchCommand = JsonValueMap.FindRef( TEXT("LaunchCommand") );
	TSharedPtr< FJsonValue > JsonPrereqName = JsonValueMap.FindRef( TEXT("PrereqName") );
	TSharedPtr< FJsonValue > JsonPrereqPath = JsonValueMap.FindRef( TEXT("PrereqPath") );
	TSharedPtr< FJsonValue > JsonPrereqArgs = JsonValueMap.FindRef( TEXT("PrereqArgs") );
	bSuccess = bSuccess && JsonAppID.IsValid();
	if( bSuccess )
	{
		bSuccess = bSuccess && FromStringBlob( JsonAppID->AsString(), Data->AppID );
	}
	bSuccess = bSuccess && JsonAppNameString.IsValid();
	if( bSuccess )
	{
		Data->AppName = JsonAppNameString->AsString();
	}
	bSuccess = bSuccess && JsonBuildVersionString.IsValid();
	if( bSuccess )
	{
		Data->BuildVersion = JsonBuildVersionString->AsString();
	}
	bSuccess = bSuccess && JsonLaunchExe.IsValid();
	if( bSuccess )
	{
		Data->LaunchExe = JsonLaunchExe->AsString();
	}
	bSuccess = bSuccess && JsonLaunchCommand.IsValid();
	if( bSuccess )
	{
		Data->LaunchCommand = JsonLaunchCommand->AsString();
	}

	// Get the prerequisites installer info.  These are optional entries.
	Data->PrereqName = JsonPrereqName.IsValid() ? JsonPrereqName->AsString() : FString();
	Data->PrereqPath = JsonPrereqPath.IsValid() ? JsonPrereqPath->AsString() : FString();
	Data->PrereqArgs = JsonPrereqArgs.IsValid() ? JsonPrereqArgs->AsString() : FString();

	// Get the FileManifestList
	TSharedPtr<FJsonValue> JsonFileManifestList = JsonValueMap.FindRef(TEXT("FileManifestList"));
	bSuccess = bSuccess && JsonFileManifestList.IsValid();
	if( bSuccess )
	{
		TArray<TSharedPtr<FJsonValue>> JsonFileManifestArray = JsonFileManifestList->AsArray();
		for (auto JsonFileManifestIt = JsonFileManifestArray.CreateConstIterator(); JsonFileManifestIt && bSuccess; ++JsonFileManifestIt)
		{
			TSharedPtr<FJsonObject> JsonFileManifest = (*JsonFileManifestIt)->AsObject();

			const int32 FileIndex = Data->FileManifestList.Add(FFileManifestData());
			FFileManifestData& FileManifest = Data->FileManifestList[FileIndex];
			FileManifest.Filename = JsonFileManifest->GetStringField(TEXT("Filename"));
			bSuccess = bSuccess && FString::ToBlob(JsonFileManifest->GetStringField(TEXT("FileHash")), FileManifest.FileHash.Hash, FSHA1::DigestSize);
			TArray<TSharedPtr<FJsonValue>> JsonChunkPartArray = JsonFileManifest->GetArrayField(TEXT("FileChunkParts"));
			for (auto JsonChunkPartIt = JsonChunkPartArray.CreateConstIterator(); JsonChunkPartIt && bSuccess; ++JsonChunkPartIt)
			{
				const int32 ChunkIndex = FileManifest.FileChunkParts.Add(FChunkPartData());
				FChunkPartData& FileChunkPart = FileManifest.FileChunkParts[ChunkIndex];
				TSharedPtr<FJsonObject> JsonChunkPart = (*JsonChunkPartIt)->AsObject();
				bSuccess = bSuccess && FGuid::Parse(JsonChunkPart->GetStringField(TEXT("Guid")), FileChunkPart.Guid);
				bSuccess = bSuccess && FromStringBlob(JsonChunkPart->GetStringField(TEXT("Offset")), FileChunkPart.Offset);
				bSuccess = bSuccess && FromStringBlob(JsonChunkPart->GetStringField(TEXT("Size")), FileChunkPart.Size);
				AllDataGuids.Add(FileChunkPart.Guid);
			}
			FileManifest.bIsUnixExecutable = JsonFileManifest->HasField(TEXT("bIsUnixExecutable")) && JsonFileManifest->GetBoolField(TEXT("bIsUnixExecutable"));
			FileManifest.bIsReadOnly = JsonFileManifest->HasField(TEXT("bIsReadOnly")) && JsonFileManifest->GetBoolField(TEXT("bIsReadOnly"));
			FileManifest.bIsCompressed = JsonFileManifest->HasField(TEXT("bIsCompressed")) && JsonFileManifest->GetBoolField(TEXT("bIsCompressed"));
			FileManifest.SymlinkTarget = JsonFileManifest->HasField(TEXT("SymlinkTarget")) ? JsonFileManifest->GetStringField(TEXT("SymlinkTarget")) : TEXT("");
			FileManifest.Init();
		}
	}
	Data->FileManifestList.Sort();
	for (auto& FileManifest : Data->FileManifestList)
	{
		FileManifestLookup.Add(FileManifest.Filename, &FileManifest);
	}

	// For each chunk, set up its info
	for (const auto& DataGuid : AllDataGuids)
	{
		int32 ChunkIndex = Data->ChunkList.Add(FChunkInfoData());
		Data->ChunkList[ChunkIndex].Guid = DataGuid;
	}

	// Setup chunk info lookup
	for (auto& ChunkInfo : Data->ChunkList)
	{
		ChunkInfoLookup.Add(ChunkInfo.Guid, &ChunkInfo);
	}

	// Get the ChunkHashList
	bool bHasChunkHashList = false;
	TSharedPtr<FJsonValue> JsonChunkHashList = JsonValueMap.FindRef(TEXT("ChunkHashList"));
	bSuccess = bSuccess && JsonChunkHashList.IsValid();
	if (bSuccess)
	{
		TSharedPtr<FJsonObject> JsonChunkHashListObj = JsonChunkHashList->AsObject();
		for (auto ChunkHashIt = JsonChunkHashListObj->Values.CreateConstIterator(); ChunkHashIt && bSuccess; ++ChunkHashIt)
		{
			FGuid ChunkGuid;
			uint64 ChunkHash = 0;
			bSuccess = bSuccess && FGuid::Parse(ChunkHashIt.Key(), ChunkGuid);
			bSuccess = bSuccess && FromStringBlob(ChunkHashIt.Value()->AsString(), ChunkHash);
			if (bSuccess && ChunkInfoLookup.Contains(ChunkGuid))
			{
				FChunkInfoData* ChunkInfoData = ChunkInfoLookup[ChunkGuid];
				ChunkInfoData->Hash = ChunkHash;
				bHasChunkHashList = true;
			}
		}
	}

	// Get the DataGroupList
	TSharedPtr<FJsonValue> JsonDataGroupList = JsonValueMap.FindRef(TEXT("DataGroupList"));
	if (JsonDataGroupList.IsValid())
	{
		TSharedPtr<FJsonObject> JsonDataGroupListObj = JsonDataGroupList->AsObject();
		for (auto DataGroupIt = JsonDataGroupListObj->Values.CreateConstIterator(); DataGroupIt && bSuccess; ++DataGroupIt)
		{
			FGuid DataGuid;
			uint8 DataGroup = INDEX_NONE;
			// If the list exists, we must be able to parse it; otherwise it's an error
			bSuccess = bSuccess && FGuid::Parse(DataGroupIt.Key(), DataGuid);
			bSuccess = bSuccess && FromStringBlob(DataGroupIt.Value()->AsString(), DataGroup);
			if (bSuccess && ChunkInfoLookup.Contains(DataGuid))
			{
				FChunkInfoData* ChunkInfoData = ChunkInfoLookup[DataGuid];
				ChunkInfoData->GroupNumber = DataGroup;
			}
		}
	}
	else if (bSuccess)
	{
		// If the list did not exist in the manifest, the grouping uses the deprecated CRC functionality.
		// As long as there are no previous parsing errors, we can build the group list from the GUIDs.
		for (auto& ChunkInfo : Data->ChunkList)
		{
			ChunkInfo.GroupNumber = FCrc::MemCrc_DEPRECATED(&ChunkInfo.Guid, sizeof(FGuid)) % 100;
		}
	}

	// Get the ChunkFilesizeList
	bool bHasChunkFilesizeList = false;
	TSharedPtr< FJsonValue > JsonChunkFilesizeList = JsonValueMap.FindRef(TEXT("ChunkFilesizeList"));
	if (JsonChunkFilesizeList.IsValid())
	{
		TSharedPtr< FJsonObject > JsonChunkFilesizeListObj = JsonChunkFilesizeList->AsObject();
		for (auto ChunkFilesizeIt = JsonChunkFilesizeListObj->Values.CreateConstIterator(); ChunkFilesizeIt; ++ChunkFilesizeIt)
		{
			FGuid ChunkGuid;
			int64 ChunkSize = 0;
			if (FGuid::Parse(ChunkFilesizeIt.Key(), ChunkGuid))
			{
				FromStringBlob(ChunkFilesizeIt.Value()->AsString(), ChunkSize);
				if (ChunkInfoLookup.Contains(ChunkGuid))
				{
					FChunkInfoData* ChunkInfoData = ChunkInfoLookup[ChunkGuid];
					ChunkInfoData->FileSize = ChunkSize;
					bHasChunkFilesizeList = true;
				}
			}
		}
	}
	if (bHasChunkFilesizeList == false)
	{
		// The chunk file size list is missing, so this manifest is from before we saved chunks compressed. Assume the full chunk data size.
		for (FChunkInfoData& ChunkInfo : Data->ChunkList)
		{
			ChunkInfo.FileSize = FBuildPatchData::ChunkDataSize;
		}
	}

	// Get the bIsFileData value. The variable exists in manifest versions StoresIfChunkOrFileData and later;
	// for older versions, the previous method is to check whether ChunkHashList is empty.
	TSharedPtr<FJsonValue> JsonIsFileData = JsonValueMap.FindRef(TEXT("bIsFileData"));
	if (JsonIsFileData.IsValid() && JsonIsFileData->Type == EJson::Boolean)
	{
		Data->bIsFileData = JsonIsFileData->AsBool();
	}
	else
	{
		Data->bIsFileData = !bHasChunkHashList;
	}

	// Get the custom fields. This is optional, and should not fail if it does not exist
	TSharedPtr< FJsonValue > JsonCustomFields = JsonValueMap.FindRef( TEXT( "CustomFields" ) );
	if( JsonCustomFields.IsValid() )
	{
		TSharedPtr< FJsonObject > JsonCustomFieldsObj = JsonCustomFields->AsObject();
		for( auto CustomFieldIt = JsonCustomFieldsObj->Values.CreateConstIterator(); CustomFieldIt && bSuccess; ++CustomFieldIt )
		{
			Data->CustomFields.Add(FCustomFieldData(CustomFieldIt.Key(), CustomFieldIt.Value()->AsString()));
		}
	}
	CustomFieldLookup.Empty(Data->CustomFields.Num());
	for (auto& CustomField : Data->CustomFields)
	{
		CustomFieldLookup.Add(CustomField.Key, &CustomField);
	}

	// If this is file data, fill out the guid to filename lookup, and chunk file size
	if (Data->bIsFileData)
	{
		for (auto& FileManifest : Data->FileManifestList)
		{
			if (FileManifest.FileChunkParts.Num() == 1)
			{
				FGuid& Guid = FileManifest.FileChunkParts[0].Guid;
				FileNameLookup.Add(Guid, &FileManifest.Filename);
				if (ChunkInfoLookup.Contains(Guid))
				{
					FChunkInfoData* ChunkInfoData = ChunkInfoLookup[Guid];
					ChunkInfoData->FileSize = FileManifest.GetFileSize();
				}
			}
			else
			{
				bSuccess = false;
			}
		}
	}

	// Calculate build size
	TotalBuildSize = 0;
	TotalDownloadSize = 0;
	if (bSuccess)
	{
		for (auto& FileManifest : Data->FileManifestList)
		{
			TotalBuildSize += FileManifest.GetFileSize();
		}
		for (auto& Chunk : Data->ChunkList)
		{
			TotalDownloadSize += Chunk.FileSize;
		}
	}

	// Mark as needing to be re-saved; clients that store manifests should start using the binary format
	bNeedsResaving = true;

	// Make sure we don't have any half loaded data
	if( !bSuccess )
	{
		DestroyData();
	}

	return bSuccess;
}
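While walking the file manifests, the deserializer above collects every referenced chunk GUID into a set and afterwards creates exactly one chunk record per unique GUID, plus a lookup so later sections can patch hashes, sizes and group numbers in place. A compact standard-C++ sketch of that two-pass idea, with placeholder types:

#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

struct ChunkPart    { std::string Guid; };
struct FileManifest { std::vector<ChunkPart> Parts; };
struct ChunkInfo    { std::string Guid; long long FileSize = 0; };

// Pass 1: record every GUID referenced by any file. Pass 2: create one ChunkInfo per
// unique GUID, keyed for fast lookup when later manifest sections fill in the details.
std::unordered_map<std::string, ChunkInfo> BuildChunkLookup(const std::vector<FileManifest>& Files)
{
    std::unordered_set<std::string> AllGuids;
    for (const FileManifest& File : Files)
    {
        for (const ChunkPart& Part : File.Parts)
        {
            AllGuids.insert(Part.Guid);
        }
    }

    std::unordered_map<std::string, ChunkInfo> Lookup;
    for (const std::string& Guid : AllGuids)
    {
        Lookup.emplace(Guid, ChunkInfo{Guid});
    }
    return Lookup;
}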
Example #16
// Main function called from the Android entry point
int32 AndroidMain(struct android_app* state)
{
	FPlatformMisc::LowLevelOutputDebugString(L"Entered AndroidMain()");

	// Force the first call to GetJavaEnv() to happen on the game thread, allowing subsequent calls to occur on any thread
	FAndroidApplication::GetJavaEnv();

	// adjust the file descriptor limits to allow as many open files as possible
	rlimit cur_fd_limit;
	{
		int result = getrlimit(RLIMIT_NOFILE, & cur_fd_limit);
		//FPlatformMisc::LowLevelOutputDebugStringf(TEXT("(%d) Current fd limits: soft = %lld, hard = %lld"), result, cur_fd_limit.rlim_cur, cur_fd_limit.rlim_max);
	}
	{
		rlimit new_limit = cur_fd_limit;
		new_limit.rlim_cur = cur_fd_limit.rlim_max;
		new_limit.rlim_max = cur_fd_limit.rlim_max;
		int result = setrlimit(RLIMIT_NOFILE, &new_limit);
		//FPlatformMisc::LowLevelOutputDebugStringf(TEXT("(%d) Setting fd limits: soft = %lld, hard = %lld"), result, new_limit.rlim_cur, new_limit.rlim_max);
	}
	{
		int result = getrlimit(RLIMIT_NOFILE, & cur_fd_limit);
		//FPlatformMisc::LowLevelOutputDebugStringf(TEXT("(%d) Current fd limits: soft = %lld, hard = %lld"), result, cur_fd_limit.rlim_cur, cur_fd_limit.rlim_max);
	}

	// Set up joystick support.
	// r19 is the first NDK to include AMotionEvent_getAxisValue in the headers.
	// However, it has existed in the .so since Honeycomb, so query for the symbol
	// to determine whether to try controller support.
	{
		void* Lib = dlopen("libandroid.so",0);
		if (Lib != NULL)
		{
			GetAxes = (GetAxesType)dlsym(Lib, "AMotionEvent_getAxisValue");
		}

		if (GetAxes != NULL)
		{
			FPlatformMisc::LowLevelOutputDebugStringf(TEXT("Controller interface supported\n"));
		}
		else
		{
			FPlatformMisc::LowLevelOutputDebugStringf(TEXT("Controller interface unsupported\n"));
		}
	}

	// setup key filtering
	static const uint32 MAX_KEY_MAPPINGS(256);
	uint16 KeyCodes[MAX_KEY_MAPPINGS];
	uint32 NumKeyCodes = FPlatformMisc::GetKeyMap(KeyCodes, nullptr, MAX_KEY_MAPPINGS);

	for (int i = 0; i < NumKeyCodes; ++i)
	{
		MappedKeyCodes.Add(KeyCodes[i]);
	}

	const int IgnoredGamepadKeyCodeCount = sizeof(IgnoredGamepadKeyCodesList)/sizeof(uint16);
	for (int i = 0; i < IgnoredGamepadKeyCodeCount; ++i)
	{
		IgnoredGamepadKeyCodes.Add(IgnoredGamepadKeyCodesList[i]);
	}

	const int ValidGamepadKeyCodeCount = sizeof(ValidGamepadKeyCodesList)/sizeof(uint16);
	for (int i = 0; i < ValidGamepadKeyCodeCount; ++i)
	{
		ValidGamepadKeyCodes.Add(ValidGamepadKeyCodesList[i]);
	}

	// wait for java activity onCreate to finish
	while (!GResumeMainInit)
	{
		FPlatformProcess::Sleep(0.01f);
		FPlatformMisc::MemoryBarrier();
	}

	// read the command line file
	InitCommandLine();
	FPlatformMisc::LowLevelOutputDebugStringf(TEXT("Final commandline: %s\n"), FCommandLine::Get());

	EventHandlerEvent = FPlatformProcess::GetSynchEventFromPool(false);
	FPlatformMisc::LowLevelOutputDebugString(L"Created sync event");
	FAppEventManager::GetInstance()->SetEventHandlerEvent(EventHandlerEvent);

	// ready for onCreate to complete
	GEventHandlerInitialized = true;

	// Initialize file system access (i.e. mount OBBs, etc.).
	// We need to do this really early for Android so that files in the
	// OBBs and APK are found.
	IPlatformFile::GetPlatformPhysical().Initialize(nullptr, FCommandLine::Get());

#if 0
	for (int32 i = 0; i < 10; i++)
	{
		sleep(1);
		FPlatformMisc::LowLevelOutputDebugStringf(TEXT("[Patch %d]"), i);

	}
	FPlatformMisc::LowLevelOutputDebugStringf(TEXT("[Patch] : Dont Patch \n"));
#endif

	// initialize the engine
	GEngineLoop.PreInit(0, NULL, FCommandLine::Get());

	// initialize HMDs
	InitHMDs();

	UE_LOG(LogAndroid, Display, TEXT("Passed PreInit()"));

	GLog->SetCurrentThreadAsMasterThread();

	GEngineLoop.Init();

	UE_LOG(LogAndroid, Log, TEXT("Passed GEngineLoop.Init()"));

	// tick until done
	while (!GIsRequestingExit)
	{
		FAppEventManager::GetInstance()->Tick();
		if(!FAppEventManager::GetInstance()->IsGamePaused())
		{
			GEngineLoop.Tick();

			float timeToSleep = 0.05f; // in seconds
			// NOTE: sleep() takes whole seconds, so 0.05f truncates to 0 and this call
			// returns immediately rather than sleeping for 50ms.
			sleep(timeToSleep);
		}

#if !UE_BUILD_SHIPPING
		// show console window on next game tick
		if (GShowConsoleWindowNextTick)
		{
			GShowConsoleWindowNextTick = false;
			AndroidThunkCpp_ShowConsoleWindow();
		}
#endif
	}

	UE_LOG(LogAndroid, Log, TEXT("Exiting"));

	// exit out!
	GEngineLoop.Exit();

	return 0;
}
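The joystick setup above resolves AMotionEvent_getAxisValue through dlsym instead of linking it directly, so the binary still loads on devices whose NDK headers predate the symbol. Below is a minimal POSIX sketch of that optional-symbol probe; the symbol name is the real NDK one, the rest is illustrative, and on a non-Android host the lookup simply fails and the fallback message is printed (link with -ldl where required).

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <dlfcn.h>

// Matches the NDK declaration of AMotionEvent_getAxisValue; the event type is left opaque here.
typedef float (*GetAxisValueFn)(const void* MotionEvent, int32_t Axis, size_t PointerIndex);

int main()
{
    GetAxisValueFn GetAxisValue = nullptr;

    // Probe the library at runtime; dlopen returns nullptr if it is not present.
    if (void* Lib = dlopen("libandroid.so", RTLD_NOW))
    {
        GetAxisValue = reinterpret_cast<GetAxisValueFn>(dlsym(Lib, "AMotionEvent_getAxisValue"));
    }

    std::printf(GetAxisValue ? "Controller axis API available\n"
                             : "Controller axis API not available\n");
    return 0;
}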
void FMovieSceneSequenceInstance::RefreshInstance( IMovieScenePlayer& Player )
{
	if(MovieSceneSequence.IsValid())
	{
		UMovieScene* MovieScene = MovieSceneSequence->GetMovieScene();
		TimeRange = MovieScene->GetPlaybackRange();

		UMovieSceneTrack* CameraCutTrack = MovieScene->GetCameraCutTrack();

		if (CameraCutTrack != nullptr)
		{
			FMovieSceneInstanceMap CameraCutTrackInstanceMap;

			if (CameraCutTrackInstance.IsValid())
			{
				CameraCutTrackInstanceMap.Add(CameraCutTrack, CameraCutTrackInstance);
			}

			TArray<TWeakObjectPtr<UObject>> Objects;
			TArray<UMovieSceneTrack*> Tracks;
			Tracks.Add(CameraCutTrack);
			RefreshInstanceMap(Tracks, Objects, CameraCutTrackInstanceMap, Player);

			CameraCutTrackInstance = CameraCutTrackInstanceMap.FindRef(CameraCutTrack);
		}
		else if(CameraCutTrackInstance.IsValid())
		{
			CameraCutTrackInstance->ClearInstance(Player, *this);
			CameraCutTrackInstance.Reset();
		}

		// Get all the master tracks and create instances for them if needed
		const TArray<UMovieSceneTrack*>& MasterTracks = MovieScene->GetMasterTracks();
		TArray<TWeakObjectPtr<UObject>> Objects;
		RefreshInstanceMap( MasterTracks, Objects, MasterTrackInstances, Player );

		TSet< FGuid > FoundObjectBindings;
		// Get all tracks for each object binding and create instances for them if needed
		const TArray<FMovieSceneBinding>& ObjectBindings = MovieScene->GetBindings();
		for( int32 BindingIndex = 0; BindingIndex < ObjectBindings.Num(); ++BindingIndex )
		{
			const FMovieSceneBinding& ObjectBinding = ObjectBindings[BindingIndex];

			// Create an instance for this object binding
			FMovieSceneObjectBindingInstance& BindingInstance = ObjectBindingInstances.FindOrAdd( ObjectBinding.GetObjectGuid() );
			BindingInstance.ObjectGuid = ObjectBinding.GetObjectGuid();

			FoundObjectBindings.Add( ObjectBinding.GetObjectGuid() );

			// Populate the runtime objects for this instance of the binding.
			// @todo sequencer: SubSequences: We need to know which actors were removed and which actors were added so we know which saved actor state to restore/create
			BindingInstance.RuntimeObjects.Empty();
			Player.GetRuntimeObjects( SharedThis( this ), BindingInstance.ObjectGuid, BindingInstance.RuntimeObjects );

			// Refresh the instance's tracks
			const TArray<UMovieSceneTrack*>& Tracks = ObjectBinding.GetTracks();
			RefreshInstanceMap( Tracks, BindingInstance.RuntimeObjects, BindingInstance.TrackInstances, Player );
		}

		IMovieSceneSpawnRegister& SpawnRegister = Player.GetSpawnRegister();

		// Remove object binding instances which are no longer bound
		TMap<FGuid, FMovieSceneObjectBindingInstance>::TIterator It = ObjectBindingInstances.CreateIterator();
		for( ; It; ++It )
		{
			if( !FoundObjectBindings.Contains( It.Key() ) )
			{
				SpawnRegister.DestroySpawnedObject(It.Key(), *this, Player);

				// The instance no longer is bound to an existing guid
				It.RemoveCurrent();
			}
		}
	}
}
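RefreshInstance records every binding GUID it touches in FoundObjectBindings and then prunes any map entry whose key was not seen, using iterator removal so the map stays valid mid-loop. The same keep-set/prune idiom in standard C++, with illustrative names:

#include <string>
#include <unordered_map>
#include <unordered_set>

struct BindingInstance {};

// Erase every entry of Instances whose key was not seen during the refresh pass.
void PruneStaleBindings(std::unordered_map<std::string, BindingInstance>& Instances,
                        const std::unordered_set<std::string>& FoundThisRefresh)
{
    for (auto It = Instances.begin(); It != Instances.end(); )
    {
        if (FoundThisRefresh.count(It->first) == 0)
        {
            It = Instances.erase(It); // erase returns the next valid iterator
        }
        else
        {
            ++It;
        }
    }
}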
bool LocalizationCommandletTasks::ReportLoadedAudioAssets(const TArray<ULocalizationTarget*>& Targets, const TOptional<FString>& CultureName)
{
	TSet<FString> LoadedDialogueWaveAssets;
	TSet<FString> LoadedSoundWaveAssets;

	for (const ULocalizationTarget* Target : Targets)
	{
		const FString RootAssetPath = Target->IsMemberOfEngineTargetSet() ? TEXT("/Engine") : TEXT("/Game");

		TArray<FString> CulturesToTest;
		{
			if (CultureName.IsSet())
			{
				CulturesToTest.Add(CultureName.GetValue());
			}
			else
			{
				CulturesToTest.Reserve(Target->Settings.SupportedCulturesStatistics.Num());
				for (const FCultureStatistics& CultureData : Target->Settings.SupportedCulturesStatistics)
				{
					CulturesToTest.Add(CultureData.CultureName);
				}
			}
		}

		TArray<FString> DialogueWavePathsToTest;
		TArray<FString> SoundWavePathsToTest;
		{
			const FString NativeCulture = Target->Settings.SupportedCulturesStatistics.IsValidIndex(Target->Settings.NativeCultureIndex) ? Target->Settings.SupportedCulturesStatistics[Target->Settings.NativeCultureIndex].CultureName : FString();
			const bool bImportNativeAsSource = Target->Settings.ImportDialogueSettings.bImportNativeAsSource && !NativeCulture.IsEmpty();
			if (bImportNativeAsSource)
			{
				DialogueWavePathsToTest.Add(RootAssetPath);
				SoundWavePathsToTest.Add(RootAssetPath / Target->Settings.ImportDialogueSettings.ImportedDialogueFolder);
			}

			for (const FString& Culture : CulturesToTest)
			{
				if (bImportNativeAsSource && Culture == NativeCulture)
				{
					continue;
				}

				DialogueWavePathsToTest.Add(RootAssetPath / TEXT("L10N") / Culture);
				SoundWavePathsToTest.Add(RootAssetPath / TEXT("L10N") / Culture / Target->Settings.ImportDialogueSettings.ImportedDialogueFolder);
			}
		}

		ForEachObjectOfClass(UDialogueWave::StaticClass(), [&](UObject* InObject)
		{
			const FString ObjectPath = InObject->GetPathName();

			auto FindAssetPathPredicate = [&](const FString& InAssetPath) -> bool
			{
				return ObjectPath.StartsWith(InAssetPath, ESearchCase::IgnoreCase);
			};

			if (DialogueWavePathsToTest.ContainsByPredicate(FindAssetPathPredicate))
			{
				LoadedDialogueWaveAssets.Add(ObjectPath);
			}
		});

		ForEachObjectOfClass(USoundWave::StaticClass(), [&](UObject* InObject)
		{
			const FString ObjectPath = InObject->GetPathName();

			auto FindAssetPathPredicate = [&](const FString& InAssetPath) -> bool
			{
				return ObjectPath.StartsWith(InAssetPath, ESearchCase::IgnoreCase);
			};

			if (SoundWavePathsToTest.ContainsByPredicate(FindAssetPathPredicate))
			{
				LoadedSoundWaveAssets.Add(ObjectPath);
			}
		});
	}

	if (LoadedDialogueWaveAssets.Num() > 0 || LoadedSoundWaveAssets.Num() > 0)
	{
		FTextBuilder MsgBuilder;
		MsgBuilder.AppendLine(LOCTEXT("Warning_LoadedAudioAssetsMsg", "The following audio assets have been loaded by the editor and may cause the dialogue import to fail as their files will be read-only."));
		MsgBuilder.AppendLine(FText::GetEmpty());
		MsgBuilder.AppendLine(LOCTEXT("Warning_LoadedAudioAssetsMsg_Continue", "Do you want to continue?"));
				
		if (LoadedDialogueWaveAssets.Num() > 0)
		{
			MsgBuilder.AppendLine(FText::GetEmpty());
			MsgBuilder.AppendLine(LOCTEXT("Warning_LoadedAudioAssetsMsg_DialogueWaves", "Dialogue Waves:"));

			MsgBuilder.Indent();
			for (const FString& LoadedDialogueWaveAsset : LoadedDialogueWaveAssets)
			{
				MsgBuilder.AppendLine(LoadedDialogueWaveAsset);
			}
			MsgBuilder.Unindent();
		}

		if (LoadedSoundWaveAssets.Num() > 0)
		{
			MsgBuilder.AppendLine(FText::GetEmpty());
			MsgBuilder.AppendLine(LOCTEXT("Warning_LoadedAudioAssetsMsg_SoundWaves", "Sound Waves:"));

			MsgBuilder.Indent();
			for (const FString& LoadedSoundWaveAsset : LoadedSoundWaveAssets)
			{
				MsgBuilder.AppendLine(LoadedSoundWaveAsset);
			}
			MsgBuilder.Unindent();
		}

		const FText MsgTitle = LOCTEXT("Warning_LoadedAudioAssetsTitle", "Warning - Loaded Audio Assets");
		return FMessageDialog::Open(EAppMsgType::YesNo, MsgBuilder.ToText(), &MsgTitle) == EAppReturnType::Yes;
	}

	return true;
}
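The audit above classifies each loaded asset by checking whether its object path starts with any of a small list of root paths, case-insensitively. A standalone standard-C++ version of that prefix test; the helper names are made up for the sketch.

#include <algorithm>
#include <cctype>
#include <string>
#include <vector>

// Case-insensitive "does Path start with Prefix" check.
bool StartsWithIgnoreCase(const std::string& Path, const std::string& Prefix)
{
    if (Prefix.size() > Path.size())
    {
        return false;
    }
    return std::equal(Prefix.begin(), Prefix.end(), Path.begin(),
                      [](unsigned char A, unsigned char B) { return std::tolower(A) == std::tolower(B); });
}

// Rough equivalent of TArray::ContainsByPredicate over the candidate root paths.
bool MatchesAnyRoot(const std::string& ObjectPath, const std::vector<std::string>& Roots)
{
    return std::any_of(Roots.begin(), Roots.end(),
                       [&ObjectPath](const std::string& Root) { return StartsWithIgnoreCase(ObjectPath, Root); });
}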
FReply FLandscapeEditorDetailCustomization_CopyPaste::OnGizmoExportButtonClicked()
{
	FEdModeLandscape* LandscapeEdMode = GetEditorMode();
	if (LandscapeEdMode != NULL)
	{
		ALandscapeGizmoActiveActor* Gizmo = LandscapeEdMode->CurrentGizmoActor.Get();
		if (Gizmo && Gizmo->TargetLandscapeInfo && Gizmo->SelectedData.Num())
		{
			int32 TargetIndex = -1;
			ULandscapeInfo* LandscapeInfo = Gizmo->TargetLandscapeInfo;
			TArray<FString> Filenames;

			// Local set for export
			TSet<ULandscapeLayerInfoObject*> LayerInfoSet;
			for (int32 i = 0; i < Gizmo->LayerInfos.Num(); i++)
			{
				if (LandscapeEdMode->CurrentToolTarget.TargetType == ELandscapeToolTargetType::Weightmap && LandscapeEdMode->CurrentToolTarget.LayerInfo == Gizmo->LayerInfos[i])
				{
					TargetIndex = i;
				}
				LayerInfoSet.Add(Gizmo->LayerInfos[i]);
			}

			for (int32 i = -1; i < Gizmo->LayerInfos.Num(); i++)
			{
				if (!LandscapeEdMode->UISettings->bApplyToAllTargets && i != TargetIndex)
				{
					continue;
				}
				FString SaveDialogTitle;
				FString DefaultFilename;
				FString FileTypes;

				if (i < 0)
				{
					if (!(Gizmo->DataType & LGT_Height))
					{
						continue;
					}
					SaveDialogTitle = NSLOCTEXT("UnrealEd", "LandscapeExport_HeightmapFilename", "Choose filename for Heightmap Export").ToString();
					DefaultFilename = TEXT("Heightmap.raw");
					FileTypes = TEXT("Heightmap .raw files|*.raw|Heightmap .r16 files|*.r16|All files|*.*");
				}
				else
				{
					if (!(Gizmo->DataType & LGT_Weight))
					{
						continue;
					}

					FName LayerName = Gizmo->LayerInfos[i]->LayerName;
					SaveDialogTitle = FText::Format(NSLOCTEXT("UnrealEd", "LandscapeExport_LayerFilename", "Choose filename for Layer {0} Export"), FText::FromString(LayerName.ToString())).ToString();
					DefaultFilename = FString::Printf(TEXT("%s.raw"), *LayerName.ToString());
					FileTypes = TEXT("Layer .raw files|*.raw|Layer .r8 files|*.r8|All files|*.*");
				}

				TArray<FString> SaveFilenames;
				IDesktopPlatform* DesktopPlatform = FDesktopPlatformModule::Get();
				bool bSave = false;
				if (DesktopPlatform)
				{
					void* ParentWindowWindowHandle = NULL;

					IMainFrameModule& MainFrameModule = FModuleManager::LoadModuleChecked<IMainFrameModule>(TEXT("MainFrame"));
					const TSharedPtr<SWindow>& MainFrameParentWindow = MainFrameModule.GetParentWindow();
					if (MainFrameParentWindow.IsValid() && MainFrameParentWindow->GetNativeWindow().IsValid())
					{
						ParentWindowWindowHandle = MainFrameParentWindow->GetNativeWindow()->GetOSWindowHandle();
					}

					bSave = DesktopPlatform->SaveFileDialog(
						ParentWindowWindowHandle,
						SaveDialogTitle,
						LandscapeEdMode->UISettings->LastImportPath,
						DefaultFilename,
						FileTypes,
						EFileDialogFlags::None,
						SaveFilenames
						);
				}

				if (!bSave)
				{
					return FReply::Handled();
				}

				Filenames.Add(SaveFilenames[0]);
				LandscapeEdMode->UISettings->LastImportPath = FPaths::GetPath(SaveFilenames[0]);
			}

			Gizmo->Export(TargetIndex, Filenames);
		}
	}

	return FReply::Handled();
}
	virtual bool FixupObject(const FName& InObjectPath, FName& OutNewObjectPath) override
	{
		OutNewObjectPath = NAME_None;

		if (InObjectPath.ToString().StartsWith(TEXT("/Script/")))
		{
			// We can't use FindObject while we're saving
			if (!GIsSavingPackage)
			{
				const FString ClassPathStr = InObjectPath.ToString();

				UClass* FoundClass = FindObject<UClass>(ANY_PACKAGE, *ClassPathStr);
				if (!FoundClass)
				{
					// Use the linker to search for class name redirects (from the loaded ActiveClassRedirects)
					const FString ClassName = FPackageName::ObjectPathToObjectName(ClassPathStr);
					const FName NewClassName = FLinkerLoad::FindNewNameForClass(*ClassName, false);

					if (!NewClassName.IsNone())
					{
						// Our new class name might be lacking the path, so try and find it so we can use the full path in the collection
						FoundClass = FindObject<UClass>(ANY_PACKAGE, *NewClassName.ToString());
						if (FoundClass)
						{
							OutNewObjectPath = *FoundClass->GetPathName();
						}
					}
				}
			}
		}
		else
		{
			// Keep track of visited redirectors in case we loop.
			TSet<FName> VisitedRedirectors;

			// Use the asset registry to avoid loading the object
			FAssetData ObjectAssetData = AssetRegistryModule.Get().GetAssetByObjectPath(InObjectPath);
			while (ObjectAssetData.IsValid() && ObjectAssetData.IsRedirector())
			{
				// Check to see if we've already seen this path before; it's possible we've found a redirector loop.
				if ( VisitedRedirectors.Contains(ObjectAssetData.ObjectPath) )
				{
					UE_LOG(LogContentBrowser, Error, TEXT("Redirector Loop Found!"));
					for ( FName Redirector : VisitedRedirectors )
					{
						UE_LOG(LogContentBrowser, Error, TEXT("Redirector: %s"), *Redirector.ToString());
					}

					ObjectAssetData = FAssetData();
					break;
				}

				VisitedRedirectors.Add(ObjectAssetData.ObjectPath);

				// Get the destination object from the meta-data rather than load the redirector object, as 
				// loading a redirector will also load the object it points to, which could cause a large hitch
				FString DestinationObjectPath;
				if (ObjectAssetData.GetTagValue("DestinationObject", DestinationObjectPath))
				{
					ConstructorHelpers::StripObjectClass(DestinationObjectPath);
					ObjectAssetData = AssetRegistryModule.Get().GetAssetByObjectPath(*DestinationObjectPath);
				}
				else
				{
					ObjectAssetData = FAssetData();
				}
			}

			OutNewObjectPath = ObjectAssetData.ObjectPath;
		}

		return OutNewObjectPath != NAME_None && InObjectPath != OutNewObjectPath;
	}
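Following a chain of redirectors can loop, so the fix-up above records every path it visits and bails out as soon as a path repeats. A generic standard-C++ version of that cycle guard; the Resolve callback stands in for the asset-registry lookup and returns the next target, or nothing when the current path is not a redirector.

#include <functional>
#include <optional>
#include <string>
#include <unordered_set>

// Follow Start through Resolve() until it stops redirecting.
// Returns the final destination path, or nullopt if a redirector loop is detected.
std::optional<std::string> FollowRedirects(
    std::string Start,
    const std::function<std::optional<std::string>(const std::string&)>& Resolve)
{
    std::unordered_set<std::string> Visited;
    std::string Current = std::move(Start);

    for (;;)
    {
        if (!Visited.insert(Current).second)
        {
            return std::nullopt; // this path was already visited: loop detected
        }

        std::optional<std::string> Next = Resolve(Current);
        if (!Next)
        {
            return Current; // no further redirection, this is the destination
        }
        Current = std::move(*Next);
    }
}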
Example #21
void FBaseToolkit::BringToolkitToFront()
{
	if( ensure( ToolkitHost.IsValid() ) )
	{
		// Bring the host window to front
		ToolkitHost.Pin()->BringToFront();

		// First, figure out what the foreground tab is in each tab stack we have tabs docked inside of
		TSet< SDockTabStack* > TabStacksWithOurTabsForegrounded;
		{
			for( auto CurSpotIt( ToolkitTabsInSpots.CreateConstIterator() ); CurSpotIt; ++CurSpotIt )
			{
				const auto& TabsForSpot = CurSpotIt.Value();
				for( auto CurTabIt( TabsForSpot.CreateConstIterator() ); CurTabIt; ++CurTabIt )
				{
					const auto& PinnedTab = CurTabIt->Pin();
					if( PinnedTab.IsValid() )
					{
						if( PinnedTab->IsForeground() )
						{
							const auto& TabStack = PinnedTab->GetParentDockTabStack();
							if( TabStack.IsValid() )
							{
								TabStacksWithOurTabsForegrounded.Add( TabStack.Get() );
							}
						}
					}
				}
			}
		}

		// @todo toolkit major: Also draw user's attention when clicked?

		// @todo toolkit major: If any of the tabs are in their own floating windows, these should be brought to front

		// Now, make sure that our tabs are foregrounded in their respective stacks!
		// NOTE: We don't want to push tabs to the front that are in a stack where one of our other tabs is already front-most
		for( auto CurSpotIt( ToolkitTabsInSpots.CreateConstIterator() ); CurSpotIt; ++CurSpotIt )
		{
			const auto& TabsForSpot = CurSpotIt.Value();
			for( auto CurTabIt( TabsForSpot.CreateConstIterator() ); CurTabIt; ++CurTabIt )
			{
				const auto& PinnedTab = CurTabIt->Pin();

				if( PinnedTab.IsValid() )
				{
					const auto& TabStack = PinnedTab->GetParentDockTabStack();
					if( TabStack.IsValid() )
					{
						// Only foreground if we don't already have a tab foregrounded in this tab's stack
						if( !TabStacksWithOurTabsForegrounded.Contains( TabStack.Get() ) )
						{
							PinnedTab->BringToFrontInParent();

							// Take note that we've foregrounded a tab in this stack, no need to do that again
							TabStacksWithOurTabsForegrounded.Add( TabStack.Get() );
						}
					}
					else
					{
						// Just do what we can to foreground ourselves
						PinnedTab->BringToFrontInParent();
					}
				}
			}
		}
		// Tell the toolkit it has been brought to the front - give it a chance to update anything it needs to
		ToolkitBroughtToFront();
	}
}
Example #22
void FEnumEditorUtils::BroadcastChanges(const UUserDefinedEnum* Enum, const TArray<TPair<FName, uint8>>& OldNames, bool bResolveData)
{
	check(NULL != Enum);
	if (bResolveData)
	{
		FArchiveEnumeratorResolver EnumeratorResolver(Enum, OldNames);

		TArray<UClass*> ClassesToCheck;
		for (TObjectIterator<UByteProperty> PropertyIter; PropertyIter; ++PropertyIter)
		{
			const UByteProperty* ByteProperty = *PropertyIter;
			if (ByteProperty && (Enum == ByteProperty->GetIntPropertyEnum()))
			{
				UClass* OwnerClass = ByteProperty->GetOwnerClass();
				if (OwnerClass)
				{
					ClassesToCheck.Add(OwnerClass);
				}
			}
		}

		for (FObjectIterator ObjIter; ObjIter; ++ObjIter)
		{
			for (auto ClassIter = ClassesToCheck.CreateConstIterator(); ClassIter; ++ClassIter)
			{
				if (ObjIter->IsA(*ClassIter))
				{
					ObjIter->Serialize(EnumeratorResolver);
					break;
				}
			}
		}
	}

	struct FNodeValidatorHelper
	{
		static bool IsValid(UK2Node* Node)
		{
			return Node
				&& (NULL != Cast<UEdGraph>(Node->GetOuter()))
				&& !Node->HasAnyFlags(RF_Transient | RF_PendingKill);
		}
	};

	TSet<UBlueprint*> BlueprintsToRefresh;

	{
		// CUSTOM NODES DEPENDENT ON ENUM

		for (TObjectIterator<UK2Node> It(RF_Transient); It; ++It)
		{
			UK2Node* Node = *It;
			INodeDependingOnEnumInterface* NodeDependingOnEnum = Cast<INodeDependingOnEnumInterface>(Node);
			if (FNodeValidatorHelper::IsValid(Node) && NodeDependingOnEnum && (Enum == NodeDependingOnEnum->GetEnum()))
			{
				if (UBlueprint* Blueprint = Node->GetBlueprint())
				{
					if (NodeDependingOnEnum->ShouldBeReconstructedAfterEnumChanged())
					{
						Node->ReconstructNode();
					}
					BlueprintsToRefresh.Add(Blueprint);
				}
			}
		}
	}

	for (TObjectIterator<UEdGraphPin> It(RF_Transient); It; ++It)
	{
		UEdGraphPin* Pin = *It;
		if (Pin && (Enum == Pin->PinType.PinSubCategoryObject.Get()) && (EEdGraphPinDirection::EGPD_Input == Pin->Direction))
		{
			UK2Node* Node = Cast<UK2Node>(Pin->GetOuter());
			if (FNodeValidatorHelper::IsValid(Node))
			{
				if (UBlueprint* Blueprint = Node->GetBlueprint())
				{
					if (INDEX_NONE == Enum->FindEnumIndex(*Pin->DefaultValue))
					{
						Pin->Modify();
						if (Blueprint->BlueprintType == BPTYPE_Interface)
						{
							Pin->DefaultValue = Enum->GetEnumName(0);
						}
						else
						{
							Pin->DefaultValue = FEnumEditorUtilsHelper::InvalidName();
						}
						Node->PinDefaultValueChanged(Pin);
						BlueprintsToRefresh.Add(Blueprint);
					}
				}
			}
		}
	}

	for (auto It = BlueprintsToRefresh.CreateIterator(); It; ++It)
	{
		FBlueprintEditorUtils::MarkBlueprintAsModified(*It);
		(*It)->BroadcastChanged();
	}

	FEnumEditorManager::Get().PostChange(Enum, EEnumEditorChangeInfo::Changed);
}
int32 UDerivedDataCacheCommandlet::Main( const FString& Params )
{
	TArray<FString> Tokens, Switches;
	ParseCommandLine(*Params, Tokens, Switches);

	bool bFillCache = Switches.Contains("FILL");   // do the equivalent of a "loadpackage -all" to fill the DDC
	bool bStartupOnly = Switches.Contains("STARTUPONLY");   // regardless of any other flags, do not iterate packages

	// Subsets for parallel processing
	uint32 SubsetMod = 0;
	uint32 SubsetTarget = MAX_uint32;
	FParse::Value(*Params, TEXT("SubsetMod="), SubsetMod);
	FParse::Value(*Params, TEXT("SubsetTarget="), SubsetTarget);
	bool bDoSubset = SubsetMod > 0 && SubsetTarget < SubsetMod;
	double FindProcessedPackagesTime = 0.0;
	double GCTime = 0.0;

	if (!bStartupOnly && bFillCache)
	{
		FCoreDelegates::PackageCreatedForLoad.AddUObject(this, &UDerivedDataCacheCommandlet::MaybeMarkPackageAsAlreadyLoaded);

		TArray<FString> FilesInPath;

		Tokens.Empty(2);
		Tokens.Add(FString("*") + FPackageName::GetAssetPackageExtension());
		Tokens.Add(FString("*") + FPackageName::GetMapPackageExtension());
		
		uint8 PackageFilter = NORMALIZE_DefaultFlags;
		if ( Switches.Contains(TEXT("MAPSONLY")) )
		{
			PackageFilter |= NORMALIZE_ExcludeContentPackages;
		}

		if ( !Switches.Contains(TEXT("DEV")) )
		{
			PackageFilter |= NORMALIZE_ExcludeDeveloperPackages;
		}

		// assume the first token is the map wildcard/pathname
		TArray<FString> Unused;
		for ( int32 TokenIndex = 0; TokenIndex < Tokens.Num(); TokenIndex++ )
		{
			TArray<FString> TokenFiles;
			if ( !NormalizePackageNames( Unused, TokenFiles, Tokens[TokenIndex], PackageFilter) )
			{
				UE_LOG(LogDerivedDataCacheCommandlet, Display, TEXT("No packages found for parameter %i: '%s'"), TokenIndex, *Tokens[TokenIndex]);
				continue;
			}

			FilesInPath += TokenFiles;
		}

		if ( FilesInPath.Num() == 0 )
		{
			UE_LOG(LogDerivedDataCacheCommandlet, Warning, TEXT("No files found."));
		}

		ITargetPlatformManagerModule* TPM = GetTargetPlatformManager();
		const TArray<ITargetPlatform*>& Platforms = TPM->GetActiveTargetPlatforms();

		for (int32 Index = 0; Index < Platforms.Num(); Index++)
		{
			TArray<FName> DesiredShaderFormats;
			Platforms[Index]->GetShaderFormats(DesiredShaderFormats);

			for (int32 FormatIndex = 0; FormatIndex < DesiredShaderFormats.Num(); FormatIndex++)
			{
				const EShaderPlatform TargetPlatform = ShaderFormatToLegacyShaderPlatform(DesiredShaderFormats[FormatIndex]);
				// Kick off global shader compiles for each target platform
				GetGlobalShaderMap(TargetPlatform);
			}
		}

		const int32 GCInterval = 100;
		int32 NumProcessedSinceLastGC = GCInterval;
		bool bLastPackageWasMap = true; // 'true' is to prime the ProcessedPackages list
		TSet<FString> ProcessedPackages;

		UE_LOG(LogDerivedDataCacheCommandlet, Display, TEXT("%d packages to load..."), FilesInPath.Num());

		for( int32 FileIndex = FilesInPath.Num() - 1; ; FileIndex-- )
		{
			// Keep track of which packages have already been processed along with the map.
			if (NumProcessedSinceLastGC >= GCInterval || bLastPackageWasMap || FileIndex == FilesInPath.Num() - 1)
			{
				const double FindProcessedPackagesStartTime = FPlatformTime::Seconds();
				TArray<UObject *> ObjectsInOuter;
				GetObjectsWithOuter(NULL, ObjectsInOuter, false);
				for( int32 Index = 0; Index < ObjectsInOuter.Num(); Index++ )
				{
					UPackage* Pkg = Cast<UPackage>(ObjectsInOuter[Index]);
					if (!Pkg)
					{
						continue;
					}
					FString Filename;
					if (FPackageName::DoesPackageExist(Pkg->GetName(), NULL, &Filename))
					{
						if (!ProcessedPackages.Contains(Filename))
						{
							ProcessedPackages.Add(Filename);

							PackagesToNotReload.Add(Pkg->GetName());
							Pkg->PackageFlags |= PKG_ReloadingForCooker;
							{
								TArray<UObject *> ObjectsInPackage;
								GetObjectsWithOuter(Pkg, ObjectsInPackage, true);
								for( int32 IndexPackage = 0; IndexPackage < ObjectsInPackage.Num(); IndexPackage++ )
								{
									ObjectsInPackage[IndexPackage]->CookerWillNeverCookAgain();
								}
							}
						}
					}
				}
				FindProcessedPackagesTime += FPlatformTime::Seconds() - FindProcessedPackagesStartTime;
			}

			if (NumProcessedSinceLastGC >= GCInterval || FileIndex < 0 || bLastPackageWasMap)
			{
				const double StartGCTime = FPlatformTime::Seconds();
				if (NumProcessedSinceLastGC >= GCInterval || FileIndex < 0)
				{
					UE_LOG(LogDerivedDataCacheCommandlet, Display, TEXT("GC (Full)..."));
					CollectGarbage( RF_Native );
					NumProcessedSinceLastGC = 0;
				}
				else
				{
					UE_LOG(LogDerivedDataCacheCommandlet, Display, TEXT("GC..."));
					CollectGarbage( RF_Native | RF_Standalone );
				}				
				GCTime += FPlatformTime::Seconds() - StartGCTime;

				bLastPackageWasMap = false;
			}
			if (FileIndex < 0)
			{
				break;
			}
			const FString& Filename = FilesInPath[FileIndex];
			if (ProcessedPackages.Contains(Filename))
			{
				continue;
			}
			if (bDoSubset)
			{
				const FString& PackageName = FPackageName::PackageFromPath(*Filename);
				if (FCrc::StrCrc_DEPRECATED(*PackageName.ToUpper()) % SubsetMod != SubsetTarget)
				{
					continue;
				}
			}

			UE_LOG(LogDerivedDataCacheCommandlet, Display, TEXT("Loading (%d) %s"), FilesInPath.Num() - FileIndex, *Filename );

			UPackage* Package = LoadPackage( NULL, *Filename, LOAD_None );
			if( Package == NULL )
			{
				UE_LOG(LogDerivedDataCacheCommandlet, Error, TEXT("Error loading %s!"), *Filename );
			}
			else
			{
				bLastPackageWasMap = Package->ContainsMap();
				NumProcessedSinceLastGC++;
			}
		}
	}

	IConsoleManager::Get().ProcessUserConsoleInput(TEXT("Tex.DerivedDataTimings"), *GWarn, NULL);
	UE_LOG(LogDerivedDataCacheCommandlet, Display, TEXT("Waiting for shaders to finish."));
	GShaderCompilingManager->FinishAllCompilation();
	UE_LOG(LogDerivedDataCacheCommandlet, Display, TEXT("Done waiting for shaders to finish."));
	GetDerivedDataCacheRef().WaitForQuiescence(true);

	UE_LOG(LogDerivedDataCacheCommandlet, Display, TEXT("%.2lfs spent looking for processed packages, %.2lfs spent on GC."), FindProcessedPackagesTime, GCTime);

	return 0;
}
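The SubsetMod/SubsetTarget switches above let several commandlet instances split one package list deterministically: each instance hashes the package name and keeps only the names that land in its bucket. A hedged sketch of that partitioning rule using std::hash in place of the engine's CRC (so bucket assignments differ from the engine's, but the splitting property is the same):

#include <cstddef>
#include <cstdint>
#include <functional>
#include <string>

// Decide whether this worker (bucket SubsetTarget out of SubsetMod) should process the
// given package. Every worker applies the same rule, so each package goes to exactly one.
bool BelongsToSubset(const std::string& PackageName, uint32_t SubsetMod, uint32_t SubsetTarget)
{
    if (SubsetMod == 0 || SubsetTarget >= SubsetMod)
    {
        return true; // subset processing disabled: take everything
    }
    const std::size_t Hash = std::hash<std::string>{}(PackageName);
    return (Hash % SubsetMod) == SubsetTarget;
}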
Example #24
/**
 * Helper method designed to perform the necessary preparations required to complete an automated editor build
 *
 * @param	BuildSettings		Build settings that will be used for the editor build
 * @param	OutPkgsToSubmit		Set of packages that need to be saved and submitted after a successful build
 * @param	OutErrorMessages	Errors that resulted from the preparation (may or may not force the build to stop, depending on build settings)
 *
 * @return	true if the preparation was successful and the build should continue; false if the preparation failed and the build should be aborted
 */
bool FEditorBuildUtils::PrepForAutomatedBuild( const FEditorAutomatedBuildSettings& BuildSettings, TSet<UPackage*>& OutPkgsToSubmit, FText& OutErrorMessages )
{
	// Assume the preparation is successful to start
	bool bBuildSuccessful = true;

	OutPkgsToSubmit.Empty();

	ISourceControlProvider& SourceControlProvider = ISourceControlModule::Get().GetProvider();

	// Source control is required for the automated build, so ensure that SCC support is compiled in and
	// that the server is enabled and available for use
	if ( !ISourceControlModule::Get().IsEnabled() || !SourceControlProvider.IsAvailable() )
	{
		bBuildSuccessful = false;
		LogErrorMessage( NSLOCTEXT("UnrealEd", "AutomatedBuild_Error_SCCError", "Cannot connect to source control; automated build aborted."), OutErrorMessages );
	}

	// Empty changelists aren't allowed; abort the build if one wasn't provided
	if ( bBuildSuccessful && BuildSettings.ChangeDescription.Len() == 0 )
	{
		bBuildSuccessful = false;
		LogErrorMessage( NSLOCTEXT("UnrealEd", "AutomatedBuild_Error_NoCLDesc", "A changelist description must be provided; automated build aborted."), OutErrorMessages );
	}

	TArray<UPackage*> PreviouslySavedWorldPackages;
	TArray<UPackage*> PackagesToCheckout;
	TArray<ULevel*> LevelsToSave;

	if ( bBuildSuccessful )
	{
		TArray<UWorld*> AllWorlds;
		FString UnsavedWorlds;
		EditorLevelUtils::GetWorlds( GWorld, AllWorlds, true );

		// Check all of the worlds that will be built to ensure they have been saved before and have a filename
		// associated with them. If they don't, they won't be able to be submitted to source control.
		FString CurWorldPkgFileName;
		for ( TArray<UWorld*>::TConstIterator WorldIter( AllWorlds ); WorldIter; ++WorldIter )
		{
			const UWorld* CurWorld = *WorldIter;
			check( CurWorld );

			UPackage* CurWorldPackage = CurWorld->GetOutermost();
			check( CurWorldPackage );

			if ( FPackageName::DoesPackageExist( CurWorldPackage->GetName(), NULL, &CurWorldPkgFileName ) )
			{
				PreviouslySavedWorldPackages.AddUnique( CurWorldPackage );

				// Add all packages which have a corresponding file to the set of packages to submit for now. As preparation continues
				// any packages that can't be submitted due to some error will be removed.
				OutPkgsToSubmit.Add( CurWorldPackage );
			}
			else
			{
				UnsavedWorlds += FString::Printf( TEXT("%s\n"), *CurWorldPackage->GetName() );
			}
		}

		// If any of the worlds haven't been saved before, process the build setting's behavior to see if the build
		// should proceed or not
		if ( UnsavedWorlds.Len() > 0 )
		{
			bBuildSuccessful = ProcessAutomatedBuildBehavior( BuildSettings.NewMapBehavior, 
				FText::Format( NSLOCTEXT("UnrealEd", "AutomatedBuild_Error_UnsavedMap", "The following levels have never been saved before and cannot be submitted:\n\n{0}\n\nAttempt to continue the build?"), FText::FromString(UnsavedWorlds) ),
				OutErrorMessages );
		}
	}

	// Load the asset tools module
	FAssetToolsModule& AssetToolsModule = FModuleManager::GetModuleChecked<FAssetToolsModule>("AssetTools");

	if ( bBuildSuccessful )
	{
		// Update the source control status of any relevant world packages in order to determine which need to be
		// checked out, added to the depot, etc.
		SourceControlProvider.Execute( ISourceControlOperation::Create<FUpdateStatus>(), SourceControlHelpers::PackageFilenames(PreviouslySavedWorldPackages) );

		FString PkgsThatCantBeCheckedOut;
		for ( TArray<UPackage*>::TConstIterator PkgIter( PreviouslySavedWorldPackages ); PkgIter; ++PkgIter )
		{
			UPackage* CurPackage = *PkgIter;
			const FString CurPkgName = CurPackage->GetName();
			FSourceControlStatePtr SourceControlState = SourceControlProvider.GetState(CurPackage, EStateCacheUsage::ForceUpdate);

			if( !SourceControlState.IsValid() ||
				(!SourceControlState->IsSourceControlled() &&
				 !SourceControlState->IsUnknown() &&
				 !SourceControlState->IsIgnored()))
			{
				FString CurFilename;
				if ( FPackageName::DoesPackageExist( CurPkgName, NULL, &CurFilename ) )
				{
					if ( IFileManager::Get().IsReadOnly( *CurFilename ) )
					{
						PkgsThatCantBeCheckedOut += FString::Printf( TEXT("%s\n"), *CurPkgName );
						OutPkgsToSubmit.Remove( CurPackage );
					}
				}
			}
			else if(SourceControlState->CanCheckout())
			{
				PackagesToCheckout.Add( CurPackage );
			}
			else
			{
				PkgsThatCantBeCheckedOut += FString::Printf( TEXT("%s\n"), *CurPkgName );
				OutPkgsToSubmit.Remove( CurPackage );
			}
		}

		// If any of the packages can't be checked out or are read-only, process the build setting's behavior to see if the build
		// should proceed or not
		if ( PkgsThatCantBeCheckedOut.Len() > 0 )
		{
			bBuildSuccessful = ProcessAutomatedBuildBehavior( BuildSettings.UnableToCheckoutFilesBehavior,
				FText::Format( NSLOCTEXT("UnrealEd", "AutomatedBuild_Error_UnsaveableFiles", "The following assets cannot be checked out of source control (or are read-only) and cannot be submitted:\n\n{0}\n\nAttempt to continue the build?"), FText::FromString(PkgsThatCantBeCheckedOut) ),
				OutErrorMessages );
		}
	}

	if ( bBuildSuccessful )
	{
		// Check out all of the packages from source control that need to be checked out
		if ( PackagesToCheckout.Num() > 0 )
		{
			TArray<FString> PackageFilenames = SourceControlHelpers::PackageFilenames(PackagesToCheckout);
			SourceControlProvider.Execute( ISourceControlOperation::Create<FCheckOut>(), PackageFilenames );

			// Update the package status of the packages that were just checked out to confirm that they
			// were actually checked out correctly
			SourceControlProvider.Execute(  ISourceControlOperation::Create<FUpdateStatus>(), PackageFilenames );

			FString FilesThatFailedCheckout;
			for ( TArray<UPackage*>::TConstIterator CheckedOutIter( PackagesToCheckout ); CheckedOutIter; ++CheckedOutIter )
			{
				UPackage* CurPkg = *CheckedOutIter;
				FSourceControlStatePtr SourceControlState = SourceControlProvider.GetState(CurPkg, EStateCacheUsage::ForceUpdate);

				// If any of the packages failed to check out, remove them from the set of packages to submit
				if ( !SourceControlState.IsValid() || (!SourceControlState->IsCheckedOut() && !SourceControlState->IsAdded() && SourceControlState->IsSourceControlled()) )
				{
					FilesThatFailedCheckout += FString::Printf( TEXT("%s\n"), *CurPkg->GetName() );
					OutPkgsToSubmit.Remove( CurPkg );
				}
			}

			// If any of the packages failed to check out correctly, process the build setting's behavior to see if the build
			// should proceed or not
			if ( FilesThatFailedCheckout.Len() > 0 )
			{
				bBuildSuccessful = ProcessAutomatedBuildBehavior( BuildSettings.UnableToCheckoutFilesBehavior,
					FText::Format( NSLOCTEXT("UnrealEd", "AutomatedBuild_Error_FilesFailedCheckout", "The following assets failed to checkout of source control and cannot be submitted:\n{0}\n\nAttempt to continue the build?"), FText::FromString(FilesThatFailedCheckout)),
					OutErrorMessages );
			}
		}
	}

	// Verify that there are still packages left to submit. If there aren't, abort the build and warn the user.
	if ( bBuildSuccessful )
	{
		bBuildSuccessful = OutPkgsToSubmit.Num() > 0;
		if ( !bBuildSuccessful )
		{
			LogErrorMessage( NSLOCTEXT("UnrealEd", "AutomatedBuild_Error_NoValidLevels", "None of the current levels are valid for submission; automated build aborted."), OutErrorMessages );
		}
	}

	// If the build is safe to commence, force all of the levels visible to make sure the build operates correctly
	if ( bBuildSuccessful )
	{
		bool bVisibilityToggled = false;
		if ( !FLevelUtils::IsLevelVisible( GWorld->PersistentLevel ) )
		{
			EditorLevelUtils::SetLevelVisibility( GWorld->PersistentLevel, true, false );
			bVisibilityToggled = true;
		}
		for ( TArray<ULevelStreaming*>::TConstIterator LevelIter( GWorld->StreamingLevels ); LevelIter; ++LevelIter )
		{
			ULevelStreaming* CurStreamingLevel = *LevelIter;
			if ( CurStreamingLevel && !FLevelUtils::IsLevelVisible( CurStreamingLevel ) )
			{
				CurStreamingLevel->bShouldBeVisibleInEditor = true;
				bVisibilityToggled = true;
			}
		}
		if ( bVisibilityToggled )
		{
			GWorld->FlushLevelStreaming();
		}
	}

	return bBuildSuccessful;
}
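// Editorial sketch (not part of the original source): the checkout-then-verify
// pattern used above, reduced to a single package. 'CheckOutAndVerifyPackage' is a
// hypothetical helper; it assumes the provider is obtained via
// ISourceControlModule::Get().GetProvider(), and otherwise mirrors the calls above.
static bool CheckOutAndVerifyPackage(UPackage* Package)
{
	ISourceControlProvider& Provider = ISourceControlModule::Get().GetProvider();

	TArray<UPackage*> Pkgs;
	Pkgs.Add(Package);
	TArray<FString> Files = SourceControlHelpers::PackageFilenames(Pkgs);

	// Request the checkout, then force a status update to confirm it really succeeded.
	Provider.Execute(ISourceControlOperation::Create<FCheckOut>(), Files);
	Provider.Execute(ISourceControlOperation::Create<FUpdateStatus>(), Files);

	FSourceControlStatePtr State = Provider.GetState(Package, EStateCacheUsage::ForceUpdate);
	return State.IsValid() && (State->IsCheckedOut() || State->IsAdded());
}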
void USimpleConstructionScript::ExecuteScriptOnActor(AActor* Actor, const FTransform& RootTransform, bool bIsDefaultTransform)
{
	if(RootNodes.Num() > 0)
	{
		TSet<UActorComponent*> AllComponentsCreatedBySCS;
		TInlineComponentArray<UActorComponent*> InstancedComponents;
		for(auto NodeIt = RootNodes.CreateIterator(); NodeIt; ++NodeIt)
		{
			USCS_Node* RootNode = *NodeIt;
			if(RootNode != nullptr)
			{
				// Get all native scene components
				TInlineComponentArray<USceneComponent*> Components;
				Actor->GetComponents(Components);
				for (int32 Index = Components.Num()-1; Index >= 0; --Index)
				{
					USceneComponent* SceneComponent = Components[Index];
					if (SceneComponent->CreationMethod == EComponentCreationMethod::Instance)
					{
						Components.RemoveAt(Index);
					}
					else
					{
						// Handle the native sub-component of an instance component case
						USceneComponent* ParentSceneComponent = SceneComponent->GetTypedOuter<USceneComponent>();
						if (ParentSceneComponent && ParentSceneComponent->CreationMethod == EComponentCreationMethod::Instance)
						{
							Components.RemoveAt(Index);
						}
					}
				}

				// Get the native root component; if it's not set, the first native scene component will be used as root. This matches what's done in the SCS editor.
				USceneComponent* RootComponent = Actor->GetRootComponent();
				if(RootComponent == nullptr && Components.Num() > 0)
				{
					RootComponent = Components[0];
				}

				// If the root node specifies that it has a parent
				USceneComponent* ParentComponent = nullptr;
				if(RootNode->ParentComponentOrVariableName != NAME_None)
				{
					// Get the Actor class object
					UClass* ActorClass = Actor->GetClass();
					check(ActorClass != nullptr);

					// If the root node is parented to a "native" component (i.e. in the 'Components' array)
					if(RootNode->bIsParentComponentNative)
					{
						for(int32 CompIndex = 0; CompIndex < Components.Num(); ++CompIndex)
						{
							// If we found a match, remember the index
							if(Components[CompIndex]->GetFName() == RootNode->ParentComponentOrVariableName)
							{
								ParentComponent = Components[CompIndex];
								break;
							}
						}
					}
					else
					{
						// In the non-native case, the SCS node's variable name property is used as the parent identifier
						UObjectPropertyBase* Property = FindField<UObjectPropertyBase>(ActorClass, RootNode->ParentComponentOrVariableName);
						if(Property != nullptr)
						{
							// If we found a matching property, grab its value and use that as the parent for this node
							ParentComponent = Cast<USceneComponent>(Property->GetObjectPropertyValue_InContainer(Actor));
						}
					}
				}

				// Create the new component instance and any child components it may have
				UActorComponent* InstancedComponent = RootNode->ExecuteNodeOnActor(Actor, ParentComponent != nullptr ? ParentComponent : RootComponent, &RootTransform, bIsDefaultTransform);
				if(InstancedComponent != nullptr)
				{
					InstancedComponents.Add(InstancedComponent);
				}

				// get list of every component SCS created, in case some of them aren't in the attachment hierarchy any more (e.g. rigid bodies)
				TInlineComponentArray<USceneComponent*> ComponentsAfterSCS;
				Actor->GetComponents(ComponentsAfterSCS);
				for (USceneComponent* C : ComponentsAfterSCS)
				{
					if (Components.Contains(C) == false)
					{
						AllComponentsCreatedBySCS.Add(C);
					}
				}
			}
		}

		// Register all instanced SCS components once SCS execution has finished; sorted in order to register the scene component hierarchy first, followed by the remaining actor components (in case they happen to depend on something in the scene hierarchy)
		InstancedComponents.Sort([](const UActorComponent& A, const UActorComponent& B) { return A.IsA<USceneComponent>(); });
		for(auto InstancedComponent : InstancedComponents)
		{
			RegisterInstancedComponent(InstancedComponent);
		}

		// now that the instanced components in the attachment hierarchy are registered, register any other components that SCS made but aren't in the attachment hierarchy for whatever reason.
		for (auto C : AllComponentsCreatedBySCS)
		{
			if (C->IsRegistered() == false)
			{
				C->RegisterComponent();
			}
		}
	}
	else if(Actor->GetRootComponent() == NULL) // Must have a root component at the end of SCS, so if we don't have one already (from base class), create a SceneComponent now
	{
		USceneComponent* SceneComp = NewObject<USceneComponent>(Actor);
		SceneComp->SetFlags(RF_Transactional);
		SceneComp->CreationMethod = EComponentCreationMethod::SimpleConstructionScript;
		SceneComp->SetWorldTransform(RootTransform);
		Actor->SetRootComponent(SceneComp);
		SceneComp->RegisterComponent();
	}
}
Example #26
bool EngineUtils::FindOrLoadAssetsByPath(const FString& Path, TArray<UObject*>& OutAssets)
{
	if ( !FPackageName::IsValidLongPackageName(Path, true) )
	{
		return false;
	}

	// Convert the package path to a filename with no extension (directory)
	const FString FilePath = FPackageName::LongPackageNameToFilename(Path);

	// Gather the package files in that directory and subdirectories
	TArray<FString> Filenames;
	FPackageName::FindPackagesInDirectory(Filenames, FilePath);

	// Cull out map files
	for (int32 FilenameIdx = Filenames.Num() - 1; FilenameIdx >= 0; --FilenameIdx)
	{
		const FString Extension = FPaths::GetExtension(Filenames[FilenameIdx], true);
		if ( Extension == FPackageName::GetMapPackageExtension() )
		{
			Filenames.RemoveAt(FilenameIdx);
		}
	}

	// Load packages or find existing ones and fully load them
	TSet<UPackage*> Packages;
	for (int32 FileIdx = 0; FileIdx < Filenames.Num(); ++FileIdx)
	{
		const FString& Filename = Filenames[FileIdx];

		UPackage* Package = FindPackage(NULL, *FPackageName::FilenameToLongPackageName(Filename));

		if (Package)
		{
			Package->FullyLoad();
		}
		else
		{
			Package = LoadPackage(NULL, *Filename, LOAD_None);
		}

		if (Package)
		{
			Packages.Add(Package);
		}
	}

	// If any packages were successfully loaded, find all assets that were in the packages and add them to OutAssets
	if ( Packages.Num() > 0 )
	{
		for (FObjectIterator ObjIt; ObjIt; ++ObjIt)
		{
			if ( Packages.Contains(ObjIt->GetOutermost()) && ObjIt->IsAsset() )
			{
				OutAssets.Add(*ObjIt);
			}
		}
	}

	return true;
}
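// Editorial usage sketch (not from the original source): load every non-map asset
// under a content folder and log how many were found. The "/Game/Props" path is a
// hypothetical example; any valid long package path works.
static void LoadPropAssetsExample()
{
	TArray<UObject*> FoundAssets;
	if (EngineUtils::FindOrLoadAssetsByPath(TEXT("/Game/Props"), FoundAssets))
	{
		UE_LOG(LogTemp, Log, TEXT("FindOrLoadAssetsByPath found %d assets"), FoundAssets.Num());
	}
}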
Example #27
void SDetailsViewBase::QueryCustomDetailLayout(FDetailLayoutBuilderImpl& CustomDetailLayout)
{
	FPropertyEditorModule& ParentPlugin = FModuleManager::GetModuleChecked<FPropertyEditorModule>("PropertyEditor");

	// Get the registered classes that customize details
	FCustomDetailLayoutNameMap& GlobalCustomLayoutNameMap = ParentPlugin.ClassNameToDetailLayoutNameMap;

	UStruct* BaseStruct = GetBaseStruct();

	// All the current customization instances need to be deleted when it is safe
	CustomizationClassInstancesPendingDelete = CustomizationClassInstances;

	CustomizationClassInstances.Empty();

	//Ask for generic details not specific to an object being viewed 
	if (GenericLayoutDelegate.IsBound())
	{
		// Create a new instance of the custom detail layout for the current class
		TSharedRef<IDetailCustomization> CustomizationInstance = GenericLayoutDelegate.Execute();

		// Ask for details immediately
		CustomizationInstance->CustomizeDetails(CustomDetailLayout);

		// Save the instance from destruction until we refresh
		CustomizationClassInstances.Add(CustomizationInstance);
	}


	// Sort them by query order.  @todo not good enough
	struct FCompareFDetailLayoutCallback
	{
		FORCEINLINE bool operator()(const FDetailLayoutCallback& A, const FDetailLayoutCallback& B) const
		{
			return A.Order < B.Order;
		}
	};

	TMap< TWeakObjectPtr<UStruct>, FDetailLayoutCallback*> FinalCallbackMap;

	for (auto ClassIt = ClassesWithProperties.CreateConstIterator(); ClassIt; ++ClassIt)
	{
		// Check the instanced map first
		FDetailLayoutCallback* Callback = InstancedClassToDetailLayoutMap.Find(*ClassIt);

		if (!Callback)
		{
			// callback wasn't found in the per instance map, try the global instances instead
			Callback = GlobalCustomLayoutNameMap.Find((*ClassIt)->GetFName());
		}

		if (Callback)
		{
			FinalCallbackMap.Add(*ClassIt, Callback);
		}
	}


	FinalCallbackMap.ValueSort(FCompareFDetailLayoutCallback());

	TSet<UStruct*> QueriedClasses;

	if (FinalCallbackMap.Num() > 0)
	{
		// Ask each class that we have properties for to customize its layout
		for (auto LayoutIt(FinalCallbackMap.CreateConstIterator()); LayoutIt; ++LayoutIt)
		{
			const TWeakObjectPtr<UStruct> WeakClass = LayoutIt.Key();

			if (WeakClass.IsValid())
			{
				UStruct* Class = WeakClass.Get();

				FClassInstanceToPropertyMap& InstancedPropertyMap = ClassToPropertyMap.FindChecked(Class->GetFName());
				for (FClassInstanceToPropertyMap::TIterator InstanceIt(InstancedPropertyMap); InstanceIt; ++InstanceIt)
				{
					FName Key = InstanceIt.Key();
					CustomDetailLayout.SetCurrentCustomizationClass(CastChecked<UClass>(Class), Key);

					const FOnGetDetailCustomizationInstance& DetailDelegate = LayoutIt.Value()->DetailLayoutDelegate;

					if (DetailDelegate.IsBound())
					{
						QueriedClasses.Add(Class);

						// Create a new instance of the custom detail layout for the current class
						TSharedRef<IDetailCustomization> CustomizationInstance = DetailDelegate.Execute();

						// Ask for details immediately
						CustomizationInstance->CustomizeDetails(CustomDetailLayout);

						// Save the instance from destruction until we refresh
						CustomizationClassInstances.Add(CustomizationInstance);
					}
				}
			}
		}
	}

	// Ensure that the base class and its parents are always queried
	TSet<UStruct*> ParentClassesToQuery;
	if (BaseStruct && !QueriedClasses.Contains(BaseStruct))
	{
		ParentClassesToQuery.Add(BaseStruct);
		ClassesWithProperties.Add(BaseStruct);
	}

	// Find base classes of queried classes that were not queried and add them to the query list
	// this supports cases where a parent class has no properties but still wants to add customization
	for (auto QueriedClassIt = ClassesWithProperties.CreateConstIterator(); QueriedClassIt; ++QueriedClassIt)
	{
		UStruct* ParentStruct = (*QueriedClassIt)->GetSuperStruct();

		while (ParentStruct && ParentStruct->IsA(UClass::StaticClass()) && !QueriedClasses.Contains(ParentStruct) && !ClassesWithProperties.Contains(ParentStruct))
		{
			ParentClassesToQuery.Add(ParentStruct);
			ParentStruct = ParentStruct->GetSuperStruct();

		}
	}

	// Query extra base classes
	for (auto ParentIt = ParentClassesToQuery.CreateConstIterator(); ParentIt; ++ParentIt)
	{
		if (Cast<UClass>(*ParentIt))
		{
			QueryLayoutForClass(CustomDetailLayout, *ParentIt);
		}
	}
}
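// Editorial aside (not from the original source): FinalCallbackMap.ValueSort(...)
// above orders the map in place by its values. A minimal, self-contained example of
// the same TMap::ValueSort call, here with a lambda comparator instead of a functor:
static void ValueSortExample()
{
	TMap<FName, int32> Order;
	Order.Add(TEXT("Second"), 2);
	Order.Add(TEXT("First"), 1);

	// After this call, iteration visits "First" (1) before "Second" (2).
	Order.ValueSort([](const int32& A, const int32& B) { return A < B; });
}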
bool UBlueprintThumbnailRenderer::CanVisualizeBlueprint(class UBlueprint* Blueprint)
{
	if ( ThumbnailScene == nullptr )
	{
		ThumbnailScene = new FBlueprintThumbnailScene();
	}

	// Only visualize actor based blueprints
	if ( Blueprint->GeneratedClass && Blueprint->GeneratedClass->IsChildOf(AActor::StaticClass()) )
	{
		// Try to find any visible primitive components in the native class' CDO
		AActor* CDO = Blueprint->GeneratedClass->GetDefaultObject<AActor>();

		TArray<UActorComponent*> Components;
		CDO->GetComponents(Components);

		for ( auto CompIt = Components.CreateConstIterator(); CompIt; ++CompIt )
		{
			if ( ThumbnailScene->IsValidComponentForVisualization(*CompIt) )
			{
				return true;
			}
		}

		// Try to find any visible primitive components in the simple construction script
		// Do this for all parent blueprint generated classes as well
		UBlueprint* BlueprintToHarvestComponents = Blueprint;
		TSet<UBlueprint*> AllVisitedBlueprints;
		while ( BlueprintToHarvestComponents )
		{
			AllVisitedBlueprints.Add(BlueprintToHarvestComponents);

			if ( BlueprintToHarvestComponents->SimpleConstructionScript )
			{
				TArray<USCS_Node*> AllNodes = BlueprintToHarvestComponents->SimpleConstructionScript->GetAllNodes();

				for ( auto NodeIt = AllNodes.CreateConstIterator(); NodeIt; ++NodeIt )
				{
					if ( ThumbnailScene->IsValidComponentForVisualization((*NodeIt)->ComponentTemplate) )
					{
						return true;
					}
				}
			}

			UClass* ParentClass = BlueprintToHarvestComponents->ParentClass;
			BlueprintToHarvestComponents = nullptr;

			// If the parent class was a blueprint generated class, check its simple construction script components as well
			if ( ParentClass )
			{
				UBlueprint* ParentBlueprint = Cast<UBlueprint>(ParentClass->ClassGeneratedBy);

				// Also make sure we haven't visited the blueprint already. This would only happen if there was a loop of parent classes.
				if ( ParentBlueprint && !AllVisitedBlueprints.Contains(ParentBlueprint) )
				{
					BlueprintToHarvestComponents = ParentBlueprint;
				}
			}
		}
	}

	return false;
}
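// Editorial sketch (not from the original source): the same parent-walk pattern used
// above, extracted into a hypothetical helper that collects the whole Blueprint
// inheritance chain while guarding against parent-class cycles with a visited set.
static void CollectBlueprintChain(UBlueprint* Blueprint, TArray<UBlueprint*>& OutChain)
{
	TSet<UBlueprint*> Visited;
	while (Blueprint && !Visited.Contains(Blueprint))
	{
		Visited.Add(Blueprint);
		OutChain.Add(Blueprint);

		// Step to the parent Blueprint, if the parent class was Blueprint-generated.
		UClass* ParentClass = Blueprint->ParentClass;
		Blueprint = ParentClass ? Cast<UBlueprint>(ParentClass->ClassGeneratedBy) : nullptr;
	}
}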
Example #29
/**
 * Responds to Right Click input.
 * Picks up stacks of items at once using both 'hands'.
 */
void AMyCharacter::GrabWithTwoHands()
{
	/**
		Section to treat unacceptable function calls
	*/

	//If something is already held in the hands
	if (TwoHandSlot.Num() || LeftHandSlot || RightHandSlot)
	{
		PopUp.Broadcast(FString(TEXT("Already holding something!")));
		return;
	}

	//If no object is blocking the hit
	if (!HitObject.IsValidBlockingHit())
	{
		//PopUpMessage = ActionNotValid;
		PopUp.Broadcast(FString(TEXT("Action not valid!")));
		return;
	}

	//No actor focused
	if (!HighlightedActor)
	{
		PopUp.Broadcast(FString(TEXT("Nothing to pick")));
		return;
	}

	//If the hit actor is not stackable, it can't be picked up with two hands
	if (!HitObject.GetActor()->ActorHasTag(FName(TEXT("Stackable"))))
	{
		PopUp.Broadcast(FString(TEXT("Can't pick that with two hands")));
		return;
	}

	//If object is too far away
	if (HitObject.Distance > MaxGraspLength)
	{
		PopUp.Broadcast(FString(TEXT("You need to get closer!")));
		return;
	}

	//Local variables to perform computation
	TSet<AActor*> LocalStackVariable = GetStack(HighlightedActor);
	TSet<AActor*> ReturnStack;

	//Making sure the stack is pickable by not having any elements on top (e.g. Spoon, Knife)
	if (HasAnyOnTop(LocalStackVariable[FSetElementId::FromInteger(LocalStackVariable.Num() - 1)]))
	{
		PopUp.Broadcast(FString(TEXT("Make sure no item is on top!")));
		return;
	}

	if (HighlightedActor->ActorHasTag(FName(TEXT("Stackable"))))
	{
		int FirstIndex = 0;
		if (LocalStackVariable.Num() > StackGrabLimit)
		{
			FirstIndex = LocalStackVariable.Num() - StackGrabLimit;
		}

		for (int i = FirstIndex; i < LocalStackVariable.Num(); i++)
		{
			GetStaticMesh(LocalStackVariable[FSetElementId::FromInteger(i)])->SetEnableGravity(false);
			GetStaticMesh(LocalStackVariable[FSetElementId::FromInteger(i)])->SetCollisionEnabled(ECollisionEnabled::NoCollision);
			ReturnStack.Add(LocalStackVariable[FSetElementId::FromInteger(i)]);
		}
		TwoHandSlot = ReturnStack;
		SelectedObject = LocalStackVariable[FSetElementId::FromInteger(LocalStackVariable.Num()-1)];
		GetStaticMesh(SelectedObject)->SetCustomDepthStencilValue(2);
	}
	
	UpdateCharacterSpeed();
}
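// Editorial aside (not from the original source): the FSetElementId indexing used
// above is a compact way to take the top N elements of a stack stored in a TSet,
// assuming the set has had no removals (so element ids are densely packed).
// 'TopOfStack' is a hypothetical helper mirroring the loop in GrabWithTwoHands.
static TArray<AActor*> TopOfStack(const TSet<AActor*>& StackSet, int32 N)
{
	TArray<AActor*> Result;
	const int32 First = FMath::Max(0, StackSet.Num() - N);
	for (int32 i = First; i < StackSet.Num(); ++i)
	{
		Result.Add(StackSet[FSetElementId::FromInteger(i)]);
	}
	return Result;
}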
Example #30
int Bench(int argc,char **argv)
{
	REF(Module)  moduleH;
	REF(any) r;
	char moduleName[40];
	// ProcessStats    totalTime;
	// ServerStats     totalSrvTime;
	//char*	purgeVar;
	char  resultText[200];  // buffer to hold result of operation for
                                // printing outside of timing region.
	char *configfile;
	int opIndex = 2;
	int repeatCount = 1;
	BenchmarkOp whichOp = Trav1;
	bool manyXACTS = false;

	w_rc_t rc;

#ifdef PARSETS

	LOID objtype;

	int NumNodes;

	//reinitialize some globals.
	nextAtomicId=0; 
	nextCompositeId=0;
	nextComplexAssemblyId=0;
	nextBaseAssemblyId=0;
	nextModuleId = TotalModules;
	initParSets(argc, argv);

	if (argc < 6){
	    fprintf(stderr, "Usage: %s %s\n", argv[0], usage1);
	    fprintf(stderr, "%s\n", usage3);
	    fprintf(stderr, "%s\n", usage4);

	    exit(1);
	}

	sscanf(argv[5], "%d", &NumNodes);
	printf("NUMNODES = %d\n", NumNodes);

	for (int j=0; j< NumNodes; j++)
	  CompNodes.Add(j+1);

#endif

	rc = initialize(argc, argv, usage1);
	if(rc) {
	    return 1;
	}

	rc = Shore::begin_transaction(3);
	if(rc){
	    cerr << "can't begin transaction: " << rc << endl;
	    return 1;
	}

	// initialize parameters for benchmark.
	ParseCommandLine(argc, argv, opIndex, repeatCount, whichOp, manyXACTS,
		&configfile);

#ifdef PARSETS
	SetParams(argv[1], slArgs);
#else
	SetParams(configfile);
#endif
	rc = InitGlobals();
	if(rc){
	    cerr << "Error in InitGlobals: " << rc << endl;
	    exit(1);
	}

	nextAtomicId  = TotalAtomicParts + 1;
	nextCompositeId = TotalCompParts + 1;

	rc = Shore::commit_transaction();
	if(rc){
	    cerr << "can't commit transaction: " << rc << endl;
	    return 1;
	}

#ifdef PARSETS


	SlaveRPC(CompNodes, (char *) slaveGenInit, (char *)&slArgs, sizeof(SlaveArgs));	
	SlaveRPC(CompNodes, (char *)slaveOpenPools, NULL, -1);	

#ifdef NEWCOMMUNICATION
	myParSetServer->CreateParSet("oo7db", "CompositePart", ParSet::kPrimary, objtype,(char *)createCompositePart, 4, CompNodes, ParSet::kUserDef, 
		     (char *)declusterCompositeParts);
#else
	CreateParSet("oo7db", "CompositePart", ParSet::kPrimary, objtype,
		     4, (char *)createCompositePart, CompNodes, 
		     ParSet::kUserDef, (char *)declusterCompositeParts);

#endif

	compositeParSet = new PrimaryParSet <REF(CompositePart)>("oo7db", "CompositePart");
#endif

	// Compute structural info needed by the update operations,
	// since these operations need to know which ids should
	// be used next.

	int baseCnt = NumAssmPerAssm;
	int complexCnt = 1;
	for (int i = 1; i < NumAssmLevels-1; i++) {
            baseCnt = baseCnt * NumAssmPerAssm;
            complexCnt += complexCnt * NumAssmPerAssm;
	}
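	// Editorial note (not part of the original code): with fanout f = NumAssmPerAssm
	// and L = NumAssmLevels, the loop above leaves baseCnt = f^(L-1) and
	// complexCnt = (1+f)^(L-2); the "next free" assembly ids computed below start
	// just past TotalModules times these counts.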
	nextBaseAssemblyId = TotalModules*baseCnt + 1;
	nextComplexAssemblyId = TotalModules*complexCnt + 1;
	nextAtomicId = TotalAtomicParts + 1;
	nextCompositeId = TotalCompParts + 1;


	// needed for insert and delete tests
	shared_cp = new BAIdList[TotalCompParts+NumNewCompParts+1];
	private_cp = new BAIdList[TotalCompParts+NumNewCompParts+1];


	// See if debug mode is desired, see which operation to run,
	// and how many times to run it.



	// totalTime.Start();
	// totalSrvTime.Start();

	enum {do_commit, do_chain, do_nothing, do_begin } choice=do_begin;

        // Actually run the darn thing.
	for (int iter = 0; iter < repeatCount; iter++) 
	{
	    //////////////////////////////////////////////////////////////////
	    // Run an OO7 Benchmark Operation
	    //
	    //////////////////////////////////////////////////////////////////

	    printf("RUNNING OO7 BENCHMARK OPERATION %s, iteration = %d.\n", 
	           argv[opIndex], iter);

  	    // get wall clock time
            gettimeofday(&startWallTime, IGNOREZONE &ignoreTimeZone);

	    // get starting usage values.
	    getrusage(RUSAGE_SELF, &startUsage);

	    // Start a new transaction if either this is the first iteration
	    // of a multi-operation transaction or we are running each
	    // operation as a separate transaction.

#ifdef PARSETS
	    if(choice == do_begin) {
		W_COERCE(Shore::begin_transaction(3));
		SlaveRPC(CompNodes, (char *)slaveBeginTransaction, NULL, -1);
	    }

#else
	    if(choice == do_begin) {
		// E_BeginTransaction();
		W_COERCE(Shore::begin_transaction(3));
	    }
#endif

            // set random seed so "hot" runs are truly hot
            srandom(1);

	    // Originally a single random module was chosen per iteration; the loop below now runs the operation on every module instead.
//            int moduleId = (int) (random() % TotalModules) + 1;
	for (int moduleId = 1 ; moduleId <= TotalModules; moduleId++){
			
#ifdef USE_MODULE_INDEX
            sprintf(moduleName, "Module %08d", moduleId);
//	    printf("moduleName=%s\n",moduleName);
	    moduleH =  tbl->ModuleIdx.find(moduleName);
#else
	    sprintf(moduleName,"Module%d", moduleId);
	    rc = REF(Module)::lookup(moduleName, moduleH);
	    if(rc){
		cerr << "Can't find module " << moduleName << ": "
		     << rc << endl;
		return 1;
	    }
#endif
	    printf("Traversing Module= %s\n", moduleName);
	    if (moduleH == NULL)
	    {
	        fprintf(stderr, "ERROR: Unable to access %s.\n", moduleName);
		// E_AbortTransaction();
		W_COERCE(Shore::abort_transaction());
	        exit(1);
	    }

	    // Perform the requested operation on the chosen module
	    long count = 0;
	    int docCount = 0;
	    int charCount = 0;
	    int replaceCount = 0;

	    switch (whichOp) {
	        case Trav1:
	            count = moduleH->traverse(whichOp);
	            sprintf(resultText, "Traversal 1 DFS visited %d atomic parts.\n",
			             count);
  		    break;
		 case Trav1WW:
                    RealWork = 1;
                    whichOp = Trav1;  // so traverse methods don't complain
                    count = moduleH->traverse(whichOp);
                    whichOp = Trav1WW;  // for next (hot) traversal
                    sprintf(resultText, "Traversal 1WW DFS visited %d atomic parts.\n",
                                     count);
                    break;
	        case Trav2a:
	            count = moduleH->traverse(whichOp);
	            sprintf(resultText, "Traversal 2A swapped %d pairs of (X,Y) coordinates.\n",
 			         count);
		    break;
	        case Trav2b:
	            count = moduleH->traverse(whichOp);
	            sprintf(resultText, "Traversal 2B swapped %d pairs of (X,Y) coordinates.\n",
			             count);
		    break;
	        case Trav2c:
	            count = moduleH->traverse(whichOp);
	            sprintf(resultText, "Traversal 2C swapped %d pairs of (X,Y) coordinates.\n",
			             count);
		    break;
	        case Trav3a:
	            count = moduleH->traverse(whichOp);
	            sprintf(resultText, "Traversal 3A toggled %d dates.\n",
			             count);
		    break;
	        case Trav3b:
	            count = moduleH->traverse(whichOp);
	            sprintf(resultText, "Traversal 3B toggled %d dates.\n",
			             count);
		    break;
	        case Trav3c:
	            count = moduleH->traverse(whichOp);
	            sprintf(resultText, "Traversal 3C toggled %d dates.\n",
			            count);
		    break;
	        case Trav4:
	            count = moduleH->traverse(whichOp);
	            sprintf(resultText, "Traversal 4: %d instances of the character found\n",
			             count);
		    break;
	        case Trav5do:
	            count = moduleH->traverse(whichOp);
	            sprintf(resultText, "Traversal 5(DO): %d string replacements performed\n",
			             count);
		    break;
	        case Trav5undo:
	            count = moduleH->traverse(whichOp);
	            sprintf(resultText, "Traversal 5(UNDO): %d string replacements performed\n",
			         count);
		    break;
	        case Trav6:
	            count = moduleH->traverse(whichOp);
	            sprintf(resultText, "Traversal 6: visited %d atomic part roots.\n",
			             count);
		    break;
	        case Trav7:
		    sprintf(resultText, "Traversal 7: found %d assemblies using rand om atomic part.\n", 
			count);
		    break;
	        case Trav8:
	            count = moduleH->scanManual();
		    sprintf(resultText, "Traversal 8: found %d occurrences of character in manual.\n", 
			count);
		    break;
	        case Trav9:
	            count = moduleH->firstLast();
		    sprintf(resultText, "Traversal 9: match was %d.\n", 
			count);
		    break;

                case Trav10:
                    // run traversal #1 on every module.
                    count = 0;
                    whichOp = Trav1;  // so object methods don't complain
                    for (moduleId = 1; moduleId <= TotalModules; moduleId++) {
                        sprintf(moduleName, "Module %08d", moduleId);
			bool found;
	  	        shrc rc =tbl->ModuleIdx.find(moduleName,moduleH,found);
   	                if (rc || !found ||moduleH == NULL) {
                                fprintf(stderr,
                                        "ERROR: t10 Unable to access %s.\n",
                                         moduleName);
				W_COERCE(Shore::abort_transaction());
                                exit(1);
                        }
                        count += moduleH->traverse(whichOp);
                    }
                    sprintf(resultText,
                           "Traversal 10 visited %d atomic parts in %d modules.\n",
                                     count, TotalModules);
                    whichOp = Trav10;  // for next time around
                    break;           

	        case Query1:
	            count = query1();
	            sprintf(resultText, "Query one retrieved %d atomic parts.\n",
			             count);
		    break;
	        case Query2:
	            count = query2();
	            sprintf(resultText, "Query two retrieved %d qualifying atomic parts.\n",
			         count);
		    break;
	        case Query3:
	            count = query3();
	            sprintf(resultText, "Query three retrieved %d qualifying atomic parts.\n",
			         count);
		    break;
	        case Query4:
	            count = query4();
	            sprintf(resultText, "Query four retrieved %d (document, base assembly) pairs.\n",
			         count);
		    break;
	        case Query5:
	            count = query5();
	            sprintf(resultText, "Query five retrieved %d out-of-date base assemblies.\n",
			             count);
		    break;
	        case Query6:
	            count = query6();
	            sprintf(resultText, "Query six retrieved %d out-of-date assemblies.\n",
			         count);
		    break;
	        case Query7:
	            count = query7();
		    sprintf(resultText, "Query seven iterated through %d atomic part s.\n",
			             count);
		    break;
	        case Query8:
	            count = query8();
		    sprintf(resultText, "Query eight found %d atomic part/document m atches.\n",
			 count);
		    break;
	        case Insert:
	            insert1();
	            sprintf(resultText, "Inserted %d composite parts (a total of %d atomic parts.)\n",
		      NumNewCompParts, NumNewCompParts*NumAtomicPerComp);
		    break;
	        case Delete:
	            delete1();
	            sprintf(resultText, "Deleted %d composite parts (a total of %d atomic parts.)\n",
	             NumNewCompParts, NumNewCompParts*NumAtomicPerComp);
		    break;

		 case Reorg1:
		     count = reorg1();
		     sprintf(resultText, "Reorg1 replaced %d atomic parts.\n", 
			count);
		     break;

	    	 case Reorg2:
		     count = reorg2();
		     sprintf(resultText, "Reorg2 replaced %d atomic parts.\n", 
			count);
		     break;
// NEW
	        case WarmUpdate:
		    // first do the t1 traversal to warm the cache
	            count = moduleH->traverse(Trav1);
		    // then call T2 to do the update
	            count = moduleH->traverse(Trav2a);
	            sprintf(resultText, 
			"Warm update swapped %d pairs of (X,Y) coordinates.\n",
 			         count);
		     break;
	        default:
	            fprintf(stderr, "Sorry, that operation isn't available yet.\n");
		    // E_AbortTransaction();
		    W_COERCE(Shore::abort_transaction());
	            exit(1);
	    }
		printf("Visited=%d\n", count);
	}
	{ 
#ifdef PARSETS

	    if ((iter == repeatCount-1) || manyXACTS){
		printf("Calling commit transaction\n");
		SlaveRPC(CompNodes, (char *)slaveCommitTransaction, NULL, -1);
		choice = do_commit;
	    }
#else
	    // Commit the current transaction if 
	    // we are running the last iteration 
	    // or running a multitransaction test and not chaining
	    // Chain the tx if we are chaining and not on
	    // the last iteration

	    if (iter == repeatCount-1) {
		choice=do_commit;
		// commit 
	    } else if(manyXACTS) {
		// not last iteration, multi tx test
		if(chain_tx) {
		    choice=do_chain;
		} else {
		    choice=do_commit;
		}
	    } else choice=do_nothing;
#endif
	    if(choice==do_commit) {
		//E_CommitTransaction();
		W_COERCE(Shore::commit_transaction());
		choice = do_begin;
	    } else if (choice==do_chain) {
		W_COERCE(Shore::chain_transaction());
		choice = do_nothing;
	    } 
	}

            // compute and report wall clock time
            gettimeofday(&endWallTime, IGNOREZONE &ignoreTimeZone);
	    printf("SHORE, operation= %s, iteration= %d, elapsedTime= %f seconds\n",
               argv[opIndex], iter,
               ComputeWallClockTime(&startWallTime, &endWallTime));
            if (iter == 1) startWarmTime = startWallTime;

            // Compute and report CPU time.
	    getrusage(RUSAGE_SELF, &endUsage);
            fprintf(stdout, resultText);
	    fprintf(stdout, "CPU time: %f seconds.\n", 
	                ComputeUserTime(&startUsage, &endUsage) +
			ComputeSystemTime(&startUsage, &endUsage));
	    fprintf(stdout, "(%f seconds user, %f seconds system.)\n", 
	                ComputeUserTime(&startUsage, &endUsage),
			ComputeSystemTime(&startUsage, &endUsage));

	    if ((repeatCount > 2) && (iter == repeatCount-2)) 
	    {
	       // compute average hot time for 2nd through n-1 th iterations
               printf("SHORE, operation=%s, average hot elapsedTime=%f seconds\n",
	       	  argv[opIndex], 
	          ComputeWallClockTime(&startWarmTime, &endWallTime)/(repeatCount-2)); 
	    }
	  }

	//////////////////////////////////////////////////////////////////
	//
	// Shutdown 
	//
	//////////////////////////////////////////////////////////////////

#ifdef PARSETS
	cleanupParSets();
#endif
	// totalTime.Stop();
	// totalSrvTime.Stop();
	// fprintf(stdout, "Total stats (client,server):\n");
	// totalTime.PrintStatsHeader(stdout);
	// totalTime.PrintStats(stdout, "TotalCli");
	// totalSrvTime.PrintStats(stdout, "TotalSrv");

	// Exit
	W_COERCE(Shore::exit());
	return(0);
}
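// Editorial sketch (not part of the original benchmark): the elapsed-time reporting
// above boils down to two gettimeofday() samples around the timed region;
// ComputeWallClockTime presumably reduces to something like the helper below
// (assumes <sys/time.h>, which the gettimeofday calls above already require).
static double ElapsedSeconds(const struct timeval& Start, const struct timeval& End)
{
	return (double)(End.tv_sec - Start.tv_sec) +
	       (double)(End.tv_usec - Start.tv_usec) / 1e6;
}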