double FFileManagerGeneric::GetFileAgeSeconds( const TCHAR* Filename )
{
	// make sure it exists
	if (!GetLowLevel().FileExists(Filename))
	{
		return -1.0;
	}

	// get difference in time between now (UTC) and the filetime
	FTimespan Age = FDateTime::UtcNow() - GetTimeStamp(Filename);
	return Age.GetTotalSeconds();
}
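// A minimal usage sketch (not part of the snippet above): deciding whether a cached file is stale
// from its age in seconds. IsCacheFileStale and the one-day threshold are hypothetical; the sketch
// assumes GetFileAgeSeconds() returns a negative value when the file does not exist, as implemented above.
#include "HAL/FileManager.h"
#include "Misc/Timespan.h"

static bool IsCacheFileStale(const TCHAR* Filename)
{
	const double AgeSeconds = IFileManager::Get().GetFileAgeSeconds(Filename);
	if (AgeSeconds < 0.0)
	{
		// A missing file counts as stale so the caller regenerates it.
		return true;
	}

	// Compare against a one-day threshold expressed as an FTimespan for readability.
	return AgeSeconds > FTimespan::FromDays(1.0).GetTotalSeconds();
}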
float AFPSGPlayerController::timeUntilRespawn()
{
	float timeToRespawn = 0.0f;

	if (!isAlive)
	{
		if (timeOfDeath != FDateTime::MinValue())
		{
			FTimespan timeSinceDeath = FDateTime::Now() - timeOfDeath;
			timeToRespawn = respawnTime - static_cast<float>(timeSinceDeath.GetTotalSeconds());
		}
	}

	return timeToRespawn;
}
bool FTimespan::Parse( const FString& TimespanString, FTimespan& OutTimespan )
{
	// @todo gmp: implement stricter FTimespan parsing; this implementation is too forgiving.
	FString TokenString = TimespanString.Replace(TEXT("."), TEXT(":"));
	bool Negative = TokenString.StartsWith(TEXT("-"));
	TokenString.ReplaceInline(TEXT("-"), TEXT(":"), ESearchCase::CaseSensitive);

	TArray<FString> Tokens;
	TokenString.ParseIntoArray(Tokens, TEXT(":"), true);

	if (Tokens.Num() == 4)
	{
		Tokens.Insert(TEXT("0"), 0);
	}

	if (Tokens.Num() == 5)
	{
		OutTimespan.Assign(FCString::Atoi(*Tokens[0]), FCString::Atoi(*Tokens[1]), FCString::Atoi(*Tokens[2]), FCString::Atoi(*Tokens[3]), FCString::Atoi(*Tokens[4]));

		if (Negative)
		{
			OutTimespan.Ticks *= -1;
		}

		return true;
	}

	return false;
}
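// A minimal sketch (not engine source) showing how the forgiving parser above behaves:
// "1:02:03.004" and "-00:00:30" both go through FTimespan::Parse. Variable names are illustrative only.
#include "Misc/Timespan.h"

void ParseTimespanExample()
{
	FTimespan Parsed;
	if (FTimespan::Parse(TEXT("1:02:03.004"), Parsed))
	{
		// Tokenized as hours:minutes:seconds.milliseconds -> 1 hour, 2 minutes, 3 seconds, 4 milliseconds
		UE_LOG(LogTemp, Log, TEXT("Parsed: %s (%f seconds)"), *Parsed.ToString(), Parsed.GetTotalSeconds());
	}

	FTimespan Negative;
	if (FTimespan::Parse(TEXT("-00:00:30"), Negative))
	{
		// The leading '-' flips the sign of the whole timespan.
		check(Negative.GetTotalSeconds() < 0.0);
	}
}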
bool FAndroidMediaPlayer::Seek(const FTimespan& Time)
{
	if (MediaState == EMediaState::Prepared ||
		MediaState == EMediaState::Started ||
		MediaState == EMediaState::Paused ||
		MediaState == EMediaState::PlaybackCompleted)
	{
		// The seek target is an absolute position in milliseconds, so use the total span
		// rather than the 0-999 millisecond component returned by GetMilliseconds().
		JavaMediaPlayer->SeekTo(static_cast<int32>(Time.GetTotalMilliseconds()));
		return true;
	}
	else
	{
		return false;
	}
}
EIPv6SocketInternalState::Return FSocketBSDIPv6::HasState(EIPv6SocketInternalState::Param State, FTimespan WaitTime)
{
#if PLATFORM_HAS_BSD_SOCKET_FEATURE_SELECT
	// convert WaitTime to a timeval
	timeval Time;
	Time.tv_sec = (int32)WaitTime.GetTotalSeconds();
	Time.tv_usec = WaitTime.GetMilliseconds() * 1000;

	fd_set SocketSet;

	// Set up the socket sets we are interested in (just this one)
	FD_ZERO(&SocketSet);
	FD_SET(Socket, &SocketSet);

	// Check the status of the state
	int32 SelectStatus = 0;
	switch (State)
	{
	case EIPv6SocketInternalState::CanRead:
		SelectStatus = select(Socket + 1, &SocketSet, NULL, NULL, &Time);
		break;
	case EIPv6SocketInternalState::CanWrite:
		SelectStatus = select(Socket + 1, NULL, &SocketSet, NULL, &Time);
		break;
	case EIPv6SocketInternalState::HasError:
		SelectStatus = select(Socket + 1, NULL, NULL, &SocketSet, &Time);
		break;
	}

	// if the select returns a positive number, the socket had the state, 0 means didn't have it,
	// and negative is API error condition (not socket's error state)
	return SelectStatus > 0  ? EIPv6SocketInternalState::Yes :
		   SelectStatus == 0 ? EIPv6SocketInternalState::No :
		   EIPv6SocketInternalState::EncounteredError;
#else
	UE_LOG(LogSockets, Fatal, TEXT("This platform doesn't support select(), but FSocketBSD::HasState was not overridden"));
	return EIPv6SocketInternalState::EncounteredError;
#endif
}
void AEscapeBallGameMode::Tick(float DeltaSeconds)
{
	// Add time to the total Time
	totalGameTime += DeltaSeconds;
	FTimespan timeFormatter = FTimespan(0, 0, 0, FMath::FloorToInt(totalGameTime), FMath::FloorToInt(FMath::Fmod(totalGameTime, 1.0f) * 1000.0f));
	totalGameTimeString = timeFormatter.ToString();

	// Check if player has touched the goal.
	UWorld* worldRef = GetWorld();
	//ADestinationGoal* goalRef = Cast<ADestinationGoal>(goal);
	if (goal && goal->isTriggered())
	{
		//GEngine->AddOnScreenDebugMessage(-1, 0.1f, FColor::Yellow, TEXT("Reached Goal"));
		UGameplayStatics::OpenLevel(GWorld, "LevelEditorQuickStartGuide", false, "");
	}
}
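// A simpler way to build the same elapsed-time string (a sketch, not part of the game mode above):
// FTimespan::FromSeconds keeps the fractional part, so the manual float splitting above is not needed.
// MakeElapsedTimeString is a hypothetical helper name.
#include "Misc/Timespan.h"

FString MakeElapsedTimeString(float TotalGameTimeSeconds)
{
	// ToString() on the resulting timespan yields the default hours/minutes/seconds representation.
	return FTimespan::FromSeconds(TotalGameTimeSeconds).ToString();
}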
void AUISurfaceActor::HandleVirtualTouchInput(FVector ActionHandPalmLocation, FVector ActionHandFingerLocation) { FVector ActorSpaceActionFingerLocation = this->GetTransform().InverseTransformPosition(ActionHandFingerLocation); FVector ActorSpaceActionPalmLocation = this->GetTransform().InverseTransformPosition(ActionHandPalmLocation); FVector ActorSpaceActionFingerPreviousLocation = this->GetTransform().InverseTransformPosition(ActionHandPreviousFingerLocation); FVector ActorSpaceActionPalmPreviousLocation = this->GetTransform().InverseTransformPosition(ActionHandPreviousPalmLocation); FVector ActorSpaceActionFingerLocationXY = ActorSpaceActionFingerLocation; ActorSpaceActionFingerLocationXY.Z = 0.0; FVector ActorSpacePointerFingerLocationXYWorld = this->GetTransform().TransformPosition(ActorSpaceActionFingerLocationXY); FVector PointerFingerPixelCoordinates = GetViewPixelCoordinatesFromActorLocation(ActorSpaceActionFingerLocationXY); if (ActorSpaceActionFingerLocation.Z <= 0) { // finger is across plane if (!PointerFingerAcrossPlane) { // finger was not previously across plane, so handle as mouse click and set across flag to true HandleMouseDownEventAtCoordinates(PointerFingerPixelCoordinates); DrawDebugSphere(GetWorld(), ActorSpacePointerFingerLocationXYWorld, 0.6, 12, FColor::Cyan, true, 0.1); PointerFingerAcrossPlane = true; } else { // was already across plane } // handle scrolling FVector CurrentPalmPixelCoordinates = GetViewPixelCoordinatesFromActorLocation(ActorSpaceActionPalmLocation); FVector PreviousPalmPixelCoordinates = GetViewPixelCoordinatesFromActorLocation(ActorSpaceActionPalmPreviousLocation); float NumPixelsMovedY = CurrentPalmPixelCoordinates.Y - PreviousPalmPixelCoordinates.Y; // if negative means palm is moving up so should scroll down if (abs(NumPixelsMovedY) >= ScrollNumPixelsThreshold) { float WheelTicksY = NumPixelsMovedY / PixelToWheelTickScalingFactor; HandleYScrollIncrementEvent(WheelTicksY); } // Handle left/right swipe // NOTE: was a simple left swipe to go Back before, but since there were too many false positives have more complicated gesture linking left and right swipes to go Back float NumPixelsMovedX = CurrentPalmPixelCoordinates.X - PreviousPalmPixelCoordinates.X; if (abs(NumPixelsMovedX) >= SwipeLengthPixelsThreshold) { FDateTime CurrentTime = FDateTime::Now(); if (NumPixelsMovedX < 0) { // is left swipe FTimespan TimeBetweenLeftSwipes = CurrentTime - LastLeftSwipeTime; if (TimeBetweenLeftSwipes.GetTotalMilliseconds() >= MinMillisecondsBetweenSwipes) { // check if there has been enough elapsed time since last swipe LastLeftSwipeTime = CurrentTime; } } else { // is right swipe FTimespan TimeBetweenRightSwipes = CurrentTime - LastRightSwipeTime; if (TimeBetweenRightSwipes.GetTotalMilliseconds() >= MinMillisecondsBetweenSwipes) { // check if there has been enough elapsed time since last swipe LastRightSwipeTime = CurrentTime; FTimespan TimeBetweenLeftAndRightSwipes = LastRightSwipeTime - LastLeftSwipeTime; if (TimeBetweenLeftAndRightSwipes.GetTotalMilliseconds() <= MaxMillisecondsLinkingSwipes) { // check right swipe is linked to the previous left swipe HandleBackEvent(); } } } } } else { // finger is not across plane if (PointerFingerAcrossPlane) { // set across flag to false if it was set to true PointerFingerAcrossPlane = false ; HandleMouseUpEventAtCoordinates(PointerFingerPixelCoordinates); } // check for hover state if (ActorSpaceActionFingerLocation.Z <= this->HoverDistance) { // check for hovering PointerFingerIsHovering = true; 
DrawDebugSphere(GetWorld(), ActorSpacePointerFingerLocationXYWorld, 0.5, 12, FColor::Magenta); HandleMouseoverEventPixelCoordinates(PointerFingerPixelCoordinates); } else { if (PointerFingerIsHovering) { PointerFingerIsHovering = false; } } } // Now that we are done with Leap processing let's set the previous hand locations to the current hand locations ActionHandPreviousPalmLocation = ActionHandPalmLocation; ActionHandPreviousFingerLocation = ActionHandFingerLocation; }
FText FText::AsTimespan( const FTimespan& Timespan, const FCulturePtr& TargetCulture)
{
	checkf(FInternationalization::Get().IsInitialized() == true, TEXT("FInternationalization is not initialized. An FText formatting method was likely used in static object initialization - this is not supported."));

	FDateTime DateTime(Timespan.GetTicks());
	return FText::FromString( DateTime.ToString( TEXT("%H.%M.%S") ) );
}
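// A usage sketch for the formatting above (illustrative only): because the timespan's ticks are
// reinterpreted as an FDateTime and printed with %H.%M.%S, any days component is dropped and the
// hours wrap at 24.
#include "Internationalization/Text.h"
#include "Misc/Timespan.h"

void AsTimespanExample()
{
	const FText Short = FText::AsTimespan(FTimespan(1, 2, 3), nullptr);  // 1h 2m 3s -> "01.02.03"
	const FText Long  = FText::AsTimespan(FTimespan(25, 0, 0), nullptr); // 25 hours wraps to "01.00.00" with this implementation
}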
TArray<FColor> USceneCapturer::SaveAtlas(FString Folder, const TArray<FColor>& SurfaceData) { SCOPE_CYCLE_COUNTER( STAT_SPSavePNG ); TArray<FColor> SphericalAtlas; SphericalAtlas.AddZeroed(SphericalAtlasWidth * SphericalAtlasHeight); const FVector2D slicePlaneDim = FVector2D( 2.0f * FMath::Tan(FMath::DegreesToRadians(sliceHFov) / 2.0f), 2.0f * FMath::Tan(FMath::DegreesToRadians(sliceVFov) / 2.0f)); //For each direction, // Find corresponding slice // Calculate intersection of slice plane // Calculate intersection UVs by projecting onto plane tangents // Supersample that UV coordinate from the unprojected atlas { SCOPE_CYCLE_COUNTER(STAT_SPSampleSpherical); // Dump out how long the process took const FDateTime SamplingStartTime = FDateTime::UtcNow(); UE_LOG(LogStereoPanorama, Log, TEXT("Sampling atlas...")); for (int32 y = 0; y < SphericalAtlasHeight; y++) { for (int32 x = 0; x < SphericalAtlasWidth; x++) { FLinearColor samplePixelAccum = FLinearColor(0, 0, 0, 0); //TODO: ikrimae: Seems that bilinear filtering sans supersampling is good enough. Supersampling sans bilerp seems best. // After more tests, come back to optimize by folding supersampling in and remove this outer sampling loop. const auto& ssPattern = g_ssPatterns[SSMethod]; for (int32 SampleCount = 0; SampleCount < ssPattern.numSamples; SampleCount++) { const float sampleU = ((float)x + ssPattern.ssOffsets[SampleCount].X) / SphericalAtlasWidth; const float sampleV = ((float)y + ssPattern.ssOffsets[SampleCount].Y) / SphericalAtlasHeight; const float sampleTheta = sampleU * 360.0f; const float samplePhi = sampleV * 180.0f; const FVector sampleDir = FVector( FMath::Sin(FMath::DegreesToRadians(samplePhi)) * FMath::Cos(FMath::DegreesToRadians(sampleTheta)), FMath::Sin(FMath::DegreesToRadians(samplePhi)) * FMath::Sin(FMath::DegreesToRadians(sampleTheta)), FMath::Cos(FMath::DegreesToRadians(samplePhi))); //TODO: ikrimae: ugh, ugly. const int32 sliceXIndex = FMath::TruncToInt(FRotator::ClampAxis(sampleTheta + hAngIncrement / 2.0f) / hAngIncrement); int32 sliceYIndex = 0; //Slice Selection = slice with max{sampleDir dot sliceNormal } { float largestCosAngle = 0; for (int VerticalStep = 0; VerticalStep < NumberOfVerticalSteps; VerticalStep++) { const FVector2D sliceCenterThetaPhi = FVector2D( hAngIncrement * sliceXIndex, vAngIncrement * VerticalStep); //TODO: ikrimae: There has got to be a faster way. Rethink reparametrization later const FVector sliceDir = FVector( FMath::Sin(FMath::DegreesToRadians(sliceCenterThetaPhi.Y)) * FMath::Cos(FMath::DegreesToRadians(sliceCenterThetaPhi.X)), FMath::Sin(FMath::DegreesToRadians(sliceCenterThetaPhi.Y)) * FMath::Sin(FMath::DegreesToRadians(sliceCenterThetaPhi.X)), FMath::Cos(FMath::DegreesToRadians(sliceCenterThetaPhi.Y))); const float cosAngle = sampleDir | sliceDir; if (cosAngle > largestCosAngle) { largestCosAngle = cosAngle; sliceYIndex = VerticalStep; } } } const FVector2D sliceCenterThetaPhi = FVector2D( hAngIncrement * sliceXIndex, vAngIncrement * sliceYIndex); //TODO: ikrimae: Reparameterize with an inverse mapping (e.g. project from slice pixels onto final u,v coordinates. // Should make code simpler and faster b/c reduces to handful of sin/cos calcs per slice. // Supersampling will be more difficult though. 
const FVector sliceDir = FVector( FMath::Sin(FMath::DegreesToRadians(sliceCenterThetaPhi.Y)) * FMath::Cos(FMath::DegreesToRadians(sliceCenterThetaPhi.X)), FMath::Sin(FMath::DegreesToRadians(sliceCenterThetaPhi.Y)) * FMath::Sin(FMath::DegreesToRadians(sliceCenterThetaPhi.X)), FMath::Cos(FMath::DegreesToRadians(sliceCenterThetaPhi.Y))); const FPlane slicePlane = FPlane(sliceDir, -sliceDir); //Tangents from partial derivatives of sphere equation const FVector slicePlanePhiTangent = FVector( FMath::Cos(FMath::DegreesToRadians(sliceCenterThetaPhi.Y)) * FMath::Cos(FMath::DegreesToRadians(sliceCenterThetaPhi.X)), FMath::Cos(FMath::DegreesToRadians(sliceCenterThetaPhi.Y)) * FMath::Sin(FMath::DegreesToRadians(sliceCenterThetaPhi.X)), -FMath::Sin(FMath::DegreesToRadians(sliceCenterThetaPhi.Y))).GetSafeNormal(); //Should be reconstructed to get around discontinuity of theta tangent at nodal points const FVector slicePlaneThetaTangent = (sliceDir ^ slicePlanePhiTangent).GetSafeNormal(); //const FVector slicePlaneThetaTangent = FVector( // -FMath::Sin(FMath::DegreesToRadians(sliceCenterThetaPhi.Y)) * FMath::Sin(FMath::DegreesToRadians(sliceCenterThetaPhi.X)), // FMath::Sin(FMath::DegreesToRadians(sliceCenterThetaPhi.Y)) * FMath::Cos(FMath::DegreesToRadians(sliceCenterThetaPhi.X)), // 0).SafeNormal(); check(!slicePlaneThetaTangent.IsZero() && !slicePlanePhiTangent.IsZero()); const double t = (double)-slicePlane.W / (sampleDir | sliceDir); const FVector sliceIntersection = FVector(t * sampleDir.X, t * sampleDir.Y, t * sampleDir.Z); //Calculate scalar projection of sliceIntersection onto tangent vectors. a dot b / |b| = a dot b when tangent vectors are normalized //Then reparameterize to U,V of the sliceplane based on slice plane dimensions const float sliceU = (sliceIntersection | slicePlaneThetaTangent) / slicePlaneDim.X; const float sliceV = (sliceIntersection | slicePlanePhiTangent) / slicePlaneDim.Y; check(sliceU >= -(0.5f + KINDA_SMALL_NUMBER) && sliceU <= (0.5f + KINDA_SMALL_NUMBER)); check(sliceV >= -(0.5f + KINDA_SMALL_NUMBER) && sliceV <= (0.5f + KINDA_SMALL_NUMBER)); //TODO: ikrimae: Supersample/bilinear filter const int32 slicePixelX = FMath::TruncToInt(dbgMatchCaptureSliceFovToAtlasSliceFov ? sliceU * StripWidth : sliceU * CaptureWidth); const int32 slicePixelY = FMath::TruncToInt(dbgMatchCaptureSliceFovToAtlasSliceFov ? 
sliceV * StripHeight : sliceV * CaptureHeight); FLinearColor slicePixelSample; if (bEnableBilerp) { //TODO: ikrimae: Clean up later; too tired now const int32 sliceCenterPixelX = (sliceXIndex + 0.5f) * StripWidth; const int32 sliceCenterPixelY = (sliceYIndex + 0.5f) * StripHeight; const FIntPoint atlasSampleTL(sliceCenterPixelX + FMath::Clamp(slicePixelX , -StripWidth/2, StripWidth/2), sliceCenterPixelY + FMath::Clamp(slicePixelY , -StripHeight/2, StripHeight/2)); const FIntPoint atlasSampleTR(sliceCenterPixelX + FMath::Clamp(slicePixelX + 1, -StripWidth/2, StripWidth/2), sliceCenterPixelY + FMath::Clamp(slicePixelY , -StripHeight/2, StripHeight/2)); const FIntPoint atlasSampleBL(sliceCenterPixelX + FMath::Clamp(slicePixelX , -StripWidth/2, StripWidth/2), sliceCenterPixelY + FMath::Clamp(slicePixelY + 1, -StripHeight/2, StripHeight/2)); const FIntPoint atlasSampleBR(sliceCenterPixelX + FMath::Clamp(slicePixelX + 1, -StripWidth/2, StripWidth/2), sliceCenterPixelY + FMath::Clamp(slicePixelY + 1, -StripHeight/2, StripHeight/2)); const FColor pixelColorTL = SurfaceData[atlasSampleTL.Y * UnprojectedAtlasWidth + atlasSampleTL.X]; const FColor pixelColorTR = SurfaceData[atlasSampleTR.Y * UnprojectedAtlasWidth + atlasSampleTR.X]; const FColor pixelColorBL = SurfaceData[atlasSampleBL.Y * UnprojectedAtlasWidth + atlasSampleBL.X]; const FColor pixelColorBR = SurfaceData[atlasSampleBR.Y * UnprojectedAtlasWidth + atlasSampleBR.X]; const float fracX = FMath::Frac(dbgMatchCaptureSliceFovToAtlasSliceFov ? sliceU * StripWidth : sliceU * CaptureWidth); const float fracY = FMath::Frac(dbgMatchCaptureSliceFovToAtlasSliceFov ? sliceV * StripHeight : sliceV * CaptureHeight); //Reinterpret as linear (a.k.a dont apply srgb inversion) slicePixelSample = FMath::BiLerp( pixelColorTL.ReinterpretAsLinear(), pixelColorTR.ReinterpretAsLinear(), pixelColorBL.ReinterpretAsLinear(), pixelColorBR.ReinterpretAsLinear(), fracX, fracY); } else { const int32 sliceCenterPixelX = (sliceXIndex + 0.5f) * StripWidth; const int32 sliceCenterPixelY = (sliceYIndex + 0.5f) * StripHeight; const int32 atlasSampleX = sliceCenterPixelX + slicePixelX; const int32 atlasSampleY = sliceCenterPixelY + slicePixelY; slicePixelSample = SurfaceData[atlasSampleY * UnprojectedAtlasWidth + atlasSampleX].ReinterpretAsLinear(); } samplePixelAccum += slicePixelSample; ////Output color map of projections //const FColor debugEquiColors[12] = { // FColor(205, 180, 76), // FColor(190, 88, 202), // FColor(127, 185, 194), // FColor(90, 54, 47), // FColor(197, 88, 53), // FColor(197, 75, 124), // FColor(130, 208, 72), // FColor(136, 211, 153), // FColor(126, 130, 207), // FColor(83, 107, 59), // FColor(200, 160, 157), // FColor(80, 66, 106) //}; //samplePixelAccum = ssPattern.numSamples * debugEquiColors[sliceYIndex * 4 + sliceXIndex]; } SphericalAtlas[y * SphericalAtlasWidth + x] = (samplePixelAccum / ssPattern.numSamples).Quantize(); // Force alpha value if (bForceAlpha) { SphericalAtlas[y * SphericalAtlasWidth + x].A = 255; } } } //Blit the first column into the last column to make the stereo image seamless at theta=360 for (int32 y = 0; y < SphericalAtlasHeight; y++) { SphericalAtlas[y * SphericalAtlasWidth + (SphericalAtlasWidth - 1)] = SphericalAtlas[y * SphericalAtlasWidth + 0]; } const FTimespan SamplingDuration = FDateTime::UtcNow() - SamplingStartTime; UE_LOG(LogStereoPanorama, Log, TEXT("...done! 
Duration: %g seconds"), SamplingDuration.GetTotalSeconds());
	}

	// Generate name
	FString FrameString = FString::Printf( TEXT( "%s_%05d.png" ), *Folder, CurrentFrameCount );
	FString AtlasName = OutputDir / Timestamp / FrameString;

	UE_LOG( LogStereoPanorama, Log, TEXT( "Writing atlas: %s" ), *AtlasName );

	// Write out PNG
	//TODO: ikrimae: Use threads to write out the images for performance
	IImageWrapperPtr ImageWrapper = ImageWrapperModule.CreateImageWrapper( EImageFormat::PNG );
	ImageWrapper->SetRaw(SphericalAtlas.GetData(), SphericalAtlas.GetAllocatedSize(), SphericalAtlasWidth, SphericalAtlasHeight, ERGBFormat::BGRA, 8);
	const TArray<uint8>& PNGData = ImageWrapper->GetCompressed(100);
	FFileHelper::SaveArrayToFile( PNGData, *AtlasName );

	if (FStereoPanoramaManager::GenerateDebugImages->GetInt() != 0)
	{
		FString FrameStringUnprojected = FString::Printf(TEXT("%s_%05d_Unprojected.png"), *Folder, CurrentFrameCount);
		FString AtlasNameUnprojected = OutputDir / Timestamp / FrameStringUnprojected;

		ImageWrapper->SetRaw(SurfaceData.GetData(), SurfaceData.GetAllocatedSize(), UnprojectedAtlasWidth, UnprojectedAtlasHeight, ERGBFormat::BGRA, 8);
		const TArray<uint8>& PNGDataUnprojected = ImageWrapper->GetCompressed(100);
		// Save the unprojected debug image (the original saved PNGData here, writing the projected atlas twice).
		FFileHelper::SaveArrayToFile(PNGDataUnprojected, *AtlasNameUnprojected);
	}
	ImageWrapper.Reset();

	UE_LOG( LogStereoPanorama, Log, TEXT( " ... done!" ) );

	return SphericalAtlas;
}
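// A small helper in the same spirit as the duration logging above (FScopedWallClockLog is
// hypothetical, not part of the plugin): log how long a scope took using FDateTime::UtcNow().
#include "Misc/DateTime.h"
#include "Misc/Timespan.h"

struct FScopedWallClockLog
{
	explicit FScopedWallClockLog(const TCHAR* InLabel)
		: Label(InLabel)
		, Start(FDateTime::UtcNow())
	{}

	~FScopedWallClockLog()
	{
		// Elapsed wall-clock time since construction.
		const FTimespan Duration = FDateTime::UtcNow() - Start;
		UE_LOG(LogTemp, Log, TEXT("%s took %g seconds"), Label, Duration.GetTotalSeconds());
	}

	const TCHAR* Label;
	FDateTime Start;
};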
float UKismetMathLibrary::GetTotalSeconds( FTimespan A ) { return A.GetTotalSeconds(); }
float UKismetMathLibrary::GetTotalMinutes( FTimespan A ) { return A.GetTotalMinutes(); }
int32 UKismetMathLibrary::GetSeconds( FTimespan A ) { return A.GetSeconds(); }
int32 UKismetMathLibrary::GetMinutes( FTimespan A ) { return A.GetMinutes(); }
int32 UKismetMathLibrary::GetHours( FTimespan A ) { return A.GetHours(); }
FTimespan UKismetMathLibrary::GetDuration( FTimespan A ) { return A.GetDuration(); }
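// A small sketch (not engine code) tying the Blueprint wrappers above back to the underlying C++ API:
// the "total" getters return the full span as a floating-point value, while the component getters
// return only that unit of the span.
#include "Misc/Timespan.h"

void TimespanGetterExample()
{
	const FTimespan Span(1, 30, 15); // 1 hour, 30 minutes, 15 seconds

	const double TotalMinutes = Span.GetTotalMinutes(); // 90.25
	const int32  Minutes      = Span.GetMinutes();      // 30 (component only)
	const int32  Hours        = Span.GetHours();        // 1
	const FTimespan Magnitude = Span.GetDuration();     // absolute value of the span
}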
/** * Dumps the information held within the EditorPerfCaptureParameters struct into a CSV file. * @param EditorPerfStats is the name of the struct that holds the needed performance information. */ void EditorPerfDump(EditorPerfCaptureParameters& EditorPerfStats) { UE_LOG(LogEditorAutomationTests, Log, TEXT("Begin generating the editor performance charts.")); //The file location where to save the data. FString DataFileLocation = FPaths::Combine(*FPaths::AutomationLogDir(), TEXT("Performance"), *EditorPerfStats.MapName); //Get the map load time (in seconds) from the text file that is created when the load map latent command is ran. EditorPerfStats.MapLoadTime = 0; FString MapLoadTimeFileLocation = FPaths::Combine(*DataFileLocation, TEXT("RAWMapLoadTime.txt")); if (FPaths::FileExists(*MapLoadTimeFileLocation)) { TArray<FString> SavedMapLoadTimes; FAutomationEditorCommonUtils::CreateArrayFromFile(MapLoadTimeFileLocation, SavedMapLoadTimes); EditorPerfStats.MapLoadTime = FCString::Atof(*SavedMapLoadTimes.Last()); } //Filename for the RAW csv which holds the data gathered from a single test ran. FString RAWCSVFilePath = FString::Printf(TEXT("%s/RAW_%s_%s.csv"), *DataFileLocation, *EditorPerfStats.MapName, *FDateTime::Now().ToString()); //Filename for the pretty csv file. FString PerfCSVFilePath = FString::Printf(TEXT("%s/%s_Performance.csv"), *DataFileLocation, *EditorPerfStats.MapName); //Create the raw csv and then add the title row it. FArchive* RAWCSVArchive = IFileManager::Get().CreateFileWriter(*RAWCSVFilePath); FString RAWCSVLine = (TEXT("Map Name, Changelist, Time Stamp, Map Load Time, Average FPS, Frame Time, Used Physical Memory, Used Virtual Memory, Used Peak Physical, Used Peak Virtual, Available Physical Memory, Available Virtual Memory\n")); RAWCSVArchive->Serialize(TCHAR_TO_ANSI(*RAWCSVLine), RAWCSVLine.Len()); //Dump the stats from each run to the raw csv file and then close it. for (int32 i = 0; i < EditorPerfStats.TimeStamp.Num(); i++) { //If the raw file isn't available to write to then we'll fail back this test. if ( FAutomationEditorCommonUtils::IsArchiveWriteable(RAWCSVFilePath, RAWCSVArchive)) { RAWCSVLine = FString::Printf(TEXT("%s,%s,%s,%.3f,%.1f,%.1f,%.0f,%.0f,%.0f,%.0f,%.0f,%.0f%s"), *EditorPerfStats.MapName, *FEngineVersion::Current().ToString(EVersionComponent::Changelist), *EditorPerfStats.FormattedTimeStamp[i], EditorPerfStats.MapLoadTime, EditorPerfStats.AverageFPS[i], EditorPerfStats.AverageFrameTime[i], EditorPerfStats.UsedPhysical[i], EditorPerfStats.UsedVirtual[i], EditorPerfStats.PeakUsedPhysical[i], EditorPerfStats.PeakUsedVirtual[i], EditorPerfStats.AvailablePhysical[i], EditorPerfStats.AvailableVirtual[i], LINE_TERMINATOR); RAWCSVArchive->Serialize(TCHAR_TO_ANSI(*RAWCSVLine), RAWCSVLine.Len()); } } RAWCSVArchive->Close(); //Get the final pretty data for the Performance csv file. 
float AverageFPS = FAutomationEditorCommonUtils::TotalFromFloatArray(EditorPerfStats.AverageFPS, true); float AverageFrameTime = FAutomationEditorCommonUtils::TotalFromFloatArray(EditorPerfStats.AverageFrameTime, true); float MemoryUsedPhysical = FAutomationEditorCommonUtils::TotalFromFloatArray(EditorPerfStats.UsedPhysical, true); float MemoryAvailPhysAvg = FAutomationEditorCommonUtils::TotalFromFloatArray(EditorPerfStats.AvailablePhysical, true); float MemoryAvailVirtualAvg = FAutomationEditorCommonUtils::TotalFromFloatArray(EditorPerfStats.AvailableVirtual, true); float MemoryUsedVirtualAvg = FAutomationEditorCommonUtils::TotalFromFloatArray(EditorPerfStats.UsedVirtual, true); float MemoryUsedPeak = FAutomationEditorCommonUtils::LargestValueInFloatArray(EditorPerfStats.PeakUsedPhysical); float MemoryUsedPeakVirtual = FAutomationEditorCommonUtils::LargestValueInFloatArray(EditorPerfStats.PeakUsedVirtual); //TestRunDuration is the length of time the test lasted in ticks. FTimespan TestRunDuration = (EditorPerfStats.TimeStamp.Last().GetTicks() - EditorPerfStats.TimeStamp[0].GetTicks()) + ETimespan::TicksPerSecond; //The performance csv file will be created if it didn't exist prior to the start of this test. if (!FPaths::FileExists(*PerfCSVFilePath)) { FArchive* FinalCSVArchive = IFileManager::Get().CreateFileWriter(*PerfCSVFilePath); if ( FAutomationEditorCommonUtils::IsArchiveWriteable(PerfCSVFilePath, FinalCSVArchive)) { FString FinalCSVLine = (TEXT("Date, Map Name, Changelist, Test Run Time , Map Load Time, Average FPS, Average MS, Used Physical KB, Used Virtual KB, Used Peak Physcial KB, Used Peak Virtual KB, Available Physical KB, Available Virtual KB\n")); FinalCSVArchive->Serialize(TCHAR_TO_ANSI(*FinalCSVLine), FinalCSVLine.Len()); FinalCSVArchive->Close(); } } //Load the existing performance csv so that it doesn't get saved over and lost. FString OldPerformanceCSVFile; FFileHelper::LoadFileToString(OldPerformanceCSVFile, *PerfCSVFilePath); FArchive* FinalCSVArchive = IFileManager::Get().CreateFileWriter(*PerfCSVFilePath); if ( FAutomationEditorCommonUtils::IsArchiveWriteable(PerfCSVFilePath, FinalCSVArchive)) { //Dump the old performance csv file data to the new csv file. FinalCSVArchive->Serialize(TCHAR_TO_ANSI(*OldPerformanceCSVFile), OldPerformanceCSVFile.Len()); //Dump the pretty stats to the Performance CSV file and then close it so we can edit it while the engine is still running. FString FinalCSVLine = FString::Printf(TEXT("%s,%s,%s,%.0f,%.3f,%.1f,%.1f,%.0f,%.0f,%.0f,%.0f,%.0f,%.0f%s"), *FDateTime::Now().ToString(), *EditorPerfStats.MapName, *FEngineVersion::Current().ToString(EVersionComponent::Changelist), TestRunDuration.GetTotalSeconds(), EditorPerfStats.MapLoadTime, AverageFPS, AverageFrameTime, MemoryUsedPhysical, MemoryUsedVirtualAvg, MemoryUsedPeak, MemoryUsedPeakVirtual, MemoryAvailPhysAvg, MemoryAvailVirtualAvg, LINE_TERMINATOR); FinalCSVArchive->Serialize(TCHAR_TO_ANSI(*FinalCSVLine), FinalCSVLine.Len()); FinalCSVArchive->Close(); } //Display the test results to the user. 
UE_LOG(LogEditorAutomationTests, Display, TEXT("AVG FPS: '%.1f'"), AverageFPS); UE_LOG(LogEditorAutomationTests, Display, TEXT("AVG Frame Time: '%.1f' ms"), AverageFrameTime); UE_LOG(LogEditorAutomationTests, Display, TEXT("AVG Used Physical Memory: '%.0f' kb"), MemoryUsedPhysical); UE_LOG(LogEditorAutomationTests, Display, TEXT("AVG Used Virtual Memory: '%.0f' kb"), MemoryUsedVirtualAvg); UE_LOG(LogEditorAutomationTests, Display, TEXT("Performance csv file is located here: %s"), *FPaths::ConvertRelativePathToFull(PerfCSVFilePath)); UE_LOG(LogEditorAutomationTests, Log, TEXT("Performance csv file is located here: %s"), *FPaths::ConvertRelativePathToFull(PerfCSVFilePath)); UE_LOG(LogEditorAutomationTests, Log, TEXT("Raw performance csv file is located here: %s"), *FPaths::ConvertRelativePathToFull(RAWCSVFilePath)); }
void USocketIOClientComponent::SetupCallbacks() { //Sync current connected state bIsConnected = NativeClient->bIsConnected; if (bIsConnected) { SessionId = NativeClient->SessionId; AddressAndPort = NativeClient->AddressAndPort; } NativeClient->OnConnectedCallback = [this](const FString& InSessionId) { FLambdaRunnable::RunShortLambdaOnGameThread([this, InSessionId] { if (this) { bIsConnected = true; SessionId = InSessionId; OnConnected.Broadcast(SessionId, bIsHavingConnectionProblems); bIsHavingConnectionProblems = false; } }); }; const FSIOCCloseEventSignature OnDisconnectedSafe = OnDisconnected; NativeClient->OnDisconnectedCallback = [OnDisconnectedSafe, this](const ESIOConnectionCloseReason Reason) { FLambdaRunnable::RunShortLambdaOnGameThread([OnDisconnectedSafe, this, Reason] { if (this && OnDisconnectedSafe.IsBound()) { bIsConnected = false; OnDisconnectedSafe.Broadcast(Reason); } }); }; NativeClient->OnNamespaceConnectedCallback = [this](const FString& Namespace) { FLambdaRunnable::RunShortLambdaOnGameThread([this, Namespace] { if (this && OnSocketNamespaceConnected.IsBound()) { OnSocketNamespaceConnected.Broadcast(Namespace); } }); }; const FSIOCSocketEventSignature OnSocketNamespaceDisconnectedSafe = OnSocketNamespaceDisconnected; NativeClient->OnNamespaceDisconnectedCallback = [this, OnSocketNamespaceDisconnectedSafe](const FString& Namespace) { FLambdaRunnable::RunShortLambdaOnGameThread([OnSocketNamespaceDisconnectedSafe, this, Namespace] { if (this && OnSocketNamespaceDisconnectedSafe.IsBound()) { OnSocketNamespaceDisconnectedSafe.Broadcast(Namespace); } }); }; NativeClient->OnReconnectionCallback = [this](const uint32 AttemptCount, const uint32 DelayInMs) { FLambdaRunnable::RunShortLambdaOnGameThread([this, AttemptCount, DelayInMs] { //First time we know about this problem? if (!bIsHavingConnectionProblems) { TimeWhenConnectionProblemsStarted = FDateTime::Now(); bIsHavingConnectionProblems = true; } FTimespan Difference = FDateTime::Now() - TimeWhenConnectionProblemsStarted; float ElapsedInSec = Difference.GetTotalSeconds(); if (ReconnectionTimeout > 0 && ElapsedInSec>ReconnectionTimeout) { //Let's stop trying and disconnect if we're using timeouts Disconnect(); } if (this && OnConnectionProblems.IsBound()) { OnConnectionProblems.Broadcast(AttemptCount, DelayInMs, ElapsedInSec); } }); }; NativeClient->OnFailCallback = [this]() { FLambdaRunnable::RunShortLambdaOnGameThread([this] { OnFail.Broadcast(); }); }; }
UUpdateManager::EUpdateStartResult UUpdateManager::StartCheckInternal(bool bInCheckHotfixOnly)
{
	EUpdateStartResult Result = EUpdateStartResult::None;

	if (!ChecksEnabled())
	{
		UE_LOG(LogHotfixManager, Display, TEXT("Update checks disabled!"));
		bInitialUpdateFinished = true;

		auto StartDelegate = [this]()
		{
			CheckComplete(EUpdateCompletionStatus::UpdateSuccess_NoChange);
		};

		DelayResponse(StartDelegate, 0.1f);
		return Result;
	}

	if (CurrentUpdateState == EUpdateState::UpdateIdle ||
		CurrentUpdateState == EUpdateState::UpdatePending ||
		CurrentUpdateState == EUpdateState::UpdateComplete)
	{
		bCheckHotfixAvailabilityOnly = bInCheckHotfixOnly;

		// Immediately move into a pending state so the UI state trigger fires
		SetUpdateState(EUpdateState::UpdatePending);

		const EUpdateCompletionStatus LastResult = LastCompletionResult[bCheckHotfixAvailabilityOnly];
		const FTimespan DeltaTime = FDateTime::UtcNow() - LastUpdateCheck[bCheckHotfixAvailabilityOnly];

		const bool bForceCheck = LastResult == EUpdateCompletionStatus::UpdateUnknown ||
								 LastResult == EUpdateCompletionStatus::UpdateFailure_PatchCheck ||
								 LastResult == EUpdateCompletionStatus::UpdateFailure_HotfixCheck ||
								 LastResult == EUpdateCompletionStatus::UpdateFailure_NotLoggedIn;

		static double CacheTimer = UPDATE_CHECK_SECONDS;
		const double TimeSinceCheck = DeltaTime.GetTotalSeconds();
		if (bForceCheck || TimeSinceCheck >= CacheTimer)
		{
			auto StartDelegate = [this]()
			{
				// Check for a patch first, then hotfix application
				StartPatchCheck();
			};

			// Give the UI state widget a chance to start listening for delegates
			DelayResponse(StartDelegate, 0.2f);
			Result = EUpdateStartResult::UpdateStarted;
		}
		else
		{
			UE_LOG(LogHotfixManager, Display, TEXT("Returning cached update result %d"), (int32)LastResult);
			auto StartDelegate = [this, LastResult]()
			{
				CheckComplete(LastResult, false);
			};

			DelayResponse(StartDelegate, 0.1f);
			Result = EUpdateStartResult::UpdateCached;
		}
	}
	else
	{
		UE_LOG(LogHotfixManager, Display, TEXT("Update already in progress"));
	}

	return Result;
}
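// The general pattern used above, reduced to a sketch (FCachedCheck, LastCheckUtc, and
// CacheLifetimeSeconds are hypothetical names): remember when the last check ran and only re-run
// the expensive work once the elapsed FTimespan exceeds the cache lifetime.
#include "Misc/DateTime.h"
#include "Misc/Timespan.h"

struct FCachedCheck
{
	FDateTime LastCheckUtc = FDateTime::MinValue();
	double CacheLifetimeSeconds = 60.0 * 15.0;

	bool ShouldRunCheck() const
	{
		// MinValue() means we have never checked, which yields a huge elapsed time and forces a check.
		const FTimespan SinceLast = FDateTime::UtcNow() - LastCheckUtc;
		return SinceLast.GetTotalSeconds() >= CacheLifetimeSeconds;
	}

	void MarkChecked()
	{
		LastCheckUtc = FDateTime::UtcNow();
	}
};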
void FNetworkPlatformFile::InitializeAfterSetActive() { double NetworkFileStartupTime = 0.0; { SCOPE_SECONDS_COUNTER(NetworkFileStartupTime); // send the filenames and timestamps to the server FNetworkFileArchive Payload(NFS_Messages::GetFileList); FillGetFileList(Payload, false); // send the directories over, and wait for a response FArrayReader Response; if (!SendPayloadAndReceiveResponse(Payload, Response)) { delete Transport; return; } else { // receive the cooked version information int32 ServerPackageVersion = 0; int32 ServerPackageLicenseeVersion = 0; ProcessServerInitialResponse(Response, ServerPackageVersion, ServerPackageLicenseeVersion); // receive a list of the cache files and their timestamps TMap<FString, FDateTime> ServerCachedFiles; Response << ServerCachedFiles; bool bDeleteAllFiles = true; // Check the stored cooked version FString CookedVersionFile = FPaths::GeneratedConfigDir() / TEXT("CookedVersion.txt"); if (InnerPlatformFile->FileExists(*CookedVersionFile) == true) { IFileHandle* FileHandle = InnerPlatformFile->OpenRead(*CookedVersionFile); if (FileHandle != NULL) { int32 StoredPackageCookedVersion; int32 StoredPackageCookedLicenseeVersion; if (FileHandle->Read((uint8*)&StoredPackageCookedVersion, sizeof(int32)) == true) { if (FileHandle->Read((uint8*)&StoredPackageCookedLicenseeVersion, sizeof(int32)) == true) { if ((ServerPackageVersion == StoredPackageCookedVersion) && (ServerPackageLicenseeVersion == StoredPackageCookedLicenseeVersion)) { bDeleteAllFiles = false; } else { UE_LOG(LogNetworkPlatformFile, Display, TEXT("Engine version mismatch: Server %d.%d, Stored %d.%d\n"), ServerPackageVersion, ServerPackageLicenseeVersion, StoredPackageCookedVersion, StoredPackageCookedLicenseeVersion); } } } delete FileHandle; } } else { UE_LOG(LogNetworkPlatformFile, Display, TEXT("Cooked version file missing: %s\n"), *CookedVersionFile); } if (bDeleteAllFiles == true) { // Make sure the config file exists... InnerPlatformFile->CreateDirectoryTree(*(FPaths::GeneratedConfigDir())); // Update the cooked version file IFileHandle* FileHandle = InnerPlatformFile->OpenWrite(*CookedVersionFile); if (FileHandle != NULL) { FileHandle->Write((const uint8*)&ServerPackageVersion, sizeof(int32)); FileHandle->Write((const uint8*)&ServerPackageLicenseeVersion, sizeof(int32)); delete FileHandle; } } // list of directories to skip TArray<FString> DirectoriesToSkip; TArray<FString> DirectoriesToNotRecurse; // use the timestamp grabbing visitor to get all the content times FLocalTimestampDirectoryVisitor Visitor(*InnerPlatformFile, DirectoriesToSkip, DirectoriesToNotRecurse, false); TArray<FString> RootContentPaths; FPackageName::QueryRootContentPaths( RootContentPaths ); for( TArray<FString>::TConstIterator RootPathIt( RootContentPaths ); RootPathIt; ++RootPathIt ) { const FString& RootPath = *RootPathIt; const FString& ContentFolder = FPackageName::LongPackageNameToFilename(RootPath); InnerPlatformFile->IterateDirectory( *ContentFolder, Visitor); } // delete out of date files using the server cached files for (TMap<FString, FDateTime>::TIterator It(ServerCachedFiles); It; ++It) { bool bDeleteFile = bDeleteAllFiles; FString ServerFile = It.Key(); // Convert the filename to the client version ConvertServerFilenameToClientFilename(ServerFile); // Set it in the visitor file times list Visitor.FileTimes.Add(ServerFile, FDateTime::MinValue()); if (bDeleteFile == false) { // Check the time stamps... 
// get local time FDateTime LocalTime = InnerPlatformFile->GetTimeStamp(*ServerFile); // If local time == MinValue than the file does not exist in the cache. if (LocalTime != FDateTime::MinValue()) { FDateTime ServerTime = It.Value(); // delete if out of date // We will use 1.0 second as the tolerance to cover any platform differences in resolution FTimespan TimeDiff = LocalTime - ServerTime; double TimeDiffInSeconds = TimeDiff.GetTotalSeconds(); bDeleteFile = (TimeDiffInSeconds > 1.0) || (TimeDiffInSeconds < -1.0); if (bDeleteFile == true) { if (InnerPlatformFile->FileExists(*ServerFile) == true) { UE_LOG(LogNetworkPlatformFile, Display, TEXT("Deleting cached file: TimeDiff %5.3f, %s"), TimeDiffInSeconds, *It.Key()); } else { // It's a directory bDeleteFile = false; } } } } if (bDeleteFile == true) { InnerPlatformFile->DeleteFile(*ServerFile); } } // Any content files we have locally that were not cached, delete them for (TMap<FString, FDateTime>::TIterator It(Visitor.FileTimes); It; ++It) { if (It.Value() != FDateTime::MinValue()) { // This was *not* found in the server file list... delete it UE_LOG(LogNetworkPlatformFile, Display, TEXT("Deleting cached file: %s"), *It.Key()); InnerPlatformFile->DeleteFile(*It.Key()); } } // make sure we can sync a file FString TestSyncFile = FPaths::Combine(*(FPaths::EngineDir()), TEXT("Config/BaseEngine.ini")); InnerPlatformFile->SetReadOnly(*TestSyncFile, false); InnerPlatformFile->DeleteFile(*TestSyncFile); if (InnerPlatformFile->FileExists(*TestSyncFile)) { UE_LOG(LogNetworkPlatformFile, Fatal, TEXT("Could not delete file sync test file %s."), *TestSyncFile); } EnsureFileIsLocal(TestSyncFile); if (!InnerPlatformFile->FileExists(*TestSyncFile) || InnerPlatformFile->FileSize(*TestSyncFile) < 1) { UE_LOG(LogNetworkPlatformFile, Fatal, TEXT("Could not sync test file %s."), *TestSyncFile); } } } FPlatformMisc::LowLevelOutputDebugStringf(TEXT("Network file startup time: %5.3f seconds\n"), NetworkFileStartupTime); }
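// The tolerance comparison above, isolated as a sketch (AreTimestampsEquivalent is a hypothetical
// helper): two file timestamps are treated as equal if they differ by at most one second, which
// absorbs differences in filesystem timestamp resolution between the server and the client.
#include "Math/UnrealMathUtility.h"
#include "Misc/DateTime.h"
#include "Misc/Timespan.h"

static bool AreTimestampsEquivalent(const FDateTime& LocalTime, const FDateTime& ServerTime, double ToleranceSeconds = 1.0)
{
	const FTimespan TimeDiff = LocalTime - ServerTime;
	return FMath::Abs(TimeDiff.GetTotalSeconds()) <= ToleranceSeconds;
}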
void UHTNPlannerComponent::ProcessExecutionRequest() { bRequestedExecutionUpdate = false; if(!IsRegistered()) { // it shouldn't be called, component is no longer valid return; } if(bIsPaused) { UE_VLOG(GetOwner(), LogHTNPlanner, Verbose, TEXT("Ignoring ProcessExecutionRequest call due to HTNPlannerComponent still being paused")); return; } //GEngine->AddOnScreenDebugMessage(-1, 1.5f, FColor::Yellow, TEXT("UHTNPlannerComponent::ProcessExecutionRequest()")); if(PendingExecution.IsValid()) { ProcessPendingExecution(); return; } if(NumStackElements == 0) { //BestPlan = nullptr; if(CurrentPlannerAsset->bLoop) { // finished execution of plan and we want to loop, so re-start RestartPlanner(); } else { bIsRunning = false; } return; } #if HTN_LOG_RUNTIME_STATS if(StartPlanningTime == FDateTime::MinValue()) { StartPlanningTime = FDateTime::UtcNow(); } #endif // HTN_LOG_RUNTIME_STATS FDateTime PlanningStart = FDateTime::UtcNow(); while(NumStackElements > 0) { // our stack is not empty FTimespan TimePlanned = FDateTime::UtcNow() - PlanningStart; if(TimePlanned.GetTotalSeconds() >= CurrentPlannerAsset->MaxSearchTime) { // we've spent our maximum allowed search time for this tick on planning, so need to continue some other time ScheduleExecutionUpdate(); #if HTN_LOG_RUNTIME_STATS CumulativeSearchTimeMs += TimePlanned.GetTotalMilliseconds(); ++CumulativeFrameCount; #endif return; } #if HTN_LOG_RUNTIME_STATS ++NumNodesExpanded; #endif if(PreviousPlan.IsValid()) { UE_LOG(LogHTNPlanner, Warning, TEXT("%d nodes in data structure(s)"), NumStackElements); } if(bProbabilisticPlanReuse && bHitLeaf) { // we've hit a leaf node, so it's time to re-evaluate whether we're ignoring plan reuse probabilistically bHitLeaf = false; if(NumStackElements == PlanningStack.Num()) { bIgnoringPlanReuse = false; // not ignoring plan reuse if everything's still in the non-prioritized stack } else { bIgnoringPlanReuse = (FMath::FRand() <= ProbabilityIgnorePlanReuse); } } const FHTNStackElement StackTop = PopStackElement(); if(StackTop.Cost + Cast<UTaskNetwork>(StackTop.TaskNetwork->Task)-> GetHeuristicCost(StackTop.WorldState, StackTop.TaskNetwork->GetMemory()) >= BestCost) { if(!bDepthFirstSearch) { // everything remaining in the heap will be at least as bad, and maybe worse PlanningStack.Empty(); NumStackElements = 0; } if(PreviousPlan.IsValid()) // we're doing plan reuse { // verify that all of our values of maximum streak lengths among unprocessed nodes are still correct UpdateMaxStreakLengths(); } continue; // we won't find any improvements down this path } UTaskNetwork* TopNetwork = Cast<UTaskNetwork>(StackTop.TaskNetwork->Task); if(TopNetwork->IsEmpty(StackTop.TaskNetwork->GetMemory())) { // we've found a path leading to a legal, complete Plan #if HTN_LOG_RUNTIME_STATS CumulativeSearchTimeMsTillLastImprovement = CumulativeSearchTimeMs + (FDateTime::UtcNow() - PlanningStart).GetTotalMilliseconds(); if(BestCost == TNumericLimits<float>::Max()) // this means that this is the first time we find a valid plan { CumulativeSearchTimeMsTillFirstPlan = CumulativeSearchTimeMsTillLastImprovement; FirstPlanCost = StackTop.Cost; } #endif BestPlan = StackTop.Plan; BestPlan->SetComplete(true); BestCost = StackTop.Cost; #if HTN_LOG_RUNTIME_STATS DataCollector->FoundSolution(BestCost, StackTop.Plan->GetPlanSize(), StackTop.Plan->GetSearchHistory().Num(), CumulativeSearchTimeMsTillLastImprovement, NumNodesExpanded); #endif if(CurrentPlannerAsset->bIgnoreTaskCosts) { // the HTN Planner doesn't care about finding optimal plans, only about 
finding the first one PlanningStack.Empty(); NumStackElements = 0; } else if(!bDepthFirstSearch) { // best-first search finds an optimal solution as first solution PlanningStack.Empty(); NumStackElements = 0; } if(PreviousPlan.IsValid()) // we're doing plan reuse { if(bProbabilisticPlanReuse) { bHitLeaf = true; } // verify that all of our values of maximum streak lengths among unprocessed nodes are still correct UpdateMaxStreakLengths(); } continue; } // find all tasks that share the highest priority amongst the tasks in the task network TArray<TSharedPtr<FHTNTaskInstance>> TaskInstances = TopNetwork->FindTasksWithoutPredecessors(StackTop.TaskNetwork->GetMemory()); for(const TSharedPtr<FHTNTaskInstance>& TaskInstance : TaskInstances) { UHTNTask* Task = TaskInstance->Task; if(UPrimitiveTask* PrimitiveTask = Cast<UPrimitiveTask>(Task)) { if(PrimitiveTask->IsApplicable(StackTop.WorldState, TaskInstance->GetMemory())) { // prepare a new element for the stack where we'll have applied this primitive task FHTNStackElement NewStackElement; NewStackElement.Cost = FMath::Max(0.f, StackTop.Cost) + PrimitiveTask->GetCost(StackTop.WorldState, TaskInstance->GetMemory()); TSharedPtr<FHTNPlan> Plan = StackTop.Plan->Copy(); Plan->AppendTaskInstance(TaskInstance); Plan->AppendSearchHistory(TaskInstance); TSharedPtr<FHTNTaskInstance> TaskNetwork = TopNetwork->Copy(StackTop.TaskNetwork); Cast<UTaskNetwork>(TaskNetwork->Task)->Remove(TaskInstance, TaskNetwork->GetMemory()); TSharedPtr<FHTNWorldState> WorldState = StackTop.WorldState->Copy(); PrimitiveTask->ApplyTo(WorldState, TaskInstance->GetMemory()); NewStackElement.Plan = Plan; NewStackElement.TaskNetwork = TaskNetwork; NewStackElement.WorldState = WorldState; if(PreviousPlan.IsValid()) { // we're doing plan reuse CurrentMatchingStreakLength = Plan->GetMatchingStreak(PreviousPlan, MinMatchingStreakLength); } AddStackElement(NewStackElement); // TO DO maybe should explicitly Move the NewStackElement? 
} else if(PreviousPlan.IsValid()) { if(bProbabilisticPlanReuse) { bHitLeaf = true; } } } else if(UCompoundTask* CompoundTask = Cast<UCompoundTask>(Task)) { TArray<TSharedPtr<FHTNTaskInstance>> Decompositions = CompoundTask->FindDecompositions(*this, StackTop.WorldState, TaskInstance->GetMemory()); // regardless of which decomposition we pick, effect on plan will be the same TSharedPtr<FHTNPlan> Plan = StackTop.Plan->Copy(); Plan->AppendSearchHistory(TaskInstance); if(PreviousPlan.IsValid()) { // we're doing plan reuse if(Decompositions.Num() == 0) // leaf node { if(bProbabilisticPlanReuse) { bHitLeaf = true; } } CurrentMatchingStreakLength = Plan->GetMatchingStreak(PreviousPlan, MinMatchingStreakLength); TArray<FHTNStackElement> NewStackElements; TArray<FHTNStackElement> NewFifoElements; for(int32 Idx = 0; Idx < Decompositions.Num(); ++Idx) { const TSharedPtr<FHTNTaskInstance>& Decomposition = Decompositions[Idx]; // prepare a new element where we'll have decomposed this compound task FHTNStackElement NewStackElement; NewStackElement.WorldState = StackTop.WorldState; NewStackElement.Cost = StackTop.Cost; TSharedPtr<FHTNTaskInstance> TaskNetwork = TopNetwork->Copy(StackTop.TaskNetwork); Cast<UTaskNetwork>(TaskNetwork->Task)->Replace(TaskInstance, Decomposition, TaskNetwork->GetMemory()); NewStackElement.Plan = Plan->Copy(); NewStackElement.TaskNetwork = TaskNetwork; if(bProbabilisticPlanReuse && bIgnoringPlanReuse) { // probabilistically ignoring plan reuse, so no need to waste time computing streak lengths NewStackElements.Push(NewStackElement); } else { if(MaxCurrentMatchingStreakLength > 0 && CurrentMatchingStreakLength == 0) { // this element belongs in a FIFO queue NewFifoElements.Push(NewStackElement); } else { // this element belongs in a stack NewStackElements.Push(NewStackElement); } } } // first we'll add all the elements that belong in FIFO queues to their FIFO queues (in given order) for(int32 Idx = 0; Idx < NewFifoElements.Num(); ++Idx) { AddStackElement(NewFifoElements[Idx]); } // now we'll add all the elements that belong in some stack to those stacks (reverse order) while(NewStackElements.Num() > 0) { AddStackElement(NewStackElements.Pop()); } } else { // we're not doing plan reuse // looping through Decompositions in reverse order so that they'll be popped off stack in correct order again for(int32 Idx = Decompositions.Num() - 1; Idx >= 0; --Idx) { const TSharedPtr<FHTNTaskInstance>& Decomposition = Decompositions[Idx]; // prepare a new element for the stack where we'll have decomposed this compound task FHTNStackElement NewStackElement; NewStackElement.WorldState = StackTop.WorldState; NewStackElement.Cost = StackTop.Cost; TSharedPtr<FHTNTaskInstance> TaskNetwork = TopNetwork->Copy(StackTop.TaskNetwork); Cast<UTaskNetwork>(TaskNetwork->Task)->Replace(TaskInstance, Decomposition, TaskNetwork->GetMemory()); NewStackElement.Plan = Plan->Copy(); NewStackElement.TaskNetwork = TaskNetwork; AddStackElement(NewStackElement); // TO DO maybe should explicitly Move the NewStackElement? 
} } } else { UE_LOG(LogHTNPlanner, Error, TEXT("UHTNPlannerComponent::ProcessExecutionRequest() encountered a Task that was neither Primitive nor Compound!")) } } if(PreviousPlan.IsValid()) // we're doing plan reuse { // verify that all of our values of maximum streak lengths among unprocessed nodes are still correct UpdateMaxStreakLengths(); } } if(BestPlan.IsValid()) { #if HTN_LOG_RUNTIME_STATS CumulativeSearchTimeMs += (FDateTime::UtcNow() - PlanningStart).GetTotalMilliseconds(); ++CumulativeFrameCount; CumulativeSearchTimespan = FDateTime::UtcNow() - StartPlanningTime; // print runtime stats //UE_LOG(LogHTNPlanner, Warning, TEXT("Cumulative Search Timespan = %.2f ms"), CumulativeSearchTimespan.GetTotalMilliseconds()); UE_LOG(LogHTNPlanner, Warning, TEXT("Cumulative Search Time = %.2f ms"), CumulativeSearchTimeMs); UE_LOG(LogHTNPlanner, Warning, TEXT("Cumulative Search Time Till Last Improvement = %.2f ms"), CumulativeSearchTimeMsTillLastImprovement); UE_LOG(LogHTNPlanner, Warning, TEXT("Cumulative Search Time First Plan = %.2f ms"), CumulativeSearchTimeMsTillFirstPlan); //UE_LOG(LogHTNPlanner, Warning, TEXT("Cumulative Frame Count = %d frames"), CumulativeFrameCount); UE_LOG(LogHTNPlanner, Warning, TEXT("Num Nodes Expanded = %d"), NumNodesExpanded); UE_LOG(LogHTNPlanner, Warning, TEXT("Cost of first plan found = %.2f"), FirstPlanCost); if(PreviousPlan.IsValid()) { UE_LOG(LogHTNPlanner, Warning, TEXT("Longest matching streak = %d"), BestPlan->GetLongestMatchingStreak(PreviousPlan, MinMatchingStreakLength)); } DataCollector->OptimalityProven(CumulativeSearchTimeMs, NumNodesExpanded); #endif // we have a complete plan, so we'll want to execute the next task in the plan UE_LOG(LogHTNPlanner, Warning, TEXT("Found Plan with size = %d, cost = %.2f, search history size = %d!"), BestPlan->GetPlanSize(), BestCost, BestPlan->GetSearchHistory().Num()); if(CurrentPlannerAsset->bExecutePlan) { PendingExecution = BestPlan->GetTaskInstanceToExecute(); ProcessPendingExecution(); } else { bIsRunning = false; } } }
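// The per-tick time budget used by the planner above, reduced to a sketch (RunBudgetedWork and
// MaxSearchTimeSeconds are hypothetical): measure elapsed wall-clock time with FDateTime::UtcNow()
// and stop expanding work once the budget for this tick is spent.
#include "Misc/DateTime.h"
#include "Misc/Timespan.h"

template <typename WorkFn>
void RunBudgetedWork(double MaxSearchTimeSeconds, WorkFn DoOneUnitOfWork)
{
	const FDateTime TickStart = FDateTime::UtcNow();
	while (true)
	{
		const FTimespan Elapsed = FDateTime::UtcNow() - TickStart;
		if (Elapsed.GetTotalSeconds() >= MaxSearchTimeSeconds)
		{
			break; // budget spent; resume on a later tick
		}
		if (!DoOneUnitOfWork())
		{
			break; // no work left
		}
	}
}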
bool FHTTPTransport::SendPayloadAndReceiveResponse(TArray<uint8>& In, TArray<uint8>& Out)
{
	RecieveBuffer.Empty();
	ReadPtr = 0;

#if !PLATFORM_HTML5
	if (GIsRequestingExit) // We have already lost HTTP Module.
		return false;

	class HTTPRequestHandler
	{
	public:
		HTTPRequestHandler(TArray<uint8>& InOut)
			: Out(InOut)
		{}

		void HttpRequestComplete(FHttpRequestPtr HttpRequest, FHttpResponsePtr HttpResponse, bool bSucceeded)
		{
			if (HttpResponse.IsValid())
				Out.Append(HttpResponse->GetContent());
		}

	private:
		TArray<uint8>& Out;
	};

	HTTPRequestHandler Handler(RecieveBuffer);

	HttpRequest->OnProcessRequestComplete().BindRaw(&Handler, &HTTPRequestHandler::HttpRequestComplete);

	if (In.Num())
	{
		HttpRequest->SetVerb("POST");

		FBufferArchive Ar;
		Ar << Guid;
		Ar.Append(In);
		HttpRequest->SetContent(Ar);
	}
	else
	{
		HttpRequest->SetVerb("GET");
	}

	HttpRequest->ProcessRequest();

	// Poll the request until it finishes or the 10 second timeout elapses.
	// StartTime must be captured before the loop; a default-constructed FDateTime would make the elapsed span meaningless.
	FDateTime StartTime = FDateTime::UtcNow();
	FTimespan Span = FDateTime::UtcNow() - StartTime;

	while (HttpRequest->GetStatus() != EHttpRequestStatus::Failed &&
		   HttpRequest->GetStatus() != EHttpRequestStatus::Succeeded &&
		   Span.GetTotalSeconds() < 10)
	{
		HttpRequest->Tick(0);
		Span = FDateTime::UtcNow() - StartTime;
	}

	if (HttpRequest->GetStatus() == EHttpRequestStatus::Succeeded)
		return true;

	HttpRequest->CancelRequest();

	return false;
#else // PLATFORM_HTML5

	FBufferArchive Ar;
	if (In.Num())
	{
		Ar << Guid;
	}
	Ar.Append(In);

	unsigned char* OutData = NULL;
	unsigned int OutSize = 0;

	bool RetVal = true;
#if PLATFORM_HTML5_WIN32
	RetVal = HTML5Win32::NFSHttp::SendPayLoadAndRecieve(Ar.GetData(), Ar.Num(), &OutData, OutSize);
#endif
#if PLATFORM_HTML5_BROWSER
	UE_SendAndRecievePayLoad(TCHAR_TO_ANSI(Url), (char*)Ar.GetData(), Ar.Num(), (char**)&OutData, (int*)&OutSize);
#endif

	if (!Ar.Num())
	{
		uint32 Size = OutSize;
		uint32 Marker = 0xDeadBeef;
		RecieveBuffer.Append((uint8*)&Marker, sizeof(uint32));
		RecieveBuffer.Append((uint8*)&Size, sizeof(uint32));
	}

	if (OutSize)
	{
		RecieveBuffer.Append(OutData, OutSize);
#if PLATFORM_HTML5_WIN32
		free(OutData);
#endif
#if PLATFORM_HTML5_BROWSER
		// don't go through the Unreal Memory system.
		::free(OutData);
#endif
	}

	return RetVal & ReceiveResponse(Out);
#endif
}