Example #1
void FSlateCrashReportResource::InitDynamicRHI()
{
	FRHIResourceCreateInfo CreateInfo;
	// Render target sized to the virtual screen
	CrashReportBuffer = RHICreateTexture2D(
		VirtualScreen.Width(),
		VirtualScreen.Height(),
		PF_R8G8B8A8,
		1,
		1,
		TexCreate_RenderTargetable,
		CreateInfo
		);

	// Two CPU-readback textures so one can be read while the other is written (double-buffered)
	for (int32 i = 0; i < 2; ++i)
	{
		ReadbackBuffer[i] = RHICreateTexture2D(
			VirtualScreen.Width(),
			VirtualScreen.Height(),
			PF_R8G8B8A8,
			1,
			1,
			TexCreate_CPUReadback,
			CreateInfo
			);
	}
	
	ReadbackBufferIndex = 0;
}
Example #2
void FSlateCrashReportResource::InitDynamicRHI()
{
	CrashReportBuffer = RHICreateTexture2D(
		VirtualScreen.Width() * CrashTrackerConstants::ScreenScaling,
		VirtualScreen.Height() * CrashTrackerConstants::ScreenScaling,
		PF_R8G8B8A8,
		1,
		1,
		TexCreate_RenderTargetable,
		NULL
		);

	for (int32 i = 0; i < 2; ++i)
	{
		ReadbackBuffer[i] = RHICreateTexture2D(
			VirtualScreen.Width() * CrashTrackerConstants::ScreenScaling,
			VirtualScreen.Height() * CrashTrackerConstants::ScreenScaling,
			PF_R8G8B8A8,
			1,
			1,
			TexCreate_CPUReadback,
			NULL
			);
	}
	
	ReadbackBufferIndex = 0;
}
Example #3
/** Called when the resource is initialized. This is only called by the rendering thread. */
void FTexture2DDynamicResource::InitRHI()
{
	// Create the sampler state RHI resource.
	FSamplerStateInitializerRHI SamplerStateInitializer
	(
		GSystemSettings.TextureLODSettings.GetSamplerFilter( Owner ),
		AM_Wrap,
		AM_Wrap,
		AM_Wrap
	);
	SamplerStateRHI = RHICreateSamplerState( SamplerStateInitializer );

	uint32 Flags = 0;
	if ( Owner->bIsResolveTarget )
	{
		Flags |= TexCreate_ResolveTargetable;
		bIgnoreGammaConversions = true;		// Note, we're ignoring Owner->SRGB (it should be false).
	}
	else if ( Owner->SRGB )
	{
		Flags |= TexCreate_SRGB;
	}
	if ( Owner->bNoTiling )
	{
		Flags |= TexCreate_NoTiling;
	}
	FRHIResourceCreateInfo CreateInfo;
	Texture2DRHI = RHICreateTexture2D(GetSizeX(), GetSizeY(), Owner->Format, Owner->NumMips, 1, Flags, CreateInfo);
	TextureRHI = Texture2DRHI;
	RHIUpdateTextureReference(Owner->TextureReference.TextureReferenceRHI,TextureRHI);
}
Example #4
void FStaticShadowDepthMap::InitRHI()
{
	if (ShadowMapSizeX > 0 && ShadowMapSizeY > 0 && GMaxRHIFeatureLevel >= ERHIFeatureLevel::SM4)
	{
		FRHIResourceCreateInfo CreateInfo;
		FTexture2DRHIRef Texture2DRHI = RHICreateTexture2D(ShadowMapSizeX, ShadowMapSizeY, PF_R16F, 1, 1, 0, CreateInfo);
		TextureRHI = Texture2DRHI;

		uint32 DestStride = 0;
		uint8* TextureData = (uint8*)RHILockTexture2D(Texture2DRHI, 0, RLM_WriteOnly, DestStride, false);
		uint32 RowSize = ShadowMapSizeX * GPixelFormats[PF_R16F].BlockBytes;

		for (int32 Y = 0; Y < ShadowMapSizeY; Y++)
		{
			FMemory::Memcpy(TextureData + DestStride * Y, ((uint8*)DepthSamples.GetData()) + RowSize * Y, RowSize);
		}

		RHIUnlockTexture2D(Texture2DRHI, 0, false);
	}
}
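The row-by-row copy in the example above exists because the pitch returned by RHILockTexture2D (DestStride) can be larger than the tightly packed row size. As a rough, standalone sketch of that same stride-aware copy, with plain buffers standing in for the locked texture memory (not engine code):

#include <cstdint>
#include <cstring>

// Copy Height rows of RowSize bytes from a tightly packed source buffer into a
// destination whose rows are DestStride bytes apart (DestStride >= RowSize),
// which is the pattern InitRHI() above applies to the locked texture memory.
void CopyRowsWithStride(uint8_t* Dest, uint32_t DestStride,
                        const uint8_t* Src, uint32_t RowSize, uint32_t Height)
{
	for (uint32_t Y = 0; Y < Height; ++Y)
	{
		std::memcpy(Dest + DestStride * Y, Src + RowSize * Y, RowSize);
	}
}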
Example #5
FComputeShaderUsageExample::FComputeShaderUsageExample(float SimulationSpeed, int32 SizeX, int32 SizeY, ERHIFeatureLevel::Type ShaderFeatureLevel)
{
	FeatureLevel = ShaderFeatureLevel;

	ConstantParameters.SimulationSpeed = SimulationSpeed;

	VariableParameters = FComputeShaderVariableParameters();

	bIsComputeShaderExecuting = false;
	bIsUnloading = false;
	bSave = false;

	//There are only a few texture formats we can use if we want to use the output texture as input in a pixel shader later.
	//I would have preferred the R8G8B8A8_UNORM approach, but UE4 does not support it in an obvious way, so I chose a UINT format and pack the color into it instead.
	//There is some excellent information on this topic in the following links:
	//http://www.gamedev.net/topic/605356-r8g8b8a8-texture-format-in-compute-shader/
	//https://msdn.microsoft.com/en-us/library/ff728749(v=vs.85).aspx
	FRHIResourceCreateInfo CreateInfo;
	Texture = RHICreateTexture2D(SizeX, SizeY, PF_R32_UINT, 1, 1, TexCreate_ShaderResource | TexCreate_UAV, CreateInfo);
	TextureUAV = RHICreateUnorderedAccessView(Texture);
}
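The comments above mention packing the color into the PF_R32_UINT texture instead of using R8G8B8A8_UNORM. A minimal sketch of what that packing amounts to, using plain integer arithmetic rather than any engine or shader API (the function names are illustrative only):

#include <cstdint>

// Pack four 8-bit channels into one 32-bit value; the byte order chosen here
// (R in the lowest byte) is only a convention the writer and reader must share.
inline uint32_t PackRGBA8(uint8_t R, uint8_t G, uint8_t B, uint8_t A)
{
	return (uint32_t)R | ((uint32_t)G << 8) | ((uint32_t)B << 16) | ((uint32_t)A << 24);
}

// Recover a single channel by shifting and masking.
inline uint8_t UnpackChannel(uint32_t Packed, uint32_t ChannelIndex)
{
	return (uint8_t)((Packed >> (ChannelIndex * 8)) & 0xFFu);
}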
Example #6
void FWebMRecord::OnWorldCreated(UWorld* World, const UWorld::InitializationValues IVS)
{
	if (IsRunningCommandlet() || IsRunningDedicatedServer())
	{
		return;
	}

	if (!bRegisteredSlateDelegate)
	{
		FSlateRenderer* SlateRenderer = FSlateApplication::Get().GetRenderer().Get();
		SlateRenderer->OnSlateWindowRendered().AddRaw(this, &FWebMRecord::OnSlateWindowRenderedDuringCapture);
		bRegisteredSlateDelegate = true;

		// Setup readback buffer textures
		{
			for (int32 TextureIndex = 0; TextureIndex < 2; ++TextureIndex)
			{
				FRHIResourceCreateInfo CreateInfo;
				ReadbackTextures[TextureIndex] = RHICreateTexture2D(
					VideoWidth,
					VideoHeight,
					PF_B8G8R8A8,
					1,
					1,
					TexCreate_CPUReadback,
					CreateInfo
					);
			}

			ReadbackTextureIndex = 0;

			ReadbackBuffers[0] = nullptr;
			ReadbackBuffers[1] = nullptr;
			ReadbackBufferIndex = 0;
		}
	}
}
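The two TexCreate_CPUReadback textures and the ReadbackTextureIndex above form a double buffer. A minimal, self-contained sketch of that ping-pong pattern, with a placeholder struct standing in for the RHI texture (none of the names below come from the engine):

#include <array>
#include <cstdint>
#include <vector>

// Placeholder for a CPU-readback texture; in the examples above this would be
// an FTexture2DRHIRef created with TexCreate_CPUReadback.
struct FFakeReadbackTexture
{
	std::vector<uint8_t> Pixels;
};

// Double-buffered readback: each frame the GPU copy goes into one texture while
// the CPU reads the texture filled on the previous frame, then the index flips
// so the two sides never touch the same resource.
class FReadbackRing
{
public:
	FFakeReadbackTexture& CurrentWriteTarget() { return Textures[Index]; }
	FFakeReadbackTexture& PreviousReadSource() { return Textures[(Index + 1) % 2]; }
	void Flip() { Index = (Index + 1) % 2; }

private:
	std::array<FFakeReadbackTexture, 2> Textures;
	int32_t Index = 0;
};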
Example #7
void FGameLiveStreaming::StartBroadcastingGame( const FGameBroadcastConfig& GameBroadcastConfig )
{
	if( !IsBroadcastingGame() )
	{
		if( bIsBroadcasting )
		{
			FSlateRenderer* SlateRenderer = FSlateApplication::Get().GetRenderer().Get();
			SlateRenderer->OnSlateWindowRendered().RemoveAll( this );
			bIsBroadcasting = false;
		}

		// We call GetLiveStreamingService() here to fill in our LiveStreamer variable lazily, to make sure the service plugin is loaded
		// before we try to cache its interface pointer
		if( GetLiveStreamingService() != nullptr  )
		{
			FSlateRenderer* SlateRenderer = FSlateApplication::Get().GetRenderer().Get();
			SlateRenderer->OnSlateWindowRendered().AddRaw( this, &FGameLiveStreaming::OnSlateWindowRenderedDuringBroadcasting );

			this->bMirrorWebCamImage = GameBroadcastConfig.bMirrorWebCamImage;
			this->bDrawSimpleWebCamVideo = GameBroadcastConfig.bDrawSimpleWebCamVideo;

			// @todo livestream: This will interfere with editor live streaming if both are running at the same time!  The editor live 
			// streaming does check to make sure that game isn't already broadcasting, but the game currently doesn't have a good way to
			// do that, besides asking the LiveStreamer itself.

			UGameViewportClient* GameViewportClient = GEngine->GameViewport;
			check( GameViewportClient != nullptr );

			// @todo livestream: What about if viewport size changes while we are still broadcasting?  We need to restart the broadcast!
			FBroadcastConfig BroadcastConfig;
			BroadcastConfig.VideoBufferWidth = GameViewportClient->Viewport->GetSizeXY().X;
			BroadcastConfig.VideoBufferHeight = GameViewportClient->Viewport->GetSizeXY().Y;

			BroadcastConfig.VideoBufferWidth = FPlatformMath::FloorToInt( (float)BroadcastConfig.VideoBufferWidth * GameBroadcastConfig.ScreenScaling );
			BroadcastConfig.VideoBufferHeight = FPlatformMath::FloorToInt( (float)BroadcastConfig.VideoBufferHeight * GameBroadcastConfig.ScreenScaling );

			// Fix up the desired resolution so that it will work with the streaming system.  Some broadcasters require the
			// video buffer to be multiples of specific values, such as 32
			// @todo livestream: This could cause the aspect ratio to be changed and the buffer to be stretched non-uniformly, but usually the aspect only changes slightly
			LiveStreamer->MakeValidVideoBufferResolution( BroadcastConfig.VideoBufferWidth, BroadcastConfig.VideoBufferHeight );

			// Setup readback buffer textures
			{
				for( int32 TextureIndex = 0; TextureIndex < 2; ++TextureIndex )
				{
					FRHIResourceCreateInfo CreateInfo;
					ReadbackTextures[ TextureIndex ] = RHICreateTexture2D(
						BroadcastConfig.VideoBufferWidth,
						BroadcastConfig.VideoBufferHeight,
						PF_B8G8R8A8,
						1,
						1,
						TexCreate_CPUReadback,
						CreateInfo
						);
				}
	
				ReadbackTextureIndex = 0;

				ReadbackBuffers[0] = nullptr;
				ReadbackBuffers[1] = nullptr;
				ReadbackBufferIndex = 0;
			}

			BroadcastConfig.FramesPerSecond = GameBroadcastConfig.FrameRate;
			BroadcastConfig.PixelFormat = FBroadcastConfig::EBroadcastPixelFormat::B8G8R8A8;	// Matches viewport backbuffer format
			BroadcastConfig.bCaptureAudioFromComputer = GameBroadcastConfig.bCaptureAudioFromComputer;
			BroadcastConfig.bCaptureAudioFromMicrophone = GameBroadcastConfig.bCaptureAudioFromMicrophone;
			LiveStreamer->StartBroadcasting( BroadcastConfig );

			if( GameBroadcastConfig.bEnableWebCam )
			{
				// @todo livestream: Allow web cam to be started/stopped independently from the broadcast itself, so users can setup their web cam
				FWebCamConfig WebCamConfig;
				WebCamConfig.DesiredWebCamWidth = GameBroadcastConfig.DesiredWebCamWidth;
				WebCamConfig.DesiredWebCamHeight = GameBroadcastConfig.DesiredWebCamHeight;
				LiveStreamer->StartWebCam( WebCamConfig );
			}

			bIsBroadcasting = true;
		}
	}
}