void FViewExtension::PreRenderViewFamily_RenderThread(FRHICommandListImmediate& RHICmdList, FSceneViewFamily& ViewFamily)
{
	check(IsInRenderingThread());
	FViewExtension& RenderContext = *this;
	FGameFrame* CurrentFrame = static_cast<FGameFrame*>(RenderContext.RenderFrame.Get());

	if (bFrameBegun || !CurrentFrame || !CurrentFrame->Settings->IsStereoEnabled() || !ViewFamily.RenderTarget->GetRenderTargetTexture())
	{
		return;
	}
	FSettings* FrameSettings = CurrentFrame->GetSettings();
	RenderContext.ShowFlags = ViewFamily.EngineShowFlags;

	RenderContext.CurHeadPose = CurrentFrame->HeadPose;

	if (FrameSettings->TexturePaddingPerEye != 0)
	{
		// Clear the padding between the two eyes
		const int32 GapMinX = ViewFamily.Views[0]->ViewRect.Max.X;
		const int32 GapMaxX = ViewFamily.Views[1]->ViewRect.Min.X;

		const int ViewportSizeY = (ViewFamily.RenderTarget->GetRenderTargetTexture()) ? 
			ViewFamily.RenderTarget->GetRenderTargetTexture()->GetSizeY() : ViewFamily.RenderTarget->GetSizeXY().Y;
		RHICmdList.SetViewport(GapMinX, 0, 0, GapMaxX, ViewportSizeY, 1.0f);
		RHICmdList.Clear(true, FLinearColor::Black, false, 0, false, 0, FIntRect());
	}

	check(ViewFamily.RenderTarget->GetRenderTargetTexture());
	
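	// Update the eye-FOV layer's viewports from the per-eye render viewports for this frame.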
	FrameSettings->EyeLayer.EyeFov.Viewport[0] = ToOVRRecti(FrameSettings->EyeRenderViewport[0]);
	FrameSettings->EyeLayer.EyeFov.Viewport[1] = ToOVRRecti(FrameSettings->EyeRenderViewport[1]);
	
	pPresentBridge->BeginRendering(RenderContext, ViewFamily.RenderTarget->GetRenderTargetTexture());

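	// Query the SDK's frame timing for this frame number (the returned timing is not used directly here).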
	ovrHmd_GetFrameTiming(Hmd, RenderContext.RenderFrame->FrameNumber);

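	// Mark the frame as begun; the early-out at the top keeps this from running twice for the same frame.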
	RenderContext.bFrameBegun = true;

	if (RenderContext.ShowFlags.Rendering)
	{
		// get latest orientation/position and cache it
		ovrTrackingState ts;
		ovrPosef EyeRenderPose[2];
		CurrentFrame->GetEyePoses(Hmd, EyeRenderPose, ts);

		// Take the new EyeRenderPose if bUpdateOnRT is set.
		// Even if !bOrientationChanged && !bPositionChanged we still need the new eye pose (for timewarp).
		if (FrameSettings->Flags.bUpdateOnRT ||
			(!CurrentFrame->Flags.bOrientationChanged && !CurrentFrame->Flags.bPositionChanged))
		{
			RenderContext.CurHeadPose = ts.HeadPose.ThePose;
			FMemory::Memcpy(RenderContext.CurEyeRenderPose, EyeRenderPose);
		}
		else
		{
			FMemory::Memcpy<ovrPosef[2]>(RenderContext.CurEyeRenderPose, CurrentFrame->EyeRenderPose);
		}
	}
}
void OculusDevice::updatePose(unsigned int frameIndex)
{
	// Ask the API for the times when this frame is expected to be displayed.
	m_frameTiming = ovrHmd_GetFrameTiming(m_hmdDevice, frameIndex);

	// Query the HMD for the current tracking state.
	ovrTrackingState ts = ovrHmd_GetTrackingState(m_hmdDevice, m_frameTiming.ScanoutMidpointSeconds);
	ovrPoseStatef headpose = ts.HeadPose;
	ovrPosef pose = headpose.ThePose;
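	// Store the inverse of the head pose: the position is negated, and negating the quaternion's w
	// inverts the rotation, presumably so it can be applied as a camera/view offset.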
	m_position.set(-pose.Position.x, -pose.Position.y, -pose.Position.z);
	m_orientation.set(pose.Orientation.x, pose.Orientation.y, pose.Orientation.z, -pose.Orientation.w);

	// Get the head pose for each eye (used for timewarp).
	for (int eyeIndex = 0; eyeIndex < ovrEye_Count; ++eyeIndex) {
		ovrEyeType eye = m_hmdDevice->EyeRenderOrder[eyeIndex];
		m_headPose[eye] = ovrHmd_GetEyePose(m_hmdDevice, eye);
	}
}
void VR_UpdateScreenContent()
{
	int i;
	vec3_t orientation;
	ovrVector3f view_offset[2];
	ovrPosef render_pose[2];

	ovrFrameTiming ftiming;
	ovrTrackingState hmdState;

	ovrViewScaleDesc viewScaleDesc;
	ovrLayerEyeFov ld;
	ovrLayerHeader* layers;
	
	GLint w, h;
	
	
	// Last chance to enable VR mode - we get here when the game already started up with vr_enabled 1.
	// If enabling fails, unset the cvar and return.
	if( !vr_initialized && !VR_Enable() ) {
		Cvar_Set ("vr_enabled", "0");
		return;
	}

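	// Size of the SDK-provided mirror texture, used for the final blit to the default framebuffer.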
	w = mirror_texture->OGL.Header.TextureSize.w;
	h = mirror_texture->OGL.Header.TextureSize.h;

	// Get the HMD orientation predicted for this frame's display time
	ftiming = ovrHmd_GetFrameTiming(hmd, 0);
	hmdState = ovrHmd_GetTrackingState(hmd, ftiming.DisplayMidpointSeconds);


	// Calculate HMD angles and blend with input angles based on current aim mode
	QuatToYawPitchRoll(hmdState.HeadPose.ThePose.Orientation, orientation);
	switch( (int)vr_aimmode.value )
	{
		// 1: (Default) Head Aiming; View YAW is mouse+head, PITCH is head
		default:
		case VR_AIMMODE_HEAD_MYAW:
			cl.viewangles[PITCH] = cl.aimangles[PITCH] = orientation[PITCH];
			cl.aimangles[YAW] = cl.viewangles[YAW] = cl.aimangles[YAW] + orientation[YAW] - lastOrientation[YAW];
			break;
		
		// 2: Head Aiming; View YAW and PITCH is mouse+head (this is stupid)
		case VR_AIMMODE_HEAD_MYAW_MPITCH:
			cl.viewangles[PITCH] = cl.aimangles[PITCH] = cl.aimangles[PITCH] + orientation[PITCH] - lastOrientation[PITCH];
			cl.aimangles[YAW] = cl.viewangles[YAW] = cl.aimangles[YAW] + orientation[YAW] - lastOrientation[YAW];
			break;
		
		// 3: Mouse Aiming; View YAW is mouse+head, PITCH is head
		case VR_AIMMODE_MOUSE_MYAW:
			cl.viewangles[PITCH] = orientation[PITCH];
			cl.viewangles[YAW]   = cl.aimangles[YAW] + orientation[YAW];
			break;
		
		// 4: Mouse Aiming; View YAW and PITCH is mouse+head
		case VR_AIMMODE_MOUSE_MYAW_MPITCH:
			cl.viewangles[PITCH] = cl.aimangles[PITCH] + orientation[PITCH];
			cl.viewangles[YAW]   = cl.aimangles[YAW] + orientation[YAW];
			break;
		
		case VR_AIMMODE_BLENDED:
			{
				float diffHMDYaw = orientation[YAW] - lastOrientation[YAW];
				float diffHMDPitch = orientation[PITCH] - lastOrientation[PITCH];
				float diffAimYaw = cl.aimangles[YAW] - lastAim[YAW];
				float diffYaw;

				// find new view position based on orientation delta
				cl.viewangles[YAW] += diffHMDYaw;

				// find difference between view and aim yaw
				diffYaw = cl.viewangles[YAW] - cl.aimangles[YAW];

				if (fabs(diffYaw) > vr_deadzone.value / 2.0f)
				{
					// apply the difference from each set of angles to the other
					cl.aimangles[YAW] += diffHMDYaw;
					cl.viewangles[YAW] += diffAimYaw;
				}
				cl.aimangles[PITCH] += diffHMDPitch;
				cl.viewangles[PITCH]  = orientation[PITCH];
			}
			break;
	}
	cl.viewangles[ROLL]  = orientation[ROLL];

	VectorCopy (orientation, lastOrientation);
	VectorCopy (cl.aimangles, lastAim);
	
	VectorCopy (cl.viewangles, r_refdef.viewangles);
	VectorCopy (cl.aimangles, r_refdef.aimangles);


	// Calculate eye poses
	view_offset[0] = eyes[0].render_desc.HmdToEyeViewOffset;
	view_offset[1] = eyes[1].render_desc.HmdToEyeViewOffset;

	ovr_CalcEyePoses(hmdState.HeadPose.ThePose, view_offset, render_pose);
	eyes[0].pose = render_pose[0];
	eyes[1].pose = render_pose[1];


	// Render the scene for each eye into their FBOs
	for( i = 0; i < 2; i++ ) {
		current_eye = &eyes[i];
		RenderScreenForCurrentEye();
	}
	

	// Submit the FBOs to OVR
	viewScaleDesc.HmdSpaceToWorldScaleInMeters = meters_to_units;
	viewScaleDesc.HmdToEyeViewOffset[0] = view_offset[0];
	viewScaleDesc.HmdToEyeViewOffset[1] = view_offset[1];

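	// Describe a single eye-FOV layer; GL render targets have a bottom-left origin, hence the flag.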
	ld.Header.Type = ovrLayerType_EyeFov;
	ld.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;

	for( i = 0; i < 2; i++ ) {
		ld.ColorTexture[i] = eyes[i].fbo.color_textures;
		ld.Viewport[i].Pos.x = 0;
		ld.Viewport[i].Pos.y = 0;
		ld.Viewport[i].Size.w = eyes[i].fbo.size.width;
		ld.Viewport[i].Size.h = eyes[i].fbo.size.height;
		ld.Fov[i] = hmd->DefaultEyeFov[i];
		ld.RenderPose[i] = eyes[i].pose;
	}

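	// Hand the single layer to the compositor for this frame.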
	layers = &ld.Header;
	ovrHmd_SubmitFrame(hmd, 0, &viewScaleDesc, &layers, 1);

	// Blit mirror texture to back buffer
	glBindFramebufferEXT(GL_READ_FRAMEBUFFER_EXT, mirror_fbo);
	glBindFramebufferEXT(GL_DRAW_FRAMEBUFFER_EXT, 0);
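	// Flip vertically while blitting (the mirror texture has a bottom-left origin).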
	glBlitFramebufferEXT(0, h, w, 0, 0, 0, w, h, GL_COLOR_BUFFER_BIT, GL_NEAREST);
	glBindFramebufferEXT(GL_READ_FRAMEBUFFER_EXT, 0);
}