// Submits the rendered frame to the Oculus compositor and releases the
// per-frame render context. Must run on the rendering thread, paired with a
// prior BeginRendering() that set RenderContext->bFrameBegun.
void FOculusRiftHMD::D3D11Bridge::FinishRendering()
{
	check(IsInRenderingThread());

	check(RenderContext.IsValid());

	// BUGFIX: the outer condition previously also required ColorTextureSet to
	// be non-null, which made the "TextureSet is null" branch below
	// unreachable and routed that failure to the misleading
	// "no corresponding BeginRendering" warning instead. Gate only on
	// bFrameBegun so each warning matches its actual cause.
	if (RenderContext->bFrameBegun)
	{
		if (!ColorTextureSet)
		{
			UE_LOG(LogHMD, Warning, TEXT("Skipping frame: TextureSet is null ?"));
		}
		else
		{
			// Finish the frame and let OVR do buffer swap (Present) and flush/sync.
			FSettings* FrameSettings = RenderContext->GetFrameSettings();

			// Both eyes sample from the same shared swap texture set.
			check(ColorTextureSet->GetTextureSet());
			FrameSettings->EyeLayer.EyeFov.ColorTexture[0] = ColorTextureSet->GetTextureSet();
			FrameSettings->EyeLayer.EyeFov.ColorTexture[1] = ColorTextureSet->GetTextureSet();

			// Single EyeFov layer is the only layer submitted.
			ovrLayerHeader* LayerList[1];
			LayerList[0] = &FrameSettings->EyeLayer.EyeFov.Header;

			// Set up positional data.
			ovrViewScaleDesc viewScaleDesc;
			viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f;
			viewScaleDesc.HmdToEyeViewOffset[0] = FrameSettings->EyeRenderDesc[0].HmdToEyeViewOffset;
			viewScaleDesc.HmdToEyeViewOffset[1] = FrameSettings->EyeRenderDesc[1].HmdToEyeViewOffset;

			ovrHmd_SubmitFrame(RenderContext->Hmd, RenderContext->RenderFrame->FrameNumber, &viewScaleDesc, LayerList, 1);

			// Advance the swap-texture ring so the next frame renders into a fresh buffer.
			ColorTextureSet->SwitchToNextElement();
		}
	}
	else
	{
		UE_LOG(LogHMD, Warning, TEXT("Skipping frame: FinishRendering called with no corresponding BeginRendering (was BackBuffer re-allocated?)"));
	}
	// Always clear per-frame state, even when the frame was skipped above.
	RenderContext->bFrameBegun = false;
	SetRenderContext(nullptr);
}
void OVRSDK06AppSkeleton::display_sdk() const
{
    ovrHmd hmd = m_Hmd;
    if (hmd == NULL)
        return;

    ovrTrackingState outHmdTrackingState = { 0 };
    ovrHmd_GetEyePoses(m_Hmd, m_frameIndex, m_eyeOffsets,
        m_eyePoses, &outHmdTrackingState);

    for (ovrEyeType eye = ovrEyeType::ovrEye_Left;
        eye < ovrEyeType::ovrEye_Count;
        eye = static_cast<ovrEyeType>(eye + 1))
    {
        const ovrSwapTextureSet& swapSet = *m_pTexSet[eye];
        glBindFramebuffer(GL_FRAMEBUFFER, m_swapFBO.id);
        ovrGLTexture& tex = (ovrGLTexture&)(swapSet.Textures[swapSet.CurrentIndex]);
        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex.OGL.TexId, 0);
        {
            // Handle render target resolution scaling
            m_layerEyeFov.Viewport[eye].Size = ovrHmd_GetFovTextureSize(m_Hmd, eye, m_layerEyeFov.Fov[eye], m_fboScale);
            ovrRecti& vp = m_layerEyeFov.Viewport[eye];
            if (m_layerEyeFov.Header.Flags & ovrLayerFlag_TextureOriginAtBottomLeft)
            {
                ///@note It seems that the render viewport should be vertically centered within the swapSet texture.
                /// See also OculusWorldDemo.cpp:1443 - "The usual OpenGL viewports-don't-match-UVs oddness."
                const int texh = swapSet.Textures[swapSet.CurrentIndex].Header.TextureSize.h;
                vp.Pos.y = (texh - vp.Size.h) / 2;
            }

            glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);

            glClearColor(0.f, 0.f, 0.f, 0.f);
            glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

            // Render the scene for the current eye
            const ovrPosef& eyePose = m_eyePoses[eye];
            const glm::mat4 viewLocal = makeMatrixFromPose(eyePose);
            const glm::mat4 viewWorld = makeWorldToChassisMatrix() * viewLocal;
            const glm::mat4& proj = m_eyeProjections[eye];
            _DrawScenes(
                glm::value_ptr(glm::inverse(viewWorld)),
                glm::value_ptr(proj),
                glm::value_ptr(glm::inverse(viewLocal)));

            m_layerEyeFov.RenderPose[eye] = eyePose;
        }
        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
        glBindFramebuffer(GL_FRAMEBUFFER, 0);
    }

    ovrLayerEyeFov& layer = m_layerEyeFov;
    ovrLayerHeader* layers = &layer.Header;
    ovrResult result = ovrHmd_SubmitFrame(hmd, m_frameIndex, NULL, &layers, 1);

    // Increment counters in swap texture set
    for (ovrEyeType eye = ovrEyeType::ovrEye_Left;
        eye < ovrEyeType::ovrEye_Count;
        eye = static_cast<ovrEyeType>(eye + 1))
    {
        ovrSwapTextureSet& swapSet = *m_pTexSet[eye];
        ++swapSet.CurrentIndex %= swapSet.TextureCount;
    }

    // Blit output to main app window to show something on screen in addition
    // to what's in the Rift. This could optionally be the distorted texture
    // from the OVR SDK's mirror texture, or perhaps a single eye's undistorted
    // view, or even a third-person render(at a performance cost).
    if (true)
    {
        glViewport(0, 0, m_appWindowSize.w, m_appWindowSize.h);
        glBindFramebuffer(GL_READ_FRAMEBUFFER, m_mirrorFBO.id);
        glBlitFramebuffer(
            0, m_mirrorFBO.h, m_mirrorFBO.w, 0,
            0, 0, m_appWindowSize.w, m_appWindowSize.h,
            GL_COLOR_BUFFER_BIT, GL_NEAREST);
        glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
    }

    ++m_frameIndex;
}
// Example #3
void VR_UpdateScreenContent()
{
	int i;
	vec3_t orientation;
	ovrVector3f view_offset[2];
	ovrPosef render_pose[2];

	ovrFrameTiming ftiming;
	ovrTrackingState hmdState;

	ovrViewScaleDesc viewScaleDesc;
	ovrLayerEyeFov ld;
	ovrLayerHeader* layers;
	
	GLint w, h;
	
	
	// Last chance to enable VR Mode - we get here when the game already start up with vr_enabled 1
	// If enabling fails, unset the cvar and return.
	if( !vr_initialized && !VR_Enable() ) {
		Cvar_Set ("vr_enabled", "0");
		return;
	}

	w = mirror_texture->OGL.Header.TextureSize.w;
	h= mirror_texture->OGL.Header.TextureSize.h;

	// Get current orientation of the HMD
	ftiming = ovrHmd_GetFrameTiming(hmd, 0);
	hmdState = ovrHmd_GetTrackingState(hmd, ftiming.DisplayMidpointSeconds);


	// Calculate HMD angles and blend with input angles based on current aim mode
	QuatToYawPitchRoll(hmdState.HeadPose.ThePose.Orientation, orientation);
	switch( (int)vr_aimmode.value )
	{
		// 1: (Default) Head Aiming; View YAW is mouse+head, PITCH is head
		default:
		case VR_AIMMODE_HEAD_MYAW:
			cl.viewangles[PITCH] = cl.aimangles[PITCH] = orientation[PITCH];
			cl.aimangles[YAW] = cl.viewangles[YAW] = cl.aimangles[YAW] + orientation[YAW] - lastOrientation[YAW];
			break;
		
		// 2: Head Aiming; View YAW and PITCH is mouse+head (this is stupid)
		case VR_AIMMODE_HEAD_MYAW_MPITCH:
			cl.viewangles[PITCH] = cl.aimangles[PITCH] = cl.aimangles[PITCH] + orientation[PITCH] - lastOrientation[PITCH];
			cl.aimangles[YAW] = cl.viewangles[YAW] = cl.aimangles[YAW] + orientation[YAW] - lastOrientation[YAW];
			break;
		
		// 3: Mouse Aiming; View YAW is mouse+head, PITCH is head
		case VR_AIMMODE_MOUSE_MYAW:
			cl.viewangles[PITCH] = orientation[PITCH];
			cl.viewangles[YAW]   = cl.aimangles[YAW] + orientation[YAW];
			break;
		
		// 4: Mouse Aiming; View YAW and PITCH is mouse+head
		case VR_AIMMODE_MOUSE_MYAW_MPITCH:
			cl.viewangles[PITCH] = cl.aimangles[PITCH] + orientation[PITCH];
			cl.viewangles[YAW]   = cl.aimangles[YAW] + orientation[YAW];
			break;
		
		case VR_AIMMODE_BLENDED:
			{
				float diffHMDYaw = orientation[YAW] - lastOrientation[YAW];
				float diffHMDPitch = orientation[PITCH] - lastOrientation[PITCH];
				float diffAimYaw = cl.aimangles[YAW] - lastAim[YAW];
				float diffYaw;

				// find new view position based on orientation delta
				cl.viewangles[YAW] += diffHMDYaw;

				// find difference between view and aim yaw
				diffYaw = cl.viewangles[YAW] - cl.aimangles[YAW];

				if (abs(diffYaw) > vr_deadzone.value / 2.0f)
				{
					// apply the difference from each set of angles to the other
					cl.aimangles[YAW] += diffHMDYaw;
					cl.viewangles[YAW] += diffAimYaw;
				}
				cl.aimangles[PITCH] += diffHMDPitch;
				cl.viewangles[PITCH]  = orientation[PITCH];
			}
			break;
	}
	cl.viewangles[ROLL]  = orientation[ROLL];

	VectorCopy (orientation, lastOrientation);
	VectorCopy (cl.aimangles, lastAim);
	
	VectorCopy (cl.viewangles, r_refdef.viewangles);
	VectorCopy (cl.aimangles, r_refdef.aimangles);


	// Calculate eye poses
	view_offset[0] = eyes[0].render_desc.HmdToEyeViewOffset;
	view_offset[1] = eyes[1].render_desc.HmdToEyeViewOffset;

	ovr_CalcEyePoses(hmdState.HeadPose.ThePose, view_offset, render_pose);
	eyes[0].pose = render_pose[0];
	eyes[1].pose = render_pose[1];


	// Render the scene for each eye into their FBOs
	for( i = 0; i < 2; i++ ) {
		current_eye = &eyes[i];
		RenderScreenForCurrentEye();
	}
	

	// Submit the FBOs to OVR
	viewScaleDesc.HmdSpaceToWorldScaleInMeters = meters_to_units;
	viewScaleDesc.HmdToEyeViewOffset[0] = view_offset[0];
	viewScaleDesc.HmdToEyeViewOffset[1] = view_offset[1];

	ld.Header.Type = ovrLayerType_EyeFov;
	ld.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;

	for( i = 0; i < 2; i++ ) {
		ld.ColorTexture[i] = eyes[i].fbo.color_textures;
		ld.Viewport[i].Pos.x = 0;
		ld.Viewport[i].Pos.y = 0;
		ld.Viewport[i].Size.w = eyes[i].fbo.size.width;
		ld.Viewport[i].Size.h = eyes[i].fbo.size.height;
		ld.Fov[i] = hmd->DefaultEyeFov[i];
		ld.RenderPose[i] = eyes[i].pose;
	}

	layers = &ld.Header;
	ovrHmd_SubmitFrame(hmd, 0, &viewScaleDesc, &layers, 1);

	// Blit mirror texture to back buffer
	glBindFramebufferEXT(GL_READ_FRAMEBUFFER_EXT, mirror_fbo);
	glBindFramebufferEXT(GL_DRAW_FRAMEBUFFER_EXT, 0);
	glBlitFramebufferEXT(0, h, w, 0, 0, 0, w, h,GL_COLOR_BUFFER_BIT, GL_NEAREST);
	glBindFramebufferEXT(GL_READ_FRAMEBUFFER_EXT, 0);
}