Exemple #1
0
	void OVR::postReset(const ovrTexture& _texture)
	{
		if (NULL != m_hmd)
		{
			m_texture[0] = _texture;
			m_texture[1] = _texture;

			ovrRecti rect;
			rect.Pos.x  = 0;
			rect.Pos.y  = 0;
			rect.Size.w = m_rtSize.w/2;
			rect.Size.h = m_rtSize.h;

			m_texture[0].Header.RenderViewport = rect;

			rect.Pos.x += rect.Size.w;
			m_texture[1].Header.RenderViewport = rect;

			m_timing = ovrHmd_BeginFrame(m_hmd, 0);
#if OVR_VERSION > OVR_VERSION_042
			m_pose[0] = ovrHmd_GetHmdPosePerEye(m_hmd, ovrEye_Left);
			m_pose[1] = ovrHmd_GetHmdPosePerEye(m_hmd, ovrEye_Right);
#else
			m_pose[0] = ovrHmd_GetEyePose(m_hmd, ovrEye_Left);
			m_pose[1] = ovrHmd_GetEyePose(m_hmd, ovrEye_Right);
#endif // OVR_VERSION > OVR_VERSION_042
		}
	}
Exemple #2
0
	bool OVR::swap(HMD& _hmd)
	{
		// No HMD attached; caller should fall back to its own present path.
		if (NULL == m_hmd)
		{
			return false;
		}

		// Submit the poses/textures of the frame started by the previous
		// BeginFrame call; the SDK performs distortion and present.
		ovrHmd_EndFrame(m_hmd, m_pose, m_texture);

		// Keep trying to dismiss the health & safety warning until the SDK
		// accepts the dismissal.
		if (m_warning)
		{
			m_warning = !ovrHmd_DismissHSWDisplay(m_hmd);
		}

		// Immediately begin the next frame and cache fresh per-eye poses
		// (poses are only valid between BeginFrame and EndFrame).
		m_timing = ovrHmd_BeginFrame(m_hmd, 0);

#if OVR_VERSION > OVR_VERSION_042
		m_pose[0] = ovrHmd_GetHmdPosePerEye(m_hmd, ovrEye_Left);
		m_pose[1] = ovrHmd_GetHmdPosePerEye(m_hmd, ovrEye_Right);
#else
		m_pose[0] = ovrHmd_GetEyePose(m_hmd, ovrEye_Left);
		m_pose[1] = ovrHmd_GetEyePose(m_hmd, ovrEye_Right);
#endif // OVR_VERSION > OVR_VERSION_042

		// Propagate the new pose to the engine-side HMD state.
		getEyePose(_hmd);

		return true;
	}
		void EndFrame(const ovrTexture* eyes)
		{
			static ovrPosef eyeRenderPose[2];
			eyeRenderPose[0] = ovrHmd_GetEyePose(m_device, ovrEyeType::ovrEye_Left);
			eyeRenderPose[1] = ovrHmd_GetEyePose(m_device, ovrEyeType::ovrEye_Right);

			ovrHmd_EndFrame(m_device, eyeRenderPose, eyes);
		}
void OVREndFrame()
{
	ovrPosef headPose[2] = {
		ovrHmd_GetEyePose( _OVRGlobals.HMD, _OVRGlobals.HMD->EyeRenderOrder[0] ),
		ovrHmd_GetEyePose( _OVRGlobals.HMD, _OVRGlobals.HMD->EyeRenderOrder[1] )
	};
	ovrTexture eyeTextures[2] = {
		_OVRGlobals.Eye[0].Texture,
		_OVRGlobals.Eye[1].Texture
	};

	ovrHmd_EndFrame( _OVRGlobals.HMD, headPose, eyeTextures );
	memset( &_OVRGlobals.FrameTiming, 0, sizeof(ovrFrameTiming) );
}
		bool GetEyePos(OVREye e, math::vector3d& pos, math::quaternion& ori)
		{
			// Map the abstract eye to the device's render-order eye, sample its
			// pose, then unpack position and orientation into the out-params.
			const ovrEyeType eye = m_device->EyeRenderOrder[(int)e];
			const ovrPosef pose = ovrHmd_GetEyePose(m_device, eye);

			pos.set(pose.Position.x, pose.Position.y, pose.Position.z);
			ori = math::quaternion(pose.Orientation.w, pose.Orientation.x, pose.Orientation.y, pose.Orientation.z);
			return true;
		}
		math::vector3d GetPosition()
		{
			// Sample the pose of the first eye in the device's render order
			// and return its position.
			ovrEyeType eye = m_device->EyeRenderOrder[0];
			ovrPosef p = ovrHmd_GetEyePose(m_device, eye);
			return math::vector3d(p.Position.x, p.Position.y, p.Position.z);
			// Removed: an unreachable alternative after the return that read
			// ovrHmd_GetTrackingState() and returned the head pose with a
			// negated z — it could never execute.
		}
void OculusDevice::updatePose(unsigned int frameIndex)
{
	// Ask the API for the times when this frame is expected to be displayed.
	m_frameTiming = ovrHmd_GetFrameTiming(m_hmdDevice, frameIndex);

	// Query the HMD for the tracking state predicted at scanout midpoint.
	ovrTrackingState ts = ovrHmd_GetTrackingState(m_hmdDevice, m_frameTiming.ScanoutMidpointSeconds);
	ovrPoseStatef headpose = ts.HeadPose;
	ovrPosef pose = headpose.ThePose;
	// NOTE(review): position is fully negated and the quaternion's w is
	// negated — presumably a handedness/convention conversion for the host
	// engine; confirm against the engine's coordinate system.
	m_position.set(-pose.Position.x, -pose.Position.y, -pose.Position.z);
	m_orientation.set(pose.Orientation.x, pose.Orientation.y, pose.Orientation.z, -pose.Orientation.w);

	// Get head pose for both eyes (used for time warp), in render order.
	for (int eyeIndex = 0; eyeIndex < ovrEye_Count; ++eyeIndex) {
		ovrEyeType eye = m_hmdDevice->EyeRenderOrder[eyeIndex];
		m_headPose[eye] = ovrHmd_GetEyePose(m_hmdDevice, eye);
	}
}
Exemple #8
0
ngl::Mat4 OculusInterface::getViewMatrix(int _eye)
{
	// Sample the latest pose for this eye (valid between BeginFrame/EndFrame).
	m_pose[_eye] = ovrHmd_GetEyePose(m_hmd,(ovrEyeType) _eye);
	// Per-eye offset (IPD adjustment) supplied by the SDK render description.
	ngl::Mat4 pos;
	pos.translate(m_eyeRdesc[_eye].ViewAdjust.x, m_eyeRdesc[_eye].ViewAdjust.y, m_eyeRdesc[_eye].ViewAdjust.z);
	ngl::Mat4 rotation ;
	// Head orientation from the tracker, converted to a matrix and transposed
	// (inverse of a pure rotation) so it acts as a view rotation.
	ngl::Quaternion orientation(m_pose[_eye].Orientation.w,m_pose[_eye].Orientation.x,m_pose[_eye].Orientation.y,m_pose[_eye].Orientation.z);
	//	quat_to_matrix(&m_pose[_eye].Orientation.x, &rotation.m_m[0]);
	rotation=orientation.toMat4();
	rotation.transpose();
	// Translate by the negated head position (world -> eye space).
	ngl::Mat4  eyePos;
	eyePos.translate(-m_pose[_eye].Position.x, -m_pose[_eye].Position.y, -m_pose[_eye].Position.z);
	// Lower the scene by the configured standing eye height (default 1.65 m).
	ngl::Mat4 eyeLevel;
	eyeLevel.translate(0, -ovrHmd_GetFloat(m_hmd, OVR_KEY_EYE_HEIGHT, 1.65), 0);
	// could optimize this
	return pos*rotation*eyePos*eyeLevel;

}
Exemple #9
0
GMO double endFrame() {
	// Per-eye poses sampled this frame (static: persist across calls).
	static ovrPosef eyeRenderPose[2]; 
	static float    BodyYaw(3.141592f);
	static Vector3f HeadPos(0.0f, 1.6f, -5.0f);
	// Keep the head at the user's configured standing eye height.
	HeadPos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, HeadPos.y);

    //pRender->SetRenderTarget ( pRendertargetTexture );
	
    //pRender->SetViewport (Recti(0,0, pRendertargetTexture->GetWidth(),
    //                                    pRendertargetTexture->GetHeight() ));  
    //pRender->Clear();
	for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
	{
        ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];
        eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);

        // Get view and projection matrices
		Matrix4f rollPitchYaw       = Matrix4f::RotationY(BodyYaw);
		Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(eyeRenderPose[eye].Orientation);
		Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0,1,0));
		Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0,0,-1));
		Vector3f shiftedEyePos      = HeadPos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
        Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp); 
		Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, true);

		//pRender->SetViewport(Recti(EyeRenderViewport[eye]));
		//pRender->SetProjection(proj);
		pRender->SetDepthMode(true, true);
		
		//pRoomScene->Render(pRender, Matrix4f::Translation(EyeRenderDesc[eye].ViewAdjust) * view);
	}
	//pRender->BlendState
	
	//pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);
	//pRender->Present( true );
	pRender->UpdateMonitorOutputs();
    pRender->FinishScene();
	// NOTE(review): EndFrame submits the global `headPose` array, while this
	// function fills the local `eyeRenderPose` (which then goes unused). If
	// callers are not expected to refresh `headPose` via getEyePos() each
	// frame, the submitted poses are stale — confirm which array is intended.
	ovrHmd_EndFrame(HMD, headPose, &EyeTexture[0].Texture);
	return 1;
}
Exemple #10
0
void Renderer::renderOVR(fp_t interp)
{
    UNUSED(interp);

    // Begin the SDK frame (frame index 0 = let the SDK count frames itself).
    ovrHmd_BeginFrame(hmd_, 0);

    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_);
    glClear(GL_DEPTH_BUFFER_BIT);

    // Per-eye render poses handed to EndFrame for distortion/timewarp.
    ovrPosef eyePose[ovrEye_Count];

    for(int i = 0; i < ovrEye_Count; i++)
    {
        // Render eyes in the SDK-preferred order (improves pose prediction).
        ovrEyeType eye = hmd_->EyeRenderOrder[i];

        glViewport(eyeViewport_[eye].Pos.x, eyeViewport_[eye].Pos.y,
                   eyeViewport_[eye].Size.w, eyeViewport_[eye].Size.h);

        glm::mat4 projectionMat = convertOvrMatrix4f(ovrMatrix4f_Projection(eyeRenderDesc_[eye].Fov, 0.1f, 1000.0f, /*rightHanded*/ true));

        // Feed the sampled head pose into the camera before building the view.
        eyePose[eye] = ovrHmd_GetEyePose(hmd_, eye);
        Core::get().camera().setHeadPosition(convertOvrVector3f(eyePose[eye].Position));
        Core::get().camera().setHeadRotation(glm::conjugate(convertOvrQuatf(eyePose[eye].Orientation)));

        // Apply the per-eye offset (IPD) on top of the camera's view matrix.
        glm::mat4 viewMat = glm::translate(glm::mat4{}, convertOvrVector3f(eyeRenderDesc_[eye].ViewAdjust));
        viewMat = viewMat * Core::get().camera().viewMatrix();

        skyRenderer_->render();
        landRenderer_->render(projectionMat, viewMat);
        structureRenderer_->render(projectionMat, viewMat);
        modelRenderer_->render(projectionMat, viewMat);
    }

    glBindFramebuffer(GL_FRAMEBUFFER, 0);

    // Distort and present both eyes.
    ovrHmd_EndFrame(hmd_, eyePose, (ovrTexture*)eyeTexture_);
}
  virtual void draw() {
    // One SDK frame: begin, render each eye into its framebuffer, end/present.
    ovrHmd_BeginFrame(hmd, frameIndex++);
    ovrPosef eyePoses[2];

    gl::MatrixStack & mv = gl::Stacks::modelview();
    for (int i = 0; i < ovrEye_Count; ++i) {
      // Iterate eyes in the SDK-preferred render order.
      ovrEyeType eye = hmd->EyeRenderOrder[i];
      PerEyeArg & eyeArgs = eyes[eye];
      gl::Stacks::projection().top() = eyeArgs.projection;

      // Poses are only valid when sampled between BeginFrame and EndFrame.
      eyePoses[eye] = ovrHmd_GetEyePose(hmd, eye);

      eyeArgs.frameBuffer.withFramebufferActive([&]{
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        gl::Stacks::with_push(mv, [&]{
          // View = inverse(head pose), then the per-eye modelview offset.
          mv.preMultiply(glm::inverse(Rift::fromOvr(eyePoses[eye])));
          mv.preMultiply(eyeArgs.modelviewOffset);
          drawCubeScene();
        });
      });
    }

    ovrHmd_EndFrame(hmd, eyePoses, textures);
  }
Exemple #12
0
void RiftAppSkeleton::display_sdk() //const
{
    // SDK-distortion rendering path: draw both eyes into the offscreen FBO,
    // then let ovrHmd_EndFrame perform distortion and present.
    ovrHmd hmd = m_Hmd;
    if (hmd == NULL)
        return;

    //const ovrFrameTiming hmdFrameTiming =
    ovrHmd_BeginFrame(m_Hmd, 0);

    bindFBO(m_renderBuffer);

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // For passing to EndFrame once rendering is done
    // NOTE(review): both arrays are filled by eyeIndex (render order), not by
    // eye enum. Poses and textures stay mutually consistent, but verify the
    // ordering EndFrame expects when EyeRenderOrder != {Left, Right}.
    ovrPosef renderPose[2];
    ovrTexture eyeTexture[2];

    for (int eyeIndex=0; eyeIndex<ovrEye_Count; eyeIndex++)
    {
        const ovrEyeType eye = hmd->EyeRenderOrder[eyeIndex];
        const ovrPosef eyePose = ovrHmd_GetEyePose(m_Hmd, eye);
        m_eyeOri = eyePose.Orientation; // cache this for movement direction
        _StoreHmdPose(eyePose);

        // Viewport comes from the per-eye texture description.
        const ovrGLTexture& otex = l_EyeTexture[eye];
        const ovrRecti& rvp = otex.OGL.Header.RenderViewport;
        glViewport(
            rvp.Pos.x,
            rvp.Pos.y,
            rvp.Size.w,
            rvp.Size.h
            );

        const OVR::Matrix4f proj = ovrMatrix4f_Projection(
            m_EyeRenderDesc[eye].Fov,
            0.01f, 10000.0f, true);

        const OVR::Matrix4f view = _MakeModelviewMatrix(
            eyePose,
            -OVR::Vector3f(m_EyeRenderDesc[eye].ViewAdjust), // not sure why negative...
            m_chassisYaw,
            m_chassisPos);

        // Second modelview with a head-size scale applied.
        const OVR::Matrix4f scaledView = _MakeModelviewMatrix(
            eyePose,
            -OVR::Vector3f(m_EyeRenderDesc[eye].ViewAdjust), // not sure why negative...
            m_chassisYaw,
            m_chassisPos,
            m_headSize);

        _resetGLState();

        _DrawScenes(&view.Transposed().M[0][0], &proj.Transposed().M[0][0], rvp, &scaledView.Transposed().M[0][0]);

        renderPose[eyeIndex] = eyePose;
        eyeTexture[eyeIndex] = l_EyeTexture[eye].Texture;
    }
    unbindFBO();

    ovrHmd_EndFrame(m_Hmd, renderPose, eyeTexture);

    // Leave GL state clean for whatever runs after the SDK present.
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glUseProgram(0);
}
Exemple #13
0
GMO double getEyePos(double eyeIndexInput) {
	// Sample and cache the pose of the eye at the given render-order index.
	// (GMO entry point: the index arrives as a double and is truncated.)
	const int idx = static_cast<int>(eyeIndexInput);
	const ovrEyeType eye = HMD->EyeRenderOrder[idx];
	headPose[eye] = ovrHmd_GetEyePose(HMD, eye);
	return 1;
}
  void runOvrThread() {
    // Background render thread: draws each eye into a double-buffered pair of
    // FBOs and publishes finished textures/poses to the main (distortion)
    // thread under ovrLock, using GL fences to detect completion.
    // Make the shared context current
    glfwMakeContextCurrent(renderWindow);
    // Each thread requires its own glewInit call.
    glewInit();

    // Synchronization to determine when a given eye's render commands have completed
    GLsync eyeFences[2]{0, 0};
    // The index of the current rendering target framebuffer.  
    int backBuffers[2]{0, 0};
    // The pose for each rendered framebuffer
    ovrPosef backPoses[2];

    // Offscreen rendering targets.  two for each eye.
    // One is used for rendering while the other is used for distortion
    gl::FrameBufferWrapper frameBuffers[2][2];
    for_each_eye([&](ovrEyeType eye) {
      glm::uvec2 frameBufferSize = Rift::fromOvr(eyeTextures[0].Header.TextureSize);
      for (int i = 0; i < 2; ++i) {
        frameBuffers[i][eye].init(frameBufferSize);
      }
    });

    while (running) {
      for (int i = 0; i < 2; ++i) {
        // First pass: for any eye with an outstanding fence, check (without
        // blocking) whether its GPU work finished; if so, publish the texture
        // and pose to the shared state and flip to the other back buffer.
        for_each_eye([&](ovrEyeType eye) {
          if (0 != eyeFences[eye]) {
            GLenum result = glClientWaitSync(eyeFences[eye], GL_SYNC_FLUSH_COMMANDS_BIT, 0);
            switch (result) {
            case GL_ALREADY_SIGNALED:
            case GL_CONDITION_SATISFIED:
              withLock(ovrLock, [&]{
                eyeFences[eye] = 0;
                int bufferIndex = backBuffers[eye];
                textureIds[eye] = frameBuffers[bufferIndex][eye].color->texture;
                backBuffers[eye] = (bufferIndex + 1) % 2;
                eyePoses[eye] = backPoses[eye];
              });
              break;
            }
          }
        });


        // Second pass: render the i-th eye (render order) unless its previous
        // frame is still in flight.
        ovrEyeType eye = hmd->EyeRenderOrder[i];
        if (0 != eyeFences[eye]) {
          continue;
        }

        gl::MatrixStack & mv = gl::Stacks::modelview();
        gl::Stacks::projection().top() = projections[eye];
        gl::Stacks::with_push(mv, [&]{
          const ovrEyeRenderDesc & erd = eyeRenderDescs[eye];

          // We can only acquire an eye pose between beginframe and endframe.
          // So we've arranged for the lock to be only open at those points.  
          // The main thread will spend most of it's time in the wait.
          ::withLock(ovrLock, [&]{
            if (running) {
              backPoses[eye] = ovrHmd_GetEyePose(hmd, eye);
            }
          });

          {
            // Apply the head pose
            glm::mat4 m = Rift::fromOvr(backPoses[eye]);
            mv.preMultiply(glm::inverse(m));
            // Apply the per-eye offset
            glm::vec3 eyeOffset = Rift::fromOvr(erd.ViewAdjust);
            mv.preMultiply(glm::translate(glm::mat4(), eyeOffset));
          }

          int bufferIndex = backBuffers[eye];
          gl::FrameBufferWrapper & frameBuffer = frameBuffers[bufferIndex][eye];
          // Render the scene to an offscreen buffer
          frameBuffer.activate();
          renderScene();
          frameBuffer.deactivate();
          // Fence so the polling pass above can tell when this work is done.
          eyeFences[eye] = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
        });
      }
    }
  }
void CoinRiftWidget::paintGL()
{ 
    // Schedule the next repaint at ~75 fps (the Rift DK2 refresh rate).
    const int ms(1000 / 75 /*fps*/);
    QTimer::singleShot(ms, this, SLOT(updateGL()));

    // Handle the health & safety warning.
    handlingSafetyWarning();

    makeCurrent();

    ovrPosef eyePose[2];

    glEnable(GL_TEXTURE_2D);

    ovrFrameTiming hmdFrameTiming = ovrHmd_BeginFrame(hmd, 0);
    for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++) {
        // Render eyes in the SDK-preferred order and apply the sampled pose
        // to the corresponding Coin3D camera.
        ovrEyeType eye = hmd->EyeRenderOrder[eyeIndex];
        eyePose[eye] = ovrHmd_GetEyePose(hmd, eye);


        SbRotation    riftOrientation(  eyePose[eye].Orientation.x,
                                        eyePose[eye].Orientation.y,
                                        eyePose[eye].Orientation.z,
                                        eyePose[eye].Orientation.w);

        camera[eye]->orientation.setValue(riftOrientation);

        SbVec3f riftPosition =   SbVec3f(eyePose[eye].Position.x,
                                         eyePose[eye].Position.y,
                                         eyePose[eye].Position.z);


        // Rotate the per-eye (IPD) offset into head space before applying it.
        //SbVec3f originalPosition(camera[eye]->position.getValue());
        SbVec3f viewAdjust(eyeRenderDesc[eye].ViewAdjust.x,
                                                              eyeRenderDesc[eye].ViewAdjust.y,
                                                              eyeRenderDesc[eye].ViewAdjust.z);

        riftOrientation.multVec(viewAdjust,viewAdjust);

        camera[eye]->position.setValue(basePosition - viewAdjust + riftPosition);

        //Base::Console().Log("Eye(%d) Pos: %f, %f, %f  ViewAdjust:  %f, %f, %f \n",eye, eyePose[eye].Position.x,
        //                                                eyePose[eye].Position.y,
        //                                 eyePose[eye].Position.z,
        //                                 eyeRenderDesc[eye].ViewAdjust.x,
        //                                                      eyeRenderDesc[eye].ViewAdjust.y,
        //                                                      eyeRenderDesc[eye].ViewAdjust.z);

#ifdef USE_SO_OFFSCREEN_RENDERER
        // Offscreen path: render with Coin's software renderer, then upload
        // the pixels into the texture the SDK will distort.
        ovrGLTextureData *texData = reinterpret_cast<ovrGLTextureData*>(&eyeTexture[eye]);
        glBindTexture(GL_TEXTURE_2D, texData->TexId);
        renderer->render(rootScene[eye]);
        Q_ASSERT(!glGetError());
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA,
                     eyeTexture[eye].Header.TextureSize.w,
                     eyeTexture[eye].Header.TextureSize.h,
                     0, GL_RGBA /*GL_BGRA*/, GL_UNSIGNED_BYTE, renderer->getBuffer());
        Q_ASSERT(!glGetError());
        glBindTexture(GL_TEXTURE_2D, 0);
#endif
#ifdef USE_FRAMEBUFFER
        // Clear state pollution from OVR SDK.
        glBindTexture(GL_TEXTURE_2D, 0); // You need this, at least if (hmdDesc.DistortionCaps & ovrDistortion_Chromatic).
        OVR::CAPI::GL::glUseProgram(0); // You need this even more.

        GLint oldfb;
        glGetIntegerv(GL_FRAMEBUFFER_BINDING_EXT, &oldfb);
        // Set up framebuffer for rendering.
        OVR::CAPI::GL::glBindFramebuffer(GL_FRAMEBUFFER_EXT, frameBufferID[eye]);

        m_sceneManager->setSceneGraph(rootScene[eye]);
// m_sceneManager->setCamera(camera[eye]); // SoSceneManager does this implicitly.
        m_sceneManager->render();

        // Continue rendering to the original frame buffer (likely 0, the onscreen buffer).
        OVR::CAPI::GL::glBindFramebuffer(GL_FRAMEBUFFER_EXT, oldfb);
        Q_ASSERT(!glGetError());
#endif

        //camera[eye]->position.setValue(originalPosition);

    }
    
    // Submit the texture for distortion.     
    ovrHmd_EndFrame(hmd, eyePose, eyeTexture);

    // Swap buffers.
    glDisable(GL_CULL_FACE);
    glDisable(GL_DEPTH_TEST);
    //ovrHmd_EndFrame(hmd);
    glEnable(GL_CULL_FACE);
    glEnable(GL_DEPTH_TEST);
    glClearDepth(1.0);

    doneCurrent();
}
Exemple #16
0
///@todo Even though this function shares most of its code with client rendering,
/// which appears to work fine, it is non-convergable. It appears that the projection
/// matrices for each eye are too far apart? Could be modelview...
void RiftAppSkeleton::display_stereo_undistorted() //const
{
    // Debug path: render both eyes side-by-side to the FBO and blit it to the
    // window WITHOUT the SDK distortion pass (uses BeginFrameTiming only).
    ovrHmd hmd = m_Hmd;
    if (hmd == NULL)
        return;

    //ovrFrameTiming hmdFrameTiming =
    ovrHmd_BeginFrameTiming(hmd, 0);

    bindFBO(m_renderBuffer, m_fboScale);

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
    {
        ovrEyeType eye = hmd->EyeRenderOrder[eyeIndex];
        ovrPosef eyePose = ovrHmd_GetEyePose(hmd, eye);

        // Scale the SDK-provided viewport by the dynamic FBO resolution scale.
        const ovrGLTexture& otex = l_EyeTexture[eye];
        const ovrRecti& rvp = otex.OGL.Header.RenderViewport;
        const ovrRecti rsc = {
            static_cast<int>(m_fboScale * rvp.Pos.x),
            static_cast<int>(m_fboScale * rvp.Pos.y),
            static_cast<int>(m_fboScale * rvp.Size.w),
            static_cast<int>(m_fboScale * rvp.Size.h)
        };
        glViewport(rsc.Pos.x, rsc.Pos.y, rsc.Size.w, rsc.Size.h);

        // View = inverse(head orientation) * chassis yaw * translation, then
        // the per-eye IPD offset on top.
        OVR::Quatf orientation = OVR::Quatf(eyePose.Orientation);
        OVR::Matrix4f proj = ovrMatrix4f_Projection(
            m_EyeRenderDesc[eye].Fov,
            0.01f, 10000.0f, true);

        //m_EyeRenderDesc[eye].DistortedViewport;
        OVR::Vector3f EyePos = m_chassisPos;
        OVR::Matrix4f view = OVR::Matrix4f(orientation.Inverted())
            * OVR::Matrix4f::RotationY(m_chassisYaw)
            * OVR::Matrix4f::Translation(-EyePos);
        OVR::Matrix4f eyeview = OVR::Matrix4f::Translation(m_EyeRenderDesc[eye].ViewAdjust) * view;

        _resetGLState();

        _DrawScenes(&eyeview.Transposed().M[0][0], &proj.Transposed().M[0][0], rvp);
    }
    unbindFBO();

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_CULL_FACE);

    // Present FBO to screen
    const GLuint prog = m_presentFbo.prog();
    glUseProgram(prog);
    m_presentFbo.bindVAO();
    {
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, m_renderBuffer.tex);
        glUniform1i(m_presentFbo.GetUniLoc("fboTex"), 0);

        // This is the only uniform that changes per-frame
        glUniform1f(m_presentFbo.GetUniLoc("fboScale"), m_fboScale);

        glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
    }
    glBindVertexArray(0);
    glUseProgram(0);

    ovrHmd_EndFrameTiming(hmd);
}
//-------------------------------------------------------------------------------------
void ProcessAndRender()
{
    // Renders one stereo frame. With SDK_RENDER the Oculus SDK performs the
    // distortion pass; otherwise this function runs its own distortion-mesh
    // shader pass and presents manually.
    static ovrPosef eyeRenderPose[2]; 

	// Start timing
    #if SDK_RENDER
	ovrHmd_BeginFrame(HMD, 0); 
    #else
	ovrHmd_BeginFrameTiming(HMD, 0); 
    // Retrieve data useful for handling the Health and Safety Warning - unused, but here for reference
    ovrHSWDisplayState hswDisplayState;
    ovrHmd_GetHSWDisplayState(HMD, &hswDisplayState);
    #endif

	// Adjust eye position and rotation from controls, maintaining y position from HMD.
	static float    BodyYaw(3.141592f);
	static Vector3f HeadPos(0.0f, 1.6f, -5.0f);
//	HeadPos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, HeadPos.y);
	bool freezeEyeRender = Util_RespondToControls(BodyYaw, HeadPos, eyeRenderPose[1].Orientation);

     pRender->BeginScene();
    
	// Render the two undistorted eye views into their render buffers.
    if (!freezeEyeRender) // freeze to debug for time warp
    {
        pRender->SetRenderTarget ( pRendertargetTexture );
        pRender->SetViewport (Recti(0,0, pRendertargetTexture->GetWidth(),
                                         pRendertargetTexture->GetHeight() ));  
        pRender->Clear();
		for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
		{
            ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];
            eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);

            // Get view and projection matrices
			Matrix4f rollPitchYaw       = Matrix4f::RotationY(BodyYaw);
			Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(eyeRenderPose[eye].Orientation);
			Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0,1,0));
			Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0,0,-1));
			Vector3f shiftedEyePos      = HeadPos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
            Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp); 
			Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, true);

			pRender->SetViewport(Recti(EyeRenderViewport[eye]));
			pRender->SetProjection(proj);
			pRender->SetDepthMode(true, true);
			pRoomScene->Render(pRender, Matrix4f::Translation(EyeRenderDesc[eye].ViewAdjust) * view);
		}
    }
    pRender->FinishScene();

    #if SDK_RENDER	// Let OVR do distortion rendering, Present and flush/sync
	ovrHmd_EndFrame(HMD, eyeRenderPose, &EyeTexture[0].Texture);
    #else
	// Clear screen
	pRender->SetDefaultRenderTarget();
	pRender->SetFullViewport();
	pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);

	// Setup shader
	ShaderFill distortionShaderFill(Shaders);
	distortionShaderFill.SetTexture(0, pRendertargetTexture);
	distortionShaderFill.SetInputLayout(VertexIL);

	// Manual distortion: draw each eye's distortion mesh with timewarp
	// matrices derived from the render-time pose.
	for(int eyeNum = 0; eyeNum < 2; eyeNum++)
	{
		// Get and set shader constants
		Shaders->SetUniform2f("EyeToSourceUVScale",   UVScaleOffset[eyeNum][0].x, UVScaleOffset[eyeNum][0].y);
		Shaders->SetUniform2f("EyeToSourceUVOffset",  UVScaleOffset[eyeNum][1].x, UVScaleOffset[eyeNum][1].y);
 		ovrMatrix4f timeWarpMatrices[2];
		ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
		Shaders->SetUniform4x4f("EyeRotationStart", timeWarpMatrices[0]);  //Nb transposed when set
		Shaders->SetUniform4x4f("EyeRotationEnd",   timeWarpMatrices[1]);  //Nb transposed when set
		// Perform distortion
		pRender->Render(&distortionShaderFill, MeshVBs[eyeNum], MeshIBs[eyeNum],sizeof(ovrDistortionVertex));
	}

	pRender->SetDefaultRenderTarget();

	pRender->Present( true ); // Vsync enabled

    // Only flush GPU for ExtendDesktop; not needed in Direct App Renering with Oculus driver.
    if (HMD->HmdCaps & ovrHmdCap_ExtendDesktop)
		pRender->WaitUntilGpuIdle();  
	ovrHmd_EndFrameTiming(HMD);
    #endif
}
//-------------------------------------------------------------------------------------
void ProcessAndRender()
{
	// Renders one stereo frame with a manual (non-SDK) distortion pass:
	// draw both eyes into the render target, then run the distortion-mesh
	// shaders and present. Left-handed variants (LookAtLH / non-RH projection)
	// are used throughout.
#if 0
	//HRESULT hr = pRender->Device->SetRenderState(D3DRS_ZENABLE, D3DZB_TRUE);
	//OVR_ASSERT(SUCCEEDED(hr));

	pRender->Clear();
	pRender->BeginScene();

	Vector3f eye(0.0f, 0.0f, -5.0f);
	Vector3f lookat(0.0f, 0.0f, 0.0f);
	Vector3f up(0.0f, 1.0f, 0.0f);
	Matrix4f view = Matrix4f::LookAtLH(eye, lookat, up);
	//Matrix4f proj = Matrix4f::PerspectiveLH(3.14145f / 2, 800.0f / 600.0f, 1.0f, 10000.0f);

	ovrFovPort fov = { 1, 1, 1, 1 };
	Matrix4f proj = ovrMatrix4f_Projection(fov, 1.0f, 10000.0f, false);

	pRender->SetProjection(proj);
	pRoomScene->Render(pRender, view);

	pRender->EndScene();
	pRender->Present();
#endif

	static ovrPosef eyeRenderPose[2];
	ovrHmd_BeginFrameTiming(HMD, 0);

	// Adjust eye position and rotation from controls, maintaining y position from HMD.
	static float BodyYaw(3.141592f);
	static Vector3f HeadPos(0.0f, 1.6f, -5.0f);
	HeadPos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, HeadPos.y);
	bool freezeEyeRender = false;

	pRender->BeginScene();

	// Render the two undistorted eye views into the shared render target.
	if (!freezeEyeRender)
	{
		pRender->SetRenderTarget(pRendertargetTexture);
		pRender->SetViewport(Recti(0, 0, pRendertargetTexture->Width, pRendertargetTexture->Height));
		pRender->Clear();
		for (int eyeIndex = 0; eyeIndex < ovrEye_Count; ++eyeIndex)
		{
			ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];
			eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);

			// Get view and projection matrices
			Matrix4f rollPitchYaw = Matrix4f::RotationY(BodyYaw);
			Matrix4f finalRollPitchYaw = rollPitchYaw * Matrix4f(eyeRenderPose[eye].Orientation);
			Vector3f finalUp = finalRollPitchYaw.Transform(Vector3f(0, 1, 0));
			Vector3f finalForward = finalRollPitchYaw.Transform(Vector3f(0, 0, -1));
			Vector3f shiftedEyePos      = HeadPos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
			//Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp); 
			//Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, true);
			Matrix4f view = Matrix4f::LookAtLH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
			Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, false);

			pRender->SetViewport(Recti(EyeRenderViewport[eye]));
			pRender->SetProjection(proj);
			pRender->SetDepthMode(true, true);
			pRoomScene->Render(pRender, Matrix4f::Translation(EyeRenderDesc[eye].ViewAdjust) * view);
		}
	}

	pRender->SetDefaultRenderTarget();
	pRender->SetFullViewport();
	pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);

	ShaderFill distortionShaderFill(DistortionShaders);
	distortionShaderFill.SetTexture(0, pRendertargetTexture);

	// Manual distortion: draw each eye's distortion mesh with timewarp
	// matrices derived from the render-time pose.
	for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++)
	{
		// Get and set shader constants
		DistortionShaders->SetUniform2f("EyeToSourceUVScale", UVScaleOffset[eyeNum][0].x, UVScaleOffset[eyeNum][0].y);
		DistortionShaders->SetUniform2f("EyeToSourceUVOffset", UVScaleOffset[eyeNum][1].x, UVScaleOffset[eyeNum][1].y);
		ovrMatrix4f timeWarpMatrices[2];
		ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
		DistortionShaders->SetUniform4x4f("EyeRotationStart", timeWarpMatrices[0]);  //Nb transposed when set
		DistortionShaders->SetUniform4x4f("EyeRotationEnd", timeWarpMatrices[1]);  //Nb transposed when set
		// Perform distortion
		pRender->Render(&distortionShaderFill, DistortionDecl, MeshVBs[eyeNum], MeshIBs[eyeNum],
			sizeof(ovrDistortionVertex), Matrix4f(), MeshVBCnts[eyeNum], MeshIBCnts[eyeNum], Prim_Triangles);
		//Render(fill, vertices, indices, stride, Matrix4f(), 0,(int)vertices->GetSize(), Prim_Triangles, false);
		//(&distortionShaderFill, MeshVBs[eyeNum], MeshIBs[eyeNum],sizeof(ovrDistortionVertex));
	}

	/*
	pRender->SetDefaultRenderTarget();
	pRender->SetFullViewport();
	pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);

	Vector3f eye(0.0f, 0.0f, -5.0f);
	Vector3f lookat(0.0f, 0.0f, 0.0f);
	Vector3f up(0.0f, 1.0f, 0.0f);
	Matrix4f view = Matrix4f::LookAtLH(eye, lookat, up);
	Matrix4f proj = Matrix4f::PerspectiveLH(3.14145f / 4, 800.0f / 600.0f, 1.0f, 10000.0f);

	pRender->Proj = proj;
	pScene->Render(pRender, view);
	*/
	//pRender->SetDefaultRenderTarget();

	pRender->EndScene();
	pRender->Present();

	//if (HMD->HmdCaps & ovrHmdCap_ExtendDesktop)
	//	pRender->WaitUntilG
	ovrHmd_EndFrameTiming(HMD);
}
Exemple #19
0
int main(int argc, char *argv[])
{

	if (argc > 1){
		if  (strcmp(argv[1], "-debug") == 0 )
			mode = MODE_DEBUG;
		else if ( strcmp(argv[1], "-oculus") == 0 )
			mode = MODE_OCULUS;
		else if ( strcmp(argv[1], "-oculus-debug") == 0 )
			mode = MODE_OCULUS_DEBUG;
		else return 100;
	}else
		mode = MODE_DEBUG;

	int err;

	// Init OVR library, hardware and sensors.
	err = init_ovr();
	if ( err != 0 )
		exit( 10 + err );

	//Init windows and OpenGL context
	err = init_SDL_GL();
	if ( err != 0 )
		exit( 0 + err );
	
	// Load and Init shader and shaders Program
	err = load_init_shaders();
	if ( err != 0 )
		exit( 20 + err );

	// Load the Vertices, vertex arrays, etc... And bind them, along with the shaders.
	err = load_vertex();
	if ( err != 0 )
		exit( 30 + err );

	// Loads the texture from files and bien them as uniform in the frag shader
	err = load_textures();
	if ( err != 0 )
		exit( 40 + err );
	
	if (mode != MODE_DEBUG){
		// Inits the frame buffer, usefull for rendering the scene in a texture to send it to Oculus
		err = init_framebuffers();
		if ( err != 0 )
			exit( 50 + err );

		err = init_render_ovr();
		if ( err != 0 )
			exit( 60 + err );
	}

	std::cout << "Recommended w " << recommendedTex0Size.w << std::endl << "Recommended h " << recommendedTex0Size.h << std::endl;

	// Tansformations
	//---------------------------------------------
	// ---- Transfo
	glm::mat4 trans;
	GLuint uniTrans = glGetUniformLocation(shaderProgram, "trans");

	// ---- View
	glm::mat4 view;

	// ---- Projection
	glm::mat4 proj;
	
	
	


	// Render in Texture, and display
	//-------------------------------------------------
	if (mode == MODE_OCULUS_DEBUG ){

		load_init_passthrough_shaders();
		GLuint passthroughOB;
		glGenBuffers(1, &passthroughOB);
		glBindBuffer(GL_ARRAY_BUFFER, passthroughOB);
		glBufferData(GL_ARRAY_BUFFER, sizeof(passthroughScreen), passthroughScreen, GL_STATIC_DRAW);
		
		// Binding the fragment Shader output to the current buffer
		glBindFragDataLocation(passthroughShadersProgram, 0, "passthroughColor");
		errorCode = glGetError();

		// Link and Use Program
		glLinkProgram(passthroughShadersProgram);
		glUseProgram(passthroughShadersProgram);
		
		// Store the attributes for the shaders
		
		glGenVertexArrays(1, &passthroughVAO);
		glBindVertexArray(passthroughVAO);

		// Attributes Locations for Shaders and Enable
		GLint posAttrib = glGetAttribLocation(passthroughShadersProgram, "position");
		glVertexAttribPointer(posAttrib, 0, GL_FLOAT, GL_FALSE, sizeof(float) * 4, (void*) 2 );
		glEnableVertexAttribArray(posAttrib);

		GLint colorAttrib = glGetAttribLocation(passthroughShadersProgram, "texCoords");
		glVertexAttribPointer(colorAttrib, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 4, (void*)(sizeof(float) * 2) );
		glEnableVertexAttribArray(colorAttrib);

		glUseProgram(passthroughShadersProgram);
		glUniform1i(glGetUniformLocation(passthroughShadersProgram, "renderedTex"), 0);
	}

	
	
	



	// Event Loop
	//--------------------------------------------------
	SDL_Event windowEvent;
	while (true)
	{
		if (SDL_PollEvent(&windowEvent))
		{

			// Quit events
			if (windowEvent.type == SDL_QUIT) break;
			else if (windowEvent.type == SDL_KEYUP && windowEvent.key.keysym.sym == SDLK_ESCAPE) break;

		}



		// Enabling ressources to draw the cube
		// Before entering the rendering loop
		glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
		glActiveTexture(GL_TEXTURE0);
		glBindTexture(GL_TEXTURE_2D, textures[0]);
		glActiveTexture(GL_TEXTURE1);
		glBindTexture(GL_TEXTURE_2D, textures[1]);


		
		// ---- View
		view = glm::lookAt(
			glm::vec3(5.0f, 5.0f, 5.0f),
			glm::vec3(0.0f, 0.0f, 0.0f),
			glm::vec3(0.0f, 0.0f, 1.0f)
		);

		GLint uniView = glGetUniformLocation(shaderProgram, "view");
		glUniformMatrix4fv(uniView, 1, GL_FALSE, glm::value_ptr(view));
		
		// ---- Projection
		if ( mode == MODE_DEBUG ){
			proj = glm::perspective(45.0f, 1280.0f / 720.0f, 1.0f, 10.0f);
		}else{
			proj = glm::perspective(45.0f, 640.0f / 720.0f, 1.0f, 10.0f);
		}
		GLint uniProj = glGetUniformLocation(shaderProgram, "proj");
		glUniformMatrix4fv(uniProj, 1, GL_FALSE, glm::value_ptr(proj));
		
		//Turn around Z
		trans = glm::rotate(
			trans,
			0.7f,
			glm::vec3(0.0f, 0.0f, 1.0f)
		);
		glUniformMatrix4fv(uniTrans, 1, GL_FALSE, glm::value_ptr(trans));

		
		if ( mode == MODE_OCULUS || mode == MODE_OCULUS_DEBUG ){

			hdmFrameTiming = ovrHmd_BeginFrame(hmd, 0);
			

			

			glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
			glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
			
			for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++){

				
	
				ovrEyeType eye = hmd->EyeRenderOrder[eyeIndex];
				headPose[eye] = ovrHmd_GetEyePose(hmd, eye);


				if (eye == ovrEye_Right){
					glScissor(renderTargetSize.w / 2, 0, renderTargetSize.w / 2, renderTargetSize.h);
					glViewport(renderTargetSize.w / 2, 0, renderTargetSize.w / 2, renderTargetSize.h);
				}else{
					glScissor(0, 0, renderTargetSize.w / 2, renderTargetSize.h);
					glViewport(0, 0, renderTargetSize.w / 2, renderTargetSize.h);
				}
				
				if (eye == ovrEye_Right)
					glClearColor(0.0f, 0.3f, 0.0f, 1.0f);
				else
					glClearColor(0.3f, 0.0f, 0.0f, 1.0f);

				glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
				 
				// Drawing
				glDrawArrays(GL_TRIANGLES, 0, 36);

			}

			if (mode == MODE_OCULUS ){
				
				glScissor(0, 0, renderTargetSize.w, renderTargetSize.h);
				glViewport(0, 0, renderTargetSize.w, renderTargetSize.h);

				ovrHmd_EndFrame(hmd, headPose, eyeTex);
				Sleep(1);
			}else if ( mode == MODE_OCULUS_DEBUG ){

				glBindBuffer(GL_FRAMEBUFFER, 0);
				glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
				glBindBuffer(GL_ARRAY_BUFFER, 0);
				glUseProgram(0);

				glBindFramebuffer(GL_FRAMEBUFFER, 0);
				glBindVertexArray(passthroughVAO);
				glDisable(GL_DEPTH_TEST);
				glUseProgram(passthroughShadersProgram);

				glActiveTexture(GL_TEXTURE0);

				glDrawArrays(GL_TRIANGLES, 0, 6);

			}
		
		}else if (mode == MODE_DEBUG){


			// Clear the screen and the depth buffer (as it is filled with 0 initially, 
			// nothing will be draw (0 = on top);
			glClearColor(0.0f, 0.3f, 0.0f, 1.0f);
			glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);


			// Drawing
			glDrawArrays(GL_TRIANGLES, 0, 36);

		}

		
		if ( mode != MODE_OCULUS )
			SDL_GL_SwapWindow(window);


	}





	// Destoy the HMD and shutdown the library
	ovrHmd_Destroy(hmd);
	ovr_Shutdown();

	// Quite SDL and OpenGL
	glDeleteFramebuffers(1, &frameBuffer);
	SDL_GL_DeleteContext(context);
	SDL_Quit();

	return 0;
}
// Renders one stereo frame to the Rift using client-side distortion:
// begins OVR frame timing, draws the scene once per eye into the Oculus
// render target, then warps the result to the back buffer through
// DistortionMeshRender() and presents the swap chain.
void Render()
{
	ovrFrameTiming frameTiming = ovrHmd_BeginFrameTiming(HMD, 0);

	// Advance the box's rotation by the time elapsed since the last frame.
	rotationBoxValue += 2.0f*frameTiming.DeltaSeconds;

	// Eye position used when the camera is driven by keyboard etc.
	static OVR::Vector3f EyePos;
	EyePos.x = 0.0f, EyePos.y = 0.0f, EyePos.z = 0.0f;

	// Yaw applied when the view is rotated externally (e.g. by mouse).
	static float eyeYaw = 0;

	// Predicted head pose sampled from the sensor at scanout midpoint.
	ovrPosef movePose = ovrHmd_GetSensorState(HMD, frameTiming.ScanoutMidpointSeconds).Predicted.Pose;
	static ovrPosef eyeRenderPose[2];

	// Calculation used when taking the player's height into account.
	//EyePos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, EyePos.y);	

	// Only triangle lists are used in this sample.
	g_pImmediateContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);

	// Bind the Oculus offscreen render target.
	g_pImmediateContext->OMSetRenderTargets(1, &g_pRenderTargetViewOculus, g_pDepthStencilViewOculus);

	// Clear the render target and the depth buffer.
	float ClearColor[4] = { 0.0f, 0.125f, 0.3f, 1.0f }; // in R,G,B,A order
	g_pImmediateContext->ClearRenderTargetView(g_pRenderTargetViewOculus, ClearColor);
	g_pImmediateContext->ClearDepthStencilView(g_pDepthStencilViewOculus, D3D11_CLEAR_DEPTH, 1.0f, 0);

	// Draw the scene once for each eye.
	for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
	{
		ConstantBuffer cb;
		ovrEyeType eye = HMDDesc.EyeRenderOrder[eyeIndex];
		eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);

		// Build the view matrix.
		OVR::Matrix4f rotation = OVR::Matrix4f::RotationY(eyeYaw);											// apply the externally computed (e.g. mouse) rotation first
		OVR::Matrix4f resultRotation = rotation * OVR::Matrix4f(eyeRenderPose[eye].Orientation) *			// combine with the eye's tracked orientation
										OVR::Matrix4f(1, 0, 0, 0, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1);	// flip axes so the result matches the renderer's convention
		OVR::Vector3f resultUp = resultRotation.Transform(OVR::Vector3f(0, 1, 0));							// compute the up vector
		OVR::Vector3f forward = resultRotation.Transform(OVR::Vector3f(0, 0, -1));							// compute the forward vector
		OVR::Vector3f resultEyePos = EyePos + rotation.Transform(eyeRenderPose[eye].Position);				// compute the final eye position
		OVR::Vector3f resultEyeAt = EyePos + rotation.Transform(eyeRenderPose[eye].Position) + forward;		// compute the final look-at target

		// Build the xnamath view matrix from the values computed above.
		XMVECTOR Eye = XMVectorSet(resultEyePos.x, resultEyePos.y, resultEyePos.z, 0.0f);		// camera position
		XMVECTOR At = XMVectorSet(resultEyeAt.x, resultEyeAt.y, resultEyeAt.z, 0.0f);			// camera look-at point
		XMVECTOR Up = XMVectorSet(resultUp.x, resultUp.y, resultUp.z, 0.0f);					// camera up vector
		g_View = XMMatrixLookAtLH(Eye, At,Up) * XMMatrixTranslation(EyeRenderDesc[eye].ViewAdjust.x, EyeRenderDesc[eye].ViewAdjust.y, EyeRenderDesc[eye].ViewAdjust.z);

		// Compute the projection matrix from EyeRenderDesc.
		// Fov stores the tangents of the up/down/left/right half-angles from the
		// eye's center, so the dedicated libovr helper is used to build the matrix.
		// OVR::Matrix4f has its rows and columns swapped relative to xnamath,
		// so the matrix is transposed before being copied.
		OVR::Matrix4f proj = OVR::CreateProjection(false, EyeRenderDesc[eye].Fov, 0.01f, 100.0f);
		proj.Transpose();
		memcpy_s(&g_Projection, 64, &proj, 64);		

		// Set the viewport (configured separately for each eye).
		D3D11_VIEWPORT vp;
		vp.TopLeftX = EyeRenderViewport[eye].Pos.x;
		vp.TopLeftY = EyeRenderViewport[eye].Pos.y;
		vp.Width = EyeRenderViewport[eye].Size.w;
		vp.Height = EyeRenderViewport[eye].Size.h;
		vp.MinDepth = 0.0f;
		vp.MaxDepth = 1.0f;
		g_pImmediateContext->RSSetViewports(1, &vp);

		// Set up the matrices passed through the constant buffer.
		// They become transposed when handed to the shader, so transpose here.
		cb.mView = XMMatrixTranspose(g_View);
		cb.mProjection = XMMatrixTranspose(g_Projection);

		// Draw the scene.
		Scene(cb);
	}


	// Warp the scene rendered into the render target and draw it to the back buffer.
	DistortionMeshRender(3, HMD, frameTiming.TimewarpPointSeconds,eyeRenderPose);

	g_pSwapChain->Present(0, 0);
	//pRender->WaitUntilGpuIdle();  // GPU queries are not implemented in this sample
	ovrHmd_EndFrameTiming(HMD);

}
// Exemple #21 (0)
///@brief Renders one frame using client-side distortion: draws the scene
/// once per eye into the offscreen render buffer (scaled by m_fboScale),
/// then draws the per-eye distortion meshes to the backbuffer, sampling
/// that render buffer through the distortion shaders.
void RiftAppSkeleton::display_client() //const
{
    ovrHmd hmd = m_Hmd;
    if (hmd == NULL)
        return;

    //ovrFrameTiming hmdFrameTiming =
    ovrHmd_BeginFrameTiming(hmd, 0);

    // First pass: render both eyes' views into the offscreen FBO.
    bindFBO(m_renderBuffer, m_fboScale);

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
    {
        const ovrEyeType eye = hmd->EyeRenderOrder[eyeIndex];
        const ovrPosef eyePose = ovrHmd_GetEyePose(hmd, eye);
        m_eyeOri = eyePose.Orientation; // cache this for movement direction
        _StoreHmdPose(eyePose);

        // Scale this eye's render viewport by the current FBO scale so the
        // scene is drawn into the correspondingly sized region of the buffer.
        const ovrGLTexture& otex = l_EyeTexture[eye];
        const ovrRecti& rvp = otex.OGL.Header.RenderViewport;
        const ovrRecti rsc = {
            static_cast<int>(m_fboScale * rvp.Pos.x),
            static_cast<int>(m_fboScale * rvp.Pos.y),
            static_cast<int>(m_fboScale * rvp.Size.w),
            static_cast<int>(m_fboScale * rvp.Size.h)
        };
        glViewport(rsc.Pos.x, rsc.Pos.y, rsc.Size.w, rsc.Size.h);

        const OVR::Matrix4f proj = ovrMatrix4f_Projection(
            m_EyeRenderDesc[eye].Fov,
            0.01f, 10000.0f, true);

        ///@todo Should we be using this variable?
        //m_EyeRenderDesc[eye].DistortedViewport;

        // View matrix at normal scale for the world...
        const OVR::Matrix4f view = _MakeModelviewMatrix(
            eyePose,
            m_EyeRenderDesc[eye].ViewAdjust,
            m_chassisYaw,
            m_chassisPos);

        // ...and a second one including m_headSize for head-size scaling.
        const OVR::Matrix4f scaledView = _MakeModelviewMatrix(
            eyePose,
            m_EyeRenderDesc[eye].ViewAdjust,
            m_chassisYaw,
            m_chassisPos,
            m_headSize);

        _resetGLState();

        // OVR matrices are row-major; transpose for the GL-convention consumers.
        _DrawScenes(&view.Transposed().M[0][0], &proj.Transposed().M[0][0], rsc, &scaledView.Transposed().M[0][0]);
    }
    unbindFBO();


    // Set full viewport...?
    const int w = m_Cfg.OGL.Header.RTSize.w;
    const int h = m_Cfg.OGL.Header.RTSize.h;
    glViewport(0, 0, w, h);

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_CULL_FACE);

    // Now draw the distortion mesh...
    // Second pass: one precomputed distortion mesh per eye, each with its own
    // shader program, sampling the FBO texture rendered above.
    for(int eyeNum = 0; eyeNum < 2; eyeNum++)
    {
        const ShaderWithVariables& eyeShader = eyeNum == 0 ?
            m_presentDistMeshL :
            m_presentDistMeshR;
        const GLuint prog = eyeShader.prog();
        glUseProgram(prog);
        //glBindVertexArray(eyeShader.m_vao);
        {
            const ovrDistortionMesh& mesh = m_DistMeshes[eyeNum];
            glBindBuffer(GL_ARRAY_BUFFER, 0);

            // Vertex attributes point directly into client memory
            // (mesh.pVertexData) rather than a VBO, hence the unbind above.
            // NOTE(review): attribute locations are queried every frame;
            // caching them at init time would avoid the per-frame lookups.
            const int a_pos =  glGetAttribLocation(prog, "vPosition");
            glVertexAttribPointer(a_pos, 4, GL_FLOAT, GL_FALSE, sizeof(ovrDistortionVertex), &mesh.pVertexData[0].ScreenPosNDC.x);
            glEnableVertexAttribArray(a_pos);

            // Separate R/G/B texture coordinates implement chromatic
            // aberration correction; each channel is optional in the shader.
            const int a_texR =  glGetAttribLocation(prog, "vTexR");
            if (a_texR > -1)
            {
                glVertexAttribPointer(a_texR, 2, GL_FLOAT, GL_FALSE, sizeof(ovrDistortionVertex), &mesh.pVertexData[0].TanEyeAnglesR);
                glEnableVertexAttribArray(a_texR);
            }

            const int a_texG =  glGetAttribLocation(prog, "vTexG");
            if (a_texG > -1)
            {
                glVertexAttribPointer(a_texG, 2, GL_FLOAT, GL_FALSE, sizeof(ovrDistortionVertex), &mesh.pVertexData[0].TanEyeAnglesG);
                glEnableVertexAttribArray(a_texG);
            }

            const int a_texB =  glGetAttribLocation(prog, "vTexB");
            if (a_texB > -1)
            {
                glVertexAttribPointer(a_texB, 2, GL_FLOAT, GL_FALSE, sizeof(ovrDistortionVertex), &mesh.pVertexData[0].TanEyeAnglesB);
                glEnableVertexAttribArray(a_texB);
            }

            // NOTE(review): here index [2*eyeNum + 1] is treated as the offset
            // and [2*eyeNum + 0] as the scale, while the reference code in the
            // #if 0 block below uses UVScaleOffset[eyeNum][0] as scale and
            // [eyeNum][1] as offset -- the packing of m_uvScaleOffsetOut is not
            // visible here; verify it matches this ordering.
            ovrVector2f uvoff =
                m_uvScaleOffsetOut[2*eyeNum + 1];
                //DistortionData.UVScaleOffset[eyeNum][0];
            ovrVector2f uvscale =
                m_uvScaleOffsetOut[2*eyeNum + 0];
                //DistortionData.UVScaleOffset[eyeNum][1];

            glUniform2f(eyeShader.GetUniLoc("EyeToSourceUVOffset"), uvoff.x, uvoff.y);
            glUniform2f(eyeShader.GetUniLoc("EyeToSourceUVScale"), uvscale.x, uvscale.y);


#if 0
            // Setup shader constants
            DistortionData.Shaders->SetUniform2f(
                "EyeToSourceUVScale",
                DistortionData.UVScaleOffset[eyeNum][0].x,
                DistortionData.UVScaleOffset[eyeNum][0].y);
            DistortionData.Shaders->SetUniform2f(
                "EyeToSourceUVOffset",
                DistortionData.UVScaleOffset[eyeNum][1].x,
                DistortionData.UVScaleOffset[eyeNum][1].y);

            if (distortionCaps & ovrDistortionCap_TimeWarp)
            { // TIMEWARP - Additional shader constants required
                ovrMatrix4f timeWarpMatrices[2];
                ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPoses[eyeNum], timeWarpMatrices);
                //WARNING!!! These matrices are transposed in SetUniform4x4f, before being used by the shader.
                DistortionData.Shaders->SetUniform4x4f("EyeRotationStart", Matrix4f(timeWarpMatrices[0]));
                DistortionData.Shaders->SetUniform4x4f("EyeRotationEnd", Matrix4f(timeWarpMatrices[1]));
            }

            // Perform distortion
            pRender->Render(
                &distortionShaderFill,
                DistortionData.MeshVBs[eyeNum],
                DistortionData.MeshIBs[eyeNum]);
#endif

            // Sample the scene rendered in the first pass.
            glActiveTexture(GL_TEXTURE0);
            glBindTexture(GL_TEXTURE_2D, m_renderBuffer.tex);
            glUniform1i(eyeShader.GetUniLoc("fboTex"), 0);

            // This is the only uniform that changes per-frame
            glUniform1f(eyeShader.GetUniLoc("fboScale"), m_fboScale);


            // Indices also come from client memory (mesh.pIndexData).
            glDrawElements(
                GL_TRIANGLES,
                mesh.IndexCount,
                GL_UNSIGNED_SHORT,
                &mesh.pIndexData[0]);
        }
        glBindVertexArray(0);
        glUseProgram(0);
    }

    ovrHmd_EndFrameTiming(hmd);
}