Example #1
void OVR_SDL2_app::run()
{
    SDL_Event e;

    while (running)
    {
        // Dispatch all pending SDL events.

        while (SDL_PollEvent(&e))
            dispatch(e);

        // Let the application animate.

        step();

        // Render both views and let the Oculus SDK display them on-screen.
        // 'eye' is a private member variable that notes which eye is being
        // rendered. This is used when the application calls back down to
        // learn the view and projection matrices.

        ovrHmd_BeginFrame(hmd, 0);
        {
            ovrHmd_GetEyePoses(hmd, 0, offset, pose, NULL);

            for (int i = 0; i < 2; i++)
            {
                eye = hmd->EyeRenderOrder[i];
                buffer[eye]->bind();
                draw();
            }
        }
        ovrHmd_EndFrame(hmd, pose, tex);
    }
}
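
The comment in the render loop says the application "calls back down to learn the view and projection matrices." Those callbacks are not shown; below is a minimal sketch of what they might look like against the same SDK 0.4-era API, assuming `eye`, `pose`, and an `eyeRenderDesc` array (as returned by ovrHmd_ConfigureRendering) are members of OVR_SDL2_app, and using GLM for the math. Everything except `eye` and `pose` is an assumption, not the author's actual code.

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtc/type_ptr.hpp>

// Sketch: projection matrix for the eye currently being rendered.
glm::mat4 OVR_SDL2_app::projection() const
{
    // Let the SDK build the asymmetric-FOV projection for this eye.
    ovrMatrix4f proj = ovrMatrix4f_Projection(eyeRenderDesc[eye].Fov, 0.1f, 100.0f, true);

    // ovrMatrix4f is row-major; transpose into GLM's column-major layout.
    return glm::transpose(glm::make_mat4(&proj.M[0][0]));
}

// Sketch: view matrix derived from the tracked pose of the current eye.
glm::mat4 OVR_SDL2_app::view() const
{
    glm::quat q(pose[eye].Orientation.w, pose[eye].Orientation.x,
                pose[eye].Orientation.y, pose[eye].Orientation.z);
    glm::vec3 p(pose[eye].Position.x, pose[eye].Position.y, pose[eye].Position.z);

    // World-to-eye is the inverse of the eye's rigid transform.
    return glm::inverse(glm::translate(glm::mat4(1.0f), p) * glm::mat4_cast(q));
}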
Example #2
  void draw() {
    static int frameIndex = 0;
    static ovrPosef eyePoses[2];
    ++frameIndex;
    ovrHmd_GetEyePoses(hmd, frameIndex, eyeOffsets, eyePoses, nullptr);

    ovrHmd_BeginFrame(hmd, frameIndex);
    glEnable(GL_DEPTH_TEST);

    for (int i = 0; i < 2; ++i) {
      ovrEyeType eye = hmd->EyeRenderOrder[i];

      const ovrRecti & vp = textures[eye].Header.RenderViewport;
      eyeFramebuffers[eye]->Bind();
      oglplus::Context::Viewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
      Stacks::projection().top() = eyeProjections[eye];

      MatrixStack & mv = Stacks::modelview();
      mv.withPush([&]{
        // Apply the per-eye offset & the head pose
        mv.top() = glm::inverse(ovr::toGlm(eyePoses[eye])) * mv.top();
        renderScene();
      });
    }
    oglplus::DefaultFramebuffer().Bind(oglplus::Framebuffer::Target::Draw);

    ovrHmd_EndFrame(hmd, eyePoses, textures);
  }
Example #3
 void draw() {
   static int frameIndex = 0;
   static ovrPosef poses[2];
   glClear(GL_COLOR_BUFFER_BIT);
   ovrHmd_BeginFrame(hmd, frameIndex++);
   ovrHmd_EndFrame(hmd, poses, eyeTextures);
 }
Example #4
	bool OVR::swap(HMD& _hmd)
	{
		if (NULL == m_hmd)
		{
			return false;
		}

		ovrHmd_EndFrame(m_hmd, m_pose, m_texture);

		if (m_warning)
		{
			m_warning = !ovrHmd_DismissHSWDisplay(m_hmd);
		}

		m_timing = ovrHmd_BeginFrame(m_hmd, 0);

#if OVR_VERSION > OVR_VERSION_042
		m_pose[0] = ovrHmd_GetHmdPosePerEye(m_hmd, ovrEye_Left);
		m_pose[1] = ovrHmd_GetHmdPosePerEye(m_hmd, ovrEye_Right);
#else
		m_pose[0] = ovrHmd_GetEyePose(m_hmd, ovrEye_Left);
		m_pose[1] = ovrHmd_GetEyePose(m_hmd, ovrEye_Right);
#endif // OVR_VERSION > OVR_VERSION_042

		getEyePose(_hmd);

		return true;
	}
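
The #if guard reflects the LibOVR 0.4.3 rename of ovrHmd_GetEyePose to ovrHmd_GetHmdPosePerEye. A small compatibility shim, sketched here with the same version macros the code above already uses, could hide that check from callers (the helper name is hypothetical):

	// Sketch: version-independent eye-pose query built from the two calls above.
	static inline ovrPosef getEyePoseCompat(ovrHmd hmd, ovrEyeType eye)
	{
#if OVR_VERSION > OVR_VERSION_042
		return ovrHmd_GetHmdPosePerEye(hmd, eye);
#else
		return ovrHmd_GetEyePose(hmd, eye);
#endif // OVR_VERSION > OVR_VERSION_042
	}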
Example #5
void ToOculusRenderingPipeline::Render() {
	render_to_texture.Clear(game_world->GetScreenColor());
	view_state->device_context->ClearDepthStencilView(depth_buffer_view, D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1.0, 1);
	for (int i = 0; i < 2; i++) {
		player_camera.location = 
			OculusHelper::ConvertVector3fToArray(
			OculusHelper::ConvertArrayToVector3f(game_world->GetPlayerLocation()) +
			input_handler->GetHeadOffset() + input_handler->GetEyeOffset(i));
		player_camera.orientation = game_world->GetPlayerOrientation();
		player_camera.InvalidateAllMatrices();
		XMStoreFloat4x4(&(player_camera_transformation.EditBufferDataRef().transformation),
			player_camera.GetViewProjectionMatrix()
			);
		player_camera_transformation.PushBuffer(view_state->device_context);

		std::array<int, 2> viewport_position = { oculus->eye_viewports[i].Pos.x, oculus->eye_viewports[i].Pos.y };
		std::array<int, 2> viewport_size = { oculus->eye_viewports[i].Size.w, oculus->eye_viewports[i].Size.h };
		render_to_texture.SetViewport(viewport_position, viewport_size, { 0.0f, 1.0f });
		render_to_texture.Prepare();
		player_camera_transformation.Prepare(view_state->device_interface, view_state->device_context, PER_FRAME_CONSTANT_BUFFER_REGISTER);
		view_state->device_context->OMSetDepthStencilState(depth_buffer_state, 1);
		game_world->Draw(render_to_texture);
	}

	ovrHmd_EndFrame(oculus->head_mounted_display, oculus->eye_rendering_pose, &oculus->eye_textures[0].Texture);

}
Example #6
		void EndFrame(const ovrTexture* eyes)
		{
			static ovrPosef eyeRenderPose[2];
			eyeRenderPose[0] = ovrHmd_GetEyePose(m_device, ovrEyeType::ovrEye_Left);
			eyeRenderPose[1] = ovrHmd_GetEyePose(m_device, ovrEyeType::ovrEye_Right);

			ovrHmd_EndFrame(m_device, eyeRenderPose, eyes);
		}
Example #7
 void draw() {
   // Bug in SDK prevents direct mode from activating unless I call this
   static ovrPosef eyePoses[2];
   {
     static ovrVector3f eyeOffsets[2];
     ovrHmd_GetEyePoses(hmd, getFrame(), eyeOffsets, eyePoses, nullptr);
   }
   ovrHmd_BeginFrame(hmd, getFrame());
   ovrHmd_EndFrame(hmd, eyePoses, eyeTextures);
 }
Example #8
void RiftRenderingApp::drawRiftFrame() {
  ++frameCount;
  ovrHmd_BeginFrame(hmd, frameCount);
  MatrixStack & mv = Stacks::modelview();
  MatrixStack & pr = Stacks::projection();

  perFrameRender();
  
  ovrPosef fetchPoses[2];
  ovrHmd_GetEyePoses(hmd, frameCount, eyeOffsets, fetchPoses, nullptr);
  for (int i = 0; i < 2; ++i) {
    ovrEyeType eye = currentEye = hmd->EyeRenderOrder[i];
    // Force us to alternate eyes if we aren't keeping up with the required framerate
    if (eye == lastEyeRendered) {
      continue;
    }
    // We want to ensure that we only update the pose we 
    // send to the SDK if we actually render this eye.
    eyePoses[eye] = fetchPoses[eye];

    lastEyeRendered = eye;
    Stacks::withPush(pr, mv, [&] {
      // Set up the per-eye projection matrix
      pr.top() = projections[eye];

      // Set up the per-eye modelview matrix
      // Apply the head pose
      glm::mat4 eyePose = ovr::toGlm(eyePoses[eye]);
      mv.preMultiply(glm::inverse(eyePose));

      // Render the scene to an offscreen buffer
      eyeFramebuffers[eye]->Bind();
      perEyeRender();
    });
    
    if (eyePerFrameMode) {
      break;
    }
  }

  if (endFrameLock) {
    endFrameLock->lock();
  }
  ovrHmd_EndFrame(hmd, eyePoses, eyeTextures);
  if (endFrameLock) {
    endFrameLock->unlock();
  }
  rateCounter.increment();
  if (rateCounter.elapsed() > 2.0f) {
    float fps = rateCounter.getRate();
    updateFps(fps);
    rateCounter.reset();
  }
}
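
The rateCounter used at the end of drawRiftFrame() is not shown. A minimal sketch of such a counter, written against nothing but the standard <chrono> header (this class is an assumption, not the original implementation):

#include <chrono>

// Sketch: count events and report their average rate over the current window.
class RateCounter {
  using Clock = std::chrono::steady_clock;
public:
  void increment() { ++count; }

  // Seconds since the window started.
  float elapsed() const {
    return std::chrono::duration_cast<std::chrono::duration<float>>(
        Clock::now() - start).count();
  }

  // Average events per second over the current window.
  float getRate() const {
    float seconds = elapsed();
    return seconds > 0.0f ? count / seconds : 0.0f;
  }

  // Start a new measurement window.
  void reset() {
    count = 0;
    start = Clock::now();
  }

private:
  unsigned int count = 0;
  Clock::time_point start = Clock::now();
};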
Example #9
void OculusInterface::endFrame()
{
	//std::cout<<"End Frame\n";

	glBindFramebuffer(GL_FRAMEBUFFER, 0);
	glViewport(0, 0, m_windowWidth, m_windowHeight);

	ovrHmd_EndFrame(m_hmd, m_pose, &m_fbTextureIDOVR[0].Texture);
	//assert(glGetError() == GL_NO_ERROR);

}
Example #10
	void OVR::preReset()
	{
		if (NULL != m_hmd)
		{
			ovrHmd_EndFrame(m_hmd, m_pose, m_texture);
			ovrHmd_Destroy(m_hmd);
			m_hmd = NULL;
		}

		m_debug = false;
	}
Example #11
void OVREndFrame()
{
	ovrPosef headPose[2] = {
		ovrHmd_GetEyePose( _OVRGlobals.HMD, _OVRGlobals.HMD->EyeRenderOrder[0] ),
		ovrHmd_GetEyePose( _OVRGlobals.HMD, _OVRGlobals.HMD->EyeRenderOrder[1] )
	};
	ovrTexture eyeTextures[2] = {
		_OVRGlobals.Eye[0].Texture,
		_OVRGlobals.Eye[1].Texture
	};

	ovrHmd_EndFrame( _OVRGlobals.HMD, headPose, eyeTextures );
	memset( &_OVRGlobals.FrameTiming, 0, sizeof(ovrFrameTiming) );
}
Example #12
/** Should be called when we have completed rendering a frame. For
 * HMDs, this should be called after both the left and right eyes have
 * been rendered. */
void viewmat_end_frame(void)
{
	if(viewmat_display_mode == VIEWMAT_OCULUS)
	{
#ifndef MISSING_OVR
		/* Copy the prerendered image from a multisample antialiasing
		   texture into a normal OpenGL texture. This section of code
		   is not necessary if we are rendering directly into the
		   normal (non-antialiased) OpenGL texture. */
		GLuint buffersToBlit[3] = { GL_COLOR_BUFFER_BIT, GL_STENCIL_BUFFER_BIT, GL_DEPTH_BUFFER_BIT };

		glBindFramebuffer(GL_READ_FRAMEBUFFER, leftFramebufferAA);
		glBindFramebuffer(GL_DRAW_FRAMEBUFFER, leftFramebuffer);
		for(unsigned int i=0; i<sizeof(buffersToBlit)/sizeof(buffersToBlit[0]); i++)
			glBlitFramebuffer(0, 0, recommendTexSizeL.w,
			                  recommendTexSizeL.h, 0, 0, recommendTexSizeL.w,
			                  recommendTexSizeL.h, buffersToBlit[i], GL_NEAREST);

		glBindFramebuffer(GL_READ_FRAMEBUFFER, rightFramebufferAA);
		glBindFramebuffer(GL_DRAW_FRAMEBUFFER, rightFramebuffer);
		for(unsigned int i=0; i<sizeof(buffersToBlit)/sizeof(buffersToBlit[0]); i++)
			glBlitFramebuffer(0, 0, recommendTexSizeL.w,
			                  recommendTexSizeL.h, 0, 0, recommendTexSizeL.w,
			                  recommendTexSizeL.h, buffersToBlit[i], GL_NEAREST);
		kuhl_errorcheck();
		
		glBindFramebuffer(GL_FRAMEBUFFER, 0);
		if(hmd)
			ovrHmd_EndFrame(hmd, pose, &EyeTexture[0].Texture);
#endif
	}
	else if(viewmat_display_mode == VIEWMAT_ANAGLYPH)
	{
		glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
	}

	/* Need to swap front and back buffers here unless we are using
	 * Oculus. (Oculus draws to the screen directly). */
	if(viewmat_display_mode != VIEWMAT_OCULUS)
		glutSwapBuffers();

	viewmat_validate_fps();
}
Example #13
  virtual void draw() {
    static ovrPosef eyePoses[2];

    ovrHmd_BeginFrame(hmd, getFrame());
    MatrixStack & mv = Stacks::modelview();
    for (int i = 0; i < ovrEye_Count; ++i) {
      ovrEyeType eye = hmd->EyeRenderOrder[i];
      PerEyeArg & eyeArgs = eyes[eye];
      Stacks::projection().top() = eyeArgs.projection;

      eyeArgs.framebuffer->Bind();
      oglplus::Context::Clear().DepthBuffer();
      Stacks::withPush(mv, [&]{
        mv.preMultiply(eyeArgs.modelviewOffset);
        oria::renderExampleScene(ipd, eyeHeight);
      });
    }
    ovrHmd_EndFrame(hmd, eyePoses, eyeTextures);
  }
Example #14
  void draw() {
    auto frameTime = ovrHmd_BeginFrame(hmd, frameIndex++);
    ovrLock.unlock();

    if (0 != frameTime.TimewarpPointSeconds) {
      ovr_WaitTillTime(frameTime.TimewarpPointSeconds - 0.002);
    } else {
      ovr_WaitTillTime(frameTime.NextFrameSeconds - 0.008);
    }

    // Grab the most recent textures
    for_each_eye([&](ovrEyeType eye) {
      ((ovrGLTexture&)(eyeTextures[eye])).OGL.TexId =
        textureIds[eye];
    });

    ovrLock.lock();
    ovrHmd_EndFrame(hmd, eyePoses, eyeTextures);
  }
Example #15
GMO double endFrame() {
	static ovrPosef eyeRenderPose[2]; 
	static float    BodyYaw(3.141592f);
	static Vector3f HeadPos(0.0f, 1.6f, -5.0f);
	HeadPos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, HeadPos.y);

    //pRender->SetRenderTarget ( pRendertargetTexture );
	
    //pRender->SetViewport (Recti(0,0, pRendertargetTexture->GetWidth(),
    //                                    pRendertargetTexture->GetHeight() ));  
    //pRender->Clear();
	for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
	{
        ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];
        eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);

        // Get view and projection matrices
		Matrix4f rollPitchYaw       = Matrix4f::RotationY(BodyYaw);
		Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(eyeRenderPose[eye].Orientation);
		Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0,1,0));
		Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0,0,-1));
		Vector3f shiftedEyePos      = HeadPos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
        Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp); 
		Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, true);

		//pRender->SetViewport(Recti(EyeRenderViewport[eye]));
		//pRender->SetProjection(proj);
		pRender->SetDepthMode(true, true);
		
		//pRoomScene->Render(pRender, Matrix4f::Translation(EyeRenderDesc[eye].ViewAdjust) * view);
	}
	//pRender->BlendState
	
	//pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);
	//pRender->Present( true );
	pRender->UpdateMonitorOutputs();
    pRender->FinishScene();
	ovrHmd_EndFrame(HMD, eyeRenderPose, &EyeTexture[0].Texture);
	return 1;
}
Example #16
void VR::draw(const std::function<void()>& drawer) const
{
    // draw onto our framebuffer, clearing it first
    glBindFramebuffer(GL_FRAMEBUFFER, m_fbo);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // render per-eye
    ovrPosef pose[2];
    auto eye_trans = eye_transforms(pose);
    ovrHmd_BeginFrame(m_hmd, 0);
    for (auto i = 0; i < 2; ++i)
    {
        auto eye = m_hmd->EyeRenderOrder[i];

        // viewport transform: select left/right based on eye
        glViewport(eye == ovrEye_Left ? 0 : m_fb_width / 2, 0,
            m_fb_width / 2, m_fb_height);

        // projection transform: just use OVR's eye matrix
        auto proj = ovrMatrix4f_Projection(m_hmd->DefaultEyeFov[eye],
                                           0.5, 500.0, 1);
        glMatrixMode(GL_PROJECTION);
        glLoadTransposeMatrixf(&proj.M[0][0]); // GL uses column-major

        // view transform: use inverse eye transform
        auto view = inverse(eye_trans[eye]);
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();
        glLoadMatrixf(value_ptr(view));

        // draw!
        drawer();
    }

    // draw onto display
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    ovrHmd_EndFrame(m_hmd, pose, &m_ovr_tex[0].Texture);

    glUseProgram(0); // OVR doesn't restore shader program when done
}
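
eye_transforms() is not shown; presumably it fetches both eye poses (filling `pose` for the later ovrHmd_EndFrame call) and converts each to a GLM eye-to-world matrix, which the loop above then inverts. A sketch under that assumption, with a hypothetical m_eye_rdesc member holding the two ovrEyeRenderDesc structs:

#include <array>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/quaternion.hpp>

// Sketch: query both eye poses and return each as an eye-to-world matrix.
std::array<glm::mat4, 2> VR::eye_transforms(ovrPosef pose[2]) const
{
    ovrVector3f offsets[2] = {
        m_eye_rdesc[0].HmdToEyeViewOffset,
        m_eye_rdesc[1].HmdToEyeViewOffset
    };
    ovrHmd_GetEyePoses(m_hmd, 0, offsets, pose, nullptr);

    std::array<glm::mat4, 2> xforms;
    for (int i = 0; i < 2; ++i)
    {
        glm::quat q(pose[i].Orientation.w, pose[i].Orientation.x,
                    pose[i].Orientation.y, pose[i].Orientation.z);
        glm::vec3 p(pose[i].Position.x, pose[i].Position.y, pose[i].Position.z);
        xforms[i] = glm::translate(glm::mat4(1.0f), p) * glm::mat4_cast(q);
    }
    return xforms;
}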
Example #17
void Renderer::renderOVR(fp_t interp)
{
    UNUSED(interp);

    ovrHmd_BeginFrame(hmd_, 0);

    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_);
    glClear(GL_DEPTH_BUFFER_BIT);

    ovrPosef eyePose[ovrEye_Count];

    for(int i = 0; i < ovrEye_Count; i++)
    {
        ovrEyeType eye = hmd_->EyeRenderOrder[i];

        glViewport(eyeViewport_[eye].Pos.x, eyeViewport_[eye].Pos.y,
                   eyeViewport_[eye].Size.w, eyeViewport_[eye].Size.h);

        glm::mat4 projectionMat = convertOvrMatrix4f(ovrMatrix4f_Projection(eyeRenderDesc_[eye].Fov, 0.1f, 1000.0f, /*rightHanded*/ true));

        eyePose[eye] = ovrHmd_GetEyePose(hmd_, eye);
        Core::get().camera().setHeadPosition(convertOvrVector3f(eyePose[eye].Position));
        Core::get().camera().setHeadRotation(glm::conjugate(convertOvrQuatf(eyePose[eye].Orientation)));

        glm::mat4 viewMat = glm::translate(glm::mat4{}, convertOvrVector3f(eyeRenderDesc_[eye].ViewAdjust));
        viewMat = viewMat * Core::get().camera().viewMatrix();

        skyRenderer_->render();
        landRenderer_->render(projectionMat, viewMat);
        structureRenderer_->render(projectionMat, viewMat);
        modelRenderer_->render(projectionMat, viewMat);
    }

    glBindFramebuffer(GL_FRAMEBUFFER, 0);

    ovrHmd_EndFrame(hmd_, eyePose, (ovrTexture*)eyeTexture_);
}
Example #18
  virtual void draw() {
    ovrHmd_BeginFrame(hmd, frameIndex++);
    ovrPosef eyePoses[2];

    gl::MatrixStack & mv = gl::Stacks::modelview();
    for (int i = 0; i < ovrEye_Count; ++i) {
      ovrEyeType eye = hmd->EyeRenderOrder[i];
      PerEyeArg & eyeArgs = eyes[eye];
      gl::Stacks::projection().top() = eyeArgs.projection;

      eyePoses[eye] = ovrHmd_GetEyePose(hmd, eye);

      eyeArgs.frameBuffer.withFramebufferActive([&]{
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        gl::Stacks::with_push(mv, [&]{
          mv.preMultiply(glm::inverse(Rift::fromOvr(eyePoses[eye])));
          mv.preMultiply(eyeArgs.modelviewOffset);
          drawCubeScene();
        });
      });
    }

    ovrHmd_EndFrame(hmd, eyePoses, textures);
  }
Example #19
void Render(float dt)
{
    static unsigned int frameIndex = 0;
    frameIndex++;
    ovrFrameTiming timing = ovrHmd_BeginFrame(s_hmd, 0);

    // ovrSensorState ss = ovrHmd_GetSensorState(s_hmd, timing.ScanoutMidpointSeconds);
    // TODO: Use this for head tracking...
    // TODO: Use player height from SDK

    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

    // render into fbo
    glBindFramebuffer(GL_FRAMEBUFFER, s_fbo);

    // TODO: enable this when we have more complex rendering.
    glEnable(GL_DEPTH_TEST);
    glDisable(GL_CULL_FACE);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    static float t = 0.0;
    t += dt;

    // clear render target
    glViewport(0, 0, s_renderTargetSize.w, s_renderTargetSize.h);
    glClearColor(s_clearColor.x, s_clearColor.y, s_clearColor.z, s_clearColor.w);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    for (int i = 0; i < 2; i++)
    {
        ovrEyeType eye = s_hmdDesc.EyeRenderOrder[i];
        ovrPosef pose = ovrHmd_BeginEyeRender(s_hmd, eye);

        glViewport(s_eyeTexture[eye].Header.RenderViewport.Pos.x,
                   s_eyeTexture[eye].Header.RenderViewport.Pos.y,
                   s_eyeTexture[eye].Header.RenderViewport.Size.w,
                   s_eyeTexture[eye].Header.RenderViewport.Size.h);

        Quatf q(pose.Orientation.x, pose.Orientation.y, pose.Orientation.z, pose.Orientation.w);
        Vector3f p(pose.Position.x, pose.Position.y, pose.Position.z);

        Matrixf cameraMatrix = Matrixf::QuatTrans(q, s_cameraPos);
        Matrixf viewCenter = cameraMatrix.OrthoInverse();

        // let ovr compute projection matrix, cause it's hard.
        ovrMatrix4f ovrProj = ovrMatrix4f_Projection(s_eyeRenderDesc[eye].Fov, 0.1f, 10000.0f, true);

        // convert to abaci matrix
        Matrixf projMatrix = Matrixf::Rows(Vector4f(ovrProj.M[0][0], ovrProj.M[0][1], ovrProj.M[0][2], ovrProj.M[0][3]),
                                           Vector4f(ovrProj.M[1][0], ovrProj.M[1][1], ovrProj.M[1][2], ovrProj.M[1][3]),
                                           Vector4f(ovrProj.M[2][0], ovrProj.M[2][1], ovrProj.M[2][2], ovrProj.M[2][3]),
                                           Vector4f(ovrProj.M[3][0], ovrProj.M[3][1], ovrProj.M[3][2], ovrProj.M[3][3]));

        // use EyeRenderDesc.ViewAdjust to do eye offset.
        Matrixf viewMatrix = viewCenter * Matrixf::Trans(Vector3f(s_eyeRenderDesc[eye].ViewAdjust.x,
                                                                  s_eyeRenderDesc[eye].ViewAdjust.y,
                                                                  s_eyeRenderDesc[eye].ViewAdjust.z));

        // compute model matrix for terminal
        const float kTermScale = 0.001f;
        const Vector3f termOrigin(-2 * kFeetToMeters, 6.75f * kFeetToMeters, -2.5 * kFeetToMeters);
        Matrixf modelMatrix = Matrixf::ScaleQuatTrans(Vector3f(kTermScale, -kTermScale, kTermScale),
                                                      Quatf::AxisAngle(Vector3f(0, 1, 0), 0),
                                                      termOrigin);
        RenderBegin();

        RenderFloor(projMatrix, viewMatrix, 0.0f);

        RenderTextBegin(projMatrix, viewMatrix, modelMatrix);
        for (int j = 0; j < win_get_text_count(); j++)
        {
            gb::Text* text = (gb::Text*)win_get_text(j);
            if (text)
            {
                RenderText(text->GetQuadVec());
            }
        }
        RenderTextEnd();

        RenderEnd();
        ovrHmd_EndEyeRender(s_hmd, eye, pose, &s_eyeTexture[eye]);
    }

    ovrHmd_EndFrame(s_hmd);
}
Example #20
void OculusManager::render(RenderSystem* render, Scene* scene)
{
	unsigned int l_FrameIndex = 0;
	int i;

	/* the drawing starts with a call to ovrHmd_BeginFrame */
	ovrHmd_BeginFrame(g_Hmd, l_FrameIndex);

	ovrHmd_GetEyePoses(g_Hmd, l_FrameIndex, g_EyeOffsets, g_EyePoses, NULL);

	/* start drawing onto our texture render target */
	glBindFramebuffer(GL_FRAMEBUFFER, l_FBOId);

	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	/* for each eye ... */
	for (int l_EyeIndex = 0; l_EyeIndex < ovrEye_Count; l_EyeIndex++)
	{
		ovrEyeType l_Eye = g_Hmd->EyeRenderOrder[l_EyeIndex];

		glViewport(
			g_EyeTextures[l_Eye].Header.RenderViewport.Pos.x,
			g_EyeTextures[l_Eye].Header.RenderViewport.Pos.y,
			g_EyeTextures[l_Eye].Header.RenderViewport.Size.w,
			g_EyeTextures[l_Eye].Header.RenderViewport.Size.h
			);

		// Pass projection matrix on to OpenGL...
		glMatrixMode(GL_PROJECTION);
		glLoadIdentity();
		glMultMatrixf(&(g_ProjectionMatrici[l_Eye].Transposed().M[0][0]));

		// Create the model-view matrix and pass on to OpenGL...
		glMatrixMode(GL_MODELVIEW);
		glLoadIdentity();

		// Multiply with orientation retrieved from sensor...
		//OVR::Quatf l_Orientation = OVR::Quatf(g_EyePoses[l_Eye].Orientation);
		//OVR::Matrix4f l_ModelViewMatrix = OVR::Matrix4f(l_Orientation.Inverted());
		//glMultMatrixf(&(l_ModelViewMatrix.Transposed().M[0][0]));


		// Translation due to positional tracking (DK2) and IPD...
		//glTranslatef(-g_EyePoses[l_Eye].Position.x, -g_EyePoses[l_Eye].Position.y, -g_EyePoses[l_Eye].Position.z);

		// Move the world forward a bit to show the scene in front of us...
		//glTranslatef(g_CameraPosition.x, g_CameraPosition.y, g_CameraPosition.z);


		//Render
		render->renderOcculus(scene->getChildren(), scene->getLights(), (GLfloat)g_EyeTextures[l_Eye].Header.RenderViewport.Size.w, (GLfloat)g_EyeTextures[l_Eye].Header.RenderViewport.Size.h, toGlm(g_EyePoses[l_Eye]));
	}

	/* after drawing both eyes into the texture render target, revert to drawing directly to the
	* display, and we call ovrHmd_EndFrame, to let the Oculus SDK draw both images properly
	* compensated for lens distortion and chromatic aberration onto the HMD screen.
	*/
	glBindFramebuffer(GL_FRAMEBUFFER, 0);

	ovrHmd_EndFrame(g_Hmd, g_EyePoses, g_EyeTextures);

	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0); // Avoid OpenGL state leak in ovrHmd_EndFrame...
	glBindBuffer(GL_ARRAY_BUFFER, 0); // Avoid OpenGL state leak in ovrHmd_EndFrame...

	glUseProgram(0);

	++l_FrameIndex;
}
Example #21
 void draw() {
   static ovrPosef eyePoses[2];
   ovrHmd_BeginFrame(hmd, getFrame());
   ovrHmd_EndFrame(hmd, eyePoses, eyeTextures);
 }
Example #22
void CoinRiftWidget::paintGL()
{ 
    const int ms(1000 / 75 /*fps*/);
    QTimer::singleShot(ms, this, SLOT(updateGL()));

    // handle the safety warning
    handlingSafetyWarning();

    makeCurrent();

    ovrPosef eyePose[2];

    glEnable(GL_TEXTURE_2D);

    ovrFrameTiming hmdFrameTiming = ovrHmd_BeginFrame(hmd, 0);
    for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++) {
        ovrEyeType eye = hmd->EyeRenderOrder[eyeIndex];
        eyePose[eye] = ovrHmd_GetEyePose(hmd, eye);


        SbRotation    riftOrientation(  eyePose[eye].Orientation.x,
                                        eyePose[eye].Orientation.y,
                                        eyePose[eye].Orientation.z,
                                        eyePose[eye].Orientation.w);

        camera[eye]->orientation.setValue(riftOrientation);

        SbVec3f riftPosition =   SbVec3f(eyePose[eye].Position.x,
                                         eyePose[eye].Position.y,
                                         eyePose[eye].Position.z);


        //SbVec3f originalPosition(camera[eye]->position.getValue());
        SbVec3f viewAdjust(eyeRenderDesc[eye].ViewAdjust.x,
                                                              eyeRenderDesc[eye].ViewAdjust.y,
                                                              eyeRenderDesc[eye].ViewAdjust.z);

        riftOrientation.multVec(viewAdjust,viewAdjust);

        camera[eye]->position.setValue(basePosition - viewAdjust + riftPosition);

        //Base::Console().Log("Eye(%d) Pos: %f, %f, %f  ViewAdjust:  %f, %f, %f \n",eye, eyePose[eye].Position.x,
        //                                                eyePose[eye].Position.y,
        //                                 eyePose[eye].Position.z,
        //                                 eyeRenderDesc[eye].ViewAdjust.x,
        //                                                      eyeRenderDesc[eye].ViewAdjust.y,
        //                                                      eyeRenderDesc[eye].ViewAdjust.z);

#ifdef USE_SO_OFFSCREEN_RENDERER
        ovrGLTextureData *texData = reinterpret_cast<ovrGLTextureData*>(&eyeTexture[eye]);
        glBindTexture(GL_TEXTURE_2D, texData->TexId);
        renderer->render(rootScene[eye]);
        Q_ASSERT(!glGetError());
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA,
                     eyeTexture[eye].Header.TextureSize.w,
                     eyeTexture[eye].Header.TextureSize.h,
                     0, GL_RGBA /*GL_BGRA*/, GL_UNSIGNED_BYTE, renderer->getBuffer());
        Q_ASSERT(!glGetError());
        glBindTexture(GL_TEXTURE_2D, 0);
#endif
#ifdef USE_FRAMEBUFFER
        // Clear state pollution from OVR SDK.
        glBindTexture(GL_TEXTURE_2D, 0); // You need this, at least if (hmdDesc.DistortionCaps & ovrDistortion_Chromatic).
        OVR::CAPI::GL::glUseProgram(0); // You need this even more.

        GLint oldfb;
        glGetIntegerv(GL_FRAMEBUFFER_BINDING_EXT, &oldfb);
        // Set up framebuffer for rendering.
        OVR::CAPI::GL::glBindFramebuffer(GL_FRAMEBUFFER_EXT, frameBufferID[eye]);

        m_sceneManager->setSceneGraph(rootScene[eye]);
// m_sceneManager->setCamera(camera[eye]); // SoSceneManager does this implicitly.
        m_sceneManager->render();

        // Continue rendering to the original frame buffer (likely 0, the onscreen buffer).
        OVR::CAPI::GL::glBindFramebuffer(GL_FRAMEBUFFER_EXT, oldfb);
        Q_ASSERT(!glGetError());
#endif

        //camera[eye]->position.setValue(originalPosition);

    }
    
    // Submit the texture for distortion.     
    ovrHmd_EndFrame(hmd, eyePose, eyeTexture);

    // Swap buffers.
    glDisable(GL_CULL_FACE);
    glDisable(GL_DEPTH_TEST);
    //ovrHmd_EndFrame(hmd);
    glEnable(GL_CULL_FACE);
    glEnable(GL_DEPTH_TEST);
    glClearDepth(1.0);

    doneCurrent();
}
Example #23
void OcculusCameraComponent::breakDown()
{
	parent->getStage()->getGame()->getGraphicsHandle()->setFrameBuffer();
	ovrHmd_EndFrame(hmd, headPoses, &EyeTexture[0].Texture);
}
Example #24
void OculusWorldDemoApp::OnIdle()
{
    double curtime = ovr_GetTimeInSeconds();
    // If running slower than 10fps, clamp. Helps when debugging, because then dt can be minutes!
    float  dt      = Alg::Min<float>(float(curtime - LastUpdate), 0.1f);
    LastUpdate     = curtime;    


    Profiler.RecordSample(RenderProfiler::Sample_FrameStart);

    if (LoadingState == LoadingState_DoLoad)
    {
        PopulateScene(MainFilePath.ToCStr());
        LoadingState = LoadingState_Finished;
        return;
    }    

    if (HmdSettingsChanged)
    {
        CalculateHmdValues();        
        HmdSettingsChanged = false;
    }

    HmdFrameTiming = ovrHmd_BeginFrame(Hmd, 0);


    // Update gamepad.
    GamepadState gamepadState;
    if (GetPlatformCore()->GetGamepadManager()->GetGamepadState(0, &gamepadState))
    {
        GamepadStateChanged(gamepadState);
    }

    SensorState ss = ovrHmd_GetSensorState(Hmd, HmdFrameTiming.ScanoutMidpointSeconds);
    HmdStatus = ss.StatusFlags;

    // Change message status around positional tracking.
	bool hadVisionTracking = HaveVisionTracking;
	HaveVisionTracking = (ss.StatusFlags & Status_PositionTracked) != 0;
	if (HaveVisionTracking && !hadVisionTracking)
		Menu.SetPopupMessage("Vision Tracking Acquired");
    if (!HaveVisionTracking && hadVisionTracking)
		Menu.SetPopupMessage("Lost Vision Tracking");
    
    // Check if any new devices were connected.
    ProcessDeviceNotificationQueue();
    // FPS count and timing.
    UpdateFrameRateCounter(curtime);

    
    // Update pose based on frame!
    ThePlayer.HeadPose = ss.Predicted.Pose;
    // Movement/rotation with the gamepad.
    ThePlayer.BodyYaw -= ThePlayer.GamepadRotate.x * dt;
    ThePlayer.HandleMovement(dt, &CollisionModels, &GroundCollisionModels, ShiftDown);


    // Record after processing time.
    Profiler.RecordSample(RenderProfiler::Sample_AfterGameProcessing);    


    // Determine if we are rendering this frame. Frame rendering may be
    // skipped based on FreezeEyeUpdate and Time-warp timing state.
    bool bupdateRenderedView = FrameNeedsRendering(curtime);
    
    if (bupdateRenderedView)
    {
        // If render texture size is changing, apply dynamic changes to viewport.
        ApplyDynamicResolutionScaling();

        pRender->BeginScene(PostProcess_None);

        if (ForceZeroIpd)
        {             
            // Zero IPD eye rendering: draw into left eye only,
            // re-use texture for right eye.
            pRender->SetRenderTarget(RenderTargets[Rendertarget_Left].pTex);
            pRender->Clear();
        
            ovrPosef eyeRenderPose = ovrHmd_BeginEyeRender(Hmd, ovrEye_Left);
        
            View = CalculateViewFromPose(eyeRenderPose);
            RenderEyeView(ovrEye_Left);
            ovrHmd_EndEyeRender(Hmd, ovrEye_Left, eyeRenderPose, &EyeTexture[ovrEye_Left]);

            // Second eye gets the same texture (initialized to same value above).
            ovrHmd_BeginEyeRender(Hmd, ovrEye_Right); 
            ovrHmd_EndEyeRender(Hmd, ovrEye_Right, eyeRenderPose, &EyeTexture[ovrEye_Right]);
        }

        else if (RendertargetIsSharedByBothEyes)
        {
            // Shared render target eye rendering; set up RT once for both eyes.
            pRender->SetRenderTarget(RenderTargets[Rendertarget_BothEyes].pTex);
            pRender->Clear();

            for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
            {      
                ovrEyeType eye = HmdDesc.EyeRenderOrder[eyeIndex];
                ovrPosef eyeRenderPose = ovrHmd_BeginEyeRender(Hmd, eye);

                View = CalculateViewFromPose(eyeRenderPose);
                RenderEyeView(eye); 
                ovrHmd_EndEyeRender(Hmd, eye, eyeRenderPose, &EyeTexture[eye]);
            }
        }

        else
        {
            // Separate eye rendering - each eye gets its own render target.
            for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
            {      
                ovrEyeType eye = HmdDesc.EyeRenderOrder[eyeIndex];
                pRender->SetRenderTarget(
                    RenderTargets[(eye == 0) ? Rendertarget_Left : Rendertarget_Right].pTex);
                pRender->Clear();
            
                ovrPosef eyeRenderPose = ovrHmd_BeginEyeRender(Hmd, eye);

                View = CalculateViewFromPose(eyeRenderPose);
                RenderEyeView(eye);            
                ovrHmd_EndEyeRender(Hmd, eye, eyeRenderPose, &EyeTexture[eye]);
            }
        }   

        pRender->SetDefaultRenderTarget();
        pRender->FinishScene();        
    }
        
    /*
    double t= ovr_GetTimeInSeconds();
    while (ovr_GetTimeInSeconds() < (t + 0.017))
    {

    } */

    Profiler.RecordSample(RenderProfiler::Sample_AfterEyeRender);

    // TODO: These happen inside ovrHmd_EndFrame; need to hook into it.
    //Profiler.RecordSample(RenderProfiler::Sample_BeforeDistortion);
    ovrHmd_EndFrame(Hmd);
    Profiler.RecordSample(RenderProfiler::Sample_AfterPresent);    
}
Example #25
void HeadMountedDisplay::endFrame( const ovrPosef pose[2], const ovrTexture texture[2] )
{
    KVS_OVR_CALL( ovrHmd_EndFrame( m_handler, pose, texture ) );
}
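
KVS_OVR_CALL presumably wraps the raw SDK call so it can be traced or checked in debug builds; its real definition is not shown here. A plausible stand-in, in which both the macro body and the KVS_ENABLE_DEBUG flag are assumptions rather than the library's actual code:

#include <iostream>

// Hypothetical stand-in for KVS_OVR_CALL: execute the wrapped SDK call,
// logging the call site in debug builds. Not the actual KVS macro.
#if defined(KVS_ENABLE_DEBUG)
#define KVS_OVR_CALL( command )                                        \
    do {                                                               \
        std::cerr << "OVR call: " #command                             \
                  << " (" << __FILE__ << ":" << __LINE__ << ")\n";     \
        command;                                                       \
    } while ( 0 )
#else
#define KVS_OVR_CALL( command ) command
#endif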
Example #26
int main(int argc, char *argv[])
{

	if (argc > 1){
		if  (strcmp(argv[1], "-debug") == 0 )
			mode = MODE_DEBUG;
		else if ( strcmp(argv[1], "-oculus") == 0 )
			mode = MODE_OCULUS;
		else if ( strcmp(argv[1], "-oculus-debug") == 0 )
			mode = MODE_OCULUS_DEBUG;
		else return 100;
	}else
		mode = MODE_DEBUG;

	int err;

	// Init OVR library, hardware and sensors.
	err = init_ovr();
	if ( err != 0 )
		exit( 10 + err );

	// Init window and OpenGL context
	err = init_SDL_GL();
	if ( err != 0 )
		exit( 0 + err );
	
	// Load and init shaders and shader program
	err = load_init_shaders();
	if ( err != 0 )
		exit( 20 + err );

	// Load the Vertices, vertex arrays, etc... And bind them, along with the shaders.
	err = load_vertex();
	if ( err != 0 )
		exit( 30 + err );

	// Loads the textures from files and binds them as uniforms in the fragment shader
	err = load_textures();
	if ( err != 0 )
		exit( 40 + err );
	
	if (mode != MODE_DEBUG){
		// Inits the framebuffer, useful for rendering the scene into a texture to send to the Oculus
		err = init_framebuffers();
		if ( err != 0 )
			exit( 50 + err );

		err = init_render_ovr();
		if ( err != 0 )
			exit( 60 + err );
	}

	std::cout << "Recommended w " << recommendedTex0Size.w << std::endl << "Recommended h " << recommendedTex0Size.h << std::endl;

	// Transformations
	//---------------------------------------------
	// ---- Transfo
	glm::mat4 trans;
	GLuint uniTrans = glGetUniformLocation(shaderProgram, "trans");

	// ---- View
	glm::mat4 view;

	// ---- Projection
	glm::mat4 proj;

	// Render in Texture, and display
	//-------------------------------------------------
	if (mode == MODE_OCULUS_DEBUG ){

		load_init_passthrough_shaders();
		GLuint passthroughOB;
		glGenBuffers(1, &passthroughOB);
		glBindBuffer(GL_ARRAY_BUFFER, passthroughOB);
		glBufferData(GL_ARRAY_BUFFER, sizeof(passthroughScreen), passthroughScreen, GL_STATIC_DRAW);
		
		// Binding the fragment Shader output to the current buffer
		glBindFragDataLocation(passthroughShadersProgram, 0, "passthroughColor");
		errorCode = glGetError();

		// Link and Use Program
		glLinkProgram(passthroughShadersProgram);
		glUseProgram(passthroughShadersProgram);
		
		// Store the attributes for the shaders
		
		glGenVertexArrays(1, &passthroughVAO);
		glBindVertexArray(passthroughVAO);

		// Attributes Locations for Shaders and Enable
		GLint posAttrib = glGetAttribLocation(passthroughShadersProgram, "position");
		glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 4, 0);
		glEnableVertexAttribArray(posAttrib);

		GLint colorAttrib = glGetAttribLocation(passthroughShadersProgram, "texCoords");
		glVertexAttribPointer(colorAttrib, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 4, (void*)(sizeof(float) * 2) );
		glEnableVertexAttribArray(colorAttrib);

		glUseProgram(passthroughShadersProgram);
		glUniform1i(glGetUniformLocation(passthroughShadersProgram, "renderedTex"), 0);
	}

	// Event Loop
	//--------------------------------------------------
	SDL_Event windowEvent;
	while (true)
	{
		if (SDL_PollEvent(&windowEvent))
		{

			// Quit events
			if (windowEvent.type == SDL_QUIT) break;
			else if (windowEvent.type == SDL_KEYUP && windowEvent.key.keysym.sym == SDLK_ESCAPE) break;

		}



		// Enabling resources to draw the cube
		// Before entering the rendering loop
		glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
		glActiveTexture(GL_TEXTURE0);
		glBindTexture(GL_TEXTURE_2D, textures[0]);
		glActiveTexture(GL_TEXTURE1);
		glBindTexture(GL_TEXTURE_2D, textures[1]);


		
		// ---- View
		view = glm::lookAt(
			glm::vec3(5.0f, 5.0f, 5.0f),
			glm::vec3(0.0f, 0.0f, 0.0f),
			glm::vec3(0.0f, 0.0f, 1.0f)
		);

		GLint uniView = glGetUniformLocation(shaderProgram, "view");
		glUniformMatrix4fv(uniView, 1, GL_FALSE, glm::value_ptr(view));
		
		// ---- Projection
		if ( mode == MODE_DEBUG ){
			proj = glm::perspective(45.0f, 1280.0f / 720.0f, 1.0f, 10.0f);
		}else{
			proj = glm::perspective(45.0f, 640.0f / 720.0f, 1.0f, 10.0f);
		}
		GLint uniProj = glGetUniformLocation(shaderProgram, "proj");
		glUniformMatrix4fv(uniProj, 1, GL_FALSE, glm::value_ptr(proj));
		
		//Turn around Z
		trans = glm::rotate(
			trans,
			0.7f,
			glm::vec3(0.0f, 0.0f, 1.0f)
		);
		glUniformMatrix4fv(uniTrans, 1, GL_FALSE, glm::value_ptr(trans));

		
		if ( mode == MODE_OCULUS || mode == MODE_OCULUS_DEBUG ){

			hdmFrameTiming = ovrHmd_BeginFrame(hmd, 0);

			glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
			glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
			
			for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++){

				
	
				ovrEyeType eye = hmd->EyeRenderOrder[eyeIndex];
				headPose[eye] = ovrHmd_GetEyePose(hmd, eye);


				if (eye == ovrEye_Right){
					glScissor(renderTargetSize.w / 2, 0, renderTargetSize.w / 2, renderTargetSize.h);
					glViewport(renderTargetSize.w / 2, 0, renderTargetSize.w / 2, renderTargetSize.h);
				}else{
					glScissor(0, 0, renderTargetSize.w / 2, renderTargetSize.h);
					glViewport(0, 0, renderTargetSize.w / 2, renderTargetSize.h);
				}
				
				if (eye == ovrEye_Right)
					glClearColor(0.0f, 0.3f, 0.0f, 1.0f);
				else
					glClearColor(0.3f, 0.0f, 0.0f, 1.0f);

				glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
				 
				// Drawing
				glDrawArrays(GL_TRIANGLES, 0, 36);

			}

			if (mode == MODE_OCULUS ){
				
				glScissor(0, 0, renderTargetSize.w, renderTargetSize.h);
				glViewport(0, 0, renderTargetSize.w, renderTargetSize.h);

				ovrHmd_EndFrame(hmd, headPose, eyeTex);
				Sleep(1);
			}else if ( mode == MODE_OCULUS_DEBUG ){

				glBindFramebuffer(GL_FRAMEBUFFER, 0);
				glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
				glBindBuffer(GL_ARRAY_BUFFER, 0);
				glUseProgram(0);

				glBindFramebuffer(GL_FRAMEBUFFER, 0);
				glBindVertexArray(passthroughVAO);
				glDisable(GL_DEPTH_TEST);
				glUseProgram(passthroughShadersProgram);

				glActiveTexture(GL_TEXTURE0);

				glDrawArrays(GL_TRIANGLES, 0, 6);

			}
		
		}else if (mode == MODE_DEBUG){


			// Clear the screen and the depth buffer (as it is filled with 0 initially,
			// nothing will be drawn (0 = on top)).
			glClearColor(0.0f, 0.3f, 0.0f, 1.0f);
			glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);


			// Drawing
			glDrawArrays(GL_TRIANGLES, 0, 36);

		}

		
		if ( mode != MODE_OCULUS )
			SDL_GL_SwapWindow(window);


	}

	// Destroy the HMD and shut down the library
	ovrHmd_Destroy(hmd);
	ovr_Shutdown();

	// Quit SDL and OpenGL
	glDeleteFramebuffers(1, &frameBuffer);
	SDL_GL_DeleteContext(context);
	SDL_Quit();

	return 0;
}
Example #27
//-------------------------------------------------------------------------------------
void ProcessAndRender()
{
    static ovrPosef eyeRenderPose[2]; 

	// Start timing
    #if SDK_RENDER
	ovrHmd_BeginFrame(HMD, 0); 
    #else
	ovrHmd_BeginFrameTiming(HMD, 0); 
    // Retrieve data useful for handling the Health and Safety Warning - unused, but here for reference
    ovrHSWDisplayState hswDisplayState;
    ovrHmd_GetHSWDisplayState(HMD, &hswDisplayState);
    #endif

	// Adjust eye position and rotation from controls, maintaining y position from HMD.
	static float    BodyYaw(3.141592f);
	static Vector3f HeadPos(0.0f, 1.6f, -5.0f);
//	HeadPos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, HeadPos.y);
	bool freezeEyeRender = Util_RespondToControls(BodyYaw, HeadPos, eyeRenderPose[1].Orientation);

     pRender->BeginScene();
    
	// Render the two undistorted eye views into their render buffers.
    if (!freezeEyeRender) // freeze to debug for time warp
    {
        pRender->SetRenderTarget ( pRendertargetTexture );
        pRender->SetViewport (Recti(0,0, pRendertargetTexture->GetWidth(),
                                         pRendertargetTexture->GetHeight() ));  
        pRender->Clear();
		for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
		{
            ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];
            eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);

            // Get view and projection matrices
			Matrix4f rollPitchYaw       = Matrix4f::RotationY(BodyYaw);
			Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(eyeRenderPose[eye].Orientation);
			Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0,1,0));
			Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0,0,-1));
			Vector3f shiftedEyePos      = HeadPos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
            Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp); 
			Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, true);

			pRender->SetViewport(Recti(EyeRenderViewport[eye]));
			pRender->SetProjection(proj);
			pRender->SetDepthMode(true, true);
			pRoomScene->Render(pRender, Matrix4f::Translation(EyeRenderDesc[eye].ViewAdjust) * view);
		}
    }
    pRender->FinishScene();

    #if SDK_RENDER	// Let OVR do distortion rendering, Present and flush/sync
	ovrHmd_EndFrame(HMD, eyeRenderPose, &EyeTexture[0].Texture);
    #else
	// Clear screen
	pRender->SetDefaultRenderTarget();
	pRender->SetFullViewport();
	pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);

	// Setup shader
	ShaderFill distortionShaderFill(Shaders);
	distortionShaderFill.SetTexture(0, pRendertargetTexture);
	distortionShaderFill.SetInputLayout(VertexIL);

	for(int eyeNum = 0; eyeNum < 2; eyeNum++)
	{
		// Get and set shader constants
		Shaders->SetUniform2f("EyeToSourceUVScale",   UVScaleOffset[eyeNum][0].x, UVScaleOffset[eyeNum][0].y);
		Shaders->SetUniform2f("EyeToSourceUVOffset",  UVScaleOffset[eyeNum][1].x, UVScaleOffset[eyeNum][1].y);
 		ovrMatrix4f timeWarpMatrices[2];
		ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
		Shaders->SetUniform4x4f("EyeRotationStart", timeWarpMatrices[0]);  //Nb transposed when set
		Shaders->SetUniform4x4f("EyeRotationEnd",   timeWarpMatrices[1]);  //Nb transposed when set
		// Perform distortion
		pRender->Render(&distortionShaderFill, MeshVBs[eyeNum], MeshIBs[eyeNum],sizeof(ovrDistortionVertex));
	}

	pRender->SetDefaultRenderTarget();

	pRender->Present( true ); // Vsync enabled

    // Only flush GPU for ExtendDesktop; not needed in Direct App Rendering with Oculus driver.
    if (HMD->HmdCaps & ovrHmdCap_ExtendDesktop)
		pRender->WaitUntilGpuIdle();  
	ovrHmd_EndFrameTiming(HMD);
    #endif
}
Example #28
//-------------------------------------------------------------------------------------
int WINAPI WinMain(HINSTANCE hinst, HINSTANCE, LPSTR, int)
{
    // Initializes LibOVR, and the Rift
    ovr_Initialize();
    HMD = ovrHmd_Create(0);

    if (!HMD)                       { MessageBoxA(NULL,"Oculus Rift not detected.","", MB_OK); return(0); }
    if (HMD->ProductName[0] == '\0')  MessageBoxA(NULL,"Rift detected, display not enabled.", "", MB_OK);

    // Setup Window and Graphics - use window frame if relying on Oculus driver
    bool windowed = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;    
    if (!WND.InitWindowAndDevice(hinst, Recti(HMD->WindowsPos, HMD->Resolution), windowed))
        return(0);

    WND.SetMaxFrameLatency(1);
    ovrHmd_AttachToWindow(HMD, WND.Window, NULL, NULL);
    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
                                  ovrTrackingCap_Position, 0);

    // Make the eye render buffers (caution if actual size < requested due to HW limits). 
    for (int eye=0; eye<2; eye++)
    {
        Sizei idealSize             = ovrHmd_GetFovTextureSize(HMD, (ovrEyeType)eye,
                                                               HMD->DefaultEyeFov[eye], 1.0f);
        pEyeRenderTexture[eye]      = new ImageBuffer(true, false, idealSize);
        pEyeDepthBuffer[eye]        = new ImageBuffer(true, true, pEyeRenderTexture[eye]->Size);
        EyeRenderViewport[eye].Pos  = Vector2i(0, 0);
        EyeRenderViewport[eye].Size = pEyeRenderTexture[eye]->Size;
    }

    // Setup VR components
#if SDK_RENDER
	#if RENDER_OPENGL
    ovrGLConfig oglcfg;
    oglcfg.OGL.Header.API				= ovrRenderAPI_OpenGL;
    oglcfg.OGL.Header.BackBufferSize	= Sizei(HMD->Resolution.w, HMD->Resolution.h);
    oglcfg.OGL.Header.Multisample		= 1;
	oglcfg.OGL.Window					= OGL.Window;
	oglcfg.OGL.DC						= GetDC(OGL.Window);

    if (!ovrHmd_ConfigureRendering(HMD, &oglcfg.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   HMD->DefaultEyeFov, EyeRenderDesc))	
		return(1);
	#else
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API            = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.BackBufferSize = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample    = 1;
    d3d11cfg.D3D11.pDevice               = WND.Device;
    d3d11cfg.D3D11.pDeviceContext        = WND.Context;
    d3d11cfg.D3D11.pBackBufferRT         = WND.BackBufferRT;
    d3d11cfg.D3D11.pSwapChain            = WND.SwapChain;

    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
                                   ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
                                   HMD->DefaultEyeFov, EyeRenderDesc))
        return(1);
	#endif
#else
    APP_RENDER_SetupGeometryAndShaders();
#endif

    // Create the room model
    Scene roomScene(false); // Can simplify scene further with parameter if required.

    // Initialize Webcams and threads
	WebCamManager WebCamMngr(HMD);

    // MAIN LOOP
    // =========
    while (!(WND.Key['Q'] && WND.Key[VK_CONTROL]) && !WND.Key[VK_ESCAPE])
    {
        WND.HandleMessages();
        
        float       speed                    = 1.0f; // Can adjust the movement speed. 
        int         timesToRenderScene       = 1;    // Can adjust the render burden on the app.
		ovrVector3f useHmdToEyeViewOffset[2] = {EyeRenderDesc[0].HmdToEyeViewOffset,
			                                    EyeRenderDesc[1].HmdToEyeViewOffset};
        // Start timing
    #if SDK_RENDER
        ovrHmd_BeginFrame(HMD, 0);
    #else
        ovrHmd_BeginFrameTiming(HMD, 0);
    #endif

        // Handle key toggles for re-centering, meshes, FOV, etc.
        ExampleFeatures1(&speed, &timesToRenderScene, useHmdToEyeViewOffset);

        // Keyboard inputs to adjust player orientation
        if (WND.Key[VK_LEFT])  Yaw += 0.02f;
        if (WND.Key[VK_RIGHT]) Yaw -= 0.02f;

        // Keyboard inputs to adjust player position
        if (WND.Key['W']||WND.Key[VK_UP])   Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(0,0,-speed*0.05f));
        if (WND.Key['S']||WND.Key[VK_DOWN]) Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(0,0,+speed*0.05f));
        if (WND.Key['D'])                    Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(+speed*0.05f,0,0));
        if (WND.Key['A'])                    Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(-speed*0.05f,0,0));
        Pos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, Pos.y);
  
        // Animate the cube
        if (speed)
            roomScene.Models[0]->Pos = Vector3f(9*sin(0.01f*clock),3,9*cos(0.01f*clock));

		// Get both eye poses simultaneously, with IPD offset already included. 
		ovrPosef temp_EyeRenderPose[2];
		ovrHmd_GetEyePoses(HMD, 0, useHmdToEyeViewOffset, temp_EyeRenderPose, NULL);

		// Update textures with WebCams' frames
		WebCamMngr.Update();	

        // Render the two undistorted eye views into their render buffers.  
        for (int eye = 0; eye < 2; eye++)
        {
            ImageBuffer * useBuffer      = pEyeRenderTexture[eye];  
            ovrPosef    * useEyePose     = &EyeRenderPose[eye];
            float       * useYaw         = &YawAtRender[eye];
            bool          clearEyeImage  = true;
            bool          updateEyeImage = true;

            // Handle key toggles for half-frame rendering, buffer resolution, etc.
            ExampleFeatures2(eye, &useBuffer, &useEyePose, &useYaw, &clearEyeImage, &updateEyeImage);

            if (clearEyeImage)
			#if RENDER_OPENGL
				WND.ClearAndSetRenderTarget(useBuffer, Recti(EyeRenderViewport[eye]));
			#else
                WND.ClearAndSetRenderTarget(useBuffer->TexRtv,
                                             pEyeDepthBuffer[eye], Recti(EyeRenderViewport[eye]));	
			#endif

            if (updateEyeImage)
            {
                // Write in values actually used (becomes significant in Example features)
                *useEyePose = temp_EyeRenderPose[eye];
                *useYaw     = Yaw;

                // Get view and projection matrices (note near Z to reduce eye strain)
                Matrix4f rollPitchYaw       = Matrix4f::RotationY(Yaw);
                Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(useEyePose->Orientation);
                Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0, 1, 0));
                Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0, 0, -1));
                Vector3f shiftedEyePos      = Pos + rollPitchYaw.Transform(useEyePose->Position);

                Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
                Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.2f, 1000.0f, true); 

				// Keyboard input to switch from "look through" to scene mode
				static bool bOldLookThrough	= false;
				static bool bLookThrough	= true;
				if (WND.Key['X'] && bOldLookThrough != WND.Key['X']) { bLookThrough = !bLookThrough; }
				bOldLookThrough = WND.Key['X'];

				if(!bLookThrough)
				{
					// Render the scene
					for (int t=0; t<timesToRenderScene; t++)
						roomScene.Render(view, proj.Transposed());

					WebCamMngr.DrawBoard(view, proj.Transposed());
				}
				else { WebCamMngr.DrawLookThrough(eye); }
            }
        }

        // Do distortion rendering, Present and flush/sync
    #if SDK_RENDER
		#if RENDER_OPENGL
		ovrGLTexture eyeTexture[2]; // Gather data for eye textures 
        for (int eye = 0; eye<2; eye++)
        {
            eyeTexture[eye].OGL.Header.API				= ovrRenderAPI_OpenGL;
            eyeTexture[eye].OGL.Header.TextureSize		= pEyeRenderTexture[eye]->Size;
            eyeTexture[eye].OGL.Header.RenderViewport	= EyeRenderViewport[eye];
            eyeTexture[eye].OGL.TexId					= pEyeRenderTexture[eye]->TexId;
        }
		#else
        ovrD3D11Texture eyeTexture[2]; // Gather data for eye textures 
        for (int eye = 0; eye<2; eye++)
        {
            eyeTexture[eye].D3D11.Header.API            = ovrRenderAPI_D3D11;
            eyeTexture[eye].D3D11.Header.TextureSize    = pEyeRenderTexture[eye]->Size;
            eyeTexture[eye].D3D11.Header.RenderViewport = EyeRenderViewport[eye];
            eyeTexture[eye].D3D11.pTexture              = pEyeRenderTexture[eye]->Tex;
            eyeTexture[eye].D3D11.pSRView               = pEyeRenderTexture[eye]->TexSv;
        }
		#endif
		ovrHmd_EndFrame(HMD, EyeRenderPose, &eyeTexture[0].Texture);
    #else
        APP_RENDER_DistortAndPresent();
    #endif
    }

	WebCamMngr.StopCapture();

    // Release and close down
    ovrHmd_Destroy(HMD);
    ovr_Shutdown();
	WND.ReleaseWindow(hinst);

    return(0);
}
Example #29
void RiftAppSkeleton::display_sdk() //const
{
    ovrHmd hmd = m_Hmd;
    if (hmd == NULL)
        return;

    //const ovrFrameTiming hmdFrameTiming =
    ovrHmd_BeginFrame(m_Hmd, 0);

    bindFBO(m_renderBuffer);

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // For passing to EndFrame once rendering is done
    ovrPosef renderPose[2];
    ovrTexture eyeTexture[2];

    for (int eyeIndex=0; eyeIndex<ovrEye_Count; eyeIndex++)
    {
        const ovrEyeType eye = hmd->EyeRenderOrder[eyeIndex];
        const ovrPosef eyePose = ovrHmd_GetEyePose(m_Hmd, eye);
        m_eyeOri = eyePose.Orientation; // cache this for movement direction
        _StoreHmdPose(eyePose);

        const ovrGLTexture& otex = l_EyeTexture[eye];
        const ovrRecti& rvp = otex.OGL.Header.RenderViewport;
        glViewport(
            rvp.Pos.x,
            rvp.Pos.y,
            rvp.Size.w,
            rvp.Size.h
            );

        const OVR::Matrix4f proj = ovrMatrix4f_Projection(
            m_EyeRenderDesc[eye].Fov,
            0.01f, 10000.0f, true);

        const OVR::Matrix4f view = _MakeModelviewMatrix(
            eyePose,
            -OVR::Vector3f(m_EyeRenderDesc[eye].ViewAdjust), // not sure why negative...
            m_chassisYaw,
            m_chassisPos);

        const OVR::Matrix4f scaledView = _MakeModelviewMatrix(
            eyePose,
            -OVR::Vector3f(m_EyeRenderDesc[eye].ViewAdjust), // not sure why negative...
            m_chassisYaw,
            m_chassisPos,
            m_headSize);

        _resetGLState();

        _DrawScenes(&view.Transposed().M[0][0], &proj.Transposed().M[0][0], rvp, &scaledView.Transposed().M[0][0]);

        renderPose[eyeIndex] = eyePose;
        eyeTexture[eyeIndex] = l_EyeTexture[eye].Texture;
    }
    unbindFBO();

    ovrHmd_EndFrame(m_Hmd, renderPose, eyeTexture);

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glUseProgram(0);
}