/**
   * Build a Direct HMD mode window, binding the OVR SDK to the native window object.
   */
  GLFWwindow * createDirectHmdModeWindow(ovrHmd hmd, glm::uvec2 & outSize) {

    // On Linux it's recommended to leave the screen in its default portrait
    // orientation. The SDK currently provides no mechanism to test whether
    // this is the case, so in direct mode we need to swap the x and y values.
    ON_LINUX([&] {
      std::swap(outSize.x, outSize.y);
    });

    // In direct HMD mode we always use the native resolution, because the
    // user has no control over it. Try to put the output window on a
    // secondary screen (for easier debugging, assuming your dev environment
    // is on the primary).
    GLFWwindow * window = glfw::createSecondaryScreenWindow(outSize);

    // Attach the OVR SDK to the native window
    void * nativeWindowHandle = glfw::getNativeWindowHandle(window);
    if (nullptr != nativeWindowHandle) {
      ovrHmd_AttachToWindow(hmd, nativeWindowHandle, nullptr, nullptr);
    }

    // A bug in some versions of the SDK (0.4.x) prevents Direct Mode from 
    // engaging properly unless you call the GetEyePoses function.
    {
      static ovrVector3f offsets[2];
      static ovrPosef poses[2];
      ovrHmd_GetEyePoses(hmd, 0, offsets, poses, nullptr);
    }

    return window;
  }
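The ON_LINUX helper used above is not shown in this listing; a minimal sketch consistent with its usage (an assumption, not the project's actual definition) simply runs the callable on Linux builds and ignores it elsewhere:

template <typename Function>
void ON_LINUX(Function f) {
#ifdef __linux__
  f();        // Linux build: apply the platform-specific tweak
#else
  (void) f;   // other platforms: do nothing
#endif
}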
Example #2
void OVR_SDL2_app::run()
{
    SDL_Event e;

    while (running)
    {
        // Dispatch all pending SDL events.

        while (SDL_PollEvent(&e))
            dispatch(e);

        // Let the application animate.

        step();

        // Render both views and let the Oculus SDK display them on-screen.
        // 'eye' is a private member variable that notes which eye is being
        // rendered. This is used when the application calls back down to
        // learn the view and projection matrices.

        ovrHmd_BeginFrame(hmd, 0);
        {
            ovrHmd_GetEyePoses(hmd, 0, offset, pose, NULL);

            for (int i = 0; i < 2; i++)
            {
                eye = hmd->EyeRenderOrder[i];
                buffer[eye]->bind();
                draw();
            }
        }
        ovrHmd_EndFrame(hmd, pose, tex);
    }
}
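The comment above says the application "calls back down" for the view and projection matrices using the private 'eye' member. A hedged sketch of what such a view accessor might look like, assuming glm rather than OVR_SDL2's own math types (member names follow the loop above; this is not the project's actual code):

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/quaternion.hpp>

glm::mat4 OVR_SDL2_app::view() const
{
    // Build the tracked pose of the eye currently being rendered and
    // invert it to obtain a view matrix.
    const ovrPosef& p = pose[eye];
    glm::mat4 head =
        glm::translate(glm::mat4(1.0f),
                       glm::vec3(p.Position.x, p.Position.y, p.Position.z)) *
        glm::mat4_cast(glm::quat(p.Orientation.w, p.Orientation.x,
                                 p.Orientation.y, p.Orientation.z));
    return glm::inverse(head);
}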
Example #3
  void draw() {
    static int frameIndex = 0;
    static ovrPosef eyePoses[2];
    ++frameIndex;
    ovrHmd_GetEyePoses(hmd, frameIndex, eyeOffsets, eyePoses, nullptr);

    ovrHmd_BeginFrame(hmd, frameIndex);
    glEnable(GL_DEPTH_TEST);

    for (int i = 0; i < 2; ++i) {
      ovrEyeType eye = hmd->EyeRenderOrder[i];

      const ovrRecti & vp = textures[eye].Header.RenderViewport;
      eyeFramebuffers[eye]->Bind();
      oglplus::Context::Viewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
      Stacks::projection().top() = eyeProjections[eye];

      MatrixStack & mv = Stacks::modelview();
      mv.withPush([&]{
        // Apply the per-eye offset & the head pose
        mv.top() = glm::inverse(ovr::toGlm(eyePoses[eye])) * mv.top();
        renderScene();
      });
    }
    oglplus::DefaultFramebuffer().Bind(oglplus::Framebuffer::Target::Draw);

    ovrHmd_EndFrame(hmd, eyePoses, textures);
  }
Example #4
std::array<mat4, 2> VR::eye_transforms(ovrPosef pose[2], bool mid) const
{
    ovrVector3f eye_offset[2] =
    {
        m_eye_rdesc[0].HmdToEyeViewOffset,
        m_eye_rdesc[1].HmdToEyeViewOffset,
    };

    // 'mid eye'? use midpoint
    if (mid)
    {
        eye_offset[0].x = eye_offset[1].x =
            0.5f * (eye_offset[0].x + eye_offset[1].x);
        eye_offset[0].y = eye_offset[1].y =
            0.5f * (eye_offset[0].y + eye_offset[1].y);
        eye_offset[0].z = eye_offset[1].z =
            0.5f * (eye_offset[0].z + eye_offset[1].z);
    }

    ovrHmd_GetEyePoses(m_hmd, 0, eye_offset, pose, nullptr);
    return
    {
        {
            translate(mat4(), convert(pose[0].Position))
            * mat4_cast(convert(pose[0].Orientation)),
            translate(mat4(), convert(pose[1].Position))
            * mat4_cast(convert(pose[1].Orientation))
        }
    };
}
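A possible call site for eye_transforms (illustrative only; 'vr' and the glm-style using-declarations are assumptions): the returned matrices are eye-to-world transforms, so they are inverted to obtain per-eye view matrices.

ovrPosef pose[2];
std::array<mat4, 2> eyes = vr.eye_transforms(pose, false);
mat4 view_left  = inverse(eyes[0]);  // eye-to-world, inverted for viewing
mat4 view_right = inverse(eyes[1]);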
Example #5
//Draw
void COculusVR::DrawScreen()
{
	//clear
	wzClear();

	// Adjust eye position and rotation from controls, maintaining y position from HMD.
	static float BodyYaw(3.141592f);
	static Vector3f HeadPos(0.0f, 0.0f, -5.0f);
    static ovrTrackingState HmdState;
	static ovrPosef eyeRenderPose[2];

    ovrVector3f hmdToEyeViewOffset[2] = { EyeRenderDesc[0].HmdToEyeViewOffset, EyeRenderDesc[1].HmdToEyeViewOffset };
    ovrHmd_GetEyePoses(Hmd, 0, hmdToEyeViewOffset, eyeRenderPose, &HmdState);

	/* debug
		wzSetSpriteScSize(1920, 1080);
		wzSetSpritePosition(0.0f, 0.0f, 0.0f);
		wzSetSpriteColor(1.0f, 1.0f, 1.0f, 1.0f);
		wzSetSpriteTexCoord(0.0f, 0.0f, 1.0f, 1.0f);
		wzSetSpriteSizeLeftUp((float)1920, (float)1080);
		wzSetSpriteTexture(&m_screenTex);
		wzSpriteDraw();	//Draw
	*/

	// Setup shader
	wzUseShader(&LensShader);
	wzSetTexture("texture0", &m_screenTex, 0);

	for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
	{
		wzVector2 uvScale = {UVScaleOffset[eyeNum][0].x,-UVScaleOffset[eyeNum][0].y};
		wzVector2 uvOffset = {UVScaleOffset[eyeNum][1].x,UVScaleOffset[eyeNum][1].y};
		wzMatrix rotStart,rotEnd;
		wzUniformVector2("eyeToSourceUVscale", &uvScale);
		wzUniformVector2("eyeToSourceUVoffset", &uvOffset);

 		ovrMatrix4f timeWarpMatrices[2];
		ovrHmd_GetEyeTimewarpMatrices(Hmd, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
		memcpy(&rotStart.m,&timeWarpMatrices[0],sizeof(ovrMatrix4f));
		memcpy(&rotEnd.m,&timeWarpMatrices[1],sizeof(ovrMatrix4f));
		wzUniformMatrix("eyeRotationStart", &rotStart);	//Nb transposed when set
		wzUniformMatrix("eyeRotationEnd", &rotEnd);
		
		//Draw Mesh
		wzDrawMesh(&MeshBuffer[eyeNum]);
	}

	//DK2 Latency Tester
	unsigned char latencyColor[3];
	ovrBool drawDk2LatencyQuad = ovrHmd_GetLatencyTest2DrawColor(Hmd, latencyColor);
	if(drawDk2LatencyQuad)
    {
		const int latencyQuadSize = 20; // only needs to be 1-pixel, but larger helps visual debugging
		wzSetViewport(Hmd->Resolution.w - latencyQuadSize, 0, latencyQuadSize, latencyQuadSize);
		wzSetClearColor(latencyColor[0] / 255.0f, latencyColor[1] / 255.0f, latencyColor[2] / 255.0f,0.0f);
		wzClear();
	}
}
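The UVScaleOffset pairs consumed in the loop above are normally computed once at setup time with ovrHmd_GetRenderScaleAndOffset; a sketch, assuming a RenderTargetSize and per-eye EyeRenderViewport values like the other samples here:

for (int eyeNum = 0; eyeNum < 2; eyeNum++)
{
	// Fills UVScaleOffset[eyeNum][0] (scale) and UVScaleOffset[eyeNum][1] (offset).
	ovrHmd_GetRenderScaleAndOffset(EyeRenderDesc[eyeNum].Fov,
	                               RenderTargetSize,
	                               EyeRenderViewport[eyeNum],
	                               UVScaleOffset[eyeNum]);
}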
Example #6
 void draw() {
   // Bug in SDK prevents direct mode from activating unless I call this
   static ovrPosef eyePoses[2];
   {
     static ovrVector3f eyeOffsets[2];
     ovrHmd_GetEyePoses(hmd, getFrame(), eyeOffsets, eyePoses, nullptr);
   }
   ovrHmd_BeginFrame(hmd, getFrame());
   ovrHmd_EndFrame(hmd, eyePoses, eyeTextures);
 }
Example #7
void RiftRenderingApp::drawRiftFrame() {
  ++frameCount;
  ovrHmd_BeginFrame(hmd, frameCount);
  MatrixStack & mv = Stacks::modelview();
  MatrixStack & pr = Stacks::projection();

  perFrameRender();
  
  ovrPosef fetchPoses[2];
  ovrHmd_GetEyePoses(hmd, frameCount, eyeOffsets, fetchPoses, nullptr);
  for (int i = 0; i < 2; ++i) {
    ovrEyeType eye = currentEye = hmd->EyeRenderOrder[i];
    // Force us to alternate eyes if we aren't keeping up with the required framerate
    if (eye == lastEyeRendered) {
      continue;
    }
    // We want to ensure that we only update the pose we 
    // send to the SDK if we actually render this eye.
    eyePoses[eye] = fetchPoses[eye];

    lastEyeRendered = eye;
    Stacks::withPush(pr, mv, [&] {
      // Set up the per-eye projection matrix
      pr.top() = projections[eye];

      // Set up the per-eye modelview matrix
      // Apply the head pose
      glm::mat4 eyePose = ovr::toGlm(eyePoses[eye]);
      mv.preMultiply(glm::inverse(eyePose));

      // Render the scene to an offscreen buffer
      eyeFramebuffers[eye]->Bind();
      perEyeRender();
    });
    
    if (eyePerFrameMode) {
      break;
    }
  }

  if (endFrameLock) {
    endFrameLock->lock();
  }
  ovrHmd_EndFrame(hmd, eyePoses, eyeTextures);
  if (endFrameLock) {
    endFrameLock->unlock();
  }
  rateCounter.increment();
  if (rateCounter.elapsed() > 2.0f) {
    float fps = rateCounter.getRate();
    updateFps(fps);
    rateCounter.reset();
  }
}
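The rateCounter above is project-specific; a minimal, self-contained sketch with the same four-call interface (an assumption about its behavior, not the project's code):

#include <chrono>

class RateCounter {
public:
  void increment() { ++count; }
  // Seconds since construction or the last reset().
  float elapsed() const {
    return std::chrono::duration<float>(clock::now() - start).count();
  }
  // Events per second over the current interval (call only when elapsed() > 0).
  float getRate() const { return count / elapsed(); }
  void reset() { count = 0; start = clock::now(); }
private:
  using clock = std::chrono::steady_clock;
  int count = 0;
  clock::time_point start = clock::now();
};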
Example #8
void gkOculus::Flush()
{
	if(HMD)
	{
		// OVERRIDE MAIN CAMERA

		ICamera* cam = gEnv->p3DEngine->getMainCamera();

		static ovrTrackingState HmdState;

		ovrVector3f hmdToEyeViewOffset[2] = { EyeRenderDesc[0].HmdToEyeViewOffset, EyeRenderDesc[1].HmdToEyeViewOffset };
		ovrHmd_GetEyePoses(HMD, 0, hmdToEyeViewOffset, eyeRenderPose, &HmdState);

		Quat camOrientation;
		camOrientation.v.x = eyeRenderPose->Orientation.x;
		camOrientation.v.y = -eyeRenderPose->Orientation.z;
		camOrientation.v.z = eyeRenderPose->Orientation.y;
		camOrientation.w = eyeRenderPose->Orientation.w;

		cam->setAdditionalOrientation( camOrientation );

		Quat dRot = cam->getDerivedOrientation();

		Vec3 eyeOffsetL = Vec3(eyeRenderPose[0].Position.x, -eyeRenderPose[0].Position.z, eyeRenderPose[0].Position.y);
		Vec3 eyeOffsetR = Vec3(eyeRenderPose[1].Position.x, -eyeRenderPose[1].Position.z, eyeRenderPose[1].Position.y);

		cam->setStereoOffset( dRot * eyeOffsetL, dRot * eyeOffsetR );
		
		Matrix44 leftMat,rightMat;
		
		OVR::Matrix4f projLeft;
		OVR::Matrix4f projRight;

		projLeft = ovrMatrix4f_Projection(EyeRenderDesc[0].Fov, 0.01f, 10000.0f, true);
		projRight = ovrMatrix4f_Projection(EyeRenderDesc[1].Fov, 0.01f, 10000.0f, true);

		leftMat = *((Matrix44*)(&projLeft));
		rightMat = *((Matrix44*)(&projRight));

		leftMat.Transpose();
		rightMat.Transpose();

		cam->setStereoProjMatrix( leftMat, rightMat );

		IRenderSequence* rs = gEnv->pRenderer->RT_GetRenderSequence();
		if (rs)
		{
			rs->addToRenderSequence( m_disortation_renderable_eyes[0], RENDER_LAYER_STEREO_DEVICE );
			rs->addToRenderSequence( m_disortation_renderable_eyes[1], RENDER_LAYER_STEREO_DEVICE );
		}
	}

}
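The component shuffles above (x, -z, y, and the matching quaternion swizzle) convert from the SDK's right-handed, Y-up coordinate frame into the engine's Z-up frame. A hypothetical helper that names the conversion:

inline Vec3 ovrVectorToEngine(const ovrVector3f& v)
{
	// Engine axes differ from OVR's: engine y = -OVR z, engine z = OVR y.
	return Vec3(v.x, -v.z, v.y);
}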
Example #9
void OcculusCameraComponent::setUp()
{
	ovrHmd_BeginFrame(hmd, 0); 
	//frameTiming = ovrHmd_BeginFrameTiming(hmd, 0);
	
	//Set render target		pRender->SetRenderTarget ( pRendertargetTexture );
	//clear render target	pRender->Clear();

	Graphics * g = parent->getStage()->getGame()->getGraphicsHandle();
	g->setFrameBuffer( renderTarget );
	g->clear();

	ovrVector3f hmdToEyeViewOffset[2] = { EyeRenderDesc[0].HmdToEyeViewOffset, EyeRenderDesc[1].HmdToEyeViewOffset };

	static ovrTrackingState HmdState;
	ovrHmd_GetEyePoses(hmd, 0, hmdToEyeViewOffset, headPoses, &HmdState);
}
Example #10
  virtual void draw() {
    ovrPosef eyePoses[2];
    ovrHmd_GetEyePoses(hmd, getFrame(), eyeOffsets, eyePoses, nullptr);

    ovrHmd_BeginFrame(hmd, getFrame());
    MatrixStack & mv = Stacks::modelview();
    for (int i = 0; i < ovrEye_Count; ++i) {
      ovrEyeType eye = hmd->EyeRenderOrder[i];
      PerEyeArg & eyeArgs = eyes[eye];
      Stacks::projection().top() = eyeArgs.projection;

      eyeArgs.framebuffer->Bind();
      oglplus::Context::Clear().DepthBuffer();
      Stacks::withPush(mv, [&]{
        mv.preMultiply(glm::inverse(ovr::toGlm(eyePoses[eye])));
        oria::renderExampleScene(ipd, eyeHeight);
      });
    }
    ovrHmd_EndFrame(hmd, eyePoses, eyeTextures);
  }
Example #11
int
render_rift(struct weston_compositor *compositor, GLuint original_program)
{
  struct oculus_rift *rift = compositor->rift;

  // copy rift->pbuffer into rift->texture
  /*eglMakeCurrent(rift->egl_display, rift->pbuffer, rift->pbuffer, rift->egl_context);
  //glClearColor(0.5, 0.0, 0.5, 1.0);
  //glClear(GL_COLOR_BUFFER_BIT);
  glBindTexture(GL_TEXTURE_2D, rift->texture);
  eglReleaseTexImage(rift->egl_display, rift->pbuffer, EGL_BACK_BUFFER);
  eglBindTexImage(rift->egl_display, rift->pbuffer, EGL_BACK_BUFFER);
  eglMakeCurrent(rift->egl_display, rift->orig_surface, rift->orig_surface, rift->egl_context);*/
  // render eyes

  static int frameIndex = 0;
  ++frameIndex;
  ovrPosef eyePoses[2];
  ovrHmd_BeginFrameTiming(rift->hmd, frameIndex);
  ovrHmd_GetEyePoses(rift->hmd, frameIndex, rift->hmdToEyeOffsets, eyePoses, NULL);

  glEnable(GL_DEPTH_TEST);
  glUseProgram(rift->eye_shader->program);
  int i;
  for(i=0; i<2; i++)
  {
    const ovrEyeType eye = rift->hmd->EyeRenderOrder[i];
    struct EyeArg eyeArg = rift->eyeArgs[eye];
    
    ovrMatrix4f Model = initTranslationF(0.0, 0.0, rift->screen_z);
    Model = matrix4fMul(initScale(
          3.2 * rift->screen_scale, 
          1.8 * rift->screen_scale, 
          1.0), Model);
    ovrMatrix4f MV = matrix4fMul(posefToMatrix4f(eyePoses[eye]), Model);
    //MV = initIdentity();
    //MV.M[2][3] = 5;

    glBindFramebuffer(GL_FRAMEBUFFER, eyeArg.framebuffer);
    glViewport(0, 0, eyeArg.textureWidth, eyeArg.textureHeight);
    glClearColor(0.0, 0.0, 0.2, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);

    glUniform1i(rift->eye_shader->virtualScreenTexture, 0);
    glUniformMatrix4fv(rift->eye_shader->Projection, 1, GL_FALSE, &eyeArg.projection.M[0][0]);
    glUniformMatrix4fv(rift->eye_shader->ModelView, 1, GL_FALSE, &MV.M[0][0]);

    glEnableVertexAttribArray(0);
    glEnableVertexAttribArray(1);

    glBindTexture(GL_TEXTURE_2D, rift->fbTexture);
    glBindBuffer(GL_ARRAY_BUFFER, rift->scene->vertexBuffer);
    glVertexAttribPointer(rift->eye_shader->Position, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), NULL);
    if(rift->sbs == 1)
      glBindBuffer(GL_ARRAY_BUFFER, rift->scene->SBSuvsBuffer[eye]);
    else
      glBindBuffer(GL_ARRAY_BUFFER, rift->scene->uvsBuffer);
    glVertexAttribPointer(rift->eye_shader->TexCoord0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), NULL);
    glDrawArrays(GL_TRIANGLES, 0, 6);

    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glDisableVertexAttribArray(0);
    glDisableVertexAttribArray(1);
    //render_eye(rift, eyeArg);
  }

  glBindFramebuffer(GL_FRAMEBUFFER, 0);

  // render distortion
  glUseProgram(rift->distortion_shader->program);
  glViewport(0, 0, 1920, 1080);

  glClearColor(0.0, 0.1, 0.0, 1.0);
  glClear(GL_COLOR_BUFFER_BIT);
  glDisable(GL_BLEND);
  glDisable(GL_CULL_FACE);
  glDisable(GL_DEPTH_TEST);

  float angle = 0.0;
  if(rift->rotate == 1)
  {
    angle = 1.57079633; // 90 degrees, in radians
    glViewport(0, 0, 1080, 1920);
  }

  int eye;
  for(eye=0; eye<2; eye++)
  {
    struct EyeArg eyeArg = rift->eyeArgs[eye];
    glUniform2fv(rift->distortion_shader->EyeToSourceUVScale, 1, (float *)&eyeArg.scale);
    glUniform2fv(rift->distortion_shader->EyeToSourceUVOffset, 1, (float *)&eyeArg.offset);
    glUniform1i(rift->distortion_shader->RightEye, eye);
    glUniform1f(rift->distortion_shader->angle, angle);
    glUniform1i(rift->distortion_shader->eyeTexture, 0);

    //glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, eyeArg.texture);

    glBindBuffer(GL_ARRAY_BUFFER, eyeArg.vertexBuffer);
    glVertexAttribPointer(rift->distortion_shader->Position, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), NULL);
    glEnableVertexAttribArray(rift->distortion_shader->Position);

    glBindBuffer(GL_ARRAY_BUFFER, eyeArg.uvsBuffer[1]);
    glVertexAttribPointer(rift->distortion_shader->TexCoord0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), NULL);
    glEnableVertexAttribArray(rift->distortion_shader->TexCoord0);
    glBindBuffer(GL_ARRAY_BUFFER, eyeArg.uvsBuffer[0]);
    glVertexAttribPointer(rift->distortion_shader->TexCoordR, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), NULL);
    glEnableVertexAttribArray(rift->distortion_shader->TexCoordR);
    glBindBuffer(GL_ARRAY_BUFFER, eyeArg.uvsBuffer[1]);
    glVertexAttribPointer(rift->distortion_shader->TexCoordG, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), NULL);
    glEnableVertexAttribArray(rift->distortion_shader->TexCoordG);
    glBindBuffer(GL_ARRAY_BUFFER, eyeArg.uvsBuffer[2]);
    glVertexAttribPointer(rift->distortion_shader->TexCoordB, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), NULL);
    glEnableVertexAttribArray(rift->distortion_shader->TexCoordB);

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, eyeArg.indexBuffer);

    glDrawElements(GL_TRIANGLES, eyeArg.mesh.IndexCount, GL_UNSIGNED_SHORT, 0);

    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
  }

  //glEnable(GL_CULL_FACE);
  glEnable(GL_DEPTH_TEST);

  ovrHmd_EndFrameTiming(rift->hmd);

  // set program back to original shader program
  glUseProgram(original_program);
  return 0;
}
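posefToMatrix4f above is a local helper rather than part of the OVR C API; a sketch of a definition consistent with its use, expanding the pose quaternion into a row-major rotation and placing the position in the translation column (an assumption, not the compositor's actual code):

static ovrMatrix4f posefToMatrix4f(ovrPosef pose)
{
  ovrQuatf q = pose.Orientation;
  ovrVector3f t = pose.Position;
  ovrMatrix4f m = {{
    {1-2*(q.y*q.y+q.z*q.z), 2*(q.x*q.y-q.w*q.z),   2*(q.x*q.z+q.w*q.y),   t.x},
    {2*(q.x*q.y+q.w*q.z),   1-2*(q.x*q.x+q.z*q.z), 2*(q.y*q.z-q.w*q.x),   t.y},
    {2*(q.x*q.z-q.w*q.y),   2*(q.y*q.z+q.w*q.x),   1-2*(q.x*q.x+q.y*q.y), t.z},
    {0.0f,                  0.0f,                  0.0f,                  1.0f}
  }};
  return m;
}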
Example #12
//-------------------------------------------------------------------------------------
void ProcessAndRender()
{
    static ovrPosef eyeRenderPose[2]; 

	// Start timing
    #if SDK_RENDER
	ovrHmd_BeginFrame(HMD, 0); 
    #else
	ovrHmd_BeginFrameTiming(HMD, 0); 
    // Retrieve data useful for handling the Health and Safety Warning - unused, but here for reference
    ovrHSWDisplayState hswDisplayState;
    ovrHmd_GetHSWDisplayState(HMD, &hswDisplayState);
    #endif

	// Adjust eye position and rotation from controls, maintaining y position from HMD.
	static float    BodyYaw(3.141592f);
	static Vector3f HeadPos(0.0f, 1.6f, -5.0f);
    static ovrTrackingState HmdState;

    ovrVector3f hmdToEyeViewOffset[2] = { EyeRenderDesc[0].HmdToEyeViewOffset, EyeRenderDesc[1].HmdToEyeViewOffset };
    ovrHmd_GetEyePoses(HMD, 0, hmdToEyeViewOffset, eyeRenderPose, &HmdState);

	HeadPos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, HeadPos.y);
	bool freezeEyeRender = Util_RespondToControls(BodyYaw, HeadPos, HmdState.HeadPose.ThePose.Orientation);

    pRender->BeginScene();
    
	// Render the two undistorted eye views into their render buffers.
    if (!freezeEyeRender) // freeze to debug for time warp
    {
        pRender->SetRenderTarget ( pRendertargetTexture );
        pRender->SetViewport (Recti(0,0, pRendertargetTexture->GetWidth(),
                                         pRendertargetTexture->GetHeight() ));  
        pRender->Clear();
		for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
		{
            ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];

            // Get view and projection matrices
            Matrix4f rollPitchYaw       = Matrix4f::RotationY(BodyYaw);
            Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(eyeRenderPose[eye].Orientation);
            Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0,1,0));
            Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0,0,-1));
            Vector3f shiftedEyePos      = HeadPos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
            Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp); 
			Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, true);

			pRender->SetViewport(Recti(EyeRenderViewport[eye]));
			pRender->SetProjection(proj);
			pRender->SetDepthMode(true, true);
			pRoomScene->Render(pRender, view);
		}
    }
    pRender->FinishScene();

    #if SDK_RENDER	// Let OVR do distortion rendering, Present and flush/sync
	ovrHmd_EndFrame(HMD, eyeRenderPose, &EyeTexture[0].Texture);
    #else
	// Clear screen
	pRender->SetDefaultRenderTarget();
	pRender->SetFullViewport();
	pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);

	// Setup shader
	ShaderFill distortionShaderFill(Shaders);
	distortionShaderFill.SetTexture(0, pRendertargetTexture);

	for(int eyeNum = 0; eyeNum < 2; eyeNum++)
	{
		// Get and set shader constants
		Shaders->SetUniform2f("EyeToSourceUVScale",   UVScaleOffset[eyeNum][0].x, UVScaleOffset[eyeNum][0].y);
		Shaders->SetUniform2f("EyeToSourceUVOffset",  UVScaleOffset[eyeNum][1].x, UVScaleOffset[eyeNum][1].y);
 		ovrMatrix4f timeWarpMatrices[2];
		ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
		Shaders->SetUniform4x4f("EyeRotationStart", timeWarpMatrices[0]);  //Nb transposed when set
		Shaders->SetUniform4x4f("EyeRotationEnd",   timeWarpMatrices[1]);  //Nb transposed when set
		// Perform distortion
		pRender->Render(&distortionShaderFill, MeshVBs[eyeNum], MeshIBs[eyeNum]);
	}
/*
    unsigned char latencyColor[3];
    ovrBool drawDk2LatencyQuad = ovrHmd_GetLatencyTest2DrawColor(HMD, latencyColor);
    if(drawDk2LatencyQuad)
    {
        const int latencyQuadSize = 20; // only needs to be 1-pixel, but larger helps visual debugging
        pRender->SetViewport(HMD->Resolution.w - latencyQuadSize, 0, latencyQuadSize, latencyQuadSize);
        pRender->Clear(latencyColor[0] / 255.0f, latencyColor[1] / 255.0f, latencyColor[2] / 255.0f, 0.0f);
    }
*/
	pRender->SetDefaultRenderTarget();

	pRender->Present( true ); // Vsync enabled

    // Only flush the GPU for ExtendDesktop; not needed in Direct App Rendering with the Oculus driver.
    if (HMD->HmdCaps & ovrHmdCap_ExtendDesktop)
		pRender->WaitUntilGpuIdle();
  
	ovrHmd_EndFrameTiming(HMD);
    #endif
}
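The Health and Safety Warning state fetched near the top of ProcessAndRender is left unused; for reference, the usual SDK 0.4.x pattern dismisses the warning once it is displayed and the user acts (a sketch, not part of this sample):

ovrHSWDisplayState hswState;
ovrHmd_GetHSWDisplayState(HMD, &hswState);
if (hswState.Displayed)
{
    // Has no effect until the minimum display time required by the SDK
    // has elapsed; harmless to call every frame.
    ovrHmd_DismissHSWDisplay(HMD);
}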
Example #13
void HeadMountedDisplay::getEyePoses( kvs::UInt32 index, const ovrVector3f offset[2], ovrPosef poses[2], ovrTrackingState* state )
{
    KVS_OVR_CALL( ovrHmd_GetEyePoses( m_handler, index, offset, poses, state ) );
}
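A possible call site for the wrapper above (illustrative; the variable names are assumptions): pass the per-eye offsets from the eye render descriptions and receive both poses for the frame.

ovrVector3f offsets[2] = { eye_render_desc[0].HmdToEyeViewOffset,
                           eye_render_desc[1].HmdToEyeViewOffset };
ovrPosef poses[2];
hmd.getEyePoses( frame_index, offsets, poses, 0 );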
Example #14
//-------------------------------------------------------------------------------------
int WINAPI WinMain(HINSTANCE hinst, HINSTANCE, LPSTR, int)
{
    // Initializes LibOVR and the Rift
    ovr_Initialize();
    HMD = ovrHmd_Create(0);

    if (!HMD)                       { MessageBoxA(NULL,"Oculus Rift not detected.","", MB_OK); return(0); }
    if (HMD->ProductName[0] == '\0')  MessageBoxA(NULL,"Rift detected, display not enabled.", "", MB_OK);

    // Setup Window and Graphics - use window frame if relying on Oculus driver
    bool windowed = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;    
    if (!WND.InitWindowAndDevice(hinst, Recti(HMD->WindowsPos, HMD->Resolution), windowed))
        return(0);

    WND.SetMaxFrameLatency(1);
    ovrHmd_AttachToWindow(HMD, WND.Window, NULL, NULL);
    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
                                  ovrTrackingCap_Position, 0);

    // Make the eye render buffers (caution if actual size < requested due to HW limits). 
    for (int eye=0; eye<2; eye++)
    {
        Sizei idealSize             = ovrHmd_GetFovTextureSize(HMD, (ovrEyeType)eye,
                                                               HMD->DefaultEyeFov[eye], 1.0f);
        pEyeRenderTexture[eye]      = new ImageBuffer(true, false, idealSize);
        pEyeDepthBuffer[eye]        = new ImageBuffer(true, true, pEyeRenderTexture[eye]->Size);
        EyeRenderViewport[eye].Pos  = Vector2i(0, 0);
        EyeRenderViewport[eye].Size = pEyeRenderTexture[eye]->Size;
    }

    // Setup VR components
#if SDK_RENDER
	#if RENDER_OPENGL
    ovrGLConfig oglcfg;
    oglcfg.OGL.Header.API				= ovrRenderAPI_OpenGL;
    oglcfg.OGL.Header.BackBufferSize	= Sizei(HMD->Resolution.w, HMD->Resolution.h);
    oglcfg.OGL.Header.Multisample		= 1;
	oglcfg.OGL.Window					= OGL.Window;
	oglcfg.OGL.DC						= GetDC(OGL.Window);

    if (!ovrHmd_ConfigureRendering(HMD, &oglcfg.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   HMD->DefaultEyeFov, EyeRenderDesc))	
		return(1);
	#else
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API            = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.BackBufferSize = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample    = 1;
    d3d11cfg.D3D11.pDevice               = WND.Device;
    d3d11cfg.D3D11.pDeviceContext        = WND.Context;
    d3d11cfg.D3D11.pBackBufferRT         = WND.BackBufferRT;
    d3d11cfg.D3D11.pSwapChain            = WND.SwapChain;

    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
                                   ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
                                   HMD->DefaultEyeFov, EyeRenderDesc))
        return(1);
	#endif
#else
    APP_RENDER_SetupGeometryAndShaders();
#endif

    // Create the room model
    Scene roomScene(false); // Can simplify scene further with parameter if required.

    // Initialize Webcams and threads
	WebCamManager WebCamMngr(HMD);

    // MAIN LOOP
    // =========
    while (!(WND.Key['Q'] && WND.Key[VK_CONTROL]) && !WND.Key[VK_ESCAPE])
    {
        WND.HandleMessages();
        
        float       speed                    = 1.0f; // Can adjust the movement speed. 
        int         timesToRenderScene       = 1;    // Can adjust the render burden on the app.
		ovrVector3f useHmdToEyeViewOffset[2] = {EyeRenderDesc[0].HmdToEyeViewOffset,
			                                    EyeRenderDesc[1].HmdToEyeViewOffset};
        // Start timing
    #if SDK_RENDER
        ovrHmd_BeginFrame(HMD, 0);
    #else
        ovrHmd_BeginFrameTiming(HMD, 0);
    #endif

        // Handle key toggles for re-centering, meshes, FOV, etc.
        ExampleFeatures1(&speed, &timesToRenderScene, useHmdToEyeViewOffset);

        // Keyboard inputs to adjust player orientation
        if (WND.Key[VK_LEFT])  Yaw += 0.02f;
        if (WND.Key[VK_RIGHT]) Yaw -= 0.02f;

        // Keyboard inputs to adjust player position
        if (WND.Key['W']||WND.Key[VK_UP])   Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(0,0,-speed*0.05f));
        if (WND.Key['S']||WND.Key[VK_DOWN]) Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(0,0,+speed*0.05f));
        if (WND.Key['D'])                    Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(+speed*0.05f,0,0));
        if (WND.Key['A'])                    Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(-speed*0.05f,0,0));
        Pos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, Pos.y);
  
        // Animate the cube
        if (speed)
            roomScene.Models[0]->Pos = Vector3f(9*sin(0.01f*clock),3,9*cos(0.01f*clock));

		// Get both eye poses simultaneously, with IPD offset already included. 
		ovrPosef temp_EyeRenderPose[2];
		ovrHmd_GetEyePoses(HMD, 0, useHmdToEyeViewOffset, temp_EyeRenderPose, NULL);

		// Update textures with WebCams' frames
		WebCamMngr.Update();	

        // Render the two undistorted eye views into their render buffers.  
        for (int eye = 0; eye < 2; eye++)
        {
            ImageBuffer * useBuffer      = pEyeRenderTexture[eye];  
            ovrPosef    * useEyePose     = &EyeRenderPose[eye];
            float       * useYaw         = &YawAtRender[eye];
            bool          clearEyeImage  = true;
            bool          updateEyeImage = true;

            // Handle key toggles for half-frame rendering, buffer resolution, etc.
            ExampleFeatures2(eye, &useBuffer, &useEyePose, &useYaw, &clearEyeImage, &updateEyeImage);

            if (clearEyeImage)
			#if RENDER_OPENGL
				WND.ClearAndSetRenderTarget(useBuffer, Recti(EyeRenderViewport[eye]));
			#else
                WND.ClearAndSetRenderTarget(useBuffer->TexRtv,
                                             pEyeDepthBuffer[eye], Recti(EyeRenderViewport[eye]));	
			#endif

            if (updateEyeImage)
            {
                // Write in values actually used (becomes significant in Example features)
                *useEyePose = temp_EyeRenderPose[eye];
                *useYaw     = Yaw;

                // Get view and projection matrices (note near Z to reduce eye strain)
                Matrix4f rollPitchYaw       = Matrix4f::RotationY(Yaw);
                Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(useEyePose->Orientation);
                Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0, 1, 0));
                Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0, 0, -1));
                Vector3f shiftedEyePos      = Pos + rollPitchYaw.Transform(useEyePose->Position);

                Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
                Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.2f, 1000.0f, true); 

				// Keyboard input to switch from "look through" to scene mode
				static bool bOldLookThrough	= false;
				static bool bLookThrough	= true;
				if (WND.Key['X'] && bOldLookThrough != WND.Key['X']) { bLookThrough = !bLookThrough; }
				bOldLookThrough = WND.Key['X'];

				if(!bLookThrough)
				{
					// Render the scene
					for (int t=0; t<timesToRenderScene; t++)
						roomScene.Render(view, proj.Transposed());

					WebCamMngr.DrawBoard(view, proj.Transposed());
				}
				else { WebCamMngr.DrawLookThrough(eye); }
            }
        }

        // Do distortion rendering, Present and flush/sync
    #if SDK_RENDER
		#if RENDER_OPENGL
		ovrGLTexture eyeTexture[2]; // Gather data for eye textures 
        for (int eye = 0; eye<2; eye++)
        {
            eyeTexture[eye].OGL.Header.API				= ovrRenderAPI_OpenGL;
            eyeTexture[eye].OGL.Header.TextureSize		= pEyeRenderTexture[eye]->Size;
            eyeTexture[eye].OGL.Header.RenderViewport	= EyeRenderViewport[eye];
            eyeTexture[eye].OGL.TexId					= pEyeRenderTexture[eye]->TexId;
        }
		#else
        ovrD3D11Texture eyeTexture[2]; // Gather data for eye textures 
        for (int eye = 0; eye<2; eye++)
        {
            eyeTexture[eye].D3D11.Header.API            = ovrRenderAPI_D3D11;
            eyeTexture[eye].D3D11.Header.TextureSize    = pEyeRenderTexture[eye]->Size;
            eyeTexture[eye].D3D11.Header.RenderViewport = EyeRenderViewport[eye];
            eyeTexture[eye].D3D11.pTexture              = pEyeRenderTexture[eye]->Tex;
            eyeTexture[eye].D3D11.pSRView               = pEyeRenderTexture[eye]->TexSv;
        }
		#endif
		ovrHmd_EndFrame(HMD, EyeRenderPose, &eyeTexture[0].Texture);
    #else
        APP_RENDER_DistortAndPresent();
    #endif
    }

	WebCamMngr.StopCapture();

    // Release and close down
    ovrHmd_Destroy(HMD);
    ovr_Shutdown();
	WND.ReleaseWindow(hinst);

    return(0);
}
Example #15
void OVRSDK06AppSkeleton::display_sdk() const
{
    ovrHmd hmd = m_Hmd;
    if (hmd == NULL)
        return;

    ovrTrackingState outHmdTrackingState = { 0 };
    ovrHmd_GetEyePoses(m_Hmd, m_frameIndex, m_eyeOffsets,
        m_eyePoses, &outHmdTrackingState);

    for (ovrEyeType eye = ovrEyeType::ovrEye_Left;
        eye < ovrEyeType::ovrEye_Count;
        eye = static_cast<ovrEyeType>(eye + 1))
    {
        const ovrSwapTextureSet& swapSet = *m_pTexSet[eye];
        glBindFramebuffer(GL_FRAMEBUFFER, m_swapFBO.id);
        ovrGLTexture& tex = (ovrGLTexture&)(swapSet.Textures[swapSet.CurrentIndex]);
        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex.OGL.TexId, 0);
        {
            // Handle render target resolution scaling
            m_layerEyeFov.Viewport[eye].Size = ovrHmd_GetFovTextureSize(m_Hmd, eye, m_layerEyeFov.Fov[eye], m_fboScale);
            ovrRecti& vp = m_layerEyeFov.Viewport[eye];
            if (m_layerEyeFov.Header.Flags & ovrLayerFlag_TextureOriginAtBottomLeft)
            {
                ///@note It seems that the render viewport should be vertically centered within the swapSet texture.
                /// See also OculusWorldDemo.cpp:1443 - "The usual OpenGL viewports-don't-match-UVs oddness."
                const int texh = swapSet.Textures[swapSet.CurrentIndex].Header.TextureSize.h;
                vp.Pos.y = (texh - vp.Size.h) / 2;
            }

            glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);

            glClearColor(0.f, 0.f, 0.f, 0.f);
            glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

            // Render the scene for the current eye
            const ovrPosef& eyePose = m_eyePoses[eye];
            const glm::mat4 viewLocal = makeMatrixFromPose(eyePose);
            const glm::mat4 viewWorld = makeWorldToChassisMatrix() * viewLocal;
            const glm::mat4& proj = m_eyeProjections[eye];
            _DrawScenes(
                glm::value_ptr(glm::inverse(viewWorld)),
                glm::value_ptr(proj),
                glm::value_ptr(glm::inverse(viewLocal)));

            m_layerEyeFov.RenderPose[eye] = eyePose;
        }
        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
        glBindFramebuffer(GL_FRAMEBUFFER, 0);
    }

    ovrLayerEyeFov& layer = m_layerEyeFov;
    ovrLayerHeader* layers = &layer.Header;
    ovrResult result = ovrHmd_SubmitFrame(hmd, m_frameIndex, NULL, &layers, 1);

    // Increment counters in swap texture set
    for (ovrEyeType eye = ovrEyeType::ovrEye_Left;
        eye < ovrEyeType::ovrEye_Count;
        eye = static_cast<ovrEyeType>(eye + 1))
    {
        ovrSwapTextureSet& swapSet = *m_pTexSet[eye];
        swapSet.CurrentIndex = (swapSet.CurrentIndex + 1) % swapSet.TextureCount;
    }

    // Blit output to main app window to show something on screen in addition
    // to what's in the Rift. This could optionally be the distorted texture
    // from the OVR SDK's mirror texture, or perhaps a single eye's undistorted
    // view, or even a third-person render (at a performance cost).
    if (true)
    {
        glViewport(0, 0, m_appWindowSize.w, m_appWindowSize.h);
        glBindFramebuffer(GL_READ_FRAMEBUFFER, m_mirrorFBO.id);
        glBlitFramebuffer(
            0, m_mirrorFBO.h, m_mirrorFBO.w, 0,
            0, 0, m_appWindowSize.w, m_appWindowSize.h,
            GL_COLOR_BUFFER_BIT, GL_NEAREST);
        glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
    }

    ++m_frameIndex;
}
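The m_mirrorFBO blitted at the end of display_sdk() is typically created once at startup from the SDK's mirror texture. With SDK 0.6 the setup looks roughly like this (a sketch; the m_* member names follow the app above, the rest is an assumption):

ovrTexture* mirrorTexture = NULL;
ovrHmd_CreateMirrorTextureGL(m_Hmd, GL_RGBA,
    m_appWindowSize.w, m_appWindowSize.h, &mirrorTexture);
const ovrGLTexture* glTex = reinterpret_cast<const ovrGLTexture*>(mirrorTexture);

// Wrap the mirror texture in a read framebuffer for glBlitFramebuffer.
glGenFramebuffers(1, &m_mirrorFBO.id);
glBindFramebuffer(GL_READ_FRAMEBUFFER, m_mirrorFBO.id);
glFramebufferTexture2D(GL_READ_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
    GL_TEXTURE_2D, glTex->OGL.TexId, 0);
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);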
Example #16
void OculusManager::render(RenderSystem* render, Scene* scene)
{
	// The frame index must persist across calls so that ovrHmd_BeginFrame and
	// ovrHmd_GetEyePoses see a monotonically increasing value (it is
	// incremented at the end of this function).
	static unsigned int l_FrameIndex = 0;

	/* the drawing starts with a call to ovrHmd_BeginFrame */
	ovrHmd_BeginFrame(g_Hmd, l_FrameIndex);

	ovrHmd_GetEyePoses(g_Hmd, l_FrameIndex, g_EyeOffsets, g_EyePoses, NULL);

	/* start drawing onto our texture render target */
	glBindFramebuffer(GL_FRAMEBUFFER, l_FBOId);

	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	/* for each eye ... */
	for (int l_EyeIndex = 0; l_EyeIndex < ovrEye_Count; l_EyeIndex++)
	{
		ovrEyeType l_Eye = g_Hmd->EyeRenderOrder[l_EyeIndex];

		glViewport(
			g_EyeTextures[l_Eye].Header.RenderViewport.Pos.x,
			g_EyeTextures[l_Eye].Header.RenderViewport.Pos.y,
			g_EyeTextures[l_Eye].Header.RenderViewport.Size.w,
			g_EyeTextures[l_Eye].Header.RenderViewport.Size.h
			);

		// Pass projection matrix on to OpenGL...
		glMatrixMode(GL_PROJECTION);
		glLoadIdentity();
		glMultMatrixf(&(g_ProjectionMatrici[l_Eye].Transposed().M[0][0]));

		// Create the model-view matrix and pass on to OpenGL...
		glMatrixMode(GL_MODELVIEW);
		glLoadIdentity();

		// Multiply with orientation retrieved from sensor...
		//OVR::Quatf l_Orientation = OVR::Quatf(g_EyePoses[l_Eye].Orientation);
		//OVR::Matrix4f l_ModelViewMatrix = OVR::Matrix4f(l_Orientation.Inverted());
		//glMultMatrixf(&(l_ModelViewMatrix.Transposed().M[0][0]));


		// Translation due to positional tracking (DK2) and IPD...
		//glTranslatef(-g_EyePoses[l_Eye].Position.x, -g_EyePoses[l_Eye].Position.y, -g_EyePoses[l_Eye].Position.z);

		// Move the world forward a bit to show the scene in front of us...
		//glTranslatef(g_CameraPosition.x, g_CameraPosition.y, g_CameraPosition.z);


		//Render
		render->renderOcculus(scene->getChildren(), scene->getLights(), (GLfloat)g_EyeTextures[l_Eye].Header.RenderViewport.Size.w, (GLfloat)g_EyeTextures[l_Eye].Header.RenderViewport.Size.h, toGlm(g_EyePoses[l_Eye]));
	}

	/* After drawing both eyes into the texture render target, revert to
	 * drawing directly to the display and call ovrHmd_EndFrame to let the
	 * Oculus SDK draw both images onto the HMD screen, properly compensated
	 * for lens distortion and chromatic aberration.
	 */
	glBindFramebuffer(GL_FRAMEBUFFER, 0);

	ovrHmd_EndFrame(g_Hmd, g_EyePoses, g_EyeTextures);

	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0); // Avoid OpenGL state leak in ovrHmd_EndFrame...
	glBindBuffer(GL_ARRAY_BUFFER, 0); // Avoid OpenGL state leak in ovrHmd_EndFrame...

	glUseProgram(0);

	++l_FrameIndex;
}
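The toGlm helper passed to renderOcculus above converts an ovrPosef into a glm matrix; a common sketch of such a converter (an assumption, not necessarily this project's version):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

glm::mat4 toGlm(const ovrPosef& pose)
{
	// glm::quat takes (w, x, y, z). Build the rotation, then write the
	// position into the translation column of the column-major matrix.
	glm::quat q(pose.Orientation.w, pose.Orientation.x,
	            pose.Orientation.y, pose.Orientation.z);
	glm::mat4 m = glm::mat4_cast(q);
	m[3] = glm::vec4(pose.Position.x, pose.Position.y, pose.Position.z, 1.0f);
	return m;
}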