Code Example #1
File: gkOculus.cpp Project: CheryJazz/gkEngine
void gkOculus::OnFrameEnd()
{
	if(HMD)
	{
		ovrHmd_EndFrameTiming(HMD);
	}
}
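Example #1 shows only the end-of-frame half of the pairing: in the client-distortion path of this older LibOVR API, every ovrHmd_BeginFrameTiming call is balanced by exactly one ovrHmd_EndFrameTiming call per frame. A minimal sketch of that pairing, assuming a hypothetical renderBothEyes() helper that is not part of gkEngine:

void RenderOneFrame(ovrHmd hmd)
{
	if (!hmd)
		return;

	// Begin frame timing; passing 0 lets the SDK track the frame index internally.
	ovrFrameTiming timing = ovrHmd_BeginFrameTiming(hmd, 0);
	(void)timing; // DeltaSeconds / ScanoutMidpointSeconds would drive animation and pose prediction

	renderBothEyes(); // hypothetical: per-eye scene rendering plus the distortion pass

	// Balance the Begin call, as gkOculus::OnFrameEnd() does.
	ovrHmd_EndFrameTiming(hmd);
}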
Code Example #2
    void draw() {
        static int frameIndex = 0;
        ovrFrameTiming timing = ovrHmd_BeginFrameTiming(hmd, frameIndex++);
        for (int i = 0; i < 2; ++i) {
            const ovrEyeType eye = hmdDesc.EyeRenderOrder[i];
            const EyeArg & eyeArg = eyeArgs[eye];
            // Set up the per-eye projection matrix
            gl::Stacks::projection().top() = eyeArg.projection;

            eyeArg.frameBuffer.activate();
            gl::MatrixStack & mv = gl::Stacks::modelview();
            gl::Stacks::with_push([&] {
                ovrSensorState ss = ovrHmd_GetSensorState(hmd, timing.EyeScanoutSeconds[eye]);
                // Set up the per-eye modelview matrix
                // Apply the head pose
                mv.preMultiply(glm::inverse(Rift::fromOvr(ss.Predicted.Pose)));
                // Apply the per-eye offset
                mv.preMultiply(eyeArg.viewOffset);
                renderScene();
            });
            eyeArg.frameBuffer.deactivate();
        }

        glClearColor(0.1f, 0.1f, 0.1f, 1);
        glClear(GL_COLOR_BUFFER_BIT);
        glDisable(GL_BLEND);
        glDisable(GL_CULL_FACE);
        glDisable(GL_DEPTH_TEST);

        gl::ProgramPtr distortionProgram = GlUtils::getProgram(
                                               Resource::SHADERS_DISTORTION_VS,
                                               Resource::SHADERS_DISTORTION_FS
                                           );
        distortionProgram->use();
        distortionProgram->setUniform("samples", enableSamples);

        glViewport(0, 0, windowSize.x, windowSize.y);
        for_each_eye([&](ovrEyeType eye) {
            const EyeArg & eyeArg = eyeArgs[eye];
            distortionProgram->setUniform(0, eyeArg.scale);
            distortionProgram->setUniform(1, eyeArg.offset);
            eyeArg.frameBuffer.color->bind();
            eyeArg.meshVao->bind();
            glDrawElements(GL_TRIANGLES, eyeArg.mesh.IndexCount,
                           GL_UNSIGNED_SHORT, nullptr);
        });
        gl::Texture2d::unbind();
        gl::Program::clear();
        ovrHmd_EndFrameTiming(hmd);
        glEnable(GL_CULL_FACE);
        glEnable(GL_DEPTH_TEST);
    }
Code Example #3
File: vr_ovr.c Project: fourks/quake2vr
void VR_OVR_FrameStart()
{

	const char *results = ovrHmd_GetLatencyTestResult(hmd);
	if (results && strncmp(results,"",1))
	{
		static float lastms = 0;
		float ms;
		if (sscanf(results,"RESULT=%f ",&ms) && ms != lastms)
		{
			Cvar_SetInteger("vr_prediction",(int) ms);
			lastms = ms;
		}
	}

	if (vr_ovr_lowpersistence->modified)
	{
		unsigned int caps = 0;
		if (hmd->HmdCaps & ovrHmdCap_DynamicPrediction)
			caps |= ovrHmdCap_DynamicPrediction;

		if (hmd->HmdCaps & ovrHmdCap_LowPersistence && vr_ovr_lowpersistence->value)
			caps |= ovrHmdCap_LowPersistence;
		
		ovrHmd_SetEnabledCaps(hmd,caps);
		vr_ovr_lowpersistence->modified = false;
	}

	if (!withinFrame)
	{
		frameTime = ovrHmd_BeginFrameTiming(hmd,0);
	}
	else
	{
		ovrHmd_EndFrameTiming(hmd);
		ovrHmd_ResetFrameTiming(hmd,0);
		frameTime = ovrHmd_BeginFrameTiming(hmd,0);
	}
	withinFrame = true;
}
Code Example #4
void Render()
{
	ovrFrameTiming frameTiming = ovrHmd_BeginFrameTiming(HMD, 0);

	// Update the box rotation value
	rotationBoxValue += 2.0f*frameTiming.DeltaSeconds;

	// Eye position used when moving with the keyboard, etc.
	static OVR::Vector3f EyePos;
	EyePos.x = 0.0f, EyePos.y = 0.0f, EyePos.z = 0.0f;

	// Used when yaw is driven by mouse rotation, etc.
	static float eyeYaw = 0;

	// Read the predicted pose from the sensor
	ovrPosef movePose = ovrHmd_GetSensorState(HMD, frameTiming.ScanoutMidpointSeconds).Predicted.Pose;
	static ovrPosef eyeRenderPose[2];

	// Calculation used when accounting for the player's height
	//EyePos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, EyePos.y);	

	// Only TriangleList topology is used here.
	g_pImmediateContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);

	// Set the render target
	g_pImmediateContext->OMSetRenderTargets(1, &g_pRenderTargetViewOculus, g_pDepthStencilViewOculus);

	// Clear the screen and the depth buffer
	float ClearColor[4] = { 0.0f, 0.125f, 0.3f, 1.0f }; // in R,G,B,A order
	g_pImmediateContext->ClearRenderTargetView(g_pRenderTargetViewOculus, ClearColor);
	g_pImmediateContext->ClearDepthStencilView(g_pDepthStencilViewOculus, D3D11_CLEAR_DEPTH, 1.0f, 0);

	// Render the scene for each eye.
	for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
	{
		ConstantBuffer cb;
		ovrEyeType eye = HMDDesc.EyeRenderOrder[eyeIndex];
		eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);

		// Compute the view matrix.
		OVR::Matrix4f rotation = OVR::Matrix4f::RotationY(eyeYaw);											// apply the rotation computed beforehand (e.g. from the mouse)
		OVR::Matrix4f resultRotation = rotation * OVR::Matrix4f(eyeRenderPose[eye].Orientation) *			// apply the eye pose (rotation)
										OVR::Matrix4f(1, 0, 0, 0, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1);	// flip axes to match the coordinate system
		OVR::Vector3f resultUp = resultRotation.Transform(OVR::Vector3f(0, 1, 0));							// compute the up vector
		OVR::Vector3f forward = resultRotation.Transform(OVR::Vector3f(0, 0, -1));							// compute the forward vector
		OVR::Vector3f resultEyePos = EyePos + rotation.Transform(eyeRenderPose[eye].Position);				// compute the final eye position
		OVR::Vector3f resultEyeAt = EyePos + rotation.Transform(eyeRenderPose[eye].Position) + forward;		// compute the final look-at point

		// Build the view matrix with xnamath from the values computed above.
		XMVECTOR Eye = XMVectorSet(resultEyePos.x, resultEyePos.y, resultEyePos.z, 0.0f);		// camera position
		XMVECTOR At = XMVectorSet(resultEyeAt.x, resultEyeAt.y, resultEyeAt.z, 0.0f);			// camera look-at target
		XMVECTOR Up = XMVectorSet(resultUp.x, resultUp.y, resultUp.z, 0.0f);					// camera up vector
		g_View = XMMatrixLookAtLH(Eye, At,Up) * XMMatrixTranslation(EyeRenderDesc[eye].ViewAdjust.x, EyeRenderDesc[eye].ViewAdjust.y, EyeRenderDesc[eye].ViewAdjust.z);

		// Compute the projection matrix from EyeRenderDesc.
		// The FOV is stored as tangents of the half-angles (up/down/left/right from the eye center), so use the libovr helper.
		// OVR::Matrix4f is row/column-swapped relative to xnamath, so transpose it.
		OVR::Matrix4f proj = OVR::CreateProjection(false, EyeRenderDesc[eye].Fov, 0.01f, 100.0f);
		proj.Transpose();
		memcpy_s(&g_Projection, 64, &proj, 64);		

		// Set the viewport (one eye at a time)
		D3D11_VIEWPORT vp;
		vp.TopLeftX = EyeRenderViewport[eye].Pos.x;
		vp.TopLeftY = EyeRenderViewport[eye].Pos.y;
		vp.Width = EyeRenderViewport[eye].Size.w;
		vp.Height = EyeRenderViewport[eye].Size.h;
		vp.MinDepth = 0.0f;
		vp.MaxDepth = 1.0f;
		g_pImmediateContext->RSSetViewports(1, &vp);

		// Set the matrices that are passed to the constant buffer.
		// They must be transposed before being handed to the shader, so transpose them here.
		cb.mView = XMMatrixTranspose(g_View);
		cb.mProjection = XMMatrixTranspose(g_Projection);

		// Render the scene
		Scene(cb);
	}


	// Distort the scene that was rendered to the render target and draw it to the back buffer.
	DistortionMeshRender(3, HMD, frameTiming.TimewarpPointSeconds,eyeRenderPose);

	g_pSwapChain->Present(0, 0);
	//pRender->WaitUntilGpuIdle();  // queries are not implemented in this sample
	ovrHmd_EndFrameTiming(HMD);

}
Code Example #5
//-------------------------------------------------------------------------------------
void ProcessAndRender()
{
    static ovrPosef eyeRenderPose[2]; 

	// Start timing
    #if SDK_RENDER
	ovrHmd_BeginFrame(HMD, 0); 
    #else
	ovrHmd_BeginFrameTiming(HMD, 0); 
    // Retrieve data useful for handling the Health and Safety Warning - unused, but here for reference
    ovrHSWDisplayState hswDisplayState;
    ovrHmd_GetHSWDisplayState(HMD, &hswDisplayState);
    #endif

	// Adjust eye position and rotation from controls, maintaining y position from HMD.
	static float    BodyYaw(3.141592f);
	static Vector3f HeadPos(0.0f, 1.6f, -5.0f);
//	HeadPos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, HeadPos.y);
	bool freezeEyeRender = Util_RespondToControls(BodyYaw, HeadPos, eyeRenderPose[1].Orientation);

     pRender->BeginScene();
    
	// Render the two undistorted eye views into their render buffers.
    if (!freezeEyeRender) // freeze to debug for time warp
    {
        pRender->SetRenderTarget ( pRendertargetTexture );
        pRender->SetViewport (Recti(0,0, pRendertargetTexture->GetWidth(),
                                         pRendertargetTexture->GetHeight() ));  
        pRender->Clear();
		for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
		{
            ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];
            eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);

            // Get view and projection matrices
			Matrix4f rollPitchYaw       = Matrix4f::RotationY(BodyYaw);
			Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(eyeRenderPose[eye].Orientation);
			Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0,1,0));
			Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0,0,-1));
			Vector3f shiftedEyePos      = HeadPos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
            Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp); 
			Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, true);

			pRender->SetViewport(Recti(EyeRenderViewport[eye]));
			pRender->SetProjection(proj);
			pRender->SetDepthMode(true, true);
			pRoomScene->Render(pRender, Matrix4f::Translation(EyeRenderDesc[eye].ViewAdjust) * view);
		}
    }
    pRender->FinishScene();

    #if SDK_RENDER	// Let OVR do distortion rendering, Present and flush/sync
	ovrHmd_EndFrame(HMD, eyeRenderPose, &EyeTexture[0].Texture);
    #else
	// Clear screen
	pRender->SetDefaultRenderTarget();
	pRender->SetFullViewport();
	pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);

	// Setup shader
	ShaderFill distortionShaderFill(Shaders);
	distortionShaderFill.SetTexture(0, pRendertargetTexture);
	distortionShaderFill.SetInputLayout(VertexIL);

	for(int eyeNum = 0; eyeNum < 2; eyeNum++)
	{
		// Get and set shader constants
		Shaders->SetUniform2f("EyeToSourceUVScale",   UVScaleOffset[eyeNum][0].x, UVScaleOffset[eyeNum][0].y);
		Shaders->SetUniform2f("EyeToSourceUVOffset",  UVScaleOffset[eyeNum][1].x, UVScaleOffset[eyeNum][1].y);
 		ovrMatrix4f timeWarpMatrices[2];
		ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
		Shaders->SetUniform4x4f("EyeRotationStart", timeWarpMatrices[0]);  //Nb transposed when set
		Shaders->SetUniform4x4f("EyeRotationEnd",   timeWarpMatrices[1]);  //Nb transposed when set
		// Perform distortion
		pRender->Render(&distortionShaderFill, MeshVBs[eyeNum], MeshIBs[eyeNum],sizeof(ovrDistortionVertex));
	}

	pRender->SetDefaultRenderTarget();

	pRender->Present( true ); // Vsync enabled

    // Only flush GPU for ExtendDesktop; not needed in Direct App Rendering with Oculus driver.
    if (HMD->HmdCaps & ovrHmdCap_ExtendDesktop)
		pRender->WaitUntilGpuIdle();  
	ovrHmd_EndFrameTiming(HMD);
    #endif
}
Code Example #6
void COculusVR::EndFrameTiming()
{
	//EndFrame
	ovrHmd_EndFrameTiming(Hmd);
}
Code Example #7
int
render_rift(struct weston_compositor *compositor, GLuint original_program)
{
  struct oculus_rift *rift = compositor->rift;

  // copy rift->pbuffer into rift->texture
  /*eglMakeCurrent(rift->egl_display, rift->pbuffer, rift->pbuffer, rift->egl_context);
  //glClearColor(0.5, 0.0, 0.5, 1.0);
  //glClear(GL_COLOR_BUFFER_BIT);
  glBindTexture(GL_TEXTURE_2D, rift->texture);
  eglReleaseTexImage(rift->egl_display, rift->pbuffer, EGL_BACK_BUFFER);
  eglBindTexImage(rift->egl_display, rift->pbuffer, EGL_BACK_BUFFER);
  eglMakeCurrent(rift->egl_display, rift->orig_surface, rift->orig_surface, rift->egl_context);*/
  // render eyes

  static int frameIndex = 0;
  ++frameIndex;
  ovrPosef eyePoses[2];
  ovrHmd_BeginFrameTiming(rift->hmd, frameIndex);
  ovrHmd_GetEyePoses(rift->hmd, frameIndex, rift->hmdToEyeOffsets, eyePoses, NULL);

  glEnable(GL_DEPTH_TEST);
  glUseProgram(rift->eye_shader->program);
  int i;
  for(i=0; i<2; i++)
  {
    const ovrEyeType eye = rift->hmd->EyeRenderOrder[i];
    struct EyeArg eyeArg = rift->eyeArgs[eye];
    
    ovrMatrix4f Model = initTranslationF(0.0, 0.0, rift->screen_z);
    Model = matrix4fMul(initScale(
          3.2 * rift->screen_scale, 
          1.8 * rift->screen_scale, 
          1.0), Model);
    ovrMatrix4f MV = matrix4fMul(posefToMatrix4f(eyePoses[eye]), Model);
    //MV = initIdentity();
    //MV.M[2][3] = 5;

    glBindFramebuffer(GL_FRAMEBUFFER, eyeArg.framebuffer);
    glViewport(0, 0, eyeArg.textureWidth, eyeArg.textureHeight);
    glClearColor(0.0, 0.0, 0.2, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);

    glUniform1i(rift->eye_shader->virtualScreenTexture, 0);
    glUniformMatrix4fv(rift->eye_shader->Projection, 1, GL_FALSE, &eyeArg.projection.M[0][0]);
    glUniformMatrix4fv(rift->eye_shader->ModelView, 1, GL_FALSE, &MV.M[0][0]);

    glEnableVertexAttribArray(0);
    glEnableVertexAttribArray(1);

    glBindTexture(GL_TEXTURE_2D, rift->fbTexture);
    glBindBuffer(GL_ARRAY_BUFFER, rift->scene->vertexBuffer);
    glVertexAttribPointer(rift->eye_shader->Position, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), NULL);
    if(rift->sbs == 1)
      glBindBuffer(GL_ARRAY_BUFFER, rift->scene->SBSuvsBuffer[eye]);
    else
      glBindBuffer(GL_ARRAY_BUFFER, rift->scene->uvsBuffer);
    glVertexAttribPointer(rift->eye_shader->TexCoord0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), NULL);
    glDrawArrays(GL_TRIANGLES, 0, 6);

    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glDisableVertexAttribArray(0);
    glDisableVertexAttribArray(1);
    //render_eye(rift, eyeArg);
  }

  glBindFramebuffer(GL_FRAMEBUFFER, 0);

  // render distortion
  glUseProgram(rift->distortion_shader->program);
  glViewport(0, 0, 1920, 1080);

  glClearColor(0.0, 0.1, 0.0, 1.0);
  glClear(GL_COLOR_BUFFER_BIT);
  glDisable(GL_BLEND);
  glDisable(GL_CULL_FACE);
  glDisable(GL_DEPTH_TEST);

  float angle = 0.0;
  if(rift->rotate == 1)
  {
    angle = 1.57079633; // 90 degrees, in radians
    glViewport(0, 0, 1080, 1920);
  }

  int eye;
  for(eye=0; eye<2; eye++)
  {
    struct EyeArg eyeArg = rift->eyeArgs[eye];
    glUniform2fv(rift->distortion_shader->EyeToSourceUVScale, 1, (float *)&eyeArg.scale);
    glUniform2fv(rift->distortion_shader->EyeToSourceUVOffset, 1, (float *)&eyeArg.offset);
    glUniform1i(rift->distortion_shader->RightEye, eye);
    glUniform1f(rift->distortion_shader->angle, angle);
    glUniform1i(rift->distortion_shader->eyeTexture, 0);

    //glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, eyeArg.texture);

    glBindBuffer(GL_ARRAY_BUFFER, eyeArg.vertexBuffer);
    glVertexAttribPointer(rift->distortion_shader->Position, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), NULL);
    glEnableVertexAttribArray(rift->distortion_shader->Position);

    glBindBuffer(GL_ARRAY_BUFFER, eyeArg.uvsBuffer[1]);
    glVertexAttribPointer(rift->distortion_shader->TexCoord0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), NULL);
    glEnableVertexAttribArray(rift->distortion_shader->TexCoord0);
    glBindBuffer(GL_ARRAY_BUFFER, eyeArg.uvsBuffer[0]);
    glVertexAttribPointer(rift->distortion_shader->TexCoordR, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), NULL);
    glEnableVertexAttribArray(rift->distortion_shader->TexCoordR);
    glBindBuffer(GL_ARRAY_BUFFER, eyeArg.uvsBuffer[1]);
    glVertexAttribPointer(rift->distortion_shader->TexCoordG, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), NULL);
    glEnableVertexAttribArray(rift->distortion_shader->TexCoordG);
    glBindBuffer(GL_ARRAY_BUFFER, eyeArg.uvsBuffer[2]);
    glVertexAttribPointer(rift->distortion_shader->TexCoordB, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), NULL);
    glEnableVertexAttribArray(rift->distortion_shader->TexCoordB);

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, eyeArg.indexBuffer);

    glDrawElements(GL_TRIANGLES, eyeArg.mesh.IndexCount, GL_UNSIGNED_SHORT, 0);

    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
  }

  //glEnable(GL_CULL_FACE);
  glEnable(GL_DEPTH_TEST);

  ovrHmd_EndFrameTiming(rift->hmd);

  // set program back to original shader program
  glUseProgram(original_program);
  return 0;
}
Code Example #8
File: vr_ovr.c Project: fourks/quake2vr
void VR_OVR_FrameEnd()
{
	ovrHmd_EndFrameTiming(hmd);
	withinFrame = false;
}
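Examples #3 and #8 are the two halves of the same per-frame bracket in vr_ovr.c: VR_OVR_FrameStart() begins (or, if withinFrame is still set, restarts) frame timing, and VR_OVR_FrameEnd() closes it and clears the flag. A minimal sketch of how a caller might pair them each frame; R_RenderStereoViews() is an illustrative stand-in, not a function from quake2vr:

void R_RenderVRFrame(void)
{
	VR_OVR_FrameStart();     /* begins frame timing; also handles latency-test result and persistence caps */
	R_RenderStereoViews();   /* hypothetical: render both eye views and the distortion mesh */
	VR_OVR_FrameEnd();       /* ovrHmd_EndFrameTiming + withinFrame = false */
}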
Code Example #9
void OculusDevice::endFrameTiming() const {
	ovrHmd_EndFrameTiming(m_hmdDevice);
}
Code Example #10
//-------------------------------------------------------------------------------------
void ProcessAndRender()
{
#if 0
	//HRESULT hr = pRender->Device->SetRenderState(D3DRS_ZENABLE, D3DZB_TRUE);
	//OVR_ASSERT(SUCCEEDED(hr));

	pRender->Clear();
	pRender->BeginScene();

	Vector3f eye(0.0f, 0.0f, -5.0f);
	Vector3f lookat(0.0f, 0.0f, 0.0f);
	Vector3f up(0.0f, 1.0f, 0.0f);
	Matrix4f view = Matrix4f::LookAtLH(eye, lookat, up);
	//Matrix4f proj = Matrix4f::PerspectiveLH(3.14145f / 2, 800.0f / 600.0f, 1.0f, 10000.0f);

	ovrFovPort fov = { 1, 1, 1, 1 };
	Matrix4f proj = ovrMatrix4f_Projection(fov, 1.0f, 10000.0f, false);

	pRender->SetProjection(proj);
	pRoomScene->Render(pRender, view);

	pRender->EndScene();
	pRender->Present();
#endif

	static ovrPosef eyeRenderPose[2];
	ovrHmd_BeginFrameTiming(HMD, 0);

	// Adjust eye position and rotation from controls, maintaining y position from HMD.
	static float BodyYaw(3.141592f);
	static Vector3f HeadPos(0.0f, 1.6f, -5.0f);
	HeadPos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, HeadPos.y);
	bool freezeEyeRender = false;

	pRender->BeginScene();

	if (!freezeEyeRender)
	{
		pRender->SetRenderTarget(pRendertargetTexture);
		pRender->SetViewport(Recti(0, 0, pRendertargetTexture->Width, pRendertargetTexture->Height));
		pRender->Clear();
		for (int eyeIndex = 0; eyeIndex < ovrEye_Count; ++eyeIndex)
		{
			ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];
			eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);

			// Get view and projection matrices
			Matrix4f rollPitchYaw = Matrix4f::RotationY(BodyYaw);
			Matrix4f finalRollPitchYaw = rollPitchYaw * Matrix4f(eyeRenderPose[eye].Orientation);
			Vector3f finalUp = finalRollPitchYaw.Transform(Vector3f(0, 1, 0));
			Vector3f finalForward = finalRollPitchYaw.Transform(Vector3f(0, 0, -1));
			Vector3f shiftedEyePos      = HeadPos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
			//Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp); 
			//Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, true);
			Matrix4f view = Matrix4f::LookAtLH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
			Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, false);

			pRender->SetViewport(Recti(EyeRenderViewport[eye]));
			pRender->SetProjection(proj);
			pRender->SetDepthMode(true, true);
			pRoomScene->Render(pRender, Matrix4f::Translation(EyeRenderDesc[eye].ViewAdjust) * view);
		}
	}

	pRender->SetDefaultRenderTarget();
	pRender->SetFullViewport();
	pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);

	ShaderFill distortionShaderFill(DistortionShaders);
	distortionShaderFill.SetTexture(0, pRendertargetTexture);

	for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++)
	{
		// Get and set shader constants
		DistortionShaders->SetUniform2f("EyeToSourceUVScale", UVScaleOffset[eyeNum][0].x, UVScaleOffset[eyeNum][0].y);
		DistortionShaders->SetUniform2f("EyeToSourceUVOffset", UVScaleOffset[eyeNum][1].x, UVScaleOffset[eyeNum][1].y);
		ovrMatrix4f timeWarpMatrices[2];
		ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
		DistortionShaders->SetUniform4x4f("EyeRotationStart", timeWarpMatrices[0]);  //Nb transposed when set
		DistortionShaders->SetUniform4x4f("EyeRotationEnd", timeWarpMatrices[1]);  //Nb transposed when set
		// Perform distortion
		pRender->Render(&distortionShaderFill, DistortionDecl, MeshVBs[eyeNum], MeshIBs[eyeNum],
			sizeof(ovrDistortionVertex), Matrix4f(), MeshVBCnts[eyeNum], MeshIBCnts[eyeNum], Prim_Triangles);
		//Render(fill, vertices, indices, stride, Matrix4f(), 0,(int)vertices->GetSize(), Prim_Triangles, false);
		//(&distortionShaderFill, MeshVBs[eyeNum], MeshIBs[eyeNum],sizeof(ovrDistortionVertex));
	}

	/*
	pRender->SetDefaultRenderTarget();
	pRender->SetFullViewport();
	pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);

	Vector3f eye(0.0f, 0.0f, -5.0f);
	Vector3f lookat(0.0f, 0.0f, 0.0f);
	Vector3f up(0.0f, 1.0f, 0.0f);
	Matrix4f view = Matrix4f::LookAtLH(eye, lookat, up);
	Matrix4f proj = Matrix4f::PerspectiveLH(3.14145f / 4, 800.0f / 600.0f, 1.0f, 10000.0f);

	pRender->Proj = proj;
	pScene->Render(pRender, view);
	*/
	//pRender->SetDefaultRenderTarget();

	pRender->EndScene();
	pRender->Present();

	//if (HMD->HmdCaps & ovrHmdCap_ExtendDesktop)
	//	pRender->WaitUntilG
	ovrHmd_EndFrameTiming(HMD);
}
Code Example #11
File: RiftAppSkeleton.cpp Project: cleoag/RiftRay
///@todo Even though this function shares most of its code with client rendering,
/// which appears to work fine, it is non-convergable. It appears that the projection
/// matrices for each eye are too far apart? Could be modelview...
void RiftAppSkeleton::display_stereo_undistorted() //const
{
    ovrHmd hmd = m_Hmd;
    if (hmd == NULL)
        return;

    //ovrFrameTiming hmdFrameTiming =
    ovrHmd_BeginFrameTiming(hmd, 0);

    bindFBO(m_renderBuffer, m_fboScale);

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
    {
        ovrEyeType eye = hmd->EyeRenderOrder[eyeIndex];
        ovrPosef eyePose = ovrHmd_GetEyePose(hmd, eye);

        const ovrGLTexture& otex = l_EyeTexture[eye];
        const ovrRecti& rvp = otex.OGL.Header.RenderViewport;
        const ovrRecti rsc = {
            static_cast<int>(m_fboScale * rvp.Pos.x),
            static_cast<int>(m_fboScale * rvp.Pos.y),
            static_cast<int>(m_fboScale * rvp.Size.w),
            static_cast<int>(m_fboScale * rvp.Size.h)
        };
        glViewport(rsc.Pos.x, rsc.Pos.y, rsc.Size.w, rsc.Size.h);

        OVR::Quatf orientation = OVR::Quatf(eyePose.Orientation);
        OVR::Matrix4f proj = ovrMatrix4f_Projection(
            m_EyeRenderDesc[eye].Fov,
            0.01f, 10000.0f, true);

        //m_EyeRenderDesc[eye].DistortedViewport;
        OVR::Vector3f EyePos = m_chassisPos;
        OVR::Matrix4f view = OVR::Matrix4f(orientation.Inverted())
            * OVR::Matrix4f::RotationY(m_chassisYaw)
            * OVR::Matrix4f::Translation(-EyePos);
        OVR::Matrix4f eyeview = OVR::Matrix4f::Translation(m_EyeRenderDesc[eye].ViewAdjust) * view;

        _resetGLState();

        _DrawScenes(&eyeview.Transposed().M[0][0], &proj.Transposed().M[0][0], rvp);
    }
    unbindFBO();

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_CULL_FACE);

    // Present FBO to screen
    const GLuint prog = m_presentFbo.prog();
    glUseProgram(prog);
    m_presentFbo.bindVAO();
    {
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, m_renderBuffer.tex);
        glUniform1i(m_presentFbo.GetUniLoc("fboTex"), 0);

        // This is the only uniform that changes per-frame
        glUniform1f(m_presentFbo.GetUniLoc("fboScale"), m_fboScale);

        glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
    }
    glBindVertexArray(0);
    glUseProgram(0);

    ovrHmd_EndFrameTiming(hmd);
}
Code Example #12
File: RiftAppSkeleton.cpp Project: cleoag/RiftRay
void RiftAppSkeleton::display_client() //const
{
    ovrHmd hmd = m_Hmd;
    if (hmd == NULL)
        return;

    //ovrFrameTiming hmdFrameTiming =
    ovrHmd_BeginFrameTiming(hmd, 0);

    bindFBO(m_renderBuffer, m_fboScale);

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
    {
        const ovrEyeType eye = hmd->EyeRenderOrder[eyeIndex];
        const ovrPosef eyePose = ovrHmd_GetEyePose(hmd, eye);
        m_eyeOri = eyePose.Orientation; // cache this for movement direction
        _StoreHmdPose(eyePose);

        const ovrGLTexture& otex = l_EyeTexture[eye];
        const ovrRecti& rvp = otex.OGL.Header.RenderViewport;
        const ovrRecti rsc = {
            static_cast<int>(m_fboScale * rvp.Pos.x),
            static_cast<int>(m_fboScale * rvp.Pos.y),
            static_cast<int>(m_fboScale * rvp.Size.w),
            static_cast<int>(m_fboScale * rvp.Size.h)
        };
        glViewport(rsc.Pos.x, rsc.Pos.y, rsc.Size.w, rsc.Size.h);

        const OVR::Matrix4f proj = ovrMatrix4f_Projection(
            m_EyeRenderDesc[eye].Fov,
            0.01f, 10000.0f, true);

        ///@todo Should we be using this variable?
        //m_EyeRenderDesc[eye].DistortedViewport;

        const OVR::Matrix4f view = _MakeModelviewMatrix(
            eyePose,
            m_EyeRenderDesc[eye].ViewAdjust,
            m_chassisYaw,
            m_chassisPos);

        const OVR::Matrix4f scaledView = _MakeModelviewMatrix(
            eyePose,
            m_EyeRenderDesc[eye].ViewAdjust,
            m_chassisYaw,
            m_chassisPos,
            m_headSize);

        _resetGLState();

        _DrawScenes(&view.Transposed().M[0][0], &proj.Transposed().M[0][0], rsc, &scaledView.Transposed().M[0][0]);
    }
    unbindFBO();


    // Set full viewport...?
    const int w = m_Cfg.OGL.Header.RTSize.w;
    const int h = m_Cfg.OGL.Header.RTSize.h;
    glViewport(0, 0, w, h);

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_CULL_FACE);

    // Now draw the distortion mesh...
    for(int eyeNum = 0; eyeNum < 2; eyeNum++)
    {
        const ShaderWithVariables& eyeShader = eyeNum == 0 ?
            m_presentDistMeshL :
            m_presentDistMeshR;
        const GLuint prog = eyeShader.prog();
        glUseProgram(prog);
        //glBindVertexArray(eyeShader.m_vao);
        {
            const ovrDistortionMesh& mesh = m_DistMeshes[eyeNum];
            glBindBuffer(GL_ARRAY_BUFFER, 0);

            const int a_pos =  glGetAttribLocation(prog, "vPosition");
            glVertexAttribPointer(a_pos, 4, GL_FLOAT, GL_FALSE, sizeof(ovrDistortionVertex), &mesh.pVertexData[0].ScreenPosNDC.x);
            glEnableVertexAttribArray(a_pos);

            const int a_texR =  glGetAttribLocation(prog, "vTexR");
            if (a_texR > -1)
            {
                glVertexAttribPointer(a_texR, 2, GL_FLOAT, GL_FALSE, sizeof(ovrDistortionVertex), &mesh.pVertexData[0].TanEyeAnglesR);
                glEnableVertexAttribArray(a_texR);
            }

            const int a_texG =  glGetAttribLocation(prog, "vTexG");
            if (a_texG > -1)
            {
                glVertexAttribPointer(a_texG, 2, GL_FLOAT, GL_FALSE, sizeof(ovrDistortionVertex), &mesh.pVertexData[0].TanEyeAnglesG);
                glEnableVertexAttribArray(a_texG);
            }

            const int a_texB =  glGetAttribLocation(prog, "vTexB");
            if (a_texB > -1)
            {
                glVertexAttribPointer(a_texB, 2, GL_FLOAT, GL_FALSE, sizeof(ovrDistortionVertex), &mesh.pVertexData[0].TanEyeAnglesB);
                glEnableVertexAttribArray(a_texB);
            }

            ovrVector2f uvoff =
                m_uvScaleOffsetOut[2*eyeNum + 1];
                //DistortionData.UVScaleOffset[eyeNum][0];
            ovrVector2f uvscale =
                m_uvScaleOffsetOut[2*eyeNum + 0];
                //DistortionData.UVScaleOffset[eyeNum][1];

            glUniform2f(eyeShader.GetUniLoc("EyeToSourceUVOffset"), uvoff.x, uvoff.y);
            glUniform2f(eyeShader.GetUniLoc("EyeToSourceUVScale"), uvscale.x, uvscale.y);


#if 0
            // Setup shader constants
            DistortionData.Shaders->SetUniform2f(
                "EyeToSourceUVScale",
                DistortionData.UVScaleOffset[eyeNum][0].x,
                DistortionData.UVScaleOffset[eyeNum][0].y);
            DistortionData.Shaders->SetUniform2f(
                "EyeToSourceUVOffset",
                DistortionData.UVScaleOffset[eyeNum][1].x,
                DistortionData.UVScaleOffset[eyeNum][1].y);

            if (distortionCaps & ovrDistortionCap_TimeWarp)
            { // TIMEWARP - Additional shader constants required
                ovrMatrix4f timeWarpMatrices[2];
                ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPoses[eyeNum], timeWarpMatrices);
                //WARNING!!! These matrices are transposed in SetUniform4x4f, before being used by the shader.
                DistortionData.Shaders->SetUniform4x4f("EyeRotationStart", Matrix4f(timeWarpMatrices[0]));
                DistortionData.Shaders->SetUniform4x4f("EyeRotationEnd", Matrix4f(timeWarpMatrices[1]));
            }

            // Perform distortion
            pRender->Render(
                &distortionShaderFill,
                DistortionData.MeshVBs[eyeNum],
                DistortionData.MeshIBs[eyeNum]);
#endif

            glActiveTexture(GL_TEXTURE0);
            glBindTexture(GL_TEXTURE_2D, m_renderBuffer.tex);
            glUniform1i(eyeShader.GetUniLoc("fboTex"), 0);

            // This is the only uniform that changes per-frame
            glUniform1f(eyeShader.GetUniLoc("fboScale"), m_fboScale);


            glDrawElements(
                GL_TRIANGLES,
                mesh.IndexCount,
                GL_UNSIGNED_SHORT,
                &mesh.pIndexData[0]);
        }
        glBindVertexArray(0);
        glUseProgram(0);
    }

    ovrHmd_EndFrameTiming(hmd);
}