void OculusDevice::waitTillTime() {
	// Wait till time-warp point to reduce latency.
	ovr_WaitTillTime(m_frameTiming.TimewarpPointSeconds);

	// Get time warp properties
	for (int eyeIndex = 0; eyeIndex < ovrEye_Count; ++eyeIndex) {
		ovrHmd_GetEyeTimewarpMatrices(m_hmdDevice, (ovrEyeType)eyeIndex, m_headPose[eyeIndex], m_timeWarpMatrices[eyeIndex]);
	}
}
  void draw() {
    auto frameTime = ovrHmd_BeginFrame(hmd, frameIndex++);
    ovrLock.unlock();

    // Sleep until just before the time-warp point so the distortion pass
    // samples the freshest possible pose.  When the SDK reports no
    // time-warp point (0), fall back to an estimate derived from the next
    // frame time.
    //
    // FIX: the two branches were swapped — the 0 case waited until the
    // absolute time (0 - 0.002), i.e. a time already in the past, so it
    // returned immediately, while a valid time-warp point was ignored in
    // favour of the next-frame fallback.
    if (0 == frameTime.TimewarpPointSeconds) {
      ovr_WaitTillTime(frameTime.NextFrameSeconds - 0.008);
    } else {
      ovr_WaitTillTime(frameTime.TimewarpPointSeconds - 0.002);
    }

    // Grab the most recent texture IDs for each eye before submitting.
    for_each_eye([&](ovrEyeType eye) {
      ((ovrGLTexture&)(eyeTextures[eye])).OGL.TexId =
        textureIds[eye];
    });

    ovrLock.lock();
    ovrHmd_EndFrame(hmd, eyePoses, eyeTextures);
  }
// ---- Beispiel #3 (0) — code-search extraction separator ----
void DistortionMeshRender(unsigned distortionCaps, ovrHmd HMD,
                          double timwarpTimePoint, ovrPosef eyeRenderPoses[2],
                          RenderDevice* pRender, Texture* pRendertargetTexture)
{
 	if (distortionCaps & ovrDistortionCap_TimeWarp)
	{   // TIMEWARP
        // Wait till time-warp to reduce latency.
	    ovr_WaitTillTime(timwarpTimePoint);
	}

	// Clear screen
    pRender->SetDefaultRenderTarget();
    pRender->SetFullViewport();
    pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);

	// Setup shader
	ShaderFill distortionShaderFill(DistortionData.Shaders);
    distortionShaderFill.SetTexture(0, pRendertargetTexture);
    distortionShaderFill.SetInputLayout(DistortionData.VertexIL);

    for(int eyeNum = 0; eyeNum < 2; eyeNum++)
    {
		// Setup shader constants
		DistortionData.Shaders->SetUniform2f("EyeToSourceUVScale",
            DistortionData.UVScaleOffset[eyeNum][0].x, DistortionData.UVScaleOffset[eyeNum][0].y);
        DistortionData.Shaders->SetUniform2f("EyeToSourceUVOffset",
            DistortionData.UVScaleOffset[eyeNum][1].x, DistortionData.UVScaleOffset[eyeNum][1].y);

 		if (distortionCaps & ovrDistortionCap_TimeWarp)
		{   // TIMEWARP - Additional shader constants required
			ovrMatrix4f timeWarpMatrices[2];
			ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPoses[eyeNum], timeWarpMatrices);
			//WARNING!!! These matrices are transposed in SetUniform4x4f, before being used by the shader.
			DistortionData.Shaders->SetUniform4x4f("EyeRotationStart", Matrix4f(timeWarpMatrices[0]));
			DistortionData.Shaders->SetUniform4x4f("EyeRotationEnd",   Matrix4f(timeWarpMatrices[1]));
		}
		// Perform distortion
		pRender->Render(&distortionShaderFill,
                        DistortionData.MeshVBs[eyeNum], DistortionData.MeshIBs[eyeNum]);
    }

    pRender->SetDefaultRenderTarget();
}
void DistortionMeshRender(unsigned distortionCaps, ovrHmd HMD,
	double timwarpTimePoint, ovrPosef eyeRenderPoses[2])
{
	// D3D11 client-distortion pass: clears the back buffer, sets a
	// full-window viewport, then draws both eyes' precomputed distortion
	// meshes with the Oculus distortion shaders.
	if (distortionCaps & ovrDistortionCap_TimeWarp)
	{
		// TIMEWARP: wait until the SDK-recommended point in the frame to
		// reduce motion-to-photon latency.
		ovr_WaitTillTime(timwarpTimePoint);
	}

	// Bind the back buffer as the render target (no depth buffer needed).
	g_pImmediateContext->OMSetRenderTargets(1, &g_pRenderTargetView, NULL);

	// Clear the render target.
	float ClearColor_[4] = { 0.0f, 0.0f, 0.0f, 0.0f }; // R,G,B,A order
	g_pImmediateContext->ClearRenderTargetView(g_pRenderTargetView, ClearColor_);

	// Viewport covering the full HMD resolution.
	D3D11_VIEWPORT vp;
	vp.Width = HMDDesc.Resolution.w;
	vp.Height = HMDDesc.Resolution.h;
	vp.MinDepth = 0.0f;
	vp.MaxDepth = 1.0f;
	vp.TopLeftX = 0;
	vp.TopLeftY = 0;
	g_pImmediateContext->RSSetViewports(1, &vp);

	// Bind the distortion pipeline: shaders, constant buffer, the eye
	// render texture and its sampler.
	// (FIX: removed a dead `UINT stride/offset` pair here that was never
	// used and was shadowed by the identical declaration in the eye loop.)
	g_pImmediateContext->VSSetShader(g_pVertexShaderOculus, NULL, 0);
	g_pImmediateContext->VSSetConstantBuffers(0, 1, &g_pConstantBufferOculus);
	g_pImmediateContext->PSSetShader(g_pPixelShaderOculus, NULL, 0);
	g_pImmediateContext->PSSetShaderResources(0, 1, &g_pShaderResViewOculus);
	g_pImmediateContext->PSSetSamplers(0, 1, &g_pSamplerLinear);

	// Input layout for the distortion mesh vertices.
	g_pImmediateContext->IASetInputLayout(g_pVertexLayoutOculus);

	// Draw each eye's distortion mesh.
	for (int eyeNum = 0; eyeNum < 2; eyeNum++)
	{
		// Per-eye shader constants.  Zero-initialised so the rotation
		// matrices are not uploaded as uninitialised stack memory when
		// time-warp is disabled.
		OculusRiftSettings ocrSet = {};
		ocrSet.EyeToSourceUVScale.x = UVScaleOffset[eyeNum][0].x;
		ocrSet.EyeToSourceUVScale.y = UVScaleOffset[eyeNum][0].y;
		ocrSet.EyeToSourceUVOffset.x = UVScaleOffset[eyeNum][1].x;
		ocrSet.EyeToSourceUVOffset.y = UVScaleOffset[eyeNum][1].y;

		if (distortionCaps & ovrDistortionCap_TimeWarp)
		{
			// TIMEWARP: start/end eye-rotation matrices consumed by the
			// shader to interpolate the warp across the scan-out.
			ovrMatrix4f timeWarpMatrices[2];
			ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPoses[eyeNum], timeWarpMatrices);
			// FIX: sizeof() instead of the former magic constant 64
			// (= sizeof(ovrMatrix4f)).
			memcpy_s(&ocrSet.EyeRotationStart, sizeof(ocrSet.EyeRotationStart),
			         &timeWarpMatrices[0], sizeof(timeWarpMatrices[0]));
			memcpy_s(&ocrSet.EyeRotationEnd, sizeof(ocrSet.EyeRotationEnd),
			         &timeWarpMatrices[1], sizeof(timeWarpMatrices[1]));
		}

		// Vertex and index buffers for this eye's mesh.
		UINT stride = sizeof(DistortionVertex), offset = 0;
		g_pImmediateContext->IASetVertexBuffers(0, 1, &g_pVertexBufferOculus[eyeNum], &stride, &offset);
		g_pImmediateContext->IASetIndexBuffer(g_pIndexBufferOculus[eyeNum], DXGI_FORMAT_R16_UINT, 0);

		// Upload the per-eye constants.
		g_pImmediateContext->UpdateSubresource(g_pConstantBufferOculus, 0, NULL, &ocrSet, 0, 0);

		// Draw the distortion mesh.
		g_pImmediateContext->DrawIndexed(oculusIndexCount, 0, 0);
	}

}
// ---- Beispiel #5 (0) — code-search extraction separator ----
/*
 * OVR_Present — final presentation pass for the Oculus Rift
 * (SDK 0.4-era client distortion rendering).
 *
 * 1. If position tracking is connected and vr_ovr_trackingloss is
 *    enabled, compute a 0..1 "desaturate" factor that ramps up as the
 *    head nears the edge of the tracking camera's frustum (or yaws away
 *    from it), saturating to 1.0 when the position lock is lost.
 * 2. Clear the screen and pick the distortion shader (time-warp variant
 *    when presenting mid-frame with vr_ovr_timewarp on).
 * 3. Draw each eye's distortion mesh with per-eye UV scale/offset and,
 *    for time-warp, the start/end eye-rotation matrices.
 *
 * loading: nonzero while the loading screen is up; disables time-warp.
 */
void OVR_Present(qboolean loading)
{
    int fade = vr_ovr_distortion_fade->value != 0.0f;
	float desaturate = 0.0;
    
	if (positionTracked && trackingState.StatusFlags & ovrStatus_PositionConnected && vr_ovr_trackingloss->value > 0) {
		if (hasPositionLock) {
			// Yaw ramp: 0 at |cameraYaw| == 105, reaching 1 at 130.
			float yawDiff = (fabsf(cameraYaw) - 105.0f) * 0.04;
			float xBound,yBound,zBound;
			vec_t temp[4][4], fin[4][4];
			int i = 0;
			vec3_t euler;
			vec4_t pos = {0.0,0.0,0.0,1.0};
			vec4_t out = {0,0,0,0};
			ovrPosef camera, head;
			vec4_t quat;
			camera = trackingState.CameraPose;
			head = trackingState.HeadPose.ThePose;

			// Head position relative to the tracking camera, with x and z
			// negated (handedness conversion into the engine's space).
			pos[0] = -(head.Position.x - camera.Position.x);
			pos[1] = head.Position.y - camera.Position.y;
			pos[2] = -(head.Position.z - camera.Position.z);

			// Build a rotation from the camera orientation and compose it
			// with the camera frustum matrix.
			VR_OVR_QuatToEuler(camera.Orientation,euler);
			EulerToQuat(euler,quat);
			QuatToRotation(quat,temp);
			MatrixMultiply (cameraFrustum,temp,fin);

			// Transform the relative head position by the composed matrix.
			for (i=0; i<4; i++) {
				out[i] = fin[i][0]*pos[0] + fin[i][1]*pos[1] + fin[i][2]*pos[2] + fin[i][3]*pos[3];
			}

			// Per-axis ramps: 0 inside the comfortable tracking volume,
			// approaching 1 as the transformed position nears the bounds.
			xBound = (fabsf(out[0]) - 0.6f) * 6.25f;
			yBound = (fabsf(out[1]) - 0.45f) * 6.25f;
			zBound = (fabsf(out[2] - 0.5f) - 0.5f) * 10.0f;

			yawDiff = clamp(yawDiff,0.0,1.0);
			xBound = clamp(xBound,0.0,1.0);
			yBound = clamp(yBound,0.0,1.0);
			zBound = clamp(zBound,0.0,1.0);

			// The worst offender drives the warning effect.
			desaturate = max(max(max(xBound,yBound),zBound),yawDiff);
		} else {
			// Position lock lost entirely: fully desaturate.
			desaturate = 1.0;
		}
	}
	GL_ClearColor(0.0, 0.0, 0.0, 1.0);
	R_Clear();
	GL_SetDefaultClearColor();	
	{
		int i = 0;
		r_ovr_shader_t *currentShader;

		// Time-warp only applies when presenting mid-frame with live
		// rendering; never on the loading screen.
		qboolean warp =(qboolean) (!loading && withinFrame && vr_ovr_timewarp->value);
		if (warp)
		{
			currentShader = &ovr_timewarp_shaders[useChroma];	
			// Stall until the SDK's recommended time-warp point so the
			// warp uses the freshest possible pose.
			ovr_WaitTillTime(frameTime.TimewarpPointSeconds);
		} else {
			currentShader = &ovr_distortion_shaders[useChroma];	
		}

		// Switch from classic client-state arrays to the five generic
		// vertex attributes used by the distortion mesh.
		glDisableClientState (GL_COLOR_ARRAY);
		glDisableClientState (GL_TEXTURE_COORD_ARRAY);
		glDisableClientState (GL_VERTEX_ARRAY);
		glEnableVertexAttribArray (0);
		glEnableVertexAttribArray (1);
		glEnableVertexAttribArray (2);
		glEnableVertexAttribArray (3);
		glEnableVertexAttribArray (4);

		glUseProgram(currentShader->shader->program);

		// DK2+ luminance overdrive: blend against last frame's output
		// (bound on texture unit 1) to compensate for slow panel response.
		if (hmd->Type >= ovrHmd_DK2 && vr_ovr_lumoverdrive->value)
		{
			int lastFrame = (currentFrame ? 0 : 1);
			static float overdriveScaleRegularRise = 0.1f;
			static float overdriveScaleRegularFall = 0.05f;	// falling issues are hardly visible

			GL_MBind(1,offscreen[lastFrame].texture);
			glUniform2f(currentShader->uniform.OverdriveScales,overdriveScaleRegularRise, overdriveScaleRegularFall);
		} else {
			// Overdrive disabled: zero scales make the shader pass through.
			glUniform2f(currentShader->uniform.OverdriveScales,0,0);
		}
		glUniform2f(currentShader->uniform.InverseResolution,1.0/glState.currentFBO->width,1.0/glState.currentFBO->height);
		glUniform1i(currentShader->uniform.VignetteFade,fade);

		glUniform1f(currentShader->uniform.Desaturate, desaturate);

		for (i = 0; i < 2; i++)
		{
			// hook for rendering in different order
			int eye = i;
			// Bind this eye's rendered scene and its distortion mesh VBO.
			GL_MBind(0,renderInfo[eye].eyeFBO.texture);
			R_BindIVBO(&renderInfo[eye].eye,distortion_attribs,5);

			// Per-eye UV scale/offset mapping mesh UVs into the source FBO.
			glUniform2f(currentShader->uniform.EyeToSourceUVScale,
				renderInfo[eye].UVScaleOffset[0].x, renderInfo[eye].UVScaleOffset[0].y);

			glUniform2f(currentShader->uniform.EyeToSourceUVOffset,
				renderInfo[eye].UVScaleOffset[1].x, renderInfo[eye].UVScaleOffset[1].y);

			if (warp)
			{
				// Time-warp start/end rotation matrices for this eye.
				// GL_TRUE transposes the row-major ovrMatrix4f for GLSL.
				ovrPosef framePose = trackingState.HeadPose.ThePose;
				ovrMatrix4f timeWarpMatrices[2];
				ovrHmd_GetEyeTimewarpMatrices(hmd, (ovrEyeType)eye, framePose, timeWarpMatrices);
				glUniformMatrix4fv(currentShader->uniform.EyeRotationStart,1,GL_TRUE,(GLfloat *) timeWarpMatrices[0].M);
				glUniformMatrix4fv(currentShader->uniform.EyeRotationEnd,1,GL_TRUE,(GLfloat *) timeWarpMatrices[1].M);
			}

			R_DrawIVBO(&renderInfo[eye].eye);
			R_ReleaseIVBO();
		}

		if (vr_ovr_lumoverdrive->value)
		{
			GL_MBind(1,0);
			// Ping-pong the overdrive history buffer for next frame.
			currentFrame = (currentFrame ? 0 : 1);
		}

		// Restore default texture/program/vertex-array state.
		GL_MBind(0,0);
		glUseProgram(0);

		glDisableVertexAttribArray (0);
		glDisableVertexAttribArray (1);
		glDisableVertexAttribArray (2);
		glDisableVertexAttribArray (3);
		glDisableVertexAttribArray (4);

		glEnableClientState (GL_COLOR_ARRAY);
		glEnableClientState (GL_TEXTURE_COORD_ARRAY);
		glEnableClientState (GL_VERTEX_ARRAY);

		//		glTexCoordPointer (2, GL_FLOAT, sizeof(texCoordArray[0][0]), texCoordArray[0][0]);
		//		glVertexPointer (3, GL_FLOAT, sizeof(vertexArray[0]), vertexArray[0]);

	}

}