Example #1
void AppLocal::DrawPassThroughCamera( const float bufferFov, const Quatf &orientation )
{
	if ( cameraFovHorizontal == 0 || cameraFovVertical == 0 )
	{
		return;
	}

//    const Matrix4f		viewMatrix = Matrix4f( orientation ).Transposed();

	glDisable( GL_CULL_FACE );

	// flipped for portrait mode
	const float znear = 0.5f;
	const float zfar = 150.0f;
	const Matrix4f projectionMatrix = Matrix4f::PerspectiveRH(
			DegreeToRad(bufferFov), 1.0f, znear, zfar);

	const Matrix4f modelMatrix = Matrix4f(
			tan( DegreeToRad( cameraFovHorizontal/2 ) ), 0, 0, 0,
			0, tan( DegreeToRad( cameraFovVertical/2 ) ), 0, 0,
			0, 0, 0, -1,
			0, 0, 0, 1 );

	const GlProgram & prog = interpolatedCameraWarp;
	glUseProgram( prog.program );

	// interpolated time warp to de-waggle the rolling shutter camera image
	// enableCameraTimeWarp 0 = none
	// enableCameraTimeWarp 1 = full frame
	// enableCameraTimeWarp 2 = interpolated
	for ( int i = 0 ; i < 2 ; i++ )
	{
		const Matrix4f	timeWarp = enableCameraTimeWarp ?
				CalculateCameraTimeWarpMatrix( cameraFramePose[ enableCameraTimeWarp == 2 ? i : 0].Predicted.Pose.Orientation,
											   orientation )
				: Matrix4f::Identity();

		const Matrix4f cameraMvp = projectionMatrix * timeWarp * modelMatrix;

		glUniformMatrix4fv( i ? prog.uTexm : prog.uMvp, 1, GL_FALSE,
				cameraMvp.Transposed().M[0]);
	}

	glUniform4f(prog.uColor, 1, 1, 1, 1 );
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_EXTERNAL_OES, cameraTexture->textureId);
	panelGeometry.Draw();
	glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0 );	// don't leave it bound
}
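
Every example on this page hinges on the same degrees-to-radians conversion. A minimal stand-in for DegreeToRad, assuming only the usual pi/180 factor (the SDK's own helper may be templated or defined differently):

// Hypothetical stand-in: convert an angle in degrees to radians.
inline float DegreeToRad( float degrees )
{
	return degrees * ( 3.14159265358979323846f / 180.0f );
}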
Example #2
StereoConfig::StereoConfig(StereoMode mode, const Viewport& vp)
    : Mode(mode),
      InterpupillaryDistance(0.064f), AspectMultiplier(1.0f),
      FullView(vp), DirtyFlag(true),
      YFov(0), Aspect(vp.w / float(vp.h)), ProjectionCenterOffset(0),
      OrthoPixelOffset(0)
{
    // And default distortion for it.
    Distortion.SetCoefficients(1.0f, 0.22f, 0.24f);
    Distortion.Scale = 1.0f; // Will be computed later.

    // Fit left of the image.
    DistortionFitX = -1.0f;
    DistortionFitY = 0.0f;

    // Initialize "fake" default HMD values for testing without HMD plugged in.
    // These default values match those returned by the HMD.
    HMD.HResolution            = 1280;
    HMD.VResolution            = 800;
    HMD.HScreenSize            = 0.14976f;
    HMD.VScreenSize            = HMD.HScreenSize / (1280.0f / 800.0f);
    HMD.InterpupillaryDistance = InterpupillaryDistance;
    HMD.LensSeparationDistance = 0.064f;
    HMD.EyeToScreenDistance    = 0.041f;
    HMD.DistortionK[0]         = Distortion.K[0];
    HMD.DistortionK[1]         = Distortion.K[1];
    HMD.DistortionK[2]         = Distortion.K[2];
    HMD.DistortionK[3]         = 0;

    Set2DAreaFov(DegreeToRad(85.0f));
}
Example #3
void Camera::setZoom(double z) {
	// set field of view (specified in degrees)
	zoom = z;
	theta_w = Theta_W(DegreeToRad(z));

	double cot_w = double(1) / tan(theta_w / double(2));
	double cot_h = double(1) / tan(DegreeToRad(zoom / double(2)));

	S_xy = Matrix4(	Vector4(cot_w, 0, 0, 0),
					Vector4(0, cot_h, 0, 0),
					Vector4(0, 0, 1, 0),
					Vector4(0, 0, 0, 1));
	S_xy_inv = Matrix4(Vector4(tan(theta_w / double(2)), 0, 0, 0),
						Vector4(0, tan(DegreeToRad(zoom / double(2))), 0, 0),
						Vector4(0, 0, 1, 0),
						Vector4(0, 0, 0, 1));
	//Check_Matrix(3);
}
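
For reference, with $\theta_w$ the horizontal and $\theta_h = \mathrm{DegreeToRad}(zoom)$ the vertical field of view, the intended relationship between the two matrices is (a sketch of the inverse, not taken from the original source):

$$
S_{xy} = \operatorname{diag}\!\left(\cot\tfrac{\theta_w}{2},\ \cot\tfrac{\theta_h}{2},\ 1,\ 1\right),
\qquad
S_{xy}^{-1} = \operatorname{diag}\!\left(\tan\tfrac{\theta_w}{2},\ \tan\tfrac{\theta_h}{2},\ 1,\ 1\right)
$$

so the x entry of S_xy_inv uses theta_w rather than the vertical zoom angle.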
Example #4
void StereoConfig::updateComputedState()
{
    // Need to compute all of the following:
    //   - Aspect Ratio
    //   - FOV
    //   - Projection offsets for 3D
    //   - Distortion XCenterOffset
    //   - Update 2D
    //   - Initialize EyeRenderParams

    // Compute aspect ratio. Stereo mode cuts width in half.
    Aspect = float(FullView.w) / float(FullView.h);
    Aspect *= (Mode == Stereo_None) ? 1.0f : 0.5f;
    Aspect *= AspectMultiplier; 

    updateDistortionOffsetAndScale();

    // Compute Vertical FOV based on distance, distortion, etc.
    // Distance from vertical center to render vertical edge perceived through the lens.
    // This will be larger than normal screen size due to magnification & distortion.
    //
    // This perceivedHalfRTDistance equation should hold as long as the render target
    // and display have the same aspect ratios. What we'd like to know is where the edge
    // of the render target will be on the perceived screen surface. With NO LENS,
    // the answer would be:
    //
    //  halfRTDistance = (VScreenSize / 2) * aspect *
    //                   DistortionFn_Inverse( DistortionScale / aspect )
    //
    // To model the optical lens we eliminate DistortionFn_Inverse. Aspect ratios
    // cancel out, so we get:
    //
    //  halfRTDistance = (VScreenSize / 2) * DistortionScale
    //
    if (Mode == Stereo_None)
    {
        YFov = DegreeToRad(80.0f);
    }
    else
    {
        float perceivedHalfRTDistance = (HMD.VScreenSize / 2) * Distortion.Scale;
        YFov = 2.0f * atan(perceivedHalfRTDistance/HMD.EyeToScreenDistance);
    }
    
    updateProjectionOffset();
    update2D();
    updateEyeParams();

    DirtyFlag = false;
}
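
To make the FOV formula concrete, here is a self-contained sketch that plugs in the "fake" default HMD values from the StereoConfig constructor above; the distortion scale of 1.0 is an illustrative placeholder (the real value comes from updateDistortionOffsetAndScale):

#include <cmath>
#include <cstdio>

int main()
{
    // Default HMD values from the StereoConfig constructor above.
    const float vScreenSize         = 0.14976f / ( 1280.0f / 800.0f );
    const float eyeToScreenDistance = 0.041f;
    const float distortionScale     = 1.0f;   // placeholder; computed by the real code

    // halfRTDistance = (VScreenSize / 2) * DistortionScale
    const float perceivedHalfRTDistance = ( vScreenSize / 2.0f ) * distortionScale;
    const float yFov = 2.0f * std::atan( perceivedHalfRTDistance / eyeToScreenDistance );

    std::printf( "YFov = %.3f rad (%.1f degrees)\n", yFov, yFov * 180.0f / 3.14159265f );
    return 0;
}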
Example #5
void EyePostRender::DrawEyeCalibrationLines( const float bufferFovDegrees, const int eye )
{
	// Optionally draw thick calibration lines into the texture,
	// which will be overlaid by the thinner pre-distorted lines
	// later -- they should match very closely!
	const Matrix4f projectionMatrix =
	//Matrix4f::Identity();
	 Matrix4f::PerspectiveRH( DegreeToRad( bufferFovDegrees ), 1.0f, 0.01f, 2000.0f );

	const GlProgram & prog = UntexturedMvpProgram;
	glUseProgram( prog.program );
	glLineWidth( 3.0f );
	glUniform4f( prog.uColor, 0, static_cast<float>( 1 - eye ), static_cast<float>( eye ), 1.0f );
	glUniformMatrix4fv( prog.uMvp, 1, GL_TRUE, projectionMatrix.M[0] );

	glBindVertexArray( CalibrationLines.vertexArrayObject );
	glDrawElements( GL_LINES, CalibrationLines.indexCount, GL_UNSIGNED_SHORT, NULL );
	glBindVertexArray( 0 );
}
Example #6
void SensorRangeImpl::GetSensorRange(SensorRange* r)
{
    r->MaxAcceleration = AccelScale * 9.81f;
    r->MaxRotationRate = DegreeToRad((float)GyroScale);
    r->MaxMagneticField= MagScale * 0.001f;
}
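
The same conversions with illustrative full-scale values (AccelScale, GyroScale and MagScale below are assumptions for the sake of the arithmetic, not the device's actual configuration):

#include <cstdio>

int main()
{
    // Assumed full-scale settings; the real values come from the configured sensor ranges.
    const float accelScale = 8.0f;      // accelerometer range in g
    const float gyroScale  = 2000.0f;   // gyro range in degrees per second
    const float magScale   = 2500.0f;   // magnetometer range, scaled by 0.001 as above

    const float maxAcceleration  = accelScale * 9.81f;                   // m/s^2
    const float maxRotationRate  = gyroScale * 3.14159265f / 180.0f;     // rad/s, i.e. DegreeToRad
    const float maxMagneticField = magScale * 0.001f;

    std::printf( "%.2f m/s^2  %.2f rad/s  %.2f\n",
                 maxAcceleration, maxRotationRate, maxMagneticField );
    return 0;
}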
Example #7
void Camera::setAspectRatioScale(double d){
	aspect_ratio = d;
	theta_w = Theta_W(DegreeToRad(zoom));
}
Example #8
Matrix4f OvrSceneView::ProjectionMatrixForEye( const int eye, const float fovDegrees ) const
{
	// We may want to make per-eye projection matrices if we move away from
	// nearly-centered lenses.
	return Matrix4f::PerspectiveRH( DegreeToRad( fovDegrees ), 1.0f, Znear, Zfar );
}
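
The projection here depends only on the vertical FOV (converted with DegreeToRad) and a fixed aspect ratio of 1.0. A minimal sketch of the diagonal scale terms of a right-handed perspective matrix, using the standard convention rather than the SDK's actual PerspectiveRH implementation:

#include <cmath>
#include <cstdio>

int main()
{
    const float fovDegrees = 90.0f;                               // example eye FOV
    const float fovRadians = fovDegrees * 3.14159265f / 180.0f;   // DegreeToRad equivalent
    const float tanHalfFov = std::tan( fovRadians * 0.5f );

    // With aspect = 1.0 the x and y scale terms are identical.
    const float xScale = 1.0f / tanHalfFov;
    const float yScale = 1.0f / tanHalfFov;

    std::printf( "xScale = %.3f  yScale = %.3f\n", xScale, yScale );
    return 0;
}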
Example #9
void OculusWorldDemoApp::OnKey(OVR::KeyCode key, int chr, bool down, int modifiers)
{
    if (Menu.OnKey(key, chr, down, modifiers))
        return;

    // Handle player movement keys.
    if (ThePlayer.HandleMoveKey(key, down))
        return;

    switch(key)
    {
    case Key_Q:
        if (down && (modifiers & Mod_Control))
            pPlatform->Exit(0);
        break;
        
    case Key_Escape:
        // Back to primary windowed
        if (!down) ChangeDisplay ( true, false, false );
        break;

    case Key_F9:
        // Cycle through displays, going fullscreen on each one.
        if (!down) ChangeDisplay ( false, true, false );
        break;
        
#ifdef OVR_OS_MAC
     // F11 is reserved on Mac, F10 doesn't work on Windows
    case Key_F10:  
#else
    case Key_F11:
#endif
        if (!down) ChangeDisplay ( false, false, true );
        break;
        
    case Key_R:
        if (!down)
        {
            ovrHmd_ResetSensor(Hmd);
            Menu.SetPopupMessage("Sensor Fusion Reset");
        }
        break;

    case Key_Space:
        if (!down)
        {
            TextScreen = (enum TextScreen)((TextScreen + 1) % Text_Count);
        }
        break;

    // Distortion correction adjustments
    case Key_Backslash:
        break;

    // Holding down Shift key accelerates adjustment velocity.
    case Key_Shift:
        ShiftDown = down;
        break;
    case Key_Control:
        CtrlDown = down;
        break;

    // Reset the camera position in case we get stuck
    case Key_T:
        if (down)
        {
            struct {
                float  x, z;
                float  YawDegrees;
            }  Positions[] =
            {
               // x         z           Yaw
                { 7.7f,     -1.0f,      180.0f },   // The starting position.
                { 10.0f,    10.0f,      90.0f  },   // Outside, looking at some trees.
                { 19.26f,   5.43f,      22.0f  },   // Outside, looking at the fountain.
            };

            static int nextPosition = 0;
            nextPosition = (nextPosition + 1) % (sizeof(Positions)/sizeof(Positions[0]));

            ThePlayer.BodyPos = Vector3f(Positions[nextPosition].x,
                                         ThePlayer.UserEyeHeight, Positions[nextPosition].z);
            ThePlayer.BodyYaw = DegreeToRad( Positions[nextPosition].YawDegrees );
        }
        break;

    case Key_Num1:
        ThePlayer.BodyPos = Vector3f(-1.85f, 6.0f, -0.52f);
        ThePlayer.BodyPos.y += ThePlayer.UserEyeHeight;
        ThePlayer.BodyYaw = 3.1415f / 2;
        ThePlayer.HandleMovement(0, &CollisionModels, &GroundCollisionModels, ShiftDown);
        break;

    default:
        break;
    }
}
Example #10
	MsgInputOVR::MsgInputOVR()
	{
		manager_ = *DeviceManager::Create();
		manager_->SetMessageHandler(this);

		hmd_ = *manager_->EnumerateDevices<HMDDevice>().CreateDevice();
		if (hmd_)
		{
			sensor_ = *hmd_->GetSensor();
			if (hmd_->GetDeviceInfo(&hmd_info_))
			{
				sconfig_.SetHMDInfo(hmd_info_);
			}
		}
		else
		{
			sensor_ = *manager_->EnumerateDevices<SensorDevice>().CreateDevice();
		}

		if (sensor_)
		{
			sfusion_.AttachToSensor(sensor_);
			sfusion_.SetDelegateMessageHandler(this);
			sfusion_.SetPredictionEnabled(true);
		}

		uint32_t width;
		uint32_t height;
		if (hmd_info_.HResolution > 0)
		{
			width = hmd_info_.HResolution;
			height = hmd_info_.VResolution;
		}
		else
		{
			width = 1280;
			height = 800;
		}

		sconfig_.SetFullViewport(Util::Render::Viewport(0, 0, width, height));
		sconfig_.SetStereoMode(Util::Render::Stereo_LeftRight_Multipass);

		if (hmd_info_.HScreenSize > 0.0f)
		{
			if (hmd_info_.HScreenSize > 0.140f) // 7" screen
			{
				sconfig_.SetDistortionFitPointVP(-1.0f, 0.0f);
			}
			else
			{
				sconfig_.SetDistortionFitPointVP(0.0f, 1.0f);
			}
		}

		sconfig_.Set2DAreaFov(DegreeToRad(85.0f));

		Util::Render::StereoEyeParams left_eye = sconfig_.GetEyeRenderParams(Util::Render::StereoEye_Left);
		float scale = sconfig_.GetDistortionScale();

		RenderEngine& re = Context::Instance().RenderFactoryInstance().RenderEngineInstance();
		if (hmd_info_.VScreenSize > 0)
		{
			re.DefaultFOV(atan(hmd_info_.VScreenSize / 2 * scale / hmd_info_.EyeToScreenDistance) * 2);
			re.DefaultRenderWidthScale(scale / 2);
			re.DefaultRenderHeightScale(scale);
			re.StereoSeparation(-hmd_info_.InterpupillaryDistance * scale);
		}
		re.OVRScale(scale);
		re.OVRHMDWarpParam(float4(left_eye.pDistortion->K));
		re.OVRChromAbParam(float4(left_eye.pDistortion->ChromaticAberration));
		re.OVRXCenterOffset(left_eye.pDistortion->XCenterOffset);
	}
Example #11
int OculusRoomTinyApp::OnStartup(const char* args)
{
    OVR_UNUSED(args);


    // *** Oculus HMD & Sensor Initialization

    // Create DeviceManager and first available HMDDevice from it.
    // Sensor object is created from the HMD, to ensure that it is on the
    // correct device.

    pManager = *DeviceManager::Create();

    // We'll handle its messages in this case.
    pManager->SetMessageHandler(this);


    int         detectionResult = IDCONTINUE;
    const char* detectionMessage;

    do 
    {
        // Release Sensor/HMD in case this is a retry.
        pSensor.Clear();
        pHMD.Clear();
        RenderParams.MonitorName.Clear();

        pHMD  = *pManager->EnumerateDevices<HMDDevice>().CreateDevice();
        if (pHMD)
        {
            pSensor = *pHMD->GetSensor();

            // This will initialize HMDInfo with information about configured IPD,
            // screen size and other variables needed for correct projection.
            // We pass HMD DisplayDeviceName into the renderer to select the
            // correct monitor in full-screen mode.
            if (pHMD->GetDeviceInfo(&HMDInfo))
            {            
                RenderParams.MonitorName = HMDInfo.DisplayDeviceName;
                RenderParams.DisplayId = HMDInfo.DisplayId;
                SConfig.SetHMDInfo(HMDInfo);
            }
        }
        else
        {            
            // If we didn't detect an HMD, try to create the sensor directly.
            // This is useful for debugging sensor interaction; it is not needed in
            // a shipping app.
            pSensor = *pManager->EnumerateDevices<SensorDevice>().CreateDevice();
        }


        // If there was a problem detecting the Rift, display appropriate message.
        detectionResult  = IDCONTINUE;        

        if (!pHMD && !pSensor)
            detectionMessage = "Oculus Rift not detected.";
        else if (!pHMD)
            detectionMessage = "Oculus Sensor detected; HMD Display not detected.";
        else if (!pSensor)
            detectionMessage = "Oculus HMD Display detected; Sensor not detected.";
        else if (HMDInfo.DisplayDeviceName[0] == '\0')
            detectionMessage = "Oculus Sensor detected; HMD display EDID not detected.";
        else
            detectionMessage = 0;

        if (detectionMessage)
        {
            String messageText(detectionMessage);
            messageText += "\n\n"
                           "Press 'Try Again' to run retry detection.\n"
                           "Press 'Continue' to run full-screen anyway.";

            detectionResult = ::MessageBoxA(0, messageText.ToCStr(), "Oculus Rift Detection",
                                            MB_CANCELTRYCONTINUE|MB_ICONWARNING);

            if (detectionResult == IDCANCEL)
                return 1;
        }

    } while (detectionResult != IDCONTINUE);

    
    if (HMDInfo.HResolution > 0)
    {
        Width  = HMDInfo.HResolution;
        Height = HMDInfo.VResolution;
    }


    if (!setupWindow())
        return 1;
    
    if (pSensor)
    {
        // We need to attach sensor to SensorFusion object for it to receive 
        // body frame messages and update orientation. SFusion.GetOrientation() 
        // is used in OnIdle() to orient the view.
        SFusion.AttachToSensor(pSensor);
        SFusion.SetDelegateMessageHandler(this);
    }

    
    // *** Initialize Rendering
   
    // Enable multi-sampling by default.
    RenderParams.Multisample = 4;
    RenderParams.Fullscreen  = true;

    // Setup Graphics.
    pRender = *RenderTiny::D3D10::RenderDevice::CreateDevice(RenderParams, (void*)hWnd);
    if (!pRender)
        return 1;


    // *** Configure Stereo settings.

    SConfig.SetFullViewport(Viewport(0,0, Width, Height));
    SConfig.SetStereoMode(Stereo_LeftRight_Multipass);

    // Configure proper Distortion Fit.
    // For 7" screen, fit to touch left side of the view, leaving a bit of invisible
    // screen on the top (saves on rendering cost).
    // For smaller screens (5.5"), fit to the top.
    if (HMDInfo.HScreenSize > 0.0f)
    {
        if (HMDInfo.HScreenSize > 0.140f) // 7"
            SConfig.SetDistortionFitPointVP(-1.0f, 0.0f);
        else
            SConfig.SetDistortionFitPointVP(0.0f, 1.0f);
    }

    pRender->SetSceneRenderScale(SConfig.GetDistortionScale());

    SConfig.Set2DAreaFov(DegreeToRad(85.0f));


    // *** Populate Room Scene

    // This creates lights and models.
    PopulateRoomScene(&Scene, pRender);


    LastUpdate = GetAppTime();
    return 0;
}
Example #12
void AppLocal::DrawEyeViewsPostDistorted( Matrix4f const & centerViewMatrix, const int numPresents )
{
	// update vr lib systems after the app frame, but before rendering anything
	GetGuiSys().Frame( this, vrFrame, GetVRMenuMgr(), GetDefaultFont(), GetMenuFontSurface() );
	GetGazeCursor().Frame( this->lastViewMatrix, vrFrame.DeltaSeconds );

	if ( ShowFPS )
	{
		static double  LastFrameTime = TimeInSeconds();
		static double  AccumulatedFrameInterval = 0.0;
		static int   NumAccumulatedFrames = 0;
		static float LastFrameRate = 60.0f;

		double currentFrameTime = TimeInSeconds();
		double frameInterval = currentFrameTime - LastFrameTime;
		AccumulatedFrameInterval += frameInterval;
		NumAccumulatedFrames++;
		if ( NumAccumulatedFrames > FPS_NUM_FRAMES_TO_AVERAGE ) {
			double interval = ( AccumulatedFrameInterval / NumAccumulatedFrames );  // averaged
			AccumulatedFrameInterval = 0.0;
			NumAccumulatedFrames = 0;
			LastFrameRate = 1.0f / float( interval > 0.000001 ? interval : 0.00001 );
		}    
		fontParms_t fontParms;
		fontParms.CenterHoriz = true;
		fontParms.Billboard = true;
        fontParms.TrackRoll = true;
		const Vector3f viewPos( GetViewMatrixPosition( centerViewMatrix ) );
		const Vector3f viewFwd( GetViewMatrixForward( centerViewMatrix ) );
		const Vector3f textPos( viewPos + viewFwd * 1.5f );
		GetWorldFontSurface().DrawTextBillboarded3Df( GetDefaultFont(), 
                fontParms, textPos, 0.0025f, Vector4f( 1.0f, 0.0f, 0.0f, 1.0f ), "%.1f fps", LastFrameRate );
		LastFrameTime = currentFrameTime;
	}

	if ( InfoTextEndFrame >= vrFrame.FrameNumber )
	{
		fontParms_t fontParms;
		fontParms.CenterHoriz = true;
		fontParms.Billboard = true;
        fontParms.TrackRoll = true;
		const Vector3f viewPos( GetViewMatrixPosition( centerViewMatrix ) );
		const Vector3f viewFwd( GetViewMatrixForward( centerViewMatrix ) );
		const Vector3f textPos( viewPos + viewFwd * 1.5f );
		GetWorldFontSurface().DrawTextBillboarded3Df( GetDefaultFont(), 
                fontParms, textPos, 0.0025f, Vector4f( 1.0f, 1.0f, 1.0f, 1.0f ), InfoText.ToCStr() );
	}

	GetMenuFontSurface().Finish( centerViewMatrix );
	GetWorldFontSurface().Finish( centerViewMatrix );
	GetVRMenuMgr().Finish( centerViewMatrix );

	// Increase the fov by about 10 degrees if we are not holding 60 fps so
	// there is less black pull-in at the edges.
	//
	// Doing this dynamically based just on time causes visible flickering at the
	// periphery when the fov is increased, so only do it if minimumVsyncs is set.
	const float fovDegrees = hmdInfo.SuggestedEyeFov +
			( ( ( SwapParms.MinimumVsyncs > 1 ) || ovr_GetPowerLevelStateThrottled() ) ? 10.0f : 0.0f ) +
			( ( !showVignette ) ? 5.0f : 0.0f );

	// DisplayMonoMode uses single-eye rendering for a speed improvement
	// and / or high refresh rate double-scan hardware modes.
	const int numEyes = renderMonoMode ? 1 : 2;

	// Flush out and report any errors
	GL_CheckErrors("FrameStart");

	if ( drawCalibrationLines && calibrationLinesDrawn )
	{
		// doing a time warp test, don't generate new images
		LOG( "drawCalibrationLines && calibrationLinesDrawn" );
	}
	else
	{
		// possibly change the buffer parameters
		EyeTargets->BeginFrame( vrParms );

		for (int eye = 0; eye < numEyes; eye++)
		{
			EyeTargets->BeginRenderingEye( eye );

			// Call back to the app for drawing.
			const Matrix4f mvp = appInterface->DrawEyeView( eye, fovDegrees );

			DrawActivity( mvp );

			DrawPassThroughCamera( fovDegrees, vrFrame.PoseState.Pose.Orientation );

			GetVRMenuMgr().RenderSubmitted( mvp.Transposed() );
			GetMenuFontSurface().Render3D( GetDefaultFont(), mvp.Transposed() );
			GetWorldFontSurface().Render3D( GetDefaultFont(), mvp.Transposed() );

			glDisable( GL_DEPTH_TEST );
			glDisable( GL_CULL_FACE );

			// Optionally draw thick calibration lines into the texture,
			// which will be overlayed by the thinner origin cross when
			// distorted to the window.
			if ( drawCalibrationLines )
			{
				EyeDecorations.DrawEyeCalibrationLines(fovDegrees, eye);
				calibrationLinesDrawn = true;
			}
			else
			{
				calibrationLinesDrawn = false;
			}

			DrawDialog( mvp );

			GetGazeCursor().Render( eye, mvp );

			GetDebugLines().Render( mvp.Transposed() );

			if ( showVignette )
			{
				// Draw a thin vignette at the edges of the view so clamping will give black
				// This will not be reflected correctly in overlay planes.
				// EyeDecorations.DrawEyeVignette();

				EyeDecorations.FillEdge( vrParms.resolution, vrParms.resolution );
			}

			EyeTargets->EndRenderingEye( eye );
		}
	}

	// This eye set is complete, use it now.
	if ( numPresents > 0 )
	{
		const CompletedEyes eyes = EyeTargets->GetCompletedEyes();

		for ( int eye = 0 ; eye < TimeWarpParms::MAX_WARP_EYES ; eye++ )
		{
			const Matrix4f proj = Matrix4f::PerspectiveRH( DegreeToRad(fovDegrees), 1.0f, 1, 100);
			SwapParms.Images[eye][0].TexCoordsFromTanAngles = TanAngleMatrixFromProjection( proj );

			SwapParms.Images[eye][0].TexId = eyes.Textures[renderMonoMode ? 0 : eye ];
			SwapParms.Images[eye][0].Pose = SensorForNextWarp.Predicted;
		}

		ovr_WarpSwap( OvrMobile, &SwapParms );
	}
}