Exemple #1
0
// Pushes the current Oculus Rift stereo configuration (LibOVR 0.2.x) into the
// Vision engine: per-eye custom projection matrices, eye camera offsets
// (half-IPD plus a minimal head model), and the lens-distortion/chromatic-
// aberration parameters consumed by the post-processing render loop.
// Only acts when the stereo mode is side-by-side multipass; otherwise no-op.
void SyncHMDSettings( HmdRenderLoop* pRenderLoop, HMDManager* pHMDManager, VisContextCamera_cl* pLeftEyeCam, VisContextCamera_cl* pRightEyeCam )
{
  OVR::Util::Render::StereoConfig* pStereoConfig = pHMDManager->GetStereoConfig();

  if ( pStereoConfig->GetStereoMode() == OVR::Util::Render::Stereo_LeftRight_Multipass )
  {
    // Near/far planes come from the first (left-eye) context; both eyes use the same ranges.
    float fNear = g_spContextP1->GetViewProperties()->getNear();
    float fFar = g_spContextP1->GetViewProperties()->getFar();

    // Retrieve eye parameters for the left eye.
    // (As the whole thing is rather symmetric, we can use this info, invert values where appropriate, and use them for the right eye as well.)

    // Retrieve the projection matrix and copy it into a hkvMat4.
    // LibOVR stores matrices row-major, hence hkvMat4::RowMajor on import.
    const OVR::Util::Render::StereoEyeParams& eyeParamsLeft = pStereoConfig->GetEyeRenderParams( OVR::Util::Render::StereoEye_Left );
    hkvMat4 m4Projection( hkvNoInitialization );
    m4Projection.set( (float*)&eyeParamsLeft.Projection.M, hkvMat4::RowMajor );

    // Adjust the OculusVR projection matrix to work with Vision:
    // - Flip the sign of the horizontal projection-center offset ([2][0]).
    // - Rebuild the depth-mapping entries ([2][2], [3][2]) from the context's own
    //   near/far ranges. (As of LibOVR 0.2.3 they are hardwired to 0.01 and 1000.0,
    //   respectively, inside the SDK.)
    // NOTE(review): m_Column[c][r] indexing assumed column-major storage here —
    // [2][3] = 1.0f is the w-row entry of the third column (D3D-style depth range).
    m4Projection.m_Column[2][0] = -m4Projection.m_Column[2][0];
    m4Projection.m_Column[2][2] = fFar / ( fFar - fNear ); // -m4Projection.m_Column[2][2];
    m4Projection.m_Column[2][3] = 1.0f;
    m4Projection.m_Column[3][2] = ( fFar * fNear ) / ( fNear - fFar );
    g_spContextP1->SetCustomProjectionMatrix( &m4Projection );

    // Invert the element at [2][0] again to get the projection matrix for the right eye
    // (the projection-center offset is mirrored between eyes; everything else is identical).
    m4Projection.m_Column[2][0] = -m4Projection.m_Column[2][0];
    g_spContextP2->SetCustomProjectionMatrix( &m4Projection );

    // Retrieve information for adjusting the eye position based on eye separation.
    // ViewAdjust's x-translation holds half the interpupillary distance (in meters).
    float fHalfEyeSeparation = eyeParamsLeft.ViewAdjust.M[ 0 ][ 3 ];   // Note: LibOVR stores matrices in row-major order.

    // This is the minimal head model that's used in the OculusVR samples as well:
    // eyes sit slightly in front of and above the neck pivot the cameras attach to.
    const float fEyeProtrusion = 9.0f;                        // That would be 9cm.
    const float fEyeHeight = 15.0f;                           // That's 15cm.
    const float fEyeOffset = fHalfEyeSeparation * 100.0f;     // Converting LibOVRs data from meters to centimeters. Also: Note that local y points to the user's left.
    pLeftEyeCam->SetLocalPosition( fEyeProtrusion, fEyeOffset, fEyeHeight );
    pRightEyeCam->SetLocalPosition( fEyeProtrusion, -fEyeOffset, fEyeHeight );

    // Determine post-processing parameters for the barrel-distortion shader.
    // Lens center is offset horizontally by half the SDK's XCenterOffset (NDC -> UV).
    const OVR::Util::Render::DistortionConfig* pDistortion = eyeParamsLeft.pDistortion;
    hkvVec4 v4LensCenter_ScreenCenter( 0.5f + pDistortion->XCenterOffset * 0.5f, 0.5f, 0.5f, 0.5f );
    float fScaleFactor = 1.0f / pDistortion->Scale;
    // Per-eye aspect ratio: each eye only gets half the screen width.
    float fAspectRatio = 0.5f * Vision::Video.GetXRes() / float( Vision::Video.GetYRes() );
    hkvVec4 v4Scale_ScaleIn( 0.5f * fScaleFactor, 0.5f * fScaleFactor * fAspectRatio, 2.0f, 2.0f / fAspectRatio );
    // K[0..3]: radial distortion polynomial coefficients from the HMD profile.
    hkvVec4 v4HmdWarpParameters( pDistortion->K[ 0 ], pDistortion->K[ 1 ], pDistortion->K[ 2 ], pDistortion->K[ 3 ] );
    hkvVec4 v4ChromaticAberration( pDistortion->ChromaticAberration[ 0 ], pDistortion->ChromaticAberration[ 1 ], pDistortion->ChromaticAberration[ 2 ], pDistortion->ChromaticAberration[ 3 ] );
    pRenderLoop->SetPostProcessParameters( v4LensCenter_ScreenCenter, v4Scale_ScaleIn, v4HmdWarpParameters, v4ChromaticAberration );
  }
}
Exemple #2
0
// Initializes the Oculus Rift (LibOVR 0.2.x): starts the SDK, creates the
// device manager, HMD and sensor devices, configures the stereo renderer,
// caches per-eye rendering parameters into the module globals, and enables
// sensor-fusion motion prediction.
//
// Side effects: writes renderScale, textureWidth/Height, left*/right* eye
// globals, Info/InfoLoaded, riftConnected, riftX/Y, riftResolutionX/Y.
//
// BUG FIX: the right-eye globals were previously copied from leftEye
// (copy-paste error), so the right eye rendered with the left eye's
// viewport, projection and view adjustment. They now come from rightEye.
void initRift() {
    OVR::System::Init(OVR::Log::ConfigureDefaultLog(OVR::LogMask_All));

    pFusionResult = new OVR::SensorFusion();
    pManager = *OVR::DeviceManager::Create();

    //pManager->SetMessageHandler(this);

    pHMD = *pManager->EnumerateDevices<OVR::HMDDevice>().CreateDevice();

    // Configure the stereo renderer with defaults first; refined below once
    // the real HMDInfo is available.
    stereo.Set2DAreaFov(OVR::DegreeToRad(50.0f));
    stereo.SetFullViewport(OVR::Util::Render::Viewport(0,0, width, height));
    stereo.SetStereoMode(OVR::Util::Render::Stereo_LeftRight_Multipass);
    stereo.SetDistortionFitPointVP(-1.0f, 0.0f); // fit distortion to the left screen edge
    renderScale = stereo.GetDistortionScale();

    if (pHMD)
    {
        pSensor = *pHMD->GetSensor();

        InfoLoaded = pHMD->GetDeviceInfo(&Info);

        // NOTE(review): overrides the SDK-reported display name with the
        // configured monitor name; strncpy does not null-terminate if
        // RiftMonitorName is >= 32 chars — confirm the source is shorter.
        strncpy(Info.DisplayDeviceName, RiftMonitorName, 32);

        RiftDisplayId = Info.DisplayId;

        EyeDistance = Info.InterpupillaryDistance;
        for(int i = 0; i < 4; ++i) {
            DistortionK[i] = Info.DistortionK[i];
            DistortionChromaticAberration[i] = Info.ChromaAbCorrection[i];
        }

        // Re-run the stereo setup with the actual device info.
        stereo.SetHMDInfo(Info);
        stereo.SetDistortionFitPointVP(-1.0f, 0.0f);
        renderScale = stereo.GetDistortionScale();
    }
    else
    {
        // No HMD found — still try to attach a bare tracking sensor.
        pSensor = *pManager->EnumerateDevices<OVR::SensorDevice>().CreateDevice();
    }

    // Render-target size is the window size scaled up to compensate for the
    // pixel shrink caused by the distortion shader.
    textureWidth = width * renderScale;
    textureHeight = height * renderScale;

    leftEye  = stereo.GetEyeRenderParams(OVR::Util::Render::StereoEye_Left);
    rightEye = stereo.GetEyeRenderParams(OVR::Util::Render::StereoEye_Right);

    // Left eye rendering parameters
    leftVP         = leftEye.VP;
    leftProjection = leftEye.Projection;
    leftViewAdjust = leftEye.ViewAdjust;

    // Right eye rendering parameters (fixed: previously copied from leftEye)
    rightVP         = rightEye.VP;
    rightProjection = rightEye.Projection;
    rightViewAdjust = rightEye.ViewAdjust;

    if (pSensor)
    {
        pFusionResult->AttachToSensor(pSensor);
        pFusionResult->SetPredictionEnabled(true);
        float motionPred = pFusionResult->GetPredictionDelta(); // adjust in 0.01 increments
        if(motionPred < 0) motionPred = 0; // never predict backwards in time
        pFusionResult->SetPrediction(motionPred);

        if(InfoLoaded) {
            riftConnected = true;

            riftX = Info.DesktopX;
            riftY = Info.DesktopY;

            riftResolutionX = Info.HResolution;
            riftResolutionY = Info.VResolution;
        }
    }

#ifdef WIN32
    getRiftDisplay();
#endif
}
Exemple #3
0
#include "Rift.h"

#include "../ibex.h"

// Module-wide Oculus Rift state (LibOVR 0.2.x). Populated by initRift();
// the static initializers below only provide placeholder values.
OVR::Ptr<OVR::DeviceManager>    pManager = 0;
OVR::Ptr<OVR::HMDDevice>        pHMD = 0;
OVR::Util::Render::StereoConfig stereo;
OVR::Ptr<OVR::SensorDevice>     pSensor = 0;
OVR::SensorFusion*              pFusionResult = 0;
OVR::HMDInfo                    Info;
bool                            InfoLoaded = false;
bool                            riftConnected = false;
float                           renderScale;
bool                            renderScaleChanged = false;

// NOTE(review): these query `stereo` during static initialization, before
// initRift() has called SetHMDInfo — the values here are defaults and are
// overwritten by initRift().
OVR::Util::Render::StereoEyeParams  leftEye  = stereo.GetEyeRenderParams(OVR::Util::Render::StereoEye_Left);
OVR::Util::Render::StereoEyeParams  rightEye = stereo.GetEyeRenderParams(OVR::Util::Render::StereoEye_Right);

// Left eye rendering parameters
OVR::Util::Render::Viewport         leftVP         = leftEye.VP;
OVR::Matrix4f                       leftProjection = leftEye.Projection;
OVR::Matrix4f                       leftViewAdjust = leftEye.ViewAdjust;

// Right eye rendering parameters (fixed: previously copied from leftEye,
// a copy-paste error that gave the right eye the left eye's viewport,
// projection and view adjustment)
OVR::Util::Render::Viewport         rightVP         = rightEye.VP;
OVR::Matrix4f                       rightProjection = rightEye.Projection;
OVR::Matrix4f                       rightViewAdjust = rightEye.ViewAdjust;


int riftX = 0;
int riftY = 0;