ShaderLookupDistort() : lookupTextureSize(512, 512), chroma(true) {
        OVR::Util::Render::StereoConfig stereoConfig;
        stereoConfig.SetHMDInfo(ovrHmdInfo);
        const OVR::Util::Render::DistortionConfig & distortion =
            stereoConfig.GetDistortionConfig();

        // The Rift examples use a post-distortion scale to resize the
        // image upward after distorting it because their K values have
        // been chosen such that they always result in a scale > 1.0, and
        // thus shrink the image.  However, we can correct for that by
        // finding the distortion scale the same way the OVR examples do,
        // and then pre-multiplying the constants by it.
        double postDistortionScale = 1.0 / stereoConfig.GetDistortionScale();
        for (int i = 0; i < 4; ++i) {
            K[i] = (float)(distortion.K[i] * postDistortionScale);
        }
        // red channel correction
        chromaK[0] = glm::vec2(
                         ovrHmdInfo.ChromaAbCorrection[0],
                         ovrHmdInfo.ChromaAbCorrection[1]);
        // blue channel correction
        chromaK[1] = glm::vec2(
                         ovrHmdInfo.ChromaAbCorrection[2],
                         ovrHmdInfo.ChromaAbCorrection[3]);
        for (int i = 0; i < 2; ++i) {
            chromaK[i][0] = ((1.0 - chromaK[i][0]) * postDistortionScale) + 1.0;
            chromaK[i][1] *= postDistortionScale;
        }
        lensOffset = 1.0f - (2.0f * ovrHmdInfo.LensSeparationDistance / ovrHmdInfo.HScreenSize);
    }
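Why pre-multiplying works: the Rift warp scales each radius r by the polynomial K0 + K1*r^2 + K2*r^4 + K3*r^6, so multiplying all four K values by the same factor scales the distorted radius uniformly, which is exactly what applying the post-distortion scale afterwards would do. A standalone sketch (the helper name is ours, not the SDK's):

// r' = r * (K0 + K1*r^2 + K2*r^4 + K3*r^6). Scaling every coefficient by s
// gives s * r', so the rescale can be baked into the coefficients up front.
static float distortRadius(const float K[4], float r) {
    float rSq = r * r;
    return r * (K[0] + rSq * (K[1] + rSq * (K[2] + rSq * K[3])));
}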
  DistortionHelper(const OVR::HMDInfo & ovrHmdInfo) {
    OVR::Util::Render::StereoConfig stereoConfig;
    stereoConfig.SetHMDInfo(ovrHmdInfo);
    const OVR::Util::Render::DistortionConfig & distortion =
        stereoConfig.GetDistortionConfig();

    double postDistortionScale = 1.0 / stereoConfig.GetDistortionScale();
    for (int i = 0; i < 4; ++i) {
      K[i] = (float)(distortion.K[i] * postDistortionScale);
    }
    lensOffset = distortion.XCenterOffset;
    eyeAspect = ovrHmdInfo.HScreenSize / 2.0f / ovrHmdInfo.VScreenSize;
  }
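As a quick check of the eyeAspect computation, using the DK1 fallback values quoted in Example #9 below: 0.14976 / 2 / 0.0936 = 0.8, which agrees with the per-eye pixel aspect of 640/800 on the 1280x800 panel.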
Example #3
void SyncHMDSettings( HmdRenderLoop* pRenderLoop, HMDManager* pHMDManager, VisContextCamera_cl* pLeftEyeCam, VisContextCamera_cl* pRightEyeCam )
{
  OVR::Util::Render::StereoConfig* pStereoConfig = pHMDManager->GetStereoConfig();

  if ( pStereoConfig->GetStereoMode() == OVR::Util::Render::Stereo_LeftRight_Multipass )
  {
    float fNear = g_spContextP1->GetViewProperties()->getNear();
    float fFar = g_spContextP1->GetViewProperties()->getFar();

    // Retrieve eye parameters for the left eye.
    // (As the whole thing is rather symmetric, we can use this info, invert values where appropriate, and use them for the right eye as well.)

    // Retrieve the projection matrix and copy it into a hkvMat4.
    const OVR::Util::Render::StereoEyeParams& eyeParamsLeft = pStereoConfig->GetEyeRenderParams( OVR::Util::Render::StereoEye_Left );
    hkvMat4 m4Projection( hkvNoInitialization );
    m4Projection.set( (float*)&eyeParamsLeft.Projection.M, hkvMat4::RowMajor );

    // Adjust the OculusVR projection matrix to work with Vision:
    // - Invert the third column.
    // - Adjust near and far ranges. (As of LibOVR 0.2.3 they are hardwired to 0.01 and 1000.0, respectively.)
    m4Projection.m_Column[2][0] = -m4Projection.m_Column[2][0];
    m4Projection.m_Column[2][2] = fFar / ( fFar - fNear ); // remap depth to the engine's near/far range
    m4Projection.m_Column[2][3] = 1.0f;
    m4Projection.m_Column[3][2] = ( fFar * fNear ) / ( fNear - fFar );
    g_spContextP1->SetCustomProjectionMatrix( &m4Projection );

    // Invert the element at [2][0] again to get the projection matrix for the right eye.
    m4Projection.m_Column[2][0] = -m4Projection.m_Column[2][0];
    g_spContextP2->SetCustomProjectionMatrix( &m4Projection );

    // Retrieve information for adjusting the eye position based on eye separation.
    float fHalfEyeSeparation = eyeParamsLeft.ViewAdjust.M[ 0 ][ 3 ];   // Note: LibOVR stores matrices in row-major order.

    // This is the minimal head model that's used in the OculusVR samples as well.
    const float fEyeProtrusion = 9.0f;                        // That would be 9cm.
    const float fEyeHeight = 15.0f;                           // That's 15cm.
    const float fEyeOffset = fHalfEyeSeparation * 100.0f;     // Converting LibOVR's data from meters to centimeters. Note that local y points to the user's left.
    pLeftEyeCam->SetLocalPosition( fEyeProtrusion, fEyeOffset, fEyeHeight );
    pRightEyeCam->SetLocalPosition( fEyeProtrusion, -fEyeOffset, fEyeHeight );

    // Determine post-processing parameters.
    const OVR::Util::Render::DistortionConfig* pDistortion = eyeParamsLeft.pDistortion;
    hkvVec4 v4LensCenter_ScreenCenter( 0.5f + pDistortion->XCenterOffset * 0.5f, 0.5f, 0.5f, 0.5f );
    float fScaleFactor = 1.0f / pDistortion->Scale;
    float fAspectRatio = 0.5f * Vision::Video.GetXRes() / float( Vision::Video.GetYRes() );
    hkvVec4 v4Scale_ScaleIn( 0.5f * fScaleFactor, 0.5f * fScaleFactor * fAspectRatio, 2.0f, 2.0f / fAspectRatio );
    hkvVec4 v4HmdWarpParameters( pDistortion->K[ 0 ], pDistortion->K[ 1 ], pDistortion->K[ 2 ], pDistortion->K[ 3 ] );
    hkvVec4 v4ChromaticAberration( pDistortion->ChromaticAberration[ 0 ], pDistortion->ChromaticAberration[ 1 ], pDistortion->ChromaticAberration[ 2 ], pDistortion->ChromaticAberration[ 3 ] );
    pRenderLoop->SetPostProcessParameters( v4LensCenter_ScreenCenter, v4Scale_ScaleIn, v4HmdWarpParameters, v4ChromaticAberration );
  }
}
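A quick way to verify the near/far terms patched in above (standalone sketch, not Vision API): with A = fFar / (fFar - fNear) and B = (fFar * fNear) / (fNear - fFar), the post-divide depth is (A*z + B) / z, which evaluates to 0 at z = fNear and to 1 at z = fFar.

static float projectedDepth( float z, float fNear, float fFar )
{
  float A = fFar / ( fFar - fNear );             // the [2][2] term written above
  float B = ( fFar * fNear ) / ( fNear - fFar ); // the [3][2] term written above
  return ( A * z + B ) / z;                      // w == z, thanks to the 1.0f at [2][3]
}
// projectedDepth( fNear, ... ) == 0.0f and projectedDepth( fFar, ... ) == 1.0f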
Example #4
void toggleRenderScale() {
    if(renderScale != 1) {
        setRenderScale(1);
    } else {
        setRenderScale(stereo.GetDistortionScale());
    }
}
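setRenderScale() itself is not shown in this listing; a plausible implementation, assuming the width/height and texture globals used in Examples #7 and #8, would resize the offscreen target and flag the render loop to rebuild its FBO:

void setRenderScale(float scale) {
    renderScale = scale;
    textureWidth = width * renderScale;    // keep full detail in the pre-warp image
    textureHeight = height * renderScale;
    renderScaleChanged = true;             // tell the render loop to recreate its FBO
}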
  SimpleScene() : eyes({ { PerEyeArg(LEFT), PerEyeArg(RIGHT) } }) {
    eyeHeight = 1.0f;
    applyProjectionOffset = true;
    applyModelviewOffset = true;
    {
      OVR::Ptr<OVR::ProfileManager> profileManager = *OVR::ProfileManager::Create();
      OVR::Ptr<OVR::Profile> profile = *(profileManager->GetDeviceDefaultProfile(OVR::ProfileType::Profile_RiftDK1));
      ipd = profile->GetIPD();
      eyeHeight = profile->GetEyeHeight();
      glm::mat4 modelviewOffset = glm::translate(glm::mat4(),
        glm::vec3(ipd / 2.0f, 0, 0));
      eyes[LEFT].modelviewOffset = modelviewOffset;
      eyes[RIGHT].modelviewOffset = glm::inverse(modelviewOffset);
    }

    if (ovrManager) {
      ovrSensor =
        *ovrManager->EnumerateDevices<OVR::SensorDevice>().CreateDevice();
      if (ovrSensor) {
        sensorFusion.AttachToSensor(ovrSensor);
      }
    }
    if (sensorFusion.IsAttachedToSensor()) {
      SAY("Attached");
    } else {
      SAY("Attach failed");
    }

    {
      OVR::HMDInfo hmdInfo;
      Rift::getHmdInfo(ovrManager, hmdInfo);
      OVR::Util::Render::StereoConfig config;
      config.SetHMDInfo(hmdInfo);
      gl::Stacks::projection().top() = 
        glm::perspective(config.GetYFOVRadians(), eyeAspect, 0.01f, 1000.0f);
      glm::mat4 projectionOffset = glm::translate(glm::mat4(),
        glm::vec3(config.GetProjectionCenterOffset(), 0, 0));
      eyes[LEFT].projectionOffset = projectionOffset;
      eyes[RIGHT].projectionOffset = glm::inverse(projectionOffset);
    }

    glm::vec3 playerPosition(0, eyeHeight, ipd * 4.0f);
    player = glm::inverse(glm::lookAt(playerPosition, glm::vec3(0, eyeHeight, 0), GlUtils::Y_AXIS));
    CameraControl::instance().enableHydra(true);
  }
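Because both offsets above are pure translations, glm::inverse() simply negates the translation vector for the other eye. A minimal sketch of how these per-eye matrices are typically consumed at draw time (hypothetical loop; drawScene() is an assumption, while player and eyes[] come from this example):

  void drawFrame() {
    glm::mat4 baseProjection = gl::Stacks::projection().top();
    for (int eye = 0; eye < 2; ++eye) {  // 0 == LEFT, 1 == RIGHT
      glm::mat4 projection = eyes[eye].projectionOffset * baseProjection;
      glm::mat4 modelview = eyes[eye].modelviewOffset * glm::inverse(player);
      drawScene(projection, modelview);
    }
  }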
  SimpleScene() {
    OVR::Ptr<OVR::ProfileManager> profileManager =
      *OVR::ProfileManager::Create();
    OVR::Ptr<OVR::Profile> profile =
      *(profileManager->GetDeviceDefaultProfile(
      OVR::ProfileType::Profile_RiftDK1));
    ipd = profile->GetIPD();
    eyeHeight = profile->GetEyeHeight();

    // setup the initial player location
    player = glm::inverse(glm::lookAt(
      glm::vec3(0, eyeHeight, ipd * 4.0f),
      glm::vec3(0, eyeHeight, 0),
      GlUtils::Y_AXIS));

    OVR::Util::Render::StereoConfig ovrStereoConfig;
    ovrStereoConfig.SetHMDInfo(ovrHmdInfo);

    gl::Stacks::projection().top() =
      glm::perspective(ovrStereoConfig.GetYFOVRadians(),
      glm::aspect(eyeSize), 0.01f, 1000.0f);

    eyes[LEFT].viewportPosition =
      glm::uvec2(0, 0);
    eyes[LEFT].modelviewOffset = glm::translate(glm::mat4(),
      glm::vec3(ipd / 2.0f, 0, 0));
    eyes[LEFT].projectionOffset = glm::translate(glm::mat4(),
      glm::vec3(ovrStereoConfig.GetProjectionCenterOffset(), 0, 0));

    eyes[RIGHT].viewportPosition =
      glm::uvec2(hmdNativeResolution.x / 2, 0);
    eyes[RIGHT].modelviewOffset = glm::translate(glm::mat4(),
      glm::vec3(-ipd / 2.0f, 0, 0));
    eyes[RIGHT].projectionOffset = glm::translate(glm::mat4(),
      glm::vec3(-ovrStereoConfig.GetProjectionCenterOffset(), 0, 0));

    distortionScale = ovrStereoConfig.GetDistortionScale();

    ovrSensor =
      *ovrManager->EnumerateDevices<OVR::SensorDevice>().
      CreateDevice();
    if (ovrSensor) {
      sensorFusion.AttachToSensor(ovrSensor);
    }

    if (!sensorFusion.IsAttachedToSensor()) {
      SAY_ERR("Could not attach to sensor device");
    }
  }
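At render time the viewportPosition values above select each half of the panel before drawing; a minimal sketch (assuming OpenGL, with each eye getting half the native width):

  glViewport(eyes[eye].viewportPosition.x, eyes[eye].viewportPosition.y,
             hmdNativeResolution.x / 2, hmdNativeResolution.y);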
Example #7
void initRift() {
    OVR::System::Init(OVR::Log::ConfigureDefaultLog(OVR::LogMask_All));
    
    pFusionResult = new OVR::SensorFusion();
    pManager = *OVR::DeviceManager::Create();
    
    //pManager->SetMessageHandler(this);
    
    pHMD = *pManager->EnumerateDevices<OVR::HMDDevice>().CreateDevice();
    
    stereo.Set2DAreaFov(OVR::DegreeToRad(50.0f));
    stereo.SetFullViewport(OVR::Util::Render::Viewport(0,0, width, height));
    stereo.SetStereoMode(OVR::Util::Render::Stereo_LeftRight_Multipass);
    stereo.SetDistortionFitPointVP(-1.0f, 0.0f);
    renderScale = stereo.GetDistortionScale();
    
    if (pHMD)
    {
        pSensor = *pHMD->GetSensor();
        
        InfoLoaded = pHMD->GetDeviceInfo(&Info);
        
        strncpy(Info.DisplayDeviceName, RiftMonitorName, 32);
        
        RiftDisplayId = Info.DisplayId;
        
        EyeDistance = Info.InterpupillaryDistance;
        for(int i = 0; i < 4; ++i) {
            DistortionK[i] = Info.DistortionK[i];
            DistortionChromaticAberration[i] = Info.ChromaAbCorrection[i];
        }
        
        stereo.SetHMDInfo(Info);
        stereo.SetDistortionFitPointVP(-1.0f, 0.0f);
        renderScale = stereo.GetDistortionScale();
    }
    else
    {
        pSensor = *pManager->EnumerateDevices<OVR::SensorDevice>().CreateDevice();
    }
    
    textureWidth = width * renderScale;
    textureHeight = height * renderScale;
    
    leftEye  = stereo.GetEyeRenderParams(OVR::Util::Render::StereoEye_Left);
    rightEye = stereo.GetEyeRenderParams(OVR::Util::Render::StereoEye_Right);
    
    // Left eye rendering parameters
    leftVP         = leftEye.VP;
    leftProjection = leftEye.Projection;
    leftViewAdjust = leftEye.ViewAdjust;
    
    // Right eye rendering parameters
    rightVP         = rightEye.VP;
    rightProjection = rightEye.Projection;
    rightViewAdjust = rightEye.ViewAdjust;
    
    if (pSensor)
    {
        pFusionResult->AttachToSensor(pSensor);
        pFusionResult->SetPredictionEnabled(true);
        float motionPred = pFusionResult->GetPredictionDelta();
        if(motionPred < 0) motionPred = 0; // clamp the prediction interval to a non-negative value
        pFusionResult->SetPrediction(motionPred);
        
        if(InfoLoaded) {
            riftConnected = true;
            
            riftX = Info.DesktopX;
            riftY = Info.DesktopY;
            
            riftResolutionX = Info.HResolution;
            riftResolutionY = Info.VResolution;
        }
    }
    
#ifdef WIN32
    getRiftDisplay();
#endif
}
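Once the fusion object is attached, the per-frame head orientation is read back from it. A sketch following the SDK 0.2.x sample pattern (the Euler axis order is the one the samples use):

void pollRiftOrientation() {
    if (pSensor) {
        OVR::Quatf orientation = pFusionResult->GetPredictedOrientation();
        float yaw, pitch, roll;
        orientation.GetEulerAngles<OVR::Axis_Y, OVR::Axis_X, OVR::Axis_Z>(&yaw, &pitch, &roll);
        // Feed yaw/pitch/roll (radians) into the camera transform here.
    }
}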
Example #8
#include "Rift.h"

#include "../ibex.h"

OVR::Ptr<OVR::DeviceManager>	pManager = 0;
OVR::Ptr<OVR::HMDDevice>	pHMD = 0;
OVR::Util::Render::StereoConfig stereo;
OVR::Ptr<OVR::SensorDevice>	pSensor = 0;
OVR::SensorFusion*		pFusionResult = 0;
OVR::HMDInfo			Info;
bool				InfoLoaded = false;
bool				riftConnected = false;
float               renderScale;
bool                renderScaleChanged = false;

OVR::Util::Render::StereoEyeParams  leftEye  = stereo.GetEyeRenderParams(OVR::Util::Render::StereoEye_Left);
OVR::Util::Render::StereoEyeParams  rightEye = stereo.GetEyeRenderParams(OVR::Util::Render::StereoEye_Right);
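// Note: the two initializers above run at static-initialization time, before
// initRift() has called stereo.SetHMDInfo(), so they only capture StereoConfig
// defaults; initRift() reassigns them once the HMD info is loaded.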

// Left eye rendering parameters
OVR::Util::Render::Viewport         leftVP         = leftEye.VP;
OVR::Matrix4f                       leftProjection = leftEye.Projection;
OVR::Matrix4f                       leftViewAdjust = leftEye.ViewAdjust;

// Right eye rendering parameters
OVR::Util::Render::Viewport         rightVP         = rightEye.VP;
OVR::Matrix4f                       rightProjection = rightEye.Projection;
OVR::Matrix4f                       rightViewAdjust = rightEye.ViewAdjust;


int riftX = 0;
int riftY = 0;
Example #9
int main(int argc, char* argv[]) {
  OVR::System::Init();

  ovrManager = *OVR::DeviceManager::Create();
  {
    OVR::Ptr<OVR::HMDDevice> ovrHmd = *ovrManager->EnumerateDevices<OVR::HMDDevice>().CreateDevice();
    if (ovrHmd) {
      ovrHmd->GetDeviceInfo(&ovrHmdInfo);
      ovrSensor = *ovrHmd->GetSensor();
    } else {
        ovrHmdInfo.HResolution = 1280;
        ovrHmdInfo.VResolution = 800;
        ovrHmdInfo.HScreenSize = 0.14976f;
        ovrHmdInfo.VScreenSize = 0.09360f;
        ovrHmdInfo.VScreenCenter = 0.04680f;
        ovrHmdInfo.EyeToScreenDistance = 0.04100f;
        ovrHmdInfo.LensSeparationDistance = 0.06350f;
        ovrHmdInfo.InterpupillaryDistance = 0.06400f;
        ovrHmdInfo.DistortionK[0] = 1;
        ovrHmdInfo.DistortionK[1] = 0.22f;
        ovrHmdInfo.DistortionK[2] = 0.24f;
        ovrHmdInfo.DistortionK[3] = 0;
        ovrHmdInfo.DesktopX = 100;
        ovrHmdInfo.DesktopY = 100;
        ovrHmdInfo.ChromaAbCorrection[0] = 0.99600f;
        ovrHmdInfo.ChromaAbCorrection[1] = -0.00400f;
        ovrHmdInfo.ChromaAbCorrection[2] = 1.01400f;
        ovrHmdInfo.ChromaAbCorrection[3] = 0;
    }
  }

  ovrStereoConfig.SetHMDInfo(ovrHmdInfo);
  if (!ovrSensor) {
    ovrSensor = *ovrManager->EnumerateDevices<OVR::SensorDevice>().CreateDevice();
  }

  ovrSensorFusion = new OVR::SensorFusion();
  if (ovrSensor) {
    ovrSensorFusion->AttachToSensor(ovrSensor);
  }


  setlocale(LC_ALL, "");
  setlocale(LC_NUMERIC, "C");
  fprintf(stdout, "\nDone\n");

//  bindtextdomain(PACKAGE, LOCALEDIR);
//  bind_textdomain_codeset(PACKAGE, "UTF-8");
//  textdomain (PACKAGE);

  CONFIG_DATA_DIR = getenv("CELESTIA_HOME");
  if (CONFIG_DATA_DIR == NULL || Directory::chdir(CONFIG_DATA_DIR) == -1) {
    cerr << "Cannot chdir to '"
        << (CONFIG_DATA_DIR ? CONFIG_DATA_DIR : "(CELESTIA_HOME unset)")
        << "', probably due to improper installation\n";
  }
  // Not ready to render yet
  ready = false;
  char c;
  int startfile = 0;
  //while ((c = getopt(argc, argv, "v::f")) > -1) {
  //  if (c == '?') {
  //    cout << "Usage: celestia [-v] [-f <filename>]\n";
  //    exit(1);
  //  }
  //  else if (c == 'v') {
  //    if (optarg)
  //      SetDebugVerbosity(atoi(optarg));
  //    else
  //      SetDebugVerbosity(0);
  //  }
  //  else if (c == 'f') {
  //    startfile = 1;
  //  }
  //}

  appCore = new CelestiaCore();
  if (appCore == NULL) {
    cerr << "Out of memory.\n";
    return 1;
  }
  static SimpleNotifier notifier;

  if (!appCore->initSimulation(NULL, NULL, &notifier)) {
    return 1;
  }
  appCore->getSimulation()->getActiveObserver()->setFOV(ovrStereoConfig.GetYFOVDegrees());

  if (0 != SDL_Init(SDL_INIT_EVERYTHING)) {
    cerr << endl << "Unable to initialize SDL:  " << SDL_GetError()
        << endl;
    return 1;
  }
  SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
  SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);

  sdlWindow = SDL_CreateWindow(AppName,
      ovrHmdInfo.DesktopX, ovrHmdInfo.DesktopY,
      ovrHmdInfo.HResolution, ovrHmdInfo.VResolution,
      SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN | SDL_WINDOW_BORDERLESS);
//  SDL_SetWindowPosition();
  appCore->resize(ovrHmdInfo.HResolution, ovrHmdInfo.VResolution);
  sdlGlContext = SDL_GL_CreateContext(sdlWindow);
  SDL_GL_SetSwapInterval(1);
  glewExperimental = GL_TRUE;
  GLenum glewErr = glewInit();
  if (GLEW_OK != glewErr) {
    cerr << "Unable to initialize GLEW: " << glewGetErrorString(glewErr) << endl;
    return 1;
  }

  // GL should be all set up, now initialize the renderer.
  appCore->initRenderer();

  // Set the simulation starting time to the current system time
  time_t curtime = time(NULL);
  appCore->start(
      (double) curtime / 86400.0 + (double) astro::Date(1970, 1, 1));
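  // Note on the arithmetic above: curtime / 86400.0 converts seconds since the
  // Unix epoch to days, and casting astro::Date(1970, 1, 1) to double yields
  // the epoch's Julian date, so the sum is the current time as a Julian date.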
  localtime(&curtime); // Only doing this to set timezone as a side effect
  appCore->setTimeZoneBias(-timezone);
  appCore->setTimeZoneName(tzname[daylight ? 0 : 1]);

  if (startfile == 1) {
    if (argv[argc - 1][0] == '-') {
      cout << "Missing Filename.\n";
      return 1;
    }

    cout << "*** Using CEL File: " << argv[argc - 1] << endl;
    appCore->runScript(argv[argc - 1]);
  }
  ready = true;
  SDL_Event event;
  while (!quit) {
    if (SDL_PollEvent(&event)) {
      switch (event.type) {
      case SDL_WINDOWEVENT:
        onWindowEvent(event.window);
        break;
      case SDL_KEYDOWN:
      case SDL_KEYUP:
        onKeyboardEvent(event.key);
        break;
      case SDL_MOUSEMOTION:
        onMouseMotion(event.motion);
        break;
      case SDL_MOUSEBUTTONDOWN:
      case SDL_MOUSEBUTTONUP:
        onMouseButton(event.button);
        break;
      case SDL_MOUSEWHEEL:
        onMouseWheel(event.wheel);
        break;
      }
    }
    else {
      appCore->tick();
      if (ready) {
        appCore->draw();
        SDL_GL_SwapWindow(sdlWindow);
      }
    }
  }
  SDL_DestroyWindow(sdlWindow);
  SDL_Quit();
  delete appCore;
  appCore = NULL;
  delete ovrSensorFusion;
  ovrManager.Clear();
  OVR::System::Destroy();
  return 0;
}
  HelloRift() : useTracker(false) {
    ovrManager = *OVR::DeviceManager::Create();
    if (!ovrManager) {
      FAIL("Unable to initialize OVR Device Manager");
    }

    OVR::Ptr<OVR::HMDDevice> ovrHmd =
        *ovrManager->EnumerateDevices<OVR::HMDDevice>().CreateDevice();
    OVR::HMDInfo hmdInfo;
    if (ovrHmd) {
      ovrHmd->GetDeviceInfo(&hmdInfo);
      ovrSensor = *ovrHmd->GetSensor();
    } else {
      Rift::getDk1HmdValues(hmdInfo);
    }
    ovrHmd.Clear();

    if (!ovrSensor) {
      ovrSensor =
          *ovrManager->EnumerateDevices<OVR::SensorDevice>().CreateDevice();
    }

    if (ovrSensor) {
      sensorFusion.AttachToSensor(ovrSensor);
      useTracker = sensorFusion.IsAttachedToSensor();
    }

    ipd = hmdInfo.InterpupillaryDistance;
    distortionCoefficients = glm::vec4(
      hmdInfo.DistortionK[0], hmdInfo.DistortionK[1],
      hmdInfo.DistortionK[2], hmdInfo.DistortionK[3]);
    windowPosition = glm::ivec2(hmdInfo.DesktopX, hmdInfo.DesktopY);
    // The HMDInfo gives us the position of the Rift in desktop 
    // coordinates as well as the native resolution of the Rift 
    // display panel, but NOT the current resolution of the signal
    // being sent to the Rift.  
    GLFWmonitor * hmdMonitor = 
      GlfwApp::getMonitorAtPosition(windowPosition);
    if (!hmdMonitor) {
      FAIL("Unable to find Rift display");
    }

    // To find the current resolution, we query the video mode of that monitor.
    const GLFWvidmode * videoMode = 
      glfwGetVideoMode(hmdMonitor);
    windowSize = glm::ivec2(videoMode->width, videoMode->height);

    // The eyeSize is used to help us set the viewport when rendering to 
    // each eye.  This should be based off the video mode that is / will 
    // be sent to the Rift
    // We also use the eyeSize to set up the framebuffer which will be 
    // used to render the scene to a texture for distortion and display 
    // on the Rift.  The Framebuffer resolution does not have to match 
    // the Physical display resolution in either aspect ratio or 
    // resolution, but using a resolution less than the native pixels can
    // negatively impact image quality.
    eyeSize = windowSize;
    eyeSize.x /= 2;

    eyeArgs[1].viewportLocation = glm::ivec2(eyeSize.x, 0);
    eyeArgs[0].viewportLocation = glm::ivec2(0, 0);

    // Notice that the eyeAspect we calculate is based on the physical 
    // display resolution, regardless of the current resolution being 
    // sent to the Rift.  The Rift scales the image sent to it to fit
    // the display panel, so a 1920x1080 image (with an aspect ratio of
    // 16:9) will be displayed with the aspect ratio of the Rift display
    // (16:10 for the DK1).  This means that if you're cloning a
    // 1920x1080 output to the Rift and a conventional monitor of those
    // dimensions, the conventional monitor's image will appear a bit
    // squished.  This is expected and correct.
    eyeAspect = (float)(hmdInfo.HResolution / 2) /
      (float)hmdInfo.VResolution;

    // Some of the values needed for rendering or distortion take some
    // calculation to derive, but the OVR SDK includes a utility class
    // that does this, so we use it here to get the projection offset and
    // the post-distortion scale.
    OVR::Util::Render::StereoConfig stereoConfig;
    stereoConfig.SetHMDInfo(hmdInfo);
    // The distortion has an overall shrinking effect, so we compensate with the inverse scale.
    postDistortionScale = 1.0f / stereoConfig.GetDistortionScale();
    // The projection offset and lens offset are both in normalized 
    // device coordinates, i.e. [-1, 1] on both the X and Y axis
    glm::vec3 projectionOffsetVector =
        glm::vec3(stereoConfig.GetProjectionCenterOffset() / 2.0f, 0, 0);
    eyeArgs[0].projectionOffset =
        glm::translate(glm::mat4(), projectionOffsetVector);
    eyeArgs[1].projectionOffset =
        glm::translate(glm::mat4(), -projectionOffsetVector);

    eyeArgs[0].lensOffset =
      1.0f - (2.0f * hmdInfo.LensSeparationDistance / hmdInfo.HScreenSize);
    eyeArgs[1].lensOffset = -eyeArgs[0].lensOffset;


    // The IPD and the modelview offset are in meters.  If you wish to have a 
    // different unit for the scale of your world coordinates, you would need
    // to apply the conversion factor here.
    glm::vec3 modelviewOffsetVector =
        glm::vec3(stereoConfig.GetIPD() / 2.0f, 0, 0);
    eyeArgs[0].modelviewOffset =
        glm::translate(glm::mat4(), modelviewOffsetVector);
    eyeArgs[1].modelviewOffset =
        glm::translate(glm::mat4(), -modelviewOffsetVector);


    gl::Stacks::projection().top() = glm::perspective(
        stereoConfig.GetYFOVDegrees() * DEGREES_TO_RADIANS,
        eyeAspect,
        Rift::ZNEAR, Rift::ZFAR);
  }
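As a worked check of the lensOffset formula, using the DK1 fallback values from Example #9: 1 - 2 * 0.0635 / 0.14976 ≈ 0.152, i.e. each lens center sits about 15% of a half-viewport away from the center of its eye's image, toward the nose, which is why the right eye simply negates the value.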