Example #1
void initRift() {
  OVR::System::Init(OVR::Log::ConfigureDefaultLog(OVR::LogMask_All));

  pManager = *OVR::DeviceManager::Create();

  //pManager->SetMessageHandler(this);

  pHMD = *pManager->EnumerateDevices<OVR::HMDDevice>().CreateDevice();

  if (pHMD)
    {
      pSensor = *pHMD->GetSensor();

      InfoLoaded = pHMD->GetDeviceInfo(&Info);

      // Copy the display name out for later monitor lookup
      strncpy(RiftMonitorName, Info.DisplayDeviceName, 32);

      RiftDisplayId = Info.DisplayId;

      EyeDistance = Info.InterpupillaryDistance;
      DistortionK[0] = Info.DistortionK[0];
      DistortionK[1] = Info.DistortionK[1];
      DistortionK[2] = Info.DistortionK[2];
      DistortionK[3] = Info.DistortionK[3];
    }
  else
    {
      pSensor = *pManager->EnumerateDevices<OVR::SensorDevice>().CreateDevice();
    }

  if (pSensor)
    {
      FusionResult.AttachToSensor(pSensor);
      FusionResult.SetPredictionEnabled(true);
      float motionPred = FusionResult.GetPredictionDelta(); // adjust in 0.01 increments
      if(motionPred < 0) motionPred = 0;
      FusionResult.SetPrediction(motionPred);

      if (InfoLoaded)
        {
          riftConnected = true;

          riftX = Info.DesktopX;
          riftY = Info.DesktopY;

          riftResolutionX = Info.HResolution;
          riftResolutionY = Info.VResolution;
        }
    }

#ifdef WIN32
  getRiftDisplay();
#endif
}
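For context, initRift() reads and writes several members that the snippet never declares. Below is a minimal sketch of plausible declarations, inferred from how the names are used above; the exact types are an assumption based on the 0.2.x-era OVR C++ SDK.

// Hypothetical declarations for the identifiers initRift() assumes;
// types follow the pre-1.0 (0.2.x) OVR C++ SDK.
OVR::Ptr<OVR::DeviceManager> pManager;
OVR::Ptr<OVR::HMDDevice>     pHMD;
OVR::Ptr<OVR::SensorDevice>  pSensor;
OVR::SensorFusion            FusionResult;
OVR::HMDInfo                 Info;
bool  InfoLoaded    = false;
bool  riftConnected = false;
char  RiftMonitorName[32];
long  RiftDisplayId = 0;
float EyeDistance   = 0.064f;
float DistortionK[4];
int   riftX = 0, riftY = 0;
int   riftResolutionX = 0, riftResolutionY = 0;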

Example #2
  SensorFusionPredictionExample() {
    ovrManager = *OVR::DeviceManager::Create();
    if (!ovrManager) {
      FAIL("Unable to create device manager");
    }

    ovrSensor = *ovrManager->EnumerateDevices<OVR::SensorDevice>().CreateDevice();
    if (!ovrSensor) {
      FAIL("Unable to locate Rift sensor device");
    }

    ovrSensorFusion.SetGravityEnabled(true);
    ovrSensorFusion.SetPredictionEnabled(true);
    ovrSensorFusion.SetYawCorrectionEnabled(true);
    ovrSensorFusion.AttachToSensor(ovrSensor);
  }
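With gravity correction, prediction, and yaw correction enabled and the sensor attached, the fused orientation can be sampled once per frame. A minimal usage sketch (the surrounding loop is assumed, not shown in the original; both accessors appear in the update() example further below):

// Hypothetical per-frame sampling of the fused head orientation.
OVR::Quatf actual    = ovrSensorFusion.GetOrientation();
OVR::Quatf predicted = ovrSensorFusion.GetPredictedOrientation();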

Example #3
  virtual void onKey(int key, int scancode, int action, int mods) {
    if (GLFW_PRESS != action) {
      return;
    }

    switch (key) {
    case GLFW_KEY_R:
      ovrSensorFusion.Reset();
      return;
    case GLFW_KEY_UP:
      ovrSensorFusion.SetPrediction(ovrSensorFusion.GetPredictionDelta() * 1.5f, true);
      return;
    case GLFW_KEY_DOWN:
      ovrSensorFusion.SetPrediction(ovrSensorFusion.GetPredictionDelta() / 1.5f, true);
      return;
    }
    GlfwApp::onKey(key, scancode, action, mods);
  }
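Note that this handler scales the prediction interval up or down by 1.5x with no upper clamp, so very large deltas will visibly overshoot. If you want feedback while tuning, one hedged addition is to log the value after each change (plain printf here, since the rendering helpers used elsewhere in these examples are not available in this class):

// Hypothetical logging sketch (requires <cstdio>);
// GetPredictionDelta() returns seconds.
printf("Prediction delta: %0.2f ms\n",
       ovrSensorFusion.GetPredictionDelta() * 1000.0f);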

Example #4
void OculusVRSensorData::setData(const OVR::SensorFusion& data, const F32& maxAxisRadius)
{
   // Sensor rotation
   OVR::Quatf orientation;
   if(data.GetPredictionDelta() > 0)
   {
      orientation = data.GetPredictedOrientation();
   }
   else
   {
      orientation = data.GetOrientation();
   }
   OVR::Matrix4f orientMat(orientation);
   OculusVRUtil::convertRotation(orientMat.M, mRot);
   mRotQuat.set(mRot);

   // Sensor rotation in Euler format
   OculusVRUtil::convertRotation(orientation, mRotEuler);

   // Sensor rotation as axis
   OculusVRUtil::calculateAxisRotation(mRot, maxAxisRadius, mRotAxis);

   mDataSet = true;
}
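OculusVRUtil::convertRotation() is project-specific and not shown in this listing. For reference, the SDK's quaternion type can produce Euler angles directly; a sketch using the stock accessor follows (the axis order here is an assumption, so pick whatever order your engine expects):

// Extract yaw/pitch/roll in radians straight from the OVR quaternion.
float yaw, pitch, roll;
orientation.GetEulerAngles<OVR::Axis_Y, OVR::Axis_X, OVR::Axis_Z>(
    &yaw, &pitch, &roll);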

Example #5
  void onKey(int key, int scancode, int action, int mods) {
    if (GLFW_PRESS != action) {
      return;
    }

    switch (key) {
    case GLFW_KEY_R:
      sensorFusion.Reset();
      break;

    case GLFW_KEY_T:
      useTracker = !useTracker;
      break;

    default:
      GlfwApp::onKey(key, scancode, action, mods);
      break;
    }
  }

Example #6
  void draw() {
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    
    std::string message = Platform::format(
      "Prediction Delta: %0.2f ms\n",
      sensorFusion.GetPredictionDelta() * 1000.0f);
    renderStringAt(message, -0.9f, 0.9f);

    gl::MatrixStack & mv = gl::Stacks::modelview();
    gl::MatrixStack & pr = gl::Stacks::projection();

    // Draw the predicted orientation at full size and opacity...
    mv.push().rotate(predictedOrientation);
    GlUtils::renderArtificialHorizon();
    mv.pop();

    // ...and the current (unpredicted) orientation slightly larger, with
    // the 0.3f argument presumably fading it, so the two can be compared.
    mv.push().rotate(currentOrientation);
    mv.scale(1.25f);
    GlUtils::renderArtificialHorizon(0.3f);
    mv.pop();
  }

Example #7
  void draw() {
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    
    std::string message = Platform::format(
        "Prediction Delta: %0.2f ms\n",
        ovrSensorFusion.GetPredictionDelta() * 1000.0f);
    renderStringAt(message, -0.9f, 0.9f);

    gl::MatrixStack & mv = gl::Stacks::modelview();
    glm::mat4 glm_mat;

    // Render the predicted orientation at full size...
    glm_mat = glm::make_mat4((float*) predicted.M);
    mv.push().transform(glm_mat);
    GlUtils::renderArtificialHorizon();
    mv.pop();

    // ...and the actual orientation slightly larger, with the 0.3f
    // argument presumably fading it, for side-by-side comparison.
    glm_mat = glm::make_mat4((float*) actual.M);
    mv.push().transform(glm_mat);
    mv.scale(1.25f);
    GlUtils::renderArtificialHorizon(0.3f);
    mv.pop();
  }

Example #8
  void update() {
    actual = ovrSensorFusion.GetOrientation();
    predicted = ovrSensorFusion.GetPredictedOrientation();
  }

Example #9
  HelloRift() : useTracker(false) {
    ovrManager = *OVR::DeviceManager::Create();
    if (!ovrManager) {
      FAIL("Unable to initialize OVR Device Manager");
    }

    OVR::Ptr<OVR::HMDDevice> ovrHmd =
        *ovrManager->EnumerateDevices<OVR::HMDDevice>().CreateDevice();
    OVR::HMDInfo hmdInfo;
    if (ovrHmd) {
      ovrHmd->GetDeviceInfo(&hmdInfo);
      ovrSensor = *ovrHmd->GetSensor();
    } else {
      Rift::getDk1HmdValues(hmdInfo);
    }
    ovrHmd.Clear();

    if (!ovrSensor) {
      ovrSensor =
          *ovrManager->EnumerateDevices<OVR::SensorDevice>().CreateDevice();
    }

    if (ovrSensor) {
      sensorFusion.AttachToSensor(ovrSensor);
      useTracker = sensorFusion.IsAttachedToSensor();
    }

    ipd = hmdInfo.InterpupillaryDistance;
    distortionCoefficients = glm::vec4(
      hmdInfo.DistortionK[0], hmdInfo.DistortionK[1],
      hmdInfo.DistortionK[2], hmdInfo.DistortionK[3]);
    windowPosition = glm::ivec2(hmdInfo.DesktopX, hmdInfo.DesktopY);
    // The HMDInfo gives us the position of the Rift in desktop 
    // coordinates as well as the native resolution of the Rift 
    // display panel, but NOT the current resolution of the signal
    // being sent to the Rift.  
    GLFWmonitor * hmdMonitor = 
      GlfwApp::getMonitorAtPosition(windowPosition);
    if (!hmdMonitor) {
      FAIL("Unable to find Rift display");
    }

    // For the current resolution we query the GLFW monitor's video mode
    const GLFWvidmode * videoMode = 
      glfwGetVideoMode(hmdMonitor);
    windowSize = glm::ivec2(videoMode->width, videoMode->height);

    // The eyeSize is used to help us set the viewport when rendering to 
    // each eye.  This should be based on the video mode that is / will 
    // be sent to the Rift.
    // We also use the eyeSize to set up the framebuffer which will be 
    // used to render the scene to a texture for distortion and display 
    // on the Rift.  The framebuffer resolution does not have to match 
    // the physical display resolution in either aspect ratio or 
    // pixel count, but using a resolution lower than the native one can 
    // negatively impact image quality.
    eyeSize = windowSize;
    eyeSize.x /= 2;
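    // Worked example (hypothetical numbers): a 1920x1080 signal yields an
    // eyeSize of 960x1080 per eye; the DK1's native 1280x800 panel would
    // yield 640x800.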

    eyeArgs[1].viewportLocation = glm::ivec2(eyeSize.x, 0);
    eyeArgs[0].viewportLocation = glm::ivec2(0, 0);

    // Notice that the eyeAspect we calculate is based on the physical 
    // display resolution, regardless of the current resolution being 
    // sent to the Rift.  The Rift scales the image sent to it to fit
    // the display panel, so a 1920x1080 image (with an aspect ratio of 
    // 16:9) will be displayed with the aspect ratio of the Rift display
    // (16:10 for the DK1).  This means that if you're cloning a 
    // 1920x1080 output to the Rift and a conventional monitor of those 
    // dimensions, the conventional monitor's image will appear a bit 
    // squished.  This is expected and correct.
    eyeAspect = (float)(hmdInfo.HResolution / 2) /
      (float)hmdInfo.VResolution;
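    // Worked example: for the DK1's 1280x800 panel this is
    // (1280 / 2) / 800 = 0.8, regardless of the signal resolution.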

    // Some of the values needed for rendering and distortion require 
    // additional calculation, but the OVR SDK includes a utility class 
    // that performs it, so we use it here to get the projection offset 
    // and the post-distortion scale.
    OVR::Util::Render::StereoConfig stereoConfig;
    stereoConfig.SetHMDInfo(hmdInfo);
    // The distortion has an overall shrinking effect on the image.  
    postDistortionScale = 1.0f / stereoConfig.GetDistortionScale();
    // The projection offset and lens offset are both in normalized 
    // device coordinates, i.e. [-1, 1] on both the X and Y axis
    glm::vec3 projectionOffsetVector =
        glm::vec3(stereoConfig.GetProjectionCenterOffset() / 2.0f, 0, 0);
    eyeArgs[0].projectionOffset =
        glm::translate(glm::mat4(), projectionOffsetVector);
    eyeArgs[1].projectionOffset =
        glm::translate(glm::mat4(), -projectionOffsetVector);
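    // For reference, the SDK derives this offset from the lens placement,
    // roughly: 4 * (HScreenSize / 4 - LensSeparationDistance / 2) / HScreenSize
    // (per the 0.2.x SDK's stereo rendering documentation).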

    eyeArgs[0].lensOffset =
      1.0f - (2.0f * hmdInfo.LensSeparationDistance / hmdInfo.HScreenSize);
    eyeArgs[1].lensOffset = -eyeArgs[0].lensOffset;
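    // Worked example with the DK1's nominal values (HScreenSize 0.14976 m,
    // LensSeparationDistance 0.0635 m): 1 - (2 * 0.0635) / 0.14976 ~= 0.152.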


    // The IPD and the modelview offset are in meters.  If you wish to use 
    // a different unit for the scale of your world coordinates, you need 
    // to apply the conversion factor here.
    glm::vec3 modelviewOffsetVector =
        glm::vec3(stereoConfig.GetIPD() / 2.0f, 0, 0);
    eyeArgs[0].modelviewOffset =
        glm::translate(glm::mat4(), modelviewOffsetVector);
    eyeArgs[1].modelviewOffset =
        glm::translate(glm::mat4(), -modelviewOffsetVector);


    gl::Stacks::projection().top() = glm::perspective(
        stereoConfig.GetYFOVDegrees() * DEGREES_TO_RADIANS,
        eyeAspect,
        Rift::ZNEAR, Rift::ZFAR);
  }
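The per-eye values computed above are consumed at render time. What follows is a minimal sketch of one way they might be applied, not code from the original program: renderScene() is a hypothetical stand-in for the application's scene drawing, and the framebuffer/distortion pass is omitted entirely.

  // Hypothetical per-eye render pass using the eyeArgs computed above.
  void renderPerEye() {
    gl::MatrixStack & mv = gl::Stacks::modelview();
    gl::MatrixStack & pr = gl::Stacks::projection();
    for (int eye = 0; eye < 2; ++eye) {
      auto & args = eyeArgs[eye];
      glViewport(args.viewportLocation.x, args.viewportLocation.y,
                 eyeSize.x, eyeSize.y);
      // Offset the projection and modelview matrices for this eye.
      pr.push();
      pr.top() = args.projectionOffset * pr.top();
      mv.push();
      mv.top() = args.modelviewOffset * mv.top();
      renderScene();  // hypothetical scene-drawing hook
      mv.pop();
      pr.pop();
    }
  }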

Example #10
  ~HelloRift() {
    sensorFusion.AttachToSensor(nullptr);
    ovrSensor = nullptr;
    ovrManager = nullptr;
  }

Example #11
  void update() {
    currentOrientation = Rift::fromOvr(sensorFusion.GetOrientation());
    predictedOrientation = Rift::fromOvr(sensorFusion.GetPredictedOrientation());
  }