Example #1
void OculusInterface::oculusDisplayWarning()
{
  // Health and Safety Warning display state.
  ovrHSWDisplayState hswDisplayState;
  ovrHmd_GetHSWDisplayState(m_hmd, &hswDisplayState);
  if (hswDisplayState.Displayed)
  {
    // Dismiss the warning if the user pressed the appropriate key or if the user
    // is tapping the side of the HMD.
    if (m_warningOff)
    {
      // The user requested dismissal via keyboard or controller input.
      ovrHmd_DismissHSWDisplay(m_hmd);
    }
    else
    {
      // Detect a moderate tap on the side of the HMD.
      ovrTrackingState ts = ovrHmd_GetTrackingState(m_hmd, ovr_GetTimeInSeconds());
      if (ts.StatusFlags & ovrStatus_OrientationTracked)
      {
        const OVR::Vector3f v(ts.RawSensorData.Accelerometer.x,
                              ts.RawSensorData.Accelerometer.y,
                              ts.RawSensorData.Accelerometer.z);
        // Arbitrary threshold representing a moderate tap on the side of the DK2 Rift.
        if (v.LengthSq() > 250.f)
          ovrHmd_DismissHSWDisplay(m_hmd);
      }
    }
  }
}
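The 250.f threshold above is compared against the squared accelerometer magnitude, so it corresponds to roughly 15.8 m/s² of instantaneous acceleration. As a minimal sketch, the same check can be factored into a standalone helper; the threshold and the tracked-orientation guard come from the example, while the helper name is illustrative:

#include <OVR_CAPI.h>

// Returns true when the raw accelerometer reading suggests a moderate tap.
// The threshold is in (m/s^2)^2: 250.f is about 15.8 m/s^2 of acceleration.
static bool TapDetected(ovrHmd hmd)
{
    const ovrTrackingState ts = ovrHmd_GetTrackingState(hmd, ovr_GetTimeInSeconds());
    if (!(ts.StatusFlags & ovrStatus_OrientationTracked))
        return false;
    const ovrVector3f a = ts.RawSensorData.Accelerometer;
    return (a.x * a.x + a.y * a.y + a.z * a.z) > 250.f;
}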
Example #2
		math::quaternion GetCameraOrientation()
		{
			ovrTrackingState s = ovrHmd_GetTrackingState(m_device, 0);
			return math::quaternion(s.CameraPose.Orientation.w,
			                        -s.CameraPose.Orientation.x,
			                        -s.CameraPose.Orientation.y,
			                        s.CameraPose.Orientation.z);
		}
Example #3
ovrBool VR_OVR_InitSensor()
{
	unsigned int sensorCaps = ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection;

	if (sensorEnabled)
	{
		sensorEnabled = 0;
	}

	sensorCaps |= ovrTrackingCap_Position;

	sensorEnabled = ovrHmd_ConfigureTracking(hmd, sensorCaps, ovrTrackingCap_Orientation);
	if (sensorEnabled)
	{
		ovrTrackingState ss;
		ss = ovrHmd_GetTrackingState(hmd, ovr_GetTimeInSeconds());
		Com_Printf("VR_OVR: Successfully initialized sensors!\n");

		if (ss.StatusFlags & ovrStatus_PositionConnected)
			Com_Printf("...sensor has position tracking support\n");
		if (ss.StatusFlags & ovrStatus_OrientationTracked)
			Com_Printf("...orientation tracking enabled\n");
		if (ss.StatusFlags & ovrStatus_PositionTracked)
			Com_Printf("...position tracking enabled\n");
	}
	return sensorEnabled;
}
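For context, ovrHmd_ConfigureTracking takes two bitmasks: the capabilities the application would like (supported) and those that must be present for the call to succeed (required). A minimal sketch of the same graceful-degradation pattern as above, assuming an already-created hmd handle:

#include <OVR_CAPI.h>

// Request orientation, yaw correction and position, but only *require*
// orientation: the call still succeeds on an HMD without a position camera.
static ovrBool InitTracking(ovrHmd hmd)
{
    const unsigned int supported = ovrTrackingCap_Orientation
                                 | ovrTrackingCap_MagYawCorrection
                                 | ovrTrackingCap_Position;
    const unsigned int required  = ovrTrackingCap_Orientation;
    return ovrHmd_ConfigureTracking(hmd, supported, required);
}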
Example #4
void OVRScene::RenderForOneEye(const float* pMview, const float* pPersp) const
{
    printf("Rendering in OVRScene!\n");
    if (m_bDraw == false)
        return;
    if (pMview == NULL)
        return;
    if (pPersp == NULL)
        return;

    const glm::mat4 modelview = glm::make_mat4(pMview);
    const glm::mat4 projection = glm::make_mat4(pPersp);

    // Assemble modelview matrix to lock camera in with real world geometry:
    // We still have to use the assembled HMD stereo modelview matrices from OVRSDK05AppSkeleton,
    // but we undo the effects of chassis yaw and position so the frustum follows the viewer.
    if (m_pHmd != NULL)
    {
        const ovrTrackingState ts = ovrHmd_GetTrackingState(m_pHmd, ovr_GetTimeInSeconds());
        const ovrPosef& cp = ts.CameraPose;

        OVR::Matrix4f camMtx = OVR::Matrix4f();
        camMtx *= OVR::Matrix4f::Translation(cp.Position)
            * OVR::Matrix4f(OVR::Quatf(cp.Orientation));

        glm::mat4 ogmat = glm::make_mat4(&camMtx.Transposed().M[0][0]);

        DrawScene(modelview * ogmat, projection);
    }
}
Example #5
		math::quaternion GetOrientation()
		{
			ovrTrackingState s = ovrHmd_GetTrackingState(m_device, 0);
			return math::quaternion(s.HeadPose.ThePose.Orientation.w,
			                        s.HeadPose.ThePose.Orientation.x,
			                        s.HeadPose.ThePose.Orientation.y,
			                        s.HeadPose.ThePose.Orientation.z);
		}
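Examples #2 and #5 adapt ovrQuatf into an engine quaternion type, with Example #2 also negating x and y to flip handedness for the camera pose. A hypothetical glm-based equivalent of the plain copy in Example #5 (the helper name is illustrative) might look like:

#include <OVR_CAPI.h>
#include <glm/gtc/quaternion.hpp>

// Straight component copy: glm::quat takes (w, x, y, z), matching the
// argument order used by the math::quaternion constructor in Example #5.
static glm::quat ToGlmQuat(const ovrQuatf& q)
{
    return glm::quat(q.w, q.x, q.y, q.z);
}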
Example #6
void OculusInterface::oculusPoseState()
{
  ovrTrackingState ts = ovrHmd_GetTrackingState(m_hmd, ovr_GetTimeInSeconds());
  if (ts.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked))
  {
    ovrPoseStatef pose = ts.HeadPose;
    std::cout<<"--------------------------------------------------------------\n";
    std::cout<<"Time "<<pose.TimeInSeconds<<"\n";
    std::cout<<"Orientation Quat <<"<< pose.ThePose.Orientation.x <<" "
              << pose.ThePose.Orientation.y <<" "
              << pose.ThePose.Orientation.z <<" "
              << pose.ThePose.Orientation.w <<"\n";
    std::cout << "Angular Velocity "<< pose.AngularVelocity.x <<" "
                 << pose.AngularVelocity.y <<" "
                 << pose.AngularVelocity.z <<"\n";
    std::cout << "Linear Velocity "<< pose.LinearVelocity.x <<" "
                 << pose.LinearVelocity.y <<" "
                 << pose.LinearVelocity.z <<"\n";
    std::cout << "AngularAcceleration Velocity "<< pose.AngularAcceleration.x <<" "
                 << pose.AngularAcceleration.y <<" "
                 << pose.AngularAcceleration.z <<"\n";

    std::cout << "LinearAcceleration Velocity "<< pose.LinearAcceleration.x <<" "
                 << pose.LinearAcceleration.y <<" "
                 << pose.LinearAcceleration.z <<"\n";
    std::cout<<"--------------------------------------------------------------\n";


  }
}
Example #7
  void update() {
    ovrTrackingState trackingState = ovrHmd_GetTrackingState(hmd, 0);
    ovrPoseStatef & poseState = trackingState.HeadPose;

    orientation = ovr::toGlm(poseState.ThePose.Orientation);
    linearA = ovr::toGlm(poseState.LinearAcceleration);
    angularV = ovr::toGlm(poseState.AngularVelocity);
  }
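Note the second argument to ovrHmd_GetTrackingState: 0 asks for the latest unpredicted sensor state, while a future absolute time lets the SDK predict the head pose for when the frame will actually be displayed. A sketch of the predicted variant, assuming an initialized hmd and the SDK 0.4-era frame-timing API used elsewhere in these examples:

#include <OVR_CAPI.h>

// Sample the head pose predicted to the scanout midpoint of the given frame,
// rather than "now"; this is what reduces perceived latency vs. absTime = 0.
static ovrPosef SampleHeadPose(ovrHmd hmd, unsigned int frameIndex)
{
    const ovrFrameTiming timing = ovrHmd_GetFrameTiming(hmd, frameIndex);
    const ovrTrackingState ts =
        ovrHmd_GetTrackingState(hmd, timing.ScanoutMidpointSeconds);
    return ts.HeadPose.ThePose;
}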
Example #8
void OculusInterface::setLeftEye()
{
	ovrEyeType eye = m_hmd->EyeRenderOrder[0];
	glViewport(0, 0, (m_fbWidth / 2), m_fbHeight);
	ovrTrackingState ts = ovrHmd_GetTrackingState(m_hmd, ovr_GetTimeInSeconds());
	OVR::Posef pose = ts.HeadPose.ThePose;
//	m_pose[0] = ovrHmd_GetEyePose(m_hmd, eye);

}
Example #9
Point3F OculusVRSensorDevice::getPosition()
{
    if(!mIsValid)
        return Point3F();

    ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
    OVR::Vector3f v = ts.HeadPose.ThePose.Position;
    return Point3F(-v.x, v.z, -v.y);
}
Example #10
		math::vector3d GetPosition()
		{
			ovrEyeType eye = m_device->EyeRenderOrder[0];
			ovrPosef p = ovrHmd_GetEyePose(m_device, eye);
			return math::vector3d(p.Position.x, p.Position.y, p.Position.z);
			// Unreachable alternative: read the head position from the
			// tracking state instead of the per-eye pose.
			//ovrTrackingState s = ovrHmd_GetTrackingState(m_device, 0);
			//return math::vector3d(s.HeadPose.ThePose.Position.x, s.HeadPose.ThePose.Position.y, -s.HeadPose.ThePose.Position.z);
		}
Example #11
void OcculusCameraComponent::update( float dt )
{
	// Oculus warning
	// Health and Safety Warning display state.
	ovrHSWDisplayState hswDisplayState;
	ovrHmd_GetHSWDisplayState(hmd, &hswDisplayState);
	if (hswDisplayState.Displayed) 
	{ 
		// Dismiss the warning if the user pressed the appropriate key or if the user 
		// is tapping the side of the HMD. 
		// If the user has requested to dismiss the warning via keyboard or controller input... 
		//if (Util_GetAndResetHSWDismissedState()) 
		//	ovrHmd_DismissHSWDisplay(hmd); 
		//else 
		{ 
			// Detect a moderate tap on the side of the HMD. 
			ovrTrackingState ts = ovrHmd_GetTrackingState(hmd, ovr_GetTimeInSeconds());
			if (ts.StatusFlags & ovrStatus_OrientationTracked) 
			{ 
				const OVR::Vector3f v(ts.RawSensorData.Accelerometer.x, ts.RawSensorData.Accelerometer.y, ts.RawSensorData.Accelerometer.z);

				// Arbitrary threshold representing a moderate tap on the side of the DK2 Rift.
				if (v.LengthSq() > 250.f) ovrHmd_DismissHSWDisplay(hmd);
			}
		}
	}


	dt; // Suppress the unused-parameter warning.
	// Query the HMD for the current tracking state.
	
	ovrTrackingState ts = ovrHmd_GetTrackingState(hmd, ovr_GetTimeInSeconds());
	if (ts.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked))
	{
		ovrPosef pose = ts.HeadPose.ThePose;

		//convert to mat4 if desired
		glm::mat4 transformMatrix = glm::mat4_cast( fromOVR( pose.Orientation ) );

		//parent->gc<TransformComponent>()->setRotation( glm::mat3(transformMatrix) );
	}
	parent->getStage()->subscribeRender( this );
}
Example #12
void OcudumpDebug::getPose()
{
    // can't call the base class version since it's pure virtual... le sigh
    state = ovrHmd_GetTrackingState(hmd, 0);
    _getPoseOrientation();
    _getPosePosition();
    for (Animate::iterator it=animate.begin();it!=animate.end();it++)
    {
        pose[it->first]+=it->second.getElem();
    }
}
Example #13
EulerF OculusVRSensorDevice::getRawEulerRotation()
{
    if(!mIsValid)
        return EulerF::Zero;

    ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
    OVR::Quatf orientation = ts.HeadPose.ThePose.Orientation;

    // Sensor rotation in Euler format
    EulerF rot;
    OculusVRUtil::convertRotation(orientation, rot);
    return rot;
}
Example #14
VectorF OculusVRSensorDevice::getAcceleration()
{
    if(!mIsValid)
        return VectorF::Zero;

    ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
    OVR::Vector3f a = ts.HeadPose.LinearAcceleration;

    // Sensor acceleration in VectorF format
    VectorF acceleration;
    OculusVRUtil::convertAcceleration(a, acceleration);

    return acceleration;
}
Example #15
EulerF OculusVRSensorDevice::getAngularVelocity()
{
    if(!mIsValid)
        return EulerF::Zero;

    ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
    OVR::Vector3f v = ts.HeadPose.AngularVelocity;

    // Sensor angular velocity in EulerF format
    EulerF vel;
    OculusVRUtil::convertAngularVelocity(v, vel);

    return vel;
}
Example #16
Posef FrameTimeManager::GetEyePredictionPose(ovrHmd hmd, ovrEyeType eye)
{
    double           eyeRenderTime = GetEyePredictionTime(eye);
    ovrTrackingState eyeState      = ovrHmd_GetTrackingState(hmd, eyeRenderTime);
        
    // Record view pose sampling time for latency reporting.
    if (RenderIMUTimeSeconds == 0.0)
    {
        // TODO: Figure out why this is not as accurate as ovr_GetTimeInSeconds()
        //RenderIMUTimeSeconds = eyeState.RawSensorData.TimeInSeconds;
        RenderIMUTimeSeconds = ovr_GetTimeInSeconds();
    }

    return eyeState.HeadPose.ThePose;
}
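GetEyePredictionPose appears to be SDK-internal code: each eye is predicted to its own display time, which matters on panels that scan out one eye's half of the screen before the other. Application code can approximate the per-eye variant with the public frame-timing API; a sketch under that assumption (compare Example #19 below, which averages the two eye times instead):

#include <OVR_CAPI.h>

// Sample a head pose predicted to one eye's own scanout time.
static ovrPosef PredictedEyePose(ovrHmd hmd, const ovrFrameTiming& timing, ovrEyeType eye)
{
    const double absTime = timing.EyeScanoutSeconds[eye];
    return ovrHmd_GetTrackingState(hmd, absTime).HeadPose.ThePose;
}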
Example #17
void getTrackingData() {
	// Query the HMD for the current tracking state.
	ovrTrackingState ts = ovrHmd_GetTrackingState(HMD, ovr_GetTimeInSeconds());
	if (ts.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked)) {
		currentPose = ts.HeadPose.ThePose;
		currentPose.Rotation.GetEulerAngles<OVR::Axis_Y, OVR::Axis_X, OVR::Axis_Z>(&currentYaw, &currentPitch, &currentRoll);
		currentYaw *= RAD2DEG;
		currentPitch *= RAD2DEG;
		currentRoll *= RAD2DEG;
		currentRoll = -currentRoll;
	} else {
		currentYaw = -1;
		currentPitch = -1;
		currentRoll = -1;
	}
}
Example #18
void OculusDevice::updatePose(unsigned int frameIndex)
{
	// Ask the API for the times when this frame is expected to be displayed.
	m_frameTiming = ovrHmd_GetFrameTiming(m_hmdDevice, frameIndex);

	// Query the HMD for the current tracking state.
	ovrTrackingState ts = ovrHmd_GetTrackingState(m_hmdDevice, m_frameTiming.ScanoutMidpointSeconds);
	ovrPoseStatef headpose = ts.HeadPose;
	ovrPosef pose = headpose.ThePose;
	m_position.set(-pose.Position.x, -pose.Position.y, -pose.Position.z);
	m_orientation.set(pose.Orientation.x, pose.Orientation.y, pose.Orientation.z, -pose.Orientation.w);

	// Get head pose for both eyes (used for timewarp).
	for (int eyeIndex = 0; eyeIndex < ovrEye_Count; ++eyeIndex) {
		ovrEyeType eye = m_hmdDevice->EyeRenderOrder[eyeIndex];
		m_headPose[eye] = ovrHmd_GetEyePose(m_hmdDevice, eye);
	}
}
Example #19
int32_t VR_OVR_getOrientation(float euler[3])
{
	double time = 0.0;

	if (!hmd)
		return 0;

	if (vr_ovr_autoprediction->value > 0)
		time = (frameTime.EyeScanoutSeconds[ovrEye_Left] + frameTime.EyeScanoutSeconds[ovrEye_Right]) / 2.0;
	else
		time = ovr_GetTimeInSeconds() + prediction_time;
	trackingState = ovrHmd_GetTrackingState(hmd, time);
	if (trackingState.StatusFlags & ovrStatus_OrientationTracked)
	{
		VR_OVR_QuatToEuler(trackingState.HeadPose.ThePose.Orientation, euler);
		return 1;
	}
	return 0;
}
Example #20
/*- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
static PyObject *
oculus_OculusRiftDK2_frame(oculus_OculusRiftDK2 *self)
{
    if (!self->device)
    {
        PyErr_SetString(oculus_TrackingError, "No device found");
        return NULL;
    }

    /* Check if there is an error stored in the HMD */
    // const char *error = ovrHmd_GetLastError(self->device);
    // if (error)
    //     fprintf(stderr, "Error (frame): '%s'\n", error);

    /* Get the most current (0.0) tracking state of the HMD */
    ovrTrackingState state = ovrHmd_GetTrackingState(self->device, 0.0);
    ovrPosef head   = state.HeadPose.ThePose;
    // ovrPosef camera = state.CameraPose;

    /* Create python tuples and fill them with values and place them into
       another tuple, which will be the argument for the frame-object */
    PyObject *position    = Py_BuildValue("(fff)",
                                          head.Position.x*self->head_factor + self->head_shift[0],
                                          head.Position.y*self->head_factor + self->head_shift[1],
                                          head.Position.z*self->head_factor + self->head_shift[2]),
             *orientation = Py_BuildValue("(ffff)", head.Orientation.w,
                                                    head.Orientation.x,
                                                    head.Orientation.y,
                                                    head.Orientation.z),
             *args = Py_BuildValue("(OO)", position, orientation);

    /* Create a new frame-object and pass the previously constructed argument tuple */
    PyObject *frame = PyObject_CallObject((PyObject *)&oculus_OculusRiftDK2FrameType, args);

    /* Clean up */
    Py_XDECREF(args);
    Py_XDECREF(position);
    Py_XDECREF(orientation);

    /* Return the new frame-object */
    return frame;
}
Example #21
///@brief This function will detect a moderate tap on the Rift via the accelerometer.
///@return true if a tap was detected, false otherwise.
bool RiftAppSkeleton::CheckForTapOnHmd()
{
    const ovrTrackingState ts = ovrHmd_GetTrackingState(m_Hmd, ovr_GetTimeInSeconds());
    if (!(ts.StatusFlags & ovrStatus_OrientationTracked))
        return false;

    const OVR::Vector3f v(ts.RawSensorData.Accelerometer);
    // Arbitrary threshold representing a moderate tap on the side of the DK2 Rift.
    if (v.LengthSq() > 250.f)
    {
        // Limit tapping rate
        static double lastTapTime = 0.0;
        if (ovr_GetTimeInSeconds() - lastTapTime > 0.5)
        {
            lastTapTime = ovr_GetTimeInSeconds();
            DismissHealthAndSafetyWarning();
            ToggleShaderWorld();
            return true;
        }
    }
    return false;
}
Example #22
		void OculusRift::update()
		{
			ovrTrackingState state = ovrHmd_GetTrackingState(mHmd, ovr_GetTimeInSeconds());
			if (state.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked))
			{
				/*console() << "trackingState::Quatf" <<
					state.HeadPose.ThePose.Orientation.x << ", " <<
					state.HeadPose.ThePose.Orientation.y << ", " <<
					state.HeadPose.ThePose.Orientation.z << ", " <<
					state.HeadPose.ThePose.Orientation.w << ", " <<
					" position: " <<
					state.HeadPose.ThePose.Position.x << ", " <<
					state.HeadPose.ThePose.Position.y << ", " <<
					state.HeadPose.ThePose.Position.z << ", " <<
					endl;*/

				mHeadPose = state.HeadPose;
			}
			else
			{
				console() << state.StatusFlags << endl;
			}
		}
Example #23
int OVRGetPose(float viewAngles[3], float position[3])
{
	double absTime = _OVRGlobals.FrameTiming.ThisFrameSeconds ?
		_OVRGlobals.FrameTiming.ScanoutMidpointSeconds :
		ovr_GetTimeInSeconds();
	ovrTrackingState ts = ovrHmd_GetTrackingState( _OVRGlobals.HMD, absTime );
	ovrPosef pose = ts.HeadPose.ThePose;
	unsigned int statusFlags = ts.StatusFlags;

	if ( viewAngles && ( statusFlags & ovrStatus_OrientationTracked ) ) {
		OVR::Quatf q = pose.Orientation;

		q.GetEulerAngles<OVR::Axis_Y, OVR::Axis_X, OVR::Axis_Z>( &viewAngles[1], &viewAngles[0], &viewAngles[2] );
	}

	if ( position && ( statusFlags & ovrStatus_PositionTracked ) ) {
		position[0] = -pose.Position.z;
		position[1] = pose.Position.x;
		position[2] = pose.Position.y;
	}

	return (statusFlags & ovrStatus_PositionTracked);
}
Example #24
 auto getTrackingState() {
     return ovrHmd_GetTrackingState(session, ovr_GetTimeInSeconds());
 }
Example #25
void OcudumpBase::getPose()
{
    state = ovrHmd_GetTrackingState(hmd, 0);
    _getPoseOrientation();
    _getPosePosition();
}
Example #26
		math::vector3d GetAngularVelocity()
		{
			ovrTrackingState s = ovrHmd_GetTrackingState(m_device, 0);
			return math::vector3d(s.HeadPose.AngularVelocity.x, s.HeadPose.AngularVelocity.y, s.HeadPose.AngularVelocity.z);
		}
Example #27
		bool IsHMDConnected()
		{
			return (ovrHmd_GetTrackingState(m_device, 0).StatusFlags & ovrStatus_HmdConnected) != 0;
		}
Example #28
		bool IsTrackingConnected()
		{
			return ovrHmd_GetTrackingState(m_device, 0.0f).StatusFlags & ovrStatus_PositionConnected;
		}
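The small predicates above each issue their own tracking-state query and mask a single bit of StatusFlags with a bitwise AND. A minimal sketch bundling them into one query (the struct and helper names are illustrative):

#include <OVR_CAPI.h>

struct TrackingStatus
{
    bool hmdConnected;
    bool positionConnected;
    bool orientationTracked;
    bool positionTracked;
};

// One tracking-state query instead of one per predicate.
static TrackingStatus QueryStatus(ovrHmd hmd)
{
    const unsigned int flags = ovrHmd_GetTrackingState(hmd, 0.0).StatusFlags;
    TrackingStatus s;
    s.hmdConnected       = (flags & ovrStatus_HmdConnected) != 0;
    s.positionConnected  = (flags & ovrStatus_PositionConnected) != 0;
    s.orientationTracked = (flags & ovrStatus_OrientationTracked) != 0;
    s.positionTracked    = (flags & ovrStatus_PositionTracked) != 0;
    return s;
}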
Example #29
bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bool generateRotAsEuler, bool generateRotationAsAxisEvents, bool generatePositionEvents, F32 maxAxisRadius, bool generateRawSensor)
{
    if(!mIsValid)
        return false;

    // Grab current state
    ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
    mLastStatus = ts.StatusFlags;

    // Store the current data from the sensor and compare with previous data
    U32 diff;
    OculusVRSensorData* currentBuffer = (mPrevData == mDataBuffer[0]) ? mDataBuffer[1] : mDataBuffer[0];
    currentBuffer->setData(ts, maxAxisRadius);
    diff = mPrevData->compare(currentBuffer, generateRawSensor);

    // Update the previous data pointer.  We do this here in case someone calls our
    // console functions during one of the input events below.
    mPrevData = currentBuffer;

    // Rotation event
    if(diff & OculusVRSensorData::DIFF_ROT)
    {
        if(generateRotAsAngAxis)
        {
            INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_ROT, OVR_SENSORROT[mActionCodeIndex], SI_MOVE, currentBuffer->mRotQuat);
        }

        if(generateRotAsEuler)
        {
            // Convert angles to degrees
            VectorF angles;
            for(U32 i=0; i<3; ++i)
            {
                angles[i] = mRadToDeg(currentBuffer->mRotEuler[i]);
            }
            INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_POS, OVR_SENSORROTANG[mActionCodeIndex], SI_MOVE, angles);
        }
    }

    // Rotation as axis event
    if(generateRotationAsAxisEvents && diff & OculusVRSensorData::DIFF_ROTAXIS)
    {
        if(diff & OculusVRSensorData::DIFF_ROTAXISX)
            INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_AXIS, OVR_SENSORROTAXISX[mActionCodeIndex], SI_MOVE, currentBuffer->mRotAxis.x);
        if(diff & OculusVRSensorData::DIFF_ROTAXISY)
            INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_AXIS, OVR_SENSORROTAXISY[mActionCodeIndex], SI_MOVE, currentBuffer->mRotAxis.y);
    }

    if (generatePositionEvents && diff & OculusVRSensorData::DIFF_POS)
    {
        INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_AXIS, OVR_SENSORROTAXISX[mActionCodeIndex], SI_MOVE, currentBuffer->mPosition);
    }

    // Raw sensor event
    if(generateRawSensor && diff & OculusVRSensorData::DIFF_RAW)
    {
        if(diff & OculusVRSensorData::DIFF_ACCEL)
            INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_POS, OVR_SENSORACCELERATION[mActionCodeIndex], SI_MOVE, currentBuffer->mAcceleration);

        if(diff & OculusVRSensorData::DIFF_ANGVEL)
        {
            // Convert angles to degrees
            VectorF angles;
            for(U32 i=0; i<3; ++i)
            {
                angles[i] = mRadToDeg(currentBuffer->mAngVelocity[i]);
            }
            INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_POS, OVR_SENSORANGVEL[mActionCodeIndex], SI_MOVE, angles);
        }

        if(diff & OculusVRSensorData::DIFF_MAG)
            INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_POS, OVR_SENSORMAGNETOMETER[mActionCodeIndex], SI_MOVE, currentBuffer->mMagnetometer);
    }

    if (diff & OculusVRSensorData::DIFF_STATUS)
    {
        if (Con::isFunction("onOculusStatusUpdate"))
        {
            Con::executef("onOculusStatusUpdate", ts.StatusFlags);
        }
    }

    return true;
}
Example #30
		bool IsPositionTracking()
		{
			return ovrHmd_GetTrackingState(m_device, 0.0f).StatusFlags & ovrStatus_PositionTracked;
		}