extern "C" RIFTAPI_API int InitRiftApi()
{
	OVR::System::Init();
	pManager = *DeviceManager::Create();

	pHMD     = *pManager->EnumerateDevices<HMDDevice>().CreateDevice();
	if (!pHMD)
	{
		return 0;
	}

	pSensor  = *pHMD->GetSensor();

	// Get DisplayDeviceName, ScreenWidth/Height, etc.
	pHMD->GetDeviceInfo(&hmdInfo);

	if (pSensor)
	{
		SFusion.AttachToSensor(pSensor);
		SFusion.SetPrediction(0.03f);
		return 1;
	}

	return 0;
}
void IN_MotionSensor_Read(float &roll, float &pitch, float &yaw)
{
	if (SFusion.IsAttachedToSensor()) {
		float predictionDelta = in_sensorPrediction.GetFloat() * (1.0f / 1000.0f);
		if (SFusion.GetPredictionDelta() != predictionDelta)
		{
			SFusion.SetPrediction(predictionDelta);
		}
		Quatf hmdOrient = SFusion.GetPredictedOrientation();
		float y = 0.0f, p = 0.0f, r = 0.0f;
		hmdOrient.GetEulerAngles<Axis_Y, Axis_X, Axis_Z>(&y, &p, &r);
		roll  =  -RADIANS_TO_DEGREES(r); // ???
		pitch =  -RADIANS_TO_DEGREES(p); // should be degrees down
		yaw   =   RADIANS_TO_DEGREES(y); // should be degrees left
	} else if (hasVR920Tracker && IWRGetTracking) {
		LONG y = 0, p = 0, r = 0;
		if (IWRGetTracking(&y, &p, &r) == ERROR_SUCCESS) {
			yaw   = y *  180.0f / 32767.0f;
			pitch = p * -180.0f / 32767.0f;
			roll  = r *  180.0f / 32767.0f;
		}
	} else {
		roll  = angles[ROLL];
		pitch = angles[PITCH];
		yaw   = angles[YAW];
	}
}
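IN_MotionSensor_Read hands back the angles in degrees whichever tracker supplied them. A minimal caller sketch (where the angles end up is engine-specific and assumed here):

	// Hypothetical per-frame usage of the reader above.
	float roll = 0.0f, pitch = 0.0f, yaw = 0.0f;
	IN_MotionSensor_Read(roll, pitch, yaw);
	// Feed pitch/yaw/roll (in degrees) into the engine's view angles here.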
void InputTestApp::OnIdle()
{
    double curtime = pPlatform->GetAppTime();
 //   float  dt      = float(LastUpdate - curtime);
    LastUpdate     = curtime;
    
    if (pBox)
    {
        Quatf q = SFusion.GetOrientation();
        pBox->SetOrientation(q);

   // Test Euler conversion, alternative to the above:
   //     Vector3f euler;
   //     SFusion.GetOrientation().GetEulerABC<Axis_Y, Axis_X, Axis_Z, Rotate_CCW, Handed_R>(&euler.y, &euler.x, &euler.z);
   //     Matrix4f mat = Matrix4f::RotationY(euler.y) * Matrix4f::RotationX(euler.x) * Matrix4f::RotationZ(euler.z);
   //  pBox->SetMatrix(mat);    

        // Update titlebar every 20th of a second.
        if ((curtime - LastTitleUpdate) > 0.05f)
        {
            char                          titleBuffer[512];
            SensorDevice::CoordinateFrame coord = SensorDevice::Coord_Sensor;
            if (pSensor)
                coord = pSensor->GetCoordinateFrame();

            OVR_sprintf(titleBuffer, 512, "OVR SensorBox %s %s  Ang: %0.3f",
                        (SFusion.IsGravityEnabled() ?  "" : "[Grav Off]"),
                        (coord == SensorDevice::Coord_HMD) ? "[HMD Coord]" : "",
                        CalcDownAngleDegrees(q));
            pPlatform->SetWindowTitle(titleBuffer);
            LastTitleUpdate = curtime;
        }
    }

    if (pBox2)
    {
        pBox2->SetOrientation(SFusion2.GetOrientation());
    }

    // Render
    int w, h;
    pPlatform->GetWindowSize(&w, &h);

    pRender->SetViewport(0, 0, w, h);

    pRender->Clear();
    pRender->BeginScene();

    pRender->SetProjection(Proj);
    pRender->SetDepthMode(1,1);
    
    Sc.Render(pRender, View);

    pRender->Present();

}
Example #4
JNIEXPORT void JNICALL Java_de_fruitfly_ovr_OculusRift_pollSubsystem(JNIEnv *, jobject) {
	if (!Initialized) return;
	if (!pSensor) return;

	bool isPredictionEnabled = FusionResult.IsPredictionEnabled();
	if (isPredictionEnabled)
		quaternion = FusionResult.GetPredictedOrientation();
	else
		quaternion = FusionResult.GetOrientation();

	quaternion.GetEulerAngles<Axis_Y, Axis_X, Axis_Z>(&yaw, &pitch, &roll);
}
Example #5
JNIEXPORT void JNICALL Java_jrift_OculusRift_pollSubsystem(JNIEnv *, jobject) {
	if (!Initialized) return;
	if (!pSensor) return;

	quaternion = FusionResult.GetOrientation();
	quaternion.GetEulerAngles<Axis_Y, Axis_X, Axis_Z>(&yaw, &pitch, &roll);
}
Example #6
void OculusRiftService::onInit(){
	// Initialization: prepare variables for data acquisition from the Oculus Rift's sensors.
	OVR::System::Init();
	std::string Service_name = "";
	IPv4 ip;
	std::string str = getMyIP(&ip);
	if (str == "")
	{
		std::cout << "There has been an error getting the IP address!" << std::endl;
	}

	Service_name = "SIGORS ";
	Service_name += str;
	// sendMsg("multiuserctr_0", Service_name);


	pFusionResult = new SensorFusion();
	pManager = *DeviceManager::Create();

	pHMD = *pManager->EnumerateDevices<HMDDevice>().CreateDevice();

	if (pHMD){
		InfoLoaded = pHMD->GetDeviceInfo(&Info);
		pSensor = *pHMD->GetSensor();
	}
	else{
		pSensor = *pManager->EnumerateDevices<SensorDevice>().CreateDevice();
	}

	if (pSensor){
		pFusionResult->AttachToSensor(pSensor);
	}
}
Example #7
JNIEXPORT void JNICALL Java_de_fruitfly_ovr_OculusRift__1setPredictionEnabled(JNIEnv *, jobject, jfloat delta, jboolean enable) 
{
	if (Initialized)
	{
		FusionResult.SetPrediction(delta, enable);
	}
}
Example #8
 __declspec(dllexport) void initialize(void) {
     System::Init(Log::ConfigureDefaultLog(LogMask_None));    
     pManager = *DeviceManager::Create();
     pHMD = *pManager->EnumerateDevices<HMDDevice>().CreateDevice(); 
     pSensor = *pHMD->GetSensor();
     SFusion.AttachToSensor(pSensor);
     MagCal.BeginAutoCalibration(SFusion);
 }
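Note that BeginAutoCalibration only starts the magnetometer calibration; the per-frame update_calibration call in Example #10 below advances it and enables yaw correction once calibration completes.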
extern "C" RIFTAPI_API int GetSensorSample()
{
   
	SFusion.GetOrientation().GetEulerAngles<Axis_Y, Axis_X, Axis_Z, Rotate_CCW, Handed_R>(&Heading, &Pitch, &Roll);
	

	return 1;
}
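GetEulerAngles fills Heading, Pitch, and Roll in radians. A hedged caller-side sketch, assuming the same globals and OVR's RadToDegree helper (used elsewhere in these examples):

	if (GetSensorSample())
	{
		// Convert the sampled radians to degrees for display.
		printf("heading=%0.1f pitch=%0.1f roll=%0.1f (deg)\n",
			RadToDegree(Heading), RadToDegree(Pitch), RadToDegree(Roll));
	}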
Example #10
 __declspec(dllexport) void update_calibration(void) {
     if (MagCal.IsAutoCalibrating()) 
     {
         MagCal.UpdateAutoCalibration(SFusion);
         if (MagCal.IsCalibrated())
         {
             SFusion.SetYawCorrectionEnabled(true);
         }
     }
 }
Example #11
JNIEXPORT jboolean JNICALL Java_de_fruitfly_ovr_OculusRift_initSubsystem(JNIEnv *env, jobject jobj) 
{
	Initialized = false;

	System::Init();

	pManager = *DeviceManager::Create();

	_ipd = 0.0635f; // Default
	Info.InterpupillaryDistance = _ipd;

	pHMD = *pManager->EnumerateDevices<HMDDevice>().CreateDevice();
	if (pHMD) {
		printf("Oculus Rift Device Interface created.\n");
		InfoLoaded = pHMD->GetDeviceInfo(&Info);
		pSensor = *pHMD->GetSensor();
		FusionResult.AttachToSensor(pSensor);
		Initialized = InfoLoaded && pSensor;
		printf("Oculus Rift Device Interface initialized.\n");
	}
	else {
		printf("Unable to create Oculus Rift device interface.\n");
	}

	if (InfoLoaded) {

		_ipd = Info.InterpupillaryDistance;

		printf(" DisplayDeviceName: %s\n", Info.DisplayDeviceName);
		printf(" ProductName: %s\n", Info.ProductName);
		printf(" Manufacturer: %s\n", Info.Manufacturer);
		printf(" Version: %s\n", Info.Version);
		printf(" HResolution: %i\n", Info.HResolution);
		printf(" VResolution: %i\n", Info.VResolution);
		printf(" HScreenSize: %f\n", Info.HScreenSize);
		printf(" VScreenSize: %f\n", Info.VScreenSize);
		printf(" VScreenCenter: %f\n", Info.VScreenCenter);
		printf(" EyeToScreenDistance: %f\n", Info.EyeToScreenDistance);
		printf(" LensSeparationDistance: %f\n", Info.LensSeparationDistance);
        printf(" InterpupillaryDistance: %f\n", Info.InterpupillaryDistance);
        printf(" DistortionK[0]: %f\n", Info.DistortionK[0]);
        printf(" DistortionK[1]: %f\n", Info.DistortionK[1]);
		printf(" DistortionK[2]: %f\n", Info.DistortionK[2]);
		printf(" DistortionK[3]: %f\n", Info.DistortionK[3]);
		printf(" ChromaticAb[0]: %f\n", Info.ChromaAbCorrection[0]);
        printf(" ChromaticAb[1]: %f\n", Info.ChromaAbCorrection[1]);
		printf(" ChromaticAb[2]: %f\n", Info.ChromaAbCorrection[2]);
		printf(" ChromaticAb[3]: %f\n", Info.ChromaAbCorrection[3]);
	}

	return Initialized;
}
Example #12
//periodic procedure for sending messages to the controller
double OculusRiftService::onAction(){
  
	IPv4 ip;
	std::string str = getMyIP(&ip);
	if (str == "")
	{
		std::cout << "There has been an error getting the IP address!" << std::endl;
	}
	// else
	//    std::cout << "IPv4 address stored in struct: ";
	//    std::cout << ip.b1 << "." << ip.b2 << "." << ip.b3 << "." << ip.b4 << std::endl;

	float r_yaw, r_pitch, r_roll;

	Quatf q = pFusionResult->GetOrientation();
	Matrix4f bodyFrameMatrix(q);
	q.GetEulerAngles<Axis_Y, Axis_X, Axis_Z>(&r_yaw, &r_pitch, &r_roll);

	std::vector<std::string> names;
	names = this->getAllConnectedEntitiesName();
	int entSize = names.size();

   // std::cout << "IPv4 Address:  " << str << std::endl;
   // std::cout << " entity " <<  entSize << std::endl;


	for(int i = 0; i < entSize; i++) {
		std::string msg = "ORS_DATA ";
		msg += str;
		msg += DoubleToString(r_yaw);
		msg += DoubleToString(r_pitch);
		msg += DoubleToString(r_roll);

		this->sendMsgToCtr(names[i], msg);
		std::cout << msg << std::endl;
	}


	return 0.1;  //time period
}
Example #13
File: main.cpp  Project: Tezar/tunnel
/** Oculus init **/
void Init()
{
	System::Init();

	pManager = *DeviceManager::Create();

	pHMD = *pManager->EnumerateDevices<HMDDevice>().CreateDevice();

	if (pHMD)
	{
		InfoLoaded = pHMD->GetDeviceInfo(&Info);
		pSensor = *pHMD->GetSensor();
	}
	else
	{
		pSensor = *pManager->EnumerateDevices<SensorDevice>().CreateDevice();
	}

	if (pSensor)
	{
	   FusionResult.AttachToSensor(pSensor);
	}
}
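main() in this project calls Clear() on shutdown (see Example #19); a minimal sketch of that counterpart, assuming the same globals (the exact body is an assumption):

void Clear()
{
	// Release the device references before shutting LibOVR down.
	pSensor.Clear();
	pHMD.Clear();
	pManager.Clear();
	System::Destroy();
}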
void IN_MotionSensor_Init(void)
{
	//Carl: Don't initialize has* here to false, because they can also be set by command line parameters

	// *** Oculus Sensor Initialization
	OVR::System::Init(OVR::Log::ConfigureDefaultLog(OVR::LogMask_All));

    // Create DeviceManager and first available HMDDevice from it.
    // Sensor object is created from the HMD, to ensure that it is on the
    // correct device.

    pManager = *DeviceManager::Create();

	// We'll handle its messages in this case.
	//pManager->SetMessageHandler(this);

    // Release Sensor/HMD in case this is a retry.
    pSensor.Clear();
    pHMD.Clear();
    pHMD  = *pManager->EnumerateDevices<HMDDevice>().CreateDevice();
    if (pHMD)
    {
        pSensor = *pHMD->GetSensor();
    }
    else
    {            
        // If we didn't detect an HMD, try to create the sensor directly.
        // This is useful for debugging sensor interaction; it is not needed in
        // a shipping app.
        pSensor = *pManager->EnumerateDevices<SensorDevice>().CreateDevice();
    }
    if (!pHMD && !pSensor)
        common->Warning("Oculus Rift not detected.\n");
    else if (!pHMD)
        common->Warning("Oculus Sensor detected; HMD Display not detected.\n");
    else if (!pSensor)
        common->Warning("Oculus HMD Display detected; Sensor not detected.\n");
    //else if (HMDInfo.DisplayDeviceName[0] == '\0')
    //    common->Printf("Oculus Sensor detected; HMD display EDID not detected.\n");

	if (pSensor)
    {
		SFusion.AttachToSensor(pSensor);
        SFusion.SetPredictionEnabled(true);
		hasOculusRift = true;
		hasHMD = true;
    }
	
	if (!pSensor)
		LoadVR920();
	hasHMD = hasHMD || hasVR920Tracker;

	//Hillcrest libfreespace stuff
	LPDWORD dwThreadId=0;
	struct freespace_message message;
	int numIds;
	int rc;
	int i;

	// Initialize the freespace library
	rc = freespace_init();
	if (rc != FREESPACE_SUCCESS) {
		common->Warning("Hillcrest Initialization error. rc=%d\n", rc);
		return;
	}

	/** --- START EXAMPLE INITIALIZATION OF DEVICE -- **/
	rc = freespace_getDeviceList(&device, 1, &numIds);
	if (numIds == 0) {
		common->Warning("MotionSensor: Didn't find any devices.\n");
		return;
	}

	rc = freespace_openDevice(device);
	if (rc != FREESPACE_SUCCESS) {
		common->Warning("MotionSensor: Error opening device: %d\n", rc);
		return;
	}

	rc = freespace_flush(device);
	if (rc != FREESPACE_SUCCESS) {
		common->Warning("MotionSensor: Error flushing device: %d\n", rc);
		return;
	}

	memset(&message, 0, sizeof(message));
	message.messageType = FREESPACE_MESSAGE_DATAMODECONTROLV2REQUEST;
	message.dataModeControlV2Request.packetSelect = 2;
	message.dataModeControlV2Request.modeAndStatus |= 0 << 1;

	rc = freespace_sendMessage(device, &message);
	if (rc != FREESPACE_SUCCESS) {
		common->Warning("freespaceInputThread: Could not send message: %d.\n", rc);
	}
	CreateThread(NULL, //Choose default security
		0, //Default stack size
		(LPTHREAD_START_ROUTINE)&IN_MotionSensor_Thread,
		//Routine to execute
		(LPVOID) &i, //Thread parameter
		0, //Immediately run the thread
		dwThreadId //Thread Id
		);
	hasHillcrest = true;
	hasHMD = true;
}
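The worker routine handed to CreateThread above is not part of this snippet. A minimal sketch of its expected shape, assuming libfreespace's blocking freespace_readMessage; the actual packet decoding is deliberately left out:

static DWORD WINAPI IN_MotionSensor_Thread(LPVOID param)
{
	// Hypothetical sketch: poll the Hillcrest device and update the shared
	// angles[] state; decoding the orientation message is omitted here.
	struct freespace_message message;
	(void)param;
	for (;;)
	{
		int rc = freespace_readMessage(device, &message, 1000 /* ms timeout */);
		if (rc != FREESPACE_SUCCESS)
			continue;
		// Convert the received orientation into angles[PITCH], angles[YAW], angles[ROLL] here.
	}
	return 0;
}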
Example #15
 __declspec(dllexport) void get_orientation(float *yaw, float *pitch, float *roll) {
     Quatf hmdOrient = SFusion.GetPredictedOrientation();
     hmdOrient.GetEulerAngles<Axis_Y, Axis_X, Axis_Z>(yaw, pitch, roll);
 }
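A hedged host-side sketch of consuming the two exports above at runtime. The functions are not declared extern "C", so this assumes the names are exported undecorated (via extern "C" or a .def file); the DLL name is a placeholder:

#include <windows.h>
#include <cstdio>

typedef void (*initialize_fn)(void);
typedef void (*get_orientation_fn)(float*, float*, float*);

int main()
{
	// "oculus_bridge.dll" is an assumed name for the DLL built from the exports above.
	HMODULE dll = LoadLibraryA("oculus_bridge.dll");
	if (!dll)
		return 1;

	initialize_fn init_rift = (initialize_fn)GetProcAddress(dll, "initialize");
	get_orientation_fn read_orientation = (get_orientation_fn)GetProcAddress(dll, "get_orientation");
	if (!init_rift || !read_orientation)
	{
		FreeLibrary(dll);
		return 1;
	}

	init_rift();
	float yaw = 0.0f, pitch = 0.0f, roll = 0.0f;
	read_orientation(&yaw, &pitch, &roll);
	printf("yaw=%f pitch=%f roll=%f (radians)\n", yaw, pitch, roll);

	FreeLibrary(dll);
	return 0;
}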
void InputTestApp::OnKey(KeyCode key, int chr, bool down, int modifiers)
{
    OVR_UNUSED2(chr, modifiers);

    switch (key)
    {
    case Key_Q:
        if (!down)
            pPlatform->Exit(0);
        break;

    case Key_F1:
        CurrentView = View_Perspective;
        SetView(CurrentView);
        //UpdateWindowTitle();
        break;
    case Key_F2:
        CurrentView = View_XY_DownZ;
        SetView(CurrentView);
        break;
    case Key_F3:
        CurrentView = View_XZ_UpY;
        SetView(CurrentView);
        break;

    case Key_R:
        if (down)
        {
            SFusion.Reset();
            SFusion2.Reset();
        }
        break;

    case Key_H:
        if (down && pSensor)
        {
            SensorDevice::CoordinateFrame coord = pSensor->GetCoordinateFrame();
            pSensor->SetCoordinateFrame(
                (coord == SensorDevice::Coord_Sensor) ?
                SensorDevice::Coord_HMD : SensorDevice::Coord_Sensor);
            SFusion.Reset();
            SFusion2.Reset();
        }
        break;

    case Key_G:
        if (down)
        {
            SFusion.SetGravityEnabled(!SFusion.IsGravityEnabled());
            SFusion2.SetGravityEnabled(SFusion.IsGravityEnabled());
        }
        break;

    case Key_A:

        if (down)
        {
            if (!pSensor2)
            {
                LogText("Angle: %2.3f\n", CalcDownAngleDegrees(SFusion.GetOrientation()));
            }
            else
            {
                LogText("Angle: %2.3f Secondary Sensor Angle: %2.3f\n",
                        CalcDownAngleDegrees(SFusion.GetOrientation()),
                        CalcDownAngleDegrees(SFusion2.GetOrientation()));
            }                        
        }
        break;

        /*
    case Key_End:
        if (!down)
        {
            OriAdjust = OriSensor.Conj();
            Sc.ViewPoint.SetOrientation(Quatf());
        }
        break; */
    default:
        break;
    }
}
int InputTestApp::OnStartup(int argc, const char** argv)
{
    if (!pPlatform->SetupWindow(1200,800))
        return 1;
    

    pManager = *DeviceManager::Create();
    
    // This initialization logic supports running two sensors at the same time.
   
    DeviceEnumerator<SensorDevice> isensor = pManager->EnumerateDevices<SensorDevice>();
    DeviceEnumerator<SensorDevice> oculusSensor;
    DeviceEnumerator<SensorDevice> oculusSensor2;
    
    while(isensor)
    {
        DeviceInfo di;
        if (isensor.GetDeviceInfo(&di))
        {
            if (strstr(di.ProductName, "Tracker"))
            {
                if (!oculusSensor)
                    oculusSensor = isensor;
                else if (!oculusSensor2)
                    oculusSensor2 = isensor;
            }
        }

        isensor.Next();
    }

    if (oculusSensor)
    {
        pSensor = *oculusSensor.CreateDevice();

        if (pSensor)
            pSensor->SetRange(SensorRange(4 * 9.81f, 8 * Math<float>::Pi, 1.0f), true);

        if (oculusSensor2)
        {
            // Second Oculus sensor, useful for comparing firmware behavior & settings.
            pSensor2 = *oculusSensor2.CreateDevice();

            if (pSensor2)
                pSensor2->SetRange(SensorRange(4 * 9.81f, 8 * Math<float>::Pi, 1.0f), true);
        }
    }

    oculusSensor.Clear();
    oculusSensor2.Clear();
       
    
    /*
    DeviceHandle hHMD = pManager->EnumerateDevices<HMDDevice>();
    HMDInfo      hmdInfo;
    if (hHMD)
    {        
        hHMD.GetDeviceInfo(&hmdInfo);
    }
    */

    if (pSensor)
        SFusion.AttachToSensor(pSensor);
    if (pSensor2)
        SFusion2.AttachToSensor(pSensor2);

    /*
    // Test rotation: This give rotations clockwise (CW) while looking from
    // origin in the direction of the axis.

    Vector3f xV(1,0,0);
    Vector3f zV(0,0,1);

    Vector3f rxV = Matrix4f::RotationZ(DegreeToRad(10.0f)).Transform(xV);
    Vector3f ryV = Matrix4f::RotationY(DegreeToRad(10.0f)).Transform(xV);
    Vector3f rzV = Matrix4f::RotationX(DegreeToRad(10.0f)).Transform(zV);
    */

    // Report relative mouse motion (not absolute position)
   // pPlatform->SetMouseMode(Mouse_Relative);

    const char* graphics = "d3d10";
    for (int i = 1; i < argc; i++)
        if (!strcmp(argv[i], "-r") && i < argc-1)
            graphics = argv[i+1];

    pRender = pPlatform->SetupGraphics(OVR_DEFAULT_RENDER_DEVICE_SET, graphics,
                                       RendererParams());
  
    //WireframeFill = pRender->CreateSimpleFill(Fill::F_Wireframe);


    
    // *** Rotating Box
    
    pBox = *new Container;
    pBox->Add(Ptr<Model>(        
       *Model::CreateAxisFaceColorBox(-2.0f, 2.0f, Color(0,   0xAA, 0),        // x = green
                                      -1.0f, 1.0f, Color(0xAA,0,    0),        // y = red
                                      -1.0f, 1.0f, Color(0,   0,    0xAA)) )); // z = blue 
    // Drop-down line from box, to make it easier to see differences in angle.
    Ptr<Model> downLine = *new Model(Prim_Lines);
    downLine->AddLine(Vertex(0.0f,-4.5f, 0.0f, 0xFFE0B0B0),
                      Vertex(0.0f, 0.0f, 0.0f, 0xFFE0B0B0));
    pBox->Add(downLine);
    Sc.World.Add(pBox);

    
    // Secondary rotating coordinate object, if we have two values.
    if (pSensor2)
    {
        pBox2 = *new Container;

        // Drop-down line from box, to make it easier to see differences in angle.
        Ptr<Model> lines = *new Model(Prim_Lines);
        lines->AddLine(Vertex( 0.0f,-4.0f, 0.0f, 0xFFA07070),  // -Y
                       Vertex( 0.0f, 0.0f, 0.0f, 0xFFA07070));
        lines->AddLine(Vertex(-4.0f, 0.0f, 0.0f, 0xFF70A070),  // -X
                       Vertex( 0.0f, 0.0f, 0.0f, 0xFF70A070));
        lines->AddLine(Vertex( 0.0f, 0.0f,-4.0f, 0xFF7070A0),  // -Z
                       Vertex( 0.0f, 0.0f, 0.0f, 0xFF7070A0));
        pBox2->Add(lines);
        Sc.World.Add(pBox2);
    }


    // *** World axis X,Y,Z rendering.

    pAxes = *new Model(Prim_Lines);
    pAxes->AddLine(Vertex(-8.0f, 0.0f, 0.0f, 0xFF40FF40),
                   Vertex( 8.0f, 0.0f, 0.0f, 0xFF40FF40)); // X
    pAxes->AddLine(Vertex( 7.6f, 0.4f, 0.4f, 0xFF40FF40),
                   Vertex( 8.0f, 0.0f, 0.0f, 0xFF40FF40)); // X - arrow
    pAxes->AddLine(Vertex( 7.6f,-0.4f,-0.4f, 0xFF40FF40),
                   Vertex( 8.0f, 0.0f, 0.0f, 0xFF40FF40)); // X - arrow

    pAxes->AddLine(Vertex( 0.0f,-8.0f, 0.0f, 0xFFFF4040),
                   Vertex( 0.0f, 8.0f, 0.0f, 0xFFFF4040)); // Y
    pAxes->AddLine(Vertex( 0.4f, 7.6f, 0.0f, 0xFFFF4040),
                   Vertex( 0.0f, 8.0f, 0.0f, 0xFFFF4040)); // Y - arrow
    pAxes->AddLine(Vertex(-0.4f, 7.6f, 0.0f, 0xFFFF4040),
                   Vertex( 0.0f, 8.0f, 0.0f, 0xFFFF4040)); // Y
    
    pAxes->AddLine(Vertex( 0.0f, 0.0f,-8.0f, 0xFF4040FF),
                   Vertex( 0.0f, 0.0f, 8.0f, 0xFF4040FF)); // Z
    pAxes->AddLine(Vertex( 0.4f, 0.0f, 7.6f, 0xFF4040FF),
                   Vertex( 0.0f, 0.0f, 8.0f, 0xFF4040FF)); // Z - arrow
    pAxes->AddLine(Vertex(-0.4f, 0.0f, 7.6f, 0xFF4040FF),
                   Vertex( 0.0f, 0.0f, 8.0f, 0xFF4040FF)); // Z - arrow
    Sc.World.Add(pAxes);
   

    SetView(CurrentView);


    LastUpdate = pPlatform->GetAppTime();
    return 0;
}
Example #18
void InputTestApp::OnIdle()
{
    double curtime = pPlatform->GetAppTime();
	time_t t = time(0);   // get time now
	struct tm * now = localtime(&t);

 //   float  dt      = float(LastUpdate - curtime);
    LastUpdate     = curtime;
    
    if (pBox)
    {
		Vector3f acceldata = SFusion.GetAcceleration();
		Vector3f gyrodata = SFusion.GetAngularVelocity();
		Vector3f magdata = SFusion.GetMagnetometer();	

        Quatf q = SFusion.GetOrientation();
        pBox->SetOrientation(q);

		//fstream outFile;
		//outFile.open("C://Users//Barrett//Documents//oculus_sensor_data.txt");
		// Append the sensor data to the CSV file
		ofstream outFile("C://Users//Barrett//Documents//oculus_sensor_data.csv", ios::app);
		outFile <<
			now->tm_sec << "," <<
			curtime << "," <<
			acceldata.x << "," << acceldata.y << "," << acceldata.z << "," <<
			gyrodata.x << "," << gyrodata.y << "," << gyrodata.z << "," <<
			magdata.x << "," << magdata.y << "," << magdata.z << "," <<
			q.x << "," << q.y << "," << q.z << "," << q.w << "\n";
		
   // Test Euler conversion, alternative to the above:
   //     Vector3f euler;
   //     SFusion.GetOrientation().GetEulerABC<Axis_Y, Axis_X, Axis_Z, Rotate_CCW, Handed_R>(&euler.y, &euler.x, &euler.z);
   //     Matrix4f mat = Matrix4f::RotationY(euler.y) * Matrix4f::RotationX(euler.x) * Matrix4f::RotationZ(euler.z);
   //  pBox->SetMatrix(mat);    

        // Update titlebar every 20th of a second.
        if ((curtime - LastTitleUpdate) > 0.05f)
        {
            char                          titleBuffer[512];
            SensorDevice::CoordinateFrame coord = SensorDevice::Coord_Sensor;
            if (pSensor)
                coord = pSensor->GetCoordinateFrame();

            OVR_sprintf(titleBuffer, 512, "OVR SensorBox %s %s  Ang: %0.3f",
                        (SFusion.IsGravityEnabled() ?  "" : "[Grav Off]"),
                        (coord == SensorDevice::Coord_HMD) ? "[HMD Coord]" : "",
                        CalcDownAngleDegrees(q));
            pPlatform->SetWindowTitle(titleBuffer);
            LastTitleUpdate = curtime;
        }
    }

    if (pBox2)
    {
        pBox2->SetOrientation(SFusion2.GetOrientation());
    }

    // Render
    int w, h;
    pPlatform->GetWindowSize(&w, &h);

    pRender->SetViewport(0, 0, w, h);

    pRender->Clear();
    pRender->BeginScene();

    pRender->SetProjection(Proj);
    pRender->SetDepthMode(1,1);
    
    Sc.Render(pRender, View);

    pRender->Present();

}
Example #19
File: main.cpp  Project: Tezar/tunnel
int main()
{
	// create device
    EventHandler receiver;
	Init();
	Output();
	ISceneNode* objects [MAX_OBJECTS];



	 IrrlichtDevice *device = createDevice(EDT_OPENGL, dimension2d<u32>(ResX, ResY), 32, fullscreen, false, vsync, &receiver); 
	 
	 receiver.device = device; 

    if (!device)
        return 1;

    IVideoDriver* driver = device->getVideoDriver();
    ISceneManager* smgr = device->getSceneManager();
    IGUIEnvironment* guienv = device->getGUIEnvironment();

	HMDDescriptor HMD;
	// Parameters from the Oculus Rift DK1
	HMD.hResolution = ResX;
	HMD.vResolution = ResY;
	HMD.hScreenSize = 0.14976;
	HMD.vScreenSize = 0.0936;
	HMD.interpupillaryDistance = 0.064;
	HMD.lensSeparationDistance = 0.064;
	HMD.eyeToScreenDistance = 0.041;
	HMD.distortionK[0] = 1.0;
	HMD.distortionK[1] = 0.22;
	HMD.distortionK[2] = 0.24;
	HMD.distortionK[3] = 0.0;

	HMDStereoRender renderer(device, HMD, 10); 


	#ifdef OCCULUS
		ICameraSceneNode* camera = smgr->addCameraSceneNode();
		camera->bindTargetAndRotation(false);
		camera->setTarget(vector3df(1,0,0));
	#else	
		ICameraSceneNode* camera = smgr->addCameraSceneNodeFPS();
	#endif	

	
    device->getCursorControl()->setVisible(false); 


	// load a faerie 
	IAnimatedMesh* faerie = smgr->getMesh("media/faerie.md2");
	IAnimatedMeshSceneNode* faerieNode = smgr->addAnimatedMeshSceneNode(faerie);
	faerieNode->setMaterialTexture(0, driver->getTexture("media/faerie2.bmp"));
	faerieNode->setMaterialFlag(EMF_LIGHTING, false);
	faerieNode->setPosition(vector3df(40,190,-1030));
	faerieNode->setRotation(vector3df(0,-90,0));
	faerieNode->setMD2Animation(EMAT_SALUTE);

	// load a dwarf
	IAnimatedMesh* dwarf = smgr->getMesh("media/dwarf.x");
	IAnimatedMeshSceneNode* dwarfNode = smgr->addAnimatedMeshSceneNode(dwarf);
	dwarfNode->setPosition(vector3df(40,-25,20));
	  
	
	Level currentLevel(device);
	currentLevel.makeLevel(0);

	smgr->setAmbientLight(video::SColorf(0.1,0.1,0.1,1));
	ILightSceneNode* light1 = smgr->addLightSceneNode( camera , vector3df(0,0,0), video::SColorf(0.3f,0.4f,0.4f), 80.0f, 1 );


	vector3df pos = vector3df(0,0,0);

	// Fill the tunnel with obstacles at random positions.
	srand(time(NULL));

	for(int i = 0; i < MAX_OBJECTS; i++){
		objects[i] = smgr->addCubeSceneNode(2);
		objects[i]->setMaterialFlag(EMF_LIGHTING, false);
		objects[i]->setPosition( vector3df( (rand() % 30) - 5, (rand() % 30) - 5, rand() % 80) );
	}

	//device->setInputReceivingSceneManager(smgr);
	
	// Temporaries used for the camera rotation below.
	vector3df tempRot;
	irr::core::quaternion tempQ;
	irr::core::matrix4 tempM;

	float round = 0;

    while(device->run())
    {
		round += 0.01;
        driver->beginScene(true, true, SColor(255,100,101,140));
		
		for(int i = 0; i < MAX_OBJECTS; i++){
			vector3df tmpPos = objects[i]->getPosition();
			if(tmpPos.Z > pos.Z) continue;
			
			objects[i]->setPosition( vector3df( (rand() % 30) - 15, (rand() % 30) - 15, rand() % 80 + pos.Z) );
		}
		
	#ifndef OCCULUS
			tempM.setRotationDegrees(vector3df(sin(round*0.5)*360-180, sin(round)*360-180, cos(round*0.8)*360-180));
			
			// transform forward vector of camera
			irr::core::vector3df frv = irr::core::vector3df (0.0f, 0.0f, 1.0f);
			tempM.transformVect(frv);
    
			// transform upvector of camera
		    irr::core::vector3df upv = irr::core::vector3df (0.0f, 1.0f, 0.0f);
			tempM.transformVect(upv);

		    camera->setUpVector(upv); //set up vector of camera
			camera->setTarget(frv); //set target of camera (look at point) (thx Zeuss for correcting it)

	#endif

		
		if(pSensor){
			Quatf quaternion = FusionResult.GetOrientation();

		   ICameraSceneNode* camera = smgr->getActiveCamera();
   
		   tempQ.set(-quaternion.z,quaternion.y,-quaternion.x, quaternion.w);
		   tempQ.normalize();
		   tempQ.toEuler(tempRot);
    
		   
			tempM.setRotationDegrees(tempRot);

			// transform forward vector of camera
			irr::core::vector3df frv = irr::core::vector3df (0.0f, 0.0f, 1.0f);
			tempM.transformVect(frv);
    
			// transform upvector of camera
		    irr::core::vector3df upv = irr::core::vector3df (0.0f, 1.0f, 0.0f);
			tempM.transformVect(upv);

		    camera->setUpVector(upv); //set up vector of camera
			camera->setTarget(frv); //set target of camera (look at point) (thx Zeuss for correcting it)

			// update absolute position
			camera->updateAbsolutePosition();



			float yaw, pitch, roll;
			quaternion.GetEulerAngles<Axis_Y, Axis_X, Axis_Z>(&yaw, &pitch, &roll);
			camera->getParent()->setRotation( vector3df(RadToDegree(pitch),RadToDegree(yaw),RadToDegree(roll)));
			//camera->setRotation( vector3df(RadToDegree(-pitch),RadToDegree(-yaw),RadToDegree(roll)));
			//camera->setProjectionMatrix(ToMatrix(quaternion));
			cout << " Yaw: " << RadToDegree(yaw) << 
				", Pitch: " << RadToDegree(pitch) << 
				", Roll: " << RadToDegree(roll) << endl;
		
			if (_kbhit()) exit(0);
		}

		#ifdef OCCULUS
			renderer.drawAll(smgr); 
		#else
			smgr->drawAll();
		#endif

        guienv->drawAll();

        driver->endScene();
    }


    device->drop();
	Clear();
    return 0;
}
Example #20
void Output()
{
	cout << "----- Oculus Console -----" << endl;

	if (pHMD)
	{
		cout << " [x] HMD Found" << endl;
	}
	else
	{
		cout << " [ ] HMD Not Found" << endl;
	}

	if (pSensor)
	{
		cout << " [x] Sensor Found" << endl;
	}
	else
	{
		cout << " [ ] Sensor Not Found" << endl;
	}

	cout << "--------------------------" << endl;

	if (InfoLoaded)
	{
		cout << " DisplayDeviceName: " << Info.DisplayDeviceName << endl;
		cout << " ProductName: " << Info.ProductName << endl;
		cout << " Manufacturer: " << Info.Manufacturer << endl;
		cout << " Version: " << Info.Version << endl;
		cout << " HResolution: " << Info.HResolution<< endl;
		cout << " VResolution: " << Info.VResolution<< endl;
		cout << " HScreenSize: " << Info.HScreenSize<< endl;
		cout << " VScreenSize: " << Info.VScreenSize<< endl;
		cout << " VScreenCenter: " << Info.VScreenCenter<< endl;
		cout << " EyeToScreenDistance: " << Info.EyeToScreenDistance << endl;
		cout << " LensSeparationDistance: " << Info.LensSeparationDistance << endl;
		cout << " InterpupillaryDistance: " << Info.InterpupillaryDistance << endl;
		cout << " DistortionK[0]: " << Info.DistortionK[0] << endl;
		cout << " DistortionK[1]: " << Info.DistortionK[1] << endl;
		cout << " DistortionK[2]: " << Info.DistortionK[2] << endl;
		cout << "--------------------------" << endl;
	}

	cout << endl << " Press ENTER to continue" << endl;

	cin.get();

	while(pSensor)
	{
		Vector3f acc = FusionResult.GetAcceleration();
		Quatf quaternion = FusionResult.GetOrientation();

		float yaw, pitch, roll;
		quaternion.GetEulerAngles<Axis_Y, Axis_X, Axis_Z>(&yaw, &pitch, &roll);

		cout << "Yaw=" << RadToDegree(yaw) << 
			" Pitch=" << RadToDegree(pitch) << 
			" Roll=" << RadToDegree(roll) <<
			" X=" << acc.x / STD_GRAV << 
			" Y=" << acc.y / STD_GRAV << 
			" Z=" << acc.z / STD_GRAV << endl;

		Sleep(50);

		if (_kbhit()) exit(0);
	}
}
extern "C" RIFTAPI_API bool ResetRift()
{
	SFusion.Reset();
	return true;
}