Example no. 1
void ClutchTool::buttonCallback(int,int deviceButtonIndex,InputDevice::ButtonCallbackData* cbData)
	{
	if(deviceButtonIndex==0) // Clutch button
		{
		bool mustInit=false;
		if(factory->clutchButtonToggleFlag)
			{
			if(!cbData->newButtonState)
				{
				clutchButtonState=!clutchButtonState;
				mustInit=!clutchButtonState;
				}
			}
		else
			{
			clutchButtonState=cbData->newButtonState;
			mustInit=!clutchButtonState;
			}
		
		if(mustInit)
			{
			/* Calculate the new offset transformation: */
			Vector offsetT=transformedDevice->getPosition()-getDevicePosition(0);
			Rotation offsetR=transformedDevice->getTransformation().getRotation()*Geometry::invert(getDeviceTransformation(0).getRotation());
			offset=TrackerState(offsetT,offsetR);
			}
		}
	else // Pass-through button
		{
		if(setButtonState(deviceButtonIndex-1,cbData->newButtonState))
			transformedDevice->setButtonState(deviceButtonIndex-1,buttonStates[deviceButtonIndex-1]);
		}
	}
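A minimal, translation-only sketch of the clutching idea above, in plain C++ with hypothetical variables rather than the Vrui API: when the clutch is released, the offset between the virtual device and the source device is captured (the mustInit branch), and one plausible use of that offset on later frames is to keep the virtual device at the frozen offset from the source device.

#include <cstdio>

int main(void)
	{
	double sourcePos[3]={1.0,2.0,3.0};   // physical device position when the clutch is released
	double virtualPos[3]={4.0,2.0,0.0};  // virtual device position at that same moment
	
	/* Capture the offset between the virtual and the source device: */
	double offset[3];
	for(int i=0;i<3;++i)
		offset[i]=virtualPos[i]-sourcePos[i];
	
	/* On a later frame, the source device has moved; keep the virtual device at the frozen offset: */
	double newSourcePos[3]={1.5,2.5,3.0};
	for(int i=0;i<3;++i)
		virtualPos[i]=newSourcePos[i]+offset[i];
	
	std::printf("virtual device now at (%g, %g, %g)\n",virtualPos[0],virtualPos[1],virtualPos[2]);
	return 0;
	}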
void PlaneSnapInputDeviceTool::buttonCallback(int,InputDevice::ButtonCallbackData* cbData)
	{
	if(cbData->newButtonState) // Button has just been pressed
		{
		/* Try activating the tool: */
		if(interactionDevice->isRayDevice())
			{
			/* Pick a virtual input device using ray selection: */
			activate(calcInteractionRay());
			}
		else
			{
			/* Pick a virtual input device using point selection: */
			activate(getInteractionPosition());
			}
		
		/* Check if the tool was activated: */
		if(isActive())
			{
			/* Check if there are currently three selected points: */
			if(numSelectedPoints==3)
				{
				/* Snap the selected virtual input device to the plane defined by the three selected points: */
				Vector y=Geometry::cross(selectedPoints[1]-selectedPoints[0],selectedPoints[2]-selectedPoints[0]);
				Scalar offset=(selectedPoints[0]*y+selectedPoints[1]*y+selectedPoints[2]*y)/Scalar(3);
				Vector x=Geometry::normal(y);
				Point devicePos=getInverseNavigationTransformation().transform(getGrabbedDevice()->getPosition());
				Scalar lambda=(offset-devicePos*y)/Geometry::sqr(y);
				devicePos+=y*lambda;
				NavTransform dt=NavTransform(devicePos-Point::origin,NavTransform::Rotation::fromBaseVectors(x,y),Scalar(1));
				dt.leftMultiply(getNavigationTransformation());
				getGrabbedDevice()->setTransformation(TrackerState(dt.getTranslation(),dt.getRotation()));
				}
			
			/* Deactivate the tool again: */
			deactivate();
			}
		else
			{
			/* Start dragging another selection point: */
			if(numSelectedPoints==3)
				numSelectedPoints=0;
			draggingPoint=true;
			++numSelectedPoints;
			}
		}
	else // Button has just been released
		{
		/* Stop dragging a selection point: */
		draggingPoint=false;
		}
	}
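The plane math in the snapping branch above can be read in isolation. The following standalone sketch (plain C++ arrays and helpers, not the Vrui Geometry types, with made-up sample points) builds the plane through three points and moves a fourth point onto it, mirroring the normal, offset, and lambda computations in the tool.

#include <cstdio>

static void cross(const double a[3],const double b[3],double r[3])
	{
	r[0]=a[1]*b[2]-a[2]*b[1];
	r[1]=a[2]*b[0]-a[0]*b[2];
	r[2]=a[0]*b[1]-a[1]*b[0];
	}

static double dot(const double a[3],const double b[3])
	{
	return a[0]*b[0]+a[1]*b[1]+a[2]*b[2];
	}

int main(void)
	{
	double p0[3]={0.0,0.0,1.0},p1[3]={1.0,0.0,1.0},p2[3]={0.0,1.0,1.0}; // three selected points
	double q[3]={0.5,0.5,4.0};                                          // point to snap to the plane
	
	/* The plane normal is the cross product of two edge vectors: */
	double e1[3]={p1[0]-p0[0],p1[1]-p0[1],p1[2]-p0[2]};
	double e2[3]={p2[0]-p0[0],p2[1]-p0[1],p2[2]-p0[2]};
	double n[3];
	cross(e1,e2,n);
	
	/* The plane offset is the average of the three points' dot products with the normal: */
	double offset=(dot(p0,n)+dot(p1,n)+dot(p2,n))/3.0;
	
	/* Move q along the normal until it satisfies dot(q,n) == offset: */
	double lambda=(offset-dot(q,n))/dot(n,n);
	for(int i=0;i<3;++i)
		q[i]+=n[i]*lambda;
	
	std::printf("snapped point: (%g, %g, %g)\n",q[0],q[1],q[2]); // prints (0.5, 0.5, 1), on the plane z=1
	return 0;
	}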
void InputDeviceAdapterTrackd::updateInputDevices(void)
	{
	for(int deviceIndex=0;deviceIndex<numInputDevices;++deviceIndex)
		{
		/* Get pointer to the input device: */
		InputDevice* device=inputDevices[deviceIndex];
		
		/* Don't update tracker-related state for devices that are not tracked: */
		if(trackerIndexMapping[deviceIndex]>=0)
			{
			/* Get device's tracker state from sensor shared memory segment: */
			SensorData& sd=*sensors[trackerIndexMapping[deviceIndex]];
			
			/*****************************************************************
			Construct device's transformation:
			*****************************************************************/
			
			/* Translation vector is straightforward: */
			Vector translation=Vector(Scalar(sd.position[0]),Scalar(sd.position[1]),Scalar(sd.position[2]));
			
			/* To assemble the orientation, we assume all angles are given in degrees and apply the rotations in Z, X, Y order: */
			Rotation rotation=Rotation::rotateZ(Math::rad(Scalar(sd.angles[0])));
			rotation*=Rotation::rotateX(Math::rad(Scalar(sd.angles[1])));
			rotation*=Rotation::rotateY(Math::rad(Scalar(sd.angles[2])));
			
			/* Calibrate the device's position and orientation from the trackd daemon's space to Vrui's physical space: */
			OGTransform calibratedTransformation=calibrationTransformation;
			calibratedTransformation*=OGTransform(translation,rotation,Scalar(1));
			
			/* Calibrate and set the device's transformation: */
			device->setTransformation(TrackerState(calibratedTransformation.getTranslation(),calibratedTransformation.getRotation()));
			
			/* Set device's linear and angular velocities to zero because we don't know any better: */
			device->setLinearVelocity(Vector::zero);
			device->setAngularVelocity(Vector::zero);
			}
		
		/* Update button states: */
		for(int i=0;i<device->getNumButtons();++i)
			device->setButtonState(i,buttons[buttonIndexMapping[deviceIndex][i]]);
		
		/* Update valuator states: */
		for(int i=0;i<device->getNumValuators();++i)
			device->setValuator(i,valuators[valuatorIndexMapping[deviceIndex][i]]);
		}
	
	/* Schedule the next Vrui frame at the update interval if asked to do so: */
	if(updateInterval!=0.0)
		Vrui::scheduleUpdate(getApplicationTime()+updateInterval);
	}
TrackerState SixAxisTransformToolFactory::Configuration::getHomePosition(void) const
	{
	if(homePosition.isSpecified())
		{
		/* Return the configured home position: */
		return homePosition.getValue();
		}
	else
		{
		/* Calculate the home position from the current display center and environment orientation: */
		Vector x=getForwardDirection()^getUpDirection();
		Vector y=getUpDirection()^x;
		return TrackerState(getDisplayCenter()-Point::origin,Rotation::fromBaseVectors(x,y));
		}
	}
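A standalone sketch of the frame construction above, using plain C arrays instead of the Vrui Vector type and made-up directions: in a right-handed coordinate system, x = forward x up points to the right, and y = up x x is the forward direction re-orthogonalized against up, so (x, y) span a level frame facing forward.

#include <cstdio>

static void cross(const double a[3],const double b[3],double r[3])
	{
	r[0]=a[1]*b[2]-a[2]*b[1];
	r[1]=a[2]*b[0]-a[0]*b[2];
	r[2]=a[0]*b[1]-a[1]*b[0];
	}

int main(void)
	{
	double forward[3]={0.0,1.0,0.2}; // slightly tilted forward direction
	double up[3]={0.0,0.0,1.0};
	
	double x[3],y[3];
	cross(forward,up,x); // right direction, orthogonal to both forward and up
	cross(up,x,y);       // level forward direction, orthogonal to up and x
	
	std::printf("x = (%g, %g, %g), y = (%g, %g, %g)\n",x[0],x[1],x[2],y[0],y[1],y[2]);
	return 0;
	}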
void InputDeviceAdapterVisBox::updateInputDevices(void)
	{
	/*********************************************************************
	Convert the most recent xyzhpr values from the shared memory segment
	into a tracker state:
	*********************************************************************/
	
	/* Translation vector is straightforward: */
	Vector translation=Vector(Scalar(xyzhpr[0]),Scalar(xyzhpr[1]),Scalar(xyzhpr[2]));
	
	/* To assemble the orientation, we assume all angles are given in degrees and apply the rotations in Z, X, Y order: */
	Rotation rotation=Rotation::rotateZ(Math::rad(xyzhpr[3]));
	rotation*=Rotation::rotateX(Math::rad(xyzhpr[4]));
	rotation*=Rotation::rotateY(Math::rad(xyzhpr[5]));
	
	inputDevices[0]->setTransformation(TrackerState(translation,rotation));
	}
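Both updateInputDevices() functions above assemble the orientation the same way: three angles in degrees, composed as Rz * Rx * Ry. The following standalone sketch shows that composition with explicit 3x3 matrices (plain C++, not the Vrui Rotation class; the interpretation of the three angles as heading, pitch, and roll is an assumption).

#include <cmath>
#include <cstdio>

/* Multiply two 3x3 matrices: r = a*b: */
static void matMul(double a[3][3],double b[3][3],double r[3][3])
	{
	for(int i=0;i<3;++i)
		for(int j=0;j<3;++j)
			{
			r[i][j]=0.0;
			for(int k=0;k<3;++k)
				r[i][j]+=a[i][k]*b[k][j];
			}
	}

/* Build an elementary rotation about coordinate axis 0 (X), 1 (Y), or 2 (Z) by an angle in degrees: */
static void axisRot(int axis,double degrees,double m[3][3])
	{
	const double pi=3.14159265358979323846;
	double a=degrees*pi/180.0;
	double c=std::cos(a),s=std::sin(a);
	int u=(axis+1)%3,v=(axis+2)%3;
	for(int i=0;i<3;++i)
		for(int j=0;j<3;++j)
			m[i][j]=(i==j)?1.0:0.0;
	m[u][u]=c; m[u][v]=-s;
	m[v][u]=s; m[v][v]=c;
	}

int main(void)
	{
	double heading=90.0,pitch=0.0,roll=0.0; // angles[0..2] / xyzhpr[3..5], in degrees
	
	double rz[3][3],rx[3][3],ry[3][3],tmp[3][3],rot[3][3];
	axisRot(2,heading,rz);
	axisRot(0,pitch,rx);
	axisRot(1,roll,ry);
	
	/* Compose in the same order as above: rotation = Rz * Rx * Ry: */
	matMul(rz,rx,tmp);
	matMul(tmp,ry,rot);
	
	/* Rotating the +X axis by a 90-degree heading about Z yields +Y: */
	double x[3]={1.0,0.0,0.0},y[3]={0.0,0.0,0.0};
	for(int i=0;i<3;++i)
		for(int j=0;j<3;++j)
			y[i]+=rot[i][j]*x[j];
	std::printf("rotated +X: (%g, %g, %g)\n",y[0],y[1],y[2]);
	return 0;
	}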
Example no. 6
void UIManagerSpherical::projectDevice(InputDevice* device) const
	{
	/* Get the device's ray: */
	Ray ray=device->getRay();
	
	/* Check if the line defined by the device's ray intersects the sphere: */
	Scalar d2=Geometry::sqr(ray.getDirection());
	Vector oc=ray.getOrigin()-sphere.getCenter();
	Scalar ph=(oc*ray.getDirection());
	Scalar det=Math::sqr(ph)-(Geometry::sqr(oc)-Math::sqr(sphere.getRadius()))*d2;
	Scalar lambda(0);
	Point devicePos;
	Vector y;
	if(det>=Scalar(0))
		{
		/* Calculate the point where the line exits the sphere: */
		det=Math::sqrt(det);
		lambda=(-ph+det)/d2; // Second intersection
		devicePos=ray(lambda);
		y=devicePos-sphere.getCenter();
		}
	else
		{
		/* Project the device's position onto the sphere: */
		y=device->getPosition()-sphere.getCenter();
		Scalar yLen=y.mag();
		if(yLen==Scalar(0))
			{
			y=getForwardDirection();
			yLen=y.mag();
			}
		devicePos=sphere.getCenter()+y*(sphere.getRadius()/yLen);
		}
	
	/* Calculate a device orientation such that the y axis is normal to the sphere and points outwards: */
	Vector x=y^getUpDirection();
	if(x.mag()==Scalar(0))
		x=getForwardDirection()^getUpDirection();
	
	/* Set the device transformation: */
	device->setTransformation(TrackerState(devicePos-Point::origin,Rotation::fromBaseVectors(x,y)));
	
	/* Update the device's ray: */
	device->setDeviceRay(device->getTransformation().inverseTransform(ray.getDirection()),-lambda);
	}
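The intersection test above solves |o + lambda*d - c|^2 = r^2 for lambda and keeps the larger root, i.e. the point where the ray exits the sphere. The following standalone sketch (plain C++ with made-up numbers, not the Vrui Geometry types) sets up the same quadratic with the same d2, ph, and det terms.

#include <cmath>
#include <cstdio>

int main(void)
	{
	double o[3]={0.0,0.0,0.0};   // ray origin
	double d[3]={1.0,0.0,0.0};   // ray direction (need not be normalized)
	double c[3]={5.0,0.0,0.0};   // sphere center
	double r=2.0;                // sphere radius
	
	/* Set up the quadratic d2*lambda^2 + 2*ph*lambda + (|oc|^2 - r^2) = 0: */
	double oc[3]={o[0]-c[0],o[1]-c[1],o[2]-c[2]};
	double d2=d[0]*d[0]+d[1]*d[1]+d[2]*d[2];
	double ph=oc[0]*d[0]+oc[1]*d[1]+oc[2]*d[2];
	double det=ph*ph-(oc[0]*oc[0]+oc[1]*oc[1]+oc[2]*oc[2]-r*r)*d2;
	
	if(det>=0.0)
		{
		/* Take the larger root, the point where the ray exits the sphere: */
		double lambda=(-ph+std::sqrt(det))/d2;
		std::printf("exit point at lambda = %g -> (%g, %g, %g)\n",lambda,o[0]+d[0]*lambda,o[1]+d[1]*lambda,o[2]+d[2]*lambda);
		}
	else
		std::printf("ray misses the sphere\n");
	return 0;
	}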
void InputDeviceAdapterDeviceDaemon::updateInputDevices(void)
	{
	/* Check for error messages from the device client: */
	{
	Threads::Spinlock::Lock errorMessageLock(errorMessageMutex);
	for(std::vector<std::string>::iterator emIt=errorMessages.begin();emIt!=errorMessages.end();++emIt)
		showErrorMessage("Vrui::InputDeviceAdapterDeviceDaemon",emIt->c_str());
	errorMessages.clear();
	}
	
	/* Update all managed input devices: */
	deviceClient.lockState();
	const VRDeviceState& state=deviceClient.getState();
	for(int deviceIndex=0;deviceIndex<numInputDevices;++deviceIndex)
		{
		/* Get pointer to the input device: */
		InputDevice* device=inputDevices[deviceIndex];
		
		/* Don't update tracker-related state for devices that are not tracked: */
		if(trackerIndexMapping[deviceIndex]>=0)
			{
			/* Get device's tracker state from VR device client: */
			const VRDeviceState::TrackerState& ts=state.getTrackerState(trackerIndexMapping[deviceIndex]);
			
			/* Set device's transformation: */
			device->setTransformation(TrackerState(ts.positionOrientation));
			
			/* Set device's linear and angular velocities: */
			device->setLinearVelocity(Vector(ts.linearVelocity));
			device->setAngularVelocity(Vector(ts.angularVelocity));
			}
		
		/* Update button states: */
		for(int i=0;i<device->getNumButtons();++i)
			device->setButtonState(i,state.getButtonState(buttonIndexMapping[deviceIndex][i]));
		
		/* Update valuator states: */
		for(int i=0;i<device->getNumValuators();++i)
			device->setValuator(i,state.getValuatorState(valuatorIndexMapping[deviceIndex][i]));
		}
	
	deviceClient.unlockState();
	}
Example no. 8
void WaldoTool::frame(void)
	{
	/* Act depending on the waldo activation state: */
	if(transformActive)
		{
		/* Calculate the input device transformation update: */
		const TrackerState& current=sourceDevice->getTransformation();
		Vector translation=current.getTranslation()-last.getTranslation();
		Vector rotation=(current.getRotation()*Geometry::invert(last.getRotation())).getScaledAxis();
		last=current;
		
		/* Scale linear and angular motion: */
		translation*=factory->linearScale;
		rotation*=factory->angularScale;
		
		/* Set the virtual input device to the result transformation: */
		const TrackerState& waldoTransform=transformedDevice->getTransformation();
		TrackerState::Rotation waldoRotation=waldoTransform.getRotation();
		waldoRotation.leftMultiply(TrackerState::Rotation(rotation));
		waldoRotation.renormalize();
		Vector waldoTranslation=waldoTransform.getTranslation();
		waldoTranslation+=translation;
		transformedDevice->setTransformation(TrackerState(waldoTranslation,waldoRotation));
		
		if(numPressedButtons==0)
			{
			/* Deactivate the waldo transformation: */
			transformActive=false;
			
			/* Deactivate the virtual input device's glyph: */
			waldoGlyph->disable();
			}
		}
	else
		{
		/* Snap the virtual input device to the source input device: */
		resetDevice();
		}
	}
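A translation-only illustration of the waldo behavior above, in plain C++ with hypothetical values rather than the Vrui API: the per-frame motion of the source device is scaled down before being applied to the virtual device, which gives fine-grained positioning from coarse hand motion.

#include <cstdio>

int main(void)
	{
	double linearScale=0.25;          // assumed scale factor (factory->linearScale in the tool)
	double last[3]={0.0,0.0,0.0};     // source device position on the previous frame
	double current[3]={0.4,0.0,0.1};  // source device position on this frame
	double waldo[3]={10.0,5.0,0.0};   // virtual ("waldo") device position
	
	/* Apply the scaled per-frame delta to the virtual device and remember the current pose: */
	for(int i=0;i<3;++i)
		{
		waldo[i]+=(current[i]-last[i])*linearScale;
		last[i]=current[i];
		}
	
	std::printf("waldo device now at (%g, %g, %g)\n",waldo[0],waldo[1],waldo[2]);
	return 0;
	}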
Example no. 9
void InputDeviceAdapterPlayback::updateInputDevices(void)
	{
	/* Do nothing if at end of file: */
	if(done)
		return;
	
	if(synchronizePlayback)
		{
		Misc::Time rt=Misc::Time::now();
		double realTime=double(rt.tv_sec)+double(rt.tv_nsec)/1000000000.0;
		
		if(firstFrame)
			{
			/* Calculate the offset between the saved timestamps and the system's wall clock time: */
			timeStampOffset=nextTimeStamp-realTime;
			}
		else
			{
			/* Check if there is positive drift between the system's offset wall clock time and the next time stamp: */
			double delta=nextTimeStamp-(realTime+timeStampOffset);
			if(delta>0.0)
				{
				/* Block to correct the drift: */
				vruiDelay(delta);
				}
			}
		}
	
	/* Update time stamp and synchronize Vrui's application timer: */
	timeStamp=nextTimeStamp;
	synchronize(timeStamp);
	
	/* Start sound playback: */
	if(firstFrame&&soundPlayer!=0)
		soundPlayer->start();
	
	/* Update all input devices: */
	for(int device=0;device<numInputDevices;++device)
		{
		/* Update tracker state: */
		if(inputDevices[device]->getTrackType()!=InputDevice::TRACK_NONE)
			{
			TrackerState::Vector translation;
			inputDeviceDataFile.read(translation.getComponents(),3);
			Scalar quat[4];
			inputDeviceDataFile.read(quat,4);
			inputDevices[device]->setTransformation(TrackerState(translation,TrackerState::Rotation::fromQuaternion(quat)));
			}
		
		/* Update button states: */
		for(int i=0;i<inputDevices[device]->getNumButtons();++i)
			{
			int buttonState=inputDeviceDataFile.read<int>();
			inputDevices[device]->setButtonState(i,buttonState);
			}
		
		/* Update valuator states: */
		for(int i=0;i<inputDevices[device]->getNumValuators();++i)
			{
			double valuatorState=inputDeviceDataFile.read<double>();
			inputDevices[device]->setValuator(i,valuatorState);
			}
		}
	
	/* Read time stamp of next data frame: */
	try
		{
		nextTimeStamp=inputDeviceDataFile.read<double>();
		
		/* Request an update for the next frame: */
		requestUpdate();
		}
	catch(const Misc::File::ReadError&)
		{
		done=true;
		nextTimeStamp=Math::Constants<double>::max;
		
		if(quitWhenDone)
			{
			/* Request exiting the program: */
			shutdown();
			}
		}
	
	if(saveMovie)
		{
		if(firstFrame)
			{
			/* Get a pointer to the window from which to save movie frames: */
			if(movieWindowIndex>=0&&movieWindowIndex<getNumWindows())
				movieWindow=getWindow(movieWindowIndex);
			else
				std::cerr<<"InputDeviceAdapterPlayback: Not saving movie due to invalid movie window index "<<movieWindowIndex<<std::endl;
			}
		
		if(movieWindow!=0)
			{
			/* Copy the last saved screenshot if multiple movie frames needed to be taken during the last Vrui frame: */
			while(nextMovieFrameTime<timeStamp)
				{
				/* Copy the last saved screenshot: */
				pid_t childPid=fork();
				if(childPid==0)
					{
					/* Create the old and new file names: */
					char oldImageFileName[1024];
					snprintf(oldImageFileName,sizeof(oldImageFileName),movieFileNameTemplate.c_str(),nextMovieFrameCounter-1);
					char imageFileName[1024];
					snprintf(imageFileName,sizeof(imageFileName),movieFileNameTemplate.c_str(),nextMovieFrameCounter);
					
					/* Execute the cp system command: */
					char* cpArgv[10];
					int cpArgc=0;
					cpArgv[cpArgc++]=const_cast<char*>("/bin/cp");
					cpArgv[cpArgc++]=oldImageFileName;
					cpArgv[cpArgc++]=imageFileName;
					cpArgv[cpArgc++]=0;
					execvp(cpArgv[0],cpArgv);
					
					/* If execvp() returns, it failed; terminate the child process immediately: */
					_exit(1);
					}
				else
					{
					/* Wait for the copy process to finish: */
					waitpid(childPid,0,0);
					}
				
				/* Advance the frame counters: */
				nextMovieFrameTime+=movieFrameTimeInterval;
				++nextMovieFrameCounter;
				}
			
			if(nextTimeStamp>nextMovieFrameTime)
				{
				/* Request a screenshot from the movie window: */
				char imageFileName[1024];
				snprintf(imageFileName,sizeof(imageFileName),movieFileNameTemplate.c_str(),nextMovieFrameCounter);
				movieWindow->requestScreenshot(imageFileName);
				
				/* Advance the movie frame counters: */
				nextMovieFrameTime+=movieFrameTimeInterval;
				++nextMovieFrameCounter;
				}
			}
		}
	
	firstFrame=false;
	}
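The saveMovie branch above converts irregular application frames into fixed-rate movie frames: movie frames that fell entirely before the current data frame get a copy of the previous screenshot, and a fresh screenshot is requested only when the next movie frame is due before the next data frame. The following sketch mirrors that pacing with plain C++ and made-up frame times (not the Vrui API).

#include <cstdio>

int main(void)
	{
	double movieFrameTimeInterval=1.0/30.0; // assumed 30 movie frames per second
	double nextMovieFrameTime=0.0;
	int nextMovieFrameCounter=0;
	
	/* Pretend the application produced data frames at these (irregular) time stamps: */
	double frameTimes[]={0.0,0.02,0.15,0.18,0.30};
	
	for(int f=0;f<4;++f)
		{
		double timeStamp=frameTimes[f];
		double nextTimeStamp=frameTimes[f+1];
		
		/* Re-use the previous screenshot for every movie frame that fell before this data frame: */
		while(nextMovieFrameTime<timeStamp)
			{
			std::printf("movie frame %d: copy of previous screenshot\n",nextMovieFrameCounter);
			nextMovieFrameTime+=movieFrameTimeInterval;
			++nextMovieFrameCounter;
			}
		
		/* Take a fresh screenshot only if the next movie frame is due before the next data frame: */
		if(nextTimeStamp>nextMovieFrameTime)
			{
			std::printf("movie frame %d: new screenshot of frame at t=%g\n",nextMovieFrameCounter,timeStamp);
			nextMovieFrameTime+=movieFrameTimeInterval;
			++nextMovieFrameCounter;
			}
		}
	return 0;
	}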
void InputDeviceAdapterDeviceDaemon::updateInputDevices(void)
	{
	/* Check for error messages from the device client: */
	{
	Threads::Spinlock::Lock errorMessageLock(errorMessageMutex);
	for(std::vector<std::string>::iterator emIt=errorMessages.begin();emIt!=errorMessages.end();++emIt)
		showErrorMessage("Vrui::InputDeviceAdapterDeviceDaemon",emIt->c_str());
	errorMessages.clear();
	}
	
	/* Update all managed input devices: */
	deviceClient.lockState();
	const VRDeviceState& state=deviceClient.getState();
	
	#ifdef MEASURE_LATENCY
	
	Realtime::TimePointMonotonic latencyNow;
	VRDeviceState::TimeStamp latencyTs=VRDeviceState::TimeStamp(latencyNow.tv_sec*1000000+(latencyNow.tv_nsec+500)/1000);
	
	double staleness=0.0;
	for(int i=0;i<state.getNumTrackers();++i)
		staleness+=double(latencyTs-state.getTrackerTimeStamp(i));
	printf("Tracking data staleness: %f ms\n",staleness*0.001/double(state.getNumTrackers()));
	
	#endif
	
	/* Get the current time for input device motion prediction: */
	Realtime::TimePointMonotonic now;
	VRDeviceState::TimeStamp nowTs=VRDeviceState::TimeStamp(now.tv_sec*1000000+(now.tv_nsec+500)/1000);
	
	for(int deviceIndex=0;deviceIndex<numInputDevices;++deviceIndex)
		{
		/* Get pointer to the input device: */
		InputDevice* device=inputDevices[deviceIndex];
		
		/* Don't update tracker-related state for devices that are not tracked: */
		if(trackerIndexMapping[deviceIndex]>=0)
			{
			/* Get device's tracker state from VR device client: */
			const VRDeviceState::TrackerState& ts=state.getTrackerState(trackerIndexMapping[deviceIndex]);
			
			/* Motion-predict the device's tracker state from its sampling time to the current time: */
			typedef VRDeviceState::TrackerState::PositionOrientation PO;
			
			float predictionDelta=float(nowTs-state.getTrackerTimeStamp(trackerIndexMapping[deviceIndex]))*1.0e-6f+motionPredictionDelta;
			
			PO::Rotation predictRot=PO::Rotation::rotateScaledAxis(ts.angularVelocity*predictionDelta)*ts.positionOrientation.getRotation();
			predictRot.renormalize();
			PO::Vector predictTrans=ts.linearVelocity*predictionDelta+ts.positionOrientation.getTranslation();
			
			#ifdef SAVE_TRACKERSTATES
			predictedFile->write<Misc::UInt32>(nowTs+Misc::UInt32(predictionDelta*1.0e6f+0.5f));
			Misc::Marshaller<PO>::write(PO(predictTrans,predictRot),*predictedFile);
			#endif
			
			/* Set device's transformation: */
			device->setTransformation(TrackerState(predictTrans,predictRot));
			
			/* Set device's linear and angular velocities: */
			device->setLinearVelocity(Vector(ts.linearVelocity));
			device->setAngularVelocity(Vector(ts.angularVelocity));
			}
		
		/* Update button states: */
		for(int i=0;i<device->getNumButtons();++i)
			device->setButtonState(i,state.getButtonState(buttonIndexMapping[deviceIndex][i]));
		
		/* Update valuator states: */
		for(int i=0;i<device->getNumValuators();++i)
			device->setValuator(i,state.getValuatorState(valuatorIndexMapping[deviceIndex][i]));
		}
	
	deviceClient.unlockState();
	}
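The motion prediction above advances each sampled pose from its sampling time to the current time: the position is extrapolated linearly with the linear velocity, and the orientation is pre-multiplied by the incremental rotation obtained from the angular velocity times the prediction interval. The following standalone sketch shows the same step with a minimal quaternion type (plain C++ with assumed values, not the Vrui or VRDeviceState types).

#include <cmath>
#include <cstdio>

struct Quat
	{
	double w,x,y,z;
	};

/* Convert a scaled rotation axis (angular velocity * dt, in radians) into a quaternion: */
static Quat fromScaledAxis(double ax,double ay,double az)
	{
	double angle=std::sqrt(ax*ax+ay*ay+az*az);
	Quat q={1.0,0.0,0.0,0.0};
	if(angle>0.0)
		{
		double s=std::sin(angle*0.5)/angle;
		q.w=std::cos(angle*0.5);
		q.x=ax*s; q.y=ay*s; q.z=az*s;
		}
	return q;
	}

/* Hamilton product of two quaternions: */
static Quat mul(const Quat& a,const Quat& b)
	{
	Quat r;
	r.w=a.w*b.w-a.x*b.x-a.y*b.y-a.z*b.z;
	r.x=a.w*b.x+a.x*b.w+a.y*b.z-a.z*b.y;
	r.y=a.w*b.y-a.x*b.z+a.y*b.w+a.z*b.x;
	r.z=a.w*b.z+a.x*b.y-a.y*b.x+a.z*b.w;
	return r;
	}

int main(void)
	{
	double dt=0.02;                   // prediction interval in seconds
	double pos[3]={0.0,1.0,0.0};      // sampled position
	double linVel[3]={0.5,0.0,0.0};   // linear velocity (units/s)
	double angVel[3]={0.0,0.0,1.0};   // angular velocity (rad/s)
	Quat orient={1.0,0.0,0.0,0.0};    // sampled orientation
	
	/* Predict the position by linear extrapolation: */
	double predPos[3];
	for(int i=0;i<3;++i)
		predPos[i]=pos[i]+linVel[i]*dt;
	
	/* Predict the orientation by applying the incremental rotation on the left, as in the adapter above: */
	Quat predOrient=mul(fromScaledAxis(angVel[0]*dt,angVel[1]*dt,angVel[2]*dt),orient);
	
	std::printf("predicted position: (%g, %g, %g), orientation w=%g\n",predPos[0],predPos[1],predPos[2],predOrient.w);
	return 0;
	}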