Example #1
static int position_orientationto(lua_State* l)
{
    CelxLua celx(l);

    celx.checkArgs(3, 3, "Two arguments expected for position:orientationto");
    
    UniversalCoord* src = this_position(l);
    UniversalCoord* target = to_position(l, 2);
    
    if (target == NULL)
    {
        celx.doError("First argument to position:orientationto must be a position");
    }
    
    Vec3d* upd = celx.toVector(3);
    if (upd == NULL)
    {
        celx.doError("Second argument to position:orientationto must be a vector");
    }
    
    // Build an orthonormal basis that looks from src toward target,
    // using *upd as the approximate up direction.
    Vec3d src2target = *target - *src;
    src2target.normalize();
    Vec3d v = src2target ^ *upd;   // right = forward x up
    v.normalize();
    Vec3d u = v ^ src2target;      // corrected up = right x forward
    Quatd qd = Quatd(Mat3d(v, u, -src2target));
    celx.newRotation(qd);
    
    return 1;
}
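The function above builds a classic look-at basis: forward toward the target, right = forward x up, corrected up = right x forward. A minimal standalone sketch of the same construction, assuming Eigen in place of Celestia's vecmath types (row versus column conventions vary by library):

#include <Eigen/Geometry>

// Sketch only: builds the orthonormal look-at basis and converts it to a
// quaternion; Eigen is an assumption, Celestia uses its own Vec3d/Mat3d/Quatd.
Eigen::Quaterniond orientationTo(const Eigen::Vector3d& src,
                                 const Eigen::Vector3d& target,
                                 const Eigen::Vector3d& up)
{
    Eigen::Vector3d f = (target - src).normalized(); // forward
    Eigen::Vector3d r = f.cross(up).normalized();    // right = forward x up
    Eigen::Vector3d u = r.cross(f);                  // corrected up
    Eigen::Matrix3d m;
    m.col(0) = r;
    m.col(1) = u;
    m.col(2) = -f;                                   // camera looks down -Z
    return Eigen::Quaterniond(m);
}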
Example #2
// This is a "perceptually tuned predictive filter", which means that it is optimized
// for improvements in the VR experience, rather than pure error.  In particular,
// jitter is more perceptible at lower speeds whereas latency is more perceptible
// after a high-speed motion.  Therefore, the prediction interval is dynamically
// adjusted based on speed.  Significantly more research is needed to further improve
// this family of filters.
static Pose<double> calcPredictedPose(const PoseState<double>& poseState, double predictionDt)
{
	Pose<double> pose = poseState.ThePose;
	const double linearCoef = 1.0;
	Vector3d angularVelocity = poseState.AngularVelocity;
	double angularSpeed = angularVelocity.Length();

	// This could be tuned so that linear and angular are combined with different coefficients
	double speed = angularSpeed + linearCoef * poseState.LinearVelocity.Length();

	const double slope = 0.2; // The rate at which the dynamic prediction interval varies
	double candidateDt = slope * speed; // TODO: Replace with smoothstep function

	double dynamicDt = predictionDt;

	// Choose the candidate if it is shorter, to improve stability
	if (candidateDt < predictionDt)
	{
		dynamicDt = candidateDt;
	}

	if (angularSpeed > 0.001)
	{
		pose.Rotation = pose.Rotation * Quatd(angularVelocity, angularSpeed * dynamicDt);
	}

	pose.Translation += poseState.LinearVelocity * dynamicDt;

	return pose;
}
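The TODO above mentions replacing the linear ramp with a smoothstep function. A minimal sketch of what that substitution could look like; the [lo, hi] speed range is an illustrative assumption, not a tuned constant from this code:

#include <algorithm>

// Eases the prediction interval from 0 up to predictionDt as speed crosses
// the assumed range [lo, hi], instead of the linear candidateDt = slope * speed.
static double smoothstepDt(double speed, double predictionDt, double lo, double hi)
{
    double t = std::clamp((speed - lo) / (hi - lo), 0.0, 1.0);
    t = t * t * (3.0 - 2.0 * t); // classic smoothstep polynomial
    return t * predictionDt;     // shorter interval at low speed, full at high
}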
Example #3
// This is a simple predictive filter based only on extrapolating the smoothed, current angular velocity.
// Note that both QP (the predicted future orientation) and Q (the current orientation) are maintained.
Quatf SensorFusion::GetPredictedOrientation()
{
    Lock::Locker lockScope(Handler.GetHandlerLock());
    Quatf qP = QUncorrected;
    if (EnablePrediction)
    {
#if 1
        Vector3f angVelF  = FAngV.SavitzkyGolaySmooth8();
        float    angVelFL = angVelF.Length();

        if (angVelFL > 0.001f)
        {
            Vector3f rotAxisP      = angVelF / angVelFL;
            float    halfRotAngleP = angVelFL * PredictionDT * 0.5f;
            float    sinaHRAP      = sin(halfRotAngleP);
            Quatf    deltaQP(rotAxisP.x * sinaHRAP, rotAxisP.y * sinaHRAP,
                             rotAxisP.z * sinaHRAP, cos(halfRotAngleP));
            qP = QUncorrected * deltaQP;
        }
#else
        Quatd qpd = Quatd(Q.x, Q.y, Q.z, Q.w);
        int predictionStages = (int)(PredictionDT / DeltaT);
        Vector3f aa      = FAngV.SavitzkyGolayDerivative12();
        Vector3d aad     = Vector3d(aa.x, aa.y, aa.z);
        Vector3f angVelF = FAngV.SavitzkyGolaySmooth8();
        Vector3d avkd    = Vector3d(angVelF.x, angVelF.y, angVelF.z);
        for (int i = 0; i < predictionStages; i++)
        {
            double angVelLengthd = avkd.Length();
            Vector3d rotAxisd    = avkd / angVelLengthd;
            double halfRotAngled = angVelLengthd * DeltaT * 0.5;
            double sinHRAd       = sin(halfRotAngled);
            Quatd  deltaQd       = Quatd(rotAxisd.x * sinHRAd, rotAxisd.y * sinHRAd,
                                         rotAxisd.z * sinHRAd, cos(halfRotAngled));
            qpd = qpd * deltaQd;
            // Advance the angular velocity estimate by its smoothed derivative
            avkd += aad;
        }
        qP = Quatf((float)qpd.x, (float)qpd.y, (float)qpd.z, (float)qpd.w);
#endif
    }
    return qP;
}
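Both branches above reduce to the same primitive: turning an angular velocity and a time step into a delta quaternion via axis-angle. A self-contained sketch of that conversion; the plain structs are stand-ins, not the SDK's Vector3f/Quatf:

#include <cmath>

struct V3 { float x, y, z; };
struct Q4 { float x, y, z, w; }; // x, y, z imaginary parts, w scalar

// Rotation over dt has axis w/|w| and angle |w|*dt, so the quaternion is
// (axis * sin(angle/2), cos(angle/2)).
static Q4 deltaQuat(const V3& w, float dt)
{
    float speed = std::sqrt(w.x * w.x + w.y * w.y + w.z * w.z);
    if (speed < 1e-6f)
        return Q4{0.f, 0.f, 0.f, 1.f};     // identity for negligible motion
    float halfAngle = 0.5f * speed * dt;
    float s = std::sin(halfAngle) / speed; // folds in the axis normalization
    return Q4{w.x * s, w.y * s, w.z * s, std::cos(halfAngle)};
}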
Example #4
void SensorStateReader::RecenterPose()
{
	if (!Updater)
	{
		return;
	}

	/*
		This resets position to center in x, y, z, and resets yaw to center.
		Other rotation components are not affected.
	*/

	const LocklessSensorState lstate = Updater->SharedSensorState.GetState();

	Posed worldFromCpf = lstate.WorldFromImu.ThePose * lstate.ImuFromCpf;
	double hmdYaw, hmdPitch, hmdRoll;
	worldFromCpf.Rotation.GetEulerAngles<Axis_Y, Axis_X, Axis_Z>(&hmdYaw, &hmdPitch, &hmdRoll);

	Posed worldFromCentered(Quatd(Axis_Y, hmdYaw), worldFromCpf.Translation);

	CenteredFromWorld = worldFromCentered.Inverted();
}
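RecenterPose removes only the yaw component, leaving gravity-referenced pitch and roll intact. A sketch of extracting that yaw without a full Euler decomposition; Eigen is an assumption here, the SDK uses GetEulerAngles:

#include <Eigen/Geometry>
#include <cmath>

// Rotate the forward axis by the full orientation, project onto the
// horizontal plane, and measure the heading about +Y. Degenerate when the
// forward axis points straight up or down.
double yawOf(const Eigen::Quaterniond& q)
{
    Eigen::Vector3d fwd = q * Eigen::Vector3d(0.0, 0.0, -1.0);
    return std::atan2(-fwd.x(), -fwd.z());
}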
Example #5
	void view(double azimuth, double elevation, double bank) {
		view(Quatd().fromEuler(azimuth, elevation, bank));
	}
Example #6
	/// Get linear and angular velocities as a Pose
	Pose vel() const {
		return Pose(mMove1, Quatd().fromEuler(mSpin1[1], mSpin1[0], mSpin1[2]));
	}
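Examples #5 and #6 both lean on Quatd().fromEuler(azimuth, elevation, bank). A sketch of the composition such a helper typically performs; the Y-X-Z axis order is an assumption, the library's actual convention may differ:

#include <Eigen/Geometry>

// Compose yaw (about Y), then pitch (about X), then bank (about Z).
Eigen::Quaterniond fromEulerSketch(double azimuth, double elevation, double bank)
{
    return Eigen::Quaterniond(Eigen::AngleAxisd(azimuth,   Eigen::Vector3d::UnitY()))
         * Eigen::Quaterniond(Eigen::AngleAxisd(elevation, Eigen::Vector3d::UnitX()))
         * Eigen::Quaterniond(Eigen::AngleAxisd(bank,      Eigen::Vector3d::UnitZ()));
}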
Example #7
bool Model::getBonePos(const char *boneName, const MotionPose &var, Vec3d *pos, Quatd *rot)const{
	// Search from each root bone (depth == 0) down the hierarchy for the named bone.
	for(int i = 0; i < this->n; i++){
		if(bones[i]->depth == 0 && getBonePosInt(boneName, var, bones[i], Vec3d(0,0,0), Quatd(0,0,0,1), pos, rot))
			return true;
	}
	return false;
}
Example #8
void WarSpace::draw(wardraw_t *wd){
	for(int i = 0; i < 2; i++)
	for(WarField::EntityList::iterator it = (this->*list[i]).begin(); it != (this->*list[i]).end(); it++){
		if(!*it)
			continue;
		Entity *pe = *it;
		if(pe->w == this/* && wd->vw->zslice == (pl->chase && pl->mover == &Player::freelook && pl->chase->getUltimateOwner() == pe->getUltimateOwner() ? 0 : 1)*/){
			try{
				pe->draw(wd);
			}
			catch(std::exception &e){
				fprintf(stderr, __FILE__"(%d) Exception in %p->%s::draw(): %s\n", __LINE__, pe, pe->idname(), e.what());
			}
			catch(...){
				fprintf(stderr, __FILE__"(%d) Exception in %p->%s::draw(): ?\n", __LINE__, pe, pe->idname());
			}
		}
	}


#if 0
	glPushAttrib(GL_POLYGON_BIT | GL_ENABLE_BIT | GL_TEXTURE_BIT | GL_LIGHTING_BIT);
	static GLuint tex = 0;
	if(!tex){
		glGenTextures(1, &tex);
		extern double perlin_noise_pixel(int x, int y, int bit);
		GLubyte texbits[TEXSIZE][TEXSIZE][3];
		for(int i = 0; i < TEXSIZE; i++) for(int j = 0; j < TEXSIZE; j++) for(int k = 0; k < 3; k++){
			texbits[i][j][k] = 128 * perlin_noise_pixel(i, j + TEXSIZE * k, 4) + 128;
//			texbits[i][j][k] = rand() % 256;
		}
//		glBindTexture(GL_TEXTURE_2D, tex);
//		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
//		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
//		glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, TEXSIZE, TEXSIZE, 0, GL_RGB, GL_UNSIGNED_BYTE, texbits);
		glBindTexture(GL_TEXTURE_CUBE_MAP, tex);
		for(int n = 0; n < numof(cubetarget); n++)
			glTexImage2D(cubetarget[n], 0, GL_RGB, TEXSIZE, TEXSIZE, 0, GL_RGB, GL_cubetype, texbits);
		glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
		glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
		glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_S, GL_REPEAT);
		glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_T, GL_REPEAT);
	}
//	glBindTexture(GL_TEXTURE_2D, tex);
	glBindTexture(GL_TEXTURE_CUBE_MAP,tex);
	glEnable(GL_NORMALIZE);
	{
		const GLfloat mat_specular[] = {0., 0., 0., 1.};
		const GLfloat mat_shininess[] = { 50.0 };
		const GLfloat color[] = {1.f, 1.f, 1.f, 1.f}, amb[] = {.25f, .25f, .25f, 1.f};

		glMaterialfv(GL_FRONT, GL_SPECULAR, mat_specular);
		glMaterialfv(GL_FRONT, GL_DIFFUSE, color);
		glMaterialfv(GL_FRONT, GL_AMBIENT, amb);
		glMaterialfv(GL_FRONT, GL_SHININESS, mat_shininess);
		glLightfv(GL_LIGHT0, GL_AMBIENT, amb);
		glLightfv(GL_LIGHT0, GL_DIFFUSE, color);
	}
//	glEnable(GL_TEXTURE_2D);
	glEnable(GL_TEXTURE_CUBE_MAP);
	glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
	glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
//	glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
	glDisable(GL_CULL_FACE);
	drawIcosaSphere(Vec3d(.1,0,-1), gradius, *wd->vw, Vec3d(1,.125,1), Quatd(0, 0, sin(war_time() / 10.), cos(war_time() / 10.)) * Quatd(0, sin(war_time()), 0, cos(war_time())));
	glBindTexture(GL_TEXTURE_2D, 0);
	glPopAttrib();
#endif

	Teline3DrawData dd;
	dd.viewdir = -wd->vw->rot.vec3(2);
	dd.viewpoint = wd->vw->pos;
	dd.invrot = wd->vw->irot;
	dd.fov = wd->vw->fov;
	dd.pgc = wd->vw->gc;
	dd.rot = wd->vw->qrot;
	dd.user = this;
	DrawTeline3D(gibs, &dd);
}
Example #9
bool SensorStateReader::GetSensorStateAtTime(double absoluteTime, TrackingState& ss) const
{
	if (!Updater)
	{
		ss.StatusFlags = 0;
		return false;
	}

	const LocklessSensorState lstate = Updater->SharedSensorState.GetState();

	// Update time
	ss.HeadPose.TimeInSeconds = absoluteTime;

	// Update the status flags
	ss.StatusFlags = lstate.StatusFlags;
	// If no hardware is connected, override the tracking flags
	if (0 == (ss.StatusFlags & Status_HMDConnected))
	{
		ss.StatusFlags &= ~Status_TrackingMask;
	}
	if (0 == (ss.StatusFlags & Status_PositionConnected))
	{
		ss.StatusFlags &= ~(Status_PositionTracked | Status_CameraPoseTracked);
	}

	// If tracking info is invalid, there is nothing useful to report
	if (0 == (ss.StatusFlags & Status_TrackingMask))
	{
		return false;
	}

	// Delta time from the last available data
	double pdt = absoluteTime - lstate.WorldFromImu.TimeInSeconds;
	static const double maxPdt = 0.1;

	// If delta went negative due to synchronization problems between processes
	// or just a lag spike, clamp it to zero
	if (pdt < 0.)
	{
		pdt = 0.;
	}
	else if (pdt > maxPdt)
	{
		if (LastLatWarnTime != lstate.WorldFromImu.TimeInSeconds)
		{
			LastLatWarnTime = lstate.WorldFromImu.TimeInSeconds;
			LogText("[SensorStateReader] Prediction interval too high: %f s, clamping at %f s\n", pdt, maxPdt);
		}
		pdt = maxPdt;
	}

	ss.HeadPose = PoseStatef(lstate.WorldFromImu);
	// Do prediction logic and ImuFromCpf transformation
	ss.HeadPose.ThePose = Posef(CenteredFromWorld * calcPredictedPose(lstate.WorldFromImu, pdt) * lstate.ImuFromCpf);

	ss.CameraPose = Posef(CenteredFromWorld * lstate.WorldFromCamera);

	Posed worldFromLeveledCamera = Posed(Quatd(), lstate.WorldFromCamera.Translation);
	ss.LeveledCameraPose = Posef(CenteredFromWorld * worldFromLeveledCamera);

	ss.RawSensorData = lstate.RawSensorData;
	ss.LastVisionProcessingTime = lstate.LastVisionProcessingTime;

	return true;
}
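The delta-time handling above amounts to a single clamp plus a rate-limited warning. A compact restatement of just the clamping policy, with assumed names:

#include <algorithm>

// Negative deltas (cross-process clock skew or a lag spike) collapse to zero;
// anything beyond maxPdt is capped so prediction never extrapolates more than
// ~100 ms past the last sample.
static double clampPredictionDt(double rawDt, double maxPdt = 0.1)
{
    return std::clamp(rawDt, 0.0, maxPdt);
}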