Example #1
void Gaussian3::computeFromSamples(const std::vector<OrientedPoint> & poses, const std::vector<double>& weights ){
	OrientedPoint mean=OrientedPoint(0,0,0);
	double wcum=0;
	double s=0, c=0;
	std::vector<double>::const_iterator w=weights.begin();
	//first pass: weighted sums (assumes weights.size()==poses.size()); theta is averaged through its sine and cosine so that angles near +pi and -pi do not cancel
	for (std::vector<OrientedPoint>::const_iterator p=poses.begin(); p!=poses.end(); p++){
		s+=*w*sin(p->theta);
		c+=*w*cos(p->theta);
		mean.x+=*w*p->x;
		mean.y+=*w*p->y;
		wcum+=*w;
		w++;
	}
	mean.x/=wcum;
	mean.y/=wcum;
	s/=wcum;
	c/=wcum;
	mean.theta=atan2(s,c); //circular mean of the orientations

	Covariance3 cov=Covariance3::zero;
	w=weights.begin();
	//second pass: weighted covariance of the samples about the mean
	for (std::vector<OrientedPoint>::const_iterator p=poses.begin(); p!=poses.end(); p++){
		OrientedPoint delta=(*p)-mean;
		delta.theta=atan2(sin(delta.theta),cos(delta.theta)); //wrap the angular residual to (-pi, pi]
		cov.xx+=*w*delta.x*delta.x;
		cov.yy+=*w*delta.y*delta.y;
		cov.tt+=*w*delta.theta*delta.theta;
		cov.xy+=*w*delta.x*delta.y;
		cov.yt+=*w*delta.y*delta.theta;
		cov.xt+=*w*delta.x*delta.theta;
		w++;
	}
	cov.xx/=wcum;
	cov.yy/=wcum;
	cov.tt/=wcum;
	cov.xy/=wcum;
	cov.yt/=wcum;
	cov.xt/=wcum;
	EigenCovariance3 ecov(cov);
	this->mean=mean;
	this->covariance=ecov;
	this->cov=cov;
}
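The only non-obvious step in Example #1 is the orientation average: x and y are averaged directly, but theta is averaged through its sine and cosine so that samples near +pi and -pi do not cancel each other out. Below is a minimal, self-contained sketch of just that step; the function name weightedCircularMean is illustrative and not part of the code above, and it assumes angles and weights have the same length with a positive weight sum.

#include <cmath>
#include <cstddef>
#include <vector>

//Weighted circular mean of a set of angles (radians): sum the weighted unit
//vectors (cos, sin) and take atan2 of the sums, as computeFromSamples does
//for mean.theta. Dividing s and c by the weight sum first, as the original
//does, would not change the result of atan2.
double weightedCircularMean(const std::vector<double>& angles,
                            const std::vector<double>& weights){
	double s=0.0, c=0.0;
	for (std::size_t i=0; i<angles.size(); ++i){
		s+=weights[i]*std::sin(angles[i]);
		c+=weights[i]*std::cos(angles[i]);
	}
	return std::atan2(s, c); //already wrapped to (-pi, pi]
}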
Example #2
OrientedPoint RobotLaserMessage::getLaserPose() const {
  return OrientedPoint(m_msg->laser_pose.x,
		       m_msg->laser_pose.y,
		       m_msg->laser_pose.theta);
}
Example #3
void ComparableModels::CreateUsingOctree()
{
	/* 
	Determines the matching points, but nothing about their relative distance.
	*/
	
	/*
	Outputs:
	UsedWorldPoints, MatchingModelPoints: the points in these two vectors are correspondences
	*/
	
	assert(World.NumPoints() >= 4); //there must be enough points to determine a frustum
	
	Model.setPointIndices();
	World.setPointIndices();

	ModelNumber = 0;

	//cull the world points by keeping only those that lie inside the visual hull (viewing frustum) of the model
	vgl_box_3d<double> ModelBox = Model.GetBoundingBox();
	std::vector<vgl_point_3d<double> > WorldOPCoords = GetOPCoords(World.getPoints());
	
	std::vector<unsigned int> WorldIndicesInsideFrustrum = geom::IndicesInsideFrustrum(ModelBox, WorldOPCoords, Scanner.ScanParams.getLocation());
	
	this->MissPoints = 0;
	unsigned int ValidModel = 0;
	
	unsigned int NumFrustrumPoints = WorldIndicesInsideFrustrum.size();
	std::clog << "There are " << NumFrustrumPoints << " points inside the viewing frustrum." << std::endl;
	
	boost::progress_display* show_progress = NULL;
	if(ShowProgress_)
	{
		//std::clog << "Comparable models showing progress." << std::endl;
		//std::clog << "Setup progressbar with " << NumFrustrumPoints << " points." << std::endl;
		show_progress = new boost::progress_display(NumFrustrumPoints);
	}
	else
	{
		//std::clog << "Comparable models not showing progress." << std::endl;
	}
	//#pragma omp parallel for shared(ModelPoints, WorldPoints, InvalidModel, ValidModel) private(WorldPoint, dir, LP, bIntersect, ModelPoint)
	std::vector<unsigned int> TempUsedWorldPoints(NumFrustrumPoints);
	std::vector<OrientedPoint> TempMatchingModelPoints(NumFrustrumPoints);
	std::vector<char> Used(NumFrustrumPoints, 0); //char rather than bool: std::vector<bool> packs bits, so concurrent writes from the parallel loop below could race
#pragma omp parallel for
	for(unsigned int i = 0; i < NumFrustrumPoints; i++)
	{
		if(ShowProgress_)
		{
#pragma omp critical
			++(*show_progress); //boost::progress_display is not thread-safe, so serialize its update across threads
		}
		
		vgl_point_3d<double> WorldPoint;
		vgl_vector_3d<double> dir;
		LidarPoint LP;
		bool bIntersect;
		
		WorldPoint = World.getCoord(WorldIndicesInsideFrustrum[i]);
		
		dir = WorldPoint - Scanner.ScanParams.getLocation();
		
		bIntersect = Scanner.AcquirePoint(Model, dir, LP);
				
		if(!bIntersect)
		{
#pragma omp atomic
			this->MissPoints++; //updated from several threads, so the increment must be atomic
			continue;
		}
		else
		{
			if(!LP.getValid())
			{
				std::cout << "The intersection is not valid!" << std::endl;
			}
			if(LP.getCoord() == Scanner.ScanParams.getLocation())
			{
				std::cout << "Intersection is at the scanner?!" << std::endl;
			}
#pragma omp atomic
			ValidModel++; //updated from several threads, so the increment must be atomic
			TempUsedWorldPoints[i] = WorldIndicesInsideFrustrum[i]; //save world point index
			TempMatchingModelPoints[i] = OrientedPoint(LP.getCoord()); //save model point (could be a point not originally on the model if triangles were intersected)
			Used[i] = true;
		}
	}
	
	std::clog << "Miss points: " << MissPoints << " ValidModel: " << ValidModel << std::endl;
	
	MatchingModelPoints.clear();
	UsedWorldPoints.clear();

	//go through TempMatchingModelPoints and TempUsedWorldPoints and put valid points into the non-temp versions

	unsigned int usedcount = 0;
	for(unsigned int i = 0; i < NumFrustrumPoints; i++)
	{
		if(Used[i] == true)
		{
			usedcount++;
			MatchingModelPoints.push_back(TempMatchingModelPoints[i]);
			UsedWorldPoints.push_back(TempUsedWorldPoints[i]);
		}
	}
	
	//std::clog << "Matching model points: " << MatchingModelPoints.size() << " UsedWorldPoints: " << UsedWorldPoints.size() << std::endl;
	//std::clog << "usedcount: " << usedcount << " ValidModel: " << ValidModel << std::endl;
	std::clog << "There are " << usedcount << " points that match for consistency evaluation." << std::endl;
	this->NumPoints = ValidModel;
}
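A note on the structure of the OpenMP loop above: results are not pushed into MatchingModelPoints and UsedWorldPoints from inside the parallel region, because push_back from several threads would race. Instead each iteration writes into its own preallocated slot i and sets a flag, and a serial pass afterwards compacts the flagged slots. The sketch below shows that pattern in isolation; the function name filterInParallel and the double payload are placeholders, not part of the code above.

#include <cstddef>
#include <vector>

//Parallel fill into per-index slots, then serial compaction: every iteration
//owns slot i exclusively, so the payload needs no locking; only the shared
//counter is updated atomically.
std::vector<double> filterInParallel(const std::vector<double>& input){
	const std::size_t n=input.size();
	std::vector<double> temp(n);
	std::vector<char> used(n, 0); //char, not bool, to avoid bit-packing races
	std::size_t hits=0;

#pragma omp parallel for
	for (long long i=0; i<(long long)n; i++){
		if (input[i]<0.0) //stand-in for "the ray missed the model"
			continue;
#pragma omp atomic
		hits++;
		temp[i]=input[i]*2.0; //stand-in for the computed model point
		used[i]=1;
	}

	std::vector<double> output;
	output.reserve(hits);
	for (std::size_t i=0; i<n; i++)
		if (used[i])
			output.push_back(temp[i]);
	return output;
}

Compiled without OpenMP the pragmas are ignored and the function returns the same result, just serially.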
Example #4
OrientedPoint TrueposMessage::getTruePose() const {
  return OrientedPoint(m_msg->truepose.x,
		       m_msg->truepose.y,
		       m_msg->truepose.theta);
}
Example #5
OrientedPoint TrueposMessage::getOdometryPose() const {
  return OrientedPoint(m_msg->odometrypose.x,
		       m_msg->odometrypose.y,
		       m_msg->odometrypose.theta);
}
Example #6
OrientedPoint OdometryMessage::getRobotPose() const {
  return OrientedPoint(m_msg->x,m_msg->y,m_msg->theta);
}