Example no. 1
/** Constructor from a CPose3D */
CPose3DQuat::CPose3DQuat(const CPose3D &p)
{
	x() = p.x();
	y() = p.y();
	z() = p.z();
	p.getAsQuaternion(m_quat);
}
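
A quick usage sketch of the converting constructor above. The include paths and the CPose3D(x, y, z, yaw, pitch, roll) constructor are the ones that appear elsewhere in these examples; the pose values are arbitrary illustration data:

#include <mrpt/poses/CPose3D.h>
#include <mrpt/poses/CPose3DQuat.h>

void quaternion_pose_demo()
{
	// Euler-angle pose: x, y, z, yaw, pitch, roll.
	const mrpt::poses::CPose3D p(1.0, 2.0, 3.0, 0.5, 0.1, -0.2);

	// Invokes the constructor shown above: the translation is copied as-is
	// and the rotation is re-expressed as a quaternion via getAsQuaternion().
	const mrpt::poses::CPose3DQuat pq(p);
}

Example no. 2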
void ICPslamWrapper::updateSensorPose(std::string _frame_id)
{
  CPose3D pose;
  tf::StampedTransform transform;
  try
  {
    listenerTF_.lookupTransform(base_frame_id, _frame_id, ros::Time(0), transform);

    tf::Vector3 translation = transform.getOrigin();
    tf::Quaternion quat = transform.getRotation();
    pose.x() = translation.x();
    pose.y() = translation.y();
    pose.z() = translation.z();
    tf::Matrix3x3 Rsrc(quat);
    CMatrixDouble33 Rdes;
    for (int c = 0; c < 3; c++)
      for (int r = 0; r < 3; r++)
        Rdes(r, c) = Rsrc.getRow(r)[c];
    pose.setRotationMatrix(Rdes);
    laser_poses_[_frame_id] = pose;
  }
  catch (const tf::TransformException& ex)
  {
    ROS_ERROR("%s", ex.what());
    ros::Duration(1.0).sleep();
  }
}
Example no. 3
void CEllipsoid_setFromPosePDF(CEllipsoid& self, CPose3DPDF& posePDF)
{
    CPose3D meanPose;
    CMatrixDouble66 COV;
    posePDF.getCovarianceAndMean(COV, meanPose);
    CMatrixDouble33 COV3 = COV.block(0,0,3,3);
    self.setLocation(meanPose.x(), meanPose.y(), meanPose.z() + 0.001 );
    self.setCovMatrix(COV3, COV3(2,2)==0 ? 2:3 );
}
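
A hedged usage sketch of the helper above, assuming the ellipsoid object has already been created (e.g. via the ::Create() factories used elsewhere in these examples) and that the pose PDF comes from a particle filter such as the one in Example no. 10:

void add_pose_uncertainty_ellipsoid(
	mrpt::opengl::CEllipsoid& ellipsoid,
	mrpt::poses::CPose3DPDFParticles& posePDF)
{
	// The helper places the ellipsoid at the mean pose (lifted 1 mm in z to
	// avoid z-fighting with a ground plane) and switches to a 2-D ellipse
	// whenever the z variance is exactly zero.
	CEllipsoid_setFromPosePDF(ellipsoid, posePDF);
}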
Example no. 4
CPose3DRotVec::CPose3DRotVec(const CPose3D &m)
{
    m_coords[0] = m.x();
    m_coords[1] = m.y();
    m_coords[2] = m.z();
    CMatrixDouble44 R;
    m.getHomogeneousMatrix( R );
    m_rotvec = rotVecFromRotMat( R );
}
Example no. 5
void mrpt::math::slerp(
	const CPose3D  & p0,
	const CPose3D  & p1,
	const double     t,
	CPose3D        & p)
{
	CQuaternionDouble q0,q1,q;
	p0.getAsQuaternion(q0);
	p1.getAsQuaternion(q1);
	// The quaternion part (this will raise an exception if t is not in [0,1])
	mrpt::math::slerp(q0,q1,t, q);
	// XYZ:
	p = CPose3D(
		q,
		(1-t)*p0.x()+t*p1.x(),
		(1-t)*p0.y()+t*p1.y(),
		(1-t)*p0.z()+t*p1.z() );
}
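
A minimal usage sketch of the overload above; the header paths are an assumption and the pose values are arbitrary:

#include <mrpt/math/slerp.h>
#include <mrpt/poses/CPose3D.h>

void slerp_pose_demo()
{
	using mrpt::poses::CPose3D;

	const CPose3D p0(0.0, 0.0, 0.0, 0.0, 0.0, 0.0);
	const CPose3D p1(2.0, 1.0, 0.0, M_PI / 2, 0.0, 0.0);

	// t must lie in [0,1]; the quaternion slerp called above throws otherwise.
	CPose3D mid;
	mrpt::math::slerp(p0, p1, 0.5, mid);
	// mid now holds the linearly interpolated translation and the
	// spherically interpolated rotation between p0 and p1.
}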
Example no. 6
mrpt::opengl::CSetOfObjects::Ptr framePosesVecVisualize(
	const TFramePosesVec& poses, const double len, const double lineWidth)
{
	mrpt::opengl::CSetOfObjects::Ptr obj =
		mrpt::make_aligned_shared<mrpt::opengl::CSetOfObjects>();

	for (size_t i = 0; i < poses.size(); i++)
	{
		CSetOfObjects::Ptr corner =
			opengl::stock_objects::CornerXYZSimple(len, lineWidth);
		CPose3D p = poses[i];
		p.x(WORLD_SCALE * p.x());
		p.y(WORLD_SCALE * p.y());
		p.z(WORLD_SCALE * p.z());
		corner->setPose(p);
		corner->setName(format("%u", (unsigned int)i));
		corner->enableShowName();
		obj->insert(corner);
	}
	return obj;
}
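
The scaling loop above relies on CPose3D providing both a getter and a setter overload for each translation component; a minimal sketch of that idiom (WORLD_SCALE is a global of the original program, replaced here by an explicit parameter):

void scale_pose_translation(mrpt::poses::CPose3D& p, double s)
{
	// x()/y()/z() with an argument act as setters; without one, as getters.
	p.x(s * p.x());
	p.y(s * p.y());
	p.z(s * p.z());
}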
Example no. 7
void guideLines(const CPose3D& base, CSetOfLines::Ptr& lines, float dist)
{
	CPoint3D pDist = CPoint3D(dist, 0, 0);
	CPoint3D pps[4];
	pps[0] = base + pDist;
	pps[1] = base + CPose3D(0, 0, 0, 0, -M_PI / 2, 0) + pDist;
	pps[2] = base + CPose3D(0, 0, 0, -M_PI / 2, 0, 0) + pDist;
	pps[3] = base + CPose3D(0, 0, 0, M_PI / 2, 0, 0) + pDist;
	for (size_t i = 0; i < 4; i++)
		lines->appendLine(
			base.x(), base.y(), base.z(), pps[i].x(), pps[i].y(), pps[i].z());
	lines->setLineWidth(5);
	lines->setColor(0, 0, 1);
}
Example no. 8
	void test_default_values(const CPose3D &p, const std::string & label)
	{
		EXPECT_EQ(p.x(),0);
		EXPECT_EQ(p.y(),0);
		EXPECT_EQ(p.z(),0);
		EXPECT_EQ(p.yaw(),0);
		EXPECT_EQ(p.pitch(),0);
		EXPECT_EQ(p.roll(),0);
		for (size_t i=0;i<4;i++)
			for (size_t j=0;j<4;j++)
				EXPECT_NEAR(p.getHomogeneousMatrixVal()(i,j), i==j ? 1.0 : 0.0, 1e-8 )
					<< "Failed for (i,j)=" << i << "," << j << endl
					<< "Matrix is: " << endl << p.getHomogeneousMatrixVal() << endl
					<< "case was: " << label << endl;
	}
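
In the original test suite this helper is a member of a test fixture; a hedged sketch of how it would be driven from a GoogleTest case if it were callable directly (the test and case names are made up):

#include <gtest/gtest.h>

TEST(CPose3DTests, DefaultConstructorIsIdentity)
{
	// A default-constructed pose: zero translation, identity rotation.
	const mrpt::poses::CPose3D p;
	test_default_values(p, "default constructed CPose3D");
}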
Example no. 9
/*---------------------------------------------------------------
					saveInterpolatedToTextFile
  ---------------------------------------------------------------*/
bool CPose3DInterpolator::saveInterpolatedToTextFile(const std::string &s, double period) const
{
	try
	{
		CFileOutputStream	f(s);

		if (m_path.empty()) return true;


		const TTimeStamp t_ini = m_path.begin()->first;
		const TTimeStamp t_end = m_path.rbegin()->first;

		TTimeStamp At = mrpt::system::secondsToTimestamp(period);

		//cout << "Interp: " << t_ini << " " << t_end << " " << At << endl;

		CPose3D	   p;
		bool       valid;

		for (TTimeStamp t=t_ini;t<=t_end;t+=At)
		{
			this->interpolate( t, p, valid );
			if (!valid) continue;

			int r = f.printf("%.06f %.06f %.06f %.06f %.06f %.06f %.06f\n",
				mrpt::system::timestampTotime_t(t),
				p.x(),p.y(),p.z(),
				p.yaw(),p.pitch(),p.roll());
			ASSERT_(r>0);
		}

		return true;
	}
	catch(...)
	{
		return false;
	}
}
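
A hedged usage sketch of the method above. The interpolator is assumed to have been populated elsewhere (e.g. from an estimated trajectory), and the file name and period are arbitrary; each output row carries time, x, y, z, yaw, pitch, roll:

bool dump_interpolated_path(const mrpt::poses::CPose3DInterpolator& path)
{
	// One interpolated pose every 100 ms; returns false on any error
	// (the try/catch above swallows exceptions and reports failure).
	return path.saveInterpolatedToTextFile("interp_path.txt", 0.1);
}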
Example no. 10
/*---------------------------------------------------------------
						getCovarianceAndMean
  ---------------------------------------------------------------*/
void CPose3DPDFParticles::getCovarianceAndMean(
	CMatrixDouble66& cov, CPose3D& mean) const
{
	MRPT_START

	getMean(mean);  // First! the mean value:

	// Now the covariance:
	cov.zeros();
	CVectorDouble vars;
	vars.assign(6, 0.0);  // The diagonal of the final covariance matrix

	// Elements off the diagonal of the covariance matrix:
	double std_xy = 0, std_xz = 0, std_xya = 0, std_xp = 0, std_xr = 0;
	double std_yz = 0, std_yya = 0, std_yp = 0, std_yr = 0;
	double std_zya = 0, std_zp = 0, std_zr = 0;
	double std_yap = 0, std_yar = 0;
	double std_pr = 0;

	// Mean values in [0, 2pi] range:
	double mean_yaw = mean.yaw();
	double mean_pitch = mean.pitch();
	double mean_roll = mean.roll();
	if (mean_yaw < 0) mean_yaw += M_2PI;
	if (mean_pitch < 0) mean_pitch += M_2PI;
	if (mean_roll < 0) mean_roll += M_2PI;

	// Enough information to estimate the covariance?
	if (m_particles.size() < 2) return;

	// Sum all weight values:
	double W = 0;
	for (CPose3DPDFParticles::CParticleList::const_iterator it =
			 m_particles.begin();
		 it != m_particles.end(); ++it)
		W += exp(it->log_w);

	ASSERT_(W > 0);

	// Compute covariance:
	for (CPose3DPDFParticles::CParticleList::const_iterator it =
			 m_particles.begin();
		 it != m_particles.end(); ++it)
	{
		double w = exp(it->log_w) / W;

		// Manage 1 PI range:
		double err_yaw = wrapToPi(fabs(it->d->yaw() - mean_yaw));
		double err_pitch = wrapToPi(fabs(it->d->pitch() - mean_pitch));
		double err_roll = wrapToPi(fabs(it->d->roll() - mean_roll));

		double err_x = it->d->x() - mean.x();
		double err_y = it->d->y() - mean.y();
		double err_z = it->d->z() - mean.z();

		vars[0] += square(err_x) * w;
		vars[1] += square(err_y) * w;
		vars[2] += square(err_z) * w;
		vars[3] += square(err_yaw) * w;
		vars[4] += square(err_pitch) * w;
		vars[5] += square(err_roll) * w;

		std_xy += err_x * err_y * w;
		std_xz += err_x * err_z * w;
		std_xya += err_x * err_yaw * w;
		std_xp += err_x * err_pitch * w;
		std_xr += err_x * err_roll * w;

		std_yz += err_y * err_z * w;
		std_yya += err_y * err_yaw * w;
		std_yp += err_y * err_pitch * w;
		std_yr += err_y * err_roll * w;

		std_zya += err_z * err_yaw * w;
		std_zp += err_z * err_pitch * w;
		std_zr += err_z * err_roll * w;

		std_yap += err_yaw * err_pitch * w;
		std_yar += err_yaw * err_roll * w;

		std_pr += err_pitch * err_roll * w;
	}  // end for it

	// Unbiased estimation of variance:
	cov(0, 0) = vars[0];
	cov(1, 1) = vars[1];
	cov(2, 2) = vars[2];
	cov(3, 3) = vars[3];
	cov(4, 4) = vars[4];
	cov(5, 5) = vars[5];

	cov(1, 0) = cov(0, 1) = std_xy;
	cov(2, 0) = cov(0, 2) = std_xz;
	cov(3, 0) = cov(0, 3) = std_xya;
	cov(4, 0) = cov(0, 4) = std_xp;
	cov(5, 0) = cov(0, 5) = std_xr;

	cov(2, 1) = cov(1, 2) = std_yz;
	cov(3, 1) = cov(1, 3) = std_yya;
	cov(4, 1) = cov(1, 4) = std_yp;
	cov(5, 1) = cov(1, 5) = std_yr;

	cov(3, 2) = cov(2, 3) = std_zya;
	cov(4, 2) = cov(2, 4) = std_zp;
	cov(5, 2) = cov(2, 5) = std_zr;

	cov(4, 3) = cov(3, 4) = std_yap;
	cov(5, 3) = cov(3, 5) = std_yar;

	cov(5, 4) = cov(4, 5) = std_pr;

	MRPT_END
}
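
A short usage sketch of the method above; how the particle set is populated (e.g. by a particle-filter update) is outside this snippet, and the include lines are assumptions:

#include <cmath>
#include <cstdio>

void report_pose_uncertainty(const mrpt::poses::CPose3DPDFParticles& pdf)
{
	mrpt::poses::CPose3D mean;
	mrpt::math::CMatrixDouble66 cov;
	pdf.getCovarianceAndMean(cov, mean);

	// Diagonal entries are the variances of x, y, z, yaw, pitch, roll;
	// with fewer than 2 particles the covariance is left as all zeros.
	printf("sigma_x = %f m\n", std::sqrt(cov(0, 0)));
}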
Example no. 11
/** Constructor from a CPose3D object. */
CPoint3D::CPoint3D(const CPose3D& p)
{
	m_coords[0] = p.x();
	m_coords[1] = p.y();
	m_coords[2] = p.z();
}
Example no. 12
/*---------------------------------------------------------------
				pose3D = point3D + pose3D
  ---------------------------------------------------------------*/
CPose3D CPoint3D::operator+(const CPose3D& b) const
{
	return CPose3D(
		m_coords[0] + b.x(), m_coords[1] + b.y(), m_coords[2] + b.z(), b.yaw(),
		b.pitch(), b.roll());
}
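
As the implementation shows, this overload only shifts the translation of b by the point and keeps b's orientation untouched; a quick illustration with arbitrary values:

void point_plus_pose_demo()
{
	const mrpt::poses::CPoint3D pt(1.0, 0.0, 0.0);
	const mrpt::poses::CPose3D b(0.0, 2.0, 0.0, 0.5 /*yaw*/, 0.0, 0.0);

	// result = (x=1.0, y=2.0, z=0.0, yaw=0.5, pitch=0, roll=0)
	const mrpt::poses::CPose3D result = pt + b;
	(void)result;
}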
Example no. 13
/** Return one or both of the following 6x6 Jacobians, useful in graph-slam
 * problems...
  */
void SE_traits<3>::jacobian_dP1DP2inv_depsilon(
	const CPose3D& P1DP2inv, matrix_VxV_t* df_de1, matrix_VxV_t* df_de2)
{
	const CMatrixDouble33& R =
		P1DP2inv.getRotationMatrix();  // The rotation matrix.

	// Common part: d_Ln(R)_dR:
	CMatrixFixedNumeric<double, 3, 9> dLnRot_dRot(UNINITIALIZED_MATRIX);
	CPose3D::ln_rot_jacob(R, dLnRot_dRot);

	if (df_de1)
	{
		matrix_VxV_t& J1 = *df_de1;
		// This Jacobian has the structure:
		//           [   I_3    |      -[d_t]_x      ]
		//  Jacob1 = [ ---------+------------------- ]
		//           [   0_3x3  |   dLnR_dR * (...)  ]
		//
		J1.zeros();
		J1(0, 0) = 1;
		J1(1, 1) = 1;
		J1(2, 2) = 1;

		J1(0, 4) = P1DP2inv.z();
		J1(0, 5) = -P1DP2inv.y();
		J1(1, 3) = -P1DP2inv.z();
		J1(1, 5) = P1DP2inv.x();
		J1(2, 3) = P1DP2inv.y();
		J1(2, 4) = -P1DP2inv.x();

		alignas(MRPT_MAX_ALIGN_BYTES) const double aux_vals[] = {
			0, R(2, 0), -R(1, 0), -R(2, 0), 0, R(0, 0), R(1, 0), -R(0, 0), 0,
			// -----------------------
			0, R(2, 1), -R(1, 1), -R(2, 1), 0, R(0, 1), R(1, 1), -R(0, 1), 0,
			// -----------------------
			0, R(2, 2), -R(1, 2), -R(2, 2), 0, R(0, 2), R(1, 2), -R(0, 2), 0};
		const CMatrixFixedNumeric<double, 9, 3> aux(aux_vals);

		// right-bottom part = dLnRot_dRot * aux
		J1.block(3, 3, 3, 3) = (dLnRot_dRot * aux).eval();
	}
	if (df_de2)
	{
		// This Jacobian has the structure:
		//           [    -R    |      0_3x3         ]
		//  Jacob2 = [ ---------+------------------- ]
		//           [   0_3x3  |   dLnR_dR * (...)  ]
		//
		matrix_VxV_t& J2 = *df_de2;
		J2.zeros();

		for (int i = 0; i < 3; i++)
			for (int j = 0; j < 3; j++)
				J2.set_unsafe(i, j, -R.get_unsafe(i, j));

		alignas(MRPT_MAX_ALIGN_BYTES) const double aux_vals[] = {
			0, R(0, 2), -R(0, 1), 0, R(1, 2), -R(1, 1), 0, R(2, 2), -R(2, 1),
			// -----------------------
			-R(0, 2), 0, R(0, 0), -R(1, 2), 0, R(1, 0), -R(2, 2), 0, R(2, 0),
			// -----------------------
			R(0, 1), -R(0, 0), 0, R(1, 1), -R(1, 0), 0, R(2, 1), -R(2, 0), 0};
		const CMatrixFixedNumeric<double, 9, 3> aux(aux_vals);

		// right-bottom part = dLnRot_dRot * aux
		J2.block(3, 3, 3, 3) = (dLnRot_dRot * aux).eval();
	}
}
Example no. 14
// ------------------------------------------------------
//				MapBuilding_ICP
// ------------------------------------------------------
void MapBuilding_ICP()
{
	MRPT_TRY_START
	  

	CTicTac								tictac,tictacGlobal,tictac_JH;
	int									step = 0;
	std::string							str;
	CSensFrameProbSequence				finalMap;
	float								t_exec;
	COccupancyGridMap2D::TEntropyInfo	entropy;

	size_t						rawlogEntry = 0;
	CFileGZInputStream					rawlogFile( RAWLOG_FILE.c_str() );

	CFileGZOutputStream sensor_data;

	// ---------------------------------
	//		Constructor
	// ---------------------------------
	CMetricMapBuilderICP mapBuilder(
		&metricMapsOpts,
		insertionLinDistance,
		insertionAngDistance,
		&icpOptions
		);

	mapBuilder.ICP_options.matchAgainstTheGrid = matchAgainstTheGrid;

	// ---------------------------------
	//   CMetricMapBuilder::TOptions
	// ---------------------------------
	mapBuilder.options.verbose					= true;
	mapBuilder.options.enableMapUpdating		= true;
	mapBuilder.options.debugForceInsertion		= false;
	mapBuilder.options.insertImagesAlways		= false;

	// Prepare output directory:
	// --------------------------------
	deleteFilesInDirectory(OUT_DIR);
	createDirectory(OUT_DIR);

	// Open log files:
	// ----------------------------------
	CFileOutputStream  f_log(format("%s/log_times.txt",OUT_DIR));
	CFileOutputStream  f_path(format("%s/log_estimated_path.txt",OUT_DIR));
	CFileOutputStream  f_pathOdo(format("%s/log_odometry_path.txt",OUT_DIR));


	// Create 3D window if requested:
	CDisplayWindow3DPtr	win3D;
#if MRPT_HAS_WXWIDGETS
	if (SHOW_PROGRESS_3D_REAL_TIME)
	{
		win3D = CDisplayWindow3DPtr( new CDisplayWindow3D("ICP-SLAM @ MRPT C++ Library (C) 2004-2008", 600, 500) );
		win3D->setCameraZoom(20);
		win3D->setCameraAzimuthDeg(-45);
	}
#endif

	if(OBS_FROM_FILE == 0){
	  sensor_data.open("sensor_data.rawlog");
	  printf("Receive From Sensor\n");
	  initLaser();
	  printf("OK\n");
	}
	

	// ----------------------------------------------------------
	//						Map Building
	// ----------------------------------------------------------
	CActionCollectionPtr	action;
	CSensoryFramePtr		observations, temp_obs;
	CSensoryFramePtr obs_set;
	CPose2D					odoPose(0,0,0);

	CSimplePointsMap	oldMap, newMap;
	CICP					ICP;

	vector_float		accum_x, accum_y, accum_z;
	
	// ICP Setting
	ICP.options.ICP_algorithm = (TICPAlgorithm)ICP_method;

	ICP.options.maxIterations			= 40;
	ICP.options.thresholdAng =			0.15;
	ICP.options.thresholdDist			= 0.75f;
	ICP.options.ALFA					= 0.30f;
	ICP.options.smallestThresholdDist	= 0.10f;
	ICP.options.doRANSAC = false;
	ICP.options.dumpToConsole();
	//
	CObservationPtr obsSick = CObservationPtr(new CObservation2DRangeScan());
	CObservationPtr obsHokuyo = CObservationPtr(new CObservation2DRangeScan());
	CSimplePointsMap hokuyoMap;
	
	bool isFirstTime = true;
	bool loop = true;

    /*
		  cout << index << "frame, Input the any key to continue" << endl;
		  getc(stdin);
		  fflush(stdin);
    */

	tictacGlobal.Tic();
	while(loop)
	{
      /*
		if(BREAK){
		  cout << index << "frame, Input the any key to continue" << endl;
		  getc(stdin);
		  fflush(stdin);
		}else{
		  if(os::kbhit())
			loop = true;
		}
      */

        if(os::kbhit())
          loop = true;
        

		if(DELAY) {
		  sleep(15);
		}
	  

		// Load action/observation pair from the rawlog:
		// --------------------------------------------------

		if(OBS_FROM_FILE == 1) {
		  if (! CRawlog::readActionObservationPair( rawlogFile, action, temp_obs, rawlogEntry) )
			break; // file EOF

		  obsSick = temp_obs->getObservationByIndex(0);
		  obsHokuyo = temp_obs->getObservationByIndex(1);
		  
		  observations = CSensoryFramePtr(new CSensoryFrame());		  
		  observations->insert((CObservationPtr)obsSick);

		  hokuyoMap.clear();
		  hokuyoMap.insertObservation(obsHokuyo.pointer());
		  
		}else{
		  rawlogEntry = rawlogEntry+2;

		  tictac.Tic();

		  obsSick = CObservationPtr(new CObservation2DRangeScan());
		  obsHokuyo = CObservationPtr(new CObservation2DRangeScan());

		  if(!receiveDataFromSensor((CObservation2DRangeScan*)obsSick.pointer(),SICK)){
			cout << " Error in receive sensor data" << endl;
			return;
		  }

		  if(!receiveDataFromSensor((CObservation2DRangeScan*)obsHokuyo.pointer(),HOKUYO)){
			cout << " Error in receive sensor data" << endl;

			return;
		  }
		  
		  cout << "Time to receive data : " << tictac.Tac()*1000.0f << endl;

		  obsSick->timestamp = mrpt::system::now();
		  obsSick->setSensorPose(CPose3D(0,0,0,DEG2RAD(0),DEG2RAD(0),DEG2RAD(0)));
		  ((CObservation2DRangeScan*)obsSick.pointer())->rightToLeft = true;
		  ((CObservation2DRangeScan*)obsSick.pointer())->stdError = 0.003f;

		  obsHokuyo->timestamp = mrpt::system::now();
		  obsHokuyo->setSensorPose(CPose3D(0,0,0.4,DEG2RAD(0),DEG2RAD(-90),DEG2RAD(0)));
		  ((CObservation2DRangeScan*)obsHokuyo.pointer())->rightToLeft = true;
		  ((CObservation2DRangeScan*)obsHokuyo.pointer())->stdError = 0.003f;
		  
		  cout << "rawlogEntry : " << rawlogEntry << endl;

		  CActionRobotMovement2D myAction;

		  newMap.clear();
		  obsSick.pointer()->insertObservationInto(&newMap);

		  if(!isFirstTime){

			static float					runningTime;
			static CICP::TReturnInfo		info;
			static CPose2D initial(0,0,0);

			CPosePDFPtr ICPPdf = ICP.AlignPDF(&oldMap, &newMap, initial, &runningTime, (void*)&info);
			CPose2D		estMean;
			CMatrixDouble33		estCov;
			ICPPdf->getCovarianceAndMean(estCov, estMean);
			printf("ICP run in %.02fms, %d iterations (%.02fms/iter), %.01f%% goodness\n -> ", runningTime*1000, info.nIterations, runningTime*1000.0f/info.nIterations, info.goodness*100 );
			cout << "ICP Odometry : " << ICPPdf->getMeanVal() << endl;

			myAction.estimationMethod = CActionRobotMovement2D::emScan2DMatching;
			myAction.poseChange = CPosePDFPtr( new CPosePDFGaussian(estMean, estCov));
		  }else{
			isFirstTime = false;
		  }

		  oldMap.clear();
		  oldMap.copyFrom(newMap);

		  observations = CSensoryFramePtr(new CSensoryFrame());
		  action = CActionCollectionPtr(new CActionCollection());		  

		  observations->insert((CObservationPtr)obsSick);

		  obs_set = CSensoryFramePtr(new CSensoryFrame());

		  obs_set->insert((CObservationPtr)obsSick);
		  obs_set->insert((CObservationPtr)obsHokuyo);
		  
		  action->insert(myAction);

		  sensor_data << action << obs_set;

		  hokuyoMap.clear();
		  hokuyoMap.insertObservation(obsHokuyo.pointer());

		}


		if (rawlogEntry>=rawlog_offset)
		{
			// Update odometry:
			{
				CActionRobotMovement2DPtr act= action->getBestMovementEstimation();
				if (act)
					odoPose = odoPose + act->poseChange->getMeanVal();
			}

			// Execute:
			// ----------------------------------------
			tictac.Tic();
				mapBuilder.processActionObservation( *action, *observations );
			t_exec = tictac.Tac();
			printf("Map building executed in %.03fms\n", 1000.0f*t_exec );


			// Info log:
			// -----------
			f_log.printf("%f %i\n",1000.0f*t_exec,mapBuilder.getCurrentlyBuiltMapSize() );

			const CMultiMetricMap* mostLikMap =  mapBuilder.getCurrentlyBuiltMetricMap();

			if (0==(step % LOG_FREQUENCY))
			{
				// Pose log:
				// -------------
				if (SAVE_POSE_LOG)
				{
					printf("Saving pose log information...");
					mapBuilder.getCurrentPoseEstimation()->saveToTextFile( format("%s/mapbuild_posepdf_%03u.txt",OUT_DIR,step) );
					printf("Ok\n");
				}
			}


			// Save a 3D scene view of the mapping process:
			if (0==(step % LOG_FREQUENCY) || (SAVE_3D_SCENE || win3D.present()))
			{
                CPose3D robotPose;
				mapBuilder.getCurrentPoseEstimation()->getMean(robotPose);

				COpenGLScenePtr		scene = COpenGLScene::Create();

                COpenGLViewportPtr view=scene->getViewport("main");
                ASSERT_(view);

                COpenGLViewportPtr view_map = scene->createViewport("mini-map");
                view_map->setBorderSize(2);
                view_map->setViewportPosition(0.01,0.01,0.35,0.35);
                view_map->setTransparent(false);

				{
					mrpt::opengl::CCamera &cam = view_map->getCamera();
					cam.setAzimuthDegrees(-90);
					cam.setElevationDegrees(90);
					cam.setPointingAt(robotPose);
					cam.setZoomDistance(20);
					cam.setOrthogonal();
				}

				// The ground:
				mrpt::opengl::CGridPlaneXYPtr groundPlane = mrpt::opengl::CGridPlaneXY::Create(-200,200,-200,200,0,5);
				groundPlane->setColor(0.4,0.4,0.4);
				view->insert( groundPlane );
				view_map->insert( CRenderizablePtr( groundPlane) ); // A copy

				// The camera pointing to the current robot pose:
				if (CAMERA_3DSCENE_FOLLOWS_ROBOT)
				{
				    scene->enableFollowCamera(true);

					mrpt::opengl::CCamera &cam = view_map->getCamera();
					cam.setAzimuthDegrees(-45);
					cam.setElevationDegrees(45);
					cam.setPointingAt(robotPose);
				}

				// The maps:
				{
					opengl::CSetOfObjectsPtr obj = opengl::CSetOfObjects::Create();
					mostLikMap->getAs3DObject( obj );
					view->insert(obj);

					// Only the point map:
					opengl::CSetOfObjectsPtr ptsMap = opengl::CSetOfObjects::Create();
					if (mostLikMap->m_pointsMaps.size())
					{
                        mostLikMap->m_pointsMaps[0]->getAs3DObject(ptsMap);
                        view_map->insert( ptsMap );
					}
				}

				// Draw the robot path:
				CPose3DPDFPtr posePDF =  mapBuilder.getCurrentPoseEstimation();
				CPose3D  curRobotPose;
				posePDF->getMean(curRobotPose);
				{
					opengl::CSetOfObjectsPtr obj = opengl::stock_objects::RobotPioneer();
					obj->setPose( curRobotPose );
					view->insert(obj);
				}
				{
					opengl::CSetOfObjectsPtr obj = opengl::stock_objects::RobotPioneer();
					obj->setPose( curRobotPose );
					view_map->insert( obj );
				}


				// Draw Hokuyo total data
				{
				  CRenderizablePtr hokuyoRender_t = scene->getByName("hokuyo_total");

				  if(!hokuyoRender_t){
					hokuyoRender_t = CPointCloud::Create();
					hokuyoRender_t->setName("hokuyo_total");
					hokuyoRender_t->setColor(0,0,1);
					hokuyoRender_t->setPose( CPose3D(0,0,0) );
					getAs<CPointCloud>(hokuyoRender_t)->setPointSize(3);
					scene->insert( hokuyoRender_t);					
				  }

				  for(size_t i =0 ; i < accum_x.size(); i++){
					getAs<CPointCloud>(hokuyoRender_t)->insertPoint(accum_x[i], accum_y[i], accum_z[i]);					
				  }
				  cout << "accum_x size : " << accum_x.size() << endl;


				}
				
				// Draw Hokuyo Current data plate
				{
				  CRenderizablePtr hokuyoRender = scene->getByName("hokuyo_cur");
				  hokuyoRender = CPointCloud::Create();
				  hokuyoRender->setName("hokuyo_cur");
				  hokuyoRender->setColor(0,1,0);
				  hokuyoRender->setPose( curRobotPose );
				  getAs<CPointCloud>(hokuyoRender)->setPointSize(0.1);
				  getAs<CPointCloud>(hokuyoRender)->loadFromPointsMap(&hokuyoMap);				  
				  scene->insert( hokuyoRender);

				  vector_float cur_x = getAs<CPointCloud>(hokuyoRender)->getArrayX();
				  vector_float cur_y = getAs<CPointCloud>(hokuyoRender)->getArrayY();
				  vector_float cur_z = getAs<CPointCloud>(hokuyoRender)->getArrayZ();


				  //				  cout << "current pose : " << curRobotPose << endl;				  
				  for(size_t i =0 ; i < cur_x.size(); i++){
					/*
					float rotate_x = cur_x[i]+ curRobotPose.y()*sin(curRobotPose.yaw()*3.14/180.0);
					float rotate_y = cur_y[i]+ curRobotPose.y()*cos(curRobotPose.yaw()*3.14/180.0);
					*/
					float rotate_x = curRobotPose.x() + cur_y[i]*sin(-curRobotPose.yaw());
					float rotate_y = curRobotPose.y() + cur_y[i]*cos(-curRobotPose.yaw());
					//					printf("cur_x, cur_y, cur_z : %f %f %f \n", rotate_x,rotate_y, cur_z[i]);

					accum_x.push_back(rotate_x);
					accum_y.push_back(rotate_y);
					accum_z.push_back(cur_z[i]);
				  }
				}

				// Save as file:
				if (0==(step % LOG_FREQUENCY) && SAVE_3D_SCENE)
				{
					CFileGZOutputStream	f( format( "%s/buildingmap_%05u.3Dscene",OUT_DIR,step ));
					f << *scene;
				}

				// Show 3D?
				if (win3D)
				{
					opengl::COpenGLScenePtr &ptrScene = win3D->get3DSceneAndLock();
					ptrScene = scene;

					win3D->unlockAccess3DScene();

					// Move camera:
					win3D->setCameraPointingToPoint( curRobotPose.x(),curRobotPose.y(),curRobotPose.z() );

					// Update:
					win3D->forceRepaint();

					sleep( SHOW_PROGRESS_3D_REAL_TIME_DELAY_MS );
				}
			}


			// Save the memory usage:
			// ------------------------------------------------------------------
			{
				printf("Saving memory usage...");
				unsigned long	memUsage = getMemoryUsage();
				FILE		*f=os::fopen( format("%s/log_MemoryUsage.txt",OUT_DIR).c_str() ,"at");
				if (f)
				{
					os::fprintf(f,"%u\t%lu\n",step,memUsage);
					os::fclose(f);
				}
				printf("Ok! (%.04fMb)\n", ((float)memUsage)/(1024*1024) );
			}

			// Save the robot estimated pose for each step:
			f_path.printf("%i %f %f %f\n",
				step,
				mapBuilder.getCurrentPoseEstimation()->getMeanVal().x(),
				mapBuilder.getCurrentPoseEstimation()->getMeanVal().y(),
				mapBuilder.getCurrentPoseEstimation()->getMeanVal().yaw() );


			f_pathOdo.printf("%i %f %f %f\n",step,odoPose.x(),odoPose.y(),odoPose.phi());

		} // end of if "rawlog_offset"...

		step++;
		printf("\n---------------- STEP %u | RAWLOG ENTRY %u ----------------\n",step, (unsigned)rawlogEntry);

		// Free memory:
		action.clear_unique();
		observations.clear_unique();
	};

	printf("\n---------------- END!! (total time: %.03f sec) ----------------\n",tictacGlobal.Tac());

	//	hokuyo.turnOff();	
	sick.stop();

	

	// Save map:
	mapBuilder.getCurrentlyBuiltMap(finalMap);

	str = format("%s/_finalmap_.simplemap",OUT_DIR);
	printf("Dumping final map in binary format to: %s\n", str.c_str() );
	mapBuilder.saveCurrentMapToFile(str);

	CMultiMetricMap  *finalPointsMap = mapBuilder.getCurrentlyBuiltMetricMap();
	str = format("%s/_finalmaps_.txt",OUT_DIR);
	printf("Dumping final metric maps to %s_XXX\n", str.c_str() );
	finalPointsMap->saveMetricMapRepresentationToFile( str );

	if (win3D)
		win3D->waitForKey();

	MRPT_TRY_END
}
Example no. 15
void Run_KF_SLAM(CConfigFile& cfgFile, const std::string& rawlogFileName)
{
	// The EKF-SLAM class:
	// Traits for this KF implementation (2D or 3D)
	using traits_t = kfslam_traits<IMPL>;
	using ekfslam_t = typename traits_t::ekfslam_t;

	ekfslam_t mapping;

	// The rawlog file:
	// ----------------------------------------
	const unsigned int rawlog_offset =
		cfgFile.read_int("MappingApplication", "rawlog_offset", 0);

	const unsigned int SAVE_LOG_FREQUENCY =
		cfgFile.read_int("MappingApplication", "SAVE_LOG_FREQUENCY", 1);

	const bool SAVE_DA_LOG =
		cfgFile.read_bool("MappingApplication", "SAVE_DA_LOG", true);

	const bool SAVE_3D_SCENES =
		cfgFile.read_bool("MappingApplication", "SAVE_3D_SCENES", true);
	const bool SAVE_MAP_REPRESENTATIONS = cfgFile.read_bool(
		"MappingApplication", "SAVE_MAP_REPRESENTATIONS", true);
	bool SHOW_3D_LIVE =
		cfgFile.read_bool("MappingApplication", "SHOW_3D_LIVE", false);
	const bool CAMERA_3DSCENE_FOLLOWS_ROBOT = cfgFile.read_bool(
		"MappingApplication", "CAMERA_3DSCENE_FOLLOWS_ROBOT", false);

#if !MRPT_HAS_WXWIDGETS
	SHOW_3D_LIVE = false;
#endif

	string OUT_DIR = cfgFile.read_string(
		"MappingApplication", "logOutput_dir", "OUT_KF-SLAM");
	string ground_truth_file =
		cfgFile.read_string("MappingApplication", "ground_truth_file", "");
	string ground_truth_file_robot = cfgFile.read_string(
		"MappingApplication", "ground_truth_file_robot", "");

	string ground_truth_data_association = cfgFile.read_string(
		"MappingApplication", "ground_truth_data_association", "");

	cout << "RAWLOG FILE:" << endl << rawlogFileName << endl;
	ASSERT_FILE_EXISTS_(rawlogFileName);
	CFileGZInputStream rawlogFile(rawlogFileName);

	cout << "---------------------------------------------------" << endl
		 << endl;

	deleteFilesInDirectory(OUT_DIR);
	createDirectory(OUT_DIR);

	// Load the config options for mapping:
	// ----------------------------------------
	mapping.loadOptions(cfgFile);
	mapping.KF_options.dumpToConsole();
	mapping.options.dumpToConsole();

	// debug:
	// mapping.KF_options.use_analytic_observation_jacobian = true;
	// mapping.KF_options.use_analytic_transition_jacobian = true;
	// mapping.KF_options.debug_verify_analytic_jacobians = true;

	// Is there ground truth of the robot poses??
	CMatrixDouble GT_PATH(0, 0);
	if (ground_truth_file_robot.size() && fileExists(ground_truth_file_robot))
	{
		GT_PATH.loadFromTextFile(ground_truth_file_robot);
		ASSERT_(GT_PATH.rows() > 0 && GT_PATH.cols() == 6);
	}

	// Is there a ground truth file of the data association?
	std::map<double, std::vector<int>>
		GT_DA;  // Map: timestamp -> vector(index in observation -> real index)
	// Landmark index bimapping: SLAM DA <---> GROUND TRUTH DA
	mrpt::containers::bimap<int, int> DA2GTDA_indices;
	if (!ground_truth_data_association.empty() &&
		fileExists(ground_truth_data_association))
	{
		CMatrixDouble mGT_DA;
		mGT_DA.loadFromTextFile(ground_truth_data_association);
		ASSERT_ABOVEEQ_(mGT_DA.cols(), 3);
		// Convert the loaded matrix into a std::map in GT_DA:
		for (int i = 0; i < mGT_DA.rows(); i++)
		{
			std::vector<int>& v = GT_DA[mGT_DA(i, 0)];
			if (v.size() <= mGT_DA(i, 1)) v.resize(mGT_DA(i, 1) + 1);
			v[mGT_DA(i, 1)] = mGT_DA(i, 2);
		}
		cout << "Loaded " << GT_DA.size()
			 << " entries from DA ground truth file\n";
	}

	// Create output file for DA perf:
	std::ofstream out_da_performance_log;
	{
		const std::string f = std::string(
			OUT_DIR + std::string("/data_association_performance.log"));
		out_da_performance_log.open(f.c_str());
		ASSERTMSG_(
			out_da_performance_log.is_open(),
			std::string("Error writing to: ") + f);

		// Header:
		out_da_performance_log
			<< "%           TIMESTAMP                INDEX_IN_OBS    TruePos "
			   "FalsePos TrueNeg FalseNeg  NoGroundTruthSoIDontKnow \n"
			<< "%--------------------------------------------------------------"
			   "--------------------------------------------------\n";
	}

	if (SHOW_3D_LIVE)
	{
		win3d = mrpt::make_aligned_shared<mrpt::gui::CDisplayWindow3D>(
			"KF-SLAM live view", 800, 500);

		win3d->addTextMessage(
			0.01, 0.96, "Red: Estimated path", TColorf(0.8f, 0.8f, 0.8f), 100,
			MRPT_GLUT_BITMAP_HELVETICA_10);
		win3d->addTextMessage(
			0.01, 0.93, "Black: Ground truth path", TColorf(0.8f, 0.8f, 0.8f),
			101, MRPT_GLUT_BITMAP_HELVETICA_10);
	}

	// Create DA-log output file:
	std::ofstream out_da_log;
	if (SAVE_DA_LOG)
	{
		const std::string f =
			std::string(OUT_DIR + std::string("/data_association.log"));
		out_da_log.open(f.c_str());
		ASSERTMSG_(out_da_log.is_open(), std::string("Error writing to: ") + f);

		// Header:
		out_da_log << "%           TIMESTAMP                INDEX_IN_OBS    ID "
					  "   RANGE(m)    YAW(rad)   PITCH(rad) \n"
				   << "%-------------------------------------------------------"
					  "-------------------------------------\n";
	}

	// The main loop:
	// ---------------------------------------
	CActionCollection::Ptr action;
	CSensoryFrame::Ptr observations;
	size_t rawlogEntry = 0, step = 0;

	vector<TPose3D> meanPath;  // The estimated path
	typename traits_t::posepdf_t robotPose;
	const bool is_pose_3d = robotPose.state_length != 3;

	std::vector<typename IMPL::landmark_point_t> LMs;
	std::map<unsigned int, CLandmark::TLandmarkID> LM_IDs;
	CMatrixDouble fullCov;
	CVectorDouble fullState;
	CTicTac kftictac;

	auto rawlogArch = mrpt::serialization::archiveFrom(rawlogFile);

	for (;;)
	{
		if (os::kbhit())
		{
			char pushKey = os::getch();
			if (27 == pushKey) break;
		}

		// Load action/observation pair from the rawlog:
		// --------------------------------------------------
		if (!CRawlog::readActionObservationPair(
				rawlogArch, action, observations, rawlogEntry))
			break;  // file EOF

		if (rawlogEntry >= rawlog_offset)
		{
			// Process the action and observations:
			// --------------------------------------------
			kftictac.Tic();

			mapping.processActionObservation(action, observations);

			const double tim_kf_iter = kftictac.Tac();

			// Get current state:
			// -------------------------------
			mapping.getCurrentState(robotPose, LMs, LM_IDs, fullState, fullCov);
			cout << "Mean pose: " << endl << robotPose.mean << endl;
			cout << "# of landmarks in the map: " << LMs.size() << endl;

			// Get the mean robot pose as 3D:
			const CPose3D robotPoseMean3D = CPose3D(robotPose.mean);

			// Build the path:
			meanPath.push_back(robotPoseMean3D.asTPose());

			// Save mean pose:
			if (!(step % SAVE_LOG_FREQUENCY))
			{
				const auto p = robotPose.mean.asVectorVal();
				p.saveToTextFile(
					OUT_DIR +
					format("/robot_pose_%05u.txt", (unsigned int)step));
			}

			// Save full cov:
			if (!(step % SAVE_LOG_FREQUENCY))
			{
				fullCov.saveToTextFile(
					OUT_DIR + format("/full_cov_%05u.txt", (unsigned int)step));
			}

			// Generate Data Association log?
			if (SAVE_DA_LOG)
			{
				const typename ekfslam_t::TDataAssocInfo& da =
					mapping.getLastDataAssociation();

				const CObservationBearingRange::Ptr obs =
					observations
						->getObservationByClass<CObservationBearingRange>();
				if (obs)
				{
					const CObservationBearingRange* obsRB = obs.get();
					const double tim =
						mrpt::system::timestampToDouble(obsRB->timestamp);

					for (size_t i = 0; i < obsRB->sensedData.size(); i++)
					{
						auto it = da.results.associations.find(i);
						int assoc_ID_in_SLAM;
						if (it != da.results.associations.end())
							assoc_ID_in_SLAM = it->second;
						else
						{
							// It should be a newly created LM:
							auto itNewLM = da.newly_inserted_landmarks.find(i);
							if (itNewLM != da.newly_inserted_landmarks.end())
								assoc_ID_in_SLAM = itNewLM->second;
							else
								assoc_ID_in_SLAM = -1;
						}

						out_da_log << format(
							"%35.22f %8i %10i %10f %12f %12f\n", tim, (int)i,
							assoc_ID_in_SLAM,
							(double)obsRB->sensedData[i].range,
							(double)obsRB->sensedData[i].yaw,
							(double)obsRB->sensedData[i].pitch);
					}
				}
			}

			// Save report on DA performance:
			{
				const typename ekfslam_t::TDataAssocInfo& da =
					mapping.getLastDataAssociation();

				const CObservationBearingRange::Ptr obs =
					observations
						->getObservationByClass<CObservationBearingRange>();
				if (obs)
				{
					const CObservationBearingRange* obsRB = obs.get();
					const double tim =
						mrpt::system::timestampToDouble(obsRB->timestamp);

					auto itDA = GT_DA.find(tim);

					for (size_t i = 0; i < obsRB->sensedData.size(); i++)
					{
						bool is_FP = false, is_TP = false, is_FN = false,
							 is_TN = false;

						if (itDA != GT_DA.end())
						{
							const std::vector<int>& vDA = itDA->second;
							ASSERT_BELOW_(i, vDA.size());
							const int GT_ASSOC = vDA[i];

							auto it = da.results.associations.find(i);
							if (it != da.results.associations.end())
							{
								// This observation was assigned the already
								// existing LM in the map: "it->second"
								// TruePos -> If that LM index corresponds to
								// that in the GT (with index mapping):

								// mrpt::containers::bimap<int,int>
								// DA2GTDA_indices;
								// // Landmark indices bimapping: SLAM DA <--->
								// GROUND TRUTH DA
								if (DA2GTDA_indices.hasKey(it->second))
								{
									const int slam_asigned_LM_idx =
										DA2GTDA_indices.direct(it->second);
									if (slam_asigned_LM_idx == GT_ASSOC)
										is_TP = true;
									else
										is_FP = true;
								}
								else
								{
									// Is this case even possible? Assigned to
									// an index never seen before in the GT...
									// Just in case:
									is_FP = true;
								}
							}
							else
							{
								// No pairing, but should be a newly created LM:
								auto itNewLM =
									da.newly_inserted_landmarks.find(i);
								if (itNewLM !=
									da.newly_inserted_landmarks.end())
								{
									const int new_LM_in_SLAM = itNewLM->second;

									// Was this really a NEW LM not observed
									// before?
									if (DA2GTDA_indices.hasValue(GT_ASSOC))
									{
										// GT says this LM was already observed,
										// so it shouldn't appear here as new:
										is_FN = true;
									}
									else
									{
										// Really observed for the first time:
										is_TN = true;
										DA2GTDA_indices.insert(
											new_LM_in_SLAM, GT_ASSOC);
									}
								}
								else
								{
									// Neither associated nor newly inserted:
									// this branch should never be reached.
								}
							}
						}

						// "%           TIMESTAMP                INDEX_IN_OBS
						// TruePos FalsePos TrueNeg FalseNeg
						// NoGroundTruthSoIDontKnow \n"
						out_da_performance_log << format(
							"%35.22f %13i %8i %8i %8i %8i %8i\n", tim, (int)i,
							(int)(is_TP ? 1 : 0), (int)(is_FP ? 1 : 0),
							(int)(is_TN ? 1 : 0), (int)(is_FN ? 1 : 0),
							(int)(!is_FP && !is_TP && !is_FN && !is_TN ? 1 : 0));
					}
				}
			}

			// Save map to file representations?
			if (SAVE_MAP_REPRESENTATIONS && !(step % SAVE_LOG_FREQUENCY))
			{
				mapping.saveMapAndPath2DRepresentationAsMATLABFile(
					OUT_DIR + format("/slam_state_%05u.m", (unsigned int)step));
			}

			// Save 3D view of the filter state:
			if (win3d || (SAVE_3D_SCENES && !(step % SAVE_LOG_FREQUENCY)))
			{
				COpenGLScene::Ptr scene3D =
					mrpt::make_aligned_shared<COpenGLScene>();
				{
					opengl::CGridPlaneXY::Ptr grid =
						mrpt::make_aligned_shared<opengl::CGridPlaneXY>(
							-1000, 1000, -1000, 1000, 0, 5);
					grid->setColor(0.4, 0.4, 0.4);
					scene3D->insert(grid);
				}

				// Robot path:
				{
					opengl::CSetOfLines::Ptr linesPath =
						mrpt::make_aligned_shared<opengl::CSetOfLines>();
					linesPath->setColor(1, 0, 0);

					TPose3D init_pose;
					if (!meanPath.empty())
						init_pose = CPose3D(meanPath[0]).asTPose();

					int path_decim = 0;
					for (auto& it : meanPath)
					{
						linesPath->appendLine(init_pose, it);
						init_pose = it;

						if (++path_decim > 10)
						{
							path_decim = 0;
							mrpt::opengl::CSetOfObjects::Ptr xyz =
								mrpt::opengl::stock_objects::CornerXYZSimple(
									0.3f, 2.0f);
							xyz->setPose(CPose3D(it));
							scene3D->insert(xyz);
						}
					}
					scene3D->insert(linesPath);

					// finally a big corner for the latest robot pose:
					{
						mrpt::opengl::CSetOfObjects::Ptr xyz =
							mrpt::opengl::stock_objects::CornerXYZSimple(
								1.0, 2.5);
						xyz->setPose(robotPoseMean3D);
						scene3D->insert(xyz);
					}

					// The camera pointing to the current robot pose:
					if (CAMERA_3DSCENE_FOLLOWS_ROBOT)
					{
						win3d->setCameraPointingToPoint(
							robotPoseMean3D.x(), robotPoseMean3D.y(),
							robotPoseMean3D.z());
					}
				}

				// Do we have a ground truth?
				if (GT_PATH.cols() == 6 || GT_PATH.cols() == 3)
				{
					opengl::CSetOfLines::Ptr GT_path =
						mrpt::make_aligned_shared<opengl::CSetOfLines>();
					GT_path->setColor(0, 0, 0);
					size_t N =
						std::min((int)GT_PATH.rows(), (int)meanPath.size());

					if (GT_PATH.cols() == 6)
					{
						double gtx0 = 0, gty0 = 0, gtz0 = 0;
						for (size_t i = 0; i < N; i++)
						{
							const CPose3D p(
								GT_PATH(i, 0), GT_PATH(i, 1), GT_PATH(i, 2),
								GT_PATH(i, 3), GT_PATH(i, 4), GT_PATH(i, 5));

							GT_path->appendLine(
								gtx0, gty0, gtz0, p.x(), p.y(), p.z());
							gtx0 = p.x();
							gty0 = p.y();
							gtz0 = p.z();
						}
					}
					else if (GT_PATH.cols() == 3)
					{
						double gtx0 = 0, gty0 = 0;
						for (size_t i = 0; i < N; i++)
						{
							const CPose2D p(
								GT_PATH(i, 0), GT_PATH(i, 1), GT_PATH(i, 2));

							GT_path->appendLine(gtx0, gty0, 0, p.x(), p.y(), 0);
							gtx0 = p.x();
							gty0 = p.y();
						}
					}
					scene3D->insert(GT_path);
				}

				// Draw latest data association:
				{
					const typename ekfslam_t::TDataAssocInfo& da =
						mapping.getLastDataAssociation();

					mrpt::opengl::CSetOfLines::Ptr lins =
						mrpt::make_aligned_shared<mrpt::opengl::CSetOfLines>();
					lins->setLineWidth(1.2f);
					lins->setColor(1, 1, 1);
					for (auto it = da.results.associations.begin();
						 it != da.results.associations.end(); ++it)
					{
						const prediction_index_t idxPred = it->second;
						// This index must match the internal list of features
						// in the map:
						typename ekfslam_t::KFArray_FEAT featMean;
						mapping.getLandmarkMean(idxPred, featMean);

						TPoint3D featMean3D;
						traits_t::landmark_to_3d(featMean, featMean3D);

						// Line: robot -> landmark:
						lins->appendLine(
							robotPoseMean3D.x(), robotPoseMean3D.y(),
							robotPoseMean3D.z(), featMean3D.x, featMean3D.y,
							featMean3D.z);
					}
					scene3D->insert(lins);
				}

				// The current state of KF-SLAM:
				{
					opengl::CSetOfObjects::Ptr objs =
						mrpt::make_aligned_shared<opengl::CSetOfObjects>();
					mapping.getAs3DObject(objs);
					scene3D->insert(objs);
				}

				if (win3d)
				{
					mrpt::opengl::COpenGLScene::Ptr& scn =
						win3d->get3DSceneAndLock();
					scn = scene3D;

					// Update text messages:
					win3d->addTextMessage(
						0.02, 0.02,
						format(
							"Step %u - Landmarks in the map: %u",
							(unsigned int)step, (unsigned int)LMs.size()),
						TColorf(1, 1, 1), 0, MRPT_GLUT_BITMAP_HELVETICA_12);

					win3d->addTextMessage(
						0.02, 0.06,
						format(
							is_pose_3d
								? "Estimated pose: (x y z qr qx qy qz) = %s"
								: "Estimated pose: (x y yaw) = %s",
							robotPose.mean.asString().c_str()),
						TColorf(1, 1, 1), 1, MRPT_GLUT_BITMAP_HELVETICA_12);

					static vector<double> estHz_vals;
					const double curHz = 1.0 / std::max(1e-9, tim_kf_iter);
					estHz_vals.push_back(curHz);
					if (estHz_vals.size() > 50)
						estHz_vals.erase(estHz_vals.begin());
					const double meanHz = mrpt::math::mean(estHz_vals);

					win3d->addTextMessage(
						0.02, 0.10,
						format(
							"Iteration time: %7ss",
							mrpt::system::unitsFormat(tim_kf_iter).c_str()),
						TColorf(1, 1, 1), 2, MRPT_GLUT_BITMAP_HELVETICA_12);

					win3d->addTextMessage(
						0.02, 0.14,
						format(
							"Execution rate: %7sHz",
							mrpt::system::unitsFormat(meanHz).c_str()),
						TColorf(1, 1, 1), 3, MRPT_GLUT_BITMAP_HELVETICA_12);

					win3d->unlockAccess3DScene();
					win3d->repaint();
				}

				if (SAVE_3D_SCENES && !(step % SAVE_LOG_FREQUENCY))
				{
					// Save to file:
					CFileGZOutputStream f(
						OUT_DIR +
						format("/kf_state_%05u.3Dscene", (unsigned int)step));
					mrpt::serialization::archiveFrom(f) << *scene3D;
				}
			}

			// Free rawlog items memory:
			// --------------------------------------------
			action.reset();
			observations.reset();

		}  // (rawlogEntry>=rawlog_offset)

		cout << format(
			"\nStep %u  - Rawlog entries processed: %u\n", (unsigned int)step,
			(unsigned int)rawlogEntry);

		step++;
	};  // end "while(1)"

	// Partitioning experiment: Only for 6D SLAM:
	traits_t::doPartitioningExperiment(mapping, fullCov, OUT_DIR);

	// Is there ground truth of landmarks positions??
	if (ground_truth_file.size() && fileExists(ground_truth_file))
	{
		CMatrixFloat GT(0, 0);
		try
		{
			GT.loadFromTextFile(ground_truth_file);
		}
		catch (const std::exception& e)
		{
			cerr << "Ignoring the following error loading ground truth file: "
				 << mrpt::exception_to_str(e) << endl;
		}

		if (GT.rows() > 0 && !LMs.empty())
		{
			// Each row has:
			//   [0] [1] [2]  [6]
			//    x   y   z    ID
			CVectorDouble ERRS(0);
			for (size_t i = 0; i < LMs.size(); i++)
			{
				// Find the entry in the GT for this mapped LM:
				bool found = false;
				for (int r = 0; r < GT.rows(); r++)
				{
					if (LM_IDs[i] == GT(r, 6))
					{
						const CPoint3D gtPt(GT(r, 0), GT(r, 1), GT(r, 2));
						// All these conversions are to make it work with
						// either CPoint3D & TPoint2D:
						ERRS.push_back(gtPt.distanceTo(
							CPoint3D(TPoint3D(LMs[i]))));
						found = true;
						break;
					}
				}
				if (!found)
				{
					cerr << "Ground truth entry not found for landmark ID:"
						 << LM_IDs[i] << endl;
				}
			}

			printf("ERRORS VS. GROUND TRUTH:\n");
			printf("Mean Error: %f meters\n", math::mean(ERRS));
			printf("Minimum error: %f meters\n", math::minimum(ERRS));
			printf("Maximum error: %f meters\n", math::maximum(ERRS));
		}
	}  // end if GT

	cout << "********* KF-SLAM finished! **********" << endl;

	if (win3d)
	{
		cout << "\n Close the 3D window to quit the application.\n";
		win3d->waitForKey();
	}
}
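
Run_KF_SLAM is a function template over the filter implementation (the IMPL parameter hidden behind kfslam_traits). A hedged invocation sketch, assuming MRPT's range-bearing EKF-SLAM classes as the template arguments and a hypothetical config flag to pick between the 2D and 3D variants:

// The template arguments and the "do_3D_slam" flag are assumptions, not
// taken from the code above.
void run_kf_slam_demo(CConfigFile& cfg, const std::string& rawlogFileName)
{
	const bool do_3D =
		cfg.read_bool("MappingApplication", "do_3D_slam", false);
	if (do_3D)
		Run_KF_SLAM<mrpt::slam::CRangeBearingKFSLAM>(cfg, rawlogFileName);
	else
		Run_KF_SLAM<mrpt::slam::CRangeBearingKFSLAM2D>(cfg, rawlogFileName);
}

Example no. 16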
void ICPslamWrapper::run3Dwindow()
{
  // Create 3D window if requested (the code is copied from ../mrpt/apps/icp-slam/icp-slam_main.cpp):
  if (SHOW_PROGRESS_3D_REAL_TIME && win3D_)
  {
    // get the currently built map
    metric_map_ = mapBuilder.getCurrentlyBuiltMetricMap();

    lst_current_laser_scans.clear();

    CPose3D robotPose;
    mapBuilder.getCurrentPoseEstimation()->getMean(robotPose);
    COpenGLScene::Ptr scene = COpenGLScene::Create();

    COpenGLViewport::Ptr view = scene->getViewport("main");

    COpenGLViewport::Ptr view_map = scene->createViewport("mini-map");
    view_map->setBorderSize(2);
    view_map->setViewportPosition(0.01, 0.01, 0.35, 0.35);
    view_map->setTransparent(false);

    {
      mrpt::opengl::CCamera &cam = view_map->getCamera();
      cam.setAzimuthDegrees(-90);
      cam.setElevationDegrees(90);
      cam.setPointingAt(robotPose);
      cam.setZoomDistance(20);
      cam.setOrthogonal();
    }

    // The ground:
    mrpt::opengl::CGridPlaneXY::Ptr groundPlane = mrpt::opengl::CGridPlaneXY::Create(-200, 200, -200, 200, 0, 5);
    groundPlane->setColor(0.4, 0.4, 0.4);
    view->insert(groundPlane);
    view_map->insert(CRenderizable::Ptr(groundPlane));  // A copy

    // The camera pointing to the current robot pose:
    if (CAMERA_3DSCENE_FOLLOWS_ROBOT)
    {
      scene->enableFollowCamera(true);

      mrpt::opengl::CCamera &cam = view_map->getCamera();
      cam.setAzimuthDegrees(-45);
      cam.setElevationDegrees(45);
      cam.setPointingAt(robotPose);
    }

    // The maps:
    {
      opengl::CSetOfObjects::Ptr obj = opengl::CSetOfObjects::Create();
      metric_map_->getAs3DObject(obj);
      view->insert(obj);

      // Only the point map:
      opengl::CSetOfObjects::Ptr ptsMap = opengl::CSetOfObjects::Create();
      if (metric_map_->m_pointsMaps.size())
      {
        metric_map_->m_pointsMaps[0]->getAs3DObject(ptsMap);
        view_map->insert(ptsMap);
      }
    }

    // Draw the robot path:
    CPose3DPDF::Ptr posePDF = mapBuilder.getCurrentPoseEstimation();
    CPose3D curRobotPose;
    posePDF->getMean(curRobotPose);
    {
      opengl::CSetOfObjects::Ptr obj = opengl::stock_objects::RobotPioneer();
      obj->setPose(curRobotPose);
      view->insert(obj);
    }
    {
      opengl::CSetOfObjects::Ptr obj = opengl::stock_objects::RobotPioneer();
      obj->setPose(curRobotPose);
      view_map->insert(obj);
    }

    opengl::COpenGLScene::Ptr &ptrScene = win3D_->get3DSceneAndLock();
    ptrScene = scene;

    win3D_->unlockAccess3DScene();

    // Move camera:
    win3D_->setCameraPointingToPoint(curRobotPose.x(), curRobotPose.y(), curRobotPose.z());

    // Update:
    win3D_->forceRepaint();

    // Build list of scans:
    if (SHOW_LASER_SCANS_3D)
    {
      // Rawlog in "Observation-only" format:
      if (isObsBasedRawlog)
      {
        if (IS_CLASS(observation, CObservation2DRangeScan))
        {
          lst_current_laser_scans.push_back(mrpt::ptr_cast<CObservation2DRangeScan>::from(observation));
        }
      }
      else
      {
        // Rawlog in the Actions-SF format:
        for (size_t i = 0;; i++)
        {
          CObservation2DRangeScan::Ptr new_obs = observations->getObservationByClass<CObservation2DRangeScan>(i);
          if (!new_obs)
            break;  // There're no more scans
          else
            lst_current_laser_scans.push_back(new_obs);
        }
      }
    }

    // Draw laser scanners in 3D:
    if (SHOW_LASER_SCANS_3D)
    {
      for (size_t i = 0; i < lst_current_laser_scans.size(); i++)
      {
        // Create opengl object and load scan data from the scan observation:
        opengl::CPlanarLaserScan::Ptr obj = opengl::CPlanarLaserScan::Create();
        obj->setScan(*lst_current_laser_scans[i]);
        obj->setPose(curRobotPose);
        obj->setSurfaceColor(1.0f, 0.0f, 0.0f, 0.5f);
        // insert it into the scene:
        view->insert(obj);
      }
    }
  }
}
Example no. 17
CPose3DPDFPtr CICP::ICP3D_Method_Classic(
		const mrpt::maps::CMetricMap		*m1,
		const mrpt::maps::CMetricMap		*mm2,
		const CPose3DPDFGaussian &initialEstimationPDF,
		TReturnInfo				&outInfo )
{
	MRPT_START

	// The result can be either a Gaussian or a SOG:
	CPose3DPDFPtr								resultPDF;
	CPose3DPDFGaussianPtr						gaussPdf;

	size_t									nCorrespondences=0;
	bool									keepApproaching;
	CPose3D									grossEst = initialEstimationPDF.mean;
	mrpt::utils::TMatchingPairList			correspondences,old_correspondences;
	CPose3D									lastMeanPose;

	// Check the runtime class of the maps:
	ASSERT_(mm2->GetRuntimeClass()->derivedFrom(CLASS_ID(CPointsMap)));
	const CPointsMap		*m2 = (CPointsMap*)mm2;

	// Asserts:
	// -----------------
	ASSERT_( options.ALFA>0 && options.ALFA<1 );

	// The algorithm's output auxiliary info:
	// -------------------------------------------------
	outInfo.nIterations		= 0;
	outInfo.goodness		= 1;
	outInfo.quality			= 0;

	// The gaussian PDF to estimate:
	// ------------------------------------------------------
	gaussPdf = CPose3DPDFGaussian::Create();

	// First gross approximation:
	gaussPdf->mean = grossEst;

	// Initial thresholds:
	TMatchingParams matchParams;
	TMatchingExtraResults matchExtraResults;

	matchParams.maxDistForCorrespondence = options.thresholdDist;			// Distance threshold
	matchParams.maxAngularDistForCorrespondence = options.thresholdAng;	// Angular threshold
	matchParams.onlyKeepTheClosest = options.onlyClosestCorrespondences;
	matchParams.onlyUniqueRobust = options.onlyUniqueRobust;
	matchParams.decimation_other_map_points = options.corresponding_points_decimation;

	// Ensure the maps are not empty!
	// ------------------------------------------------------
	if ( !m2->isEmpty() )
	{
		matchParams.offset_other_map_points = 0;

		// ------------------------------------------------------
		//					The ICP loop
		// ------------------------------------------------------
		do
		{
			matchParams.angularDistPivotPoint = TPoint3D(gaussPdf->mean.x(),gaussPdf->mean.y(),gaussPdf->mean.z());

			// ------------------------------------------------------
			//		Find the matching (for a points map)
			// ------------------------------------------------------
			m1->determineMatching3D(
				m2,						// The other map
				gaussPdf->mean,						// The other map pose
				correspondences,
				matchParams, matchExtraResults);

			nCorrespondences = correspondences.size();

			if ( !nCorrespondences )
			{
				// Nothing we can do !!
				keepApproaching = false;
			}
			else
			{
				// Compute the estimated pose, using Horn's method.
				// ----------------------------------------------------------------------
				mrpt::poses::CPose3DQuat estPoseQuat;
				double transf_scale;
				mrpt::tfest::se3_l2(correspondences, estPoseQuat, transf_scale, false /* don't force unit scale */ );
				gaussPdf->mean = estPoseQuat;

				// If matching has not changed, decrease the thresholds:
				// --------------------------------------------------------
				keepApproaching = true;
				if	(!(fabs(lastMeanPose.x()-gaussPdf->mean.x())>options.minAbsStep_trans ||
					fabs(lastMeanPose.y()-gaussPdf->mean.y())>options.minAbsStep_trans ||
					fabs(lastMeanPose.z()-gaussPdf->mean.z())>options.minAbsStep_trans ||
					fabs(math::wrapToPi(lastMeanPose.yaw()-gaussPdf->mean.yaw()))>options.minAbsStep_rot ||
					fabs(math::wrapToPi(lastMeanPose.pitch()-gaussPdf->mean.pitch()))>options.minAbsStep_rot ||
					fabs(math::wrapToPi(lastMeanPose.roll()-gaussPdf->mean.roll()))>options.minAbsStep_rot ))
				{
					matchParams.maxDistForCorrespondence		*= options.ALFA;
					matchParams.maxAngularDistForCorrespondence		*= options.ALFA;
					if (matchParams.maxDistForCorrespondence < options.smallestThresholdDist )
						keepApproaching = false;

					if (++matchParams.offset_other_map_points>=options.corresponding_points_decimation)
						matchParams.offset_other_map_points=0;
				}

				lastMeanPose = gaussPdf->mean;

			}	// end of "else, there are correspondences"

			// Next iteration:
			outInfo.nIterations++;

			if (outInfo.nIterations >= options.maxIterations && matchParams.maxDistForCorrespondence>options.smallestThresholdDist)
			{
				matchParams.maxDistForCorrespondence		*= options.ALFA;
			}

		} while	( (keepApproaching && outInfo.nIterations<options.maxIterations) ||
					(outInfo.nIterations >= options.maxIterations && matchParams.maxDistForCorrespondence>options.smallestThresholdDist) );


		// -------------------------------------------------
		//   Obtain the covariance matrix of the estimation
		// -------------------------------------------------
		if (!options.skip_cov_calculation && nCorrespondences)
		{
			// ...
		}

		//
		outInfo.goodness = matchExtraResults.correspondencesRatio;


	} // end of "if m2 is not empty"

	// Return the gaussian distribution:
	resultPDF = gaussPdf;

	return resultPDF;

	MRPT_END
}
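
The thresholds consumed in the loop above (thresholdDist, thresholdAng, ALFA, smallestThresholdDist, maxIterations) are the same CICP option fields that Example no. 14 sets explicitly; a minimal configuration sketch limited to the fields that appear in these examples:

void configure_icp(CICP& icp)
{
	icp.options.maxIterations         = 40;
	icp.options.thresholdDist         = 0.75f;  // initial distance gate
	icp.options.thresholdAng          = 0.15f;  // initial angular gate
	icp.options.ALFA                  = 0.30f;  // gate shrink factor when matching stalls
	icp.options.smallestThresholdDist = 0.10f;  // stop refining below this gate
	icp.options.doRANSAC              = false;
	icp.options.dumpToConsole();
}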
Example no. 18
// ------------------------------------------------------
//				MapBuilding_ICP
//  override_rawlog_file: If not empty, use that rawlog
//  instead of that in the config file.
// ------------------------------------------------------
void MapBuilding_ICP(
	const string& INI_FILENAME, const string& override_rawlog_file)
{
	MRPT_START

	CConfigFile iniFile(INI_FILENAME);

	// ------------------------------------------
	//			Load config from file:
	// ------------------------------------------
	const string RAWLOG_FILE = !override_rawlog_file.empty()
								   ? override_rawlog_file
								   : iniFile.read_string(
										 "MappingApplication", "rawlog_file",
										 "", /*Force existence:*/ true);
	const unsigned int rawlog_offset = iniFile.read_int(
		"MappingApplication", "rawlog_offset", 0, /*Force existence:*/ true);
	const string OUT_DIR_STD = iniFile.read_string(
		"MappingApplication", "logOutput_dir", "log_out",
		/*Force existence:*/ true);
	const int LOG_FREQUENCY = iniFile.read_int(
		"MappingApplication", "LOG_FREQUENCY", 5, /*Force existence:*/ true);
	const bool SAVE_POSE_LOG = iniFile.read_bool(
		"MappingApplication", "SAVE_POSE_LOG", false,
		/*Force existence:*/ true);
	const bool SAVE_3D_SCENE = iniFile.read_bool(
		"MappingApplication", "SAVE_3D_SCENE", false,
		/*Force existence:*/ true);
	const bool CAMERA_3DSCENE_FOLLOWS_ROBOT = iniFile.read_bool(
		"MappingApplication", "CAMERA_3DSCENE_FOLLOWS_ROBOT", true,
		/*Force existence:*/ true);

	bool SHOW_PROGRESS_3D_REAL_TIME = false;
	int SHOW_PROGRESS_3D_REAL_TIME_DELAY_MS = 0;
	bool SHOW_LASER_SCANS_3D = true;

	MRPT_LOAD_CONFIG_VAR(
		SHOW_PROGRESS_3D_REAL_TIME, bool, iniFile, "MappingApplication");
	MRPT_LOAD_CONFIG_VAR(
		SHOW_LASER_SCANS_3D, bool, iniFile, "MappingApplication");
	MRPT_LOAD_CONFIG_VAR(
		SHOW_PROGRESS_3D_REAL_TIME_DELAY_MS, int, iniFile,
		"MappingApplication");

	const char* OUT_DIR = OUT_DIR_STD.c_str();

	// ------------------------------------
	//		Constructor of ICP-SLAM object
	// ------------------------------------
	CMetricMapBuilderICP mapBuilder;

	mapBuilder.ICP_options.loadFromConfigFile(iniFile, "MappingApplication");
	mapBuilder.ICP_params.loadFromConfigFile(iniFile, "ICP");

	// Construct the maps with the loaded configuration.
	mapBuilder.initialize();

	// ---------------------------------
	//   CMetricMapBuilder::TOptions
	// ---------------------------------
	mapBuilder.setVerbosityLevel(LVL_DEBUG);
	mapBuilder.options.alwaysInsertByClass.fromString(
		iniFile.read_string("MappingApplication", "alwaysInsertByClass", ""));

	// Print params:
	printf("Running with the following parameters:\n");
	printf(" RAWLOG file:'%s'\n", RAWLOG_FILE.c_str());
	printf(" Output directory:\t\t\t'%s'\n", OUT_DIR);
	printf(
		" matchAgainstTheGrid:\t\t\t%c\n",
		mapBuilder.ICP_options.matchAgainstTheGrid ? 'Y' : 'N');
	printf(" Log record freq:\t\t\t%u\n", LOG_FREQUENCY);
	printf("  SAVE_3D_SCENE:\t\t\t%c\n", SAVE_3D_SCENE ? 'Y' : 'N');
	printf("  SAVE_POSE_LOG:\t\t\t%c\n", SAVE_POSE_LOG ? 'Y' : 'N');
	printf(
		"  CAMERA_3DSCENE_FOLLOWS_ROBOT:\t%c\n",
		CAMERA_3DSCENE_FOLLOWS_ROBOT ? 'Y' : 'N');

	printf("\n");

	mapBuilder.ICP_params.dumpToConsole();
	mapBuilder.ICP_options.dumpToConsole();

	// Checks:
	ASSERT_(RAWLOG_FILE.size() > 0);
	ASSERT_FILE_EXISTS_(RAWLOG_FILE);

	CTicTac tictac, tictacGlobal, tictac_JH;
	int step = 0;
	string str;
	CSimpleMap finalMap;
	float t_exec;
	COccupancyGridMap2D::TEntropyInfo entropy;

	size_t rawlogEntry = 0;
	CFileGZInputStream rawlogFile(RAWLOG_FILE.c_str());

	// Prepare output directory:
	// --------------------------------
	deleteFilesInDirectory(OUT_DIR);
	createDirectory(OUT_DIR);

	// Open log files:
	// ----------------------------------
	CFileOutputStream f_log(format("%s/log_times.txt", OUT_DIR));
	CFileOutputStream f_path(format("%s/log_estimated_path.txt", OUT_DIR));
	CFileOutputStream f_pathOdo(format("%s/log_odometry_path.txt", OUT_DIR));

	// Create 3D window if requested:
	CDisplayWindow3D::Ptr win3D;
#if MRPT_HAS_WXWIDGETS
	if (SHOW_PROGRESS_3D_REAL_TIME)
	{
		win3D = mrpt::make_aligned_shared<CDisplayWindow3D>(
			"ICP-SLAM @ MRPT C++ Library", 600, 500);
		win3D->setCameraZoom(20);
		win3D->setCameraAzimuthDeg(-45);
	}
#endif

	// ----------------------------------------------------------
	//						Map Building
	// ----------------------------------------------------------
	CPose2D odoPose(0, 0, 0);

	tictacGlobal.Tic();
	for (;;)
	{
		CActionCollection::Ptr action;
		CSensoryFrame::Ptr observations;
		CObservation::Ptr observation;

		if (os::kbhit())
		{
			char c = os::getch();
			if (c == 27) break;
		}

		// Load action/observation pair from the rawlog:
		// --------------------------------------------------
		if (!CRawlog::getActionObservationPairOrObservation(
				rawlogFile, action, observations, observation, rawlogEntry))
			break;  // file EOF

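		// Rawlogs come in two flavours: "observation-only" (each entry is a
		// single CObservation) or action/sensory-frame pairs; detect which: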
		const bool isObsBasedRawlog = observation ? true : false;
		std::vector<mrpt::obs::CObservation2DRangeScan::Ptr>
			lst_current_laser_scans;  // Just for drawing in 3D views

		if (rawlogEntry >= rawlog_offset)
		{
			// Update odometry:
			if (isObsBasedRawlog)
			{
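				// Odometry arrives as absolute CObservationOdometry readings;
				// accumulate the increments into a running odometry-only pose: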
				static CPose2D lastOdo;
				static bool firstOdo = true;
				if (IS_CLASS(observation, CObservationOdometry))
				{
					CObservationOdometry::Ptr o =
						std::dynamic_pointer_cast<CObservationOdometry>(
							observation);
					if (!firstOdo) odoPose = odoPose + (o->odometry - lastOdo);

					lastOdo = o->odometry;
					firstOdo = false;
				}
			}
			else
			{
				CActionRobotMovement2D::Ptr act =
					action->getBestMovementEstimation();
				if (act) odoPose = odoPose + act->poseChange->getMeanVal();
			}

			// Build list of scans:
			if (SHOW_LASER_SCANS_3D)
			{
				// Rawlog in "Observation-only" format:
				if (isObsBasedRawlog)
				{
					if (IS_CLASS(observation, CObservation2DRangeScan))
					{
						lst_current_laser_scans.push_back(
							std::dynamic_pointer_cast<CObservation2DRangeScan>(
								observation));
					}
				}
				else
				{
					// Rawlog in the Actions-SF format:
					for (size_t i = 0;; i++)
					{
						CObservation2DRangeScan::Ptr new_obs =
							observations->getObservationByClass<
								CObservation2DRangeScan>(i);
						if (!new_obs)
							break;  // There are no more scans
						else
							lst_current_laser_scans.push_back(new_obs);
					}
				}
			}

			// Execute:
			// ----------------------------------------
			tictac.Tic();
			if (isObsBasedRawlog)
				mapBuilder.processObservation(observation);
			else
				mapBuilder.processActionObservation(*action, *observations);
			t_exec = tictac.Tac();
			printf("Map building executed in %.03fms\n", 1000.0f * t_exec);

			// Info log:
			// -----------
			f_log.printf(
				"%f %i\n", 1000.0f * t_exec,
				mapBuilder.getCurrentlyBuiltMapSize());

			const CMultiMetricMap* mostLikMap =
				mapBuilder.getCurrentlyBuiltMetricMap();

			if (0 == (step % LOG_FREQUENCY))
			{
				// Pose log:
				// -------------
				if (SAVE_POSE_LOG)
				{
					printf("Saving pose log information...");
					mapBuilder.getCurrentPoseEstimation()->saveToTextFile(
						format("%s/mapbuild_posepdf_%03u.txt", OUT_DIR, step));
					printf("Ok\n");
				}
			}

			// Save a 3D scene view of the mapping process:
			if (0 == (step % LOG_FREQUENCY) || (SAVE_3D_SCENE || win3D))
			{
				CPose3D robotPose;
				mapBuilder.getCurrentPoseEstimation()->getMean(robotPose);

				COpenGLScene::Ptr scene =
					mrpt::make_aligned_shared<COpenGLScene>();

				COpenGLViewport::Ptr view = scene->getViewport("main");
				ASSERT_(view);

				COpenGLViewport::Ptr view_map =
					scene->createViewport("mini-map");
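				// Overlay it on the lower-left corner of the window (35% of
				// its size), drawn opaque over the main view: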
				view_map->setBorderSize(2);
				view_map->setViewportPosition(0.01, 0.01, 0.35, 0.35);
				view_map->setTransparent(false);

				{
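					// Top-down orthographic camera for the mini-map, centered
					// on the current robot pose: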
					mrpt::opengl::CCamera& cam = view_map->getCamera();
					cam.setAzimuthDegrees(-90);
					cam.setElevationDegrees(90);
					cam.setPointingAt(robotPose);
					cam.setZoomDistance(20);
					cam.setOrthogonal();
				}

				// The ground:
				mrpt::opengl::CGridPlaneXY::Ptr groundPlane =
					mrpt::make_aligned_shared<mrpt::opengl::CGridPlaneXY>(
						-200, 200, -200, 200, 0, 5);
				groundPlane->setColor(0.4, 0.4, 0.4);
				view->insert(groundPlane);
				view_map->insert(
					CRenderizable::Ptr(groundPlane));  // Same object, shared with the main view

				// The camera pointing to the current robot pose:
				if (CAMERA_3DSCENE_FOLLOWS_ROBOT)
				{
					scene->enableFollowCamera(true);

					mrpt::opengl::CCamera& cam = view_map->getCamera();
					cam.setAzimuthDegrees(-45);
					cam.setElevationDegrees(45);
					cam.setPointingAt(robotPose);
				}

				// The maps:
				{
					opengl::CSetOfObjects::Ptr obj =
						mrpt::make_aligned_shared<opengl::CSetOfObjects>();
					mostLikMap->getAs3DObject(obj);
					view->insert(obj);

					// Only the point map:
					opengl::CSetOfObjects::Ptr ptsMap =
						mrpt::make_aligned_shared<opengl::CSetOfObjects>();
					if (mostLikMap->m_pointsMaps.size())
					{
						mostLikMap->m_pointsMaps[0]->getAs3DObject(ptsMap);
						view_map->insert(ptsMap);
					}
				}

				// Draw the robot model at its current estimated pose (in both views):
				CPose3DPDF::Ptr posePDF = mapBuilder.getCurrentPoseEstimation();
				CPose3D curRobotPose;
				posePDF->getMean(curRobotPose);
				{
					opengl::CSetOfObjects::Ptr obj =
						opengl::stock_objects::RobotPioneer();
					obj->setPose(curRobotPose);
					view->insert(obj);
				}
				{
					opengl::CSetOfObjects::Ptr obj =
						opengl::stock_objects::RobotPioneer();
					obj->setPose(curRobotPose);
					view_map->insert(obj);
				}

				// Draw laser scanners in 3D:
				if (SHOW_LASER_SCANS_3D)
				{
					for (size_t i = 0; i < lst_current_laser_scans.size(); i++)
					{
						// Create opengl object and load scan data from the scan
						// observation:
						opengl::CPlanarLaserScan::Ptr obj =
							mrpt::make_aligned_shared<
								opengl::CPlanarLaserScan>();
						obj->setScan(*lst_current_laser_scans[i]);
						obj->setPose(curRobotPose);
						obj->setSurfaceColor(1.0f, 0.0f, 0.0f, 0.5f);
						// insert into the scene:
						view->insert(obj);
					}
				}

				// Save as file:
				if (0 == (step % LOG_FREQUENCY) && SAVE_3D_SCENE)
				{
					CFileGZOutputStream f(
						format("%s/buildingmap_%05u.3Dscene", OUT_DIR, step));
					f << *scene;
				}

				// Show 3D?
				if (win3D)
				{
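					// Swap in the freshly built scene while holding the
					// window's scene lock, then release it before repainting: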
					opengl::COpenGLScene::Ptr& ptrScene =
						win3D->get3DSceneAndLock();
					ptrScene = scene;

					win3D->unlockAccess3DScene();

					// Move camera:
					win3D->setCameraPointingToPoint(
						curRobotPose.x(), curRobotPose.y(), curRobotPose.z());

					// Update:
					win3D->forceRepaint();

					std::this_thread::sleep_for(
						std::chrono::milliseconds(
							SHOW_PROGRESS_3D_REAL_TIME_DELAY_MS));
				}
			}

			// Save the memory usage:
			// ------------------------------------------------------------------
			{
				printf("Saving memory usage...");
				unsigned long memUsage = getMemoryUsage();
				FILE* f = os::fopen(
					format("%s/log_MemoryUsage.txt", OUT_DIR).c_str(), "at");
				if (f)
				{
					os::fprintf(f, "%u\t%lu\n", step, memUsage);
					os::fclose(f);
				}
				printf("Ok! (%.04f MB)\n", ((float)memUsage) / (1024 * 1024));
			}

			// Save the robot estimated pose for each step:
			f_path.printf(
				"%i %f %f %f\n", step,
				mapBuilder.getCurrentPoseEstimation()->getMeanVal().x(),
				mapBuilder.getCurrentPoseEstimation()->getMeanVal().y(),
				mapBuilder.getCurrentPoseEstimation()->getMeanVal().yaw());

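			// Also log the raw odometry path, for comparison against the ICP estimate: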
			f_pathOdo.printf(
				"%i %f %f %f\n", step, odoPose.x(), odoPose.y(), odoPose.phi());

		}  // end of if "rawlog_offset"...

		step++;
		printf(
			"\n---------------- STEP %u | RAWLOG ENTRY %u ----------------\n",
			step, (unsigned)rawlogEntry);
	}

	printf(
		"\n---------------- END!! (total time: %.03f sec) ----------------\n",
		tictacGlobal.Tac());

	// Save map:
	mapBuilder.getCurrentlyBuiltMap(finalMap);

	str = format("%s/_finalmap_.simplemap", OUT_DIR);
	printf("Dumping final map in binary format to: %s\n", str.c_str());
	mapBuilder.saveCurrentMapToFile(str);

	const CMultiMetricMap* finalPointsMap =
		mapBuilder.getCurrentlyBuiltMetricMap();
	str = format("%s/_finalmaps_.txt", OUT_DIR);
	printf("Dumping final metric maps to %s_XXX\n", str.c_str());
	finalPointsMap->saveMetricMapRepresentationToFile(str);

	if (win3D) win3D->waitForKey();

	MRPT_END
}
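
A minimal follow-up sketch (not part of the application above) showing how a "_finalmap_.simplemap" file like the one written by it could be loaded back with MRPT and re-rasterized into an occupancy grid; the file name, grid limits and resolution below are illustrative assumptions, not values taken from the code above.

#include <mrpt/maps/CSimpleMap.h>
#include <mrpt/maps/COccupancyGridMap2D.h>

// Rebuild a 2D occupancy grid from a previously saved ".simplemap" file.
// NOTE: the file name, grid limits and resolution are illustrative
// assumptions, not values taken from the application above.
void rebuildGridFromSimpleMap()
{
	mrpt::maps::CSimpleMap simpleMap;
	if (!simpleMap.loadFromFile("example_final.simplemap"))
		return;  // missing or invalid file

	// 20x20 m grid at 5 cm resolution (assumed values):
	mrpt::maps::COccupancyGridMap2D grid(-10.0f, 10.0f, -10.0f, 10.0f, 0.05f);

	// Re-insert every (pose PDF, sensory frame) pair stored in the simple map:
	grid.loadFromProbabilisticPosesAndObservations(simpleMap);

	grid.saveAsBitmapFile("rebuilt_grid.png");
}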