Example #1
void CRawlog::addAction(CAction& action)
{
	CActionCollection::Ptr temp =
		mrpt::make_aligned_shared<CActionCollection>();
	temp->insert(action);
	m_seqOfActObs.push_back(temp);
}
void PFLocalizationCore::updateFilter(
	CActionCollection::Ptr _action, CSensoryFrame::Ptr _sf)
{
	if (state_ == INIT) initializeFilter();
	tictac_.Tic();
	pf_.executeOn(pdf_, _action.get(), _sf.get(), &pf_stats_);
	time_last_update_ = _sf->getObservationByIndex(0)->timestamp;
	update_counter_++;
}
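
The two snippets above show only the receiving side of the API: addAction() wraps a single CAction into a fresh CActionCollection before queueing it, and updateFilter() consumes such a collection together with a CSensoryFrame. A minimal caller sketch for the first snippet, assuming an MRPT CActionRobotMovement2D built from an odometry increment (the pose increment and the default motion-model options are purely illustrative):

#include <mrpt/obs/CActionRobotMovement2D.h>
#include <mrpt/obs/CRawlog.h>
#include <mrpt/poses/CPose2D.h>

void addOdometryStep(mrpt::obs::CRawlog& rawlog)
{
	using namespace mrpt::obs;

	CActionRobotMovement2D act;
	CActionRobotMovement2D::TMotionModelOptions opts;  // default motion model
	// Illustrative increment: 10 cm forward, no rotation:
	act.computeFromOdometry(mrpt::poses::CPose2D(0.10, 0.0, 0.0), opts);

	// addAction() wraps the action into a CActionCollection (see above):
	rawlog.addAction(act);
}

This mirrors how the decimation examples further below build their own CActionCollection entries from computeFromOdometry().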
/*---------------------------------------------------------------

					CLSLAM_RBPF_2DLASER

	Implements a 2D local SLAM method based on a RBPF
		over an occupancy grid map. A part of HMT-SLAM.

\param LMH   The local metric hypothesis which must be updated by this SLAM
algorithm.
\param act   The action to process (or nullptr).
\param sf    The observations to process (or nullptr).

 WE ALREADY HAVE CONTROL OVER THE CRITICAL SECTION OF THE LMHs!

--------------------------------------------------------------- */
void CLSLAM_RBPF_2DLASER::processOneLMH(
	CLocalMetricHypothesis* LMH, const CActionCollection::Ptr& actions,
	const CSensoryFrame::Ptr& sf)
{
	MRPT_START

	// Get the current robot pose estimation:
	TPoseID currentPoseID = LMH->m_currentRobotPose;

	// If this is the first iteration, just create a new robot pose at the
	// origin:
	if (currentPoseID == POSEID_INVALID)
	{
		currentPoseID = CHMTSLAM::generatePoseID();
		LMH->m_currentRobotPose = currentPoseID;  // Change it in the LMH

		// Create a new robot pose:
		CPose3D initPose(0, 0, 0);

		ASSERT_(LMH->m_particles.size() > 0);
		for (auto& m_particle : LMH->m_particles)
			m_particle.d->robotPoses[currentPoseID] = initPose;

		ASSERT_(m_parent->m_map.nodeCount() == 1);

		m_parent->m_map_cs.lock();
		CHMHMapNode::Ptr firstArea = m_parent->m_map.getFirstNode();
		ASSERT_(firstArea);
		LMH->m_nodeIDmemberships[currentPoseID] = firstArea->getID();

		// Set annotation for the reference pose:
		firstArea->m_annotations.setElemental(
			NODE_ANNOTATION_REF_POSEID, currentPoseID, LMH->m_ID);
		m_parent->m_map_cs.unlock();
	}

	bool insertNewRobotPose = false;
	if (sf)
	{
		if (LMH->m_nodeIDmemberships.size() < 2)  // If there is just 1 node
		// (the current robot pose),
		// then there is no
		// observation in the map yet!
		{  // Update map if this is the first observation!
			insertNewRobotPose = true;
		}
		else
		{
			// Check minimum distance from current robot pose to those in the
			// neighborhood:
			TMapPoseID2Pose3D lstRobotPoses;
			LMH->getMeans(lstRobotPoses);

			CPose3D* currentRobotPose = &lstRobotPoses[currentPoseID];
			float minDistLin = 1e6f;
			float minDistAng = 1e6f;

			// printf("Poses in graph:\n");
			for (auto& lstRobotPose : lstRobotPoses)
			{
				if (lstRobotPose.first != currentPoseID)
				{
					float linDist =
						lstRobotPose.second.distanceTo(*currentRobotPose);
					float angDist = fabs(math::wrapToPi(
						lstRobotPose.second.yaw() - currentRobotPose->yaw()));

					minDistLin = min(minDistLin, linDist);

					if (linDist < m_parent->m_options.SLAM_MIN_DIST_BETWEEN_OBS)
						minDistAng = min(minDistAng, angDist);
				}
			}

			// time to insert a new node??
			insertNewRobotPose =
				(minDistLin > m_parent->m_options.SLAM_MIN_DIST_BETWEEN_OBS) ||
				(minDistAng > m_parent->m_options.SLAM_MIN_HEADING_BETWEEN_OBS);
		}

	}  // end if there are SF

	// Save data in members so PF callback "prediction_and_update_pfXXXX" have
	// access to them:
	m_insertNewRobotPose = insertNewRobotPose;

	// ------------------------------------------------
	//  Execute RBPF method:
	// 	1) PROCESS ACTION
	// 	2) PROCESS OBSERVATIONS
	// ------------------------------------------------
	CParticleFilter pf;
	pf.m_options = m_parent->m_options.pf_options;
	pf.executeOn(*LMH, actions.get(), sf.get());

	// 3) The appearance observation: update the log likelihood
	// ...

	// -----------------------------------------------------------
	//			4) UPDATE THE MAP
	// -----------------------------------------------------------
	if (insertNewRobotPose)
	{
		m_parent->logStr(
			mrpt::system::LVL_INFO,
			"[CLSLAM_RBPF_2DLASER] Adding new pose...\n");

		//	Leave the up-to-now "current pose" in the map, insert the SF in it,
		// and...
		// ----------------------------------------------------------------------------
		TPoseID newCurrentPoseID = CHMTSLAM::generatePoseID();

		//	...and create a new "current pose" making a copy of the previous
		// one:
		//     and insert the observations into the metric maps:
		// ----------------------------------------------------------------------------
		for (auto& m_particle : LMH->m_particles)
		{
			const CPose3D* curRobotPose =
				&m_particle.d->robotPoses[currentPoseID];
			m_particle.d->robotPoses[newCurrentPoseID] = *curRobotPose;
			sf->insertObservationsInto(&m_particle.d->metricMaps, curRobotPose);
		}

		// Add node membership: for now, a copy of the current pose:
		LMH->m_nodeIDmemberships[newCurrentPoseID] =
			LMH->m_nodeIDmemberships[currentPoseID];

		// Now, insert the SF in the just added robot pose:
		// -----------------------------------------------------
		LMH->m_SFs[currentPoseID] = *sf;

		// Sets the new poseID as current robot pose:
		// ----------------------------------------------------
		TPoseID newlyAddedPose = currentPoseID;
		currentPoseID = LMH->m_currentRobotPose = newCurrentPoseID;

		// Mark the "newlyAddedPose" as pending to reconsider in the graph-cut
		// method
		//  (Done in the main loop @ LSLAM thread)
		// --------------------------------------------------------------------------------
		LMH->m_posesPendingAddPartitioner.push_back(newlyAddedPose);

		m_parent->logFmt(
			mrpt::system::LVL_INFO, "[CLSLAM_RBPF_2DLASER] Added pose %i.\n",
			(int)newlyAddedPose);

		// Notice LC detectors:
		// ------------------------------
		{
			std::lock_guard<std::mutex> lock(m_parent->m_topLCdets_cs);

			for (auto& m_topLCdet : m_parent->m_topLCdets)
				m_topLCdet->OnNewPose(newlyAddedPose, sf.get());
		}

	}  // end of insertNewRobotPose

	MRPT_END
}
Example #4
void xRawLogViewerFrame::OnMenuConvertObservationOnly(wxCommandEvent& event)
{
	WX_START_TRY

	wxMessageBox(_("Select the rawlog file to convert..."));

	string str;
	if (!AskForOpenRawlog(str)) return;

	wxMessageBox(_("Select the target file where to save the new rawlog."));
	string filToSave;
	if (!AskForSaveRawlog(filToSave)) return;

	wxBusyCursor waitCursor;
	CFileGZInputStream fil(str);
	unsigned int filSize = (unsigned int)fil.getTotalBytesCount();

	CFileGZOutputStream f_out(filToSave);

	wxString auxStr;
	wxProgressDialog progDia(
		wxT("Progress"), wxT("Parsing file..."),
		filSize,  // range
		this,  // parent
		wxPD_CAN_ABORT | wxPD_APP_MODAL | wxPD_SMOOTH | wxPD_AUTO_HIDE |
			wxPD_ELAPSED_TIME | wxPD_ESTIMATED_TIME | wxPD_REMAINING_TIME);

	wxTheApp->Yield();  // Let the app. process messages

	unsigned int countLoop = 0;
	bool keepLoading = true;
	string errorMsg;

	CPose2D odometry_accum;

	// We'll save here all the individual observations ordered in time:
	TListTimeAndObservations time_ordered_list_observation;

	mrpt::system::TTimeStamp lastValidObsTime = INVALID_TIMESTAMP;

	while (keepLoading)
	{
		if (countLoop++ % 5 == 0)
		{
			auxStr.sprintf(wxT("Parsing file... %u objects"), countLoop);
			if (!progDia.Update((int)fil.getPosition(), auxStr))
				keepLoading = false;
			wxTheApp->Yield();  // Let the app. process messages
		}

		try
		{
			CSerializable::Ptr newObj;
			fil >> newObj;

			// Check type:
			if (newObj->GetRuntimeClass() == CLASS_ID(CSensoryFrame))
			{
				CSensoryFrame::Ptr SF(
					std::dynamic_pointer_cast<CSensoryFrame>(newObj));
				for (CSensoryFrame::iterator it = SF->begin(); it != SF->end();
					 ++it)
				{
					time_ordered_list_observation.insert(
						TTimeObservationPair((*it)->timestamp, (*it)));
					lastValidObsTime = (*it)->timestamp;
				}
			}
			else if (newObj->GetRuntimeClass() == CLASS_ID(CActionCollection))
			{
				// Replace "odometry action" with "odometry observation":
				CActionCollection::Ptr acts =
					std::dynamic_pointer_cast<CActionCollection>(newObj);
				// Get odometry:
				CActionRobotMovement2D::Ptr actOdom =
					acts->getBestMovementEstimation();
				if (actOdom)
				{
					odometry_accum =
						odometry_accum + actOdom->poseChange->getMeanVal();

					// Generate "odometry obs":
					CObservationOdometry::Ptr newO =
						mrpt::make_aligned_shared<CObservationOdometry>();
					newO->sensorLabel = "odometry";
					newO->timestamp = actOdom->timestamp != INVALID_TIMESTAMP
										  ? actOdom->timestamp
										  : lastValidObsTime;
					newO->odometry = odometry_accum;

					time_ordered_list_observation.insert(
						TTimeObservationPair(newO->timestamp, newO));
				}
			}
			else if (
				newObj->GetRuntimeClass()->derivedFrom(CLASS_ID(CObservation)))
			{
				CObservation::Ptr o =
					std::dynamic_pointer_cast<CObservation>(newObj);
				time_ordered_list_observation.insert(
					TTimeObservationPair(o->timestamp, o));
			}

			// Dump to the new file: Only the oldest one:
			// --------------------------------------------------
			if (time_ordered_list_observation.size() > 30)
			{
				// Save a few of the oldest and continue:
				for (unsigned i = 0; i < 15; i++)
				{
					f_out << *(time_ordered_list_observation.begin()->second);
					time_ordered_list_observation.erase(
						time_ordered_list_observation.begin());
				}
			}

			// Free memory:
			newObj.reset();
		}
		catch (exception& e)
		{
			errorMsg = e.what();
			keepLoading = false;
		}
		catch (...)
		{
			keepLoading = false;
		}
	}  // end while keep loading

	// Save the rest to the out file:
	while (!time_ordered_list_observation.empty())
	{
		f_out << *(time_ordered_list_observation.begin()->second);
		time_ordered_list_observation.erase(
			time_ordered_list_observation.begin());
	}

	progDia.Update(filSize);

	// Set error msg:
	wxMessageBox(_("Done"));

	WX_END_TRY
}
Example #5
void xRawLogViewerFrame::OnMenuLossLessDecFILE(wxCommandEvent& event)
{
	WX_START_TRY

	string filToOpen;
	if (!AskForOpenRawlog(filToOpen)) return;

	wxString strDecimation = wxGetTextFromUser(
		_("The number of observations will be decimated (only 1 out of M will "
		  "be kept). Enter the decimation ratio M:"),
		_("Decimation"), _("1"));
	long DECIMATE_RATIO;
	strDecimation.ToLong(&DECIMATE_RATIO);

	ASSERT_(DECIMATE_RATIO >= 1)

	string filToSave;
	AskForSaveRawlog(filToSave);

	CFileGZInputStream fil(filToOpen);
	CFileGZOutputStream f_out(filToSave);

	wxBusyCursor waitCursor;
	unsigned int filSize = (unsigned int)fil.getTotalBytesCount();

	wxString auxStr;
	wxProgressDialog progDia(
		wxT("Progress"), wxT("Parsing file..."),
		filSize,  // range
		this,  // parent
		wxPD_CAN_ABORT | wxPD_APP_MODAL | wxPD_SMOOTH | wxPD_AUTO_HIDE |
			wxPD_ELAPSED_TIME | wxPD_ESTIMATED_TIME | wxPD_REMAINING_TIME);

	wxTheApp->Yield();  // Let the app. process messages

	unsigned int countLoop = 0;
	int entryIndex = 0;
	bool keepLoading = true;
	string errorMsg;

	// ------------------------------------------------------------------------------
	// METHOD TO BE MEMORY EFFICIENT:
	//  To free the memory of the current rawlog entries as we create the new
	//  one,
	//  then call "clearWithoutDelete" at the end.
	// ------------------------------------------------------------------------------
	CSensoryFrame::Ptr accum_sf;
	CActionRobotMovement2D::TMotionModelOptions odometryOptions;
	bool cummMovementInit = false;
	long SF_counter = 0;

	// Reset cummulative pose change:
	CPose2D accumMovement(0, 0, 0);

	TTimeStamp timestamp_lastAction = INVALID_TIMESTAMP;

	while (keepLoading)
	{
		if (countLoop++ % 100 == 0)
		{
			auxStr.sprintf(wxT("Parsing file... %u objects"), entryIndex);
			if (!progDia.Update((int)fil.getPosition(), auxStr))
				keepLoading = false;
			wxTheApp->Yield();  // Let the app. process messages
		}

		CSerializable::Ptr newObj;
		try
		{
			fil >> newObj;
			entryIndex++;

			// Check type:
			if (newObj->GetRuntimeClass() == CLASS_ID(CSensoryFrame))
			{
				// Decimate Observations
				// ---------------------------

				// Add observations to the accum. SF:
				if (!accum_sf)
					accum_sf = mrpt::make_aligned_shared<CSensoryFrame>();

				// Copy pointers to observations only (fast):
				accum_sf->moveFrom(
					*std::dynamic_pointer_cast<CSensoryFrame>(newObj));

				if (++SF_counter >= DECIMATE_RATIO)
				{
					SF_counter = 0;

					// INSERT OBSERVATIONS:
					f_out << *accum_sf;
					accum_sf.reset();

					// INSERT ACTIONS:
					CActionCollection::Ptr actsCol =
						mrpt::make_aligned_shared<CActionCollection>();
					if (cummMovementInit)
					{
						CActionRobotMovement2D cummMovement;
						cummMovement.computeFromOdometry(
							accumMovement, odometryOptions);
						cummMovement.timestamp = timestamp_lastAction;
						actsCol->insert(cummMovement);
						// Reset odometry accumulation:
						accumMovement = CPose2D(0, 0, 0);
					}
					f_out << *actsCol;
					actsCol.reset();
				}
			}
			else if (newObj->GetRuntimeClass() == CLASS_ID(CActionCollection))
			{
				// Accumulate Actions
				// ----------------------
				CActionCollection::Ptr curActs =
					std::dynamic_pointer_cast<CActionCollection>(newObj);
				CActionRobotMovement2D::Ptr mov =
					curActs->getBestMovementEstimation();
				if (mov)
				{
					timestamp_lastAction = mov->timestamp;
					CPose2D incrPose = mov->poseChange->getMeanVal();

					// If we have previous observations, shift them according to
					// this new increment:
					if (accum_sf)
					{
						CPose3D inv_incrPose3D(
							CPose3D(0, 0, 0) - CPose3D(incrPose));

						for (CSensoryFrame::iterator it = accum_sf->begin();
							 it != accum_sf->end(); ++it)
						{
							CPose3D tmpPose;

							(*it)->getSensorPose(tmpPose);
							tmpPose = inv_incrPose3D + tmpPose;
							(*it)->setSensorPose(tmpPose);
						}
					}

					// Accumulate from odometry:
					accumMovement = accumMovement + incrPose;

					// Copy the probabilistic options from the first entry we
					// find:
					if (!cummMovementInit)
					{
						odometryOptions = mov->motionModelConfiguration;
						cummMovementInit = true;
					}
				}
			}
			else
			{
				// Unknown class:
				THROW_EXCEPTION("Unknown class found in the file!");
			}
		}
		catch (exception& e)
		{
			errorMsg = e.what();
			keepLoading = false;
		}
		catch (...)
		{
			keepLoading = false;
		}
	}  // end while keep loading

	progDia.Update(filSize);

	wxTheApp->Yield();  // Let the app. process messages

	WX_END_TRY
}
Example #6
void xRawLogViewerFrame::OnMenuLossLessDecimate(wxCommandEvent& event)
{
	WX_START_TRY

	CRawlog newRawLog;
	newRawLog.setCommentText(rawlog.getCommentText());

	wxString strDecimation = wxGetTextFromUser(
		_("The number of observations will be decimated (only 1 out of M will "
		  "be kept). Enter the decimation ratio M:"),
		_("Decimation"), _("1"));
	long DECIMATE_RATIO;
	strDecimation.ToLong(&DECIMATE_RATIO);

	ASSERT_(DECIMATE_RATIO >= 1)

	wxBusyCursor busyCursor;
	wxTheApp->Yield();  // Let the app. process messages

	size_t i, N = rawlog.size();

	// ------------------------------------------------------------------------------
	// METHOD TO BE MEMORY EFFICIENT:
	//  To free the memory of the current rawlog entries as we create the new
	//  one,
	//  then call "clearWithoutDelete" at the end.
	// ------------------------------------------------------------------------------
	CSensoryFrame::Ptr accum_sf;
	CActionRobotMovement2D::TMotionModelOptions odometryOptions;
	bool cummMovementInit = false;
	long SF_counter = 0;

	// Reset cummulative pose change:
	CPose2D accumMovement(0, 0, 0);

	// For each entry:
	for (i = 0; i < N; i++)
	{
		CSerializable::Ptr obj = rawlog.getAsGeneric(i);

		if (rawlog.getType(i) == CRawlog::etActionCollection)
		{
			// Accumulate Actions
			// ----------------------
			CActionCollection::Ptr curActs =
				std::dynamic_pointer_cast<CActionCollection>(obj);
			CActionRobotMovement2D::Ptr mov =
				curActs->getBestMovementEstimation();
			if (mov)
			{
				CPose2D incrPose = mov->poseChange->getMeanVal();

				// If we have previous observations, shift them according to
				// this new increment:
				if (accum_sf)
				{
					CPose3D inv_incrPose3D(
						CPose3D(0, 0, 0) - CPose3D(incrPose));

					for (CSensoryFrame::iterator it = accum_sf->begin();
						 it != accum_sf->end(); ++it)
					{
						CPose3D tmpPose;

						(*it)->getSensorPose(tmpPose);
						tmpPose = inv_incrPose3D + tmpPose;
						(*it)->setSensorPose(tmpPose);
					}
				}

				// Accumulate from odometry:
				accumMovement = accumMovement + incrPose;

				// Copy the probabilistic options from the first entry we find:
				if (!cummMovementInit)
				{
					odometryOptions = mov->motionModelConfiguration;
					cummMovementInit = true;
				}
			}
		}
		else if (rawlog.getType(i) == CRawlog::etSensoryFrame)
		{
			// Decimate Observations
			// ---------------------------

			// Add observations to the accum. SF:
			if (!accum_sf)
				accum_sf = mrpt::make_aligned_shared<CSensoryFrame>();

			// Copy pointers to observations only (fast):
			accum_sf->moveFrom(*std::dynamic_pointer_cast<CSensoryFrame>(obj));

			if (++SF_counter >= DECIMATE_RATIO)
			{
				SF_counter = 0;

				// INSERT OBSERVATIONS:
				newRawLog.addObservationsMemoryReference(accum_sf);
				accum_sf.reset();

				// INSERT ACTIONS:
				CActionCollection actsCol;
				if (cummMovementInit)
				{
					CActionRobotMovement2D cummMovement;
					cummMovement.computeFromOdometry(
						accumMovement, odometryOptions);
					actsCol.insert(cummMovement);
					// Reset odometry accumulation:
					accumMovement = CPose2D(0, 0, 0);
				}
				newRawLog.addActions(actsCol);
			}
		}
		else
			THROW_EXCEPTION(
				"This operation implemented for SF-based rawlogs only.");

		// Delete object
		obj.reset();

	}  // end for i each entry

	// Clear the list only (objects already deleted)
	rawlog.clear();

	// Copy as new log:
	rawlog = newRawLog;

	rebuildTreeView();

	WX_END_TRY
}
Example #7
File: kf-slam_main.cpp  Project: MRPT/mrpt
void Run_KF_SLAM(CConfigFile& cfgFile, const std::string& rawlogFileName)
{
	// The EKF-SLAM class:
	// Traits for this KF implementation (2D or 3D)
	using traits_t = kfslam_traits<IMPL>;
	using ekfslam_t = typename traits_t::ekfslam_t;

	ekfslam_t mapping;

	// The rawlog file:
	// ----------------------------------------
	const unsigned int rawlog_offset =
		cfgFile.read_int("MappingApplication", "rawlog_offset", 0);

	const unsigned int SAVE_LOG_FREQUENCY =
		cfgFile.read_int("MappingApplication", "SAVE_LOG_FREQUENCY", 1);

	const bool SAVE_DA_LOG =
		cfgFile.read_bool("MappingApplication", "SAVE_DA_LOG", true);

	const bool SAVE_3D_SCENES =
		cfgFile.read_bool("MappingApplication", "SAVE_3D_SCENES", true);
	const bool SAVE_MAP_REPRESENTATIONS = cfgFile.read_bool(
		"MappingApplication", "SAVE_MAP_REPRESENTATIONS", true);
	bool SHOW_3D_LIVE =
		cfgFile.read_bool("MappingApplication", "SHOW_3D_LIVE", false);
	const bool CAMERA_3DSCENE_FOLLOWS_ROBOT = cfgFile.read_bool(
		"MappingApplication", "CAMERA_3DSCENE_FOLLOWS_ROBOT", false);

#if !MRPT_HAS_WXWIDGETS
	SHOW_3D_LIVE = false;
#endif

	string OUT_DIR = cfgFile.read_string(
		"MappingApplication", "logOutput_dir", "OUT_KF-SLAM");
	string ground_truth_file =
		cfgFile.read_string("MappingApplication", "ground_truth_file", "");
	string ground_truth_file_robot = cfgFile.read_string(
		"MappingApplication", "ground_truth_file_robot", "");

	string ground_truth_data_association = cfgFile.read_string(
		"MappingApplication", "ground_truth_data_association", "");

	cout << "RAWLOG FILE:" << endl << rawlogFileName << endl;
	ASSERT_FILE_EXISTS_(rawlogFileName);
	CFileGZInputStream rawlogFile(rawlogFileName);

	cout << "---------------------------------------------------" << endl
		 << endl;

	deleteFilesInDirectory(OUT_DIR);
	createDirectory(OUT_DIR);

	// Load the config options for mapping:
	// ----------------------------------------
	mapping.loadOptions(cfgFile);
	mapping.KF_options.dumpToConsole();
	mapping.options.dumpToConsole();

	// debug:
	// mapping.KF_options.use_analytic_observation_jacobian = true;
	// mapping.KF_options.use_analytic_transition_jacobian = true;
	// mapping.KF_options.debug_verify_analytic_jacobians = true;

	// Is there ground truth of the robot poses??
	CMatrixDouble GT_PATH(0, 0);
	if (ground_truth_file_robot.size() && fileExists(ground_truth_file_robot))
	{
		GT_PATH.loadFromTextFile(ground_truth_file_robot);
		ASSERT_(GT_PATH.rows() > 0 && GT_PATH.cols() == 6);
	}

	// Is there a ground truth file of the data association?
	std::map<double, std::vector<int>>
		GT_DA;  // Map: timestamp -> vector(index in observation -> real index)
	mrpt::containers::bimap<int, int> DA2GTDA_indices;  // Landmark indices
	// bimapping: SLAM DA <--->
	// GROUND TRUTH DA
	if (!ground_truth_data_association.empty() &&
		fileExists(ground_truth_data_association))
	{
		CMatrixDouble mGT_DA;
		mGT_DA.loadFromTextFile(ground_truth_data_association);
		ASSERT_ABOVEEQ_(mGT_DA.cols(), 3);
		// Convert the loaded matrix into a std::map in GT_DA:
		for (int i = 0; i < mGT_DA.rows(); i++)
		{
			std::vector<int>& v = GT_DA[mGT_DA(i, 0)];
			if (v.size() <= mGT_DA(i, 1)) v.resize(mGT_DA(i, 1) + 1);
			v[mGT_DA(i, 1)] = mGT_DA(i, 2);
		}
		cout << "Loaded " << GT_DA.size()
			 << " entries from DA ground truth file\n";
	}

	// Create output file for DA perf:
	std::ofstream out_da_performance_log;
	{
		const std::string f = std::string(
			OUT_DIR + std::string("/data_association_performance.log"));
		out_da_performance_log.open(f.c_str());
		ASSERTMSG_(
			out_da_performance_log.is_open(),
			std::string("Error writing to: ") + f);

		// Header:
		out_da_performance_log
			<< "%           TIMESTAMP                INDEX_IN_OBS    TruePos "
			   "FalsePos TrueNeg FalseNeg  NoGroundTruthSoIDontKnow \n"
			<< "%--------------------------------------------------------------"
			   "--------------------------------------------------\n";
	}

	if (SHOW_3D_LIVE)
	{
		win3d = mrpt::make_aligned_shared<mrpt::gui::CDisplayWindow3D>(
			"KF-SLAM live view", 800, 500);

		win3d->addTextMessage(
			0.01, 0.96, "Red: Estimated path", TColorf(0.8f, 0.8f, 0.8f), 100,
			MRPT_GLUT_BITMAP_HELVETICA_10);
		win3d->addTextMessage(
			0.01, 0.93, "Black: Ground truth path", TColorf(0.8f, 0.8f, 0.8f),
			101, MRPT_GLUT_BITMAP_HELVETICA_10);
	}

	// Create DA-log output file:
	std::ofstream out_da_log;
	if (SAVE_DA_LOG)
	{
		const std::string f =
			std::string(OUT_DIR + std::string("/data_association.log"));
		out_da_log.open(f.c_str());
		ASSERTMSG_(out_da_log.is_open(), std::string("Error writing to: ") + f);

		// Header:
		out_da_log << "%           TIMESTAMP                INDEX_IN_OBS    ID "
					  "   RANGE(m)    YAW(rad)   PITCH(rad) \n"
				   << "%-------------------------------------------------------"
					  "-------------------------------------\n";
	}

	// The main loop:
	// ---------------------------------------
	CActionCollection::Ptr action;
	CSensoryFrame::Ptr observations;
	size_t rawlogEntry = 0, step = 0;

	vector<TPose3D> meanPath;  // The estimated path
	typename traits_t::posepdf_t robotPose;
	const bool is_pose_3d = robotPose.state_length != 3;

	std::vector<typename IMPL::landmark_point_t> LMs;
	std::map<unsigned int, CLandmark::TLandmarkID> LM_IDs;
	CMatrixDouble fullCov;
	CVectorDouble fullState;
	CTicTac kftictac;

	auto rawlogArch = mrpt::serialization::archiveFrom(rawlogFile);

	for (;;)
	{
		if (os::kbhit())
		{
			char pushKey = os::getch();
			if (27 == pushKey) break;
		}

		// Load action/observation pair from the rawlog:
		// --------------------------------------------------
		if (!CRawlog::readActionObservationPair(
				rawlogArch, action, observations, rawlogEntry))
			break;  // file EOF

		if (rawlogEntry >= rawlog_offset)
		{
			// Process the action and observations:
			// --------------------------------------------
			kftictac.Tic();

			mapping.processActionObservation(action, observations);

			const double tim_kf_iter = kftictac.Tac();

			// Get current state:
			// -------------------------------
			mapping.getCurrentState(robotPose, LMs, LM_IDs, fullState, fullCov);
			cout << "Mean pose: " << endl << robotPose.mean << endl;
			cout << "# of landmarks in the map: " << LMs.size() << endl;

			// Get the mean robot pose as 3D:
			const CPose3D robotPoseMean3D = CPose3D(robotPose.mean);

			// Build the path:
			meanPath.push_back(robotPoseMean3D.asTPose());

			// Save mean pose:
			if (!(step % SAVE_LOG_FREQUENCY))
			{
				const auto p = robotPose.mean.asVectorVal();
				p.saveToTextFile(
					OUT_DIR +
					format("/robot_pose_%05u.txt", (unsigned int)step));
			}

			// Save full cov:
			if (!(step % SAVE_LOG_FREQUENCY))
			{
				fullCov.saveToTextFile(
					OUT_DIR + format("/full_cov_%05u.txt", (unsigned int)step));
			}

			// Generate Data Association log?
			if (SAVE_DA_LOG)
			{
				const typename ekfslam_t::TDataAssocInfo& da =
					mapping.getLastDataAssociation();

				const CObservationBearingRange::Ptr obs =
					observations
						->getObservationByClass<CObservationBearingRange>();
				if (obs)
				{
					const CObservationBearingRange* obsRB = obs.get();
					const double tim =
						mrpt::system::timestampToDouble(obsRB->timestamp);

					for (size_t i = 0; i < obsRB->sensedData.size(); i++)
					{
						auto it = da.results.associations.find(i);
						int assoc_ID_in_SLAM;
						if (it != da.results.associations.end())
							assoc_ID_in_SLAM = it->second;
						else
						{
							// It should be a newly created LM:
							auto itNewLM = da.newly_inserted_landmarks.find(i);
							if (itNewLM != da.newly_inserted_landmarks.end())
								assoc_ID_in_SLAM = itNewLM->second;
							else
								assoc_ID_in_SLAM = -1;
						}

						out_da_log << format(
							"%35.22f %8i %10i %10f %12f %12f\n", tim, (int)i,
							assoc_ID_in_SLAM,
							(double)obsRB->sensedData[i].range,
							(double)obsRB->sensedData[i].yaw,
							(double)obsRB->sensedData[i].pitch);
					}
				}
			}

			// Save report on DA performance:
			{
				const typename ekfslam_t::TDataAssocInfo& da =
					mapping.getLastDataAssociation();

				const CObservationBearingRange::Ptr obs =
					observations
						->getObservationByClass<CObservationBearingRange>();
				if (obs)
				{
					const CObservationBearingRange* obsRB = obs.get();
					const double tim =
						mrpt::system::timestampToDouble(obsRB->timestamp);

					auto itDA = GT_DA.find(tim);

					for (size_t i = 0; i < obsRB->sensedData.size(); i++)
					{
						bool is_FP = false, is_TP = false, is_FN = false,
							 is_TN = false;

						if (itDA != GT_DA.end())
						{
							const std::vector<int>& vDA = itDA->second;
							ASSERT_BELOW_(i, vDA.size());
							const int GT_ASSOC = vDA[i];

							auto it = da.results.associations.find(i);
							if (it != da.results.associations.end())
							{
								// This observation was assigned the already
								// existing LM in the map: "it->second"
								// TruePos -> If that LM index corresponds to
								// that in the GT (with index mapping):

								// mrpt::containers::bimap<int,int>
								// DA2GTDA_indices;
								// // Landmark indices bimapping: SLAM DA <--->
								// GROUND TRUTH DA
								if (DA2GTDA_indices.hasKey(it->second))
								{
									const int slam_asigned_LM_idx =
										DA2GTDA_indices.direct(it->second);
									if (slam_asigned_LM_idx == GT_ASSOC)
										is_TP = true;
									else
										is_FP = true;
								}
								else
								{
									// Is this case possible? Assigned to an
									// index not ever seen for the first time
									// with a GT....
									//  Just in case:
									is_FP = true;
								}
							}
							else
							{
								// No pairing, but should be a newly created LM:
								auto itNewLM =
									da.newly_inserted_landmarks.find(i);
								if (itNewLM !=
									da.newly_inserted_landmarks.end())
								{
									const int new_LM_in_SLAM = itNewLM->second;

									// Was this really a NEW LM not observed
									// before?
									if (DA2GTDA_indices.hasValue(GT_ASSOC))
									{
										// GT says this LM was already observed,
										// so it shouldn't appear here as new:
										is_FN = true;
									}
									else
									{
										// Really observed for the first time:
										is_TN = true;
										DA2GTDA_indices.insert(
											new_LM_in_SLAM, GT_ASSOC);
									}
								}
								else
								{
									// Neither associated nor newly inserted:
									// Shouldn't really ever arrive here.
								}
							}
						}

						// "%           TIMESTAMP                INDEX_IN_OBS
						// TruePos FalsePos TrueNeg FalseNeg
						// NoGroundTruthSoIDontKnow \n"
						out_da_performance_log << format(
							"%35.22f %13i %8i %8i %8i %8i %8i\n", tim, (int)i,
							(int)(is_TP ? 1 : 0), (int)(is_FP ? 1 : 0),
							(int)(is_TN ? 1 : 0), (int)(is_FN ? 1 : 0),
							(int)(!is_FP && !is_TP && !is_FN && !is_TN ? 1 : 0));
					}
				}
			}

			// Save map to file representations?
			if (SAVE_MAP_REPRESENTATIONS && !(step % SAVE_LOG_FREQUENCY))
			{
				mapping.saveMapAndPath2DRepresentationAsMATLABFile(
					OUT_DIR + format("/slam_state_%05u.m", (unsigned int)step));
			}

			// Save 3D view of the filter state:
			if (win3d || (SAVE_3D_SCENES && !(step % SAVE_LOG_FREQUENCY)))
			{
				COpenGLScene::Ptr scene3D =
					mrpt::make_aligned_shared<COpenGLScene>();
				{
					opengl::CGridPlaneXY::Ptr grid =
						mrpt::make_aligned_shared<opengl::CGridPlaneXY>(
							-1000, 1000, -1000, 1000, 0, 5);
					grid->setColor(0.4, 0.4, 0.4);
					scene3D->insert(grid);
				}

				// Robot path:
				{
					opengl::CSetOfLines::Ptr linesPath =
						mrpt::make_aligned_shared<opengl::CSetOfLines>();
					linesPath->setColor(1, 0, 0);

					TPose3D init_pose;
					if (!meanPath.empty())
						init_pose = CPose3D(meanPath[0]).asTPose();

					int path_decim = 0;
					for (auto& it : meanPath)
					{
						linesPath->appendLine(init_pose, it);
						init_pose = it;

						if (++path_decim > 10)
						{
							path_decim = 0;
							mrpt::opengl::CSetOfObjects::Ptr xyz =
								mrpt::opengl::stock_objects::CornerXYZSimple(
									0.3f, 2.0f);
							xyz->setPose(CPose3D(it));
							scene3D->insert(xyz);
						}
					}
					scene3D->insert(linesPath);

					// finally a big corner for the latest robot pose:
					{
						mrpt::opengl::CSetOfObjects::Ptr xyz =
							mrpt::opengl::stock_objects::CornerXYZSimple(
								1.0, 2.5);
						xyz->setPose(robotPoseMean3D);
						scene3D->insert(xyz);
					}

					// The camera pointing to the current robot pose:
					if (CAMERA_3DSCENE_FOLLOWS_ROBOT)
					{
						win3d->setCameraPointingToPoint(
							robotPoseMean3D.x(), robotPoseMean3D.y(),
							robotPoseMean3D.z());
					}
				}

				// Do we have a ground truth?
				if (GT_PATH.cols() == 6 || GT_PATH.cols() == 3)
				{
					opengl::CSetOfLines::Ptr GT_path =
						mrpt::make_aligned_shared<opengl::CSetOfLines>();
					GT_path->setColor(0, 0, 0);
					size_t N =
						std::min((int)GT_PATH.rows(), (int)meanPath.size());

					if (GT_PATH.cols() == 6)
					{
						double gtx0 = 0, gty0 = 0, gtz0 = 0;
						for (size_t i = 0; i < N; i++)
						{
							const CPose3D p(
								GT_PATH(i, 0), GT_PATH(i, 1), GT_PATH(i, 2),
								GT_PATH(i, 3), GT_PATH(i, 4), GT_PATH(i, 5));

							GT_path->appendLine(
								gtx0, gty0, gtz0, p.x(), p.y(), p.z());
							gtx0 = p.x();
							gty0 = p.y();
							gtz0 = p.z();
						}
					}
					else if (GT_PATH.cols() == 3)
					{
						double gtx0 = 0, gty0 = 0;
						for (size_t i = 0; i < N; i++)
						{
							const CPose2D p(
								GT_PATH(i, 0), GT_PATH(i, 1), GT_PATH(i, 2));

							GT_path->appendLine(gtx0, gty0, 0, p.x(), p.y(), 0);
							gtx0 = p.x();
							gty0 = p.y();
						}
					}
					scene3D->insert(GT_path);
				}

				// Draw latest data association:
				{
					const typename ekfslam_t::TDataAssocInfo& da =
						mapping.getLastDataAssociation();

					mrpt::opengl::CSetOfLines::Ptr lins =
						mrpt::make_aligned_shared<mrpt::opengl::CSetOfLines>();
					lins->setLineWidth(1.2f);
					lins->setColor(1, 1, 1);
					for (auto it = da.results.associations.begin();
						 it != da.results.associations.end(); ++it)
					{
						const prediction_index_t idxPred = it->second;
						// This index must match the internal list of features
						// in the map:
						typename ekfslam_t::KFArray_FEAT featMean;
						mapping.getLandmarkMean(idxPred, featMean);

						TPoint3D featMean3D;
						traits_t::landmark_to_3d(featMean, featMean3D);

						// Line: robot -> landmark:
						lins->appendLine(
							robotPoseMean3D.x(), robotPoseMean3D.y(),
							robotPoseMean3D.z(), featMean3D.x, featMean3D.y,
							featMean3D.z);
					}
					scene3D->insert(lins);
				}

				// The current state of KF-SLAM:
				{
					opengl::CSetOfObjects::Ptr objs =
						mrpt::make_aligned_shared<opengl::CSetOfObjects>();
					mapping.getAs3DObject(objs);
					scene3D->insert(objs);
				}

				if (win3d)
				{
					mrpt::opengl::COpenGLScene::Ptr& scn =
						win3d->get3DSceneAndLock();
					scn = scene3D;

					// Update text messages:
					win3d->addTextMessage(
						0.02, 0.02,
						format(
							"Step %u - Landmarks in the map: %u",
							(unsigned int)step, (unsigned int)LMs.size()),
						TColorf(1, 1, 1), 0, MRPT_GLUT_BITMAP_HELVETICA_12);

					win3d->addTextMessage(
						0.02, 0.06,
						format(
							is_pose_3d
								? "Estimated pose: (x y z qr qx qy qz) = %s"
								: "Estimated pose: (x y yaw) = %s",
							robotPose.mean.asString().c_str()),
						TColorf(1, 1, 1), 1, MRPT_GLUT_BITMAP_HELVETICA_12);

					static vector<double> estHz_vals;
					const double curHz = 1.0 / std::max(1e-9, tim_kf_iter);
					estHz_vals.push_back(curHz);
					if (estHz_vals.size() > 50)
						estHz_vals.erase(estHz_vals.begin());
					const double meanHz = mrpt::math::mean(estHz_vals);

					win3d->addTextMessage(
						0.02, 0.10,
						format(
							"Iteration time: %7ss",
							mrpt::system::unitsFormat(tim_kf_iter).c_str()),
						TColorf(1, 1, 1), 2, MRPT_GLUT_BITMAP_HELVETICA_12);

					win3d->addTextMessage(
						0.02, 0.14,
						format(
							"Execution rate: %7sHz",
							mrpt::system::unitsFormat(meanHz).c_str()),
						TColorf(1, 1, 1), 3, MRPT_GLUT_BITMAP_HELVETICA_12);

					win3d->unlockAccess3DScene();
					win3d->repaint();
				}

				if (SAVE_3D_SCENES && !(step % SAVE_LOG_FREQUENCY))
				{
					// Save to file:
					CFileGZOutputStream f(
						OUT_DIR +
						format("/kf_state_%05u.3Dscene", (unsigned int)step));
					mrpt::serialization::archiveFrom(f) << *scene3D;
				}
			}

			// Free rawlog items memory:
			// --------------------------------------------
			action.reset();
			observations.reset();

		}  // (rawlogEntry>=rawlog_offset)

		cout << format(
			"\nStep %u  - Rawlog entries processed: %i\n", (unsigned int)step,
			(unsigned int)rawlogEntry);

		step++;
	};  // end "while(1)"

	// Partitioning experiment: Only for 6D SLAM:
	traits_t::doPartitioningExperiment(mapping, fullCov, OUT_DIR);

	// Is there ground truth of landmarks positions??
	if (ground_truth_file.size() && fileExists(ground_truth_file))
	{
		CMatrixFloat GT(0, 0);
		try
		{
			GT.loadFromTextFile(ground_truth_file);
		}
		catch (const std::exception& e)
		{
			cerr << "Ignoring the following error loading ground truth file: "
				 << mrpt::exception_to_str(e) << endl;
		}

		if (GT.rows() > 0 && !LMs.empty())
		{
			// Each row has:
			//   [0] [1] [2]  [6]
			//    x   y   z    ID
			CVectorDouble ERRS(0);
			for (size_t i = 0; i < LMs.size(); i++)
			{
				// Find the entry in the GT for this mapped LM:
				bool found = false;
				for (int r = 0; r < GT.rows(); r++)
				{
					if (LM_IDs[i] == GT(r, 6))
					{
						const CPoint3D gtPt(GT(r, 0), GT(r, 1), GT(r, 2));
						ERRS.push_back(gtPt.distanceTo(
							CPoint3D(TPoint3D(LMs[i]))));  // All these
						// conversions
						// are to make it
						// work with
						// either
						// CPoint3D &
						// TPoint2D
						found = true;
						break;
					}
				}
				if (!found)
				{
					cerr << "Ground truth entry not found for landmark ID:"
						 << LM_IDs[i] << endl;
				}
			}

			printf("ERRORS VS. GROUND TRUTH:\n");
			printf("Mean Error: %f meters\n", math::mean(ERRS));
			printf("Minimum error: %f meters\n", math::minimum(ERRS));
			printf("Maximum error: %f meters\n", math::maximum(ERRS));
		}
	}  // end if GT

	cout << "********* KF-SLAM finished! **********" << endl;

	if (win3d)
	{
		cout << "\n Close the 3D window to quit the application.\n";
		win3d->waitForKey();
	}
}
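
Stripped of the SLAM, logging and visualization work, the rawlog-reading skeleton that drives this example (and the next one) reduces to the sketch below; the file name is hypothetical and the mrpt::io / mrpt::serialization split is the same one used in the examples:

mrpt::io::CFileGZInputStream rawlogFile("dataset.rawlog");  // hypothetical name
auto arch = mrpt::serialization::archiveFrom(rawlogFile);

mrpt::obs::CActionCollection::Ptr action;
mrpt::obs::CSensoryFrame::Ptr observations;
size_t rawlogEntry = 0;

while (mrpt::obs::CRawlog::readActionObservationPair(
	arch, action, observations, rawlogEntry))
{
	// ... process the action/observation pair here ...

	action.reset();        // free rawlog items memory,
	observations.reset();  // as in the examples above
}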
Example #8
File: test.cpp  Project: Jarlene/mrpt
// ------------------------------------------------------
//                  TestLaser2Imgs
// ------------------------------------------------------
void TestLaser2Imgs()
{
	// Set your rawlog file name
	if (!mrpt::system::fileExists(RAWLOG_FILE))
		THROW_EXCEPTION_FMT(
			"Rawlog file does not exist: %s", RAWLOG_FILE.c_str())

	CActionCollection::Ptr action;
	CSensoryFrame::Ptr observations;
	size_t rawlogEntry = 0;
	// bool					end 			= false;
	CDisplayWindow wind;

	// Set relative path for externally-stored images in rawlogs:
	string rawlog_images_path = extractFileDirectory(RAWLOG_FILE);
	rawlog_images_path += "/Images";
	CImage::setImagesPathBase(rawlog_images_path);  // Set it.

	mrpt::io::CFileGZInputStream rawlogFile(RAWLOG_FILE);

	for (;;)
	{
		if (os::kbhit())
		{
			char c = os::getch();
			if (c == 27) break;
		}

		// Load action/observation pair from the rawlog:
		// --------------------------------------------------
		cout << "Reading act/obs pair from rawlog..." << endl;
		auto arch = mrpt::serialization::archiveFrom(rawlogFile);
		if (!CRawlog::readActionObservationPair(
				arch, action, observations, rawlogEntry))
			break;  // file EOF

		// CAMERA............
		// Get CObservationStereoImages
		CObservationStereoImages::Ptr sImgs;
		CObservationImage::Ptr Img;

		sImgs = observations->getObservationByClass<CObservationStereoImages>();
		if (!sImgs)
		{
			Img = observations->getObservationByClass<CObservationImage>();
			if (!Img) continue;
		}

		CPose3D cameraPose;  // Get Camera Pose (B) (CPose3D)
		CMatrixDouble33 K;  // Get Calibration matrix (K)

		sImgs ? sImgs->getSensorPose(cameraPose)
			  : Img->getSensorPose(cameraPose);
		K = sImgs ? sImgs->leftCamera.intrinsicParams
				  : Img->cameraParams.intrinsicParams;

		// LASER.............
		// Get CObservationRange2D
		CObservation2DRangeScan::Ptr laserScan =
			observations->getObservationByClass<CObservation2DRangeScan>();
		if (!laserScan) continue;

		// Get Laser Pose (A) (CPose3D)
		CPose3D laserPose;
		laserScan->getSensorPose(laserPose);

		if (abs(laserPose.yaw()) > DEG2RAD(90)) continue;  // Only front lasers

		// Get 3D Point relative to the Laser coordinate Frame (P1) (CPoint3D)
		CPoint3D point;
		CSimplePointsMap mapa;
		mapa.insertionOptions.minDistBetweenLaserPoints = 0;
		observations->insertObservationsInto(
			&mapa);  // <- The map contains the pose of the points (P1)

		// Get the points into the map
		vector<float> X, Y, Z;
		vector<float>::iterator itX, itY, itZ;
		mapa.getAllPoints(X, Y, Z);

		unsigned int imgW =
			sImgs ? sImgs->imageLeft.getWidth() : Img->image.getWidth();
		unsigned int imgH =
			sImgs ? sImgs->imageLeft.getHeight() : Img->image.getHeight();

		// unsigned int			idx = 0;
		vector<float> imgX, imgY;
		vector<float>::iterator itImgX, itImgY;
		imgX.resize(X.size());
		imgY.resize(Y.size());

		CImage image;
		image = sImgs ? sImgs->imageLeft : Img->image;

		// Get pixels in the image:
		// Pimg = (kx,ky,k)^T = K(I|0)*P2
		// Main loop
		for (itX = X.begin(), itY = Y.begin(), itZ = Z.begin(),
			itImgX = imgX.begin(), itImgY = imgY.begin();
			 itX != X.end(); itX++, itY++, itZ++, itImgX++, itImgY++)
		{
			// Coordinates Transformation
			CPoint3D pLaser(*itX, *itY, *itZ);
			CPoint3D pCamera(pLaser - cameraPose);

			if (pCamera.z() > 0)
			{
				*itImgX = -K(0, 0) * ((pCamera.x()) / (pCamera.z())) + K(0, 2);
				*itImgY = -K(1, 1) * ((pCamera.y()) / (pCamera.z())) + K(1, 2);

				if (*itImgX > 0 && *itImgX < imgW && *itImgY > 0 && *itImgY < imgH)
					image.filledRectangle(
						*itImgX - 1, *itImgY - 1, *itImgX + 1, *itImgY + 1,
						TColor(255, 0, 0));
			}  // end if
		}  // end for

		action.reset();
		observations.reset();

		wind.showImage(image);

		std::this_thread::sleep_for(50ms);
	};  // end for

	mrpt::system::pause();
}
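
The projection loop above applies the pinhole model quoted in its comment, Pimg = (kx, ky, k)^T = K(I|0) * P2. With fx = K(0,0), fy = K(1,1), cx = K(0,2), cy = K(1,2) it reduces to the illustrative helper below; the example negates the first terms, presumably because of its particular image/camera axis convention, so treat the signs as convention-dependent rather than canonical:

// Minimal pinhole projection sketch (illustrative helper, not an MRPT API):
// maps a camera-frame point (x, y, z), z > 0, to pixel coordinates (u, v).
inline void pinholeProject(
	double x, double y, double z, const mrpt::math::CMatrixDouble33& K,
	double& u, double& v)
{
	u = K(0, 0) * (x / z) + K(0, 2);  // u = fx * x/z + cx
	v = K(1, 1) * (y / z) + K(1, 2);  // v = fy * y/z + cy
}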
Example #9
// ------------------------------------------------------
//				MapBuilding_ICP
//  override_rawlog_file: If not empty, use that rawlog
//  instead of that in the config file.
// ------------------------------------------------------
void MapBuilding_ICP(
	const string& INI_FILENAME, const string& override_rawlog_file)
{
	MRPT_START

	CConfigFile iniFile(INI_FILENAME);

	// ------------------------------------------
	//			Load config from file:
	// ------------------------------------------
	const string RAWLOG_FILE = !override_rawlog_file.empty()
								   ? override_rawlog_file
								   : iniFile.read_string(
										 "MappingApplication", "rawlog_file",
										 "", /*Force existence:*/ true);
	const unsigned int rawlog_offset = iniFile.read_int(
		"MappingApplication", "rawlog_offset", 0, /*Force existence:*/ true);
	const string OUT_DIR_STD = iniFile.read_string(
		"MappingApplication", "logOutput_dir", "log_out",
		/*Force existence:*/ true);
	const int LOG_FREQUENCY = iniFile.read_int(
		"MappingApplication", "LOG_FREQUENCY", 5, /*Force existence:*/ true);
	const bool SAVE_POSE_LOG = iniFile.read_bool(
		"MappingApplication", "SAVE_POSE_LOG", false,
		/*Force existence:*/ true);
	const bool SAVE_3D_SCENE = iniFile.read_bool(
		"MappingApplication", "SAVE_3D_SCENE", false,
		/*Force existence:*/ true);
	const bool CAMERA_3DSCENE_FOLLOWS_ROBOT = iniFile.read_bool(
		"MappingApplication", "CAMERA_3DSCENE_FOLLOWS_ROBOT", true,
		/*Force existence:*/ true);

	bool SHOW_PROGRESS_3D_REAL_TIME = false;
	int SHOW_PROGRESS_3D_REAL_TIME_DELAY_MS = 0;
	bool SHOW_LASER_SCANS_3D = true;

	MRPT_LOAD_CONFIG_VAR(
		SHOW_PROGRESS_3D_REAL_TIME, bool, iniFile, "MappingApplication");
	MRPT_LOAD_CONFIG_VAR(
		SHOW_LASER_SCANS_3D, bool, iniFile, "MappingApplication");
	MRPT_LOAD_CONFIG_VAR(
		SHOW_PROGRESS_3D_REAL_TIME_DELAY_MS, int, iniFile,
		"MappingApplication");

	const char* OUT_DIR = OUT_DIR_STD.c_str();

	// ------------------------------------
	//		Constructor of ICP-SLAM object
	// ------------------------------------
	CMetricMapBuilderICP mapBuilder;

	mapBuilder.ICP_options.loadFromConfigFile(iniFile, "MappingApplication");
	mapBuilder.ICP_params.loadFromConfigFile(iniFile, "ICP");

	// Construct the maps with the loaded configuration.
	mapBuilder.initialize();

	// ---------------------------------
	//   CMetricMapBuilder::TOptions
	// ---------------------------------
	mapBuilder.setVerbosityLevel(LVL_DEBUG);
	mapBuilder.options.alwaysInsertByClass.fromString(
		iniFile.read_string("MappingApplication", "alwaysInsertByClass", ""));

	// Print params:
	printf("Running with the following parameters:\n");
	printf(" RAWLOG file:'%s'\n", RAWLOG_FILE.c_str());
	printf(" Output directory:\t\t\t'%s'\n", OUT_DIR);
	printf(
		" matchAgainstTheGrid:\t\t\t%c\n",
		mapBuilder.ICP_options.matchAgainstTheGrid ? 'Y' : 'N');
	printf(" Log record freq:\t\t\t%u\n", LOG_FREQUENCY);
	printf("  SAVE_3D_SCENE:\t\t\t%c\n", SAVE_3D_SCENE ? 'Y' : 'N');
	printf("  SAVE_POSE_LOG:\t\t\t%c\n", SAVE_POSE_LOG ? 'Y' : 'N');
	printf(
		"  CAMERA_3DSCENE_FOLLOWS_ROBOT:\t%c\n",
		CAMERA_3DSCENE_FOLLOWS_ROBOT ? 'Y' : 'N');

	printf("\n");

	mapBuilder.ICP_params.dumpToConsole();
	mapBuilder.ICP_options.dumpToConsole();

	// Checks:
	ASSERT_(RAWLOG_FILE.size() > 0)
	ASSERT_FILE_EXISTS_(RAWLOG_FILE)

	CTicTac tictac, tictacGlobal, tictac_JH;
	int step = 0;
	string str;
	CSimpleMap finalMap;
	float t_exec;
	COccupancyGridMap2D::TEntropyInfo entropy;

	size_t rawlogEntry = 0;
	CFileGZInputStream rawlogFile(RAWLOG_FILE.c_str());

	// Prepare output directory:
	// --------------------------------
	deleteFilesInDirectory(OUT_DIR);
	createDirectory(OUT_DIR);

	// Open log files:
	// ----------------------------------
	CFileOutputStream f_log(format("%s/log_times.txt", OUT_DIR));
	CFileOutputStream f_path(format("%s/log_estimated_path.txt", OUT_DIR));
	CFileOutputStream f_pathOdo(format("%s/log_odometry_path.txt", OUT_DIR));

	// Create 3D window if requested:
	CDisplayWindow3D::Ptr win3D;
#if MRPT_HAS_WXWIDGETS
	if (SHOW_PROGRESS_3D_REAL_TIME)
	{
		win3D = mrpt::make_aligned_shared<CDisplayWindow3D>(
			"ICP-SLAM @ MRPT C++ Library", 600, 500);
		win3D->setCameraZoom(20);
		win3D->setCameraAzimuthDeg(-45);
	}
#endif

	// ----------------------------------------------------------
	//						Map Building
	// ----------------------------------------------------------
	CPose2D odoPose(0, 0, 0);

	tictacGlobal.Tic();
	for (;;)
	{
		CActionCollection::Ptr action;
		CSensoryFrame::Ptr observations;
		CObservation::Ptr observation;

		if (os::kbhit())
		{
			char c = os::getch();
			if (c == 27) break;
		}

		// Load action/observation pair from the rawlog:
		// --------------------------------------------------
		if (!CRawlog::getActionObservationPairOrObservation(
				rawlogFile, action, observations, observation, rawlogEntry))
			break;  // file EOF

		const bool isObsBasedRawlog = observation ? true : false;
		std::vector<mrpt::obs::CObservation2DRangeScan::Ptr>
			lst_current_laser_scans;  // Just for drawing in 3D views

		if (rawlogEntry >= rawlog_offset)
		{
			// Update odometry:
			if (isObsBasedRawlog)
			{
				static CPose2D lastOdo;
				static bool firstOdo = true;
				if (IS_CLASS(observation, CObservationOdometry))
				{
					CObservationOdometry::Ptr o =
						std::dynamic_pointer_cast<CObservationOdometry>(
							observation);
					if (!firstOdo) odoPose = odoPose + (o->odometry - lastOdo);

					lastOdo = o->odometry;
					firstOdo = false;
				}
			}
			else
			{
				CActionRobotMovement2D::Ptr act =
					action->getBestMovementEstimation();
				if (act) odoPose = odoPose + act->poseChange->getMeanVal();
			}

			// Build list of scans:
			if (SHOW_LASER_SCANS_3D)
			{
				// Rawlog in "Observation-only" format:
				if (isObsBasedRawlog)
				{
					if (IS_CLASS(observation, CObservation2DRangeScan))
					{
						lst_current_laser_scans.push_back(
							std::dynamic_pointer_cast<CObservation2DRangeScan>(
								observation));
					}
				}
				else
				{
					// Rawlog in the Actions-SF format:
					for (size_t i = 0;; i++)
					{
						CObservation2DRangeScan::Ptr new_obs =
							observations->getObservationByClass<
								CObservation2DRangeScan>(i);
						if (!new_obs)
							break;  // There're no more scans
						else
							lst_current_laser_scans.push_back(new_obs);
					}
				}
			}

			// Execute:
			// ----------------------------------------
			tictac.Tic();
			if (isObsBasedRawlog)
				mapBuilder.processObservation(observation);
			else
				mapBuilder.processActionObservation(*action, *observations);
			t_exec = tictac.Tac();
			printf("Map building executed in %.03fms\n", 1000.0f * t_exec);

			// Info log:
			// -----------
			f_log.printf(
				"%f %i\n", 1000.0f * t_exec,
				mapBuilder.getCurrentlyBuiltMapSize());

			const CMultiMetricMap* mostLikMap =
				mapBuilder.getCurrentlyBuiltMetricMap();

			if (0 == (step % LOG_FREQUENCY))
			{
				// Pose log:
				// -------------
				if (SAVE_POSE_LOG)
				{
					printf("Saving pose log information...");
					mapBuilder.getCurrentPoseEstimation()->saveToTextFile(
						format("%s/mapbuild_posepdf_%03u.txt", OUT_DIR, step));
					printf("Ok\n");
				}
			}

			// Save a 3D scene view of the mapping process:
			if (0 == (step % LOG_FREQUENCY) || (SAVE_3D_SCENE || win3D))
			{
				CPose3D robotPose;
				mapBuilder.getCurrentPoseEstimation()->getMean(robotPose);

				COpenGLScene::Ptr scene =
					mrpt::make_aligned_shared<COpenGLScene>();

				COpenGLViewport::Ptr view = scene->getViewport("main");
				ASSERT_(view);

				COpenGLViewport::Ptr view_map =
					scene->createViewport("mini-map");
				view_map->setBorderSize(2);
				view_map->setViewportPosition(0.01, 0.01, 0.35, 0.35);
				view_map->setTransparent(false);

				{
					mrpt::opengl::CCamera& cam = view_map->getCamera();
					cam.setAzimuthDegrees(-90);
					cam.setElevationDegrees(90);
					cam.setPointingAt(robotPose);
					cam.setZoomDistance(20);
					cam.setOrthogonal();
				}

				// The ground:
				mrpt::opengl::CGridPlaneXY::Ptr groundPlane =
					mrpt::make_aligned_shared<mrpt::opengl::CGridPlaneXY>(
						-200, 200, -200, 200, 0, 5);
				groundPlane->setColor(0.4, 0.4, 0.4);
				view->insert(groundPlane);
				view_map->insert(CRenderizable::Ptr(groundPlane));  // A copy

				// The camera pointing to the current robot pose:
				if (CAMERA_3DSCENE_FOLLOWS_ROBOT)
				{
					scene->enableFollowCamera(true);

					mrpt::opengl::CCamera& cam = view_map->getCamera();
					cam.setAzimuthDegrees(-45);
					cam.setElevationDegrees(45);
					cam.setPointingAt(robotPose);
				}

				// The maps:
				{
					opengl::CSetOfObjects::Ptr obj =
						mrpt::make_aligned_shared<opengl::CSetOfObjects>();
					mostLikMap->getAs3DObject(obj);
					view->insert(obj);

					// Only the point map:
					opengl::CSetOfObjects::Ptr ptsMap =
						mrpt::make_aligned_shared<opengl::CSetOfObjects>();
					if (mostLikMap->m_pointsMaps.size())
					{
						mostLikMap->m_pointsMaps[0]->getAs3DObject(ptsMap);
						view_map->insert(ptsMap);
					}
				}

				// Draw the robot path:
				CPose3DPDF::Ptr posePDF = mapBuilder.getCurrentPoseEstimation();
				CPose3D curRobotPose;
				posePDF->getMean(curRobotPose);
				{
					opengl::CSetOfObjects::Ptr obj =
						opengl::stock_objects::RobotPioneer();
					obj->setPose(curRobotPose);
					view->insert(obj);
				}
				{
					opengl::CSetOfObjects::Ptr obj =
						opengl::stock_objects::RobotPioneer();
					obj->setPose(curRobotPose);
					view_map->insert(obj);
				}

				// Draw laser scanners in 3D:
				if (SHOW_LASER_SCANS_3D)
				{
					for (size_t i = 0; i < lst_current_laser_scans.size(); i++)
					{
						// Create opengl object and load scan data from the scan
						// observation:
						opengl::CPlanarLaserScan::Ptr obj =
							mrpt::make_aligned_shared<
								opengl::CPlanarLaserScan>();
						obj->setScan(*lst_current_laser_scans[i]);
						obj->setPose(curRobotPose);
						obj->setSurfaceColor(1.0f, 0.0f, 0.0f, 0.5f);
						// insert into the scene:
						view->insert(obj);
					}
				}

				// Save as file:
				if (0 == (step % LOG_FREQUENCY) && SAVE_3D_SCENE)
				{
					CFileGZOutputStream f(
						format("%s/buildingmap_%05u.3Dscene", OUT_DIR, step));
					f << *scene;
				}

				// Show 3D?
				if (win3D)
				{
					opengl::COpenGLScene::Ptr& ptrScene =
						win3D->get3DSceneAndLock();
					ptrScene = scene;

					win3D->unlockAccess3DScene();

					// Move camera:
					win3D->setCameraPointingToPoint(
						curRobotPose.x(), curRobotPose.y(), curRobotPose.z());

					// Update:
					win3D->forceRepaint();

					std::this_thread::sleep_for(
						std::chrono::milliseconds(
							SHOW_PROGRESS_3D_REAL_TIME_DELAY_MS));
				}
			}

			// Save the memory usage:
			// ------------------------------------------------------------------
			{
				printf("Saving memory usage...");
				unsigned long memUsage = getMemoryUsage();
				FILE* f = os::fopen(
					format("%s/log_MemoryUsage.txt", OUT_DIR).c_str(), "at");
				if (f)
				{
					os::fprintf(f, "%u\t%lu\n", step, memUsage);
					os::fclose(f);
				}
				printf("Ok! (%.04fMb)\n", ((float)memUsage) / (1024 * 1024));
			}

			// Save the robot estimated pose for each step:
			f_path.printf(
				"%i %f %f %f\n", step,
				mapBuilder.getCurrentPoseEstimation()->getMeanVal().x(),
				mapBuilder.getCurrentPoseEstimation()->getMeanVal().y(),
				mapBuilder.getCurrentPoseEstimation()->getMeanVal().yaw());

			f_pathOdo.printf(
				"%i %f %f %f\n", step, odoPose.x(), odoPose.y(), odoPose.phi());

		}  // end of if "rawlog_offset"...

		step++;
		printf(
			"\n---------------- STEP %u | RAWLOG ENTRY %u ----------------\n",
			step, (unsigned)rawlogEntry);
	};

	printf(
		"\n---------------- END!! (total time: %.03f sec) ----------------\n",
		tictacGlobal.Tac());

	// Save map:
	mapBuilder.getCurrentlyBuiltMap(finalMap);

	str = format("%s/_finalmap_.simplemap", OUT_DIR);
	printf("Dumping final map in binary format to: %s\n", str.c_str());
	mapBuilder.saveCurrentMapToFile(str);

	const CMultiMetricMap* finalPointsMap =
		mapBuilder.getCurrentlyBuiltMetricMap();
	str = format("%s/_finalmaps_.txt", OUT_DIR);
	printf("Dumping final metric maps to %s_XXX\n", str.c_str());
	finalPointsMap->saveMetricMapRepresentationToFile(str);

	if (win3D) win3D->waitForKey();

	MRPT_END
}