Example #1
// ------------------------------------------------------
//				Generate a list of matched points
// ------------------------------------------------------
void generate_list_of_points( const TPoints &pA, const TPoints &pB, TMatchingPairList &list )
{
	TMatchingPair		pair;
	for( unsigned int i = 0; i < 5; ++i )
	{
		pair.this_idx	= pair.other_idx = i;
		pair.this_x		= pA[i][0];
		pair.this_y		= pA[i][1];
		pair.this_z		= pA[i][2];

		pair.other_x	= pB[i][0];
		pair.other_y	= pB[i][1];
		pair.other_z	= pB[i][2];

		list.push_back( pair );
	}
} // end generate_list_of_points
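// A minimal usage sketch for the helper above (illustrative only: it assumes
// TPoints is a container indexable as p[i][0..2] holding at least the 5 points
// the loop reads; the actual typedef may differ):
//
//	TPoints pA = ...;                 // 5 reference points
//	TPoints pB = ...;                 // the same 5 points, e.g. after a rigid transform
//	TMatchingPairList correspondences;
//	generate_list_of_points(pA, pB, correspondences);
//	// correspondences now holds 5 pairs linking pA[i] <-> pB[i].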
Example #2
/*---------------------------------------------------------------

					robustRigidTransformation

  The technique was described in the paper:
	J.L. Blanco, J. Gonzalez-Jimenez and J.A. Fernandez-Madrigal.
	"A robust, multi-hypothesis approach to matching occupancy grid maps".
	Robotica, available on CJO2013. doi:10.1017/S0263574712000732.
	http://journals.cambridge.org/action/displayAbstract?aid=8815308

 This works as follows:
	- Repeat "results.ransac_iters" times:
		- Randomly pick TWO correspondences from the set "in_correspondences".
		- Compute the associated rigid transformation.
		- For "ransac_maxSetSize" randomly selected correspondences, test for
		  "consensus" with the current group:
			- If it is compatible (ransac_maxErrorXY, ransac_maxErrorPHI), grow
			  the "consensus set".
			- If not, do not add it.
  ---------------------------------------------------------------*/
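/*  Usage sketch (illustrative only; the parameter values below are assumptions
	chosen for demonstration, not library defaults):

		mrpt::tfest::TMatchingPairList corrs;   // filled in elsewhere
		mrpt::tfest::TSE2RobustParams  params;
		params.ransac_minSetSize           = 3;
		params.ransac_nSimulations         = 0;      // 0 => adaptive number of iterations
		params.probability_find_good_model = 0.999;
		const double normalizationStd = 0.02;        // assumed sensor noise std. dev. [m]
		mrpt::tfest::TSE2RobustResult results;
		const bool ok = mrpt::tfest::se2_l2_robust(
			corrs, normalizationStd, params, results);
		// results.transformation holds a sum-of-Gaussians (SOG) over candidate
		// SE(2) poses; results.largestSubSet is the best consensus set found.
  ---------------------------------------------------------------*/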
bool tfest::se2_l2_robust(
	const mrpt::tfest::TMatchingPairList& in_correspondences,
	const double normalizationStd, const TSE2RobustParams& params,
	TSE2RobustResult& results)
{
	//#define DO_PROFILING

#ifdef DO_PROFILING
	CTimeLogger timlog;
#endif

	const size_t nCorrs = in_correspondences.size();

	// Default: 2 * normalizationStd ("noise level")
	const double MAX_RMSE_TO_END =
		(params.max_rmse_to_end <= 0 ? 2 * normalizationStd
									 : params.max_rmse_to_end);

	MRPT_START

	// Asserts:
	if (nCorrs < params.ransac_minSetSize)
	{
		// Nothing to do!
		results.transformation.clear();
		results.largestSubSet = TMatchingPairList();
		return false;
	}

#ifdef DO_PROFILING
	timlog.enter("ransac.find_max*");
#endif
	// Find the max. index of "this" and "other":
	unsigned int maxThis = 0, maxOther = 0;
	for (const auto& in_correspondence : in_correspondences)
	{
		maxThis = max(maxThis, in_correspondence.this_idx);
		maxOther = max(maxOther, in_correspondence.other_idx);
	}
#ifdef DO_PROFILING
	timlog.leave("ransac.find_max*");
#endif

#ifdef DO_PROFILING
	timlog.enter("ransac.count_unique_corrs");
#endif

	// Fill out 2 arrays indicating whether each element has a correspondence:
	std::vector<bool> hasCorrThis(maxThis + 1, false);
	std::vector<bool> hasCorrOther(maxOther + 1, false);
	unsigned int howManyDifCorrs = 0;
	for (const auto& in_correspondence : in_correspondences)
	{
		if (!hasCorrThis[in_correspondence.this_idx] &&
			!hasCorrOther[in_correspondence.other_idx])
		{
			hasCorrThis[in_correspondence.this_idx] = true;
			hasCorrOther[in_correspondence.other_idx] = true;
			howManyDifCorrs++;
		}
	}
#ifdef DO_PROFILING
	timlog.leave("ransac.count_unique_corrs");
#endif

	// Clear the set of output particles:
	results.transformation.clear();

	// If there are fewer distinct correspondences than the minimum required,
	// quit:
	if (howManyDifCorrs < params.ransac_minSetSize)
	{
		// Nothing we can do here!!! :~$
		results.transformation.clear();
		results.largestSubSet = TMatchingPairList();
		return false;
	}

#ifdef AVOID_MULTIPLE_CORRESPONDENCES
	unsigned k;
	// Find duplicated landmarks (from SIFT features with different
	// descriptors, etc.)
	//   This is to avoid establishing multiple correspondences for the same
	//   physical point!
	// ------------------------------------------------------------------------------------------------
	std::vector<std::vector<int>> listDuplicatedLandmarksThis(maxThis + 1);
	ASSERT_(nCorrs >= 1);
	for (k = 0; k < nCorrs - 1; k++)
	{
		std::vector<int> duplis;
		for (unsigned j = k; j < nCorrs - 1; j++)
		{
			if (in_correspondences[k].this_x == in_correspondences[j].this_x &&
				in_correspondences[k].this_y == in_correspondences[j].this_y &&
				in_correspondences[k].this_z == in_correspondences[j].this_z)
				duplis.push_back(in_correspondences[j].this_idx);
		}
		listDuplicatedLandmarksThis[in_correspondences[k].this_idx] = duplis;
	}

	std::vector<std::vector<int>> listDuplicatedLandmarksOther(maxOther + 1);
	for (k = 0; k < nCorrs - 1; k++)
	{
		std::vector<int> duplis;
		for (unsigned j = k; j < nCorrs - 1; j++)
		{
			if (in_correspondences[k].other_x ==
					in_correspondences[j].other_x &&
				in_correspondences[k].other_y ==
					in_correspondences[j].other_y &&
				in_correspondences[k].other_z == in_correspondences[j].other_z)
				duplis.push_back(in_correspondences[j].other_idx);
		}
		listDuplicatedLandmarksOther[in_correspondences[k].other_idx] = duplis;
	}
#endif

	std::deque<TMatchingPairList> alreadyAddedSubSets;

	CPosePDFGaussian referenceEstimation;
	CPoint2DPDFGaussian pt_this;

	const double ransac_consistency_test_chi2_quantile = 0.99;
	const double chi2_thres_dim1 =
		mrpt::math::chi2inv(ransac_consistency_test_chi2_quantile, 1);

	// -------------------------
	//		The RANSAC loop
	// -------------------------
	size_t largest_consensus_yet = 0;  // Used for dynamic # of steps
	double largestSubSet_RMSE = std::numeric_limits<double>::max();

	results.ransac_iters = params.ransac_nSimulations;
	const bool use_dynamic_iter_number = results.ransac_iters == 0;
	if (use_dynamic_iter_number)
	{
		ASSERT_(
			params.probability_find_good_model > 0 &&
			params.probability_find_good_model < 1);
		// Set an initial # of iterations:
		results.ransac_iters = 10;  // The actual value doesn't matter; it will
		// be recomputed in the first iteration.
	}

	std::vector<bool> alreadySelectedThis, alreadySelectedOther;

	if (!params.ransac_algorithmForLandmarks)
	{
		alreadySelectedThis.assign(maxThis + 1, false);
		alreadySelectedOther.assign(maxOther + 1, false);
	}
	// else -> It will be done anyway inside the for() below

	// First: Build a permutation of the correspondences to pick from it
	// sequentially:
	std::vector<size_t> corrsIdxs(nCorrs), corrsIdxsPermutation;
	for (size_t i = 0; i < nCorrs; i++) corrsIdxs[i] = i;

	size_t iter_idx;
	for (iter_idx = 0; iter_idx < results.ransac_iters;
		 iter_idx++)  // results.ransac_iters can be dynamic
	{
#ifdef DO_PROFILING
		CTimeLoggerEntry tle(timlog, "ransac.iter");
#endif

#ifdef DO_PROFILING
		timlog.enter("ransac.permute");
#endif
		getRandomGenerator().permuteVector(corrsIdxs, corrsIdxsPermutation);

#ifdef DO_PROFILING
		timlog.leave("ransac.permute");
#endif

		TMatchingPairList subSet;

		// Select a subset of correspondences at random:
		if (params.ransac_algorithmForLandmarks)
		{
#ifdef DO_PROFILING
			timlog.enter("ransac.reset_selection_marks");
#endif
			alreadySelectedThis.assign(maxThis + 1, false);
			alreadySelectedOther.assign(maxOther + 1, false);
#ifdef DO_PROFILING
			timlog.leave("ransac.reset_selection_marks");
#endif
		}
		else
		{
			// For points: Do not repeat the corrs, and take the number of corrs
			// as weights
		}

// Try to build a subset of "ransac_maxSetSize" (maximum) elements that achieve
// consensus:
// ------------------------------------------------------------------------------------------
#ifdef DO_PROFILING
		timlog.enter("ransac.inner_loops");
#endif
		for (unsigned int j = 0;
			 j < nCorrs && subSet.size() < params.ransac_maxSetSize; j++)
		{
			const size_t idx = corrsIdxsPermutation[j];

			const TMatchingPair& corr_j = in_correspondences[idx];

			// Don't pick the same features twice!
			if (alreadySelectedThis[corr_j.this_idx] ||
				alreadySelectedOther[corr_j.other_idx])
				continue;

			// Additional user-provided filter:
			if (params.user_individual_compat_callback)
			{
				mrpt::tfest::TPotentialMatch pm;
				pm.idx_this = corr_j.this_idx;
				pm.idx_other = corr_j.other_idx;
				if (!params.user_individual_compat_callback(pm))
					continue;  // Skip this one!
			}

			if (subSet.size() < 2)
			{
				// ------------------------------------------------------------------------------------------------------
				// If we are within the first two correspondences, just add them
				// to the subset:
				// ------------------------------------------------------------------------------------------------------
				subSet.push_back(corr_j);
				markAsPicked(corr_j, alreadySelectedThis, alreadySelectedOther);

				if (subSet.size() == 2)
				{
					// Consistency test (see the paper cited above):
					//  Check the feasibility of this pair "idx1"-"idx2":
					//  the distance between the pair of points in MAP1 must be
					//  very close to that of their correspondences in MAP2:
					const double corrs_dist1 =
						mrpt::math::distanceBetweenPoints(
							subSet[0].this_x, subSet[0].this_y,
							subSet[1].this_x, subSet[1].this_y);

					const double corrs_dist2 =
						mrpt::math::distanceBetweenPoints(
							subSet[0].other_x, subSet[0].other_y,
							subSet[1].other_x, subSet[1].other_y);

					// Is it a consistent possibility?
					//  We use a chi2 test (see paper for the derivation)
					const double corrs_dist_chi2 =
						square(square(corrs_dist1) - square(corrs_dist2)) /
						(8.0 * square(normalizationStd) *
						 (square(corrs_dist1) + square(corrs_dist2)));
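					// i.e. (d1^2 - d2^2)^2 / (8 * sigma^2 * (d1^2 + d2^2)) with
					// sigma = normalizationStd; under the hypothesis that both pairs
					// span the same physical distance this statistic approximately
					// follows a chi-square distribution with 1 dof, hence the
					// comparison against chi2inv(0.99, 1) below.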

					bool is_acceptable = (corrs_dist_chi2 < chi2_thres_dim1);

					if (is_acceptable)
					{
						// Perform estimation:
						tfest::se2_l2(subSet, referenceEstimation);
						// Normalized covariance: scale!
						referenceEstimation.cov *= square(normalizationStd);

						// Additional filter:
						//  If the correspondences are such that the resulting
						//  transformation is highly ambiguous, discard it!
						is_acceptable =
							(referenceEstimation.cov(2, 2) <
							 square(DEG2RAD(5.0f)));
					}

					if (!is_acceptable)
					{
						// Remove this correspondence & try again with a
						// different pair:
						subSet.erase(subSet.begin() + (subSet.size() - 1));
					}
					else
					{
						// Only mark as picked if we're really keeping it:
						markAsPicked(
							corr_j, alreadySelectedThis, alreadySelectedOther);
					}
				}
			}
			else
			{
#ifdef DO_PROFILING
				timlog.enter("ransac.test_consistency");
#endif

				// ------------------------------------------------------------------------------------------------------
				// The normal case:
				//  - test for "consensus" with the current group:
				//		- If it is compatible (ransac_maxErrorXY,
				// ransac_maxErrorPHI), grow the "consensus set"
				//		- If not, do not add it.
				// ------------------------------------------------------------------------------------------------------

				// Test the Mahalanobis distance between
				//  "referenceEstimation (+) point_other" and "point_this":
				referenceEstimation.composePoint(
					mrpt::math::TPoint2D(corr_j.other_x, corr_j.other_y),
					pt_this);

				const double maha_dist = pt_this.mahalanobisDistanceToPoint(
					corr_j.this_x, corr_j.this_y);

				const bool passTest =
					maha_dist < params.ransac_mahalanobisDistanceThreshold;

				if (passTest)
				{
					// OK, consensus passed:
					subSet.push_back(corr_j);
					markAsPicked(
						corr_j, alreadySelectedThis, alreadySelectedOther);
				}
				// else -> Test failed

#ifdef DO_PROFILING
				timlog.leave("ransac.test_consistency");
#endif
			}  // end else "normal case"

		}  // end for j
#ifdef DO_PROFILING
		timlog.leave("ransac.inner_loops");
#endif

		const bool has_to_eval_RMSE =
			(subSet.size() >= params.ransac_minSetSize);

		// Compute the RMSE of this matching and the corresponding
		// transformation (only if we'll use this value below)
		double this_subset_RMSE = 0;
		if (has_to_eval_RMSE)
		{
#ifdef DO_PROFILING
			CTimeLoggerEntry tle(timlog, "ransac.comp_rmse");
#endif

			// Recompute referenceEstimation from all the corrs:
			tfest::se2_l2(subSet, referenceEstimation);
			// Normalized covariance: scale!
			referenceEstimation.cov *= square(normalizationStd);

			for (size_t k = 0; k < subSet.size(); k++)
			{
				double gx, gy;
				referenceEstimation.mean.composePoint(
					subSet[k].other_x, subSet[k].other_y, gx, gy);

				this_subset_RMSE +=
					mrpt::math::distanceSqrBetweenPoints<double>(
						subSet[k].this_x, subSet[k].this_y, gx, gy);
			}
			this_subset_RMSE /= std::max(static_cast<size_t>(1), subSet.size());
		}
		else
		{
			this_subset_RMSE = std::numeric_limits<double>::max();
		}

		// Save the estimation result as a "particle", only if the subSet
		// contains at least "ransac_minSetSize" elements:
		if (subSet.size() >= params.ransac_minSetSize)
		{
			// If this subset was previously added to the SOG, just increment
			// its weight and do not add a new mode:
			int indexFound = -1;

			// JLBC Added DEC-2007: An alternative (optional) method to fuse
			// Gaussian modes:
			if (!params.ransac_fuseByCorrsMatch)
			{
				// Find matching by approximate match in the X,Y,PHI means
				// -------------------------------------------------------------------
				for (size_t i = 0; i < results.transformation.size(); i++)
				{
					double diffXY =
						results.transformation.get(i).mean.distanceTo(
							referenceEstimation.mean);
					double diffPhi = fabs(math::wrapToPi(
						results.transformation.get(i).mean.phi() -
						referenceEstimation.mean.phi()));
					if (diffXY < params.ransac_fuseMaxDiffXY &&
						diffPhi < params.ransac_fuseMaxDiffPhi)
					{
						// printf("Match by distance found: distXY:%f distPhi=%f
						// deg\n",diffXY,RAD2DEG(diffPhi));
						indexFound = i;
						break;
					}
				}
			}
			else
			{
				// Find matching mode by exact match in the list of
				// correspondences:
				// -------------------------------------------------------------------
				// Sort "subSet" in order to compare them easily!
				// std::sort( subSet.begin(), subSet.end() );

				// Try to find matching corrs:
				for (size_t i = 0; i < alreadyAddedSubSets.size(); i++)
				{
					if (subSet == alreadyAddedSubSets[i])
					{
						indexFound = i;
						break;
					}
				}
			}

			if (indexFound != -1)
			{
				// This is an already added mode:
				if (params.ransac_algorithmForLandmarks)
					results.transformation.get(indexFound).log_w = log(
						1 + exp(results.transformation.get(indexFound).log_w));
				else
					results.transformation.get(indexFound).log_w =
						log(subSet.size() +
							exp(results.transformation.get(indexFound).log_w));
			}
			else
			{
				// Add a new mode to the SOG:
				alreadyAddedSubSets.push_back(subSet);

				CPosePDFSOG::TGaussianMode newSOGMode;
				if (params.ransac_algorithmForLandmarks)
					newSOGMode.log_w = 0;  // log(1);
				else
					newSOGMode.log_w = log(static_cast<double>(subSet.size()));

				newSOGMode.mean = referenceEstimation.mean;
				newSOGMode.cov = referenceEstimation.cov;

				// Add a new mode to the SOG!
				results.transformation.push_back(newSOGMode);
			}
		}  // end if subSet.size()>=ransac_minSetSize

		const size_t ninliers = subSet.size();
		if (largest_consensus_yet < ninliers)
		{
			largest_consensus_yet = ninliers;

			// Dynamic # of steps:
			if (use_dynamic_iter_number)
			{
				// Update the estimate of the number of trials needed to
				// ensure we pick, with probability p, a data set with no
				// outliers.
				const double fracinliers =
					ninliers /
					static_cast<double>(howManyDifCorrs);  // corrsIdxs.size());
				double pNoOutliers =
					1 - pow(fracinliers, static_cast<double>(
											 2.0 /*minimumSizeSamplesToFit*/));

				pNoOutliers = std::max(
					std::numeric_limits<double>::epsilon(),
					pNoOutliers);  // Avoid division by -Inf
				pNoOutliers = std::min(
					1.0 - std::numeric_limits<double>::epsilon(),
					pNoOutliers);  // Avoid division by 0.
				// Updated number of iterations:
				results.ransac_iters =
					log(1 - params.probability_find_good_model) /
					log(pNoOutliers);
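				// Standard adaptive-RANSAC bound: iters = log(1 - p) / log(1 - w^s),
				// with p = probability_find_good_model, w = estimated inlier fraction
				// and s = 2 (the minimal sample size used here).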

				results.ransac_iters = std::max(
					results.ransac_iters, params.ransac_min_nSimulations);

				if (params.verbose)
					cout << "[tfest::RANSAC] Iter #" << iter_idx
						 << ":est. # iters=" << results.ransac_iters
						 << " pNoOutliers=" << pNoOutliers
						 << " #inliers: " << ninliers << endl;
			}
		}

		// Save the largest subset:
		if (subSet.size() >= params.ransac_minSetSize &&
			this_subset_RMSE < largestSubSet_RMSE)
		{
			if (params.verbose)
				cout << "[tfest::RANSAC] Iter #" << iter_idx
					 << " Better subset: " << subSet.size()
					 << " inliers, RMSE=" << this_subset_RMSE << endl;

			results.largestSubSet = subSet;
			largestSubSet_RMSE = this_subset_RMSE;
		}

		// Is the found subset good enough?
		if (subSet.size() >= params.ransac_minSetSize &&
			this_subset_RMSE < MAX_RMSE_TO_END)
		{
			break;  // end RANSAC iterations.
		}

#ifdef DO_PROFILING
		timlog.leave("ransac.iter");
#endif
	}  // end for each iteration

	if (params.verbose)
		cout << "[tfest::RANSAC] Finished after " << iter_idx
			 << " iterations.\n";

	// Normalize the weights of the particles so they sum to unity:
	results.transformation.normalizeWeights();

	// Done!

	MRPT_END_WITH_CLEAN_UP(
		printf("nCorrs=%u\n", static_cast<unsigned int>(nCorrs));
		printf("Saving '_debug_in_correspondences.txt'...");
		in_correspondences.dumpToFile("_debug_in_correspondences.txt");
		printf("Ok\n"); printf("Saving '_debug_results.transformation.txt'...");
		results.transformation.saveToTextFile(
			"_debug_results.transformation.txt");
		printf("Ok\n"););
} // end se2_l2_robust
Example #3
/*---------------------------------------------------------------
						determineMatching2D
 ---------------------------------------------------------------*/
void COccupancyGridMap2D::determineMatching2D(
	const mrpt::maps::CMetricMap      * otherMap2,
	const CPose2D         & otherMapPose_,
	TMatchingPairList     & correspondences,
	const TMatchingParams & params,
	TMatchingExtraResults & extraResults ) const
{
	MRPT_START

	extraResults = TMatchingExtraResults();

	ASSERT_ABOVE_(params.decimation_other_map_points,0)
	ASSERT_BELOW_(params.offset_other_map_points, params.decimation_other_map_points)

	ASSERT_(otherMap2->GetRuntimeClass()->derivedFrom( CLASS_ID(CPointsMap) ));
	const CPointsMap			*otherMap = static_cast<const CPointsMap*>(otherMap2);

	const TPose2D  otherMapPose = TPose2D(otherMapPose_);

	const size_t nLocalPoints = otherMap->size();
	std::vector<float>				x_locals(nLocalPoints), y_locals(nLocalPoints),z_locals(nLocalPoints);

	const float sin_phi = sin(otherMapPose.phi);
	const float cos_phi = cos(otherMapPose.phi);

	size_t nOtherMapPointsWithCorrespondence = 0;	// Number of points with at least one correspondence
	size_t nTotalCorrespondences = 0;				// Total number of correspondences
	float _sumSqrDist=0;

	// The number of cells to look around each point:
	const int cellsSearchRange = round( params.maxDistForCorrespondence / resolution );

	// Initially there are no correspondences:
	correspondences.clear();

	// Is there a local map at all?
	if (!nLocalPoints)  return;  // No

	// Only attempt the matching if there is any chance that
	//  the two maps actually overlap:
	// -----------------------------------------------------------
	float local_x_min =  std::numeric_limits<float>::max();
	float local_x_max = -std::numeric_limits<float>::max();
	float local_y_min =  std::numeric_limits<float>::max();
	float local_y_max = -std::numeric_limits<float>::max();

	const std::vector<float> & otherMap_pxs = otherMap->getPointsBufferRef_x();
	const std::vector<float> & otherMap_pys = otherMap->getPointsBufferRef_y();
	const std::vector<float> & otherMap_pzs = otherMap->getPointsBufferRef_z();

	// Translate all local map points:
	for (unsigned int localIdx=params.offset_other_map_points;localIdx<nLocalPoints;localIdx+=params.decimation_other_map_points)
	{
		// Rotate and translate each point of the local map:
		const float xx = x_locals[localIdx] = otherMapPose.x + cos_phi* otherMap_pxs[localIdx] - sin_phi* otherMap_pys[localIdx] ;
		const float yy = y_locals[localIdx] = otherMapPose.y + sin_phi* otherMap_pxs[localIdx] + cos_phi* otherMap_pys[localIdx] ;
		z_locals[localIdx] = /* otherMapPose.z +*/ otherMap_pzs[localIdx];

		// Keep track of the max/min of the points:
		local_x_min = min(local_x_min,xx);
		local_x_max = max(local_x_max,xx);
		local_y_min = min(local_y_min,yy);
		local_y_max = max(local_y_max,yy);
	}

	// If the local map is entirely out of the grid,
	//   do not even try to match them!!
	if (local_x_min> x_max ||
		local_x_max< x_min ||
		local_y_min> y_max ||
		local_y_max< y_min) return;		// Matching is NULL!


	const cellType thresholdCellValue = p2l(0.5f);
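	// Note: p2l() maps an occupancy probability to the internal log-odds cellType,
	// so cells stored with a value below p2l(0.5) are treated as occupied below
	// (cf. the commented-out "getCell(cx,cy)<0.49" check).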

	// For each point in the other map:
	for (unsigned int localIdx=params.offset_other_map_points;localIdx<nLocalPoints;localIdx+=params.decimation_other_map_points)
	{
		// Starting value:
		float maxDistForCorrespondenceSquared = square( params.maxDistForCorrespondence );

		// For speed-up:
		const float x_local = x_locals[localIdx];
		const float y_local = y_locals[localIdx];
		const float z_local = z_locals[localIdx];

		// Look for the occupied cell closest to the map point:
		float min_dist = 1e6;
		TMatchingPair		closestCorr;

		// Get the indices of the cell where the point falls:
		const int cx0=x2idx(x_local);
		const int cy0=y2idx(y_local);

		// Get the rectangle to search within:
		const int cx_min = max(0, cx0 - cellsSearchRange );
		const int cx_max = min(static_cast<int>(size_x)-1, cx0 + cellsSearchRange );
		const int cy_min = max(0, cy0 - cellsSearchRange );
		const int cy_max = min(static_cast<int>(size_y)-1, cy0 + cellsSearchRange );

		// Will be set to true if a correspondence is found:
		bool thisLocalHasCorr = false;


		// Look in nearby cells:
		for (int cx=cx_min;cx<=cx_max;cx++)
		{
			for (int cy=cy_min;cy<=cy_max;cy++)
			{
				// Is an occupied cell?
				if ( map[cx+cy*size_x] < thresholdCellValue )//  getCell(cx,cy)<0.49)
				{
					const float residual_x = idx2x(cx)- x_local;
					const float residual_y = idx2y(cy)- y_local;

					// Compute max. allowed distance:
					maxDistForCorrespondenceSquared = square(
						params.maxAngularDistForCorrespondence * params.angularDistPivotPoint.distanceTo(TPoint3D(x_local,y_local,0) ) +
								params.maxDistForCorrespondence );
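					// i.e. the allowed radius grows linearly with the distance of the
					// local point from the angular pivot:
					//   maxDistForCorrespondence + maxAngularDistForCorrespondence * range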

					// Square distance to the point:
					const float this_dist = square( residual_x ) + square( residual_y );

					if (this_dist<maxDistForCorrespondenceSquared)
					{
						if (!params.onlyKeepTheClosest)
						{
							// save the correspondence:
							nTotalCorrespondences++;
							TMatchingPair   mp;
							mp.this_idx = cx+cy*size_x;
							mp.this_x = idx2x(cx);
							mp.this_y = idx2y(cy);
							mp.this_z = z_local;
							mp.other_idx = localIdx;
							mp.other_x = otherMap_pxs[localIdx];
							mp.other_y = otherMap_pys[localIdx];
							mp.other_z = otherMap_pzs[localIdx];
							correspondences.push_back( mp );
						}
						else
						{
							// save the closest only:
							if (this_dist<min_dist)
							{
								min_dist = this_dist;

								closestCorr.this_idx = cx+cy*size_x;
								closestCorr.this_x = idx2x(cx);
								closestCorr.this_y = idx2y(cy);
								closestCorr.this_z = z_local;
								closestCorr.other_idx = localIdx;
								closestCorr.other_x = otherMap_pxs[localIdx];
								closestCorr.other_y = otherMap_pys[localIdx];
								closestCorr.other_z = otherMap_pzs[localIdx];
							}
						}

						// At least one:
						thisLocalHasCorr = true;
					}
				}
			}
		} // End of find closest nearby cell

		// save the closest correspondence:
		if (params.onlyKeepTheClosest && (min_dist<maxDistForCorrespondenceSquared))
		{
			nTotalCorrespondences++;
			correspondences.push_back( closestCorr );
		}

		// At least one corr:
		if (thisLocalHasCorr)
		{
			nOtherMapPointsWithCorrespondence++;

			// Accumulate the MSE:
			_sumSqrDist+= min_dist;
		}

	}	// End "for each local point"...

	extraResults.correspondencesRatio = nOtherMapPointsWithCorrespondence / static_cast<float>(nLocalPoints/params.decimation_other_map_points);
	extraResults.sumSqrDist = _sumSqrDist;


	MRPT_END
}
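// A minimal call sketch for the method above (illustrative only: "grid" and
// "otherPointsMap" are hypothetical, already-built maps, and the parameter
// values are arbitrary):
//
//	TMatchingParams mp;
//	mp.maxDistForCorrespondence        = 0.10f;
//	mp.maxAngularDistForCorrespondence = 0;
//	mp.onlyKeepTheClosest              = true;
//	TMatchingPairList     corrs;
//	TMatchingExtraResults extra;
//	grid.determineMatching2D(&otherPointsMap, CPose2D(0, 0, 0), corrs, mp, extra);
//	// corrs now pairs occupied grid cells with nearby map points;
//	// extra.correspondencesRatio is the fraction of points with a correspondence.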
Example #4
// ------------------------------------------------------
//				Test_Kinect
// ------------------------------------------------------
void Test_Kinect()
{
	// Launch grabbing thread:
	// --------------------------------------------------------
	TThreadParam thrPar;
	std::thread thHandle = std::thread(thread_grabbing, std::ref(thrPar));

	// Wait until data stream starts so we can say for sure the sensor has been
	// initialized OK:
	cout << "Waiting for sensor initialization...\n";
	do
	{
		CObservation3DRangeScan::Ptr possiblyNewObs =
			std::atomic_load(&thrPar.new_obs);
		if (possiblyNewObs && possiblyNewObs->timestamp != INVALID_TIMESTAMP)
			break;
		else
			std::this_thread::sleep_for(10ms);
	} while (!thrPar.quit);

	// Check error condition:
	if (thrPar.quit) return;

	// Feature tracking variables:
	CFeatureList trackedFeats;
	unsigned int step_num = 0;

	bool SHOW_FEAT_IDS = true;
	bool SHOW_RESPONSES = true;

	CGenericFeatureTrackerAutoPtr tracker;

	// "CFeatureTracker_KL" is by far the most robust implementation for now:
	tracker = CGenericFeatureTrackerAutoPtr(new CFeatureTracker_KL);
	tracker->enableTimeLogger(true);  // Do time profiling.

	// Set of parameters common to any tracker implementation:
	// To see all the existing params and documentation, see
	// mrpt::vision::CGenericFeatureTracker
	//  http://reference.mrpt.org/devel/structmrpt_1_1vision_1_1_c_generic_feature_tracker.html
	tracker->extra_params["add_new_features"] =
		1;  // track, AND ALSO, add new features
	tracker->extra_params["add_new_feat_min_separation"] = 25;
	tracker->extra_params["add_new_feat_max_features"] = 150;
	tracker->extra_params["add_new_feat_patch_size"] = 21;

	tracker->extra_params["minimum_KLT_response_to_add"] = 40;
	tracker->extra_params["check_KLT_response_every"] =
		5;  // Re-check the KLT response every N frames to ensure features stay at good points.
	tracker->extra_params["minimum_KLT_response"] =
		25;  // Minimum KLT response required when re-checking tracked features.

	tracker->extra_params["update_patches_every"] = 0;  // Update patches

	// Specific params for "CFeatureTracker_KL"
	tracker->extra_params["window_width"] = 25;
	tracker->extra_params["window_height"] = 25;

	// Global points map:
	CColouredPointsMap globalPtsMap;
	globalPtsMap.colorScheme.scheme =
		CColouredPointsMap::cmFromIntensityImage;  // Take points color from
	// RGB+D observations
	// globalPtsMap.colorScheme.scheme =
	// CColouredPointsMap::cmFromHeightRelativeToSensorGray;

	// Create window and prepare OpenGL object in the scene:
	// --------------------------------------------------------
	mrpt::gui::CDisplayWindow3D win3D("kinect-3d-slam 3D view", 800, 600);

	win3D.setCameraAzimuthDeg(140);
	win3D.setCameraElevationDeg(20);
	win3D.setCameraZoom(8.0);
	win3D.setFOV(90);
	win3D.setCameraPointingToPoint(2.5, 0, 0);

	mrpt::opengl::CPointCloudColoured::Ptr gl_points =
		mrpt::make_aligned_shared<mrpt::opengl::CPointCloudColoured>();
	gl_points->setPointSize(2.5);

	mrpt::opengl::CSetOfObjects::Ptr gl_curFeats =
		mrpt::make_aligned_shared<mrpt::opengl::CSetOfObjects>();
	mrpt::opengl::CSetOfObjects::Ptr gl_keyframes =
		mrpt::make_aligned_shared<mrpt::opengl::CSetOfObjects>();

	mrpt::opengl::CPointCloudColoured::Ptr gl_points_map =
		mrpt::make_aligned_shared<mrpt::opengl::CPointCloudColoured>();
	gl_points_map->setPointSize(2.0);

	const double aspect_ratio =
		480.0 / 640.0;  // kinect.rows() / double( kinect.cols() );

	mrpt::opengl::CSetOfObjects::Ptr gl_cur_cam_corner =
		mrpt::opengl::stock_objects::CornerXYZSimple(0.4f, 4);

	opengl::COpenGLViewport::Ptr viewInt;
	{
		mrpt::opengl::COpenGLScene::Ptr& scene = win3D.get3DSceneAndLock();

		// Create the Opengl object for the point cloud:
		scene->insert(gl_points_map);
		scene->insert(gl_points);
		scene->insert(gl_curFeats);
		scene->insert(gl_keyframes);
		scene->insert(mrpt::make_aligned_shared<mrpt::opengl::CGridPlaneXY>());

		scene->insert(gl_cur_cam_corner);

		const int VW_WIDTH =
			350;  // Size of the viewport into the window, in pixel units.
		const int VW_HEIGHT = aspect_ratio * VW_WIDTH;

		// Create the Opengl objects for the planar images each in a separate
		// viewport:
		viewInt = scene->createViewport("view2d_int");
		viewInt->setViewportPosition(2, 2, VW_WIDTH, VW_HEIGHT);
		viewInt->setTransparent(true);

		win3D.unlockAccess3DScene();
		win3D.repaint();
	}

	CImage previous_image;

	map<TFeatureID, TPoint3D> lastVisibleFeats;
	std::vector<TPose3D>
		camera_key_frames_path;  // The 6D path of the Kinect camera.
	CPose3D
		currentCamPose_wrt_last;  // wrt last pose in "camera_key_frames_path"

	bool gl_keyframes_must_refresh =
		true;  // Need to update gl_keyframes from camera_key_frames_path??
	CObservation3DRangeScan::Ptr last_obs;
	string str_status, str_status2;

	while (win3D.isOpen() && !thrPar.quit)
	{
		CObservation3DRangeScan::Ptr possiblyNewObs =
			std::atomic_load(&thrPar.new_obs);
		if (possiblyNewObs && possiblyNewObs->timestamp != INVALID_TIMESTAMP &&
			(!last_obs || possiblyNewObs->timestamp != last_obs->timestamp))
		{
			// It IS a new observation:
			last_obs = possiblyNewObs;

			// Feature tracking -------------------------------------------
			ASSERT_(last_obs->hasIntensityImage);

			CImage theImg;  // The grabbed image:
			theImg = last_obs->intensityImage;

			// Do tracking:
			if (step_num > 1)  // we need "previous_image" to be valid.
			{
				tracker->trackFeatures(previous_image, theImg, trackedFeats);

				// Remove those now out of the image plane:
				CFeatureList::iterator itFeat = trackedFeats.begin();
				while (itFeat != trackedFeats.end())
				{
					const TFeatureTrackStatus status = (*itFeat)->track_status;
					bool eras =
						(status_TRACKED != status && status_IDLE != status);
					if (!eras)
					{
						// Also, check if it's too close to the image border:
						const float x = (*itFeat)->x;
						const float y = (*itFeat)->y;
						static const float MIN_DIST_MARGIN_TO_STOP_TRACKING =
							10;
						if (x < MIN_DIST_MARGIN_TO_STOP_TRACKING ||
							y < MIN_DIST_MARGIN_TO_STOP_TRACKING ||
							x > (last_obs->cameraParamsIntensity.ncols -
								 MIN_DIST_MARGIN_TO_STOP_TRACKING) ||
							y > (last_obs->cameraParamsIntensity.nrows -
								 MIN_DIST_MARGIN_TO_STOP_TRACKING))
						{
							eras = true;
						}
					}
					if (eras)  // Erase or keep?
						itFeat = trackedFeats.erase(itFeat);
					else
						++itFeat;
				}
			}

			// Create list of 3D features in space, wrt current camera pose:
			// --------------------------------------------------------------------
			map<TFeatureID, TPoint3D> curVisibleFeats;
			for (CFeatureList::iterator itFeat = trackedFeats.begin();
				 itFeat != trackedFeats.end(); ++itFeat)
			{
				// Pixel coordinates in the intensity image:
				const int int_x = (*itFeat)->x;
				const int int_y = (*itFeat)->y;

				// Convert to pixel coords in the range image:
				//  APPROXIMATION: Assume coordinates are equal (that's not
				//  exact!!)
				const int x = int_x;
				const int y = int_y;

				// Does this (x,y) have valid range data?
				const float d = last_obs->rangeImage(y, x);
				if (d > 0.05 && d < 10.0)
				{
					ASSERT_(
						size_t(
							last_obs->rangeImage.cols() *
							last_obs->rangeImage.rows()) ==
						last_obs->points3D_x.size());
					const size_t nPt = last_obs->rangeImage.cols() * y + x;
					curVisibleFeats[(*itFeat)->ID] = TPoint3D(
						last_obs->points3D_x[nPt], last_obs->points3D_y[nPt],
						last_obs->points3D_z[nPt]);
				}
			}

			// Load local points map from 3D points + color:
			CColouredPointsMap localPntsMap;
			localPntsMap.colorScheme.scheme =
				CColouredPointsMap::cmFromIntensityImage;
			localPntsMap.loadFromRangeScan(*last_obs);

			// Estimate our current camera pose from feature2feature matching:
			// --------------------------------------------------------------------
			if (!lastVisibleFeats.empty())
			{
				TMatchingPairList corrs;  // pairs of correspondences

				for (map<TFeatureID, TPoint3D>::const_iterator itCur =
						 curVisibleFeats.begin();
					 itCur != curVisibleFeats.end(); ++itCur)
				{
					map<TFeatureID, TPoint3D>::const_iterator itFound =
						lastVisibleFeats.find(itCur->first);
					if (itFound != lastVisibleFeats.end())
					{
						corrs.push_back(
							TMatchingPair(
								itFound->first, itCur->first, itFound->second.x,
								itFound->second.y, itFound->second.z,
								itCur->second.x, itCur->second.y,
								itCur->second.z));
					}
				}

				if (corrs.size() >= 3)
				{
					// Robustly estimate the relative pose from the matched features:
					mrpt::tfest::TSE3RobustParams params;
					params.ransac_minSetSize = 3;
					params.ransac_maxSetSizePct = 6.0 / corrs.size();
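					// (ransac_maxSetSizePct is, presumably, a fraction of corrs.size(),
					//  so 6.0/corrs.size() caps the consensus set at roughly 6 pairs.)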

					mrpt::tfest::TSE3RobustResult results;
					bool register_ok = false;
					try
					{
						mrpt::tfest::se3_l2_robust(corrs, params, results);
						register_ok = true;
					}
					catch (std::exception&)
					{
						/* Cannot find a minimum number of matches, inconsistent
						 * parameters due to a very reduced number of matches, etc.
						 */
					}

					const CPose3D relativePose =
						CPose3D(results.transformation);

					str_status = mrpt::format(
						"%d corrs | inliers: %d | rel.pose: %s ",
						int(corrs.size()), int(results.inliers_idx.size()),
						relativePose.asString().c_str());
					str_status2 = string(
						results.inliers_idx.size() == 0
							? "LOST! Please, press 'r' to restart"
							: "");

					if (register_ok && std::abs(results.scale - 1.0) < 0.1)
					{
						// Seems a good match:
						if ((relativePose.norm() > KEYFRAMES_MIN_DISTANCE ||
							 std::abs(relativePose.yaw()) > KEYFRAMES_MIN_ANG ||
							 std::abs(relativePose.pitch()) >
								 KEYFRAMES_MIN_ANG ||
							 std::abs(relativePose.roll()) > KEYFRAMES_MIN_ANG))
						{
							// Accept this as a new key-frame pose ------------
							// Append new global pose of this key-frame:

							const CPose3D new_keyframe_global =
								CPose3D(*camera_key_frames_path.rbegin()) +
								relativePose;

							camera_key_frames_path.push_back(
								new_keyframe_global.asTPose());

							gl_keyframes_must_refresh = true;

							currentCamPose_wrt_last =
								CPose3D();  // It's (0,0,0) since the last
							// key-frame is the current pose!
							lastVisibleFeats = curVisibleFeats;

							cout << "Adding new key-frame: pose="
								 << new_keyframe_global << endl;

							// Update global map: append another map at a given
							// position:
							globalPtsMap.insertObservation(
								last_obs.get(), &new_keyframe_global);
							win3D.get3DSceneAndLock();
							gl_points_map->loadFromPointsMap(&globalPtsMap);
							win3D.unlockAccess3DScene();
						}
						else
						{
							currentCamPose_wrt_last = relativePose;
							// cout << "cur pose: " << currentCamPose_wrt_last
							// << endl;
						}
					}
				}
			}

			if (camera_key_frames_path.empty() || lastVisibleFeats.empty())
			{
				// First iteration:
				camera_key_frames_path.clear();
				camera_key_frames_path.push_back(TPose3D(0, 0, 0, 0, 0, 0));
				gl_keyframes_must_refresh = true;
				lastVisibleFeats = curVisibleFeats;

				// Update global map:
				globalPtsMap.clear();
				globalPtsMap.insertObservation(last_obs.get());

				win3D.get3DSceneAndLock();
				gl_points_map->loadFromPointsMap(&globalPtsMap);
				win3D.unlockAccess3DScene();
			}

			// Save the image for the next step:
			previous_image = theImg;

			// Draw marks on the RGB image:
			theImg.selectTextFont("10x20");
			{  // Tracked feats:
				theImg.drawFeatures(
					trackedFeats, TColor(0, 0, 255), SHOW_FEAT_IDS,
					SHOW_RESPONSES);
				theImg.textOut(
					3, 22,
					format("# feats: %u", (unsigned int)trackedFeats.size()),
					TColor(200, 20, 20));
			}

			// Update visualization ---------------------------------------

			// Show intensity image
			win3D.get3DSceneAndLock();
			viewInt->setImageView(theImg);
			win3D.unlockAccess3DScene();

			// Show 3D points & current visible feats, at the current camera 3D
			// pose "currentCamPose_wrt_last"
			// ---------------------------------------------------------------------
			if (last_obs->hasPoints3D)
			{
				const CPose3D curGlobalPose =
					CPose3D(*camera_key_frames_path.rbegin()) +
					currentCamPose_wrt_last;
				win3D.get3DSceneAndLock();
				// All 3D points:
				gl_points->loadFromPointsMap(&localPntsMap);
				gl_points->setPose(curGlobalPose);
				gl_cur_cam_corner->setPose(curGlobalPose);

				// Current visual landmarks:
				gl_curFeats->clear();
				for (map<TFeatureID, TPoint3D>::const_iterator it =
						 curVisibleFeats.begin();
					 it != curVisibleFeats.end(); ++it)
				{
					static double D = 0.02;
					mrpt::opengl::CBox::Ptr box =
						mrpt::make_aligned_shared<mrpt::opengl::CBox>(
							TPoint3D(-D, -D, -D), TPoint3D(D, D, D));
					box->setWireframe(true);
					box->setName(format("%d", int(it->first)));
					box->enableShowName(true);
					box->setLocation(it->second);
					gl_curFeats->insert(box);
				}
				gl_curFeats->setPose(curGlobalPose);

				win3D.unlockAccess3DScene();
				win3D.repaint();
			}

			win3D.get3DSceneAndLock();
			win3D.addTextMessage(
				-100, -20, format("%.02f Hz", thrPar.Hz), TColorf(0, 1, 1), 100,
				MRPT_GLUT_BITMAP_HELVETICA_18);
			win3D.unlockAccess3DScene();

			win3D.repaint();

			step_num++;

		}  // end update visualization:

		if (gl_keyframes_must_refresh)
		{
			gl_keyframes_must_refresh = false;
			// cout << "Updating gl_keyframes with " <<
			// camera_key_frames_path.size() << " frames.\n";
			win3D.get3DSceneAndLock();
			gl_keyframes->clear();
			for (size_t i = 0; i < camera_key_frames_path.size(); i++)
			{
				CSetOfObjects::Ptr obj =
					mrpt::opengl::stock_objects::CornerXYZSimple(0.3f, 3);
				obj->setPose(camera_key_frames_path[i]);
				gl_keyframes->insert(obj);
			}
			win3D.unlockAccess3DScene();
		}

		// Process possible keyboard commands:
		// --------------------------------------
		if (win3D.keyHit() && thrPar.pushed_key == 0)
		{
			const int key = tolower(win3D.getPushedKey());

			switch (key)
			{
				// Some of the keys are processed in this thread:
				case 'r':
					lastVisibleFeats.clear();
					camera_key_frames_path.clear();
					gl_keyframes_must_refresh = true;
					globalPtsMap.clear();
					win3D.get3DSceneAndLock();
					gl_points_map->loadFromPointsMap(&globalPtsMap);
					win3D.unlockAccess3DScene();

					break;
				case 's':
				{
					const std::string s = "point_cloud.txt";
					cout << "Dumping 3D point-cloud to: " << s << endl;
					globalPtsMap.save3D_to_text_file(s);
					break;
				}
				case 'o':
					win3D.setCameraZoom(win3D.getCameraZoom() * 1.2);
					win3D.repaint();
					break;
				case 'i':
					win3D.setCameraZoom(win3D.getCameraZoom() / 1.2);
					win3D.repaint();
					break;
				// ...and the rest in the kinect thread:
				default:
					thrPar.pushed_key = key;
					break;
			};
		}

		win3D.get3DSceneAndLock();
		win3D.addTextMessage(
			2, -30, format(
						"'s':save point cloud, 'r': reset, 'o'/'i': zoom "
						"out/in, mouse: orbit 3D, ESC: quit"),
			TColorf(1, 1, 1), 110, MRPT_GLUT_BITMAP_HELVETICA_12);
		win3D.addTextMessage(
			2, -50, str_status, TColorf(1, 1, 1), 111,
			MRPT_GLUT_BITMAP_HELVETICA_12);
		win3D.addTextMessage(
			2, -70, str_status2, TColorf(1, 1, 1), 112,
			MRPT_GLUT_BITMAP_HELVETICA_18);
		win3D.unlockAccess3DScene();

		std::this_thread::sleep_for(1ms);
	}

	cout << "Waiting for grabbing thread to exit...\n";
	thrPar.quit = true;
	thHandle.join();
	cout << "Bye!\n";
}
Example #5
/*---------------------------------------------------------------
					path_from_rtk_gps
 ---------------------------------------------------------------*/
void  mrpt::topography::path_from_rtk_gps(
	mrpt::poses::CPose3DInterpolator	&robot_path,
	const mrpt::slam::CRawlog			&rawlog,
	size_t 								first,
	size_t 								last,
	bool								isGUI,
	bool								disableGPSInterp,
	int									PATH_SMOOTH_FILTER ,
	TPathFromRTKInfo					*outInfo
	)
{
	MRPT_START

#if MRPT_HAS_WXWIDGETS
	// Use a smart pointer so we are safe against exceptions:
	stlplus::smart_ptr<wxBusyCursor>	waitCursorPtr;
	if (isGUI)
		waitCursorPtr = stlplus::smart_ptr<wxBusyCursor>( new  wxBusyCursor() );
#endif

    // Go: generate the map:
    size_t      i;

    ASSERT_(first<=last);
    ASSERT_(last<=rawlog.size()-1);


	set<string>	lstGPSLabels;

    size_t count = 0;

	robot_path.clear();
	robot_path.setMaxTimeInterpolation(3.0);	// Max. seconds of GPS blackout not to interpolate.
	robot_path.setInterpolationMethod( CPose3DInterpolator::imSSLSLL );

	TPathFromRTKInfo	outInfoTemp;
	if (outInfo) *outInfo = outInfoTemp;

	map<string, map<TTimeStamp,TPoint3D> >	gps_paths;	// label -> (time -> 3D local coords)

    bool		abort = false;
	bool		ref_valid = false;

	// Load configuration block:
	CConfigFileMemory	memFil;
	rawlog.getCommentTextAsConfigFile(memFil);

	TGeodeticCoords ref(
		memFil.read_double("GPS_ORIGIN","lat_deg",0),
		memFil.read_double("GPS_ORIGIN","lon_deg",0),
		memFil.read_double("GPS_ORIGIN","height",0) );

	ref_valid = !ref.isClear();

	// Do we have info for the consistency test?
	const double std_0 = memFil.read_double("CONSISTENCY_TEST","std_0",0);
	bool  doConsistencyCheck = std_0>0;

	// Do we have the "reference uncertainty" matrix W^\star ??
	memFil.read_matrix("UNCERTAINTY","W_star",outInfoTemp.W_star);
	const bool doUncertaintyCovs = size(outInfoTemp.W_star,1)!=0;
	if (doUncertaintyCovs && (size(outInfoTemp.W_star,1)!=6 || size(outInfoTemp.W_star,2)!=6))
		THROW_EXCEPTION("ERROR: W_star matrix for uncertainty estimation is provided but it's not a 6x6 matrix.");

	// ------------------------------------------
	// Collect all the RTK GPS observations:
	// ------------------------------------------
#if MRPT_HAS_WXWIDGETS
	wxProgressDialog    *progDia=NULL;
	if (isGUI)
	{
		progDia = new wxProgressDialog(
			wxT("Building map"),
			wxT("Getting GPS observations..."),
			(int)(last-first+1), // range
			NULL, // parent
			wxPD_CAN_ABORT |
			wxPD_APP_MODAL |
			wxPD_SMOOTH |
			wxPD_AUTO_HIDE |
			wxPD_ELAPSED_TIME |
			wxPD_ESTIMATED_TIME |
			wxPD_REMAINING_TIME);
	}
#endif

	// The list of all GPS observations, ordered by time, that are in valid RTK mode
	typedef std::map< mrpt::system::TTimeStamp, std::map<std::string,CObservationGPSPtr> > TListGPSs;
	TListGPSs	list_gps_obs;

	map<string,size_t>  GPS_RTK_reads;	// label-># of RTK readings
	map<string,TPoint3D> GPS_local_coords_on_vehicle;  // label -> local pose on the vehicle

    for (i=first;!abort && i<=last;i++)
    {
        switch ( rawlog.getType(i) )
		{
		default:
			break;

        case CRawlog::etObservation:
            {
                CObservationPtr o = rawlog.getAsObservation(i);

				if (o->GetRuntimeClass()==CLASS_ID(CObservationGPS))
				{
					CObservationGPSPtr obs = CObservationGPSPtr(o);

					if (obs->has_GGA_datum && obs->GGA_datum.fix_quality==4)
					{
						// Add to the list:
						list_gps_obs[obs->timestamp][obs->sensorLabel] = obs;

						lstGPSLabels.insert(obs->sensorLabel);
					}

					// Save to GPS paths:
					if (obs->has_GGA_datum && (obs->GGA_datum.fix_quality==4 || obs->GGA_datum.fix_quality==5))
					{
						GPS_RTK_reads[obs->sensorLabel]++;

						// map<string,TPoint3D> GPS_local_coords_on_vehicle;  // label -> local pose on the vehicle
						if (GPS_local_coords_on_vehicle.find(obs->sensorLabel)==GPS_local_coords_on_vehicle.end())
							GPS_local_coords_on_vehicle[obs->sensorLabel] = TPoint3D( obs->sensorPose );

						//map<string, map<TTimeStamp,TPoint3D> >	gps_paths;	// label -> (time -> 3D local coords)
						gps_paths[obs->sensorLabel][obs->timestamp] = TPoint3D(
							obs->GGA_datum.longitude_degrees,
							obs->GGA_datum.latitude_degrees,
							obs->GGA_datum.altitude_meters );
					}
                }
            }
            break;
        } // end switch type

		// Show progress:
        if ((count++ % 100)==0)
        {
#if MRPT_HAS_WXWIDGETS
        	if (progDia)
        	{
				if (!progDia->Update((int)(i-first)))
					abort = true;
				wxTheApp->Yield();
        	}
#endif
        }
    } // end for i

#if MRPT_HAS_WXWIDGETS
	if (progDia)
	{
		delete progDia;
		progDia=NULL;
	}
#endif

	// -----------------------------------------------------------
	// At this point we already have the sensor positions, thus
	//  we can estimate the covariance matrix D:
	//
	// TODO: Generalize equations for # of GPS > 3
	// -----------------------------------------------------------
	map< set<string>, double >  Ad_ij;  // InterGPS distances in 2D
	map< set<string>, double >  phi_ij; // Directions on XY of the lines between i-j
	map< string, size_t>  D_cov_indexes; // Sensor label-> index in the matrix (first=0, ...)
	map< size_t, string>  D_cov_rev_indexes; // Reverse of D_cov_indexes

	CMatrixDouble	  D_cov;   // square distances cov
	CMatrixDouble	  D_cov_1;   // square distances cov (inverse)
	vector_double	  D_mean;  // square distances mean

	if (doConsistencyCheck && GPS_local_coords_on_vehicle.size()==3)
	{
		unsigned int cnt = 0;
		for(map<string,TPoint3D>::iterator i=GPS_local_coords_on_vehicle.begin();i!=GPS_local_coords_on_vehicle.end();++i)
		{
			// Index tables:
			D_cov_indexes[i->first] = cnt;
			D_cov_rev_indexes[cnt] = i->first;
			cnt++;

			for(map<string,TPoint3D>::iterator j=i;j!=GPS_local_coords_on_vehicle.end();++j)
			{
				if (i!=j)
				{
					const TPoint3D  &pi = i->second;
					const TPoint3D  &pj = j->second;
					Ad_ij[ make_set(i->first,j->first) ]  = pi.distanceTo( pj );
					phi_ij[ make_set(i->first,j->first) ] = atan2( pj.y-pi.y, pj.x-pi.x );
				}
			}
		}
		ASSERT_( D_cov_indexes.size()==3  && D_cov_rev_indexes.size()==3 );

		D_cov.setSize( D_cov_indexes.size(), D_cov_indexes.size() );
		D_mean.resize( D_cov_indexes.size() );

		// See paper for the formulas!
		// TODO: generalize for N>3

		D_cov(0,0) = 2*square( Ad_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[1])] );
		D_cov(1,1) = 2*square( Ad_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[2])] );
		D_cov(2,2) = 2*square( Ad_ij[ make_set(D_cov_rev_indexes[1],D_cov_rev_indexes[2])] );

		D_cov(1,0) = Ad_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[1])] * Ad_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[2])]
					 * cos( phi_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[1])] - phi_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[2])] );
		D_cov(0,1) = D_cov(1,0);

		D_cov(2,0) =-Ad_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[1])] * Ad_ij[ make_set(D_cov_rev_indexes[1],D_cov_rev_indexes[2])]
					 * cos( phi_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[1])] - phi_ij[ make_set(D_cov_rev_indexes[1],D_cov_rev_indexes[2])] );
		D_cov(0,2) = D_cov(2,0);

		D_cov(2,1) = Ad_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[2])] * Ad_ij[ make_set(D_cov_rev_indexes[1],D_cov_rev_indexes[2])]
					 * cos( phi_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[2])] - phi_ij[ make_set(D_cov_rev_indexes[1],D_cov_rev_indexes[2])] );
		D_cov(1,2) = D_cov(2,1);

		D_cov *= 4*square(std_0);

		D_cov_1 = D_cov.inv();

		//cout << D_cov.inMatlabFormat() << endl;

		D_mean[0] = square( Ad_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[1])] );
		D_mean[1] = square( Ad_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[2])] );
		D_mean[2] = square( Ad_ij[ make_set(D_cov_rev_indexes[1],D_cov_rev_indexes[2])] );

	}
	else doConsistencyCheck =false;


	// ------------------------------------------
	// Process the GPS observations to build the 6D path:
	// ------------------------------------------
	int N_GPSs = list_gps_obs.size();

	if (N_GPSs)
	{
		// Interpolation pass ("1-out-of-5"): this works around JAVAD GPS units
		//  that skip some readings at some of the .0 .2 .4 .6 .8 second marks.
		if (list_gps_obs.size()>4)
		{
			TListGPSs::iterator F = list_gps_obs.begin();
			++F; ++F;
			TListGPSs::iterator E = list_gps_obs.end();
			--E; --E;

			for (TListGPSs::iterator i=F;i!=E;++i)
			{
				// Now check if we have 3 gps with the same time stamp:
				//const size_t N = i->second.size();
				std::map<std::string, CObservationGPSPtr> & GPS = i->second;

				// Check if any in "lstGPSLabels" is missing here:
				for (set<string>::iterator l=lstGPSLabels.begin();l!=lstGPSLabels.end();++l)
				{
					// For each GPS in the current timestamp:
					bool fnd = ( GPS.find(*l)!=GPS.end() );

					if (fnd) continue; // this one is present.

					// Ok, we have "*l" missing in the set "*i".
					// Try to interpolate from neighbors:
					TListGPSs::iterator	i_b1 = i; --i_b1;
					TListGPSs::iterator	i_a1 = i; ++i_a1;

					CObservationGPSPtr	GPS_b1, GPS_a1;

					if (i_b1->second.find( *l )!=i_b1->second.end())
						GPS_b1 = i_b1->second.find( *l )->second;

					if (i_a1->second.find( *l )!=i_a1->second.end())
						GPS_a1 = i_a1->second.find( *l )->second;

					if (!disableGPSInterp && GPS_a1 && GPS_b1)
					{
						if ( mrpt::system::timeDifference( GPS_b1->timestamp, GPS_a1->timestamp ) < 0.5 )
						{
							CObservationGPSPtr	new_gps = CObservationGPSPtr( new CObservationGPS(*GPS_a1) );
							new_gps->sensorLabel = *l;

							//cout << mrpt::system::timeLocalToString(GPS_b1->timestamp) << " " << mrpt::system::timeLocalToString(GPS_a1->timestamp) << " " << *l;
							//cout << endl;

							new_gps->GGA_datum.longitude_degrees = 0.5 * ( GPS_a1->GGA_datum.longitude_degrees + GPS_b1->GGA_datum.longitude_degrees );
							new_gps->GGA_datum.latitude_degrees  = 0.5 * ( GPS_a1->GGA_datum.latitude_degrees + GPS_b1->GGA_datum.latitude_degrees );
							new_gps->GGA_datum.altitude_meters   = 0.5 * ( GPS_a1->GGA_datum.altitude_meters + GPS_b1->GGA_datum.altitude_meters );

							new_gps->timestamp = (GPS_a1->timestamp + GPS_b1->timestamp) / 2;

							i->second[new_gps->sensorLabel] = new_gps;
						}
					}
				}
			} // end loop interpolate 1-out-of-5
		}



#if MRPT_HAS_WXWIDGETS
	wxProgressDialog    *progDia3=NULL;
	if (isGUI)
	{
		progDia3 = new wxProgressDialog(
			wxT("Building map"),
			wxT("Estimating 6D path..."),
			N_GPSs, // range
			NULL, // parent
			wxPD_CAN_ABORT |
			wxPD_APP_MODAL |
			wxPD_SMOOTH |
			wxPD_AUTO_HIDE |
			wxPD_ELAPSED_TIME |
			wxPD_ESTIMATED_TIME |
			wxPD_REMAINING_TIME);
	}
#endif

		int	idx_in_GPSs = 0;

		for (TListGPSs::iterator i=list_gps_obs.begin();i!=list_gps_obs.end();++i, idx_in_GPSs++)
		{
			// Now check if we have 3 gps with the same time stamp:
			if (i->second.size()>=3)
			{
				const size_t N = i->second.size();
				std::map<std::string, CObservationGPSPtr> & GPS = i->second;
				vector_double X(N),Y(N),Z(N); 	// Global XYZ coordinates
				std::map<string,size_t> XYZidxs;  // Sensor label -> indices in X Y Z

				if (!ref_valid)	// get the reference lat/lon, if it's not set from rawlog configuration block.
				{
					ref_valid 	= true;
					ref = GPS.begin()->second->GGA_datum.getAsStruct<TGeodeticCoords>();
				}

				// Compute the XYZ coordinates of all sensors:
				TMatchingPairList corrs;
				unsigned int k;
				std::map<std::string, CObservationGPSPtr>::iterator g_it;

				for (k=0,g_it=GPS.begin();g_it!=GPS.end();g_it++,k++)
				{
					TPoint3D P;
					mrpt::topography::geodeticToENU_WGS84(
						g_it->second->GGA_datum.getAsStruct<TGeodeticCoords>(),
						P,
						ref );

					// Correction of offsets:
					const string sect = string("OFFSET_")+g_it->second->sensorLabel;
					P.x += memFil.read_double( sect, "x", 0 );
					P.y += memFil.read_double( sect, "y", 0 );
					P.z += memFil.read_double( sect, "z", 0 );

					XYZidxs[g_it->second->sensorLabel] = k; // Save index correspondence

					// Create the correspondence:
					corrs.push_back( TMatchingPair(
						k,k, 	// Indices
						P.x,P.y,P.z, // "This"/Global coords
						g_it->second->sensorPose.x(),g_it->second->sensorPose.y(),g_it->second->sensorPose.z() // "other"/local coordinates
						));

					X[k] = P.x;
					Y[k] = P.y;
					Z[k] = P.z;
				}

				if (doConsistencyCheck && GPS.size()==3)
				{
					// X/Y/Z[k] hold the final coordinates of the k'th GPS
					// GPS[k] are the CObservations:

					// Compute the inter-GPS square distances:
					vector_double iGPSdist2(3);

					// [0]: sq dist between: D_cov_rev_indexes[0],D_cov_rev_indexes[1]
					TPoint3D   P0( X[XYZidxs[D_cov_rev_indexes[0]]], Y[XYZidxs[D_cov_rev_indexes[0]]], Z[XYZidxs[D_cov_rev_indexes[0]]] );
					TPoint3D   P1( X[XYZidxs[D_cov_rev_indexes[1]]], Y[XYZidxs[D_cov_rev_indexes[1]]], Z[XYZidxs[D_cov_rev_indexes[1]]] );
					TPoint3D   P2( X[XYZidxs[D_cov_rev_indexes[2]]], Y[XYZidxs[D_cov_rev_indexes[2]]], Z[XYZidxs[D_cov_rev_indexes[2]]] );

					iGPSdist2[0] = P0.sqrDistanceTo(P1);
					iGPSdist2[1] = P0.sqrDistanceTo(P2);
					iGPSdist2[2] = P1.sqrDistanceTo(P2);

					double mahaD = mrpt::math::mahalanobisDistance( iGPSdist2, D_mean, D_cov_1 );
					outInfoTemp.mahalabis_quality_measure[i->first] = mahaD;

					//cout << "x: " << iGPSdist2 << " MU: " <<  D_mean << " -> " << mahaD  << endl;
				} // end consistency

				// Use a 6D matching method to estimate the location of the vehicle:
				CPose3D optimal_pose;
				double  optimal_scale;

				// "this" (reference map) -> GPS global coordinates
				// "other" -> GPS local coordinates on the vehicle
				mrpt::scanmatching::leastSquareErrorRigidTransformation6D( corrs,optimal_pose,optimal_scale, true ); // Force scale=1
				//cout << "optimal pose: " << optimal_pose << " " << optimal_scale << endl;
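				// (Old mrpt::scanmatching API, kept as in the original code; in later
				//  MRPT versions the rough equivalent is mrpt::tfest::se3_l2 with the
				//  scale forced to 1.)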

				// Final vehicle pose:
				CPose3D	&veh_pose= optimal_pose;


				// Add to the interpolator:
				MRPT_CHECK_NORMAL_NUMBER( veh_pose.x() );
				MRPT_CHECK_NORMAL_NUMBER( veh_pose.y() );
				MRPT_CHECK_NORMAL_NUMBER( veh_pose.z() );
				MRPT_CHECK_NORMAL_NUMBER( veh_pose.yaw() );
				MRPT_CHECK_NORMAL_NUMBER( veh_pose.pitch() );
				MRPT_CHECK_NORMAL_NUMBER( veh_pose.roll() );

				robot_path.insert( i->first, veh_pose );

				// If we have W_star, compute the pose uncertainty:
				if (doUncertaintyCovs)
				{
					CPose3DPDFGaussian	final_veh_uncert;
					final_veh_uncert.mean.setFromValues(0,0,0,0,0,0);
					final_veh_uncert.cov = outInfoTemp.W_star;

					// Rotate the covariance according to the real vehicle pose:
					final_veh_uncert.changeCoordinatesReference(veh_pose);

					outInfoTemp.vehicle_uncertainty[ i->first ] = final_veh_uncert.cov;
				}
			}

			// Show progress:
			if ((count++ % 100)==0)
			{
#if MRPT_HAS_WXWIDGETS
				if (progDia3)
				{
					if (!progDia3->Update(idx_in_GPSs))
						abort = true;
					wxTheApp->Yield();
				}
#endif
			}
		} // end for i

#if MRPT_HAS_WXWIDGETS
	if (progDia3)
	{
		delete progDia3;
		progDia3=NULL;
	}
#endif

		if (PATH_SMOOTH_FILTER>0 && robot_path.size()>1)
		{
			CPose3DInterpolator	filtered_robot_path = robot_path;

			// Apply a smoothing filter to the path angles:
			// ---------------------------------------------
			const double MAX_DIST_TO_FILTER = 4.0;  // Max. time difference (seconds) between poses to include in the filter

			for (CPose3DInterpolator::iterator i=robot_path.begin();i!=robot_path.end();++i)
			{
				CPose3D   p = i->second;

				vector_double pitchs, rolls;	// The elements to average

				pitchs.push_back(p.pitch());
				rolls.push_back(p.roll());

				CPose3DInterpolator::iterator q=i;
				for (int k=0;k<PATH_SMOOTH_FILTER && q!=robot_path.begin();k++)
				{
					--q;
					if (abs( mrpt::system::timeDifference(q->first,i->first))<MAX_DIST_TO_FILTER )
					{
						pitchs.push_back( q->second.pitch() );
						rolls.push_back( q->second.roll() );
					}
				}
				q=i;
				for (int k=0;k<PATH_SMOOTH_FILTER && q!=(--robot_path.end()) ;k++)
				{
					++q;
					if (abs( mrpt::system::timeDifference(q->first,i->first))<MAX_DIST_TO_FILTER )
					{
						pitchs.push_back( q->second.pitch() );
						rolls.push_back( q->second.roll() );
					}
				}

				p.setYawPitchRoll(p.yaw(), mrpt::math::averageWrap2Pi(pitchs), mrpt::math::averageWrap2Pi(rolls) );
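				// Note: only the pitch and roll are smoothed (as circular averages over
				// the neighbouring poses gathered above); the yaw is kept unchanged.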

				// save in filtered path:
				filtered_robot_path.insert( i->first, p );
			}
			// Replace:
			robot_path = filtered_robot_path;

		} // end PATH_SMOOTH_FILTER

	} // end step generate 6D path


	// Here we can set best_gps_path (the one with the max. number of RTK fixed/float readings):
	if (outInfo)
	{
		string bestLabel;
		size_t bestNum = 0;
		for (map<string,size_t>::iterator i=GPS_RTK_reads.begin();i!=GPS_RTK_reads.end();++i)
		{
			if (i->second>bestNum)
			{
				bestNum = i->second;
				bestLabel = i->first;
			}
		}
		outInfoTemp.best_gps_path = gps_paths[bestLabel];

		// and transform to XYZ:
		// Correction of offsets:
		const string sect = string("OFFSET_")+bestLabel;
		const double off_X = memFil.read_double( sect, "x", 0 );
		const double off_Y = memFil.read_double( sect, "y", 0 );
		const double off_Z = memFil.read_double( sect, "z", 0 );

		// map<TTimeStamp,TPoint3D> best_gps_path;		// time -> 3D local coords
		for (map<TTimeStamp,TPoint3D>::iterator i=outInfoTemp.best_gps_path.begin();i!=outInfoTemp.best_gps_path.end();++i)
		{
			TPoint3D P;
			mrpt::topography::geodeticToENU_WGS84(
				TGeodeticCoords(i->second.x,i->second.y,i->second.z),  // lat, lon, height
				P, // X Y Z
				ref );

			i->second.x = P.x + off_X;
			i->second.y = P.y + off_Y;
			i->second.z = P.z + off_Z;
		}
	} // end best_gps_path

	if (outInfo) *outInfo = outInfoTemp;


    MRPT_END
}
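/*  Illustrative rawlog comment-block read above through "memFil" (the section and
	key names are those used in the read_* calls; the values are made up):

		[GPS_ORIGIN]
		lat_deg = 36.894
		lon_deg = -2.316
		height  = 50.0

		[CONSISTENCY_TEST]
		std_0   = 0.01              <- a value > 0 enables the inter-GPS consistency test

		[UNCERTAINTY]
		W_star  = [ ...6x6 matrix... ]

		[OFFSET_<SENSOR_LABEL>]     <- one section per GPS sensorLabel
		x = 0.0
		y = 0.0
		z = 0.0
 ---------------------------------------------------------------*/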