Example 1
template<typename T>
void PointMatcher<T>::DataPoints::addField(const std::string& name, const MatrixType& newField, Labels& labels, MatrixType& data) const
{
	const int newFieldDim = newField.rows();
	const int newPointCount = newField.cols();
	const int pointCount = features.cols();

	if (newField.rows() == 0)
		return;

	// Replace if the field exists
	if (fieldExists(name, 0, labels))
	{
		const int fieldDim = getFieldDimension(name, labels);
		
		if(fieldDim == newFieldDim)
		{
			// Ensure that the number of points in the point cloud and in the field are the same
			if(pointCount == newPointCount)
			{
				const int row = getFieldStartingRow(name, labels);
				data.block(row, 0, fieldDim, pointCount) = newField;
			}
			else
			{
				stringstream errorMsg;
				errorMsg << "The field " << name << " cannot be added because the number of points is not the same. Old point count: " << pointCount << "new: " << newPointCount;
				throw InvalidField(errorMsg.str());
			}
		}
		else
		{
			stringstream errorMsg;
			errorMsg << "The field " << name << " already exists but could not be added because the dimension is not the same. Old dim: " << fieldDim << " new: " << newFieldDim;
			throw InvalidField(errorMsg.str());
		}
	}
	else // Add at the end if it is a new field
	{
		if(pointCount == newPointCount || pointCount == 0)
		{
			const int oldFieldDim(data.rows());
			const int totalDim = oldFieldDim + newFieldDim;
			data.conservativeResize(totalDim, newPointCount);
			data.bottomRows(newFieldDim) = newField;
			labels.push_back(Label(name, newFieldDim));
		}
		else
		{
			stringstream errorMsg;
			errorMsg << "The field " << name << " cannot be added because the number of points is not the same. Old point count: " << pointCount << " new: " << newPointCount;
			throw InvalidField(errorMsg.str());
		}
	}
}
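A minimal usage sketch (assuming libpointmatcher's public addDescriptor wrapper, which forwards to the addField helper above on the descriptor block; the "intensity" name is only illustrative):

#include "pointmatcher/PointMatcher.h"

typedef PointMatcher<float> PM;
typedef PM::DataPoints DP;

void attachIntensity(DP& cloud)
{
	// one row per descriptor dimension, one column per point
	const PM::Matrix intensity = PM::Matrix::Zero(1, cloud.features.cols());
	// assumed call path: addDescriptor forwards to
	// addField(name, newField, descriptorLabels, descriptors) shown above
	cloud.addDescriptor("intensity", intensity);
}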
Example 2
template<typename T>
void PointMatcher<T>::DataPoints::allocateField(const std::string& name, const unsigned dim, Labels& labels, MatrixType& data) const
{
	if (fieldExists(name, 0, labels))
	{
		const unsigned descDim(getFieldDimension(name, labels));
		if (descDim != dim)
		{
			throw InvalidField(
				(boost::format("The existing field %1% has dimension %2%, different than requested dimension %3%") % name % descDim % dim).str()
			);
		}
	}
	else
	{
		const int oldDim(data.rows());
		const int totalDim(oldDim + dim);
		const int pointCount(features.cols());
		data.conservativeResize(totalDim, pointCount);
		labels.push_back(Label(name, dim));
	}
}
Example 3
template<typename T>
void PointMatcher<T>::DataPoints::allocateFields(const Labels& newLabels, Labels& labels, MatrixType& data) const
{
	typedef vector<bool> BoolVector;
	BoolVector present(newLabels.size(), false);
	
	// for fields that exist, take note and check dimension
	size_t additionalDim(0);
	for (size_t i = 0; i < newLabels.size(); ++i)
	{
		const string& newName(newLabels[i].text);
		const size_t newSpan(newLabels[i].span);
		for(BOOST_AUTO(it, labels.begin()); it != labels.end(); ++it)
		{
			if (it->text == newName)
			{
				if (it->span != newSpan)
					throw InvalidField(
						(boost::format("The existing field %1% has dimension %2%, different than requested dimension %3%") % newName % it->span % newSpan).str()
					);
				present[i] = true;
				break;
			}
		}
		if (!present[i])
			additionalDim += newSpan;
	}
	
	// for new fields allocate
	const int oldDim(data.rows());
	const int totalDim(oldDim + additionalDim);
	const int pointCount(features.cols());
	data.conservativeResize(totalDim, pointCount);
	for (size_t i = 0; i < newLabels.size(); ++i)
	{
		if (!present[i])
			labels.push_back(newLabels[i]);
	}
}
Example 4
template<typename T>
void PointMatcher<T>::DataPoints::concatenateLabelledMatrix(Labels* labels, MatrixType* data, const Labels extraLabels, const MatrixType extraData)
{
	const int nbPoints1 = data->cols();
	const int nbPoints2 = extraData.cols();
	const int nbPointsTotal = nbPoints1 + nbPoints2;


	if (*labels == extraLabels)
	{
		if (labels->size() > 0)
		{
			// same descriptors, fast merge
			data->conservativeResize(Eigen::NoChange, nbPointsTotal);
			data->rightCols(nbPoints2) = extraData;
		}
	}
	else
	{
		// different descriptors, slow merge
		
		// collect labels to be kept
		Labels newLabels;
		for(BOOST_AUTO(it, labels->begin()); it != labels->end(); ++it)
		{
			for(BOOST_AUTO(jt, extraLabels.begin()); jt != extraLabels.end(); ++jt)
			{
				if (it->text == jt->text)
				{
					if (it->span != jt->span)
						throw InvalidField(
						(boost::format("The field %1% has dimension %2% in this, different than dimension %3% in that") % it->text % it->span % jt->span).str()
					);
					newLabels.push_back(*it);
					break;
				}
			}
		}
		
		// allocate new descriptors
		if (newLabels.size() > 0)
		{
			MatrixType newData;
			Labels filledLabels;
			this->allocateFields(newLabels, filledLabels, newData);
			assert(newLabels == filledLabels);
			
			// fill
			unsigned row(0);
			for(BOOST_AUTO(it, newLabels.begin()); it != newLabels.end(); ++it)
			{
				Eigen::Block<MatrixType> view(newData.block(row, 0, it->span, newData.cols()));
				view.leftCols(nbPoints1) = getViewByName(it->text, *labels, *data);
				view.rightCols(nbPoints2) = getViewByName(it->text, extraLabels, extraData);
				row += it->span;
			}
			
			// swap descriptors
			data->swap(newData);
			*labels = newLabels;
		}
		else
		{
			*data = MatrixType();
			*labels = Labels();
		}
	}
}
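A brief usage sketch (assuming the public DataPoints::concatenate wrapper, which applies concatenateLabelledMatrix to both the feature and descriptor blocks):

#include "pointmatcher/PointMatcher.h"

typedef PointMatcher<float> PM;
typedef PM::DataPoints DP;

DP mergeClouds(const DP& a, const DP& b)
{
	// descriptors present in only one of the clouds are dropped by the slow-merge
	// path above; mismatched spans for a shared label throw InvalidField
	DP merged(a);          // copy the first cloud
	merged.concatenate(b); // append the points of the second cloud
	return merged;
}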
	template<typename T>
	typename PointMatcher<T>::DataPoints rosMsgToPointMatcherCloud(const sensor_msgs::PointCloud2& rosMsg)
	{
		typedef PointMatcher<T> PM;
		typedef typename PM::DataPoints DataPoints;
		typedef typename DataPoints::Label Label;
		typedef typename DataPoints::Labels Labels;
		typedef typename DataPoints::View View;
		
		if (rosMsg.fields.empty())
			return DataPoints();
		
		// fill labels
		// conversions of descriptor fields from pcl
		// see http://www.ros.org/wiki/pcl/Overview
		Labels featLabels;
		Labels descLabels;
		vector<bool> isFeature;
		for(auto it(rosMsg.fields.begin()); it != rosMsg.fields.end(); ++it)
		{
			const string name(it->name);
			const size_t count(std::max<size_t>(it->count, 1));
			if (name == "x" || name == "y" || name == "z")
			{
				featLabels.push_back(Label(name, count));
				isFeature.push_back(true);
			}
			else if (name == "rgb" || name == "rgba")
			{
				descLabels.push_back(Label("color", (name == "rgba") ? 4 : 3));
				isFeature.push_back(false);
			}
			else if ((it+1) != rosMsg.fields.end() && it->name == "normal_x" && (it+1)->name == "normal_y")
			{
				if ((it+2) != rosMsg.fields.end() && (it+2)->name == "normal_z")
				{
					descLabels.push_back(Label("normals", 3));
					it += 2;
					isFeature.push_back(false);
					isFeature.push_back(false);
				}
				else
				{
					descLabels.push_back(Label("normals", 2));
					it += 1;
					isFeature.push_back(false);
				}
				isFeature.push_back(false);
			}
			else 
			{
				descLabels.push_back(Label(name, count));
				isFeature.push_back(false);
			}
		}
		featLabels.push_back(Label("pad", 1));
		assert(isFeature.size() == rosMsg.fields.size());
		
		// create cloud
		const unsigned pointCount(rosMsg.width * rosMsg.height);
		DataPoints cloud(featLabels, descLabels, pointCount);
		cloud.getFeatureViewByName("pad").setConstant(1);
		
		// fill cloud
		// TODO: support big endian, pass through endian-swapping method just after the *reinterpret_cast
		typedef sensor_msgs::PointField PF;
		size_t fieldId = 0;
		for(auto it(rosMsg.fields.begin()); it != rosMsg.fields.end(); ++it, ++fieldId)
		{
			if (it->name == "rgb" || it->name == "rgba")
			{
				// special case for colors
				if (((it->datatype != PF::UINT32) && (it->datatype != PF::INT32) && (it->datatype != PF::FLOAT32)) || (it->count != 1))
					throw runtime_error(
						(boost::format("Colors in a point cloud must be a single element of size 32 bits, found %1% elements of type %2%") % it->count % unsigned(it->datatype)).str()
					);
				View view(cloud.getDescriptorViewByName("color"));
				int ptId(0);
				for (size_t y(0); y < rosMsg.height; ++y)
				{
					const uint8_t* dataPtr(&rosMsg.data[0] + rosMsg.row_step*y);
					for (size_t x(0); x < rosMsg.width; ++x)
					{
						const uint32_t rgba(*reinterpret_cast<const uint32_t*>(dataPtr + it->offset));
						const T colorA(T((rgba >> 24) & 0xff) / 255.);
						const T colorR(T((rgba >> 16) & 0xff) / 255.);
						const T colorG(T((rgba >> 8) & 0xff) / 255.);
						const T colorB(T((rgba >> 0) & 0xff) / 255.);
						view(0, ptId) = colorR;
						view(1, ptId) = colorG;
						view(2, ptId) = colorB;
						if (view.rows() > 3)
							view(3, ptId) = colorA;
						dataPtr += rosMsg.point_step;
						ptId += 1;
					}
				}
			}
			else
			{
				// get view for editing data
				View view(
					 (it->name == "normal_x") ? cloud.getDescriptorRowViewByName("normals", 0) :
					((it->name == "normal_y") ? cloud.getDescriptorRowViewByName("normals", 1) :
					((it->name == "normal_z") ? cloud.getDescriptorRowViewByName("normals", 2) :
					((isFeature[fieldId]) ? cloud.getFeatureViewByName(it->name) :
					cloud.getDescriptorViewByName(it->name))))
				);
				// use view to read data
				int ptId(0);
				const size_t count(std::max<size_t>(it->count, 1));
				for (size_t y(0); y < rosMsg.height; ++y)
				{
					const uint8_t* dataPtr(&rosMsg.data[0] + rosMsg.row_step*y);
					for (size_t x(0); x < rosMsg.width; ++x)
					{
						const uint8_t* fPtr(dataPtr + it->offset);
						for (unsigned dim(0); dim < count; ++dim)
						{
							switch (it->datatype)
							{
								case PF::INT8: view(dim, ptId) = T(*reinterpret_cast<const int8_t*>(fPtr)); fPtr += 1; break;
								case PF::UINT8: view(dim, ptId) = T(*reinterpret_cast<const uint8_t*>(fPtr)); fPtr += 1; break;
								case PF::INT16: view(dim, ptId) = T(*reinterpret_cast<const int16_t*>(fPtr)); fPtr += 2; break;
								case PF::UINT16: view(dim, ptId) = T(*reinterpret_cast<const uint16_t*>(fPtr)); fPtr += 2; break;
								case PF::INT32: view(dim, ptId) = T(*reinterpret_cast<const int32_t*>(fPtr)); fPtr += 4; break;
								case PF::UINT32: view(dim, ptId) = T(*reinterpret_cast<const uint32_t*>(fPtr)); fPtr += 4; break;
								case PF::FLOAT32: view(dim, ptId) = T(*reinterpret_cast<const float*>(fPtr)); fPtr += 4; break;
								case PF::FLOAT64: view(dim, ptId) = T(*reinterpret_cast<const double*>(fPtr)); fPtr += 8; break;
								default: abort();
							}
						}
						dataPtr += rosMsg.point_step;
						ptId += 1;
					}
				}
			}
		}
		return cloud;
	}
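A minimal subscriber-side sketch (the surrounding namespace and build setup are assumed, as in pointmatcher_ros; cloudCallback is only an illustrative name):

#include <sensor_msgs/PointCloud2.h>
#include "pointmatcher/PointMatcher.h"

void cloudCallback(const sensor_msgs::PointCloud2& msg)
{
	typedef PointMatcher<float> PM;
	// the converter defined above is assumed to be visible here
	// (e.g. through a PointMatcher_ros-style namespace)
	PM::DataPoints cloud = rosMsgToPointMatcherCloud<float>(msg);
	if (cloud.features.cols() == 0)
		return; // empty message or no fields
	// ... hand 'cloud' to data-point filters or an ICP chain
}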
template<typename T>
void DataPointsFiltersImpl<T>::SamplingSurfaceNormalDataPointsFilter::inPlaceFilter(
	DataPoints& cloud)
{
	typedef Matrix Features;
	typedef typename DataPoints::View View;
	typedef typename DataPoints::Label Label;
	typedef typename DataPoints::Labels Labels;

	const int pointsCount(cloud.features.cols());
	const int featDim(cloud.features.rows());
	const int descDim(cloud.descriptors.rows());

	int insertDim(0);
	if (averageExistingDescriptors)
	{
		// TODO: this should be in the form of an assert
		// Validate descriptors and labels
		for(unsigned int i = 0; i < cloud.descriptorLabels.size(); i++)
			insertDim += cloud.descriptorLabels[i].span;
		if (insertDim != descDim)
			throw InvalidField("SamplingSurfaceNormalDataPointsFilter: Error, descriptor labels do not match descriptor data");
	}

	// Compute space requirement for new descriptors
	const int dimNormals(featDim-1);
	const int dimDensities(1);
	const int dimEigValues(featDim-1);
	const int dimEigVectors((featDim-1)*(featDim-1));

	// Allocate space for new descriptors
	Labels cloudLabels;
	if (keepNormals)
		cloudLabels.push_back(Label("normals", dimNormals));
	if (keepDensities)
		cloudLabels.push_back(Label("densities", dimDensities));
	if (keepEigenValues)
		cloudLabels.push_back(Label("eigValues", dimEigValues));
	if (keepEigenVectors)
		cloudLabels.push_back(Label("eigVectors", dimEigVectors));
	cloud.allocateDescriptors(cloudLabels);

	// we keep build data on stack for reentrant behaviour
	View cloudExistingDescriptors(cloud.descriptors.block(0,0,cloud.descriptors.rows(),cloud.descriptors.cols()));
	BuildData buildData(cloud.features, cloud.descriptors);

	// get views
	if (keepNormals)
		buildData.normals = cloud.getDescriptorViewByName("normals");
	if (keepDensities)
		buildData.densities = cloud.getDescriptorViewByName("densities");
	if (keepEigenValues)
		buildData.eigenValues = cloud.getDescriptorViewByName("eigValues");
	if (keepEigenVectors)
		buildData.eigenVectors = cloud.getDescriptorViewByName("eigVectors");
	// build the new point cloud
	buildNew(
		buildData,
		0,
		pointsCount,
		cloud.features.rowwise().minCoeff(),
		cloud.features.rowwise().maxCoeff()
	);

	// Bring the data we keep to the front of the arrays then
	// wipe the leftover unused space.
	std::sort(buildData.indicesToKeep.begin(), buildData.indicesToKeep.end());
	int ptsOut = buildData.indicesToKeep.size();
	for (int i = 0; i < ptsOut; i++){
		int k = buildData.indicesToKeep[i];
		assert(i <= k);
		cloud.features.col(i) = cloud.features.col(k);
		if (cloud.descriptors.rows() != 0)
			cloud.descriptors.col(i) = cloud.descriptors.col(k);
		if(keepNormals)
			buildData.normals->col(i) = buildData.normals->col(k);
		if(keepDensities)
			(*buildData.densities)(0,i) = (*buildData.densities)(0,k);
		if(keepEigenValues)
			buildData.eigenValues->col(i) = buildData.eigenValues->col(k);
		if(keepEigenVectors)
			buildData.eigenVectors->col(i) = buildData.eigenVectors->col(k);
	}
	cloud.features.conservativeResize(Eigen::NoChange, ptsOut);
	cloud.descriptors.conservativeResize(Eigen::NoChange, ptsOut);

	// warning if some points were dropped
	if(buildData.unfitPointsCount != 0)
		LOG_INFO_STREAM("  SamplingSurfaceNormalDataPointsFilter - Could not compute normal for " << buildData.unfitPointsCount << " pts.");
}
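A short configuration sketch (assuming libpointmatcher's registrar API for creating filters by name; the parameter values are only illustrative, but the names follow the flags used above):

#include "pointmatcher/PointMatcher.h"

typedef PointMatcher<float> PM;

void subsampleWithNormals(PM::DataPoints& cloud)
{
	PM::Parameters params;
	params["knn"] = "7";           // neighbours used per box for the normal fit
	params["keepNormals"] = "1";   // adds the "normals" descriptor
	params["keepDensities"] = "1"; // adds the "densities" descriptor

	// assumed creation path: filters are usually instantiated by name
	const auto filter = PM::get().DataPointsFilterRegistrar.create(
		"SamplingSurfaceNormalDataPointsFilter", params);
	filter->inPlaceFilter(cloud);
}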
template<typename T>
void DataPointsFiltersImpl<T>::SurfaceNormalDataPointsFilter::inPlaceFilter(
	DataPoints& cloud)
{
	typedef typename DataPoints::View View;
	typedef typename DataPoints::Label Label;
	typedef typename DataPoints::Labels Labels;
	typedef typename MatchersImpl<T>::KDTreeMatcher KDTreeMatcher;
	typedef typename PointMatcher<T>::Matches Matches;

	const int pointsCount(cloud.features.cols());
	const int featDim(cloud.features.rows());
	const int descDim(cloud.descriptors.rows());

	// Validate descriptors and labels
	int insertDim(0);
	for(unsigned int i = 0; i < cloud.descriptorLabels.size(); i++)
		insertDim += cloud.descriptorLabels[i].span;
	if (insertDim != descDim)
		throw InvalidField("SurfaceNormalDataPointsFilter: Error, descriptor labels do not match descriptor data");

	// Reserve memory for new descriptors
	const int dimNormals(featDim-1);
	const int dimDensities(1);
	const int dimEigValues(featDim-1);
	const int dimEigVectors((featDim-1)*(featDim-1));
	//const int dimMatchedIds(knn);

	boost::optional<View> normals;
	boost::optional<View> densities;
	boost::optional<View> eigenValues;
	boost::optional<View> eigenVectors;
	boost::optional<View> matchedValues;

	Labels cloudLabels;
	if (keepNormals)
		cloudLabels.push_back(Label("normals", dimNormals));
	if (keepDensities)
		cloudLabels.push_back(Label("densities", dimDensities));
	if (keepEigenValues)
		cloudLabels.push_back(Label("eigValues", dimEigValues));
	if (keepEigenVectors)
		cloudLabels.push_back(Label("eigVectors", dimEigVectors));
	cloud.allocateDescriptors(cloudLabels);

	if (keepNormals)
		normals = cloud.getDescriptorViewByName("normals");
	if (keepDensities)
		densities = cloud.getDescriptorViewByName("densities");
	if (keepEigenValues)
		eigenValues = cloud.getDescriptorViewByName("eigValues");
	if (keepEigenVectors)
		eigenVectors = cloud.getDescriptorViewByName("eigVectors");
	// TODO: implement keepMatchedIds
//	if (keepMatchedIds)
//	{
//		cloud.allocateDescriptor("normals", dimMatchedIds);
//		matchedValues = cloud.getDescriptorViewByName("normals");
//	}

	// Build kd-tree
	Parametrizable::Parameters param;
	boost::assign::insert(param) ( "knn", toParam(knn) );
	boost::assign::insert(param) ( "epsilon", toParam(epsilon) );
	KDTreeMatcher matcher(param);
	matcher.init(cloud);

	Matches matches(typename Matches::Dists(knn, pointsCount), typename Matches::Ids(knn, pointsCount));
	matches = matcher.findClosests(cloud);

	// Search for surrounding points and compute descriptors
	int degenerateCount(0);
	for (int i = 0; i < pointsCount; ++i)
	{
		// Mean of nearest neighbors (NN)
		Matrix d(featDim-1, knn);
		for(int j = 0; j < int(knn); j++)
		{
			const int refIndex(matches.ids(j,i));
			d.col(j) = cloud.features.block(0, refIndex, featDim-1, 1);
		}

		const Vector mean = d.rowwise().sum() / T(knn);
		const Matrix NN = d.colwise() - mean;

		const Matrix C(NN * NN.transpose());
		Vector eigenVa = Vector::Identity(featDim-1, 1);
		Matrix eigenVe = Matrix::Identity(featDim-1, featDim-1);
		// Ensure that the matrix is suited for eigenvalues calculation
		if(keepNormals || keepEigenValues || keepEigenVectors)
		{
			if(C.fullPivHouseholderQr().rank()+1 >= featDim-1)
			{
				const Eigen::EigenSolver<Matrix> solver(C);
				eigenVa = solver.eigenvalues().real();
				eigenVe = solver.eigenvectors().real();
			}
			else
			{
				//std::cout << "WARNING: Matrix C needed for eigen decomposition is degenerated. Expected cause: no noise in data" << std::endl;
				++degenerateCount;
			}
		}

		if(keepNormals)
			normals->col(i) = computeNormal(eigenVa, eigenVe);
		if(keepDensities)
			(*densities)(0, i) = computeDensity(NN);
		if(keepEigenValues)
			eigenValues->col(i) = eigenVa;
		if(keepEigenVectors)
			eigenVectors->col(i) = serializeEigVec(eigenVe);
	}
	if (degenerateCount)
	{
		LOG_WARNING_STREAM("WARNING: Matrix C needed for eigen decomposition was degenerated in " << degenerateCount << " points over " << pointsCount << " (" << float(degenerateCount)*100.f/float(pointsCount) << " %)");
	}

}
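For reference, a standalone Eigen sketch of the per-point step above; computeNormal is not shown in this snippet, and taking the eigenvector of the smallest eigenvalue is assumed to be what it does:

#include <Eigen/Dense>

Eigen::Vector3f estimateNormal(const Eigen::Matrix3Xf& neighbours)
{
	// centre the k nearest neighbours and form C = NN * NN^T as in the loop above
	const Eigen::Vector3f mean = neighbours.rowwise().mean();
	const Eigen::Matrix3Xf NN = neighbours.colwise() - mean;
	const Eigen::Matrix3f C = NN * NN.transpose();

	// C is symmetric, so a self-adjoint solver suffices; its eigenvalues come back
	// sorted in increasing order, so column 0 is the direction of least variance
	const Eigen::SelfAdjointEigenSolver<Eigen::Matrix3f> solver(C);
	return solver.eigenvectors().col(0);
}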