Example #1
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/features/normal_3d.h>
#include <pcl/features/pfh.h>
#include <pcl/search/kdtree.h>
#include <boost/make_shared.hpp>
#include <cstdlib>   // drand48
#include <iostream>
#include <vector>

int main (int argc, char** argv)
{
  typedef pcl::PointXYZ InputPointType;
  pcl::PointCloud<InputPointType>::Ptr cloud (new pcl::PointCloud<InputPointType>);

  unsigned int numberOfPoints = 100;
  // Create a point cloud
  for(unsigned int i = 0; i < numberOfPoints; ++i)
    {
    InputPointType p; 
    // Random coordinate
    p.x = drand48(); p.y = drand48(); p.z = drand48();

    cloud->push_back(p);
    }

  // Compute the normals
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> normalEstimation;
  normalEstimation.setInputCloud (cloud);

  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  normalEstimation.setSearchMethod (tree);

  pcl::PointCloud<pcl::Normal>::Ptr cloudWithNormals (new pcl::PointCloud<pcl::Normal>);

  normalEstimation.setRadiusSearch (0.03);

  std::cout << "Computing normals..." << std::endl;
  normalEstimation.compute (*cloudWithNormals);

  // Setup the feature computation
  
  std::vector<int> pointIds(1);
  pointIds[0] = 0;

  pcl::PFHEstimation<pcl::PointXYZ, pcl::Normal, pcl::PFHSignature125> pfhEstimation;
  pfhEstimation.setInputCloud (cloud);
  pfhEstimation.setInputNormals(cloudWithNormals);
  pfhEstimation.setSearchMethod (tree);

  pcl::PointCloud<pcl::PFHSignature125>::Ptr pfhFeatures(new pcl::PointCloud<pcl::PFHSignature125>);
  pfhEstimation.setIndices(boost::make_shared<std::vector<int> >(pointIds));
  pfhEstimation.setKSearch(numberOfPoints - 1);

  // Actually compute the features
  std::cout << "Computing features..." << std::endl;
  pfhEstimation.compute (*pfhFeatures);

  std::cout << "output points.size (): " << pfhFeatures->points.size () << std::endl;

  // Display and retrieve the PFH descriptor for the 0th point.
  pcl::PFHSignature125 descriptor = pfhFeatures->points[0];
  std::cout << descriptor << std::endl;

  return 0;
}
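
The PFHSignature125 printed above stores its values in a fixed-size histogram array. As a minimal sketch (the helper name below is illustrative and not part of the example), the 125 bins can be copied into a std::vector for further processing:

#include <pcl/point_types.h>
#include <vector>

// Copy the 125 PFH bins into a std::vector (illustrative helper, not from the example).
std::vector<float> pfhToVector (const pcl::PFHSignature125& descriptor)
{
  return std::vector<float> (descriptor.histogram, descriptor.histogram + 125);
}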
Example #2
#include <pcl/point_types.h>
#include <pcl/io/pcd_io.h>
#include <pcl/features/normal_3d.h>
#include <pcl/features/3dsc.h>
#include <pcl/search/kdtree.h>
#include <iostream>
#include <string>
#include <vector>

int main (int argc, char** argv)
{
  std::string fileName = argv[1];
  std::cout << "Reading " << fileName << std::endl;

  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);

  if (pcl::io::loadPCDFile<pcl::PointXYZ> (fileName, *cloud) == -1) //* load the file
  {
    PCL_ERROR ("Couldn't read file");
    return (-1);
  }

  std::cout << "Loaded " << cloud->points.size() << " points." << std::endl;

  // Compute the normals
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> normalEstimation;
  normalEstimation.setInputCloud (cloud);

  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  normalEstimation.setSearchMethod (tree);

  pcl::PointCloud<pcl::Normal>::Ptr cloudWithNormals (new pcl::PointCloud<pcl::Normal>);

  normalEstimation.setRadiusSearch (0.03);

  normalEstimation.compute (*cloudWithNormals);

  // Setup the shape context computation
  typedef pcl::SHOT ShapeContext; // Signature of Histograms of OrienTations (SHOT).
  pcl::ShapeContext3DEstimation<pcl::PointXYZ, pcl::Normal, ShapeContext > shapeContext;
  // Provide the original point cloud (without normals)
  shapeContext.setInputCloud (cloud);
  // Provide the point cloud with normals
  shapeContext.setInputNormals(cloudWithNormals);
  // Use the same KdTree from the normal estimation
  shapeContext.setSearchMethod (tree);

  pcl::PointCloud<ShapeContext>::Ptr shapeContextFeatures(new pcl::PointCloud<ShapeContext>);

  // The search radius must be set above the minimal radius reported below
  std::cout << "min radius: " << shapeContext.getMinimalRadius() << std::endl;
  shapeContext.setRadiusSearch (0.2);

  // Actually compute the shape contexts
  shapeContext.compute (*shapeContextFeatures);

  std::cout << "output points.size (): " << shapeContextFeatures->points.size () << std::endl;

  // Display and retrieve the shape context descriptor vector for the 0th point.
  std::cout << shapeContextFeatures->points[0] << std::endl;
  std::vector<float> descriptor = shapeContextFeatures->points[0].descriptor;

  return 0;
}
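
A common next step after retrieving the descriptor vector is to compare two descriptors, for example with a Euclidean distance. This is a minimal sketch in plain C++ (the function name is illustrative and not part of the example):

#include <cmath>
#include <vector>

// Euclidean distance between two descriptor vectors of equal length.
float descriptorDistance (const std::vector<float>& a, const std::vector<float>& b)
{
  float d = 0.0f;
  for (std::size_t i = 0; i < a.size () && i < b.size (); ++i)
  {
    const float diff = a[i] - b[i];
    d += diff * diff;
  }
  return std::sqrt (d);
}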
Example #3
#include <pcl/point_types.h>
#include <pcl/io/pcd_io.h>
#include <pcl/features/normal_3d.h>
#include <pcl/features/spin_image.h>
#include <pcl/search/kdtree.h>
#include <iostream>
#include <string>

int main (int argc, char** argv)
{
  std::string fileName = argv[1];
  std::cout << "Reading " << fileName << std::endl;

  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);

  if (pcl::io::loadPCDFile<pcl::PointXYZ> (fileName, *cloud) == -1) //* load the file
  {
    PCL_ERROR ("Couldn't read file");
    return (-1);
  }

  std::cout << "Loaded " << cloud->points.size() << " points." << std::endl;

  // Compute the normals
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> normalEstimation;
  normalEstimation.setInputCloud (cloud);

  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  normalEstimation.setSearchMethod (tree);

  pcl::PointCloud<pcl::Normal>::Ptr cloudWithNormals (new pcl::PointCloud<pcl::Normal>);

  normalEstimation.setRadiusSearch (0.03);

  normalEstimation.compute (*cloudWithNormals);

  // Setup the spin images computation

  typedef pcl::Histogram<153> SpinImage;
  pcl::SpinImageEstimation<pcl::PointXYZ, pcl::Normal, SpinImage> spinImageEstimation(8, 0.5, 16);
  // Provide the original point cloud (without normals)
  //spinImageEstimation.setInputCloud (cloud);
  // Provide the point cloud with normals
  //spinImageEstimation.setInputNormals(cloudWithNormals);
  spinImageEstimation.setInputWithNormals(cloud, cloudWithNormals);
  // Use the same KdTree from the normal estimation
  spinImageEstimation.setSearchMethod (tree);

  pcl::PointCloud<SpinImage>::Ptr spinImages(new pcl::PointCloud<SpinImage>);

  spinImageEstimation.setRadiusSearch (0.2);

  // Actually compute the spin images
  spinImageEstimation.compute (*spinImages);

  std::cout << "output points.size (): " << spinImages->points.size () << std::endl;

  // Display and retrieve the spin image descriptor for the 0th point.
  SpinImage descriptor = spinImages->points[0];
  std::cout << descriptor << std::endl;

  return 0;
}
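
Since SpinImage is just pcl::Histogram<153>, the raw bin values are directly accessible through its histogram array. A minimal sketch that finds the largest bin (the helper name is illustrative, not from the example):

#include <pcl/point_types.h>
#include <algorithm>

typedef pcl::Histogram<153> SpinImage;

// Return the index of the largest bin in a spin image (illustrative helper).
int largestSpinImageBin (const SpinImage& descriptor)
{
  return static_cast<int> (std::max_element (descriptor.histogram, descriptor.histogram + 153)
                           - descriptor.histogram);
}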
Example #4
pcl::PointCloud<PointTNormal>::Ptr addNormalsToPointCloud(pcl::PointCloud<PointT>::Ptr cloud_input){
    pcl::PointCloud<PointTNormal>::Ptr cloudWithNormals(new pcl::PointCloud<PointTNormal>());

    pcl::NormalEstimation<PointT, pcl::Normal> ne;
    ne.setInputCloud (cloud_input);
    pcl::search::KdTree<PointT>::Ptr tree (new pcl::search::KdTree<PointT> ());
    ne.setSearchMethod (tree);
    pcl::PointCloud<pcl::Normal>::Ptr cloud_normals (new pcl::PointCloud<pcl::Normal>);
    ne.setRadiusSearch (0.03);
    ne.compute (*cloud_normals);
    pcl::concatenateFields(*cloud_input,*cloud_normals,*cloudWithNormals);

    return cloudWithNormals;
}
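
The PointT and PointTNormal typedefs are not shown in this snippet. A plausible pair that makes the concatenateFields call type-check would be the following (an assumption on our part, not taken from the example):

// Assumed typedefs; the original example does not show them.
typedef pcl::PointXYZ PointT;
typedef pcl::PointNormal PointTNormal;  // XYZ plus normal fields, matching concatenateFields(XYZ, Normal, ...)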
int main (int argc, char** argv)
{
  PointCloudType::Ptr cloud(new PointCloudType);
  PointCloudType::PointType p0(1,2,3);
  cloud->push_back(p0);

  // Call without normals
  {
  ComputeFeatures(cloud);
  }

  // Call with normals
  {
  PointCloudWithNormalsType::Ptr cloudWithNormals(new PointCloudWithNormalsType);
  copyPointCloud(*cloud, *cloudWithNormals);
  ComputeFeatures(cloudWithNormals);
  }

  return 0;
}
void ComputeSpinImages(InputCloud::Ptr input, OutputCloud::Ptr output)
{
  // Compute the normals
  std::cout << "Computing normals..." << std::endl;
  pcl::NormalEstimation<InputCloud::PointType, pcl::Normal> normalEstimation;
  normalEstimation.setInputCloud (input);

  pcl::search::KdTree<InputCloud::PointType>::Ptr tree (new pcl::search::KdTree<InputCloud::PointType>);
  normalEstimation.setSearchMethod (tree);

  pcl::PointCloud<pcl::Normal>::Ptr cloudWithNormals (new pcl::PointCloud<pcl::Normal>);

  normalEstimation.setRadiusSearch (0.03);

  normalEstimation.compute (*cloudWithNormals);

  pcl::SpinImageEstimation<InputCloud::PointType, pcl::Normal, OutputCloud::PointType> spinImageEstimation(8, 0.5, 16);
  // Provide the original point cloud (without normals)
  //spinImageEstimation.setInputCloud (cloud);
  // Provide the point cloud with normals
  //spinImageEstimation.setInputNormals(cloudWithNormals);
  spinImageEstimation.setInputWithNormals(input, cloudWithNormals);
  // Use the same KdTree from the normal estimation
  spinImageEstimation.setSearchMethod (tree);

  OutputCloud::Ptr spinImages(new OutputCloud);

  //spinImageEstimation.setRadiusSearch (0.2);
  //spinImageEstimation.setRadiusSearch (0.4);
  //spinImageEstimation.setMinPointCountInNeighbourhood(5); // If fewer than this many points are found within the specified radius, an error results.

  spinImageEstimation.setKSearch(20);

  // Actually compute the spin images
  std::cout << "Computing spin images..." << std::endl;
  spinImageEstimation.compute (*spinImages);

  std::cout << "output points.size (): " << spinImages->points.size () << std::endl;

}
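
Similarly, the InputCloud and OutputCloud typedefs used by ComputeSpinImages are not shown. Definitions consistent with the spin-image signature in Example #3 could look like this (again an assumption, not taken from the snippet):

// Assumed typedefs; pcl::PointCloud provides the nested ::PointType used above.
typedef pcl::PointCloud<pcl::PointXYZ> InputCloud;
typedef pcl::PointCloud<pcl::Histogram<153> > OutputCloud;  // 153-bin spin image, as in Example #3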
int main (int argc, char** argv)
{
	boost::program_options::variables_map variablesMap;

	// Parse parameters, exit if help is requested
	if (parse_command_line_options(variablesMap, argc, argv))
		return 1;

	// General parameters
	std::string sessionName			= variablesMap[PARAM_NAME].as<std::string>();
	bool enabledViewer				= variablesMap[PARAM_VIEWER].as<bool>();
	int rotationModifier			= variablesMap[PARAM_ROTATION].as<int>();
	std::string cloudsDirectory		= variablesMap[PARAM_CLOUDSDIR].as<std::string>();
	int numClouds					= variablesMap[PARAM_NUMCLOUDS].as<int>();
	double stepRotationAngle		= variablesMap[PARAM_STEP_ANGLE].as<double>();
	bool saveBinaryFormat			= variablesMap[PARAM_SAVE_BINARY].as<bool>();

	// Ground truth parameters
	double groundCenterX = variablesMap[PARAM_GROUND_CENTER_X].as<double>();
	double groundCenterY = variablesMap[PARAM_GROUND_CENTER_Y].as<double>();
	double groundCenterZ = variablesMap[PARAM_GROUND_CENTER_Z].as<double>();
	double groundNormalX = variablesMap[PARAM_GROUND_NORMAL_X].as<double>();
	double groundNormalY = variablesMap[PARAM_GROUND_NORMAL_Y].as<double>();
	double groundNormalZ = variablesMap[PARAM_GROUND_NORMAL_Z].as<double>();

	// Bilateral Filtering parameters
	bool applyBilateralFilter = variablesMap[PARAM_BILATERAL_FILTER].as<bool>();
	float bilateralFilterSigmaR = variablesMap[PARAM_BILATERAL_FILTER_SIGMA_R].as<float>();
	float bilateralFilterSigmaS = variablesMap[PARAM_BILATERAL_FILTER_SIGMA_S].as<float>();

	// Normal estimation parameters
	int normalEstimationK				= variablesMap[PARAM_NORMAL_ESTIMATION_K].as<int>();
	int downsampledNormalEstimationK	= variablesMap[PARAM_NORMAL_ESTIMATION_DOWN_K].as<int>();

	// SOR parameters
	bool sorEnabled		= variablesMap[PARAM_SOR].as<bool>();
	int sorMeanK		= variablesMap[PARAM_SOR_MEANK].as<int>();
	float sorStdDev		= variablesMap[PARAM_SOR_STDDEV].as<float>();

	// ROR parameters
	bool rorEnabled		= variablesMap[PARAM_ROR].as<bool>();
	int rorNeighbors	= variablesMap[PARAM_ROR_NEIGHBORS].as<int>();
	float rorRadius		= variablesMap[PARAM_ROR_RADIUS].as<float>();

	// Cut parameters
	bool cutEnabled		= variablesMap[PARAM_CUT].as<bool>();
	int cutAmount		= variablesMap[PARAM_CUT_AMOUNT].as<int>();

	// ECE parameters
	bool eceEnabled		= variablesMap[PARAM_ECE].as<bool>();
	int eceClusters		= variablesMap[PARAM_ECE_CLUSTERS].as<int>();
	int eceMaxSize		= variablesMap[PARAM_ECE_MAXSIZE].as<int>();
	int eceMinSize		= variablesMap[PARAM_ECE_MINSIZE].as<int>();
	float eceTolerance	= variablesMap[PARAM_ECE_TOLERANCE].as<float>();

	// ICP parameters
	bool icpEnabled		= variablesMap[PARAM_ICP].as<bool>();
	int icpIterations	= variablesMap[PARAM_ICP_MAXITERATIONS].as<int>();
	double icpEpsilon	= variablesMap[PARAM_ICP_EPSILON].as<double>();

	// VG parameters
	bool vgEnabled		= variablesMap[PARAM_VG].as<bool>();
	float vgLeafSize	= variablesMap[PARAM_VG_LEAFSIZE].as<float>();

	// PMR parameters
	bool pmrEnabled		= variablesMap[PARAM_PMR].as<bool>();
	float pmrSrchRadM	= variablesMap[PARAM_PMR_MLS_SEARCHRADIUS].as<float>();
	bool pmrPolyFit		= variablesMap[PARAM_PMR_MLS_POLYNOMIALFIT].as<bool>();
	int pmrPolyOrder	= variablesMap[PARAM_PMR_MLS_POLYNOMIALORDER].as<int>();
	float pmrUpSmpRad	= variablesMap[PARAM_PMR_MLS_UPSAMPLINGRADIUS].as<float>();
	float pmrUpSmpStp	= variablesMap[PARAM_PMR_MLS_UPSAMPLINGSTEP].as<float>();
	float pmrSrchRadN	= variablesMap[PARAM_PMR_NE_RADIUSSEARCH].as<float>();
	int pmrDepth		= variablesMap[PARAM_PMR_P_DEPTH].as<int>();


	// Get PCD filenames from the specified directory
	std::vector<std::string> filenames;
	Utils::get_pcd_filenames(cloudsDirectory, filenames);

	// Ground truth center and normal
	Eigen::Vector3f groundCenter(groundCenterX, groundCenterY, groundCenterZ);
	Eigen::Vector3f groundNormal(groundNormalX, groundNormalY, groundNormalZ);

	pcl::visualization::PCLVisualizer viewer(sessionName.c_str());
	std::vector<pcl::PointCloud<pcl::PointXYZRGB>::Ptr> clouds;
	Eigen::Matrix4f icpAccumulatedTransformation = Eigen::Matrix4f::Identity();

	int rotationMultiplier = 0;
	for (auto it = filenames.begin(); it != filenames.end() && rotationMultiplier < numClouds; ++it)
	{
		pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZRGB>());

		// Load PCD file
		if (pcl::io::loadPCDFile (*it, *cloud) < 0)  
		{
			std::cerr << "Error loading point cloud " << *it << "\n";
			return -1;
		}

		std::cout << "Loaded point cloud: " << *it << "\n";

		// Apply bilateral filter to cloud
		if (applyBilateralFilter)
			PointCloudOperations::filter_bilateral
				(
					cloud,
					bilateralFilterSigmaR,
					bilateralFilterSigmaS,
					cloud
				);

		// Remove NaNs from the point cloud
		std::vector<int> mapping;
		pcl::removeNaNFromPointCloud(*cloud, *cloud, mapping);
		std::cout << "NaNs removed..." << "\n";

		// Statistical Outlier Removal filtering
		if (sorEnabled)
			PointCloudOperations::sor_cloud	
				(
					cloud, 
					cloud, 
					sorMeanK, 
					sorStdDev
				);

		// Radial Outlier Removal filtering
		if (rorEnabled)
			PointCloudOperations::ror_cloud
				(
					cloud,
					cloud,
					rorNeighbors,
					rorRadius
				);

		// Cloud cutting threshold
		if (cutEnabled)
			PointCloudOperations::cut_cloud
				(
					cloud,
					cloud,
					cutAmount
				);

		// Euclidean Cluster Extraction filter
		if (eceEnabled)
			PointCloudOperations::ece_cloud
				(
					cloud, 
					cloud, 
					eceTolerance,
					eceMinSize,
					eceMaxSize,
					eceClusters
				);

		// Translation to origin
		PointCloudOperations::translate_cloud
			(
				cloud, 
				cloud, 
				groundCenter
			);

		// Step rotation
		PointCloudOperations::rotate_cloud
			(
				cloud, 
				cloud, 
				(rotationModifier * stepRotationAngle * rotationMultiplier), 
				groundNormal
			);

		// Cloud ICP alignment
		if (icpEnabled && rotationMultiplier > 0)
			PointCloudOperations::icp_align_cloud
				(
					cloud,
					clouds.at(rotationMultiplier - 1),
					cloud,
					icpIterations,
					icpEpsilon
				);

		clouds.push_back(cloud);
		++rotationMultiplier;
	}

	// Concatenate all the processed clouds
	pcl::PointCloud<pcl::PointXYZRGB>::Ptr concatenatedClouds(new pcl::PointCloud<pcl::PointXYZRGB>());
	PointCloudOperations::concatenate_clouds(clouds, concatenatedClouds);

	//PointCloudOperations::translate_cloud(concatenatedClouds, concatenatedClouds, Eigen::Vector3f(-groundCenter.x(), -groundCenter.y(), -groundCenter.z()));

	// Save 360-registered clouds
	pcl::io::savePCDFile(sessionName + "_cloud.pcd", *concatenatedClouds, saveBinaryFormat);
	pcl::io::savePLYFile(sessionName + "_cloud.ply", *concatenatedClouds, saveBinaryFormat);

	// Save cloud normals
	pcl::PointCloud<pcl::Normal>::Ptr normals(new pcl::PointCloud<pcl::Normal>());
	PointCloudOperations::compute_normals(concatenatedClouds, normalEstimationK, normals);
	for (size_t i = 0; i < normals->size(); ++i)
	{
		normals->points[i].normal_x *= -1.0;
		normals->points[i].normal_y *= -1.0;
		normals->points[i].normal_z *= -1.0;
	}
	pcl::io::savePCDFile(sessionName + "_cloud_normals.pcd", *normals, saveBinaryFormat);
	pcl::io::savePLYFile(sessionName + "_cloud_normals.ply", *normals, saveBinaryFormat);

	// Save cloud with normals
	pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloudWithNormals(new pcl::PointCloud<pcl::PointXYZRGBNormal>());
	pcl::concatenateFields(*concatenatedClouds, *normals, *cloudWithNormals);
	pcl::io::savePCDFile(sessionName + "_cloud_with_normals.pcd", *cloudWithNormals, saveBinaryFormat);
	pcl::io::savePLYFile(sessionName + "_cloud_with_normals.ply", *cloudWithNormals, saveBinaryFormat);

	pcl::PointCloud<pcl::PointXYZRGB>::Ptr downsampledCloud(new pcl::PointCloud<pcl::PointXYZRGB>());
	pcl::PointCloud<pcl::Normal>::Ptr downsampledNormals(new pcl::PointCloud<pcl::Normal>());
	pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr downsampledCloudWithNormals(new pcl::PointCloud<pcl::PointXYZRGBNormal>());

	// Voxel Grid filter
	if (vgEnabled)
	{
		PointCloudOperations::vg_cloud
			(
				concatenatedClouds,
				downsampledCloud,
				vgLeafSize
			);

		// Save downsampled clouds
		pcl::io::savePCDFile(sessionName + "_cloud_downsampled.pcd", *downsampledCloud, saveBinaryFormat);
		pcl::io::savePLYFile(sessionName + "_cloud_downsampled.ply", *downsampledCloud, saveBinaryFormat);

		// Save downsampled clouds normals
		PointCloudOperations::compute_normals(downsampledCloud, downsampledNormalEstimationK, downsampledNormals);
		for (size_t i = 0; i < downsampledNormals->size(); ++i)
		{
			downsampledNormals->points[i].normal_x *= -1.0;
			downsampledNormals->points[i].normal_y *= -1.0;
			downsampledNormals->points[i].normal_z *= -1.0;
		}
		pcl::io::savePCDFile(sessionName + "_cloud_downsampled_normals.pcd", *downsampledNormals, saveBinaryFormat);
		pcl::io::savePLYFile(sessionName + "_cloud_downsampled_normals.ply", *downsampledNormals, saveBinaryFormat);

		// Save downsampled clouds with normals
		pcl::concatenateFields(*downsampledCloud, *downsampledNormals, *downsampledCloudWithNormals);
		pcl::io::savePCDFile(sessionName + "_cloud_downsampled_with_normals.pcd", *downsampledCloudWithNormals, saveBinaryFormat);
		pcl::io::savePLYFile(sessionName + "_cloud_downsampled_with_normals.ply", *downsampledCloudWithNormals, saveBinaryFormat);
	}
	else
	{
		//pcl::copyPointCloud(*concatenatedClouds, *downsampledCloud);
		downsampledCloud = concatenatedClouds;
		downsampledNormals = normals;
		downsampledCloudWithNormals = cloudWithNormals;
	}

	// Poisson Mesh reconstruction
	if (pmrEnabled)
	{
		pcl::PolygonMesh mesh;

		PointCloudOperations::pmr_cloud
			(
				downsampledCloud,
				mesh,
				pmrSrchRadM,
				pmrPolyFit,
				pmrPolyOrder,
				pmrUpSmpRad,
				pmrUpSmpStp,
				pmrSrchRadN,
				pmrDepth
			);

		// Save reconstructed mesh
		pcl::io::savePLYFile(sessionName + "_mesh.ply", mesh, 5);

		viewer.addPolygonMesh(mesh);
	}

	// Set up the viewer
	if (enabledViewer)
	{
		std::cout << "Adding clouds to viewer...\n";

		pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> cloudColorHandler(downsampledCloud);
		viewer.addPointCloud(downsampledCloud, cloudColorHandler, "cloud");
		viewer.setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 2, "cloud");
		viewer.addPointCloudNormals<pcl::PointXYZRGB, pcl::Normal>(downsampledCloud, downsampledNormals, 100, 0.05, "cloudNormals");

		viewer.addCoordinateSystem(0.75, 0);
		viewer.setBackgroundColor(0.8, 0.8, 0.8, 0);

		while (!viewer.wasStopped())
		{
			viewer.spinOnce();
		}
	}

	return 0;
}
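
The PointCloudOperations helpers called above are not shown on this page. As an illustration only, a compute_normals helper with the assumed (cloud, k, normals) signature could be sketched along the lines of the other examples:

#include <pcl/features/normal_3d.h>
#include <pcl/search/kdtree.h>

// Sketch of a k-nearest-neighbour normal estimation helper; the real
// PointCloudOperations::compute_normals implementation is not shown here.
void compute_normals_sketch (const pcl::PointCloud<pcl::PointXYZRGB>::Ptr& input,
                             int k,
                             pcl::PointCloud<pcl::Normal>::Ptr& normals)
{
  pcl::NormalEstimation<pcl::PointXYZRGB, pcl::Normal> ne;
  ne.setInputCloud (input);
  pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZRGB>);
  ne.setSearchMethod (tree);
  ne.setKSearch (k);
  ne.compute (*normals);
}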
#include <pcl/point_types.h>
#include <pcl/io/pcd_io.h>
#include <pcl/features/normal_3d.h>
#include <pcl/features/vfh.h>
#include <pcl/search/kdtree.h>
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>

int main (int argc, char** argv)
{
    std::string fileName = argv[1];
    std::cout << "Reading " << fileName << std::endl;

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);

    if (pcl::io::loadPCDFile<pcl::PointXYZ> (fileName, *cloud) == -1) //* load the file
    {
        PCL_ERROR ("Couldn't read file");
        return (-1);
    }

    std::cout << "Loaded " << cloud->points.size() << " points." << std::endl;

    // Compute the normals
    pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> normalEstimation;
    normalEstimation.setInputCloud (cloud);

    pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
    normalEstimation.setSearchMethod (tree);

    pcl::PointCloud<pcl::Normal>::Ptr cloudWithNormals (new pcl::PointCloud<pcl::Normal>);

    normalEstimation.setRadiusSearch (0.03);

    normalEstimation.compute (*cloudWithNormals);

    std::cout << "Computed " << cloudWithNormals->points.size() << " normals." << std::endl;

    // Setup the feature computation
    typedef pcl::VFHEstimation<pcl::PointXYZ, pcl::Normal, pcl::VFHSignature308> VFHEstimationType;
    VFHEstimationType vfhEstimation;

    // Provide the original point cloud (without normals)
    vfhEstimation.setInputCloud (cloud);

    // Provide the point cloud with normals
    vfhEstimation.setInputNormals(cloudWithNormals);

    // Use the same KdTree from the normal estimation
    vfhEstimation.setSearchMethod (tree);

    //vfhEstimation.setRadiusSearch (0.2); // With this, error: "Both radius (.2) and K (1) defined! Set one of them to zero first and then re-run compute()"

    // Actually compute the VFH features
    pcl::PointCloud<pcl::VFHSignature308>::Ptr vfhFeatures(new pcl::PointCloud<pcl::VFHSignature308>);
    vfhEstimation.compute (*vfhFeatures);

    std::cout << "output points.size (): " << vfhFeatures->points.size () << std::endl; // This outputs 1 - should be 397!

    // Display and retrieve the VFH descriptor (the single point in the output cloud).
    pcl::VFHSignature308 descriptor = vfhFeatures->points[0];
    VFHEstimationType::PointCloudOut::PointType descriptor2 = vfhFeatures->points[0];
    std::cout << "VFH:" << descriptor << std::endl;
    std::cout << "Numero de Elementos del VFH = " << sizeof(descriptor.histogram)/sizeof(descriptor.histogram[0]) << std::endl;


    // Create *_vfh.pcd file
    std::stringstream vfh_file;
    vfh_file << "# .PCD v.6 - Point Cloud Data file format" << std::endl;
    vfh_file << "FIELDS vfh" << std::endl;
    vfh_file << "SIZE 4" << std::endl;
    vfh_file << "TYPE F" << std::endl;
    vfh_file << "COUNT 308" << std::endl;
    vfh_file << "WIDTH 1" << std::endl;
    vfh_file << "HEIGHT 1" << std::endl;
    vfh_file << "POINTS 1" << std::endl;
    vfh_file << "DATA ascii" << std::endl;
    int vfh_length = sizeof(descriptor.histogram)/sizeof(descriptor.histogram[0]);
    for (int i = 0; i < vfh_length; i++)
    {
        vfh_file << descriptor.histogram[i] << " ";
    }

    std::ofstream outFile;
    outFile.open("Prueba_vfh.pcd");
    outFile << vfh_file.str();
    outFile.close();

    return 0;
}
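
The manual PCD assembly above can usually be replaced by the standard writer, since pcl::VFHSignature308 is a registered point type. A minimal sketch, assuming the vfhFeatures cloud from the example above (the output file name is illustrative):

#include <pcl/io/pcd_io.h>

// Write the single VFH signature with the standard PCD writer instead of by hand.
pcl::io::savePCDFileASCII ("vfh_descriptor.pcd", *vfhFeatures);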
#include <pcl/point_types.h>
#include <pcl/io/pcd_io.h>
#include <pcl/features/normal_3d.h>
#include <pcl/features/cvfh.h>
#include <pcl/search/kdtree.h>
#include <cstdlib>   // drand48
#include <iostream>
#include <string>

int main (int argc, char** argv)
{
  // The PCD file is never used in this example (the load below is commented out
  // and a random cloud is generated instead), so argv[1] is not required.
  // std::string fileName = argv[1];
  // std::cout << "Reading " << fileName << std::endl;

  typedef pcl::PointXYZ PointType;
  pcl::PointCloud<PointType>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
  
  for(unsigned int i = 0; i < 121; ++i)
    {
    PointType p; p.x = drand48(); p.y = drand48(); p.z = drand48();
    cloud->push_back(p);
    }

//   if (pcl::io::loadPCDFile<pcl::PointXYZ> (fileName, *cloud) == -1) //* load the file
//   {
//     PCL_ERROR ("Couldn't read file");
//     return (-1);
//   }

  std::cout << "Loaded " << cloud->points.size() << " points." << std::endl;

  // Compute the normals
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> normalEstimation;
  normalEstimation.setInputCloud (cloud);

  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  normalEstimation.setSearchMethod (tree);

  pcl::PointCloud<pcl::Normal>::Ptr cloudWithNormals (new pcl::PointCloud<pcl::Normal>);

  normalEstimation.setRadiusSearch (0.03);

  normalEstimation.compute (*cloudWithNormals);

  std::cout << "Computed " << cloudWithNormals->points.size() << " normals." << std::endl;
  
  // Setup the feature computation
  pcl::CVFHEstimation<pcl::PointXYZ, pcl::Normal, pcl::VFHSignature308> cvfhEstimation;

  // Provide the original point cloud (without normals)
  cvfhEstimation.setInputCloud (cloud);

  // Provide the point cloud with normals
  cvfhEstimation.setInputNormals(cloudWithNormals);

  // Use the same KdTree from the normal estimation
  cvfhEstimation.setSearchMethod (tree);

  //cvfhEstimation.setRadiusSearch (0.2); // With this, error: "Both radius (.2) and K (1) defined! Set one of them to zero first and then re-run compute()"

  // Actually compute the CVFH features
  pcl::PointCloud<pcl::VFHSignature308>::Ptr vfhFeatures(new pcl::PointCloud<pcl::VFHSignature308>);
  cvfhEstimation.compute (*vfhFeatures);

  std::cout << "output points.size (): " << vfhFeatures->points.size () << std::endl; // This outputs 1 - should be 397!

  // Display and retrieve the CVFH descriptor for the 0th cluster.
  pcl::VFHSignature308 descriptor = vfhFeatures->points[0];
  std::cout << descriptor << std::endl;

  return 0;
}
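
Since CVFH produces one VFHSignature308 per detected smooth cluster, the output cloud can contain more than one signature. A minimal sketch that loops over all of them, assuming the vfhFeatures cloud from the example above:

for (std::size_t i = 0; i < vfhFeatures->points.size (); ++i)
{
  const pcl::VFHSignature308& signature = vfhFeatures->points[i];
  std::cout << "Signature " << i << ", first bin: " << signature.histogram[0] << std::endl;
}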