// global variables used: NONE
void
vtree_user::process_file(std::string& filename, std::vector<FeatureVector>& clouds, bool onlySaveClouds )
{

	ROS_INFO("Processing file %s...", filename.c_str());

	// load the file
	pcl::PointCloud<PointT>::Ptr cloud(new pcl::PointCloud<PointT>);
    pcl::io::loadPCDFile<PointT> (filename, *cloud);

	// Compute the features
	pcl::PointCloud<PointT>::Ptr keypoint_cloud ( new pcl::PointCloud<PointT> );
	pcl::PointCloud<FeatureType>::Ptr feature_cloud( new pcl::PointCloud<FeatureType> );
	compute_features( cloud, keypoint_cloud, feature_cloud );
		
	// Rectify the historgram values to ensure they are in [0,100]
	FeatureVector feature_vector;		
	for( pcl::PointCloud<FeatureType>::iterator iter = feature_cloud->begin();
	iter != feature_cloud->end(); ++iter)
	{
		rectify_histogram( *iter );			
		feature_vector.push_back( FeatureHist( iter->histogram ) );
	}
	ROS_INFO("Keypoints extracted and %d Features computed", static_cast<int>(feature_cloud -> size()) );

	if (!onlySaveClouds)
		clouds.push_back( feature_vector );
}
// --- Example 2 ---
template <typename PointSource, typename PointFeature> void
pcl::MultiscaleFeaturePersistence<PointSource, PointFeature>::computeFeaturesAtAllScales ()
{
  // For every configured scale, compute the feature cloud and also store a
  // vectorized (plain float) representation of each feature.
  const size_t num_scales = scale_values_.size ();
  features_at_scale_.resize (num_scales);
  features_at_scale_vectorized_.resize (num_scales);

  for (size_t s = 0; s < num_scales; ++s)
  {
    // Features at this particular scale.
    FeatureCloudPtr scale_features (new FeatureCloud ());
    computeFeatureAtScale (scale_values_[s], scale_features);
    features_at_scale_[s] = scale_features;

    // Flatten each feature into a float vector via the point representation.
    const size_t num_features = scale_features->points.size ();
    std::vector<std::vector<float> > vectorized (num_features);
    for (size_t f = 0; f < num_features; ++f)
    {
      std::vector<float> values (feature_representation_->getNumberOfDimensions ());
      feature_representation_->vectorize (scale_features->points[f], values);
      vectorized[f] = values;
    }
    features_at_scale_vectorized_[s] = vectorized;
  }
}
void 
vtree_user::match_list( pcl::PointCloud<PointT>::Ptr & point_cloud_in,
						std::vector<std::pair<float,std::string> > & match_names,
						std::vector<std::pair<float,std::string> > & cluster_match_names,
						int num_match )
{
	// Match the input cloud against the vocabulary-tree database.
	// Fills `match_names` with (score, name) pairs for the whole cloud and,
	// when clustering is enabled, `cluster_match_names` with per-cluster
	// matches sorted by score.

	// Extract keypoints and compute a descriptor per keypoint.
	ROS_INFO("Extracting keypoints and computing features! We have a cloud with %d points", static_cast<int>(point_cloud_in->size()) );
	
	pcl::PointCloud<PointT>::Ptr keypoint_cloud ( new pcl::PointCloud<PointT> );
	pcl::PointCloud<FeatureType>::Ptr feature_cloud( new pcl::PointCloud<FeatureType> );
	compute_features( point_cloud_in, keypoint_cloud, feature_cloud );
	
	int num_feat = feature_cloud->size();
	ROS_INFO("Done. %d features found", num_feat);
	
	if( num_feat == 0)
	{
		ROS_INFO("The feature cloud is empty");
		return;
	}
	
	// Rectify the histogram values to ensure they are in [0,100], quantize
	// each feature against the vocabulary tree and build a document.
	vt::Document full_doc;		
	for( pcl::PointCloud<FeatureType>::iterator iter = feature_cloud->begin();
	iter != feature_cloud->end(); ++iter)
	{
		rectify_histogram( *iter );
		full_doc.push_back(tree.quantize( FeatureHist( iter->histogram ) ));	
	}
	
	// Cluster the keypoints in 3D. The ANN point array is only needed when
	// clustering is enabled, so only allocate it then (the original code
	// allocated it unconditionally and leaked it when clustering was off).
	ANNpointArray ann_points = NULL;
	if (enable_clustering)
		ann_points = annAllocPts(num_feat, 3);
	std::vector<KeypointExt*> extended_keypoints;
		
	for( int i=0; i < num_feat; ++i )
	{
		if (enable_clustering)
	  	{
	  		ann_points[i][0] = keypoint_cloud->points[i].x;
	  		ann_points[i][1] = keypoint_cloud->points[i].y;
	  		ann_points[i][2] = keypoint_cloud->points[i].z;
	  	}
		extended_keypoints.push_back( new KeypointExt( feature_cloud->at(i), full_doc[i] ) );
	}
	
	int cluster_count = 0;
	std::vector<int> cluster_sizes;
	if (enable_clustering)
	{
		std::vector<int> membership(num_feat);
		cluster_count = pcd_utils::cluster_points( ann_points, num_feat, membership,
												  radius_adaptation_r_max, radius_adaptation_r_min, 
												  radius_adaptation_A, radius_adaptation_K );
																		
		cluster_sizes.resize(cluster_count, 0);
		for (int i = 0; i < num_feat; ++i)
		{
			extended_keypoints[i]->cluster = membership[i];
			++cluster_sizes[membership[i]];
		}
		// annAllocPts allocates both the pointer array and a contiguous
		// coordinate block; delete[] would free only the former. Use the
		// matching ANN deallocator.
		annDeallocPts(ann_points);
	}
	if(DEBUG)
		ROS_INFO_STREAM("Clusters found = " << cluster_count);
	//*******************************************************************

	// Query the database with the full document.
	vt::Matches matches;
	db->find(full_doc, num_match, matches);	// std::string documents_map[matches[i].id]->name; float matches[i].score, 
	
	match_names.clear();
	for ( vt::Matches::iterator it = matches.begin(); it != matches.end(); ++it)
		match_names.push_back( std::make_pair( it->score, documents_map[it->id]->name ));
		
			
	if (enable_clustering)
	{
		// Seed the per-document score map with the full-document matches.
		std::map<uint32_t, float> matches_map;
		for ( vt::Matches::iterator it = matches.begin(); it != matches.end(); ++it)
		{
			matches_map[it->id] = it->score;
		}
	
		// Query the database once per cluster and merge the results.
		for (int c = 0; c < cluster_count; ++c) 
		{
			// Gather the quantized words belonging to this cluster.
			vt::Document cluster_doc;
			vt::Matches cluster_matches;
		
			for (int i = 0; i < num_feat; ++i)
				if ( extended_keypoints[i]->cluster == static_cast<unsigned int>(c) )
					cluster_doc.push_back(full_doc[i]);
		
			// Skip clusters that are too small to give a meaningful match.
			if (cluster_doc.size() < static_cast<unsigned int>(min_cluster_size))
				continue;
		
			db->find(cluster_doc, num_match, cluster_matches);
		
			if(DEBUG)
				ROS_INFO_STREAM("Cluster " << c <<  "(size = " << cluster_doc.size() << "):");

			// NOTE(review): this overwrites any previous score for the same
			// document id rather than accumulating, despite the intent hinted
			// at by the disabled update_matches_map call — confirm desired.
			for ( vt::Matches::iterator it = cluster_matches.begin(); it != cluster_matches.end(); ++it)
			{
				matches_map[it->id] = it->score;
			}		
		}
		
		// Convert the merged score map into (score, name) pairs.
		cluster_match_names.clear();
		for( std::map<uint32_t, float>::iterator iter = matches_map.begin();
		 	 iter != matches_map.end(); ++iter )
		{
			cluster_match_names.push_back( std::make_pair( iter->second, documents_map[iter->first]->name) );
		}
		
		// Sort ascending by score (pair comparison uses the score first).
		std::sort( cluster_match_names.begin(), cluster_match_names.end() );
	}

	// Release the heap-allocated keypoint helpers (previously leaked).
	for ( std::vector<KeypointExt*>::iterator it = extended_keypoints.begin();
		  it != extended_keypoints.end(); ++it )
		delete *it;
}