// Example #1: FPFH feature evaluation over a downsampled active cloud
// Compute Fast Point Feature Histograms (FPFH) for the currently active
// cloud and correlate the resulting 33-bin descriptors against the target
// feature. Uses subsample_res_ for the downsampling resolution and
// search_radius_ for the FPFH neighbourhood radius. No-op when no cloud
// is active. Side effects: updates time_ with the elapsed computation time.
void FeatureEval::fast_point_feature_histogram(){
    qDebug() << "params used: " << subsample_res_ << search_radius_;
    qDebug() << "-----------------------------------";
    boost::shared_ptr<PointCloud> _cloud = core_->cl_->active_;
    if(_cloud == nullptr)
        return;

    // Downsample; sub_idxs maps each filtered point back to its index in
    // the original cloud so correlation results can be projected back.
    pcl::PointCloud<pcl::PointXYZINormal>::Ptr filt_cloud;
    std::vector<int> sub_idxs;
    filt_cloud = downsample(_cloud, subsample_res_, sub_idxs);

    // Compute FPFH descriptors. The downsampled cloud carries its own
    // normals (PointXYZINormal), so it doubles as the normals input.
    t_.start(); qDebug() << "Timer started (FPFH)";
    pcl::FPFHEstimation<pcl::PointXYZINormal, pcl::PointXYZINormal, pcl::FPFHSignature33> fpfh;
    fpfh.setInputCloud(filt_cloud);
    fpfh.setInputNormals(filt_cloud);
    pcl::search::KdTree<pcl::PointXYZINormal>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZINormal>);
    fpfh.setSearchMethod(tree);
    fpfh.setRadiusSearch(search_radius_);
    pcl::PointCloud<pcl::FPFHSignature33>::Ptr fpfhs (new pcl::PointCloud<pcl::FPFHSignature33> ());
    fpfh.compute(*fpfhs);
    qDebug() << "FPFH in " << (time_ = t_.elapsed()) << " ms";

    // Correlate. FPFHSignature33 holds a plain `float histogram[33]`, so the
    // contiguous point buffer can be viewed as a flat float array of
    // 33 * N values.
    computeCorrelation(reinterpret_cast<float*>(fpfhs->points.data()), 33, fpfhs->points.size(), sub_idxs);
    // NOTE(review): removed dead trailing `if(!visualise_on_) return;` — it
    // was the final statement of this void function, so it had no effect.
}
// Example #2: cloud_composer FPFH estimation tool action
QList <pcl::cloud_composer::CloudComposerItem*>
pcl::cloud_composer::FPFHEstimationTool::performAction (ConstItemList input_data, PointTypeFlags::PointType)
{
  // Computes FPFH descriptors for the single cloud item in input_data and
  // returns them wrapped in an FPFHItem. Returns an empty list when the
  // input is empty, is not a cloud item, or has no normals child.
  QList <CloudComposerItem*> output;

  // Exactly one input item is expected; extras are ignored with a warning.
  if (input_data.size () == 0)
  {
    qCritical () << "Empty input in FPFH Estimation Tool!";
    return output;
  }
  if (input_data.size () > 1)
    qWarning () << "Input vector has more than one item in FPFH Estimation!";

  const CloudComposerItem* item = input_data.value (0);

  // Only cloud items can be processed.
  if (item->type () != CloudComposerItem::CLOUD_ITEM)
  {
    qCritical () << "Input item in FPFH Estimation is not a cloud!!!";
    return output;
  }

  // The cloud must already have normals computed as a child item.
  QList <CloudComposerItem*> normal_children = item->getChildren (CloudComposerItem::NORMALS_ITEM);
  if (normal_children.size () == 0)
  {
    qCritical () << "No normals item child found in this cloud item";
    return output;
  }
  qDebug () << "Found item text="<<normal_children.at(0)->text();

  const double radius = parameter_model_->getProperty("Radius").toDouble();

  // Convert the stored blob into a templated XYZ cloud.
  pcl::PCLPointCloud2::ConstPtr blob = item->data (ItemDataRole::CLOUD_BLOB).value <pcl::PCLPointCloud2::ConstPtr> ();
  pcl::PointCloud<pcl::PointXYZ>::Ptr xyz_cloud (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::fromPCLPointCloud2 (*blob, *xyz_cloud);

  // Use the first normals child when more than one exists.
  pcl::PointCloud<pcl::Normal>::ConstPtr normals = normal_children.value(0)->data(ItemDataRole::CLOUD_TEMPLATED).value <pcl::PointCloud<pcl::Normal>::ConstPtr> ();

  //////////////// THE WORK - COMPUTING FPFH ///////////////////
  // Set up the estimator with the cloud, its normals, and a KD-tree; the
  // tree is filled by the estimator from the input cloud itself.
  pcl::FPFHEstimation<pcl::PointXYZ, pcl::Normal, pcl::FPFHSignature33> estimator;
  estimator.setInputCloud (xyz_cloud);
  estimator.setInputNormals (normals);

  qDebug () << "Building KD Tree";
  pcl::search::KdTree<PointXYZ>::Ptr kd_tree (new pcl::search::KdTree<PointXYZ>);
  estimator.setSearchMethod (kd_tree);

  // IMPORTANT: this radius has to be larger than the radius used to
  // estimate the surface normals!
  estimator.setRadiusSearch (radius);

  pcl::PointCloud<pcl::FPFHSignature33>::Ptr features (new pcl::PointCloud<pcl::FPFHSignature33> ());
  qDebug () << "Computing FPFH features";
  estimator.compute (*features);
  qDebug () << "Size of computed features ="<<features->width;
  //////////////////////////////////////////////////////////////////

  FPFHItem* fpfh_item = new FPFHItem (tr("FPFH r=%1").arg(radius),features,radius);
  output.append (fpfh_item);
  return output;
}
// Classify clusters of the incoming cloud as cloth / non-cloth.
//
// Pipeline: ROS msg -> PCL cloud -> NaN removal -> z pass-through (0..1.5 m)
// -> Euclidean clustering. For each cluster, 30 random axis-aligned patches
// are cropped, FPFH descriptors are computed per patch, a windowed histogram
// average is sent to the "/predict" classifier service, and a majority vote
// over the successful calls decides cloth vs. non-cloth. The two resulting
// clouds are published on pub_ (cloth) and pub2_ (non-cloth).
//
// Fix(review): the response log previously read
// srv.response.classifications[0] even when client.call() failed, indexing
// into a (likely empty) vector — out-of-bounds access. Response inspection
// now happens only inside the success branch.
void ColorizeRandomForest::extract(const sensor_msgs::PointCloud2 pc)
{
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZRGB> ());

    std::vector<int> indices;
    pcl::fromROSMsg(pc, *cloud);
    cloud->is_dense = false;
    pcl::removeNaNFromPointCloud(*cloud, *cloud, indices);

    // Restrict to points closer than 1.5 m along z.
    pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZRGB>);
    tree->setInputCloud (cloud);
    pcl::PassThrough<pcl::PointXYZRGB> pass;
    pass.setInputCloud (cloud);
    pass.setFilterFieldName (std::string("z"));
    pass.setFilterLimits (0.0, 1.5);
    pass.filter (*cloud);

    // Euclidean clustering: 2.5 cm tolerance, 200..100000 points per cluster.
    std::vector<pcl::PointIndices> cluster_indices;
    pcl::EuclideanClusterExtraction<pcl::PointXYZRGB> ec;
    ec.setClusterTolerance (0.025);
    ec.setMinClusterSize (200);
    ec.setMaxClusterSize (100000);
    ec.setSearchMethod (tree);
    ec.setInputCloud (cloud);
    ec.extract (cluster_indices);

    pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloth_cloud_cluster (new pcl::PointCloud<pcl::PointXYZRGB>);
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr noncloth_cloud_cluster (new pcl::PointCloud<pcl::PointXYZRGB>);
    int cluster_num = 0;
    for (std::vector<pcl::PointIndices>::const_iterator it = cluster_indices.begin (); it != cluster_indices.end (); ++it)
    {
        JSK_ROS_INFO("Calculate time %d / %ld", cluster_num  , cluster_indices.size());
        pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_cluster (new pcl::PointCloud<pcl::PointXYZRGB>);
        for (std::vector<int>::const_iterator pit = it->indices.begin (); pit != it->indices.end (); ++pit)
            cloud_cluster->points.push_back (cloud->points[*pit]);
        cloud_cluster->is_dense = true;
        cluster_num ++ ;

        // Estimate normals for the cluster (2 cm radius); the xyz fields of
        // the normal cloud are then overwritten with the cluster coordinates
        // so the combined PointXYZRGBNormal cloud can be pass-through filtered.
        pcl::NormalEstimation<pcl::PointXYZRGB, pcl::PointXYZRGBNormal> ne;
        pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZRGB> ());
        pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud_normals (new pcl::PointCloud<pcl::PointXYZRGBNormal>);
        ne.setInputCloud (cloud_cluster);
        ne.setSearchMethod (tree);
        ne.setRadiusSearch (0.02);
        ne.compute (*cloud_normals);

        for (size_t cloud_index = 0; cloud_index < cloud_normals->points.size(); cloud_index++) {
            cloud_normals->points[cloud_index].x = cloud_cluster->points[cloud_index].x;
            cloud_normals->points[cloud_index].y = cloud_cluster->points[cloud_index].y;
            cloud_normals->points[cloud_index].z = cloud_cluster->points[cloud_index].z;
        }

        int result_counter=0, call_counter = 0;
        pcl::PointXYZRGBNormal max_pt,min_pt;
        pcl::getMinMax3D(*cloud_normals, min_pt, max_pt);

        // Sample 30 random rectangular patches from the cluster and classify
        // each one; a majority of "class 0" responses marks the cluster as cloth.
        for (int i = 0 ; i < 30; i++) {
            // Pick a random window origin along a randomly chosen axis.
            double lucky = 0, lucky2 = 0;
            std::string axis("x"), other_axis("y");
            int rand_xy = rand()%2;
            if (rand_xy == 0) {
                lucky = min_pt.x - pass_offset_ + (max_pt.x - min_pt.x - pass_offset_*2) * 1.0 * rand() / RAND_MAX;
                lucky2 = min_pt.y + (max_pt.y - min_pt.y) * 1.0 * rand() / RAND_MAX;
            } else {
                lucky = min_pt.y - pass_offset_ + (max_pt.y - min_pt.y - pass_offset_*2) * 1.0 * rand() / RAND_MAX;
                lucky2 = min_pt.x + (max_pt.x - min_pt.x) * 1.0 * rand() / RAND_MAX;
                axis = std::string("y");
                other_axis = std::string("x");
            }
            // Crop the patch with two chained pass-through filters.
            pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud_normals_pass (new pcl::PointCloud<pcl::PointXYZRGBNormal>);
            pcl::PassThrough<pcl::PointXYZRGBNormal> pass;
            pcl::IndicesPtr indices_x(new std::vector<int>());
            pass.setInputCloud (cloud_normals);
            pass.setFilterFieldName (axis);
            float small = std::min(lucky, lucky + pass_offset_);
            float large = std::max(lucky, lucky + pass_offset_);
            pass.setFilterLimits (small, large);
            pass.filter (*cloud_normals_pass);
            pass.setInputCloud (cloud_normals_pass);
            pass.setFilterFieldName (other_axis);
            float small2 = std::min(lucky2, lucky2 + pass_offset2_);
            float large2 = std::max(lucky2, lucky2 + pass_offset2_);
            pass.setFilterLimits (small2, large2);
            pass.filter (*cloud_normals_pass);

            std::vector<int> tmp_indices;
            pcl::removeNaNFromPointCloud(*cloud_normals_pass, *cloud_normals_pass, tmp_indices);

            if(cloud_normals_pass->points.size() == 0)
                continue;

            // FPFH on the patch; the patch cloud provides both points and normals.
            pcl::FPFHEstimationOMP<pcl::PointXYZRGBNormal, pcl::PointXYZRGBNormal, pcl::FPFHSignature33> fpfh;
            pcl::search::KdTree<pcl::PointXYZRGBNormal>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZRGBNormal>);
            pcl::PointCloud<pcl::FPFHSignature33>::Ptr fpfhs (new pcl::PointCloud<pcl::FPFHSignature33> ());
            fpfh.setNumberOfThreads(8);
            fpfh.setInputCloud (cloud_normals_pass);
            fpfh.setInputNormals (cloud_normals_pass);
            fpfh.setSearchMethod (tree);
            fpfh.setRadiusSearch (radius_search_);
            fpfh.compute (*fpfhs);

            if((int)fpfhs->points.size() == 0)
                continue;

            // Average a window of sum_num_ consecutive histograms (random
            // start) into a single 33-bin feature vector.
            // NOTE(review): when fewer than sum_num_ histograms exist the
            // average still divides by sum_num_ — presumably intentional
            // scaling; confirm against the trained classifier.
            std::vector<double> result;
            int target_id, max_value;
            if ((int)fpfhs->points.size() - sum_num_ - 1 < 1) {
                target_id = 0;
                max_value = (int)fpfhs->points.size();
            } else {
                target_id = rand() % ((int)fpfhs->points.size() - sum_num_ - 1);
                max_value = target_id + sum_num_;
            }

            bool has_nan = false;
            for(int index = 0; index < 33; index++) {
                float sum_hist_points = 0;
                for(int kndex = target_id; kndex < max_value; kndex++)
                {
                    sum_hist_points+=fpfhs->points[kndex].histogram[index];
                }
                result.push_back( sum_hist_points/sum_num_ );
            }

            // Abandon this cluster's sampling loop if the feature has NaNs.
            for(size_t x = 0; x < result.size(); x ++) {
                if(pcl_isnan(result[x]))
                    has_nan = true;
            }
            if(has_nan)
                break;

            call_counter++;
            ros::ServiceClient client = pnh_->serviceClient<ml_classifiers::ClassifyData>("/predict");
            ml_classifiers::ClassifyData srv;
            ml_classifiers::ClassDataPoint class_data_point;
            class_data_point.point = result;
            srv.request.data.push_back(class_data_point);
            // Only read the response when the call succeeded: indexing
            // classifications[0] after a failed call is out-of-bounds.
            if(client.call(srv)) {
                if (atoi(srv.response.classifications[0].c_str()) == 0)
                    result_counter += 1;
                JSK_ROS_INFO("response result : %s", srv.response.classifications[0].c_str());
            }
        }

        // Majority vote over the successful classifier calls.
        if(result_counter >= call_counter / 2) {
            JSK_ROS_INFO("Cloth %d / %d", result_counter, call_counter);
        }
        else {
            JSK_ROS_INFO("Not Cloth %d / %d", result_counter, call_counter);
        }

        for (size_t i = 0; i < cloud_cluster->points.size(); i++) {
            if(result_counter >= call_counter / 2) {
                cloth_cloud_cluster->points.push_back (cloud_cluster->points[i]);
            }
            else {
                noncloth_cloud_cluster->points.push_back (cloud_cluster->points[i]);
            }
        }
    }

    // Publish both result clouds with the original header.
    sensor_msgs::PointCloud2 cloth_pointcloud2;
    pcl::toROSMsg(*cloth_cloud_cluster, cloth_pointcloud2);
    cloth_pointcloud2.header = pc.header;
    cloth_pointcloud2.is_dense = false;
    pub_.publish(cloth_pointcloud2);

    sensor_msgs::PointCloud2 noncloth_pointcloud2;
    pcl::toROSMsg(*noncloth_cloud_cluster, noncloth_pointcloud2);
    noncloth_pointcloud2.header = pc.header;
    noncloth_pointcloud2.is_dense = false;
    pub2_.publish(noncloth_pointcloud2);
}