Example #1
template<class FeatureType, class DataSet, class LabelType, class ExampleIndex, class NodeType>
void pcl::face_detection::FaceDetectorDataProvider<FeatureType, DataSet, LabelType, ExampleIndex, NodeType>::getDatasetAndLabels(DataSet & data_set,
    std::vector<LabelType> & label_data, std::vector<ExampleIndex> & examples)
{
  srand (static_cast<unsigned int>(time (NULL)));
  std::random_shuffle (image_files_.begin (), image_files_.end ());
  std::vector < std::string > files;
  files = image_files_;
  files.resize (std::min (num_images_, static_cast<int> (files.size ())));

  std::vector < TrainingExample > training_examples;
  std::vector<float> labels;

  int total_neg, total_pos;
  total_neg = total_pos = 0;

#if PCL_FACE_DETECTION_VIS_TRAINING_FDDP == 1
  pcl::visualization::PCLVisualizer vis("training");
#endif

  for (size_t j = 0; j < files.size (); j++)
  {

#if PCL_FACE_DETECTION_VIS_TRAINING_FDDP == 1
    vis.removeAllPointClouds();
    vis.removeAllShapes();
#endif

    if ((j % 50) == 0)
    {
      std::cout << "Loading image..." << j << std::endl;
    }
    //1. Load clouds with and without labels
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr loaded_cloud (new pcl::PointCloud<pcl::PointXYZ>);

    pcl::io::loadPCDFile (files[j], *loaded_cloud);

    pcl::PointCloud<pcl::PointXYZL>::Ptr cloud_labels (new pcl::PointCloud<pcl::PointXYZL>);
    pcl::PointCloud<pcl::PointXYZL>::Ptr loaded_cloud_labels (new pcl::PointCloud<pcl::PointXYZL>);
    pcl::io::loadPCDFile (files[j], *loaded_cloud_labels);

    //crop images to remove as many NaNs as possible and reduce the memory footprint
    {
      size_t min_col, min_row;
      size_t max_col, max_row;
      min_col = min_row = std::numeric_limits<size_t>::max ();
      max_col = max_row = 0;

      for (size_t col = 0; col < loaded_cloud->width; col++)
      {
        for (size_t row = 0; row < loaded_cloud->height; row++)
        {
          if (pcl::isFinite (loaded_cloud->at (col, row)))
          {
            if (row < min_row)
              min_row = row;

            if (row > max_row)
              max_row = row;

            if (col < min_col)
              min_col = col;

            if (col > max_col)
              max_col = col;
          }
        }
      }

      //std::cout << min_col << " - " << max_col << std::endl;
      //std::cout << min_row << " - " << max_row << std::endl;

      cropCloud<pcl::PointXYZ> (min_col, max_col, min_row, max_row, *loaded_cloud, *cloud);
      cropCloud<pcl::PointXYZL> (min_col, max_col, min_row, max_row, *loaded_cloud_labels, *cloud_labels);

      /*pcl::visualization::PCLVisualizer vis ("training");
       vis.addPointCloud(loaded_cloud);
       vis.spin();*/
    }

    //Compute integral image over depth
    boost::shared_ptr < pcl::IntegralImage2D<float, 1> > integral_image_depth;
    integral_image_depth.reset (new pcl::IntegralImage2D<float, 1> (false));

    // pcl::PointXYZ is SSE-padded to 16 bytes (4 floats), hence an element stride of 4 floats;
    // offsetting the data pointer by 2 floats makes the integral image read the z (depth) channel.
    int element_stride = sizeof(pcl::PointXYZ) / sizeof(float);
    int row_stride = element_stride * cloud->width;
    const float *data = reinterpret_cast<const float*> (&cloud->points[0]);
    integral_image_depth->setInput (data + 2, cloud->width, cloud->height, element_stride, row_stride);

    //Compute normals and normal integral images
    pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);

    if (USE_NORMALS_)
    {
      typedef typename pcl::IntegralImageNormalEstimation<pcl::PointXYZ, pcl::Normal> NormalEstimator_;
      NormalEstimator_ n3d;
      n3d.setNormalEstimationMethod (n3d.COVARIANCE_MATRIX);
      n3d.setInputCloud (cloud);
      n3d.setRadiusSearch (0.02);
      n3d.setKSearch (0);
      {
        pcl::ScopeTime t ("compute normals...");
        n3d.compute (*normals);
      }
    }

    int element_stride_normal = sizeof(pcl::Normal) / sizeof(float);
    int row_stride_normal = element_stride_normal * normals->width;
    boost::shared_ptr < pcl::IntegralImage2D<float, 1> > integral_image_normal_x;
    boost::shared_ptr < pcl::IntegralImage2D<float, 1> > integral_image_normal_y;
    boost::shared_ptr < pcl::IntegralImage2D<float, 1> > integral_image_normal_z;

    if (USE_NORMALS_)
    {
      integral_image_normal_x.reset (new pcl::IntegralImage2D<float, 1> (false));
      const float *data_nx = reinterpret_cast<const float*> (&normals->points[0]);
      integral_image_normal_x->setInput (data_nx, normals->width, normals->height, element_stride_normal, row_stride_normal);

      integral_image_normal_y.reset (new pcl::IntegralImage2D<float, 1> (false));
      const float *data_ny = reinterpret_cast<const float*> (&normals->points[0]);
      integral_image_normal_y->setInput (data_ny + 1, normals->width, normals->height, element_stride_normal, row_stride_normal);

      integral_image_normal_z.reset (new pcl::IntegralImage2D<float, 1> (false));
      const float *data_nz = reinterpret_cast<const float*> (&normals->points[0]);
      integral_image_normal_z->setInput (data_nz + 2, normals->width, normals->height, element_stride_normal, row_stride_normal);
    }

    //Using cloud labels estimate a 2D window from where to extract positive samples
    //Rest can be used to extract negative samples
    size_t min_col, min_row;
    size_t max_col, max_row;
    min_col = min_row = std::numeric_limits<size_t>::max ();
    max_col = max_row = 0;

    //std::cout << cloud_labels->width << " " << cloud_labels->height << std::endl;

    for (size_t col = 0; col < cloud_labels->width; col++)
    {
      for (size_t row = 0; row < cloud_labels->height; row++)
      {
        if (cloud_labels->at (col, row).label == 1)
        {
          if (row < min_row)
            min_row = row;

          if (row > max_row)
            max_row = row;

          if (col < min_col)
            min_col = col;

          if (col > max_col)
            max_col = col;
        }
      }
    }

#if PCL_FACE_DETECTION_VIS_TRAINING_FDDP == 1
    pcl::PointCloud<pcl::PointXYZI>::Ptr cloud_intensity(new pcl::PointCloud<pcl::PointXYZI>);
    cloud_intensity->width = cloud->width;
    cloud_intensity->height = cloud->height;
    cloud_intensity->points.resize(cloud->points.size());
    cloud_intensity->is_dense = cloud->is_dense;

    for (int jjj = 0; jjj < static_cast<int>(cloud->points.size()); jjj++)
    {
      cloud_intensity->points[jjj].getVector4fMap() = cloud->points[jjj].getVector4fMap();
      cloud_intensity->points[jjj].intensity = 0.f;
      int row, col;
      col = jjj % cloud->width;
      row = jjj / cloud->width;
      //std::cout << row << " " << col << std::endl;
      if (check_inside(col, row, min_col, max_col, min_row, max_row))
      {
        cloud_intensity->points[jjj].intensity = 1.f;
      }
    }

    pcl::visualization::PointCloudColorHandlerGenericField < pcl::PointXYZI > handler(cloud_intensity, "intensity");
    vis.addPointCloud(cloud_intensity, handler, "intensity_cloud");
#endif

    std::string pose_file (files[j]);
    boost::replace_all (pose_file, ".pcd", "_pose.txt");

    Eigen::Matrix4f pose_mat;
    pose_mat.setIdentity (4, 4);
    readMatrixFromFile (pose_file, pose_mat);
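    // pose_mat now holds the ground-truth head pose read from the "_pose.txt" file that accompanies
    // the PCD; its rotation (as Euler angles) and translation are stored below with each positive patch.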

    Eigen::Vector3f ea = pose_mat.block<3, 3> (0, 0).eulerAngles (0, 1, 2);
    Eigen::Vector3f trans_vector = Eigen::Vector3f (pose_mat (0, 3), pose_mat (1, 3), pose_mat (2, 3));

    pcl::PointXYZ center_point;
    center_point.x = trans_vector[0];
    center_point.y = trans_vector[1];
    center_point.z = trans_vector[2];

    int N_patches = patches_per_image_;
    int pos_extracted = 0;
    int neg_extracted = 0;
    int w_size_2 = static_cast<int> (w_size_ / 2);

    //************************************************
    //2nd training style, Fanelli's journal description
    //************************************************
    {

      typedef std::pair<int, int> pixelpair;
      std::vector < pixelpair > negative_p, positive_p;
      //get negative and positive indices to sample from
      for (int col = 0; col < (static_cast<int> (cloud_labels->width) - w_size_); col++)
      {
        for (int row = 0; row < (static_cast<int> (cloud_labels->height) - w_size_); row++)
        {
          if (!pcl::isFinite (cloud->at (col + w_size_2, row + w_size_2)))
            continue;

          //reject patches where the fraction of valid (finite) depth values is below 'percent'
          float percent = 0.5f;
          if (static_cast<float>(integral_image_depth->getFiniteElementsCount (col, row, w_size_, w_size_)) < (percent * static_cast<float>(w_size_ * w_size_)))
            continue;

          pixelpair pp = std::make_pair (col, row);
          if (cloud_labels->at (col + w_size_2, row + w_size_2).label == 1)
            positive_p.push_back (pp);
          else
            negative_p.push_back (pp);
        }
      }

      //shuffle and resize
      std::random_shuffle (positive_p.begin (), positive_p.end ());
      std::random_shuffle (negative_p.begin (), negative_p.end ());
      //keep at most N_patches of each; do not pad with default (0,0) pairs when fewer were found
      positive_p.resize (std::min (positive_p.size (), static_cast<size_t> (N_patches)));
      negative_p.resize (std::min (negative_p.size (), static_cast<size_t> (N_patches)));

      //extract positive patch
      for (size_t p = 0; p < positive_p.size (); p++)
      {
        int col, row;
        col = positive_p[p].first;
        row = positive_p[p].second;

        pcl::PointXYZ patch_center_point;
        patch_center_point.x = cloud->at (col + w_size_2, row + w_size_2).x;
        patch_center_point.y = cloud->at (col + w_size_2, row + w_size_2).y;
        patch_center_point.z = cloud->at (col + w_size_2, row + w_size_2).z;

        TrainingExample te;
        te.iimages_.push_back (integral_image_depth);
        if (USE_NORMALS_)
        {
          te.iimages_.push_back (integral_image_normal_x);
          te.iimages_.push_back (integral_image_normal_y);
          te.iimages_.push_back (integral_image_normal_z);
        }

        te.row_ = row;
        te.col_ = col;
        te.wsize_ = w_size_;

        te.trans_ = center_point.getVector3fMap () - patch_center_point.getVector3fMap ();
        te.trans_ *= 1000.f; //transform it to millimeters
        te.rot_ = ea;
        te.rot_ *= 57.2957795f; //transform it to degrees

        labels.push_back (1);
        pos_extracted++;
        total_pos++;

        training_examples.push_back (te);
#if PCL_FACE_DETECTION_VIS_TRAINING_FDDP == 1
        pcl::PointCloud<pcl::PointXYZI>::Ptr cloud_intensity2(new pcl::PointCloud<pcl::PointXYZI>(*cloud_intensity));
        for (int jjj = col; jjj < (col + w_size_); jjj++)
        {
          for (int iii = row; iii < (row + w_size_); iii++)
          {
            cloud_intensity2->at(jjj, iii).intensity = 2.f;
          }
        }

        vis.removeAllPointClouds();

        pcl::visualization::PointCloudColorHandlerGenericField < pcl::PointXYZI > handler(cloud_intensity2, "intensity");
        vis.addPointCloud(cloud_intensity2, handler, "cloud");
        vis.spinOnce();
        vis.spin();
        sleep(1);
#endif

      }

#if PCL_FACE_DETECTION_VIS_TRAINING_FDDP == 1
      std::cout << "Going to extract negative patches..." << std::endl;
      sleep(2);
#endif

      for (size_t p = 0; p < negative_p.size (); p++)
      {
        int col, row;
        col = negative_p[p].first;
        row = negative_p[p].second;

        TrainingExample te;
        te.iimages_.push_back (integral_image_depth);
        if (USE_NORMALS_)
        {
          te.iimages_.push_back (integral_image_normal_x);
          te.iimages_.push_back (integral_image_normal_y);
          te.iimages_.push_back (integral_image_normal_z);
        }

        te.row_ = row;
        te.col_ = col;
        te.wsize_ = w_size_;
        labels.push_back (0);
        neg_extracted++;
        total_neg++;

        training_examples.push_back (te);
#if PCL_FACE_DETECTION_VIS_TRAINING_FDDP == 1
        pcl::PointCloud<pcl::PointXYZI>::Ptr cloud_intensity2(new pcl::PointCloud<pcl::PointXYZI>(*cloud_intensity));
        for (int jjj = col; jjj < (col + w_size_); jjj++)
        {
          for (int iii = row; iii < (row + w_size_); iii++)
          {
            cloud_intensity2->at(jjj, iii).intensity = 2.f;
          }
        }

        vis.removeAllPointClouds();

        pcl::visualization::PointCloudColorHandlerGenericField < pcl::PointXYZI > handler(cloud_intensity2, "intensity");
        vis.addPointCloud(cloud_intensity2, handler, "cloud");
        vis.spinOnce();
        vis.spin();
        sleep(1);
#endif
      }

      if (neg_extracted != N_patches)
      {
        std::cout << "Extracted " << neg_extracted << " negative patches" << std::endl;
        std::cout << files[j] << std::endl;
      }

      if (pos_extracted != N_patches)
      {
        std::cout << "Extracted " << pos_extracted << " positive patches" << std::endl;
        std::cout << files[j] << std::endl;
      }
    }
  }

  std::cout << training_examples.size () << " " << labels.size () << " " << total_neg << " " << total_pos << std::endl;

  //train random forest and make persistent
  std::vector<int> example_indices;
  for (size_t i = 0; i < labels.size (); i++)
    example_indices.push_back (static_cast<int> (i));

  label_data = labels;
  data_set = training_examples;
  examples = example_indices;
}
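Each TrainingExample above stores only shared pointers to the integral images plus the patch origin (col_, row_) and window size; the actual features are evaluated later as rectangle sums over those images. Below is a minimal sketch (not part of the original sources) of how the depth integral image built in this example can be queried for the mean depth of a patch; patchMeanDepth is an illustrative helper name and the include path for pcl::IntegralImage2D is assumed from PCL's layout.

#include <limits>
#include <pcl/features/integral_image2D.h> // assumed header for pcl::IntegralImage2D

// Mean depth of a w_size x w_size patch with top-left corner (col, row), ignoring non-finite values.
float patchMeanDepth (const pcl::IntegralImage2D<float, 1> & iim, unsigned col, unsigned row, unsigned w_size)
{
  unsigned finite = iim.getFiniteElementsCount (col, row, w_size, w_size);
  if (finite == 0)
    return std::numeric_limits<float>::quiet_NaN ();

  double sum = iim.getFirstOrderSum (col, row, w_size, w_size);
  return static_cast<float> (sum / static_cast<double> (finite));
}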
Example #2
template<typename PointInT>
void run(pcl::RFFaceDetectorTrainer & fdrf, typename pcl::PointCloud<PointInT>::Ptr & scene_vis, pcl::visualization::PCLVisualizer & vis, bool heat_map,
    bool show_votes, const std::string & filename)
{
  pcl::PointCloud<pcl::PointXYZ>::Ptr scene (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::copyPointCloud (*scene_vis, *scene);

  fdrf.setInputCloud (scene);

  if (heat_map)
  {
    pcl::PointCloud<pcl::PointXYZI>::Ptr intensity_cloud (new pcl::PointCloud<pcl::PointXYZI>);
    fdrf.setFaceHeatMapCloud (intensity_cloud);
  }

  fdrf.detectFaces ();

  typedef typename pcl::traits::fieldList<PointInT>::type FieldListM;

  double rgb_m;
  bool exists_m;
  pcl::for_each_type < FieldListM > (pcl::CopyIfFieldExists<PointInT, double> (scene_vis->points[0], "rgb", exists_m, rgb_m));
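  // CopyIfFieldExists walks the compile-time field list of PointInT and, if an "rgb" field
  // exists, sets exists_m to true and copies its value into rgb_m.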

  std::cout << "Color exists:" << static_cast<int> (exists_m) << std::endl;
  if (exists_m)
  {
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr to_visualize (new pcl::PointCloud<pcl::PointXYZRGB>);
    pcl::copyPointCloud (*scene_vis, *to_visualize);

    pcl::visualization::PointCloudColorHandlerRGBField < pcl::PointXYZRGB > handler_keypoints (to_visualize);
    vis.addPointCloud < pcl::PointXYZRGB > (to_visualize, handler_keypoints, "scene_cloud");
  } else
  {
    vis.addPointCloud (scene_vis, "scene_cloud");
  }

  if (heat_map)
  {
    pcl::PointCloud<pcl::PointXYZI>::Ptr intensity_cloud (new pcl::PointCloud<pcl::PointXYZI>);
    fdrf.getFaceHeatMap (intensity_cloud);

    pcl::visualization::PointCloudColorHandlerGenericField < pcl::PointXYZI > handler_keypoints (intensity_cloud, "intensity");
    vis.addPointCloud < pcl::PointXYZI > (intensity_cloud, handler_keypoints, "heat_map");
  }

  if (show_votes)
  {
    //display votes_
    /*pcl::PointCloud<pcl::PointXYZ>::Ptr votes_cloud(new pcl::PointCloud<pcl::PointXYZ>());
     fdrf.getVotes(votes_cloud);
     pcl::visualization::PointCloudColorHandlerCustom < pcl::PointXYZ > handler_votes(votes_cloud, 255, 0, 0);
     vis.addPointCloud < pcl::PointXYZ > (votes_cloud, handler_votes, "votes_cloud");
     vis.setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 14, "votes_cloud");
     vis.setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_OPACITY, 0.5, "votes_cloud");
     vis.setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_OPACITY, 0.75, "votes_cloud");*/

    pcl::PointCloud<pcl::PointXYZI>::Ptr votes_cloud (new pcl::PointCloud<pcl::PointXYZI> ());
    fdrf.getVotes2 (votes_cloud);
    pcl::visualization::PointCloudColorHandlerGenericField < pcl::PointXYZI > handler_votes (votes_cloud, "intensity");
    vis.addPointCloud < pcl::PointXYZI > (votes_cloud, handler_votes, "votes_cloud");
    vis.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 14, "votes_cloud");
  }

  vis.addCoordinateSystem (0.1, "global");

  std::vector<Eigen::VectorXd> heads;
  fdrf.getDetectedFaces (heads);
  face_detection_apps_utils::displayHeads (heads, vis);

  if (SHOW_GT)
  {
    //check if there is ground truth data
    std::string pose_file (filename);
    boost::replace_all (pose_file, ".pcd", "_pose.txt");

    Eigen::Matrix4d pose_mat;
    pose_mat.setIdentity (4, 4);
    bool result = face_detection_apps_utils::readMatrixFromFile (pose_file, pose_mat);

    if (result)
    {
      Eigen::Vector3d ea = pose_mat.block<3, 3> (0, 0).eulerAngles (0, 1, 2);
      Eigen::Vector3d trans_vector = Eigen::Vector3d (pose_mat (0, 3), pose_mat (1, 3), pose_mat (2, 3));
      std::cout << ea << std::endl;
      std::cout << trans_vector << std::endl;

      pcl::PointXYZ center_point;
      center_point.x = trans_vector[0];
      center_point.y = trans_vector[1];
      center_point.z = trans_vector[2];
      vis.addSphere (center_point, 0.05, 255, 0, 0, "sphere");

      pcl::ModelCoefficients cylinder_coeff;
      cylinder_coeff.values.resize (7); // We need 7 values
      cylinder_coeff.values[0] = center_point.x;
      cylinder_coeff.values[1] = center_point.y;
      cylinder_coeff.values[2] = center_point.z;

      cylinder_coeff.values[3] = ea[0];
      cylinder_coeff.values[4] = ea[1];
      cylinder_coeff.values[5] = ea[2];

      Eigen::Vector3d vec = Eigen::Vector3d::UnitZ () * -1.;
      Eigen::Matrix3d matrixxx;

      matrixxx = Eigen::AngleAxisd (ea[0], Eigen::Vector3d::UnitX ()) * Eigen::AngleAxisd (ea[1], Eigen::Vector3d::UnitY ())
          * Eigen::AngleAxisd (ea[2], Eigen::Vector3d::UnitZ ());

      //matrixxx = pose_mat.block<3,3>(0,0);
      vec = matrixxx * vec;

      cylinder_coeff.values[3] = vec[0];
      cylinder_coeff.values[4] = vec[1];
      cylinder_coeff.values[5] = vec[2];

      cylinder_coeff.values[6] = 0.01;
      vis.addCylinder (cylinder_coeff, "cylinder");
    }
  }

  vis.setRepresentationToSurfaceForAllActors ();

  if (VIDEO)
  {
    vis.spinOnce (50, true);
  } else
  {
    vis.spin ();
  }

  vis.removeAllPointClouds ();
  vis.removeAllShapes ();
}
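A hedged usage sketch (not from the original sources) of how run() defined above could be driven: a scene is loaded from a PCD file, a visualizer is created, and run() is called once. The trainer configuration calls are elided because their exact setter names depend on the PCL version; the point type, include paths and file names below are assumptions.

#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <pcl/recognition/face_detection/rf_face_detector_trainer.h> // assumed header path
#include <pcl/visualization/pcl_visualizer.h>

int main ()
{
  typedef pcl::PointXYZRGB PointInT; // assumed input point type

  pcl::RFFaceDetectorTrainer fdrf;
  // ... configure fdrf here (trained forest file, window size/stride, normals, thresholds) ...

  pcl::PointCloud<PointInT>::Ptr scene_vis (new pcl::PointCloud<PointInT>);
  if (pcl::io::loadPCDFile ("scene.pcd", *scene_vis) < 0) // placeholder file name
    return -1;

  pcl::visualization::PCLVisualizer vis ("face detection");
  run<PointInT> (fdrf, scene_vis, vis, true /*heat_map*/, true /*show_votes*/, "scene.pcd");
  return 0;
}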