Example #1
bool RegionGrowing::seedRegionGrowing(
    pcl::PointCloud<PointNormalT>::Ptr src_points,
    const PointT seed_point, const PointCloud::Ptr cloud,
    PointNormal::Ptr normals) {
    if (cloud->empty() || normals->size() != cloud->size()) {
       ROS_ERROR("- Region growing failed. Incorrect inputs sizes ");
       return false;
    }
    if (isnan(seed_point.x) || isnan(seed_point.y) || isnan(seed_point.z)) {
       ROS_ERROR("- Seed Point is Nan. Skipping");
       return false;
    }

    this->kdtree_->setInputCloud(cloud);
    
    std::vector<int> neigbor_indices;
    this->getPointNeigbour<int>(neigbor_indices, seed_point, 1);
    if (neigbor_indices.empty()) {
       ROS_ERROR("- No neighbour found for the seed point. Skipping");
       return false;
    }
    int seed_index = neigbor_indices[0];
    
    const int in_dim = static_cast<int>(cloud->size());
    int *labels = reinterpret_cast<int*>(malloc(sizeof(int) * in_dim));
    
#ifdef _OPENMP
#pragma omp parallel for num_threads(this->num_threads_)
#endif
    for (int i = 0; i < in_dim; i++) {
       labels[i] = (i == seed_index) ? 1 : -1;
    }
    this->seedCorrespondingRegion(labels, cloud, normals,
                                  seed_index, seed_index);
    src_points->clear();
    for (int i = 0; i < in_dim; i++) {
       if (labels[i] != -1) {
          PointNormalT pt;
          pt.x = cloud->points[i].x;
          pt.y = cloud->points[i].y;
          pt.z = cloud->points[i].z;
          pt.r = cloud->points[i].r;
          pt.g = cloud->points[i].g;
          pt.b = cloud->points[i].b;
          pt.normal_x = normals->points[i].normal_x;
          pt.normal_y = normals->points[i].normal_y;
          pt.normal_z = normals->points[i].normal_z;
          src_points->push_back(pt);
       }
    }
    free(labels);
    return true;
}
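The label buffer above is managed with raw malloc/free. For reference, a minimal sketch (illustrative only, not the original class code) of the same initialization using std::vector, which releases its storage automatically:

std::vector<int> labels(in_dim, -1);   // every point starts unlabeled
labels[seed_index] = 1;                // mark the seed point

this->seedCorrespondingRegion(labels.data(), cloud, normals, seed_index, seed_index);
// no explicit free() is needed; the vector cleans up when it goes out of scope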
void
appendToHistory(const LastData & data)
{
  if (history_cloud->size() < max_history_size)
  {
    pcl::PointXYZRGB point;
    history_cloud->push_back(point);
  }

  toPointXYZRGB(history_cloud->points[history_cloud_index], data);
  history_cloud_index = (history_cloud_index + 1) % max_history_size;

}
Example #3
void complex_reproject_cloud(const cv::Mat& Q, cv::Mat& img_rgb, cv::Mat& img_disparity, pcl::PointCloud<pcl::PointXYZRGB>::Ptr& point_cloud_ptr)
{
    //Get the interesting parameters from Q
    double Q03, Q13, Q23, Q32, Q33;
    Q03 = Q.at<double>(0,3);
    Q13 = Q.at<double>(1,3);
    Q23 = Q.at<double>(2,3);
    Q32 = Q.at<double>(3,2);
    Q33 = Q.at<double>(3,3);

    std::cout << "Q(0,3) = "<< Q03 <<"; Q(1,3) = "<< Q13 <<"; Q(2,3) = "<< Q23 <<"; Q(3,2) = "<< Q32 <<"; Q(3,3) = "<< Q33 <<";" << std::endl;

    double px, py, pz;
    uchar pr, pg, pb;

    for (int i = 0; i < img_rgb.rows; i++)
    {
        uchar* rgb_ptr = img_rgb.ptr<uchar>(i);
        uchar* disp_ptr = img_disparity.ptr<uchar>(i);
        for (int j = 0; j < img_rgb.cols; j++)
        {
            //Get 3D coordinates
            uchar d = disp_ptr[j];
            if ( d == 0 ) continue; //Discard bad pixels
            double pw = -1.0 * static_cast<double>(d) * Q32 + Q33; 
            px = static_cast<double>(j) + Q03;
            py = static_cast<double>(i) + Q13;
            pz = Q23;

            px = px/pw;
            py = py/pw;
            pz = pz/pw;
            //Get RGB info
            pb = rgb_ptr[3*j];
            pg = rgb_ptr[3*j+1];
            pr = rgb_ptr[3*j+2];
            //Insert info into point cloud structure
            pcl::PointXYZRGB point;
            point.x = px;
            point.y = py;
            point.z = pz;
            uint32_t rgb = (static_cast<uint32_t>(pr) << 16 |
                  static_cast<uint32_t>(pg) << 8 | static_cast<uint32_t>(pb));
            point.rgb = *reinterpret_cast<float*>(&rgb);
            point_cloud_ptr->push_back (point);
        }
    }
    point_cloud_ptr->width = (int) point_cloud_ptr->points.size();
    point_cloud_ptr->height = 1;

}
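A note on the point.rgb assignment used above: PCL's legacy rgb field stores the three 8-bit channels packed into a uint32_t whose bit pattern is reinterpreted as a float. A small standalone sketch of packing and unpacking (using memcpy instead of reinterpret_cast to avoid strict-aliasing issues):

#include <cstdint>
#include <cstring>
#include <pcl/point_types.h>

// Pack 8-bit R, G, B channels into the legacy float `rgb` field of a PCL point.
inline void packRGB(pcl::PointXYZRGB &point, uint8_t r, uint8_t g, uint8_t b)
{
    uint32_t rgb = (static_cast<uint32_t>(r) << 16 |
                    static_cast<uint32_t>(g) << 8 |
                    static_cast<uint32_t>(b));
    std::memcpy(&point.rgb, &rgb, sizeof(float));
}

// Unpack the float `rgb` field back into separate channels.
inline void unpackRGB(const pcl::PointXYZRGB &point, uint8_t &r, uint8_t &g, uint8_t &b)
{
    uint32_t rgb;
    std::memcpy(&rgb, &point.rgb, sizeof(uint32_t));
    r = (rgb >> 16) & 0xFF;
    g = (rgb >> 8) & 0xFF;
    b = rgb & 0xFF;
}

Recent PCL versions also expose point.r, point.g and point.b directly (as used in Example #1), which avoids the cast entirely.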
Example #4
void DownSampler::downSamplePointsDeterministic(
    const pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr& points,
    const int target_num_points,
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr& down_sampled_points,
    const bool use_ceil)
{
  const size_t num_points = points->size();

  // Check if the points are already sufficiently down-sampled.
  if (target_num_points >= num_points * 0.8){
    *down_sampled_points = *points;
    return;
  }

  // Select every N points to reach the target number of points.
  int everyN = 0;
  if (use_ceil) {
    everyN = ceil(static_cast<double>(num_points) /
                  static_cast<double>(target_num_points));
  } else {
    everyN = static_cast<int>(static_cast<double>(num_points) /
                              static_cast<double>(target_num_points));
  }

  // Allocate space for the new points.
  down_sampled_points->reserve(target_num_points);

  // To ensure we never end up with 0 points, always keep the first point.
  down_sampled_points->push_back((*points)[0]);

  // Select every N points to reach the target number of points.
  for (size_t i = 1; i < num_points; ++i) {
    if (i % everyN == 0){
      const pcl::PointXYZRGB& pt = (*points)[i];
      down_sampled_points->push_back(pt);
    }
  }
}
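The core of the function above is stride-based (every-Nth) selection. A standalone sketch of the same pattern without the DownSampler class (the helper name and signature here are illustrative, not part of the original API):

#include <algorithm>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>

// Illustrative helper: keep roughly `target` points by taking every N-th point.
pcl::PointCloud<pcl::PointXYZRGB>::Ptr strideDownSample(
    const pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr &in, size_t target)
{
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr out(new pcl::PointCloud<pcl::PointXYZRGB>);
  if (in->empty() || target == 0)
    return out;
  const size_t every_n = std::max<size_t>(1, in->size() / target);
  for (size_t i = 0; i < in->size(); i += every_n)
    out->push_back((*in)[i]);  // always includes the first point
  return out;
}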
Example #5
void add_velodyne_point_to_pointcloud(
		pcl::PointCloud<pcl::PointXYZRGB>::Ptr pointcloud,
		unsigned char intensity, carmen_vector_3D_t point) {
	pcl::PointXYZRGB p3D;

	p3D.x = point.x;
	p3D.y = point.y;
	p3D.z = point.z;
	p3D.r = intensity;
	p3D.g = intensity;
	p3D.b = intensity;

	pointcloud->push_back(p3D);
}
void GeneralTransform::Mat2PCD_Trans(cv::Mat& cloud_mat, pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud)
{
    int size = cloud_mat.rows;
    cloud.reset(new pcl::PointCloud<pcl::PointXYZ>());
    for(int i = 0; i < size; i++)
    {
        pcl::PointXYZ point;
        point.x = cloud_mat.at<double>(i, 0);
        point.y = cloud_mat.at<double>(i, 1);
        point.z = cloud_mat.at<double>(i, 2);
        cloud->push_back(point);
    }   
}
Example #7
void filterCloudByHeight(
  const pcl::PointCloud<pcl::PointXYZRGBNormal>& cloud_in,
  pcl::PointCloud<pcl::PointXYZRGBNormal>& cloud_out,
  double min_z,
  double max_z)
{
  for (unsigned int i = 0; i < cloud_in.points.size(); ++i)
  {
    const pcl::PointXYZRGBNormal& p = cloud_in.points[i]; 
    
    if (p.z >= min_z && p.z < max_z)
      cloud_out.push_back(p); 
  }
}
Example #8
bool getFramePointCloud(int frameID, pcl::PointCloud<pcl::PointXYZRGB> &pointCloud)
{
	FrameData frameData;
	if (!frameData.loadImageRGBD(frameID)) {
		pointCloud.points.clear();
		return false;
	}

	// allocate the point cloud buffer
	pointCloud.width = NBPIXELS_WIDTH;
	pointCloud.height = NBPIXELS_HEIGHT;
	pointCloud.points.clear();
	pointCloud.points.reserve(NBPIXELS_WIDTH*NBPIXELS_HEIGHT);	// memory preallocation
	//pointCloud.points.resize(NBPIXELS_WIDTH*NBPIXELS_HEIGHT);

	//printf("Generating point cloud frame %d\n", frameID);

	float focalInv = 0.001 / Config::_FocalLength;
	unsigned int rgb;
	int depth_index = 0;
	IplImage *img = frameData.getImage();
	for (int ind_y =0; ind_y < NBPIXELS_HEIGHT; ind_y++)
	{
		for (int ind_x=0; ind_x < NBPIXELS_WIDTH; ind_x++, depth_index++)
		{
			//pcl::PointXYZRGB& pt = pointCloud(ind_x,ind_y);
			TDepthPixel depth = frameData.getDepthData()[depth_index];
			if (depth != 0)
			{
				pcl::PointXYZRGB pt;

				// locate point in meters
				pt.z = (ind_x - NBPIXELS_X_HALF) * depth * focalInv;
				pt.y = (NBPIXELS_Y_HALF - ind_y) * depth * focalInv;
				pt.x = depth * 0.001 ; // depth values are given in mm

				// reinterpret color bytes
				unsigned char b = ((uchar *)(img->imageData + ind_y*img->widthStep))[ind_x*img->nChannels + 0];
				unsigned char g = ((uchar *)(img->imageData + ind_y*img->widthStep))[ind_x*img->nChannels + 1];
				unsigned char r = ((uchar *)(img->imageData + ind_y*img->widthStep))[ind_x*img->nChannels + 2];
				rgb = (((unsigned int)r)<<16) | (((unsigned int)g)<<8) | ((unsigned int)b);
				pt.rgb = *reinterpret_cast<float*>(&rgb);
				pointCloud.push_back(pt);
			}
		}
	}

	return true;
}
void PopulatePCLPointCloud(const vector<Point3d>& pointcloud, 
						   const std::vector<cv::Vec3b>& pointcloud_RGB,
						   const Mat& img_1_orig, 
						   const Mat& img_2_orig,
						   const vector<KeyPoint>& correspImg1Pt)
	//Populate point cloud
{
	cout << "Creating point cloud...";
	double t = getTickCount();
	Mat_<Vec3b> img1_v3b,img2_v3b;
	if (!img_1_orig.empty() && !img_2_orig.empty()) {
		img1_v3b = Mat_<Vec3b>(img_1_orig);
		img2_v3b = Mat_<Vec3b>(img_2_orig);
	}
	for (unsigned int i=0; i<pointcloud.size(); i++) {
		Vec3b rgbv(255,255,255);
		if(!img_1_orig.empty()) {
			Point p = correspImg1Pt[i].pt;
			//		Point p1 = pt_set2[i];
			rgbv = img1_v3b(p.y,p.x); //(img1_v3b(p.y,p.x) + img2_v3b(p1.y,p1.x)) * 0.5;
		} else if (pointcloud_RGB.size()>0) {
			rgbv = pointcloud_RGB[i];
		}

		
		if (pointcloud[i].x != pointcloud[i].x || isnan(pointcloud[i].x) ||
			pointcloud[i].y != pointcloud[i].y || isnan(pointcloud[i].y) || 
			pointcloud[i].z != pointcloud[i].z || isnan(pointcloud[i].z) ||
			fabsf(pointcloud[i].x) > 10.0 || 
			fabsf(pointcloud[i].y) > 10.0 || 
			fabsf(pointcloud[i].z) > 10.0) {
			continue;
		}
		
		pcl::PointXYZRGB pclp;
		pclp.x = pointcloud[i].x;
		pclp.y = pointcloud[i].y;
		pclp.z = pointcloud[i].z;
		uint32_t rgb = ((uint32_t)rgbv[2] << 16 | (uint32_t)rgbv[1] << 8 | (uint32_t)rgbv[0]);
		pclp.rgb = *reinterpret_cast<float*>(&rgb);
		cloud->push_back(pclp);
	}
	cloud->width = (uint32_t) cloud->points.size();
	cloud->height = 1;
	t = ((double)getTickCount() - t)/getTickFrequency();
	cout << "Done. (" << t <<"s)"<< endl;
	pcl::PLYWriter pw;
	pw.write("pointcloud.ply",*cloud);
}
Example #10
void
Triangulation::convertCurve2PointCloud (const ON_NurbsCurve &curve, const ON_NurbsSurface &surf,
                                        pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud, unsigned resolution)
{
  // check that the knot vector is valid
  if (curve.m_knot_capacity <= 1)
  {
    printf ("[Triangulation::Convert] Warning: ON knot vector empty.\n");
    return;
  }

  cloud->clear ();

  if (resolution < 2)
    resolution = 2;

  int cp_red = curve.Order () - 2;

  // for each element of the nurbs curve
  for (int i = 1; i < curve.KnotCount () - 1 - cp_red; i++)
  {
    double dr = 1.0 / (resolution - 1);

    double xi0 = curve.m_knot[i];
    double xid = (curve.m_knot[i + 1] - xi0);

    for (unsigned j = 0; j < resolution; j++)
    {
      pcl::PointXYZRGB pt;

      double xi = (xi0 + j * dr * xid);

      double p[3];
      double pp[3];
      curve.Evaluate (xi, 0, 2, pp);
      surf.Evaluate (pp[0], pp[1], 0, 3, p);
      pt.x = p[0];
      pt.y = p[1];
      pt.z = p[2];
      pt.r = 255;
      pt.g = 0;
      pt.b = 0;

      cloud->push_back (pt);
    }

  }
}
	// Methods
	// Return an initial list of points for objects that have an appropriate size
	void findCandidates(const kut_ugv_sensor_fusion::lidar_object_listConstPtr& object_list, pcl::PointCloud<pcl::PointXY> &current_2d_scan)
	{
		current_2d_scan.clear();
		for(unsigned int i = 0; i < object_list->object.size(); i++)
		{
			// Check the size of the object
			if( object_list->object[i].width > OBJECT_MIN_WIDTH && object_list->object[i].height > OBJECT_MIN_HEIGHT
					&& object_list->object[i].width < OBJECT_MAX_WIDTH && object_list->object[i].height < OBJECT_MAX_HEIGHT)
			{
				pcl::PointXY p;
				p.x = object_list->object[i].centroid.x;
				p.y = object_list->object[i].centroid.y;
				current_2d_scan.push_back(p);
			}
		}
	}
bool insertPoints(const osg::Geometry* geometry, pcl::PointCloud<PointT>& pointcloud) {
	const osg::Vec3Array* vertex_points = (osg::Vec3Array*)geometry->getVertexArray();
	for (osg::Vec3Array::size_type i = 0; i < vertex_points->size(); ++i) {
		PointT point;
		point.x = (*vertex_points)[i][0];
		point.y = (*vertex_points)[i][1];
		point.z = (*vertex_points)[i][2];
		pointcloud.push_back(point);
	}

	pointcloud.is_dense = false;
	pointcloud.width = pointcloud.size();
	pointcloud.height = 1;

	return !pointcloud.empty();
}
 void addPhysicalPoint()
 {
   geometry_msgs::PointStamped pt_out;
   
   try
   {
     tf_listener_.transformPoint(fixed_frame, gripper_tip, pt_out);
   }
   catch (tf::TransformException& ex)
   {
     ROS_WARN("[calibrate] TF exception:\n%s", ex.what());
     return;
   }
   
   physical_points_.push_back(pcl::PointXYZ(pt_out.point.x, pt_out.point.y, pt_out.point.z));
 }
Example #14
void addToCloud(int lin_idx, pcl::PointCloud<PointXYZGD>::Ptr outcloud, int groundAdj = 0) 
{
    int num_bin_pts = ptBins[lin_idx].size();
    if(num_bin_pts < NONDRIVE_POINTS_THRESH) {
        return;
    }
    for(int k=0;k<num_bin_pts;k++) {
         PointXYZGD pt;
         pt.x = ptBins[lin_idx][k].x;
         pt.y = ptBins[lin_idx][k].y;
         pt.z = ptBins[lin_idx][k].z;
         pt.ground_adj = groundAdj;
         pt.drivable = ptBins[lin_idx][k].drivable;
         outcloud->push_back(pt); // add the point to outcloud
    }
}
Example #15
void TestClass::makePointsForTest(pcl::PointCloud<pcl::PointXYZI>::Ptr in_pcl_pc_ptr)
{
  pcl::PointXYZI point;
  point.x = 12.9892;
  point.y = -9.98058;
  point.z = 0;
  point.intensity = 4;
  in_pcl_pc_ptr->push_back(point);
  point.x = 11.8697;
  point.y = -11.123;
  point.z = -0.189377;
  point.intensity = 35;
  in_pcl_pc_ptr->push_back(point);
  point.x = 12.489;
  point.y = -9.59703;
  point.z = -2.15565;
  point.intensity = 11;
  in_pcl_pc_ptr->push_back(point);
  point.x = 12.9084;
  point.y = -10.9626;
  point.z = -2.15565;
  point.intensity = 11;
  in_pcl_pc_ptr->push_back(point);
  point.x = 13.8676;
  point.y = -9.61668;
  point.z = 0.0980819;
  point.intensity = 14;
  in_pcl_pc_ptr->push_back(point);
  point.x = 13.5673;
  point.y = -12.9834;
  point.z = 0.21862;
  point.intensity = 1;
  in_pcl_pc_ptr->push_back(point);
  point.x = 13.8213;
  point.y = -10.8529;
  point.z = -1.22883;
  point.intensity = 19;
  in_pcl_pc_ptr->push_back(point);
  point.x = 11.8957;
  point.y = -10.3189;
  point.z = -1.28556;
  point.intensity = 13;
  in_pcl_pc_ptr->push_back(point);
}
Example #16
// remove table
void remove_table(pcl::PointCloud<pcl::PointXYZRGB>& cloud, pcl::PointCloud<pcl::PointXYZRGB>& rmc)
{
    for(int i=0;i<cloud.points.size();i++){
        if(cloud.points[i].z <= (m_size.max_z + 0.01)) continue;
        int X = (100/VOXEL_SIZE)*(cloud.points[i].x-m_size.min_x);
        int Y = (100/VOXEL_SIZE)*(cloud.points[i].y-m_size.min_y);
        int Z = (100/VOXEL_SIZE)*(cloud.points[i].z-m_size.min_z);

        if(((X < 0) || (SIZE_X <= X)) || ((Y < 0) || (SIZE_Y <= Y)) || ((Z < 2.0) || (SIZE_Z <= Z)))
            continue;

        if(!(T_voxel[X][Y][Z]))
            rmc.push_back(cloud.points[i]);
    }

    return;
}
Example #17
int Conversion::convert(const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud, pcl::PointCloud<pcl::PointXYZRGB>::Ptr & colorCloud, 
                       const int & r, const int & g, const int & b){
       
       colorCloud->clear();
       for (size_t i = 0; i < cloud->points.size(); i++) {
            pcl::PointXYZRGB point;
            point.x = cloud->points[i].x;
            point.y = cloud->points[i].y;
            point.z = cloud->points[i].z;
            point.r = r;
            point.g = g;
            point.b = b;
            colorCloud->push_back(point);
       }
       return 1;
}
Example #18
int utils::parseUrgBenriXY(std::string filename, pcl::PointCloud<pcl::PointXYZ>::Ptr cloud) {
    std::ifstream ifs(filename.c_str());
    std::string elem;
    int retval = 0;
    while( std::getline(ifs, elem, ',') ) {
        unsigned long timestamp;
        float x, y, z;
        if ( sscanf(elem.c_str(), "%lu:(%f;%f;%f)", &timestamp, &x, &y, &z) != 4 ) {
            if ( sscanf(elem.c_str(), "(%f;%f;%f)", &x, &y, &z) != 3 ) {
                return -1;
            }
        }
        ++retval;
        cloud->push_back( pcl::PointXYZ(x, y, z) );
    }
    return retval;
}
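A minimal usage sketch for the parser above (the file name is a placeholder):

pcl::PointCloud<pcl::PointXYZ>::Ptr scan(new pcl::PointCloud<pcl::PointXYZ>);
int num_points = utils::parseUrgBenriXY("urg_benri_log.csv", scan);
if (num_points < 0)
    std::cerr << "Failed to parse the URG-Benri log file" << std::endl;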
Example #19
int Conversion::convert(const Eigen::MatrixXf & matrix, 
                        pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud, 
                        const int & r, const int & g, const int & b)
{
  
   // cloud->clear();
   for (int i = 0; i < matrix.rows(); i++) {
      pcl::PointXYZRGB basic_point;
      basic_point.x = matrix(i, 0);
      basic_point.y = matrix(i, 1);
      basic_point.z = matrix(i, 2);
      basic_point.r = r;
      basic_point.g = g;
      basic_point.b = b;
      cloud->push_back(basic_point);
   }
   return 1;
}
Example #20
    size_t
    Tracker::appendToPointCloud(pcl::PointCloud<pcl::PointXYZRGB>::Ptr& pointcloud, size_t starting_index, size_t max_size)
    {
      for(size_t i = 0; i < tracks_.size() && pointcloud->size() < max_size; i++)
      {
        pcl::PointXYZRGB point;
        pointcloud->push_back(point);
      }

      for(std::list<open_ptrack::tracking::Track*>::iterator it = tracks_.begin(); it != tracks_.end(); it++)
      {
        open_ptrack::tracking::Track* t = *it;
        if(t->getPointXYZRGB(pointcloud->points[starting_index]))
          starting_index = (starting_index + 1) % max_size;

      }
      return starting_index;
    }
Example #21
void setTrackerTarget(){
    initTracking();
    Eigen::Vector4f c;
    Eigen::Affine3f trans = Eigen::Affine3f::Identity ();
    pcl::compute3DCentroid<PointT>(*object_to_track,c);
    trans.translation().matrix() = Eigen::Vector3f(c[0],c[1],c[2]);
    tracker_->setTrans (trans);

    pcl::PointCloud<PointT>::Ptr tmp_pc(new pcl::PointCloud<PointT>);
    pcl::transformPointCloud<PointT> (*object_to_track, *tmp_pc, trans.inverse());

    tracker_->setReferenceCloud(tmp_pc);
    tracker_->setInputCloud(cloud);

    tracked_object_centroid->clear();
    tracked_object_centroid->push_back(PointT(c[0],c[1],c[2]));

}
void
CreateCylinderPoints (pcl::PointCloud<Point>::Ptr cloud, pcl::on_nurbs::vector_vec3d &data, unsigned npoints,
                      double alpha, double h, double r)
{
  for (unsigned i = 0; i < npoints; i++)
  {
    double da = alpha * double (rand ()) / RAND_MAX;
    double dh = h * (double (rand ()) / RAND_MAX - 0.5);

    Point p;
    p.x = r * cos (da);
    p.y = r * sin (da);
    p.z = dh;

    data.push_back (Eigen::Vector3d (p.x, p.y, p.z));
    cloud->push_back (p);
  }
}
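A minimal call sketch for the generator above, assuming Point is a typedef for pcl::PointXYZ (as in the PCL on_nurbs examples):

pcl::PointCloud<Point>::Ptr cylinder_cloud(new pcl::PointCloud<Point>);
pcl::on_nurbs::vector_vec3d cylinder_data;

// sample 200 points on a quarter cylinder (alpha = pi/2) of height 1.0 and radius 0.5
CreateCylinderPoints(cylinder_cloud, cylinder_data, 200, M_PI / 2.0, 1.0, 0.5);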
pcl::PointCloud<pcl::PointXYZRGB>::Ptr extract_clusters (pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud, pcl::PointXYZRGB point)
{
  cloud->push_back(point);  // append clicked_point to cloud
  int point_index = cloud->points.size() - 1;  // get index of clicked_point

  // Creating the KdTree object for the search method of the extraction
  pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZRGB>);
  tree->setInputCloud (cloud);

  std::vector<pcl::PointIndices> cluster_indices;
  pcl::EuclideanClusterExtraction<pcl::PointXYZRGB> ec;
  ec.setClusterTolerance (0.02); // 2cm
  ec.setMinClusterSize (100);
  ec.setMaxClusterSize (640*480);
  ec.setSearchMethod (tree);
  ec.setInputCloud (cloud);
  ec.extract (cluster_indices);

  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_cluster (new pcl::PointCloud<pcl::PointXYZRGB>);
	  
  int j = 0;
  for (std::vector<pcl::PointIndices>::const_iterator it = cluster_indices.begin (); it != cluster_indices.end (); ++it)
    {

      if(std::find(it->indices.begin(), it->indices.end(), point_index) != it->indices.end())
	{
	  // Pointer-ify the indices for the current cluster
	  pcl::PointIndicesPtr indices_ptr (new pcl::PointIndices);
	  indices_ptr->indices = it->indices; 
	  
	  // Extract the cluster points
	  pcl::ExtractIndices<pcl::PointXYZRGB> extract;
	  extract.setInputCloud (cloud);
	  extract.setIndices (indices_ptr);
	  extract.setNegative (false);
	  extract.filter (*cloud_cluster);
	  
	  std::cout << "PointCloud representing cluster #" << j << ": " << cloud_cluster->points.size () << " data points." << std::endl;
	  j++;
	}
    }

  return cloud_cluster;
}
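A minimal usage sketch for the helper above (the input cloud and the clicked point are placeholders):

pcl::PointCloud<pcl::PointXYZRGB>::Ptr scene(new pcl::PointCloud<pcl::PointXYZRGB>);
// ... fill `scene` from a sensor or a PCD file ...

pcl::PointXYZRGB clicked;
clicked.x = 0.1f; clicked.y = 0.0f; clicked.z = 0.8f;  // e.g. a point picked in a viewer

// returns the Euclidean cluster (2 cm tolerance) that contains the clicked point
pcl::PointCloud<pcl::PointXYZRGB>::Ptr cluster = extract_clusters(scene, clicked);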
Example #24
void
Triangulation::convertCurve2PointCloud (const ON_NurbsCurve &nurbs, pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud,
                                        unsigned resolution)
{
  // check that the knot vector is valid
  if (nurbs.m_knot_capacity <= 1)
  {
    printf ("[Triangulation::convert] Warning: ON knot vector empty.\n");
    return;
  }

  cloud->clear ();

  if (resolution < 2)
    resolution = 2;

  int cp_red = nurbs.Order () - 2;

  // for each element in the nurbs curve
  for (int i = 1; i < nurbs.KnotCount () - 1 - cp_red; i++)
  {
    double dr = 1.0 / (resolution - 1);
    double xi0 = nurbs.m_knot[i];
    double xid = (nurbs.m_knot[i + 1] - xi0);

    for (unsigned j = 0; j < resolution; j++)
    {
      double xi = (xi0 + j * dr * xid);
      pcl::PointXYZRGB p;

      double points[3];
      nurbs.Evaluate (xi, 0, 3, points);
      p.x = static_cast<float> (points[0]);
      p.y = static_cast<float> (points[1]);
      p.z = static_cast<float> (points[2]);
      p.r = 255;
      p.g = 0;
      p.b = 0;

      cloud->push_back (p);
    }

  }
}
Example #25
void
convert_to_point_cloud (
                            matrix <Point_XYZI_Color> & Point_Cloud,
                            pcl::PointCloud < pcl::PointXYZ> & pcl_cloud
                        )
{
    for (unsigned int z = 0; z < Point_Cloud.Get_Num_Of_Elem(); z++)
    {
        pcl::PointXYZ one_more;

        Point_XYZI_Color cur_point = Point_Cloud.data[z];

        one_more.x = cur_point.vertex.x;
        one_more.y = cur_point.vertex.y;
        one_more.z = cur_point.vertex.z;

        pcl_cloud.push_back (one_more);
    }
}
Example #26
/**
 * @brief Fills a point cloud with all the pixels in a collection of keyframes.
 */
void utils::generateCloud(const std::vector<KeyframeContainer>& frames,pcl::PointCloud<pcl::PointXYZRGB>& newCloud)
{
	newCloud.clear();
	for(uint f=0;f<frames.size();++f)
	{
		if(frames[f].projectionMatrix.rows>0 && frames[f].projectionMatrix.cols>0)
		{
			//calculate the extended projection matrix
			cv::Mat_<double> projective(4,4,0.0);
			for(int x=0;x<3;x++) for(int y=0;y<4;y++)
				projective(x,y)=frames[f].projectionMatrix(x,y);
			projective(3,3)=1;

			// all pixels in the current frame
			for(int x=0;x<frames[f].colorImg.image.cols;x++)
				for(int y=0;y<frames[f].colorImg.image.rows;y++)
				{
					float depth(frames[f].depthImg.image.at<float>(y,x));
					//if the depth data is available and the pixel is inside the mask
					if (isnan(depth)==0 && frames[f].mask(y,x)>0)
					{
						//calculate 3D point from the image data
						cv::Vec<uint8_t,3> pixel(frames[f].colorImg.image.at<cv::Vec<uint8_t,3> >(y,x));
						cv::Mat_<double> tmpPoint2D(2,1,0.0);
						tmpPoint2D(0,0)=(double)x;
						tmpPoint2D(1,0)=(double)y;
						cv::Mat_<double> tmpPointHomo3D=utils::reprojectImagePointTo3D(tmpPoint2D,frames[f].cameraCalibration,projective,depth);

						//convert to pcl format
						pcl::PointXYZRGB tmpPoint;
						tmpPoint.x=tmpPointHomo3D(0,0);
						tmpPoint.y=tmpPointHomo3D(1,0);
						tmpPoint.z=tmpPointHomo3D(2,0);
						tmpPoint.b=pixel.val[0];
						tmpPoint.g=pixel.val[1];
						tmpPoint.r=pixel.val[2];
						//add to the cloud
						newCloud.push_back(tmpPoint);
					}
				}
		}
	}
}
Example #27
void UpdateCloud(const vector<Point3d>& point_cloud, const int r, const int g, const int b, bool clear)
{
    boost::mutex::scoped_lock updateLock(update_pc_model_mutex);
    update_pc = true;
    
    if (clear)
        cloud_ptr->clear();
    
    for (int i=0; i<point_cloud.size(); i++)
    {
        Vec3b rgbv(r, g, b);
        
        // Check for invalid points:
        if (point_cloud[i].x != point_cloud[i].x ||
            point_cloud[i].y != point_cloud[i].y || 
            point_cloud[i].z != point_cloud[i].z ||
            isnan(point_cloud[i].x) ||
            isnan(point_cloud[i].y) ||
            isnan(point_cloud[i].z) ||
            point_cloud[i].x > MAX_PC_VAL ||
            point_cloud[i].y > MAX_PC_VAL ||
            point_cloud[i].z > MAX_PC_VAL)
        {
            continue;
        }
        
        pcl::PointXYZRGB point;
        
        point.x = point_cloud[i].x * X_SCALE;
        point.y = point_cloud[i].y * Y_SCALE;
        point.z = point_cloud[i].z * Z_SCALE;
        
        uint32_t rgb = ((uint32_t)rgbv[2] << 16 | (uint32_t)rgbv[1] << 8 | (uint32_t)rgbv[0]);
        point.rgb = *reinterpret_cast<float*>(&rgb);
        
        cloud_ptr->push_back(point);
    }
    
    cloud_ptr->width = (uint32_t) cloud_ptr->points.size();
    cloud_ptr->height = 1;
    
    updateLock.unlock();
}
Example #28
void
Triangulation::createVertices (pcl::PointCloud<pcl::PointXYZ>::Ptr &cloud, float x0, float y0, float z0, float width,
                               float height, unsigned segX, unsigned segY)
{
  pcl::PointXYZ v;
  float dx = width / segX;
  float dy = height / segY;

  for (unsigned j = 0; j <= segY; j++)
  {
    for (unsigned i = 0; i <= segX; i++)
    {
      v.x = x0 + i * dx;
      v.y = y0 + j * dy;
      v.z = z0;
      cloud->push_back (v);
    }
  }
}
Example #29
static void VscanCallback(const sensor_msgs::PointCloud2ConstPtr &msg)
{
    pcl::PointCloud<pcl::PointXYZ> vscan_raw;
    pcl::fromROSMsg(*msg, vscan_raw);

    _vscan.clear();
    for (const auto &v : vscan_raw) {
      if (v.x == 0 && v.y == 0)
	continue;
      if (v.z > _detection_height_top || v.z < _detection_height_bottom)
	continue;
      _vscan.push_back(v);
    }

    if (_vscan_flag == false) {
        std::cout << "vscan subscribed" << std::endl;
        _vscan_flag = true;
    }

}
Example #30
/**
 * Convert a segment point into a set of points corresponding to measurements of
 * walls in the maze. No need to take into account any rotation at this point -
 * assume that the robot is moving in a perfectly straight line. Assume (0,0) is
 * the robot location at the start of the segment, and then compute the x,y
 * coordinates of where the IR reading is measured according to the sensor
 * positions and distances received. Only consider the side facing sensors.
 * The pointcloud passed to this function will be populated with the points.
 *
 *
 * Object detections will be added to the objects vector given.
 */
void SegmentStitching::segmentPointToMeasurements(const mapping_msgs::SegmentPoint& pt,
                                                  pcl::PointCloud<pcl::PointXYZ>::Ptr measurements,
                                                  mapping_msgs::ObjectVector& objects){
    hardware_msgs::IRDists dists = pt.distances;
    hardware_msgs::Odometry odom = pt.odometry;
    
    float distances[6] = {dists.s0, dists.s1, dists.s2, dists.s3, dists.s4, dists.s5};
    // The robot base has moved relative to the start of the segment
    pcl::PointXYZ odompt(0, odom.distanceTotal, 0);
    // only look at first 4 sensors, last 2 are front facing, assume either -90 or 90 rotation
    for (size_t i = 0; i < sensors.size() - 2; i++){
        // Ignore any measurements which are too far or too close. 
        // TODO - the distance measurements above the upper limit actually give
        // information, but the points generated from this need to be handled
        // differently.
        if (distances[i] > sensorUpperLimit || distances[i] < 0){
            continue;
        }
        
        // naive way of getting point measurement - if sensor rotated -90,
        // subtract from x, otherwise add. The generated points will be rotated
        // later to match segment rotation if necessary
        if (sensors[i].rotation == -90) { // might be dangerous, rotation is a float
            distances[i] = -distances[i];
        }
        pcl::PointXYZ p = sensors[i].asPCLPoint() + odompt + pcl::PointXYZ(distances[i], 0, 0);
        // std::cout << "adding " << s.asPCLPoint() << ", " << pcl::PointXYZ(distances[i], 0, 0) << std::endl;
        // std::cout << "result: " << p << std::endl;
        measurements->push_back(p);
    }
    
    if (pt.gotObject){
        mapping_msgs::Object p;
        // the position of the object relative to the robot at the current point
        // in the segment is the current odometry augmented by the object offset.
        // assumes that camera is at (0,0,0) offset from the robot.
        p.location = PCLUtil::pclToGeomPoint(odompt + pcl::PointXYZ(pt.object.offset_x, pt.object.offset_y, 0));
        p.id = pt.object.id;
        objects.objects.push_back(p);
    }
}