Example #1
void
GrabCutHelper::setInputCloud (const pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr& cloud)
{
  pcl::GrabCut<pcl::PointXYZRGB>::setInputCloud (cloud);
  // Reset clouds
  n_links_image_.reset (new pcl::PointCloud<float> (cloud->width, cloud->height, 0));
  t_links_image_.reset (new pcl::segmentation::grabcut::Image (cloud->width, cloud->height));
  gmm_image_.reset (new pcl::segmentation::grabcut::Image (cloud->width, cloud->height));
  alpha_image_.reset (new pcl::PointCloud<float> (cloud->width, cloud->height, 0));
  image_height_1_ = cloud->height-1;
  image_width_1_ = cloud->width-1;
}
Example #2
    Extractor() {
        tree.reset(new pcl::KdTreeFLANN<Point > ());
        cloud.reset(new pcl::PointCloud<Point>);
        cloud_filtered.reset(new pcl::PointCloud<Point>);
        cloud_normals.reset(new pcl::PointCloud<pcl::Normal>);
        coefficients_plane_.reset(new pcl::ModelCoefficients);
        coefficients_cylinder_.reset(new pcl::ModelCoefficients);
        inliers_plane_.reset(new pcl::PointIndices);
        inliers_cylinder_.reset(new pcl::PointIndices);

        // Filter Pass
        pass.setFilterFieldName("z");
        pass.setFilterLimits(-100, 100);

        // VoxelGrid for Downsampling
        LEAFSIZE = 0.01f;
        vg.setLeafSize(LEAFSIZE, LEAFSIZE, LEAFSIZE);

        // Any object with fewer than CUT_THRESHOLD points will be discarded.
        //CUT_THRESHOLD = (int) (LEAFSIZE * LEAFSIZE * 7000000); // 700
        CUT_THRESHOLD = 700; // for the non-filtering case

        // Clustering
        cluster_.setClusterTolerance(0.06 * UNIT);
        cluster_.setMinClusterSize(50.0);
        cluster_.setSearchMethod(clusters_tree_);

        // Normals
        ne.setSearchMethod(tree);
        ne.setKSearch(50); // 50 by default

        // plane SAC
        seg_plane.setOptimizeCoefficients(true);
        seg_plane.setModelType(pcl::SACMODEL_NORMAL_PLANE);
        seg_plane.setNormalDistanceWeight(0.1); // 0.1
        seg_plane.setMethodType(pcl::SAC_RANSAC);
        seg_plane.setMaxIterations(1000); // 10000
        seg_plane.setDistanceThreshold(0.05); // 0.03

        // cylinder SAC
        seg_cylinder.setOptimizeCoefficients(true);
        seg_cylinder.setModelType(pcl::SACMODEL_CYLINDER);
        seg_cylinder.setMethodType(pcl::SAC_RANSAC);
        seg_cylinder.setNormalDistanceWeight(0.1);
        seg_cylinder.setMaxIterations(10000);
        seg_cylinder.setDistanceThreshold(0.02); // 0.05
        seg_cylinder.setRadiusLimits(0.02, 0.07); // [0, 0.1]
    }
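The constructor above only configures the filtering, normal-estimation and segmentation objects; the class that owns them is not shown. A minimal, self-contained sketch of the call order such a pipeline typically follows in stock PCL (the point type, the input file name, and the reuse of the constructor's thresholds are assumptions):
#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <pcl/filters/passthrough.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/filters/extract_indices.h>
#include <pcl/features/normal_3d.h>
#include <pcl/search/kdtree.h>
#include <pcl/segmentation/sac_segmentation.h>

int main ()
{
  typedef pcl::PointXYZ Point;
  pcl::PointCloud<Point>::Ptr cloud (new pcl::PointCloud<Point>);
  if (pcl::io::loadPCDFile ("scene.pcd", *cloud) < 0)   // hypothetical input file
    return -1;

  // Pass-through + voxel-grid filtering (same settings as the constructor)
  pcl::PointCloud<Point>::Ptr filtered (new pcl::PointCloud<Point>);
  pcl::PassThrough<Point> pass;
  pass.setFilterFieldName ("z");
  pass.setFilterLimits (-100, 100);
  pass.setInputCloud (cloud);
  pass.filter (*filtered);

  pcl::PointCloud<Point>::Ptr down (new pcl::PointCloud<Point>);
  pcl::VoxelGrid<Point> vg;
  vg.setLeafSize (0.01f, 0.01f, 0.01f);
  vg.setInputCloud (filtered);
  vg.filter (*down);

  // Normals for the normal-based SAC models
  pcl::search::KdTree<Point>::Ptr tree (new pcl::search::KdTree<Point>);
  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
  pcl::NormalEstimation<Point, pcl::Normal> ne;
  ne.setSearchMethod (tree);
  ne.setKSearch (50);
  ne.setInputCloud (down);
  ne.compute (*normals);

  // Plane first ...
  pcl::SACSegmentationFromNormals<Point, pcl::Normal> seg;
  pcl::ModelCoefficients::Ptr coefficients (new pcl::ModelCoefficients);
  pcl::PointIndices::Ptr inliers (new pcl::PointIndices);
  seg.setOptimizeCoefficients (true);
  seg.setModelType (pcl::SACMODEL_NORMAL_PLANE);
  seg.setMethodType (pcl::SAC_RANSAC);
  seg.setNormalDistanceWeight (0.1);
  seg.setMaxIterations (1000);
  seg.setDistanceThreshold (0.05);
  seg.setInputCloud (down);
  seg.setInputNormals (normals);
  seg.segment (*inliers, *coefficients);

  // ... then a cylinder in whatever is left after removing the plane
  pcl::PointCloud<Point>::Ptr rest (new pcl::PointCloud<Point>);
  pcl::PointCloud<pcl::Normal>::Ptr rest_normals (new pcl::PointCloud<pcl::Normal>);
  pcl::ExtractIndices<Point> extract;
  extract.setInputCloud (down);
  extract.setIndices (inliers);
  extract.setNegative (true);
  extract.filter (*rest);
  pcl::ExtractIndices<pcl::Normal> extract_normals;
  extract_normals.setInputCloud (normals);
  extract_normals.setIndices (inliers);
  extract_normals.setNegative (true);
  extract_normals.filter (*rest_normals);

  seg.setModelType (pcl::SACMODEL_CYLINDER);
  seg.setMaxIterations (10000);
  seg.setDistanceThreshold (0.02);
  seg.setRadiusLimits (0.02, 0.07);
  seg.setInputCloud (rest);
  seg.setInputNormals (rest_normals);
  seg.segment (*inliers, *coefficients);
  return 0;
}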
Example #3
template<typename PointT, typename LeafContainerT, typename BranchContainerT> void
pcl::octree::OctreePointCloudSuperVoxel<PointT, LeafContainerT, BranchContainerT>::getCentroidCloud (pcl::PointCloud<PointSuperVoxel>::Ptr &cloud )
{
  bool use_existing_points = true;
  if (!cloud || cloud->size () != this->OctreeBaseT::getLeafCount ())
  {
    cloud = boost::make_shared<pcl::PointCloud<PointSuperVoxel> > ();
    cloud->reserve (this->OctreeBaseT::getLeafCount ());
    use_existing_points = false;
  }
  typename OctreeSuperVoxelT::LeafNodeIterator leaf_itr;
  leaf_itr = this->leaf_begin ();
  
  LeafContainerT* leafContainer;
  PointSuperVoxel point;
  for ( int index = 0; leaf_itr != this->leaf_end (); ++leaf_itr,++index)
  {
    leafContainer = dynamic_cast<LeafContainerT*> (*leaf_itr);
    if (!use_existing_points)
    {
      leafContainer->getCentroid (point);
      cloud->push_back (point);
    }
    else
    {
      leafContainer->getCentroid (cloud->points[index]);
    }
    
  }  
}
void ShowCloud(pcl::PointCloud<pcl::PointXYZRGB>::Ptr _cloud, const std::string& name) { 
	cloud.reset(new pcl::PointCloud<pcl::PointXYZRGB>);
	pcl::copyPointCloud(*_cloud,*cloud);
	cloud_to_show_name = name;
	show_cloud = true;
	show_cloud_A = true;
}
void ShowClouds(const vector<cv::Point3d>& pointcloud,
				const vector<cv::Vec3b>& pointcloud_RGB,
				const vector<cv::Point3d>& pointcloud1,
				const vector<cv::Vec3b>& pointcloud1_RGB) 
{
	cloud.reset(new pcl::PointCloud<pcl::PointXYZRGB>);
	cloud1.reset(new pcl::PointCloud<pcl::PointXYZRGB>);
	orig_cloud.reset(new pcl::PointCloud<pcl::PointXYZRGB>);
    
	PopulatePCLPointCloud(cloud,pointcloud,pointcloud_RGB);
	PopulatePCLPointCloud(cloud1,pointcloud1,pointcloud1_RGB);
	copyPointCloud(*cloud,*orig_cloud);
	cloud_to_show_name = "";
	show_cloud = true;
	show_cloud_A = true;
}
void trk3dFeatures::trkSeq(const PointCloudT::Ptr &cloud2,
                           const PointCloudT::Ptr &keyPts2,
                           const float params[],
                           PointCloudT::Ptr &keyPts,
                           pcl::PointCloud<SHOT1344>::Ptr &Shot1344Cloud_1,
                           std::vector<u16> &matchIdx1, std::vector<u16> &matchIdx2,
                           std::vector<f32> &matchDist)
{
    // construct descriptors
    pcl::PointCloud<SHOT1344>::Ptr Shot1344Cloud_2(new pcl::PointCloud<SHOT1344>);

    extractFeatures featureDetector;
    Shot1344Cloud_2 = featureDetector.Shot1344Descriptor(cloud2, keyPts2, params);

//    // match keypoints
//    featureDetector.crossMatching(Shot1344Cloud_1, Shot1344Cloud_2,
//                                  &matchIdx1, &matchIdx2, &matchDist);


    // find the matching from desc_1 -> desc_2
    std::cout<<"size of Shot1344Cloud_1 in trkSeq: "<<Shot1344Cloud_1->points.size()<<std::endl;
    std::cout<<"size of Shot1344Cloud_2 in trkSeq: "<<Shot1344Cloud_2->points.size()<<std::endl;
    featureDetector.matchKeyPts(Shot1344Cloud_1, Shot1344Cloud_2, &matchIdx2, &matchDist);
    std::cout<<"size of matches in trkSeq: "<<matchIdx2.size()<<std::endl;
    std::cout<<"size of matchDist in trkSeq: "<<matchDist.size()<<std::endl;
    Shot1344Cloud_1.reset(new pcl::PointCloud<SHOT1344>);
    keyPts->points.clear();
    for(size_t i=0; i<matchIdx2.size();i++)
    {
        keyPts->push_back(keyPts2->points.at(matchIdx2[i]));
        Shot1344Cloud_1->push_back(Shot1344Cloud_2->points.at(matchIdx2[i]));
    }
}
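Shot1344Cloud_1 holds colour-SHOT descriptors produced by the project's extractFeatures wrapper, which is not shown here. As orientation only, a sketch of how SHOT1344 descriptors are typically computed and matched with stock PCL; the radii, point types and helper names below are assumptions, not the wrapper's actual interface:
#include <cmath>
#include <pcl/point_types.h>
#include <pcl/features/normal_3d_omp.h>
#include <pcl/features/shot_omp.h>
#include <pcl/kdtree/kdtree_flann.h>

// cloud: full scene; keypoints: subset of it (both pcl::PointCloud<pcl::PointXYZRGB>::Ptr)
pcl::PointCloud<pcl::SHOT1344>::Ptr
computeColorShot (const pcl::PointCloud<pcl::PointXYZRGB>::Ptr &cloud,
                  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr &keypoints)
{
  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
  pcl::NormalEstimationOMP<pcl::PointXYZRGB, pcl::Normal> ne;
  ne.setRadiusSearch (0.02);            // assumed normal radius
  ne.setInputCloud (cloud);
  ne.compute (*normals);

  pcl::PointCloud<pcl::SHOT1344>::Ptr descriptors (new pcl::PointCloud<pcl::SHOT1344>);
  pcl::SHOTColorEstimationOMP<pcl::PointXYZRGB, pcl::Normal, pcl::SHOT1344> shot;
  shot.setRadiusSearch (0.05);          // assumed descriptor radius
  shot.setInputCloud (keypoints);
  shot.setSearchSurface (cloud);
  shot.setInputNormals (normals);
  shot.compute (*descriptors);
  return descriptors;
}

// Nearest-neighbour matching of two descriptor clouds, in the spirit of matchKeyPts()
void matchDescriptors (const pcl::PointCloud<pcl::SHOT1344>::Ptr &desc1,
                       const pcl::PointCloud<pcl::SHOT1344>::Ptr &desc2,
                       std::vector<int> &match_idx, std::vector<float> &match_dist)
{
  pcl::KdTreeFLANN<pcl::SHOT1344> tree;
  tree.setInputCloud (desc2);
  std::vector<int> idx (1);
  std::vector<float> dist (1);
  for (size_t i = 0; i < desc1->size (); ++i)
  {
    if (!std::isfinite (desc1->points[i].descriptor[0]))   // skip NaN descriptors
      continue;
    if (tree.nearestKSearch (desc1->points[i], 1, idx, dist) == 1)
    {
      match_idx.push_back (idx[0]);
      match_dist.push_back (dist[0]);
    }
  }
}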
void FindFloorPlaneRANSAC()
{	
	double t = getTickCount();
	cout << "RANSAC...";
	/*
	 pcl::SampleConsensusModelPlane<pcl::PointXYZRGB>::Ptr model_p (new pcl::SampleConsensusModelPlane<pcl::PointXYZRGB> (cloud));
	 */
	pcl::SampleConsensusModelNormalPlane<pcl::PointXYZRGB,pcl::Normal>::Ptr model_p(
					new pcl::SampleConsensusModelNormalPlane<pcl::PointXYZRGB,pcl::Normal>(cloud));
	//	model_p->setInputCloud(cloud);
	model_p->setInputNormals(mls_normals);
	model_p->setNormalDistanceWeight(0.75);
	
	inliers.reset(new vector<int>);
	
	ransac.reset(new pcl::RandomSampleConsensus<pcl::PointXYZRGB>(model_p));
	ransac->setDistanceThreshold (.1);
	ransac->computeModel();	
	ransac->getInliers(*inliers);
	
	t = ((double)getTickCount() - t)/getTickFrequency();
	cout << "Done. (" << t <<"s)"<< endl;
	
	ransac->getModelCoefficients(coeffs[0]);
	model_p->optimizeModelCoefficients(*inliers,coeffs[0],coeffs[1]);
	
	floorcloud.reset(new pcl::PointCloud<pcl::PointXYZ>);
	pcl::copyPointCloud(*cloud,*inliers,*floorcloud);

}
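For reference, the same RandomSampleConsensus API can be exercised stand-alone with the plain SampleConsensusModelPlane mentioned in the commented-out line above; a minimal sketch on a synthetic cloud (not the project's data):
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/common/io.h>
#include <pcl/sample_consensus/ransac.h>
#include <pcl/sample_consensus/sac_model_plane.h>

int main ()
{
  // Synthetic input: a 10x10 grid on the z = 0 plane plus one outlier
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
  for (int ix = 0; ix < 10; ++ix)
    for (int iy = 0; iy < 10; ++iy)
      cloud->push_back (pcl::PointXYZ (0.1f * ix, 0.1f * iy, 0.0f));
  cloud->push_back (pcl::PointXYZ (0.5f, 0.5f, 5.0f));

  pcl::SampleConsensusModelPlane<pcl::PointXYZ>::Ptr model (
      new pcl::SampleConsensusModelPlane<pcl::PointXYZ> (cloud));
  pcl::RandomSampleConsensus<pcl::PointXYZ> ransac (model);
  ransac.setDistanceThreshold (0.1);
  ransac.computeModel ();

  std::vector<int> inliers;
  ransac.getInliers (inliers);
  Eigen::VectorXf coefficients;
  ransac.getModelCoefficients (coefficients);        // a, b, c, d of the plane

  pcl::PointCloud<pcl::PointXYZ>::Ptr plane (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::copyPointCloud (*cloud, inliers, *plane);      // keep only the inliers
  return 0;
}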
Example #8
inline bool calcPointsPCL(Mat &depth_img, pcl::PointCloud<pcl::PointXYZ>::Ptr &cloud, float scale) {

	// TODO: don't handle only scale, but also the offset (c_x, c_y) of the given image's center relative to the original image center (for training and ROI images!)
	
	cloud.reset(new pcl::PointCloud<pcl::PointXYZ>(depth_img.cols, depth_img.rows));
	const float bad_point = 0;//std::numeric_limits<float>::quiet_NaN ();
	const float constant_x = M_PER_MM / F_X;
	const float constant_y = M_PER_MM / F_Y;
	bool is_valid = false;
	int centerX = depth_img.cols/2.0;
	int centerY = depth_img.rows/2.0;
	float x, y, z;
	int row, col = 0;

	for (row = 0, y = -centerY; row < depth_img.rows; ++row, ++y) {
		float* r_ptr = depth_img.ptr<float>(row);

		for (col = 0, x = -centerX; col < depth_img.cols; ++col, ++x) {
			pcl::PointXYZ newPoint;
			z = r_ptr[col];

			if(z) {
				newPoint.x = (x/scale)*z*constant_x;
				newPoint.y = (y/scale)*z*constant_y;
				newPoint.z = z*M_PER_MM;
				is_valid = true;
			} else {
				newPoint.x = newPoint.y = newPoint.z = bad_point;
			}
			cloud->at(col,row) = newPoint;
		}
	}

	return is_valid;
}
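A hypothetical caller of calcPointsPCL might look like the following; the camera constants M_PER_MM, F_X and F_Y are assumed to be defined elsewhere, exactly as the function itself assumes:
cv::Mat depth_img = cv::Mat::zeros (480, 640, CV_32FC1);   // depth image, same units the function expects
depth_img.at<float> (240, 320) = 1000.0f;                   // one valid depth sample
pcl::PointCloud<pcl::PointXYZ>::Ptr cloud;                  // reset inside calcPointsPCL
bool has_valid = calcPointsPCL (depth_img, cloud, 1.0f);    // scale = 1: image at original resolution
// cloud is organized (640 x 480); pixels with zero depth are set to bad_point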
void Camera::getPointCloud(pcl::PointCloud<pcl::PointXYZRGBA>::Ptr &cloud)
{
    cloud.reset(new  pcl::PointCloud<pcl::PointXYZRGBA>);
    for (unsigned int h = 0; h < _depth.rows; h++)
    {
        for (unsigned int w = 0; w < _depth.cols; w++)
        {
            unsigned short depthValue;
            depthValue = _depth.at<unsigned short>(h,w);
            //cv::Vec3f pt3D = _bgr.at<Vec3f>(h, w);
            pcl::PointXYZRGBA point;
            point.z = depthValue / 1000.0f;   // raw depth / 1000 (typically mm -> m)
            point.x = (w - C_X) * point.z * rgbFocalInvertedX;
            point.y = (h - C_Y) * point.z * rgbFocalInvertedY;
            point.r = 255;
            point.g = 255;
            point.b = 255;
            cloud->push_back(point);
            //std::cout << " point x " << point.x << "  " << point.y << " " << point.z << std::endl;
        }

    }

}
Example #10
void KinectGrabber_Rawlog2::getCurrentColouredPointCloudPtr(pcl::PointCloud<pcl::PointXYZRGBA>::Ptr& colouredPointCloudPtr)
{
    colouredPointCloudPtr=currentColouredPointCloudPtr;

    //Change MRPT coordinate system to the PCL system
    Eigen::Matrix4f rotationMatrix;
    rotationMatrix(0,0)=0;
    rotationMatrix(0,1)=-1;
    rotationMatrix(0,2)=0;
    rotationMatrix(0,3)=0;
    rotationMatrix(1,0)=0;
    rotationMatrix(1,1)=0;
    rotationMatrix(1,2)=-1;
    rotationMatrix(1,3)=0;
    rotationMatrix(2,0)=1;
    rotationMatrix(2,1)=0;
    rotationMatrix(2,2)=0;
    rotationMatrix(2,3)=0;
    rotationMatrix(3,0)=0;
    rotationMatrix(3,1)=0;
    rotationMatrix(3,2)=0;
    rotationMatrix(3,3)=1;

    colouredPointCloudPtr.reset(new pcl::PointCloud<pcl::PointXYZRGBA>());
    pcl::transformPointCloud(*currentColouredPointCloudPtr,*colouredPointCloudPtr,rotationMatrix);
}
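The element-by-element assignment above encodes the MRPT-to-PCL change of basis (PCL x = -MRPT y, PCL y = -MRPT z, PCL z = MRPT x). A behaviour-equivalent, more compact sketch of the same lines:
Eigen::Matrix4f mrpt_to_pcl = Eigen::Matrix4f::Zero ();
mrpt_to_pcl (0, 1) = -1.0f;   // PCL x = -MRPT y
mrpt_to_pcl (1, 2) = -1.0f;   // PCL y = -MRPT z
mrpt_to_pcl (2, 0) =  1.0f;   // PCL z =  MRPT x
mrpt_to_pcl (3, 3) =  1.0f;
colouredPointCloudPtr.reset (new pcl::PointCloud<pcl::PointXYZRGBA> ());
pcl::transformPointCloud (*currentColouredPointCloudPtr, *colouredPointCloudPtr, mrpt_to_pcl);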
Example #11
void FindNormalsMLS()
//find normals using MLS
{
	double t = getTickCount();
	cout << "MLS...";
	
	mls_normals.reset(new pcl::PointCloud<pcl::Normal> ());
	
	// Create a KD-Tree
	pcl::KdTree<pcl::PointXYZRGB>::Ptr tree (new pcl::KdTreeFLANN<pcl::PointXYZRGB>);
	
	// Output has the same type as the input one, it will be only smoothed
	pcl::PointCloud<pcl::PointXYZRGB> mls_points;
	
	// Init object (second point type is for the normals, even if unused)
	pcl::MovingLeastSquares<pcl::PointXYZRGB, pcl::Normal> mls;
	
	// Optionally, a pointer to a cloud can be provided, to be set by MLS
	mls.setOutputNormals (mls_normals);
	
	// Set parameters
	mls.setInputCloud (cloud);
	mls.setPolynomialFit (true);
	mls.setSearchMethod (tree);
	mls.setSearchRadius (0.16);
	
	// Reconstruct
	mls.reconstruct (mls_points);
	pcl::copyPointCloud<pcl::PointXYZRGB,pcl::PointXYZRGB>(mls_points,*cloud);
	
	t = ((double)getTickCount() - t)/getTickFrequency();
	cout << "Done. (" << t <<"s)"<< endl;
}
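This snippet uses the old MLS interface (a pcl::KdTreeFLANN passed as pcl::KdTree, plus setOutputNormals()), which later PCL releases removed. A rough sketch of the same operation against the newer API, assuming the same global cloud; the normals end up inside the output points rather than in a separate mls_normals cloud:
#include <pcl/search/kdtree.h>
#include <pcl/surface/mls.h>

pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZRGB>);
pcl::PointCloud<pcl::PointNormal> mls_points;        // smoothed points + normals

pcl::MovingLeastSquares<pcl::PointXYZRGB, pcl::PointNormal> mls;
mls.setComputeNormals (true);
mls.setInputCloud (cloud);
mls.setPolynomialOrder (2);
mls.setSearchMethod (tree);
mls.setSearchRadius (0.16);
mls.process (mls_points);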
void
pp_callback (const pcl::visualization::PointPickingEvent& event, void* cookie)
{
  int idx = event.getPointIndex ();
  if (idx == -1)
    return;

  if (!cloud)
  {
    cloud = *reinterpret_cast<pcl::PCLPointCloud2::Ptr*> (cookie);
    xyzcloud.reset (new pcl::PointCloud<pcl::PointXYZ>);
    pcl::fromPCLPointCloud2 (*cloud, *xyzcloud);
    search.setInputCloud (xyzcloud);
  }
  // Return the correct index in the cloud instead of the index on the screen
  std::vector<int> indices (1);
  std::vector<float> distances (1);

  // Because VTK/OpenGL stores data without NaN, we lose the 1-1 correspondence, so we must search for the real point
  pcl::PointXYZ picked_pt;
  event.getPoint (picked_pt.x, picked_pt.y, picked_pt.z);
  //TODO: Look into this.
  search.nearestKSearch (picked_pt, 1, indices, distances);

  PCL_INFO ("Point index picked: %d (real: %d) - [%f, %f, %f]\n", idx, indices[0], picked_pt.x, picked_pt.y, picked_pt.z);

  idx = indices[0];
  // If two points were selected, draw an arrow between them
  pcl::PointXYZ p1, p2;
  if (event.getPoints (p1.x, p1.y, p1.z, p2.x, p2.y, p2.z) && p)
  {
    std::stringstream ss;
    ss << p1 << p2;
    p->addArrow<pcl::PointXYZ, pcl::PointXYZ> (p1, p2, 1.0, 1.0, 1.0, ss.str ());
    return;
  }

  // Else, if a single point has been selected
  std::stringstream ss;
  ss << idx;
  // Get the cloud's fields
  for (size_t i = 0; i < cloud->fields.size (); ++i)
  {
    if (!isMultiDimensionalFeatureField (cloud->fields[i]))
      continue;
    PCL_INFO ("Multidimensional field found: %s\n", cloud->fields[i].name.c_str ());
#if VTK_MAJOR_VERSION==6 || (VTK_MAJOR_VERSION==5 && VTK_MINOR_VERSION>6)
    ph_global.addFeatureHistogram (*cloud, cloud->fields[i].name, idx, ss.str ());
    ph_global.renderOnce ();
#endif
  }
  if (p)
  {
    pcl::PointXYZ pos;
    event.getPoint (pos.x, pos.y, pos.z);
    p->addText3D<pcl::PointXYZ> (ss.str (), pos, 0.0005, 1.0, 1.0, 1.0, ss.str ());
  }
  
}
Example #13
 void
 SegmenterLight::computeNormals (pcl::PointCloud<pcl::PointXYZRGB>::Ptr &cloud_in,
                            pcl::PointCloud<pcl::Normal>::Ptr &normals_out)
 {
   normals_out.reset (new pcl::PointCloud<pcl::Normal>);
   surface::ZAdaptiveNormals::Parameter za_param;
   za_param.adaptive = true;
   surface::ZAdaptiveNormals nor (za_param);
   nor.setInputCloud (cloud_in);
   nor.compute ();
   nor.getNormals (normals_out);
 }
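surface::ZAdaptiveNormals comes from the SegmenterLight/v4r surface library rather than from PCL itself. For organized RGB-D clouds, a stock-PCL estimator with a similar role is the integral-image method; the parameters in this sketch are illustrative only:
#include <pcl/features/integral_image_normal.h>

void
computeNormalsIntegralImage (const pcl::PointCloud<pcl::PointXYZRGB>::Ptr &cloud_in,
                             pcl::PointCloud<pcl::Normal>::Ptr &normals_out)
{
  normals_out.reset (new pcl::PointCloud<pcl::Normal>);
  pcl::IntegralImageNormalEstimation<pcl::PointXYZRGB, pcl::Normal> ne;
  ne.setNormalEstimationMethod (ne.AVERAGE_3D_GRADIENT);
  ne.setMaxDepthChangeFactor (0.02f);
  ne.setNormalSmoothingSize (10.0f);
  ne.setInputCloud (cloud_in);          // must be an organized cloud
  ne.compute (*normals_out);
}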
Example #14
void RunVisualization(const vector<cv::Point3d>& pointcloud,
					  const std::vector<cv::Vec3b>& pointcloud_RGB,
					  const Mat& img_1_orig, 
					  const Mat& img_2_orig,
					  const vector<KeyPoint>& correspImg1Pt) {
	cloud.reset(new pcl::PointCloud<pcl::PointXYZRGB>);
	orig_cloud.reset(new pcl::PointCloud<pcl::PointXYZRGB>);
	//  pcl::io::loadPCDFile ("output.pcd", *cloud);
    
	PopulatePCLPointCloud(pointcloud,pointcloud_RGB,img_1_orig,img_2_orig,correspImg1Pt);
//	SORFilter();
	copyPointCloud(*cloud,*orig_cloud);
//	FindNormalsMLS();
//	FindFloorPlaneRANSAC();
	
	//	pcl::PointCloud<pcl::PointXYZRGB>::Ptr final (new pcl::PointCloud<pcl::PointXYZRGB>);
	//	pcl::copyPointCloud<pcl::PointXYZRGB>(*cloud, inliers, *final);
	
    pcl::visualization::CloudViewer viewer("Cloud Viewer");
    
    //blocks until the cloud is actually rendered
    viewer.showCloud(orig_cloud,"orig");
	//	viewer.showCloud(final);
    
    //use the following functions to get access to the underlying more advanced/powerful
    //PCLVisualizer
    
    //This will only get called once
    viewer.runOnVisualizationThreadOnce (viewerOneOff);
    
    //This will get called once per visualization iteration
	viewer.runOnVisualizationThread (viewerThread);
    while (!viewer.wasStopped ())
    {
		//you can also do cool processing here
		//FIXME: Note that this is running in a separate thread from viewerPsycho
		//and you should guard against race conditions yourself...
		//		user_data++;
    }
}	
void GeneralTransform::Mat2PCD_Trans(cv::Mat& cloud_mat, pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud)
{
    int size = cloud_mat.rows;
    cloud.reset(new pcl::PointCloud<pcl::PointXYZ>());
    for(int i = 0; i < size; i++)
    {
        pcl::PointXYZ point;
        point.x = cloud_mat.at<double>(i, 0);
        point.y = cloud_mat.at<double>(i, 1);
        point.z = cloud_mat.at<double>(i, 2);
        cloud->push_back(point);
    }   
}
    void createObjectCloudFiltered(pcl::PointCloud<pcl::PointXYZRGB>::Ptr & octree_cloud)
    {
        double max_angle = 70.f;
        double lateral_sigma = 0.0015f;
        bool depth_edges = true;
        float nm_integration_min_weight_ = 0.75f;

        v4r::noise_models::NguyenNoiseModel<pcl::PointXYZRGB> nm;
        std::vector< std::vector<float> > weights(keyframes_.size());
        std::vector<pcl::PointCloud<pcl::PointXYZRGB>::Ptr > ptr_clouds(keyframes_.size());
        std::vector< pcl::PointCloud<pcl::Normal>::Ptr > normals(keyframes_.size());

        nm.setLateralSigma(lateral_sigma);
        nm.setMaxAngle(max_angle);
        nm.setUseDepthEdges(depth_edges);

        if (keyframes_.size()>0)
        {

            for (unsigned i=0; i<keyframes_.size(); i++)
            {

                ptr_clouds[i].reset(new pcl::PointCloud<PointT>(keyframes_[i]));
                pcl::NormalEstimationOMP<pcl::PointXYZRGB, pcl::Normal> ne;
                ne.setRadiusSearch(0.01f);
                ne.setInputCloud (ptr_clouds[i]);
                normals[i].reset(new pcl::PointCloud<pcl::Normal>());
                ne.compute (*normals[i]);

                nm.setInputCloud(ptr_clouds[i]);
                nm.setInputNormals(normals[i]);
                nm.compute();
                nm.getWeights(weights[i]);
            }

            v4r::NMBasedCloudIntegration<pcl::PointXYZRGB> nmIntegration;
            octree_cloud.reset(new pcl::PointCloud<pcl::PointXYZRGB>);

            nmIntegration.setInputClouds(ptr_clouds);
            nmIntegration.setWeights(weights);
            nmIntegration.setTransformations(cameras_);
            nmIntegration.setMinWeight(nm_integration_min_weight_);
            nmIntegration.setInputNormals(normals);
            nmIntegration.setMinPointsPerVoxel(1);
            nmIntegration.setResolution(0.005f);
            nmIntegration.setFinalResolution(0.005f);
            nmIntegration.compute(octree_cloud);
        }
    }
Example #17
  TyErrorId processWithLock(CAS &tcas, ResultSpecification const &res_spec)
  {
    MEASURE_TIME;
    outInfo("Process begins");
    rs::SceneCas cas(tcas);
    cas.get(VIEW_THERMAL_COLOR_IMAGE, out);

    supervoxelcloud.reset(new pcl::PointCloud<pcl::PointXYZRGBA>);

    pcl::PointCloud< pcl::PointXYZRGBA>::Ptr cloudPtr(new pcl::PointCloud<pcl::PointXYZRGBA>);
    cas.get(VIEW_THERMAL_CLOUD, *cloudPtr);
    supervoxels = new pcl::SupervoxelClustering<pcl::PointXYZRGBA>(voxel_resolution, seed_resolution, use_transform);
    supervoxels->setColorImportance(color_importance);
    supervoxels->setSpatialImportance(spatial_importance);
    supervoxels->setNormalImportance(normal_importance);
    supervoxels->setInputCloud(cloudPtr);

    if(!supervoxel_clusters.empty())
    {
      supervoxel_clusters.clear();
    }
    supervoxels->extract(supervoxel_clusters);

    std::map <uint32_t, pcl::Supervoxel<pcl::PointXYZRGBA>::Ptr >::iterator it = supervoxel_clusters.begin();
    std::map <uint32_t, pcl::Supervoxel<pcl::PointXYZRGBA>::Ptr >::iterator itEnd = supervoxel_clusters.end();

    supervoxelcloud->height = 1;
    supervoxelcloud->width = supervoxel_clusters.size();
    supervoxelcloud->points.resize(supervoxelcloud->width);
    pcl::PointXYZRGBA *pPt = &supervoxelcloud->points[0];
    for(; it != itEnd; ++it, ++pPt)
    {
      pcl::PointXYZRGBA centroid;
      it->second->getCentroidPoint(centroid);
      *pPt = centroid;
    }

    //    pcl::PointCloud< pcl::PointXYZRGBA>::Ptr voxelCentroidCloud = supervoxels->getVoxelCentroidCloud();
    cas.set(VIEW_CLOUD_SUPERVOXEL, *supervoxelcloud);

    std::multimap<uint32_t, uint32_t> supervoxel_adjacency;
    supervoxels->getSupervoxelAdjacency(supervoxel_adjacency);

    return UIMA_ERR_NONE;
  }
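    The adjacency multimap computed at the end is not used further in this method. For orientation, a sketch of how it is usually walked (following the PCL supervoxel-clustering tutorial, reusing the supervoxel_clusters map from above):
    for (auto label_itr = supervoxel_adjacency.cbegin (); label_itr != supervoxel_adjacency.cend (); )
    {
      uint32_t label = label_itr->first;
      pcl::Supervoxel<pcl::PointXYZRGBA>::Ptr sv = supervoxel_clusters.at (label);

      // all neighbours of this supervoxel share the same key in the multimap
      auto range = supervoxel_adjacency.equal_range (label);
      for (auto adj_itr = range.first; adj_itr != range.second; ++adj_itr)
      {
        pcl::Supervoxel<pcl::PointXYZRGBA>::Ptr neighbour = supervoxel_clusters.at (adj_itr->second);
        // e.g. compare sv->centroid_ with neighbour->centroid_
      }
      label_itr = range.second;   // jump to the next supervoxel label
    }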
    void trackNewCloud(const sensor_msgs::PointCloud2Ptr& msg)
    {
        ros::Time start_time_stamp = msg->header.stamp;

        boost::posix_time::ptime start_time = boost::posix_time::microsec_clock::local_time ();
        //std::cout << (start_time - last_cloud_).total_nanoseconds () * 1.0e-9 << std::endl;

        float time_ms = (start_time_stamp - last_cloud_ros_time_).toSec() * 1e3;

        last_cloud_ = start_time;
        last_cloud_ros_time_ = start_time_stamp;

        pcl::ScopeTime t("trackNewCloud");
        scene_.reset(new pcl::PointCloud<PointT>);
        pcl::moveFromROSMsg (*msg, *scene_);

        v4r::DataMatrix2D<Eigen::Vector3f> kp_cloud;
        cv::Mat_<cv::Vec3b> image;

        v4r::convertCloud(*scene_, kp_cloud, image);

        int cam_idx=-1;

        bool is_ok = camtracker->track(image, kp_cloud, pose_, conf_, cam_idx);

        if(debug_image_publisher_.getNumSubscribers())
        {
            drawConfidenceBar(image, conf_);
            sensor_msgs::ImagePtr image_msg = cv_bridge::CvImage(msg->header, "bgr8", image).toImageMsg();
            debug_image_publisher_.publish(image_msg);
        }

        std::cout << time_ms << " conf:" << conf_ << std::endl;

        if(is_ok)
        {
            selectFrames(*scene_, cam_idx, pose_);
            tf::Transform transform;

            //v4r::invPose(pose, inv_pose);
            transform.setOrigin(tf::Vector3(pose_(0,3), pose_(1,3), pose_(2,3)));
            tf::Matrix3x3 R(pose_(0,0), pose_(0,1), pose_(0,2),
                            pose_(1,0), pose_(1,1), pose_(1,2),
                            pose_(2,0), pose_(2,1), pose_(2,2));
            tf::Quaternion q;
            R.getRotation(q);
            transform.setRotation(q);
            ros::Time now_sync = ros::Time::now();
            cameraTransformBroadcaster.sendTransform(tf::StampedTransform(transform, now_sync, "camera_rgb_optical_frame", "world"));

            bool publish_trajectory = true;
            if (!trajectory_marker_.points.empty())
            {
                const geometry_msgs::Point& last = trajectory_marker_.points.back();
                Eigen::Vector3f v_last(last.x, last.y, last.z);
                Eigen::Vector3f v_curr(-pose_.col(3).head<3>());
                if ((v_last - v_curr).norm() < trajectory_threshold_)
                    publish_trajectory = false;
            }

            if (publish_trajectory)
            {
                geometry_msgs::Point p;
                p.x = -pose_(0,3);
                p.y = -pose_(1,3);
                p.z = -pose_(2,3);
                std_msgs::ColorRGBA c;
                c.a = 1.0;
                c.g = conf_;
                trajectory_marker_.points.push_back(p);
                trajectory_marker_.colors.push_back(c);
                trajectory_marker_.header.stamp = msg->header.stamp;
                trajectory_publisher_.publish(trajectory_marker_);
            }
        }
        else
            std::cout << "cam tracker not ready!" << std::endl;

        /*std_msgs::Float32 conf_mesage;
      conf_mesage.data = conf;
      confidence_publisher_.publish(conf_mesage);*/
    }
    TyErrorId processWithLock(CAS &tcas, ResultSpecification const &res_spec)
    {
        MEASURE_TIME;
        // declare variables for kinect data
        outInfo("process start");
        pcl::PointCloud<PointT>::Ptr cloud_ptr(new pcl::PointCloud<PointT>);
        pcl::PointCloud<pcl::Normal>::Ptr normal_ptr(new pcl::PointCloud<pcl::Normal>);

        dispCloudPtr_.reset(new pcl::PointCloud<PointT>);

        rs::SceneCas cas(tcas);
        rs::Scene scene = cas.getScene();
        std::vector<rs::Cluster> clusters;
        std::vector<rs::Plane> planes;
        std::vector<float> plane_model;

        cas.get(VIEW_CLOUD, *cloud_ptr);
        cas.get(VIEW_NORMALS, *normal_ptr);

        scene.identifiables.filter(clusters);
        scene.annotations.filter(planes);
        if(planes.empty())
        {
            return UIMA_ERR_ANNOTATOR_MISSING_INFO;
        }

        plane_model = planes[0].model();

        if(plane_model.size() == 0)
        {
            return UIMA_ERR_ANNOTATOR_MISSING_INFO;
        }
        outInfo("Processing " << clusters.size() << " point clusters");

        pcl::ModelCoefficients::Ptr plane_coefficients(new pcl::ModelCoefficients());
        plane_coefficients->values.push_back(plane_model[0]);
        plane_coefficients->values.push_back(plane_model[1]);
        plane_coefficients->values.push_back(plane_model[2]);
        plane_coefficients->values.push_back(plane_model[3]);

        pcl::ProjectInliers<PointT> proj;
        int circles_found = 0;

        int idx = 0;
        for(auto cluster : clusters)
        {
            std::vector<rs::Geometry> geom;
            cluster.annotations.filter(geom);
            if(!geom.empty())
            {
                rs::BoundingBox3D box = geom[0].boundingBox();

                float max_edge = std::max(box.width(), std::max(box.depth(), box.height()));
                float min_edge = std::min(box.width(), std::min(box.depth(), box.height()));
                if(min_edge / max_edge <= 0.25)
                {
                    rs::Shape shape = rs::create<rs::Shape>(tcas);
                    shape.shape.set("flat");
                    shape.confidence.set(std::abs(0.25 / 2 - min_edge / max_edge) / 0.125);
                    cluster.annotations.append(shape);
                }
            }

            pcl::PointIndices::Ptr cluster_indices(new pcl::PointIndices);
            rs::ReferenceClusterPoints clusterpoints(cluster.points());
            rs::conversion::from(clusterpoints.indices(), *cluster_indices);

            pcl::PointCloud<PointT>::Ptr cluster_cloud(new pcl::PointCloud<PointT>());
            pcl::PointCloud<PointT>::Ptr cluster_projected(new pcl::PointCloud<PointT>());
            pcl::PointCloud<pcl::Normal>::Ptr cluster_normal(new pcl::PointCloud<pcl::Normal>());

            for(std::vector<int>::const_iterator pit = cluster_indices->indices.begin();
                    pit != cluster_indices->indices.end(); pit++)
            {
                cluster_cloud->points.push_back(cloud_ptr->points[*pit]);
                cluster_normal->points.push_back(normal_ptr->points[*pit]);
            }
            cluster_cloud->width = cluster_cloud->points.size();
            cluster_cloud->height = 1;
            cluster_cloud->is_dense = true;
            cluster_normal->width = cluster_normal->points.size();
            cluster_normal->height = 1;
            cluster_normal->is_dense = true;

            std::stringstream ss;

            pcl::console::TicToc tt;
            tt.tic();
            pcl::BoundaryEstimation<PointT, pcl::Normal, pcl::Boundary> be;
            be.setInputCloud(cluster_cloud);
            be.setInputNormals(cluster_normal);
            be.setSearchMethod(typename pcl::search::KdTree<PointT>::Ptr(new pcl::search::KdTree<PointT>));
            be.setAngleThreshold(DEG2RAD(70));
            be.setRadiusSearch(0.03);

            pcl::PointCloud<pcl::Boundary>::Ptr boundaries(new pcl::PointCloud<pcl::Boundary>);
            pcl::PointCloud<PointT>::Ptr boundaryCloud(new pcl::PointCloud<PointT>);
            be.compute(*boundaries);
            assert(boundaries->points.size() == cluster_cloud->points.size());
            for(int k = 0; k < boundaries->points.size(); ++k)
            {
                if((int)boundaries->points[k].boundary_point)
                {
                    boundaryCloud->points.push_back(cluster_cloud->points[k]);
                }
            }

            boundaryCloud->width = boundaryCloud->points.size();
            boundaryCloud->height = 1;

            //projecting clusters to the plane
            proj.setModelType(pcl::SACMODEL_PLANE);
            proj.setInputCloud(boundaryCloud); //cluster_cloud
            proj.setModelCoefficients(plane_coefficients);
            proj.filter(*cluster_projected);

            pcl::PointIndices::Ptr circle_inliers(new pcl::PointIndices());
            pcl::ModelCoefficients::Ptr circle_coefficients(new pcl::ModelCoefficients());
            pcl::PointCloud<PointT>::Ptr circle(new pcl::PointCloud<PointT>());

            pcl::PointIndices::Ptr line_inliers(new pcl::PointIndices());
            pcl::ModelCoefficients::Ptr line_coefficients(new pcl::ModelCoefficients());
            pcl::PointCloud<PointT>::Ptr plane(new pcl::PointCloud<PointT>());

            pcl::SACSegmentation<PointT> seg;
            //finding circles in the projected cluster
            seg.setOptimizeCoefficients(true);
            seg.setMethodType(pcl::SAC_RANSAC);
            seg.setMaxIterations(500);
            seg.setModelType(pcl::SACMODEL_CIRCLE3D);
            seg.setDistanceThreshold(0.005);
            seg.setRadiusLimits(0.025, 0.13);
            // Give as input the filtered point cloud
            seg.setInputCloud(cluster_projected);
            // Call the segmenting method
            seg.segment(*circle_inliers, *circle_coefficients);

            seg.setOptimizeCoefficients(true);
            seg.setMethodType(pcl::SAC_RANSAC);
            seg.setMaxIterations(500);
            seg.setModelType(pcl::SACMODEL_LINE);
            seg.setDistanceThreshold(0.005);
            // Give as input the filtered point cloud
            //      seg.setInputCloud(cluster_cloud);
            // Call the segmenting method
            seg.segment(*line_inliers, *line_coefficients);

            outDebug("Projected Cloud has " << cluster_projected->points.size() << " data point");
            outDebug("Number of CIRCLE inliers found " << circle_inliers->indices.size());
            outDebug("Number of line inliers found " << line_inliers->indices.size());
            pcl::ExtractIndices<PointT> extract;

            rs::Shape shapeAnnot = rs::create<rs::Shape>(tcas);

            if((float)line_inliers->indices.size() / cluster_projected->points.size() > 0.60)
            {
                for(unsigned int k = 0; k < line_inliers->indices.size(); ++k)
                {
                    cluster_projected->points[line_inliers->indices[k]].rgba = rs::common::colors[idx % rs::common::numberOfColors];
                }
                *dispCloudPtr_ += *cluster_projected;
                shapeAnnot.shape.set("box");
                shapeAnnot.confidence.set((float)line_inliers->indices.size() / cluster_projected->points.size());
                cluster.annotations.append(shapeAnnot);
            }

            else if((float)circle_inliers->indices.size() / cluster_projected->points.size() > 0.3)
            {

                circles_found++;
                outInfo("Circle Model Coefficients:");
                outInfo("x= " << circle_coefficients->values[0] << " y= " << circle_coefficients->values[1] << " z= " << circle_coefficients->values[1] << " R= "
                        << circle_coefficients->values[3]);
                float cx = circle_coefficients->values[0];
                float cy = circle_coefficients->values[1];
                float cz = circle_coefficients->values[2];

                float r = circle_coefficients->values[3];

                float nx = circle_coefficients->values[4];
                float ny = circle_coefficients->values[5];
                float nz = circle_coefficients->values[6];

                // build an orthonormal basis (v1, v2, v3) with v3 along the circle normal
                float s = 1.0f / std::sqrt(nx * nx + ny * ny + nz * nz);
                float v3x = s * nx;
                float v3y = s * ny;
                float v3z = s * nz;

                s = 1.0f / std::sqrt(v3x * v3x + v3z * v3z);
                float v1x = s * v3z;
                float v1y = 0.0f;
                float v1z = s * -v3x;

                float v2x = v3y * v1z - v3z * v1y;
                float v2y = v3z * v1x - v3x * v1z;
                float v2z = v3x * v1y - v3y * v1x;

                for(int theta_deg = 0; theta_deg < 360; theta_deg += 5)
                {
                    float theta = theta_deg * 0.017453293f; // degrees to radians
                    pcl::PointXYZRGBA point;
                    point.rgba = rs::common::colors[idx % rs::common::numberOfColors];

                    point.x = cx + r * (v1x * cos(theta) + v2x * sin(theta));
                    point.y = cy + r * (v1y * cos(theta) + v2y * sin(theta));
                    point.z = cz + r * (v1z * cos(theta) + v2z * sin(theta));
                    dispCloudPtr_->points.push_back(point);
                }

                for(unsigned int k = 0; k < circle_inliers->indices.size(); ++k)
                {
                    cluster_projected->points[circle_inliers->indices[k]].rgba = rs::common::colors[idx % rs::common::numberOfColors];
                }
                *dispCloudPtr_ += *cluster_projected;

                extract.setInputCloud(cluster_projected);
                extract.setIndices(circle_inliers);
                extract.setNegative(false);
                extract.filter(*circle);
                shapeAnnot.shape.set("round");
                shapeAnnot.confidence.set((float)circle_inliers->indices.size() / cluster_projected->points.size());
                cluster.annotations.append(shapeAnnot);
            }


            idx++;
        }
        return UIMA_ERR_NONE;
    }
Example #20
/* ---[ */
int
main (int argc, char** argv)
{
  srand (static_cast<unsigned int> (time (0)));

  print_info ("The viewer window provides interactive commands; for help, press 'h' or 'H' from within the window.\n");

  if (argc < 2)
  {
    printHelp (argc, argv);
    return (-1);
  }

  bool debug = false;
  pcl::console::parse_argument (argc, argv, "-debug", debug);
  if (debug)
    pcl::console::setVerbosityLevel (pcl::console::L_DEBUG);

  bool cam = pcl::console::find_switch (argc, argv, "-cam");

  // Parse the command line arguments for .pcd files
  std::vector<int> p_file_indices   = pcl::console::parse_file_extension_argument (argc, argv, ".pcd");
  std::vector<int> vtk_file_indices = pcl::console::parse_file_extension_argument (argc, argv, ".vtk");

  if (p_file_indices.size () == 0 && vtk_file_indices.size () == 0)
  {
    print_error ("No .PCD or .VTK file given. Nothing to visualize.\n");
    return (-1);
  }

  // Command line parsing
  double bcolor[3] = {0, 0, 0};
  pcl::console::parse_3x_arguments (argc, argv, "-bc", bcolor[0], bcolor[1], bcolor[2]);

  std::vector<double> fcolor_r, fcolor_b, fcolor_g;
  bool fcolorparam = pcl::console::parse_multiple_3x_arguments (argc, argv, "-fc", fcolor_r, fcolor_g, fcolor_b);

  std::vector<int> psize;
  pcl::console::parse_multiple_arguments (argc, argv, "-ps", psize);

  std::vector<double> opaque;
  pcl::console::parse_multiple_arguments (argc, argv, "-opaque", opaque);

  std::vector<std::string> shadings;
  pcl::console::parse_multiple_arguments (argc, argv, "-shading", shadings);

  int mview = 0;
  pcl::console::parse_argument (argc, argv, "-multiview", mview);

  int normals = 0;
  pcl::console::parse_argument (argc, argv, "-normals", normals);
  float normals_scale = NORMALS_SCALE;
  pcl::console::parse_argument (argc, argv, "-normals_scale", normals_scale);

  int pc = 0;
  pcl::console::parse_argument (argc, argv, "-pc", pc);
  float pc_scale = PC_SCALE;
  pcl::console::parse_argument (argc, argv, "-pc_scale", pc_scale);

  bool use_vbos = false;
  pcl::console::parse_argument (argc, argv, "-vbo_rendering", use_vbos);
  if (use_vbos) 
    print_highlight ("Vertex Buffer Object (VBO) visualization enabled.\n");

  bool use_pp   = pcl::console::find_switch (argc, argv, "-use_point_picking");
  if (use_pp) 
    print_highlight ("Point picking enabled.\n");

  // If VBOs are not enabled, then try to use immediate rendering
  bool use_immediate_rendering = false;
  if (!use_vbos)
  {
    pcl::console::parse_argument (argc, argv, "-immediate_rendering", use_immediate_rendering);
    if (use_immediate_rendering) 
      print_highlight ("Using immediate mode rendering.\n");
  }

  // Multiview enabled?
  int y_s = 0, x_s = 0;
  double x_step = 0, y_step = 0;
  if (mview)
  {
    print_highlight ("Multi-viewport rendering enabled.\n");

    y_s = static_cast<int>(floor (sqrt (static_cast<float>(p_file_indices.size () + vtk_file_indices.size ()))));
    x_s = y_s + static_cast<int>(ceil (double (p_file_indices.size () + vtk_file_indices.size ()) / double (y_s) - y_s));

    if (p_file_indices.size () != 0)
    {
      print_highlight ("Preparing to load "); print_value ("%d", p_file_indices.size ()); print_info (" pcd files.\n");
    }

    if (vtk_file_indices.size () != 0)
    {
      print_highlight ("Preparing to load "); print_value ("%d", vtk_file_indices.size ()); print_info (" vtk files.\n");
    }

    x_step = static_cast<double>(1.0 / static_cast<double>(x_s));
    y_step = static_cast<double>(1.0 / static_cast<double>(y_s));
    print_value ("%d", x_s);    print_info ("x"); print_value ("%d", y_s);
    print_info (" / ");      print_value ("%f", x_step); print_info ("x"); print_value ("%f", y_step);
    print_info (")\n");
  }

  // Fix invalid multiple arguments
  if (psize.size () != p_file_indices.size () && psize.size () > 0)
    for (size_t i = psize.size (); i < p_file_indices.size (); ++i)
      psize.push_back (1);
  if (opaque.size () != p_file_indices.size () && opaque.size () > 0)
    for (size_t i = opaque.size (); i < p_file_indices.size (); ++i)
      opaque.push_back (1.0);

  if (shadings.size () != p_file_indices.size () && shadings.size () > 0)
    for (size_t i = shadings.size (); i < p_file_indices.size (); ++i)
      shadings.push_back ("flat");

  // Create the PCLVisualizer object
#if VTK_MAJOR_VERSION==6 || (VTK_MAJOR_VERSION==5 && VTK_MINOR_VERSION>6)
  boost::shared_ptr<pcl::visualization::PCLPlotter> ph;
#endif  
  // Using min_p, max_p to set the global Y min/max range for the histogram
  float min_p = FLT_MAX; float max_p = -FLT_MAX;

  int k = 0, l = 0, viewport = 0;
  // Load the data files
  pcl::PCDReader pcd;
  pcl::console::TicToc tt;
  ColorHandlerPtr color_handler;
  GeometryHandlerPtr geometry_handler;

  // Go through VTK files
  for (size_t i = 0; i < vtk_file_indices.size (); ++i)
  {
    // Load file
    tt.tic ();
    print_highlight (stderr, "Loading "); print_value (stderr, "%s ", argv[vtk_file_indices.at (i)]);
    vtkPolyDataReader* reader = vtkPolyDataReader::New ();
    reader->SetFileName (argv[vtk_file_indices.at (i)]);
    reader->Update ();
    vtkSmartPointer<vtkPolyData> polydata = reader->GetOutput ();
    if (!polydata)
      return (-1);
    print_info ("[done, "); print_value ("%g", tt.toc ()); print_info (" ms : "); print_value ("%d", polydata->GetNumberOfPoints ()); print_info (" points]\n");

    // Create the PCLVisualizer object here on the first encountered XYZ file
    if (!p)
      p.reset (new pcl::visualization::PCLVisualizer (argc, argv, "PCD viewer"));

    // Multiview enabled?
    if (mview)
    {
      p->createViewPort (k * x_step, l * y_step, (k + 1) * x_step, (l + 1) * y_step, viewport);
      k++;
      if (k >= x_s)
      {
        k = 0;
        l++;
      }
    }

    // Add as actor
    std::stringstream cloud_name;
    cloud_name << "vtk-" << argv[vtk_file_indices.at (i)] << "-" << i;
    p->addModelFromPolyData (polydata, cloud_name.str (), viewport);

    // Change the shape rendered color
    if (fcolorparam && fcolor_r.size () > i && fcolor_g.size () > i && fcolor_b.size () > i)
      p->setShapeRenderingProperties (pcl::visualization::PCL_VISUALIZER_COLOR, fcolor_r[i], fcolor_g[i], fcolor_b[i], cloud_name.str ());

    // Change the shape rendered point size
    if (psize.size () > 0)
      p->setShapeRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, psize.at (i), cloud_name.str ());

    // Change the shape rendered opacity
    if (opaque.size () > 0)
      p->setShapeRenderingProperties (pcl::visualization::PCL_VISUALIZER_OPACITY, opaque.at (i), cloud_name.str ());

    // Change the shape rendered shading
    if (shadings.size () > 0)
    {
      if (shadings[i] == "flat")
      {
        print_highlight (stderr, "Setting shading property for %s to FLAT.\n", argv[vtk_file_indices.at (i)]);
        p->setShapeRenderingProperties (pcl::visualization::PCL_VISUALIZER_SHADING, pcl::visualization::PCL_VISUALIZER_SHADING_FLAT, cloud_name.str ());
      }
      else if (shadings[i] == "gouraud")
      {
        print_highlight (stderr, "Setting shading property for %s to GOURAUD.\n", argv[vtk_file_indices.at (i)]);
        p->setShapeRenderingProperties (pcl::visualization::PCL_VISUALIZER_SHADING, pcl::visualization::PCL_VISUALIZER_SHADING_GOURAUD, cloud_name.str ());
      }
      else if (shadings[i] == "phong")
      {
        print_highlight (stderr, "Setting shading property for %s to PHONG.\n", argv[vtk_file_indices.at (i)]);
        p->setShapeRenderingProperties (pcl::visualization::PCL_VISUALIZER_SHADING, pcl::visualization::PCL_VISUALIZER_SHADING_PHONG, cloud_name.str ());
      }
    }
  }

  pcl::PCLPointCloud2::Ptr cloud;
  // Go through PCD files
  for (size_t i = 0; i < p_file_indices.size (); ++i)
  {
    tt.tic ();
    cloud.reset (new pcl::PCLPointCloud2);
    Eigen::Vector4f origin;
    Eigen::Quaternionf orientation;
    int version;

    print_highlight (stderr, "Loading "); print_value (stderr, "%s ", argv[p_file_indices.at (i)]);

    if (pcd.read (argv[p_file_indices.at (i)], *cloud, origin, orientation, version) < 0)
      return (-1);

    std::stringstream cloud_name;

    // ---[ Special check for 1-point multi-dimension histograms
    if (cloud->fields.size () == 1 && isMultiDimensionalFeatureField (cloud->fields[0]))
    {
      cloud_name << argv[p_file_indices.at (i)];

#if VTK_MAJOR_VERSION==6 || (VTK_MAJOR_VERSION==5 && VTK_MINOR_VERSION>6)
      if (!ph)
        ph.reset (new pcl::visualization::PCLPlotter);
#endif

      pcl::getMinMax (*cloud, 0, cloud->fields[0].name, min_p, max_p);
#if VTK_MAJOR_VERSION==6 || (VTK_MAJOR_VERSION==5 && VTK_MINOR_VERSION>6)
      ph->addFeatureHistogram (*cloud, cloud->fields[0].name, cloud_name.str ());
#endif
      print_info ("[done, "); print_value ("%g", tt.toc ()); print_info (" ms : "); print_value ("%d", cloud->fields[0].count); print_info (" points]\n");
      continue;
    }

    // ---[ Special check for 2D images
    if (cloud->fields.size () == 1 && isOnly2DImage (cloud->fields[0]))
    {
      print_info ("[done, "); print_value ("%g", tt.toc ()); print_info (" ms : "); print_value ("%u", cloud->width * cloud->height); print_info (" points]\n");
      print_info ("Available dimensions: "); print_value ("%s\n", pcl::getFieldsList (*cloud).c_str ());
      
      std::stringstream name;
      name << "PCD Viewer :: " << argv[p_file_indices.at (i)];
      pcl::visualization::ImageViewer::Ptr img (new pcl::visualization::ImageViewer (name.str ()));
      pcl::PointCloud<pcl::RGB> rgb_cloud;
      pcl::fromPCLPointCloud2 (*cloud, rgb_cloud);

      img->addRGBImage (rgb_cloud);
      imgs.push_back (img);

      continue;
    }

    cloud_name << argv[p_file_indices.at (i)] << "-" << i;

    // Create the PCLVisualizer object here on the first encountered XYZ file
    if (!p)
    {
      p.reset (new pcl::visualization::PCLVisualizer (argc, argv, "PCD viewer"));
      if (use_pp)   // Only enable the point picking callback if the command line parameter is enabled
        p->registerPointPickingCallback (&pp_callback, static_cast<void*> (&cloud));

      // Set whether or not we should be using the vtkVertexBufferObjectMapper
      p->setUseVbos (use_vbos);

      if (!cam)
      {
        Eigen::Matrix3f rotation;
        rotation = orientation;
        p->setCameraPosition (origin [0]                  , origin [1]                  , origin [2],
                              origin [0] + rotation (0, 2), origin [1] + rotation (1, 2), origin [2] + rotation (2, 2),
                                           rotation (0, 1),              rotation (1, 1),              rotation (2, 1));
      }
    }

    // Multiview enabled?
    if (mview)
    {
      p->createViewPort (k * x_step, l * y_step, (k + 1) * x_step, (l + 1) * y_step, viewport);
      k++;
      if (k >= x_s)
      {
        k = 0;
        l++;
      }
    }

    if (cloud->width * cloud->height == 0)
    {
      print_error ("[error: no points found!]\n");
      return (-1);
    }

    // If no color was given, get random colors
    if (fcolorparam)
    {
      if (fcolor_r.size () > i && fcolor_g.size () > i && fcolor_b.size () > i)
        color_handler.reset (new pcl::visualization::PointCloudColorHandlerCustom<pcl::PCLPointCloud2> (cloud, fcolor_r[i], fcolor_g[i], fcolor_b[i]));
      else
        color_handler.reset (new pcl::visualization::PointCloudColorHandlerRandom<pcl::PCLPointCloud2> (cloud));
    }
    else
      color_handler.reset (new pcl::visualization::PointCloudColorHandlerRandom<pcl::PCLPointCloud2> (cloud));

    // Add the dataset with a XYZ and a random handler
    geometry_handler.reset (new pcl::visualization::PointCloudGeometryHandlerXYZ<pcl::PCLPointCloud2> (cloud));
    // Add the cloud to the renderer
    //p->addPointCloud<pcl::PointXYZ> (cloud_xyz, geometry_handler, color_handler, cloud_name.str (), viewport);
    p->addPointCloud (cloud, geometry_handler, color_handler, origin, orientation, cloud_name.str (), viewport);


    if (mview)
      // Add text with file name
      p->addText (argv[p_file_indices.at (i)], 5, 5, 10, 1.0, 1.0, 1.0, "text_" + std::string (argv[p_file_indices.at (i)]), viewport);

    // If normal lines are enabled
    if (normals != 0)
    {
      int normal_idx = pcl::getFieldIndex (*cloud, "normal_x");
      if (normal_idx == -1)
      {
        print_error ("Normal information requested but not available.\n");
        continue;
        //return (-1);
      }
      //
      // Convert from blob to pcl::PointCloud
      pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_xyz (new pcl::PointCloud<pcl::PointXYZ>);
      pcl::fromPCLPointCloud2 (*cloud, *cloud_xyz);
      cloud_xyz->sensor_origin_ = origin;
      cloud_xyz->sensor_orientation_ = orientation;

      pcl::PointCloud<pcl::Normal>::Ptr cloud_normals (new pcl::PointCloud<pcl::Normal>);
      pcl::fromPCLPointCloud2 (*cloud, *cloud_normals);
      std::stringstream cloud_name_normals;
      cloud_name_normals << argv[p_file_indices.at (i)] << "-" << i << "-normals";
      p->addPointCloudNormals<pcl::PointXYZ, pcl::Normal> (cloud_xyz, cloud_normals, normals, normals_scale, cloud_name_normals.str (), viewport);
    }

    // If principal curvature lines are enabled
    if (pc != 0)
    {
      if (normals == 0)
        normals = pc;

      int normal_idx = pcl::getFieldIndex (*cloud, "normal_x");
      if (normal_idx == -1)
      {
        print_error ("Normal information requested but not available.\n");
        continue;
        //return (-1);
      }
      int pc_idx = pcl::getFieldIndex (*cloud, "principal_curvature_x");
      if (pc_idx == -1)
      {
        print_error ("Principal Curvature information requested but not available.\n");
        continue;
        //return (-1);
      }
      //
      // Convert from blob to pcl::PointCloud
      pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_xyz (new pcl::PointCloud<pcl::PointXYZ>);
      pcl::fromPCLPointCloud2 (*cloud, *cloud_xyz);
      cloud_xyz->sensor_origin_ = origin;
      cloud_xyz->sensor_orientation_ = orientation;
      pcl::PointCloud<pcl::Normal>::Ptr cloud_normals (new pcl::PointCloud<pcl::Normal>);
      pcl::fromPCLPointCloud2 (*cloud, *cloud_normals);
      pcl::PointCloud<pcl::PrincipalCurvatures>::Ptr cloud_pc (new pcl::PointCloud<pcl::PrincipalCurvatures>);
      pcl::fromPCLPointCloud2 (*cloud, *cloud_pc);
      std::stringstream cloud_name_normals_pc;
      cloud_name_normals_pc << argv[p_file_indices.at (i)] << "-" << i << "-normals";
      int factor = (std::min)(normals, pc);
      p->addPointCloudNormals<pcl::PointXYZ, pcl::Normal> (cloud_xyz, cloud_normals, factor, normals_scale, cloud_name_normals_pc.str (), viewport);
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_COLOR, 1.0, 0.0, 0.0, cloud_name_normals_pc.str ());
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_LINE_WIDTH, 3, cloud_name_normals_pc.str ());
      cloud_name_normals_pc << "-pc";
      p->addPointCloudPrincipalCurvatures (cloud_xyz, cloud_normals, cloud_pc, factor, pc_scale, cloud_name_normals_pc.str (), viewport);
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_LINE_WIDTH, 3, cloud_name_normals_pc.str ());
    }

    // Add every dimension as a possible color
    if (!fcolorparam)
    {
      for (size_t f = 0; f < cloud->fields.size (); ++f)
      {
        if (cloud->fields[f].name == "rgb" || cloud->fields[f].name == "rgba")
          color_handler.reset (new pcl::visualization::PointCloudColorHandlerRGBField<pcl::PCLPointCloud2> (cloud));
        else
        {
          if (!isValidFieldName (cloud->fields[f].name))
            continue;
          color_handler.reset (new pcl::visualization::PointCloudColorHandlerGenericField<pcl::PCLPointCloud2> (cloud, cloud->fields[f].name));
        }
        // Add the cloud to the renderer
        //p->addPointCloud<pcl::PointXYZ> (cloud_xyz, color_handler, cloud_name.str (), viewport);
        p->addPointCloud (cloud, color_handler, origin, orientation, cloud_name.str (), viewport);
      }
    }

    // Additionally, add normals as a handler
    geometry_handler.reset (new pcl::visualization::PointCloudGeometryHandlerSurfaceNormal<pcl::PCLPointCloud2> (cloud));
    if (geometry_handler->isCapable ())
      //p->addPointCloud<pcl::PointXYZ> (cloud_xyz, geometry_handler, cloud_name.str (), viewport);
      p->addPointCloud (cloud, geometry_handler, origin, orientation, cloud_name.str (), viewport);

    if (use_immediate_rendering)
      // Set immediate mode rendering ON
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_IMMEDIATE_RENDERING, 1.0, cloud_name.str ());

    // Change the cloud rendered point size
    if (psize.size () > 0)
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, psize.at (i), cloud_name.str ());

    // Change the cloud rendered opacity
    if (opaque.size () > 0)
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_OPACITY, opaque.at (i), cloud_name.str ());

    // Reset camera viewpoint to center of cloud if camera parameters were not passed manually and this is the first loaded cloud
    if (i == 0 && !p->cameraParamsSet ())
      p->resetCameraViewpoint (cloud_name.str ());

    print_info ("[done, "); print_value ("%g", tt.toc ()); print_info (" ms : "); print_value ("%u", cloud->width * cloud->height); print_info (" points]\n");
    print_info ("Available dimensions: "); print_value ("%s\n", pcl::getFieldsList (*cloud).c_str ());
  }

  if (!mview && p)
  {
    std::string str;
    if (!p_file_indices.empty ())
      str = std::string (argv[p_file_indices.at (0)]);
    else if (!vtk_file_indices.empty ())
      str = std::string (argv[vtk_file_indices.at (0)]);

    for (size_t i = 1; i < p_file_indices.size (); ++i)
      str += ", " + std::string (argv[p_file_indices.at (i)]);

    for (size_t i = 1; i < vtk_file_indices.size (); ++i)
      str += ", " + std::string (argv[vtk_file_indices.at (i)]);

    p->addText (str, 5, 5, 10, 1.0, 1.0, 1.0, "text_allnames");
  }

  if (p)
    p->setBackgroundColor (bcolor[0], bcolor[1], bcolor[2]);
  // Read axes settings
  double axes  = 0.0;
  pcl::console::parse_argument (argc, argv, "-ax", axes);
  if (axes != 0.0 && p)
  {
    float ax_x = 0.0, ax_y = 0.0, ax_z = 0.0;
    pcl::console::parse_3x_arguments (argc, argv, "-ax_pos", ax_x, ax_y, ax_z, false);
    // Draw XYZ axes if command-line enabled
    p->addCoordinateSystem (axes, ax_x, ax_y, ax_z);
  }

  // Clean up the memory used by the binary blob
  // Note: avoid resetting the cloud, otherwise the PointPicking callback will fail
  if (!use_pp)   // Only enable the point picking callback if the command line parameter is enabled
  {
    cloud.reset ();
    xyzcloud.reset ();
  }

  // If we have been given images, create our own loop so that we can spin each individually
  if (!imgs.empty ())
  {
    bool stopped = false;
    do
    {
#if VTK_MAJOR_VERSION==6 || (VTK_MAJOR_VERSION==5 && VTK_MINOR_VERSION>6)
      if (ph) ph->spinOnce ();
#endif

      for (int i = 0; i < int (imgs.size ()); ++i)
      {
        if (imgs[i]->wasStopped ())
        {
          stopped = true;
          break;
        }
        imgs[i]->spinOnce ();
      }
        
      if (p)
      {
        if (p->wasStopped ())
        {
          stopped = true;
          break;
        }
        p->spinOnce ();
      }
      boost::this_thread::sleep (boost::posix_time::microseconds (100));
    }
    while (!stopped);
  }
  else
  {
    // If no images, continue
#if VTK_MAJOR_VERSION==6 || (VTK_MAJOR_VERSION==5 && VTK_MINOR_VERSION>6)
    if (ph)
    {
      //print_highlight ("Setting the global Y range for all histograms to: "); print_value ("%f -> %f\n", min_p, max_p);
      //ph->setGlobalYRange (min_p, max_p);
      //ph->updateWindowPositions ();
      if (p)
        p->spin ();
      else
        ph->spin ();
    }
    else
#endif
      if (p)
        p->spin ();
  }
}
Example #21
  TyErrorId processWithLock(CAS &tcas, ResultSpecification const &res_spec)
  {
    MEASURE_TIME;
    outInfo("process begins");
    rs::SceneCas cas(tcas);
    rs::Scene scene = cas.getScene();
    dispCloud.reset(new pcl::PointCloud<pcl::PointXYZRGBA>);
    cas.get(VIEW_CLOUD,*dispCloud);
    cas.get(VIEW_COLOR_IMAGE,dispRGB);
    std::vector<rs::Cluster> clusters;
    std::vector<rs::Identifiable> mergedClusters;
    scene.identifiables.filter(clusters);
    outInfo("Scene has " << clusters.size() << " clusters");
    std::vector<bool> duplicates(clusters.size(), false);
    for(size_t i = 0; i < clusters.size(); ++i)
    {
      rs::Cluster &cluster1 = clusters[i];
      if(cluster1.rois.has())
      {
        rs::ImageROI image_roisc1 = cluster1.rois.get();
        cv::Rect cluster1Roi;
        rs::conversion::from(image_roisc1.roi_hires(), cluster1Roi);
        for(size_t j = i + 1; j < clusters.size(); ++j)
        {
          rs::Cluster &cluster2 = clusters[j];
          if(cluster2.rois.has())
          {
            rs::ImageROI image_roisc2 = cluster2.rois.get();
            cv::Rect cluster2Roi;
            rs::conversion::from(image_roisc2.roi_hires(), cluster2Roi);
            cv::Rect intersection = cluster1Roi & cluster2Roi;
            if(intersection.area() > cluster1Roi.area() / 10)
            {
              if(cluster1Roi.area() > cluster2Roi.area())
              {
                duplicates[j] = true;
              }
              else
              {
                duplicates[i] = true;
              }
            }
          }
        }
      }
    }

    for(size_t i = 0; i < duplicates.size(); ++i)
    {
      if(!duplicates[i])
      {
        mergedClusters.push_back(clusters[i]);
      }
      else
      {
        outInfo("Cluster " << i << "exists twice");
      }
    }

    scene.identifiables.set(mergedClusters);
    dispClusters.clear();
    scene.identifiables.filter(dispClusters);

    for(unsigned int i = 0; i < dispClusters.size(); ++i)
    {
      rs::Cluster &cluster = dispClusters[i];
      if(!cluster.points.has())
      {
        continue;
      }
      pcl::PointIndicesPtr clusterIndices(new pcl::PointIndices());
      rs::conversion::from(((rs::ReferenceClusterPoints)cluster.points.get()).indices.get(), *clusterIndices);
      for(unsigned int k = 0; k < clusterIndices->indices.size(); ++k)
      {
        int index = clusterIndices->indices[k];
        dispCloud->points[index].rgba = rs::common::colors[i % COLOR_SIZE];
      }
    }

    outDebug("BEGIN: After adding new clusters scene has " << scene.identifiables.size() << " identifiables");
    return UIMA_ERR_NONE;
  }
Example #22
void
processRGBD()
{
	USE_TIMES_START( start );

	struct timeval process_start, process_end;

	USE_TIMES_START( process_start );

    int stream_depth_state = 0;
    int stream_rgb_state = 0;

    int count = 0;
    stream_depth_state = true;
    while ((stream_depth_state) && (count < 1000))
    {
      stream_depth_state = processDepth();
      ++count;
    }
    if(stream_depth_state)
    {
        printf("\nstream state error  depth = %d, rgb = %d\n", stream_depth_state, stream_rgb_state);
        return;
    }
    count = 0;
    stream_rgb_state = true;
    while((stream_rgb_state) && (count < 100))
    {
      stream_rgb_state = processRGB();
      ++count;
    }

    USE_TIMES_END_SHOW ( process_start, process_end, "get RGBD time" );

    if(stream_rgb_state)
    {
        printf("\nstream state error  depth = %d, rgb = %d\n", stream_depth_state, stream_rgb_state);
        return;
    }

    USE_TIMES_START( process_start );

  cv::Mat depth_frame(depth_stream.height, depth_stream.width, CV_8UC1, depth_frame_buffer);

  // Float depth image (depth_raw / depth_unit scaled by depth_scale, matching the
  // point cloud's z units); filled in the per-pixel loop below and published via
  // pubRealSenseDepthImageMsg32F().
  cv::Mat cv_img_depth(depth_stream.height, depth_stream.width, CV_32FC1);

#ifdef V4L2_PIX_FMT_INZI
	cv::Mat ir_frame(depth_stream.height, depth_stream.width, CV_8UC1, ir_frame_buffer);
#endif

	cv::Mat rgb_frame;
#if USE_BGR24
	rgb_frame = cv::Mat(rgb_stream.height, rgb_stream.width, CV_8UC3, rgb_frame_buffer);
	memcpy(rgb_frame_buffer, rgb_stream.fillbuf, rgb_stream.buflen);
#else
	cv::Mat rgb_frame_yuv(rgb_stream.height, rgb_stream.width, CV_8UC2, rgb_frame_buffer);
	memcpy(rgb_frame_buffer, rgb_stream.fillbuf, rgb_stream.buflen);

	struct timeval cvt_start, cvt_end;
	USE_TIMES_START( cvt_start );
	cv::cvtColor(rgb_frame_yuv,rgb_frame,CV_YUV2BGR_YUYV);
	USE_TIMES_END_SHOW ( cvt_start, cvt_end, "CV_YUV2BGR_YUYV time" );

#endif

	USE_TIMES_END_SHOW ( process_start, process_end, "new cv::Mat object RGBD time" );


	USE_TIMES_START( process_start );

	if(!resize_point_cloud)
	{
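		// Allocate the organized output clouds once (width x height, non-dense so NaN points are allowed).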
		realsense_xyz_cloud.reset(new pcl::PointCloud<pcl::PointXYZ>());
		realsense_xyz_cloud->width = depth_stream.width;
		realsense_xyz_cloud->height = depth_stream.height;
		realsense_xyz_cloud->is_dense = false;
		realsense_xyz_cloud->points.resize(depth_stream.width * depth_stream.height);

		if(isHaveD2RGBUVMap)
		{
			realsense_xyzrgb_cloud.reset(new pcl::PointCloud<PointType>());
			realsense_xyzrgb_cloud->width = depth_stream.width;
			realsense_xyzrgb_cloud->height = depth_stream.height;
			realsense_xyzrgb_cloud->is_dense = false;
			realsense_xyzrgb_cloud->points.resize(depth_stream.width * depth_stream.height);
		}

		resize_point_cloud = true;
	}

    USE_TIMES_END_SHOW ( process_start, process_end, "new point cloud object time" );


    USE_TIMES_START( process_start );

    // Iterator over the float depth image, advanced in step with the pixel loop below.
    cv::MatIterator_<float> it = cv_img_depth.begin<float>();

    //depth value
	//#pragma omp parallel for
    for(int i=0; i<depth_stream.width * depth_stream.height; ++i)
    {
      float depth = 0.0f;
#ifdef V4L2_PIX_FMT_INZI
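			// The INZI buffer is read as 3 bytes per pixel: a 16-bit raw depth value
			// followed by an 8-bit IR sample.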
			unsigned short* depth_ptr = (unsigned short*)((unsigned char*)(depth_stream.fillbuf) + i*3);
			unsigned char* ir_ptr = (unsigned char*)(depth_stream.fillbuf) + i*3+2;

			unsigned char ir_raw = *ir_ptr;
			ir_frame_buffer[i] = ir_raw;

			unsigned short depth_raw = *depth_ptr;
			depth = (float)depth_raw / depth_unit;
#else
    		unsigned short depth_raw = *((unsigned short*)(depth_stream.fillbuf) + i);
			depth = (float)depth_raw / depth_unit;
#endif

        // 8-bit preview: nearer pixels map to brighter values, zero depth stays black.
        depth_frame_buffer[i] = depth ? 255 * (sensor_depth_max - depth) / sensor_depth_max : 0;

        // Mirror the depth into the float image; invalid pixels become NaN.
        if (it != cv_img_depth.end<float>())
        {
          if (depth > 0.000001f)
            *it = depth * depth_scale;
          else
            *it = std::numeric_limits<float>::quiet_NaN();
          ++it;
        }
        else
          std::cout << " ********** out of depth image bounds" << std::endl;

        float uvx = -1.0f;
        float uvy = -1.0f;

        if(depth>0.000001f)
        {
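        	// Back-project through the pinhole model: X = (u - cx) * Z / fx, Y = (v - cy) * Z / fy.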
        	float pixel_x = (i % depth_stream.width) - depth_cx;
        	float pixel_y = (i / depth_stream.width) - depth_cy;
            float zz = depth * depth_scale;
            realsense_xyz_cloud->points[i].x = pixel_x * zz * depth_fxinv;
            realsense_xyz_cloud->points[i].y = pixel_y * zz * depth_fyinv;
            realsense_xyz_cloud->points[i].z = zz;

            if(isHaveD2RGBUVMap)
            {
				realsense_xyzrgb_cloud->points[i].x = realsense_xyz_cloud->points[i].x;
				realsense_xyzrgb_cloud->points[i].y = realsense_xyz_cloud->points[i].y;
				realsense_xyzrgb_cloud->points[i].z = realsense_xyz_cloud->points[i].z;

				if(!getUVWithDXY(depth, i, uvx, uvy))
				{
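					// getUVWithDXY() returned 0, so uvx/uvy hold normalized RGB image coordinates in [0, 1].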
					int cx = (int)(uvx * rgb_stream.width + 0.5f);
					int cy = (int)(uvy * rgb_stream.height + 0.5f);
					if (cx >= 0 && cx < rgb_stream.width && cy >= 0 && cy < rgb_stream.height)
					{
						unsigned char *rgb = rgb_frame.data + (cx+cy*rgb_stream.width)*3;
						unsigned char r = rgb[2];
						unsigned char g = rgb[1];
						unsigned char b = rgb[0];


						if(debug_depth_unit &&
							pixel_x > -center_offset_pixel &&
							pixel_x < center_offset_pixel &&
							pixel_y > -center_offset_pixel &&
							pixel_y < center_offset_pixel )
						{
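							// Depth-unit debugging: accumulate depth over a small centre patch and paint these points blue.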
							center_z += zz;
							center_z_count++;
							realsense_xyzrgb_cloud->points[i].rgba = (0 << 24) | (0 << 16) | (0 << 8) | 255;
						}
						else
						{
							realsense_xyzrgb_cloud->points[i].rgba = (0 << 24) | (r << 16) | (g << 8) | b;
						}

					}
					else
					{
						realsense_xyzrgb_cloud->points[i].x = std::numeric_limits<float>::quiet_NaN();
						realsense_xyzrgb_cloud->points[i].y = std::numeric_limits<float>::quiet_NaN();
						realsense_xyzrgb_cloud->points[i].z = std::numeric_limits<float>::quiet_NaN();
						realsense_xyzrgb_cloud->points[i].rgba = 0;
					}
				}
				else
				{
					realsense_xyzrgb_cloud->points[i].x = std::numeric_limits<float>::quiet_NaN();
					realsense_xyzrgb_cloud->points[i].y = std::numeric_limits<float>::quiet_NaN();
					realsense_xyzrgb_cloud->points[i].z = std::numeric_limits<float>::quiet_NaN();
					realsense_xyzrgb_cloud->points[i].rgba = 0;
				}
            }

        }
        else
        {
        	realsense_xyz_cloud->points[i].x = std::numeric_limits<float>::quiet_NaN();
			realsense_xyz_cloud->points[i].y = std::numeric_limits<float>::quiet_NaN();
			realsense_xyz_cloud->points[i].z = std::numeric_limits<float>::quiet_NaN();

        	if(isHaveD2RGBUVMap)
        	{
				realsense_xyzrgb_cloud->points[i].x = std::numeric_limits<float>::quiet_NaN();
				realsense_xyzrgb_cloud->points[i].y = std::numeric_limits<float>::quiet_NaN();
				realsense_xyzrgb_cloud->points[i].z = std::numeric_limits<float>::quiet_NaN();
				realsense_xyzrgb_cloud->points[i].rgba = 0;
        	}
        }

    }

    USE_TIMES_END_SHOW ( process_start, process_end, "fill point cloud data time" );

    if(debug_depth_unit && center_z_count)
    {
    	if(usbContext && usbHandle)
    	{
    		realsenseTemperature = getRealsenseTemperature(usbHandle);
    	}
    	center_z /= center_z_count;
    	printf("average center z value = %f    temp = %d    depth_unit = %f\n", center_z, realsenseTemperature, depth_unit);
    	center_z_count = 0;
    }


    USE_TIMES_END_SHOW ( start, end, "process result time" );

#if SHOW_RGBD_FRAME
    cv::imshow("depth frame view", depth_frame);

#ifdef V4L2_PIX_FMT_INZI
    cv::imshow("ir frame view", ir_frame);
#endif
    cv::imshow("RGB frame view", rgb_frame);

#endif


    //pub msgs
    head_sequence_id++;
    head_time_stamp = ros::Time::now();

    pubRealSenseRGBImageMsg(rgb_frame);
#ifdef V4L2_PIX_FMT_INZI
    pubRealSenseInfraredImageMsg(ir_frame);
#endif
    //pubRealSenseDepthImageMsg(depth_frame);
    pubRealSenseDepthImageMsg32F(cv_img_depth);

    pubRealSensePointsXYZCloudMsg(realsense_xyz_cloud);
    if(isHaveD2RGBUVMap)
    {
    	pubRealSensePointsXYZRGBCloudMsg(realsense_xyzrgb_cloud);
    }

}
void clearCb()
{
	final_giant.reset(new pcl::PointCloud<pcl::PointXYZRGB>);
}
void Camera::undistort(pcl::PointCloud<pcl::PointXYZRGBA>::Ptr &cloud, pcl::PointCloud<pcl::PointXYZRGBA>::Ptr &cloud_undistorted)
{
    cloud_undistorted.reset(new pcl::PointCloud<pcl::PointXYZRGBA>);
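    // Rebuild the input as an organized CLAMS cloud, project it to a depth frame with
    // the intrinsics below, run _distortionModel over that depth image, then re-project
    // the undistorted frame into the output cloud.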
    clams::FrameProjector proj;
    clams::Frame *frame = new clams::Frame;  // filled by proj.cloudToFrame() below
    proj.width_ = WIDTH;
    proj.height_ = HEIGHT;
    proj.cx_ = C_X;
    proj.cy_ = C_Y;
    proj.fx_ = F_X ;
    proj.fy_ = F_Y;
    clams::Cloud cloudToCalibrate;
    clams::Cloud *cloud_dist = new clams::Cloud;
    cloudToCalibrate.width = WIDTH;
    cloudToCalibrate.height = HEIGHT;
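    // The input cloud is assumed to be organized as HEIGHT x WIDTH; copy it point by point.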
    for(size_t a = 0; a < HEIGHT ; a++)
    {
        for(size_t b = 0; b < WIDTH ; b++)
        {
            clams::Point point;
            point.x = cloud->points[a * WIDTH + b].x;
            point.y = cloud->points[a * WIDTH + b].y;
            point.z = cloud->points[a * WIDTH + b].z;
            point.r = cloud->points[a * WIDTH + b].r;
            point.g = cloud->points[a * WIDTH + b].g;
            point.b = cloud->points[a * WIDTH + b].b;
            cloudToCalibrate.push_back(point);

        }
    }
    clams::FrameProjector::IndexMap *indexMap = new clams::FrameProjector::IndexMap;
    cloudToCalibrate.header.stamp = cloud->header.stamp;
    proj.cloudToFrame(cloudToCalibrate, frame, indexMap);

    _distortionModel.undistort(frame->depth_.get());


    proj.frameToCloud(*frame, cloud_dist);

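    // Copy the re-projected, undistorted cloud back into the PCL output cloud.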
    for(size_t c = 0; c < HEIGHT ; c++)
    {
        for(size_t d = 0; d < WIDTH ; d++)
        {
            pcl::PointXYZRGBA point;

            point.x = cloud_dist->points[c * WIDTH + d].x;
            point.y = cloud_dist->points[c * WIDTH + d].y;
            point.z = cloud_dist->points[c * WIDTH + d].z;
            point.r = cloud_dist->points[c * WIDTH + d].r;
            point.g = cloud_dist->points[c * WIDTH + d].g;
            point.b = cloud_dist->points[c * WIDTH + d].b;
            cloud_undistorted->push_back(point);
        }
    }
    delete indexMap;
    delete cloud_dist;
    delete frame;
}