void fi::VPDetectionWrapper::leastQuaresVPFitting(const std::vector<Eigen::Vector3f> &vp_inliers, Eigen::VectorXf &optimized_vp)
{
	unsigned int num_in_liers = vp_inliers.size();


	//change to double
	//std::vector<Eigen::Vector3d> vp_double(num_in_liers);
	//for ( unsigned int i = 0; i < num_in_liers; i++)
	//{
	//	Eigen::Vector3f a_vp = vp_inliers[i];
	//	vp_double[i](0) = a_vp(0);
	//	vp_double[i](1) = a_vp(1);
	//	vp_double[i](2) = a_vp(2);
	//}

	Eigen::VectorXd opt_double_val;
	opt_double_val.resize(6);

	//populate the VPs in a cloud just to make things a bit handy
	pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud(new pcl::PointCloud<pcl::PointXYZ>());
	point_cloud->height = 1;
	point_cloud->width = num_in_liers;
	point_cloud->resize(point_cloud->height * point_cloud->width);

	for (unsigned int i = 0; i < num_in_liers; i++)
	{
		Eigen::Vector3f n_tmp = vp_inliers.at(i);
		point_cloud->points[i].x = n_tmp(0);
		point_cloud->points[i].y = n_tmp(1);
		point_cloud->points[i].z = n_tmp(2);
	}

	optimized_vp.resize (6);

	// Compute the 3x3 covariance matrix
	Eigen::Vector4d centroid;
	//pcl::compute3DCentroid (*point_cloud, centroid);

	Eigen::Matrix3d covariance_matrix;
	//pcl::computeCovarianceMatrix(*input_, inliers, centroid, covariance_matrix);
	//pcl::computeCovarianceMatrix(*point_cloud, centroid, covariance_matrix);
	pcl::computeMeanAndCovarianceMatrix(*point_cloud, covariance_matrix, centroid);
	optimized_vp[0] = centroid[0];
	optimized_vp[1] = centroid[1];
	optimized_vp[2] = centroid[2];
	opt_double_val(0) = centroid[0];
	opt_double_val(1) = centroid[1];
	opt_double_val(2) = centroid[2];

	// Extract the eigenvalues and eigenvectors
	EIGEN_ALIGN16 Eigen::Vector3d eigen_values;
	EIGEN_ALIGN16 Eigen::Matrix3d eigen_vectors;
	pcl::eigen33(covariance_matrix, eigen_vectors, eigen_values);

	//optimized_vp.tail<3> () = eigen_vectors.col (0).normalized ();
	opt_double_val.tail<3> () = eigen_vectors.col (0).normalized ();
	optimized_vp[3] = static_cast<float> (opt_double_val[3]);
	optimized_vp[4] = static_cast<float> (opt_double_val[4]);
	optimized_vp[5] = static_cast<float> (opt_double_val[5]);
}
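
The 6-vector produced above encodes a 3-D line model: elements 0-2 hold the centroid of the inliers and elements 3-5 the unit direction taken from eigen_vectors.col(0), the smallest-eigenvalue eigenvector (pcl::eigen33 returns eigenvalues in increasing order). A minimal caller sketch under those assumptions (the wrapper instance and inlier set are assumed inputs):

#include <vector>
#include <Eigen/Core>
// plus the project header declaring fi::VPDetectionWrapper

// Hypothetical usage sketch: fit the model to gathered VP inliers and
// split the 6-vector result into centroid and direction.
void fitExample(fi::VPDetectionWrapper& wrapper,
                const std::vector<Eigen::Vector3f>& vp_inliers)
{
  Eigen::VectorXf model;
  wrapper.leastQuaresVPFitting(vp_inliers, model);
  const Eigen::Vector3f centroid  = model.head<3>();  // elements 0-2
  const Eigen::Vector3f direction = model.tail<3>();  // elements 3-5, unit length
}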
void TerrainClassifierNode::setPointCloud(const sensor_msgs::PointCloud2& point_cloud_msg)
{
  std::string world_frame_id = vigir_footstep_planning::strip_const(terrain_classifier.getFrameId(), '/');
  std::string cloud_frame_id = vigir_footstep_planning::strip_const(point_cloud_msg.header.frame_id, '/');
  if (world_frame_id != cloud_frame_id)
  {
    ROS_ERROR_THROTTLE(5.0, "[TerrainClassifierNode] setPointCloud: Frame of input point cloud ('%s') mismatch! Should be '%s'.", cloud_frame_id.c_str(), world_frame_id.c_str());
    return;
  }

  pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud(new pcl::PointCloud<pcl::PointXYZ>());
  pcl::fromROSMsg(point_cloud_msg, *point_cloud);

  // downsample the input with a voxel grid filter
  filterVoxelGrid<pcl::PointXYZ>(point_cloud, 0.02, 0.02, 2.0);

  // apply the classifier's own input filtering
  terrain_classifier.filterPointCloudData(point_cloud);

  terrain_classifier.setPointCloud(point_cloud);
  generateTerrainModel();

  publishDebugData();
  publishTerrainModel();

  //ROS_INFO("Saved");
  //pcl::io::savePCDFile("/home/alex/vigir/catkin_ws/vigir_footstep_planning/vigir_terrain_classifier/pointclouds/new.pcd", cloud_input);
}
 int
 process(const tendrils& inputs, const tendrils& outputs)
 {
   // convert the cv::Mat of 3-D points into a PCL cloud (points3d and
   // cloud_out are ecto spores declared elsewhere in the cell)
   CloudType::Ptr point_cloud(new CloudType);
   cvToCloud(*points3d, *point_cloud);
   *cloud_out = point_cloud;
   return ecto::OK;
 }
void SuperFrameParser::fillPointCloudMsg ()
{
    pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud (new pcl::PointCloud<pcl::PointXYZ> ());
    convertImageToPointCloud (depth_msgs_, point_cloud);

    pcl::toROSMsg (*point_cloud, *point_cloud_msgs_);

    point_cloud_msgs_->header.frame_id = depth_msgs_->header.frame_id;
    point_cloud_msgs_->header.stamp = depth_msgs_->header.stamp;
    // row_step must be width * point_step for a dense cloud (toROSMsg already sets this)
    point_cloud_msgs_->row_step = point_cloud_msgs_->width * point_cloud_msgs_->point_step;
}
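
For reference, a dense sensor_msgs::PointCloud2 must satisfy row_step = width * point_step and data.size() = row_step * height, or consumers will mis-index the byte buffer. A small sanity-check sketch of those invariants:

#include <sensor_msgs/PointCloud2.h>
#include <ros/assert.h>

// Sanity-check sketch for a dense (non-padded) cloud message.
void checkLayout(const sensor_msgs::PointCloud2& msg)
{
  ROS_ASSERT(msg.row_step == msg.width * msg.point_step);
  ROS_ASSERT(msg.data.size() == size_t(msg.row_step) * msg.height);
}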
Example #5
void callback(PointCloud points) {
	cv::Mat point_cloud(points.height, points.width, CV_8U, points.data);
	cv_bridge::CvImage msg;
	msg.header.seq = seq_id;
	msg.header.frame_id = boost::lexical_cast<std::string>(seq_id); // frame_id is a string
	msg.header.stamp = ros::Time::now();
	msg.encoding = sensor_msgs::image_encodings::MONO8;
	msg.image = point_cloud;
	
	points_pub.publish(msg.toImageMsg());
	ROS_INFO("[stereo_publisher]: Lag - %u", time(0) - points.stamp);
	seq_id++;
}
void TerrainClassifierNode::insertPointCloud(const sensor_msgs::PointCloud2& point_cloud_msg)
{
  std::string world_frame_id = vigir_footstep_planning::strip_const(terrain_classifier.getFrameId(), '/');
  std::string cloud_frame_id = vigir_footstep_planning::strip_const(point_cloud_msg.header.frame_id, '/');
  if (world_frame_id != cloud_frame_id)
  {
    ROS_ERROR_THROTTLE(5.0, "[TerrainClassifierNode] insertPointCloud: Frame of input point cloud ('%s') mismatch! Should be '%s'.", cloud_frame_id.c_str(), world_frame_id.c_str());
    return;
  }

  if (point_cloud_msg.data.empty())
    return;

  pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud(new pcl::PointCloud<pcl::PointXYZ>());
  pcl::fromROSMsg(point_cloud_msg, *point_cloud);

  // filtering of input point cloud
  terrain_classifier.filterPointCloudData(point_cloud);

  terrain_classifier.insertPointCloud(point_cloud);

  if (terrain_classifier.getInputCloud()->size() <= min_aggregation_size)
  {
    if (cloud_input_pub.getNumSubscribers() > 0)
    {
      sensor_msgs::PointCloud2 cloud_point_msg;
      pcl::toROSMsg(*(terrain_classifier.getInputCloud()), cloud_point_msg);
      cloud_point_msg.header.stamp = ros::Time::now();
      cloud_point_msg.header.frame_id = terrain_classifier.getFrameId();
      cloud_input_pub.publish(cloud_point_msg);
    }

    return;
  }

  if (compute_update_skips_counter++ >= compute_update_skips)
  {
    terrain_classifier.computeNormals(point_cloud);
    compute_update_skips_counter = 0;
  }

  terrain_classifier.generateHeightGridmap(point_cloud);

  publishDebugData();

  //ROS_INFO("Saved");
  //pcl::io::savePCDFile("/home/alex/vigir/catkin_ws/vigir_footstep_planning/vigir_terrain_classifier/pointclouds/new.pcd", cloud_input);
}
void keypoint_map::publish_pointclouds(RmOctomapServer::Ptr & server, const std::string & frame_prefix) {

	for (size_t i = 0; i < depth_imgs.size(); i++) {

		pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud(
				new pcl::PointCloud<pcl::PointXYZ>);

		for (int v = 0; v < depth_imgs[i].rows; v++) {
			for (int u = 0; u < depth_imgs[i].cols; u++) {
				if (depth_imgs[i].at<unsigned short>(v, u) != 0) {
					pcl::PointXYZ p;
					p.z = depth_imgs[i].at<unsigned short>(v, u) / 1000.0f;
					p.x = (u - intrinsics[2]) * p.z / intrinsics[0];
					p.y = (v - intrinsics[3]) * p.z / intrinsics[1];
					//cv::Vec3b brg = rgb_imgs[i].at<cv::Vec3b>(v, u);
					//p.r = brg[2];
					//p.g = brg[1];
					//p.b = brg[0];
					//p.a = 255;

					Eigen::Vector4f tmp = camera_positions[i]
							* p.getVector4fMap();

					if (tmp[2] < 0.6 && tmp[2] > 0.2) {
						p.getVector4fMap() = tmp;
						point_cloud->push_back(p);

					}

				}
			}
		}

		point_cloud->sensor_orientation_ = camera_positions[i].rotation();
		point_cloud->sensor_origin_ = camera_positions[i].translation().homogeneous();
		point_cloud->header.frame_id = frame_prefix + "/map";
		point_cloud->header.seq = i;
		point_cloud->header.stamp = ros::Time::now();

		Eigen::Vector3f pos = camera_positions[i].translation();

		server->insertScan(tf::Point(pos[0], pos[1], pos[2]), *point_cloud, pcl::PointCloud<pcl::PointXYZ>());

	}

	server->publishAll(ros::Time::now());
}
Example #8
  void computeAndPublishPointCloud(
      const sensor_msgs::CameraInfoConstPtr& l_info_msg,
      const sensor_msgs::ImageConstPtr& l_image_msg,
      const sensor_msgs::CameraInfoConstPtr& r_info_msg, 
      const std::vector<Matcher::p_match>& matches,
      const std::vector<int32_t>& inlier_indices)
  {
    try
    {
      cv_bridge::CvImageConstPtr cv_ptr;
      cv_ptr = cv_bridge::toCvShare(l_image_msg, sensor_msgs::image_encodings::RGB8);
      // read calibration info from camera info message
      image_geometry::StereoCameraModel model;
      model.fromCameraInfo(*l_info_msg, *r_info_msg);
      PointCloud::Ptr point_cloud(new PointCloud());
      point_cloud->header.frame_id = getSensorFrameId();
      point_cloud->header.stamp = l_info_msg->header.stamp;
      point_cloud->width = 1;
      point_cloud->height = inlier_indices.size();
      point_cloud->points.resize(inlier_indices.size());

      for (size_t i = 0; i < inlier_indices.size(); ++i)
      {
        const Matcher::p_match& match = matches[inlier_indices[i]];
        cv::Point2d left_uv;
        left_uv.x = match.u1c;
        left_uv.y = match.v1c;
        cv::Point3d point;
        double disparity = match.u1c - match.u2c;
        model.projectDisparityTo3d(left_uv, disparity, point);
        point_cloud->points[i].x = point.x;
        point_cloud->points[i].y = point.y;
        point_cloud->points[i].z = point.z;
        cv::Vec3b color = cv_ptr->image.at<cv::Vec3b>(left_uv.y,left_uv.x);
        point_cloud->points[i].r = color[0];
        point_cloud->points[i].g = color[1];
        point_cloud->points[i].b = color[2];
      }
      ROS_DEBUG("Publishing point cloud with %zu points.", point_cloud->size());
      point_cloud_pub_.publish(point_cloud);
    }
    catch (cv_bridge::Exception& e)
    {
      ROS_ERROR("cv_bridge exception: %s", e.what());
    }
  }
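
For an ideal rectified pair, projectDisparityTo3d reduces to the standard pinhole relations: with focal length f, baseline T, principal point (cx, cy) and disparity d, depth is Z = f*T/d and the lateral coordinates follow by back-projection. A hand-rolled sketch of that computation (names are illustrative, not the image_geometry internals):

#include <opencv2/core/core.hpp>

// Illustrative reimplementation of rectified-stereo triangulation; f,
// baseline, cx, cy would come from the calibrated stereo camera model.
cv::Point3d disparityTo3d(const cv::Point2d& left_uv, double disparity,
                          double f, double baseline, double cx, double cy)
{
  const double Z = f * baseline / disparity;   // depth from disparity
  const double X = (left_uv.x - cx) * Z / f;   // back-project u
  const double Y = (left_uv.y - cy) * Z / f;   // back-project v
  return cv::Point3d(X, Y, Z);
}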
void PointsFileSystem::showPointCloud(const QPersistentModelIndex& index)
{
    checked_indexes_.insert(index);

    osg::ref_ptr<PointCloud> point_cloud(getPointCloud(filePath(index).toStdString()));
    if (!point_cloud.valid())
        return;

    PointCloudMap::iterator point_cloud_map_it = point_cloud_map_.find(index);
    if (point_cloud_map_it != point_cloud_map_.end())
        return;

    MainWindow::getInstance()->getSceneWidget()->addSceneChild(point_cloud);
    point_cloud_map_[index] = point_cloud;

    return;
}
void TerrainClassifierNode::loadTestPointCloud(const std::string& path)
{
  pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud(new pcl::PointCloud<pcl::PointXYZ>());

  ROS_INFO("Loading point cloud from %s...", path.c_str());
  if (pcl::io::loadPCDFile(path, *point_cloud))
  {
    ROS_ERROR("FAILED!");
    return;
  }
  else
    ROS_INFO("Done!");;

  sensor_msgs::PointCloud2 point_cloud_msg;
  pcl::toROSMsg(*point_cloud, point_cloud_msg);
  point_cloud_msg.header.frame_id = terrain_classifier.getFrameId();
  point_cloud_msg.header.stamp = ros::Time::now();

  ROS_INFO("Generating terrain model...");
  setPointCloud(point_cloud_msg);
  ROS_INFO("Done!");
}
Example #11
pcl::PointCloud<pcl::PointXYZ>::Ptr mat2IntegralPointCloud(const cv::Mat& depth_mat, const float focal_length, const float max_depth) {
    assert(depth_mat.type() == CV_16U);
    pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud(new pcl::PointCloud<pcl::PointXYZ>());
    const int half_width = depth_mat.cols / 2;
    const int half_height = depth_mat.rows / 2;
    const float inv_focal_length = 1.0f / focal_length;
    point_cloud->points.reserve(depth_mat.rows * depth_mat.cols);
    for (int y = 0; y < depth_mat.rows; y++) {
        for (int x = 0; x < depth_mat.cols; x++) {
            float z = depth_mat.at<ushort>(cv::Point(x, y)) * 0.001f; // mm to m
            if (z < max_depth && z > 0) {
                point_cloud->points.emplace_back(static_cast<float>(x - half_width)  * z * inv_focal_length,
                                                 static_cast<float>(y - half_height) * z * inv_focal_length,
                                                 z);
            } else {
                point_cloud->points.emplace_back(x, y, NAN);
            }
        }
    }
    point_cloud->width = depth_mat.cols;
    point_cloud->height = depth_mat.rows;
    return point_cloud;
}
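
Because the function above keeps the cloud organized (width/height match the depth image and invalid pixels become NaN), it can feed PCL's organized-cloud algorithms directly. For instance, a sketch using pcl::IntegralImageNormalEstimation (parameter values are illustrative):

#include <pcl/features/integral_image_normal.h>
#include <pcl/point_types.h>

// Sketch: fast normal estimation that requires exactly this kind of
// organized cloud with NaNs at invalid pixels.
pcl::PointCloud<pcl::Normal>::Ptr estimateNormals(
    const pcl::PointCloud<pcl::PointXYZ>::ConstPtr& cloud)
{
  pcl::IntegralImageNormalEstimation<pcl::PointXYZ, pcl::Normal> ne;
  ne.setNormalEstimationMethod(ne.AVERAGE_3D_GRADIENT);
  ne.setMaxDepthChangeFactor(0.02f);   // illustrative value
  ne.setNormalSmoothingSize(10.0f);    // illustrative value
  ne.setInputCloud(cloud);
  pcl::PointCloud<pcl::Normal>::Ptr normals(new pcl::PointCloud<pcl::Normal>);
  ne.compute(*normals);
  return normals;
}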
void LslidarN301Decoder::publishPointCloud() {
  pcl::PointCloud<pcl::PointXYZI>::Ptr point_cloud(
      new pcl::PointCloud<pcl::PointXYZI>());
  point_cloud->header.stamp =
    pcl_conversions::toPCL(sweep_data->header).stamp;
  point_cloud->header.frame_id = child_frame_id;
  point_cloud->height = 1;

  for (size_t i = 0; i < 16; ++i) {
    const lslidar_n301_msgs::LslidarN301Scan& scan = sweep_data->scans[i];
    // The first and last point in each scan are ignored, since they
    // appear to be corrupted in the received data.
    // TODO: the two end points should be removed directly in the scans.
    if (scan.points.empty()) continue;
    for (size_t j = 1; j + 1 < scan.points.size(); ++j) {
      pcl::PointXYZI point;
      point.x = scan.points[j].x;
      point.y = scan.points[j].y;
      point.z = scan.points[j].z;
      point.intensity = scan.points[j].intensity;
      point_cloud->points.push_back(point);
      ++point_cloud->width;
    }

  }

  //if (point_cloud->width > 2000)
  point_cloud_pub.publish(point_cloud);


  return;
}
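
Publishing a pcl::PointCloud<T> directly, as done above, relies on pcl_ros's publisher specialization that serializes it as sensor_msgs/PointCloud2 on the wire. A minimal setup sketch (topic name assumed):

#include <ros/ros.h>
#include <pcl_ros/point_cloud.h>  // enables advertise/publish of pcl::PointCloud<T>
#include <pcl/point_types.h>

// Sketch: with pcl_ros/point_cloud.h included, a PCL cloud type can be
// advertised like a regular message and arrives as sensor_msgs/PointCloud2.
ros::Publisher makeCloudPublisher(ros::NodeHandle& nh)
{
  return nh.advertise<pcl::PointCloud<pcl::PointXYZI> >("point_cloud", 10);
}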
Example #13
void SceneWidget::slotSampleScan(void) {
  MainWindow* main_window = MainWindow::getInstance();

  int resolution;
  if (!ParameterManager::getInstance().getSampleScanParameters(resolution))
    return;

  osg::ref_ptr<PointCloud> point_cloud(new PointCloud);
  for (size_t i = 0, i_end = mesh_models_.size(); i < i_end; ++i) {
    if (i == 0) {
      mesh_models_[i]->sampleScan(point_cloud->data(), resolution, 0.0);
    } else {
      PclPointCloud::Ptr samples(new PclPointCloud);
      mesh_models_[i]->sampleScan(samples, resolution, 0.0);
      point_cloud->data()->insert(point_cloud->data()->end(), samples->begin(), samples->end());
    }
  }

  removeSceneChild(point_cloud_);
  point_cloud_ = point_cloud;
  addSceneChild(point_cloud_);

  return;
}
Example #14
// ----------------------------------------------------------------------------
void loadFeatures3d(BoWFeatures &features)
{
    typedef pcl::PointXYZ PointType;
    float angular_resolution = pcl::deg2rad (0.15f);
    float support_size = 0.1f;

    features.clear();
    features.reserve(files_list_3d.size());
    
    float noise_level = 0.0f;
    float min_range = 0.0f;
    int border_size = 1;
    
    double acc_media = 0, media = 0, scarti = 0, varianza = 0; // running mean, time sum, squared deviations, std. deviation
    //pcl::visualization::RangeImageVisualizer range_image_widget ("Range image");
    pcl::RangeImage::CoordinateFrame coordinate_frame = pcl::RangeImage::CAMERA_FRAME;
    pcl::RangeImage::Ptr range_image_ptr (new pcl::RangeImage);
    pcl::RangeImage& range_image = *range_image_ptr;
    
    for (int i = 0; i < files_list_3d.size(); ++i) {
        clock_t begin = clock();
        pcl::PointCloud<PointType>::Ptr point_cloud_wf (new pcl::PointCloud<PointType>);
        pcl::PointCloud<PointType>::Ptr point_cloud (new pcl::PointCloud<PointType>);
        
        pcl::io::loadPCDFile (files_list_3d[i], *point_cloud_wf);

        //filter out NaN values
        std::vector<int> indices;
        pcl::removeNaNFromPointCloud (*point_cloud_wf,*point_cloud_wf,indices);
        pcl::VoxelGrid<PointType> sor;
        sor.setInputCloud (point_cloud_wf);
        sor.setLeafSize (0.01f, 0.01f, 0.01f);
        sor.filter (*point_cloud);
        cout << "Estrazione NARF: " << files_list_3d[i] ;
        
        Eigen::Affine3f scene_sensor_pose (Eigen::Affine3f::Identity());
        scene_sensor_pose = Eigen::Affine3f (Eigen::Translation3f ((*point_cloud).sensor_origin_[0],
                                             (*point_cloud).sensor_origin_[1],
                (*point_cloud).sensor_origin_[2])) *
                Eigen::Affine3f ((*point_cloud).sensor_orientation_);
       // pcl::visualization::RangeImageVisualizer range_image_widget ("Range image");
        range_image.max_no_of_threads = 2;
        range_image.createFromPointCloud ((*point_cloud),angular_resolution,pcl::deg2rad(360.0f),pcl::deg2rad(180.0f),scene_sensor_pose,coordinate_frame,noise_level,min_range,border_size);
        range_image.setUnseenToMaxRange();
        //saveRangeImagePlanarFilePNG(boost::lexical_cast<string>(i),range_image);
        
        //range_image_widget.showRangeImage (range_image);
        //range_image_widget.spin();
        pcl::RangeImageBorderExtractor range_image_border_extractor;
        pcl::NarfKeypoint narf_keypoint_detector;
        narf_keypoint_detector.setRangeImageBorderExtractor (&range_image_border_extractor);
        narf_keypoint_detector.setRangeImage (&range_image);
        narf_keypoint_detector.getParameters().support_size = support_size;
        //heuristics to get closer to real time
        narf_keypoint_detector.getParameters().max_no_of_threads = 2;
        narf_keypoint_detector.getParameters().calculate_sparse_interest_image=false; //false
        narf_keypoint_detector.getParameters().use_recursive_scale_reduction=true; //true
        //narf_keypoint_detector.getParameters().add_points_on_straight_edges=true;
        
        pcl::PointCloud<int> keypoint_indices;
        narf_keypoint_detector.compute (keypoint_indices);
        
        vector<int> keypoint_indices2;
        keypoint_indices2.resize (keypoint_indices.points.size ());
        for (unsigned int k = 0; k < keypoint_indices.size (); ++k) // This step is necessary to get the right vector type
            keypoint_indices2[k] = keypoint_indices.points[k];
        pcl::NarfDescriptor narf_descriptor (&range_image, &keypoint_indices2);
        narf_descriptor.getParameters().support_size = support_size;
        narf_descriptor.getParameters().rotation_invariant = true;
        pcl::PointCloud<pcl::Narf36> narf_descriptors;
        
        narf_descriptor.compute (narf_descriptors);
        
        clock_t end = clock();
        double elapsed_secs = double(end - begin) / CLOCKS_PER_SEC;
        media = media + elapsed_secs;
        acc_media = media / (i+1);
        cout << "mean: " << acc_media << endl;
        scarti += pow(elapsed_secs - acc_media, 2);
        varianza = sqrt(scarti / (i+1)); // note: this is the standard deviation
        cout << "std. deviation: " << varianza << endl;
        cout << ". Extracted " << narf_descriptors.size () << " descriptors. Points: " << keypoint_indices.points.size () << "." << endl;
        
        features.push_back(vector<vector<float> >());
        for (int p = 0; p < narf_descriptors.size(); p++) {
            vector<float> flot;
            copy(narf_descriptors[p].descriptor, narf_descriptors[p].descriptor+FNarf::L, back_inserter(flot));
            features.back().push_back(flot);
            flot.clear();
        } 
        
        indices.clear();
        range_image_border_extractor.clearData();
        narf_keypoint_detector.clearData();
        (*range_image_ptr).clear();
        keypoint_indices.clear();
        keypoint_indices2.clear();
        (*point_cloud).clear();
        (*point_cloud_wf).clear();
        range_image.clear();
        narf_descriptors.clear();
        narf_descriptor = NULL;
    }
    cout << "Estrazione terminata." << endl;
}
Example #15
template <typename PointXYZT, typename PointRGBT> void 
pcl::LineRGBD<PointXYZT, PointRGBT>::applyProjectiveDepthICPOnDetections ()
{
  const size_t nr_detections = detections_.size ();
  for (size_t detection_index = 0; detection_index < nr_detections; ++detection_index)
  {
    typename pcl::LineRGBD<PointXYZT, PointRGBT>::Detection & detection = detections_[detection_index];

    const size_t template_id = detection.template_id;
    pcl::PointCloud<pcl::PointXYZRGBA> & point_cloud = template_point_clouds_[template_id];

    const size_t start_x = detection.region.x;
    const size_t start_y = detection.region.y;
    const size_t pc_width = point_cloud.width;
    const size_t pc_height = point_cloud.height;
    
    std::vector<std::pair<float, float> > depth_matches;
    for (size_t row_index = 0; row_index < pc_height; ++row_index)
    {
      for (size_t col_index = 0; col_index < pc_width; ++col_index)
      {
        const pcl::PointXYZRGBA & point_template = point_cloud (col_index, row_index);
        const PointXYZT & point_input = (*cloud_xyz_) (col_index + start_x, row_index + start_y);

        if (!pcl_isfinite (point_template.z) || !pcl_isfinite (point_input.z))
          continue;

        depth_matches.push_back (std::make_pair (point_template.z, point_input.z));
      }
    }

    // apply RANSAC on the matches
    const size_t nr_matches = depth_matches.size ();
    if (nr_matches == 0)
      continue;
    const size_t nr_iterations = 100; // todo: should be a parameter...
    const float inlier_threshold = 0.01f; // 1 cm // todo: should be a parameter...
    size_t best_nr_inliers = 0;
    float best_z_translation = 0.0f;
    for (size_t iteration_index = 0; iteration_index < nr_iterations; ++iteration_index)
    {
      // pick a random match; modulo avoids the integer overflow of rand () * nr_matches
      const size_t rand_index = static_cast<size_t> (rand ()) % nr_matches;

      const float z_translation = depth_matches[rand_index].second - depth_matches[rand_index].first;

      size_t nr_inliers = 0;
      for (size_t match_index = 0; match_index < nr_matches; ++match_index)
      {
        const float error = fabsf (depth_matches[match_index].first + z_translation - depth_matches[match_index].second);

        if (error <= inlier_threshold)
        {
          ++nr_inliers;
        }
      }

      if (nr_inliers > best_nr_inliers)
      {
        best_nr_inliers = nr_inliers;
        best_z_translation = z_translation;
      }
    }

    float average_depth = 0.0f;
    size_t average_counter = 0;
    for (size_t match_index = 0; match_index < nr_matches; ++match_index)
    {
      const float error = fabsf (depth_matches[match_index].first + best_z_translation - depth_matches[match_index].second);

      if (error <= inlier_threshold)
      {
        //average_depth += depth_matches[match_index].second;
        average_depth += depth_matches[match_index].second - depth_matches[match_index].first;
        ++average_counter;
      }
    }
    average_depth /= static_cast<float> (average_counter);

    detection.bounding_box.z = bounding_boxes_[template_id].z + average_depth;// - detection.bounding_box.depth/2.0f;
  }
}
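
The RANSAC above is one-dimensional: each sample hypothesizes a z-translation from a single depth match and the remaining matches vote as inliers. The same logic, isolated as a self-contained helper (hypothetical, not part of pcl::LineRGBD):

#include <cmath>
#include <cstdlib>
#include <utility>
#include <vector>

// Hypothetical standalone version of the 1-D RANSAC used above: estimate the
// offset t that maps template depths (first) onto input depths (second).
float estimateZTranslation(const std::vector<std::pair<float, float> >& matches,
                           size_t iterations = 100, float threshold = 0.01f)
{
  size_t best_inliers = 0;
  float best_t = 0.0f;
  for (size_t it = 0; it < iterations && !matches.empty(); ++it)
  {
    const std::pair<float, float>& s = matches[std::rand() % matches.size()];
    const float t = s.second - s.first;  // candidate translation
    size_t inliers = 0;
    for (size_t m = 0; m < matches.size(); ++m)
      if (std::fabs(matches[m].first + t - matches[m].second) <= threshold)
        ++inliers;
    if (inliers > best_inliers) { best_inliers = inliers; best_t = t; }
  }
  return best_t;
}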
Example #16
  bool generateDistanceFields(void) {
    if(FLAGS_df_list.empty()) {
      return false;
    }
    if (!boost::filesystem::exists(FLAGS_df_list)) {
      return false;
    }

    std::vector<DFItem> df_list;
    std::ifstream fin(FLAGS_df_list);
    std::string source, target;
    int resolution;
    std::set<std::string> folder_set;
    while (fin >> source >> resolution >> target) {
      df_list.push_back(std::make_tuple(source, resolution, target));
      std::string folder = boost::filesystem::path(target).parent_path().string();
      if(folder_set.find(folder) == folder_set.end()) {
        if (!boost::filesystem::exists(folder)) {
          boost::filesystem::create_directories(folder);
          folder_set.insert(folder);
        }
      }
    }
    LOG(INFO) <<  df_list.size() << " items to be processed!" << std::endl;

    if(!FLAGS_skip_converting) {
      int step = 100;
      for (int i = 0, i_end = df_list.size(); i < i_end; i ++) {
        const std::string filename_df = std::get<2>(df_list[i]);
        boost::filesystem::path path(filename_df);
        std::string filename_pcd = path.parent_path().string()+"/"+path.stem().string()+".pcd";

        osg::ref_ptr <PointCloud> point_cloud(new PointCloud);
        if(point_cloud->load(filename_pcd)) {
          LOG(INFO) << "Skipping generating " << filename_pcd << " as it exists and reads well..." << std::endl;
          continue;
        }

        const std::string filename_mesh = std::get<0>(df_list[i]);
        LOG(INFO) << "Converting " << filename_mesh << " into point cloud..." << std::endl;
        osg::ref_ptr <MeshModel> mesh_model(new MeshModel);
        if(!mesh_model->load(filename_mesh)) {
          LOG(ERROR) << "Reading " << filename_mesh << " failed! Skipping it..." << std::endl;
          continue;
        }

        point_cloud->data()->clear();
        double grid_size = mesh_model->sampleScan(point_cloud->data(), 100, 0.0);
        point_cloud->buildTree();
        point_cloud->voxelGridFilter(grid_size/2, true);
        point_cloud->save(filename_pcd);

        if ((i+1)%step == 0) {
          LOG(INFO) << "Converted " << (i+1) << " items! (total item number: " << i_end << ")" << std::endl;
        }
      }
    }

    if (!FLAGS_skip_generation) {
      // leave a few cores free; fall back to a single thread if
      // hardware_concurrency() is small or unknown (it may return 0)
      const unsigned int hw = std::thread::hardware_concurrency();
      const unsigned int n = (hw > 4) ? hw - 4 : 1;
      LOG(INFO) << n << " threads will be used!" << std::endl;

      std::vector<std::thread> threads;
      for (unsigned int i = 0; i < n; ++ i) {
        std::thread thread_obj(generateDistanceField, df_list, n, i);
        threads.push_back(std::move(thread_obj));
      }

      for (unsigned int i = 0; i < n; ++ i) {
        threads[i].join();
      }
      LOG(INFO) << "Distance field generation done!" << std::endl;
    }

    return true;
  }
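
The worker signature generateDistanceField(df_list, n, i) suggests each of the n threads processes a strided slice of the list. A sketch of that partition pattern (the body is a placeholder, not the actual implementation; DFItem comes from the surrounding code):

#include <cstddef>
#include <vector>

// Sketch of the strided partition implied by the (list, stride, offset)
// arguments above: worker `offset` handles items offset, offset+n, offset+2n, ...
void processStrided(const std::vector<DFItem>& df_list,
                    unsigned int stride, unsigned int offset)
{
  for (std::size_t i = offset; i < df_list.size(); i += stride)
  {
    // ... generate the distance field for df_list[i] ...
  }
}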
void keypoint_map::align_z_axis() {

	pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud(
			new pcl::PointCloud<pcl::PointXYZ>);

	for (int v = 0; v < depth_imgs[0].rows; v++) {
		for (int u = 0; u < depth_imgs[0].cols; u++) {
			if (depth_imgs[0].at<unsigned short>(v, u) != 0) {
				pcl::PointXYZ p;

				p.z = depth_imgs[0].at<unsigned short>(v, u) / 1000.0f;
				p.x = (u - intrinsics[2]) * p.z / intrinsics[0];
				p.y = (v - intrinsics[3]) * p.z / intrinsics[1];

				p.getVector4fMap() = camera_positions[0] * p.getVector4fMap();

				point_cloud->push_back(p);

			}
		}
	}

	pcl::ModelCoefficients::Ptr coefficients(new pcl::ModelCoefficients);
	pcl::PointIndices::Ptr inliers(new pcl::PointIndices);
	// Create the segmentation object
	pcl::SACSegmentation<pcl::PointXYZ> seg;
	// Optional
	seg.setOptimizeCoefficients(true);
	// Mandatory
	seg.setModelType(pcl::SACMODEL_PLANE);
	seg.setMethodType(pcl::SAC_RANSAC);
	seg.setDistanceThreshold(0.005);
	seg.setProbability(0.99);

	seg.setInputCloud(point_cloud);
	seg.segment(*inliers, *coefficients);

	std::cerr << "Model coefficients: " << coefficients->values[0] << " "
			<< coefficients->values[1] << " " << coefficients->values[2] << " "
			<< coefficients->values[3] << " Num inliers "
			<< inliers->indices.size() << std::endl;

	Eigen::Affine3f transform = Eigen::Affine3f::Identity();
	if (coefficients->values[2] > 0) {
		transform.matrix().coeffRef(0, 2) = coefficients->values[0];
		transform.matrix().coeffRef(1, 2) = coefficients->values[1];
		transform.matrix().coeffRef(2, 2) = coefficients->values[2];
	} else {
		transform.matrix().coeffRef(0, 2) = -coefficients->values[0];
		transform.matrix().coeffRef(1, 2) = -coefficients->values[1];
		transform.matrix().coeffRef(2, 2) = -coefficients->values[2];
	}

	transform.matrix().col(0).head<3>() =
			transform.matrix().col(1).head<3>().cross(
					transform.matrix().col(2).head<3>());
	transform.matrix().col(1).head<3>() =
			transform.matrix().col(2).head<3>().cross(
					transform.matrix().col(0).head<3>());

	transform = transform.inverse();

	transform.matrix().coeffRef(2, 3) = coefficients->values[3];

	pcl::transformPointCloud(keypoints3d, keypoints3d, transform);

	for (size_t i = 0; i < camera_positions.size(); i++) {
		camera_positions[i] = transform * camera_positions[i];
	}

}
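
A more compact way to build the same alignment, assuming the goal is rotating the fitted plane normal onto the z-axis, is Eigen's Quaternion::FromTwoVectors. A rough equivalent of the hand-built basis above (same sign handling and z-offset):

#include <Eigen/Geometry>
#include <pcl/ModelCoefficients.h>

// Rough equivalent sketch: rotate the (upward-facing) plane normal onto +Z,
// then shift along z by the plane offset d, as the code above does.
Eigen::Affine3f alignPlaneToZ(const pcl::ModelCoefficients& coefficients)
{
  Eigen::Vector3f n(coefficients.values[0], coefficients.values[1],
                    coefficients.values[2]);
  if (n.z() < 0)  // keep the normal pointing up, as in the branch above
    n = -n;
  Eigen::Affine3f T(Eigen::Quaternionf::FromTwoVectors(n, Eigen::Vector3f::UnitZ()));
  T.matrix()(2, 3) = coefficients.values[3];  // plane offset d
  return T;
}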
void keypoint_map::extract_surface() {

	octomap::ColorOcTree tree(0.05f);

	pcl::PointCloud<pcl::PointXYZRGBA>::Ptr point_cloud(
			new pcl::PointCloud<pcl::PointXYZRGBA>);

	for (size_t i = 0; i < depth_imgs.size(); i++) {

		cv::imwrite("rgb/" + boost::lexical_cast<std::string>(i) + ".png",
				rgb_imgs[i]);
		cv::imwrite("depth/" + boost::lexical_cast<std::string>(i) + ".png",
				depth_imgs[i]);

		octomap::Pointcloud octomap_cloud;
		octomap::point3d sensor_origin(0.0, 0.0, 0.0);

		Eigen::Vector3f trans(camera_positions[i].translation());
		Eigen::Quaternionf rot(camera_positions[i].rotation());

		octomap::pose6d frame_origin(
				octomath::Vector3(trans.x(), trans.y(), trans.z()),
				octomath::Quaternion(rot.w(), rot.x(), rot.y(), rot.z()));
		//std::cerr << camera_positions[i].matrix() << std::endl;

		for (int v = 0; v < depth_imgs[i].rows; v++) {
			for (int u = 0; u < depth_imgs[i].cols; u++) {
				if (depth_imgs[i].at<unsigned short>(v, u) != 0) {
					pcl::PointXYZRGBA p;
					p.z = depth_imgs[i].at<unsigned short>(v, u) / 1000.0f;
					p.x = (u - intrinsics[2]) * p.z / intrinsics[0];
					p.y = (v - intrinsics[3]) * p.z / intrinsics[1];
					cv::Vec3b brg = rgb_imgs[i].at<cv::Vec3b>(v, u);
					p.r = brg[2];
					p.g = brg[1];
					p.b = brg[0];
					p.a = 255;

					Eigen::Vector4f tmp = camera_positions[i]
							* p.getVector4fMap();

					if (tmp[2] < 2.0) {

						octomap_cloud.push_back(p.x, p.y, p.z);
						p.getVector4fMap() = tmp;

						octomap::point3d endpoint(p.x, p.y, p.z);
						octomap::ColorOcTreeNode* n = tree.search(endpoint);
						if (n) {
							n->setColor(p.r, p.g, p.b);
						}

						//ROS_INFO("Point %f %f %f from  %f %f ", p.x, p.y, p.z, keypoints[i].pt.x, keypoints[i].pt.y);

						point_cloud->push_back(p);

					}

				}
			}
		}
		tree.insertScan(octomap_cloud, sensor_origin, frame_origin);
		tree.updateInnerOccupancy();
	}

	tree.write("room.ot");

	pcl::io::savePCDFileASCII("room.pcd", *point_cloud);

	pcl::PointCloud<pcl::PointXYZRGBA>::Ptr point_cloud_filtered(
			new pcl::PointCloud<pcl::PointXYZRGBA>);
	pcl::VoxelGrid<pcl::PointXYZRGBA> sor;
	sor.setInputCloud(point_cloud);
	sor.setLeafSize(0.05f, 0.05f, 0.05f);
	sor.filter(*point_cloud_filtered);

	pcl::io::savePCDFileASCII("room_sub.pcd", *point_cloud_filtered);

}
Example #19
int main(int argc, char** argv)
{
	/// load calibration data
	cv::Mat leftCameraMatrix; //3x3 floating-point left camera matrix
	cv::Mat rightCameraMatrix; //3x3 floating-point right camera matrix
	cv::Mat leftDistCoeffs; //8x1 vector of left distortion coefficients 
	cv::Mat rightDistCoeffs; //8x1 vector of right distortion coefficients 
	cv::Mat R1;			// 3x3 rectification transform (rotation matrix) for the first camera
	cv::Mat R2;			// 3x3 rectification transform (rotation matrix) for the second camera
	cv::Mat P1;			// 3x4 projection matrix in the new (rectified) coordinate systems for the first camera
	cv::Mat P2;			// 3x4 projection matrix in the new (rectified) coordinate systems for the second camera
	cv::Mat Q;			// 4x4 disparity-to-depth mapping matrix

	//load all calibration parameters from outputCalibration.xml
	cv::FileStorage fsR("config/outputCalibration.xml", cv::FileStorage::READ);
	fsR["leftCameraMatrix"] >> leftCameraMatrix;                                      
    fsR["rightCameraMatrix"] >> rightCameraMatrix;
    fsR["leftDistCoeffs"] >> leftDistCoeffs;
    fsR["rightDistCoeffs"] >> rightDistCoeffs;
    fsR["R1"] >> R1;
    fsR["R2"] >> R2;
    fsR["P1"] >> P1;
    fsR["P2"] >> P2;
    fsR["Q"] >> Q;

    //load test images
	std::string left_filename, right_filename;
	left_filename="../../dataset/test/left.jpg";
	right_filename="../../dataset/test/right.jpg";
	cv::Mat left_image = cv::imread(left_filename);
	cv::Mat right_image = cv::imread(right_filename);

	cv::Size image_size = left_image.size(); //size of test images

	//left and right undistort and rectification maps
	cv::Mat left_map1;
	cv::Mat left_map2;
	cv::Mat right_map1;
	cv::Mat right_map2;

	//timers
	clock_t init, timeComplete;
	init=clock(); //start timer

	/// create rectification maps
	cv::initUndistortRectifyMap(leftCameraMatrix, leftDistCoeffs, R1, P1, image_size, CV_32FC1, left_map1, left_map2);
	cv::initUndistortRectifyMap(rightCameraMatrix, rightDistCoeffs, R2, P2, image_size, CV_32FC1, right_map1, right_map2);
	
	//remap images
	cv::Mat left_image_remap;
	cv::Mat right_image_remap;
	
	/// use the maps to rectify the images
	cv::remap(left_image, left_image_remap, left_map1, left_map2, cv::INTER_LINEAR, cv::BORDER_CONSTANT, cv::Scalar());
	cv::remap(right_image, right_image_remap, right_map1, right_map2, cv::INTER_LINEAR, cv::BORDER_CONSTANT, cv::Scalar());

	//show undistorted and rectified test images;
	/*cv::namedWindow("Original left image");
	cv::imshow("Original left image",left_image);
	cv::namedWindow("Original right image");
	cv::imshow("Original right image",right_image);
	cv::namedWindow("Rectified left image");
	cv::imshow("Rectified left image",left_image_remap);
	cv::namedWindow("Rectified right image");
	cv::imshow("Rectified right image",right_image_remap);
	cv::waitKey(0);
	return 0;*/
		
	/// compute the disparity
	cv::StereoSGBM sgbm;
	sgbm.preFilterCap = 100;
	sgbm.SADWindowSize = 5;
	sgbm.P1 = 8 * left_image_remap.channels() * sgbm.SADWindowSize * sgbm.SADWindowSize;
	sgbm.P2 = 32 * left_image_remap.channels() * sgbm.SADWindowSize * sgbm.SADWindowSize;
	sgbm.minDisparity = 40;
	sgbm.numberOfDisparities = 256;
	sgbm.uniquenessRatio = 10;
	sgbm.speckleWindowSize = 200;
	sgbm.speckleRange = 2;
	sgbm.disp12MaxDiff = 0;
		
	cv::Mat disparity_image;
	sgbm(left_image_remap, right_image_remap, disparity_image);

	//show disparity image;
	/*
	cv::namedWindow("Disparity");
	cv::imshow("Disparity",disparity_image);
	cv::waitKey(0);
	return 0;*/

	/// convert to 3D points
	cv::Mat cloud_image; 
	cv::reprojectImageTo3D(disparity_image, cloud_image, Q);

	//convert cloud_image into point format of PCL
	pcl::PointCloud<pcl::PointXYZRGB>::Ptr point_cloud (new pcl::PointCloud<pcl::PointXYZRGB>);
	float px, py, pz;
	int pr, pg, pb;

	//scan cloud_image and convert its information in a Point Cloud
  	for (int i = 0; i < cloud_image.rows; i++)
  	{
  		for (int j = 0; j < cloud_image.cols; j++)
  		{

  			px= cloud_image.at<cv::Vec3f>(i,j)[0];
  			py= cloud_image.at<cv::Vec3f>(i,j)[1];
  			pz= cloud_image.at<cv::Vec3f>(i,j)[2];

  			pb= left_image.at<cv::Vec3b>(i,j)[0];
  			pg= left_image.at<cv::Vec3b>(i,j)[1];
  			pr= left_image.at<cv::Vec3b>(i,j)[2];

      		//Insert info into point cloud structure
      		pcl::PointXYZRGB point;
      		point.x = px;
      		point.y = py;
      		point.z = pz;

      		point.r = pr;
      		point.g = pg;
      		point.b = pb;

      		//discard infinite points and outliers outside the working volume
      		if(point.x>-30 && point.x<30 && point.y>-30 && point.y<30 && point.z>-3 && point.z <3){ 
      			point_cloud->points.push_back (point);
      		}
      	}
  	}

	/// visualize 3D points
	pcl::visualization::PCLVisualizer visualizer("PCL visualizer");
	pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(point_cloud);
	visualizer.addPointCloud<pcl::PointXYZRGB> (point_cloud, rgb, "point_cloud");
	timeComplete = clock() - init; //final time
	std::cout << "Reconstruction complete in: " << double(timeComplete) / CLOCKS_PER_SEC << " s" << std::endl;
	visualizer.spin();
	return 0;
}
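
For reference, reprojectImageTo3D applies the 4x4 matrix Q to every pixel: [X Y Z W]^T = Q * [u v d(u,v) 1]^T, with the 3-D point (X/W, Y/W, Z/W); for a standard rectified rig this reduces to Z = f*Tx/d. A manual per-pixel sketch of the same mapping (assuming Q is CV_64F, as stereoRectify produces):

#include <opencv2/core/core.hpp>

// Sketch: what reprojectImageTo3D computes per pixel, written out with the
// homogeneous disparity-to-depth matrix Q.
cv::Point3f reprojectPixel(const cv::Mat& Q, int u, int v, float disparity)
{
  cv::Mat x = (cv::Mat_<double>(4, 1) << u, v, disparity, 1.0);
  cv::Mat X = Q * x;  // homogeneous 3-D point
  const double w = X.at<double>(3, 0);
  return cv::Point3f(X.at<double>(0, 0) / w,
                     X.at<double>(1, 0) / w,
                     X.at<double>(2, 0) / w);
}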
Example #20
void IbeoSourceWrapper::run()
{
    // Create ibeo source
    icl_sourcesink::SourceSinkManager manager;
    nibeo::IbeoSourceNoAPI::Ptr ibeo_source = manager.createSource<nibeo::IbeoMsg>(m_ibeo_uri, std::vector<std::string>(), true);
    nncom::NcomSource::Ptr ncom_source = manager.createSource<nncom::Ncom>(m_ncom_uri);

    // We create a data element ptr that we will use to work on.
    nibeo::IbeoMsgStamped::Ptr ibeo_element;
    nncom::NcomStamped::Ptr ncom_element;
    icl_gps::AbsolutePose* ref_pose = NULL;

    // for each time step
    for (; manager.good(); manager.advance())
    {
       ibeo_element = ibeo_source->get();
       ncom_element = ncom_source->get();

      // Skip messages other than IbeoScanMsg
      nibeo::IbeoScanMsg scan_msg;
      if(!scan_msg.fromIbeoMsg(*ibeo_element))
          continue;

      LOGGING_INFO(Gpu_voxels, "Number of points in ibeo msg: " << scan_msg.number_of_points << endl);

      // Copy points to std::vector
      std::vector<Vector3f> point_cloud(scan_msg.number_of_points);
      for(int i = 0; i < scan_msg.number_of_points; ++i)
      {
          point_cloud[i].x = (*scan_msg.scan_points)[i].x;
          point_cloud[i].y = (*scan_msg.scan_points)[i].y;
          point_cloud[i].z = (*scan_msg.scan_points)[i].z;
      }

      // Handle position
      nncom::Ncom ncom_msg = *ncom_element; // Unwrap message
      icl_gps::AbsolutePose abs_pose(ncom_msg->mLon, ncom_msg->mLat, ncom_msg->mAlt, ncom_msg->mTrack);
       LOGGING_INFO(Gpu_voxels, "Current pose. long: " << abs_pose.longitude() << " lat: " << abs_pose.latitude() << " alt: " << abs_pose.altitude() << " bear: " << abs_pose.bearing() << endl);

      if(!ref_pose) // Use first position as reference
      {
          ref_pose = new icl_gps::AbsolutePose(abs_pose.longitude(), abs_pose.latitude(), abs_pose.altitude(), abs_pose.bearing());
          LOGGING_INFO(Gpu_voxels, "Set reference pose. long: " << abs_pose.longitude() << " lat: " << abs_pose.latitude() << " alt: " << abs_pose.altitude() << " bear: " << abs_pose.bearing() << endl);
      }
      icl_gps::RelativePose rel_pose = abs_pose - *ref_pose;
      icl_math::Pose2d rel_pose2d = rel_pose.toPose2d();

      LOGGING_INFO(Gpu_voxels, "Relative pose. north: " << rel_pose.north() << " east: " << rel_pose.east() << " alt: " << rel_pose.altitude() << endl);

      Matrix4f matrix;

      // copy pose2d rotation matrix
      matrix.a11 = rel_pose2d(0,0);     matrix.a12 = rel_pose2d(0,1);
      matrix.a21 = rel_pose2d(1,0);     matrix.a22 = rel_pose2d(1,1);

      // copy pose2d translation vector
      matrix.a14 = rel_pose2d(0,2);
      matrix.a24 = rel_pose2d(1,2);

      // set the general structure of the transformation matrix
      matrix.a33 = 1.0f;
      matrix.a44 = 1.0f;

      // set z-translation (altitude)
      matrix.a34 = static_cast<float>(rel_pose.altitude());

      // set additional translation
      matrix.a14 += m_additional_translation.x;
      matrix.a24 += m_additional_translation.y;
      matrix.a34 += m_additional_translation.z;

      LOGGING_INFO(Gpu_voxels, "Matrix4f: " << endl << matrix.a11 << " " << matrix.a12 << " " << matrix.a13 << " " << matrix.a14 << endl
                   << matrix.a21 << " " << matrix.a22 << " " << matrix.a23 << " " << matrix.a24 << endl
                   << matrix.a31 << " " << matrix.a32 << " " << matrix.a33 << " " << matrix.a34 << endl
                   << matrix.a41 << " " << matrix.a42 << " " << matrix.a43 << " " << matrix.a44 << endl);

      LOGGING_INFO(Gpu_voxels, "Transform point cloud..." << endl);

      CudaMath::transform(point_cloud, matrix); // todo: keep point cloud in gpu mem

      m_callback(point_cloud);
    }
}
void fi::MSHRFileIO::get3DTo2DProjectionsUniBW()
{
	//collect the points and images
	fi::MSHR3D2DTableParser mshr_results(m_mshr_result_dir, m_image_data_dir, m_image_extension);
	pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud(new pcl::PointCloud<pcl::PointXYZ>());
	std::vector<std::vector<unsigned int> > corresponding_images; //image names used to extract the corresponding poses
	std::vector<std::vector<std::string> > mapping_table; 
	bool parse_data = mshr_results.parseInput(point_cloud, corresponding_images, mapping_table);
	
	//get the full paths to the images
	std::vector<std::vector<std::string> > corresponding_images_filenames;
	bool image_present = mshr_results.getCorrespondingImageFiles(corresponding_images, corresponding_images_filenames);

	//get the calibration data file
	fi::CamCalibParser m_cam(m_mshr_result_dir);
	Eigen::Matrix3f cam_calib;
	m_cam.getCameraCalib(cam_calib);
	std::cout<<cam_calib<<std::endl;

	//get the pose
	std::vector<std::vector<Eigen::Matrix3f> > rotation_matrix;
	std::vector<std::vector<Eigen::Vector3f> > translation_vectors;
	std::vector<std::vector<float> > radial_distortion_params_k1;
	std::vector<std::vector<float> > radial_distortion_params_k2;
	std::vector<std::vector<unsigned int> > image_width;
	std::vector<std::vector<unsigned int> > image_height;
	std::string unibw_params_folder = m_unibw_params_dir;
	
	fi::CamPOSEParser data_pose(m_unibw_params_dir, corresponding_images);
	bool extracted = data_pose.getCamPose(rotation_matrix, translation_vectors, radial_distortion_params_k1, radial_distortion_params_k2, image_width, image_height);
   
	//project to image
	unsigned int num_pnts = translation_vectors.size();
	for ( unsigned int i = 0; i < num_pnts; i++)
	{
		//collect for the point all the values of the pose
		std::vector<Eigen::Matrix3f> pnt_rotation_matrix = rotation_matrix.at(i);
		std::vector<Eigen::Vector3f> pnt_translation_vectors = translation_vectors.at(i);
		std::vector<float> pnt_radial_distortion_params_k1 = radial_distortion_params_k1.at(i);
		std::vector<float> pnt_radial_distortion_params_k2 = radial_distortion_params_k2.at(i);
		std::vector<unsigned int> pnt_image_width = image_width.at(i);
		std::vector<unsigned int> pnt_image_height = image_height.at(i);
		
		//
		unsigned int pnt_num_images = pnt_radial_distortion_params_k1.size();
		std::vector<Eigen::Vector2f> pnt_img_points;

		for (unsigned int j = 0; j < pnt_num_images; j++)
		{
			//for the selected image compute all the corresponding values
			const Eigen::Matrix3f &rot_mat = pnt_rotation_matrix.at(j);
			const Eigen::Vector3f &trans_vec = pnt_translation_vectors.at(j);
			float undist1 = pnt_radial_distortion_params_k1.at(j);
			float undist2 = pnt_radial_distortion_params_k2.at(j);
			unsigned int img_w = pnt_image_width.at(j);
			unsigned int img_h = pnt_image_height.at(j);
			Eigen::Vector2f an_image_point;

			getProjectionPointOnImage(point_cloud->points[i], cam_calib, rot_mat, trans_vec, undist1, undist2, img_w, img_h, an_image_point);
			pnt_img_points.push_back(an_image_point);
		}

		//collect all corresponding points
		m_image_points2D.push_back(pnt_img_points);
	}

	m_cloud = point_cloud;
	m_corresponding_images_filenames = corresponding_images_filenames;
	m_mapping_table = mapping_table;
}
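
getProjectionPointOnImage is not shown here; for a pinhole camera with two radial distortion coefficients it would plausibly follow the standard model: transform to camera coordinates, normalize, distort radially, then apply the calibration matrix. A hypothetical sketch of that model (the actual implementation may differ):

#include <Eigen/Core>

// Hypothetical sketch of a pinhole projection with two radial distortion
// terms (k1, k2), matching the per-image parameters gathered above.
Eigen::Vector2f projectPoint(const Eigen::Vector3f& X,
                             const Eigen::Matrix3f& K,
                             const Eigen::Matrix3f& R,
                             const Eigen::Vector3f& t,
                             float k1, float k2)
{
  const Eigen::Vector3f pc = R * X + t;            // camera coordinates
  const float xn = pc.x() / pc.z();                // normalized image plane
  const float yn = pc.y() / pc.z();
  const float r2 = xn * xn + yn * yn;
  const float s = 1.0f + k1 * r2 + k2 * r2 * r2;   // radial distortion factor
  const Eigen::Vector3f uv = K * Eigen::Vector3f(s * xn, s * yn, 1.0f);
  return Eigen::Vector2f(uv.x(), uv.y());
}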