string Mapper::serializeTransParamCSV(const PM::TransformationParameters T, const bool getHeader, const string prefix)
{
  std::stringstream stream;
  for(int col = 0; col < T.cols(); col++)
  {
    for(int row = 0; row < T.rows(); row++)
    {
      if(getHeader)
      {
        stream << prefix << row << col;
      }
      else
      {
        stream << T(row, col);
      }

      if((col != (T.cols() - 1)) || (row != (T.rows() - 1)))
      {
        stream << ", ";
      }
    }
  }

  return stream.str();
}
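For reference, a hedged sketch of how this serializer could be called from inside Mapper, for example to dump the accumulated trajectory for offline analysis. The helper name savePathToCsv and the output file are hypothetical; it assumes path is a std::vector-like container of PM::TransformationParameters, which the processCloud() listing below does maintain, and that <fstream> is included.

// Hypothetical helper, not part of the original source.
void Mapper::savePathToCsv(const std::string& fileName)
{
  std::ofstream csv(fileName.c_str());
  // One header line ("T00, T10, T20, ..."), then one row per accumulated pose.
  csv << serializeTransParamCSV(PM::TransformationParameters::Identity(4, 4), true, "T") << endl;
  for (size_t i = 0; i < path.size(); ++i)
    csv << serializeTransParamCSV(path[i], false, "T") << endl;
}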
// IMPORTANT: We need to receive the point clouds in local coordinates (scanner or robot)
void Mapper::processCloud(unique_ptr<DP> newPointCloud, const std::string& scannerFrame, const ros::Time& stamp, uint32_t seq)
{
  // if the future has completed, use the new map
  processNewMapIfAvailable(); // This call locks the tf publication
  cerr << "received new map" << endl;

  timer t;

  processingNewCloud = true;
  BoolSetter stopProcessingSetter(processingNewCloud, false);

  // Convert point cloud
  const size_t goodCount(newPointCloud->features.cols());
  if (goodCount == 0)
  {
    ROS_ERROR("I found no good points in the cloud");
    return;
  }

  // Dimension of the point cloud, important since we handle 2D and 3D
  const int dimp1(newPointCloud->features.rows());

  if(!(newPointCloud->descriptorExists("stamps_Msec") && newPointCloud->descriptorExists("stamps_sec") && newPointCloud->descriptorExists("stamps_nsec")))
  {
    const float Msec = round(stamp.sec/1e6);
    const float sec = round(stamp.sec - Msec*1e6);
    const float nsec = round(stamp.nsec);

    const PM::Matrix desc_Msec = PM::Matrix::Constant(1, goodCount, Msec);
    const PM::Matrix desc_sec = PM::Matrix::Constant(1, goodCount, sec);
    const PM::Matrix desc_nsec = PM::Matrix::Constant(1, goodCount, nsec);

    newPointCloud->addDescriptor("stamps_Msec", desc_Msec);
    newPointCloud->addDescriptor("stamps_sec", desc_sec);
    newPointCloud->addDescriptor("stamps_nsec", desc_nsec);

    cout << "Adding time" << endl;
  }

  ROS_INFO("Processing new point cloud");
  {
    timer t; // Print how long the algorithm takes

    // Apply filters to incoming cloud, in scanner coordinates
    inputFilters.apply(*newPointCloud);

    ROS_INFO_STREAM("Input filters took " << t.elapsed() << " [s]");
  }

  string reason;

  // Initialize the transformation to identity if empty
  if(!icp.hasMap())
  {
    // we need to know the dimensionality of the point cloud to initialize properly
    publishLock.lock();
    TOdomToMap = PM::TransformationParameters::Identity(dimp1, dimp1);
    TOdomToMap(2,3) = mapElevation;
    publishLock.unlock();
  }

  // Fetch transformation from scanner to odom
  // Note: we don't need to wait for transform. It is already called in transformListenerToEigenMatrix()
  PM::TransformationParameters TOdomToScanner;
  try
  {
    TOdomToScanner = PointMatcher_ros::eigenMatrixToDim<float>(
      PointMatcher_ros::transformListenerToEigenMatrix<float>(
        tfListener,
        scannerFrame,
        odomFrame,
        stamp
      ), dimp1);
  }
  catch(const tf::ExtrapolationException& e)
  {
    ROS_ERROR_STREAM("Extrapolation Exception. stamp = " << stamp << " now = " << ros::Time::now() << " delta = " << ros::Time::now() - stamp << endl << e.what());
    return;
  }

  ROS_DEBUG_STREAM("TOdomToScanner(" << odomFrame << " to " << scannerFrame << "):\n" << TOdomToScanner);
  ROS_DEBUG_STREAM("TOdomToMap(" << odomFrame << " to " << mapFrame << "):\n" << TOdomToMap);

  const PM::TransformationParameters TscannerToMap = TOdomToMap * TOdomToScanner.inverse();
  ROS_DEBUG_STREAM("TscannerToMap (" << scannerFrame << " to " << mapFrame << "):\n" << TscannerToMap);

  // Ensure a minimum amount of points after filtering
  const int ptsCount = newPointCloud->features.cols();
  if(ptsCount < minReadingPointCount)
  {
    ROS_ERROR_STREAM("Not enough points in newPointCloud: only " << ptsCount << " pts.");
    return;
  }

  // Initialize the map if empty
  if(!icp.hasMap())
  {
    ROS_INFO_STREAM("Creating an initial map");
    mapCreationTime = stamp;
    setMap(updateMap(newPointCloud.release(), TscannerToMap, false));
    // we must not delete newPointCloud because we just stored it in the mapPointCloud
    return;
  }

  // Check dimension
  if (newPointCloud->features.rows() != icp.getInternalMap().features.rows())
  {
    ROS_ERROR_STREAM("Dimensionality mismatch: incoming cloud is " << newPointCloud->features.rows()-1 << " while map is " << icp.getInternalMap().features.rows()-1);
    return;
  }

  try
  {
    // Apply ICP
    PM::TransformationParameters Ticp;
    Ticp = icp(*newPointCloud, TscannerToMap);
    Ticp = transformation->correctParameters(Ticp);

    // extract corrections
    PM::TransformationParameters Tdelta = Ticp * TscannerToMap.inverse();

    // ISER
    //{
    //  // remove roll and pitch
    //  Tdelta(2,0) = 0;
    //  Tdelta(2,1) = 0;
    //  Tdelta(2,2) = 1;
    //  Tdelta(0,2) = 0;
    //  Tdelta(1,2) = 0;
    //  Tdelta(2,3) = 0; //z
    //  Tdelta.block(0,0,3,3) = transformation->correctParameters(Tdelta.block(0,0,3,3));
    //  Ticp = Tdelta*TscannerToMap;
    //  ROS_DEBUG_STREAM("Ticp:\n" << Ticp);
    //}

    // Ensure minimum overlap between scans
    const double estimatedOverlap = icp.errorMinimizer->getOverlap();
    ROS_INFO_STREAM("Overlap: " << estimatedOverlap);
    if (estimatedOverlap < minOverlap)
    {
      ROS_ERROR_STREAM("Estimated overlap too small, ignoring ICP correction!");
      return;
    }

    // Compute tf
    publishStamp = stamp;
    publishLock.lock();
    TOdomToMap = Ticp * TOdomToScanner;

    PM::TransformationParameters Terror = TscannerToMap.inverse() * Ticp;

    cerr << "Correcting translation error of " << Terror.block(0,3, 3,1).norm() << " m" << endl;

    // Add transformation to path
    path.push_back(Ticp);
    errors.push_back(Terror);

    // Publish tf
    if(publishMapTf == true)
    {
      tfBroadcaster.sendTransform(PointMatcher_ros::eigenMatrixToStampedTransform<float>(TOdomToMap, mapFrame, odomFrame, stamp));
    }

    publishLock.unlock();
    processingNewCloud = false;

    ROS_DEBUG_STREAM("TOdomToMap:\n" << TOdomToMap);

    // Publish odometry
    if (odomPub.getNumSubscribers())
      odomPub.publish(PointMatcher_ros::eigenMatrixToOdomMsg<float>(Tdelta, mapFrame, stamp));

    // TODO: check that, might be wrong....
    // Publish error on odometry
    if (odomErrorPub.getNumSubscribers())
      odomErrorPub.publish(PointMatcher_ros::eigenMatrixToOdomMsg<float>(TOdomToMap, mapFrame, stamp));

    // ***Debug:
    //debugPub.publish(PointMatcher_ros::pointMatcherCloudToRosMsg<float>(transformation->compute(*newPointCloud, Ticp), mapFrame, mapCreationTime));
    // ***

    // check if new points should be added to the map
    if (
      mapping &&
      ((estimatedOverlap < maxOverlapToMerge) || (icp.getInternalMap().features.cols() < minMapPointCount)) &&
      (!mapBuildingInProgress)
    )
    {
      cout << "map Creation..." << endl;

      // make sure we process the last available map
      mapCreationTime = stamp;
      ROS_INFO("Adding new points to the map in background");
      mapBuildingTask = MapBuildingTask(boost::bind(&Mapper::updateMap, this, newPointCloud.release(), Ticp, true));
      mapBuildingFuture = mapBuildingTask.get_future();
      mapBuildingThread = boost::thread(boost::move(boost::ref(mapBuildingTask)));
      mapBuildingInProgress = true;
    }
  }
  catch (const PM::ConvergenceError& error)
  {
    ROS_ERROR_STREAM("ICP failed to converge: " << error.what());
    return;
  }

  // Statistics about time and real-time capability
  int realTimeRatio = 100*t.elapsed() / (stamp.toSec()-lastPoinCloudTime.toSec());
  ROS_INFO_STREAM("[TIME] Total ICP took: " << t.elapsed() << " [s]");
  if(realTimeRatio < 80)
    ROS_INFO_STREAM("[TIME] Real-time capability: " << realTimeRatio << "%");
  else
    ROS_WARN_STREAM("[TIME] Real-time capability: " << realTimeRatio << "%");

  lastPoinCloudTime = stamp;
}
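The frame bookkeeping in processCloud() is easy to get backwards, so here is a minimal standalone sanity check of the algebra, not part of the original source and using plain Eigen matrices with made-up numbers: the update TOdomToMap = Ticp * TOdomToScanner is exactly what makes the recomputed scanner-to-map prior equal to the ICP-corrected pose, as long as the odometry reading has not changed.

// Standalone sketch (hypothetical, Eigen only): exercises the frame algebra used above.
#include <Eigen/Dense>
#include <iostream>

int main()
{
  typedef Eigen::Matrix4f T;

  T TOdomToScanner = T::Identity();
  TOdomToScanner(0, 3) = 1.5f;           // pretend odometry says the scanner moved 1.5 m in x
  T TOdomToMap = T::Identity();          // previous map correction
  const T TscannerToMap = TOdomToMap * TOdomToScanner.inverse(); // prior fed to ICP

  T Ticp = TscannerToMap;
  Ticp(1, 3) += 0.1f;                    // pretend ICP corrected the pose by 10 cm in y

  TOdomToMap = Ticp * TOdomToScanner;    // same update as in processCloud()

  // With an unchanged odometry reading, the recomputed prior now equals the ICP result:
  std::cout << (TOdomToMap * TOdomToScanner.inverse() - Ticp).norm() << std::endl; // ~0
  return 0;
}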
// Point cloud processing
void MapMaintener::processCloud(DP newPointCloud, const TP TScannerToMap)
{
  string reason;
  timer t;

  // Convert point cloud
  const size_t goodCount(newPointCloud.features.cols());
  if (goodCount == 0)
  {
    ROS_ERROR("I found no good points in the cloud");
    return;
  }

  ROS_INFO("Processing new point cloud");
  {
    timer t; // Print how long the algorithm takes

    // Apply filters to incoming cloud, in scanner coordinates
    inputFilters.apply(newPointCloud);

    ROS_INFO_STREAM("Input filters took " << t.elapsed() << " [s]");
  }

  // Correct new points using ICP result and move them in their own frame
  //cout << "TObjectToMap: " << endl << TObjectToMap << endl;
  //cout << "TScannerToMap: " << endl << TScannerToMap << endl;
  //cout << "concat: " << endl << TObjectToMap.inverse() * TScannerToMap << endl;

  newPointCloud = transformation->compute(newPointCloud, transformation->correctParameters(TObjectToMap.inverse() * TScannerToMap));

  // Preparation of cloud for inclusion in map
  mapPreFilters.apply(newPointCloud);

  // FIXME put that as parameter
  if(newPointCloud.features.cols() < 400)
    return;

  // Generate first map
  if(!mapPointCloud.features.rows())
  {
    mapPointCloud = newPointCloud;
    return;
  }

  // Check dimension
  if (newPointCloud.features.rows() != mapPointCloud.features.rows())
  {
    ROS_ERROR_STREAM("Dimensionality mismatch: incoming cloud is " << newPointCloud.features.rows()-1 << " while map is " << mapPointCloud.features.rows()-1);
    return;
  }

  PM::TransformationParameters Tcorr;
  try
  {
    Tcorr = icp(newPointCloud, mapPointCloud);
    TObjectToMap = TObjectToMap * Tcorr.inverse();
  }
  catch (const PM::ConvergenceError& error)
  {
    ROS_WARN_STREAM("ICP failed to converge: " << error.what());
    return;
  }

  const double estimatedOverlap = icp.errorMinimizer->getOverlap();
  if(estimatedOverlap < 0.40)
  {
    ROS_WARN_STREAM("Estimated overlap too small: " << estimatedOverlap);
    return;
  }

  ROS_DEBUG_STREAM("Tcorr:\n" << Tcorr);
  cout << "Tcorr: " << endl << Tcorr << endl;

  newPointCloud = transformation->compute(newPointCloud, Tcorr);

  // Merge point clouds to map
  mapPointCloud.concatenate(newPointCloud);

  // Map maintenance
  mapPostFilters.apply(mapPointCloud);

  ROS_INFO_STREAM("New map available (" << mapPointCloud.features.cols() << " pts), update took " << t.elapsed() << " [s]");

  // Publish map point cloud
  // FIXME this crashes when used without descriptor
  if (mapPub.getNumSubscribers())
    mapPub.publish(PointMatcher_ros::pointMatcherCloudToRosMsg<float>(mapPointCloud, objectFrame, ros::Time::now()));

  ROS_INFO_STREAM("Total map merging: " << t.elapsed() << " [s]");

  //ros::Rate r(2);
  //r.sleep();
}
TEST(icpTest, icpTest)
{
  DP ref  = DP::load(dataPath + "cloud.00000.vtk");
  DP data = DP::load(dataPath + "cloud.00001.vtk");

  namespace fs = boost::filesystem;
  fs::path config_dir(dataPath + "icp_data");
  EXPECT_TRUE( fs::exists(config_dir) && fs::is_directory(config_dir) );

  fs::directory_iterator end_iter;
  for( fs::directory_iterator d(config_dir); d != end_iter; ++d)
  {
    if (!fs::is_regular_file(d->status()) ) continue;

    // Load config file, and form ICP object
    PM::ICP icp;
    std::string config_file = d->path().string();
    if (fs::extension(config_file) != ".yaml") continue;
    std::ifstream ifs(config_file.c_str());
    EXPECT_NO_THROW(icp.loadFromYaml(ifs)) << "This error was caused by the test file:" << endl << " " << config_file;

    // Compute current ICP transform
    PM::TransformationParameters curT = icp(data, ref);

    // Write current transform to disk (to easily compare it
    // with reference transform offline)
    fs::path cur_file = d->path();
    cur_file.replace_extension(".cur_trans");
    //std::cout << "Writing: " << cur_file << std::endl;
    std::ofstream otfs(cur_file.c_str());
    otfs.precision(16);
    otfs << curT;
    otfs.close();

    // Load reference transform
    fs::path ref_file = d->path();
    ref_file.replace_extension(".ref_trans");
    PM::TransformationParameters refT = 0*curT;
    //std::cout << "Reading: " << ref_file << std::endl;
    std::ifstream itfs(ref_file.c_str());
    for (int row = 0; row < refT.rows(); row++)
    {
      for (int col = 0; col < refT.cols(); col++)
      {
        itfs >> refT(row, col);
      }
    }

    // Dump the reference transform and current one
    //std::cout.precision(17);
    //std::cout << "refT:\n" << refT << std::endl;
    //std::cout << "curT:\n" << curT << std::endl;

    // Tolerance for change in rotation and translation
    double rotTol = 0.1, transTol = 0.1;

    // Find how much the reference rotation and translation
    // differ from the current values.
    PM::TransformationParameters refRot   = refT.block(0, 0, 3, 3);
    PM::TransformationParameters refTrans = refT.block(0, 3, 3, 1);
    PM::TransformationParameters curRot   = curT.block(0, 0, 3, 3);
    PM::TransformationParameters curTrans = curT.block(0, 3, 3, 1);
    PM::TransformationParameters rotErrMat   = refRot*(curRot.transpose()) - PM::TransformationParameters::Identity(3, 3);
    PM::TransformationParameters transErrMat = refTrans - curTrans;
    double rotErr   = rotErrMat.array().abs().sum();
    double transErr = transErrMat.array().abs().sum();

    //std::cout << "Rotation error: " << rotErr << std::endl;
    //std::cout << "Translation error: " << transErr << std::endl;

    EXPECT_LT(rotErr, rotTol) << "This error was caused by the test file:" << endl << " " << config_file;
    EXPECT_LT(transErr, transTol) << "This error was caused by the test file:" << endl << " " << config_file;
  }
}
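The pass/fail criterion above reduces to two elementwise L1 norms: one on R_ref * R_cur^T - I for rotation and one on t_ref - t_cur for translation. A minimal standalone sketch of the same metric, using plain Eigen types; the function name transformError and the example numbers are hypothetical:

#include <Eigen/Dense>
#include <iostream>

// Same error metric as in the test above.
void transformError(const Eigen::Matrix4f& refT, const Eigen::Matrix4f& curT,
                    float& rotErr, float& transErr)
{
  const Eigen::Matrix3f refRot   = refT.block<3,3>(0, 0);
  const Eigen::Matrix3f curRot   = curT.block<3,3>(0, 0);
  const Eigen::Vector3f refTrans = refT.block<3,1>(0, 3);
  const Eigen::Vector3f curTrans = curT.block<3,1>(0, 3);

  rotErr   = (refRot * curRot.transpose() - Eigen::Matrix3f::Identity()).array().abs().sum();
  transErr = (refTrans - curTrans).array().abs().sum();
}

int main()
{
  Eigen::Matrix4f refT = Eigen::Matrix4f::Identity();
  Eigen::Matrix4f curT = Eigen::Matrix4f::Identity();
  curT(0, 3) = 0.05f; // 5 cm translation difference, identical rotation

  float rotErr, transErr;
  transformError(refT, curT, rotErr, transErr);
  std::cout << rotErr << " " << transErr << std::endl; // prints "0 0.05"
  return 0;
}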
void Mapper::gotCloud(const sensor_msgs::PointCloud2& cloudMsgIn)
{
  PM::Parameters params;
  PM::TransformationParameters T = PM::TransformationParameters::Identity(4,4);

  sensor_msgs::PointCloud2 cloudMsg;
  pcl_ros::transformPointCloud(mapFrame, cloudMsgIn, cloudMsg, tf_listener);

  size_t goodCount(0);
  DP newPointCloud(rosMsgToPointMatcherCloud(cloudMsg, goodCount));
  if (goodCount == 0)
  {
    ROS_ERROR("I found no good points in the cloud");
    return;
  }
  else
  {
    ROS_INFO("Processing new point cloud");
  }

  bool icpWasSuccess = true;

  newPointCloud = filterScannerPtsCloud(newPointCloud);

  // Ensure a minimum amount of points after filtering
  const int ptsCount = newPointCloud.features.cols();
  if(ptsCount < minReadingPointCount)
  {
    ROS_ERROR_STREAM("Not enough points in newPointCloud: " << ptsCount << " pts.");
    return;
  }

  // Keep the map in memory
  if(mapPointCloud.features.rows() == 0)
  {
    // Initialize the map
    mapPointCloud = newPointCloud;
    return;
  }

  // Apply ICP
  try
  {
    T = icp(newPointCloud, mapPointCloud);
    mapPointCloud.features = T.inverse() * mapPointCloud.features;

    // Ensure minimum overlap between scans
    const double estimatedOverlap = icp.errorMinimizer->getWeightedPointUsedRatio();
    if (estimatedOverlap < minOverlap)
    {
      ROS_ERROR("Estimated overlap too small, move back");
      return;
    }

    ROS_INFO_STREAM("**** Adding new points to the map with " << estimatedOverlap << " overlap");

    // Create point cloud filters
    PM::DataPointsFilter* uniformSubsample;
    params = PM::Parameters({{"aggressivity", toParam(0.5)}});
    uniformSubsample = pm.DataPointsFilterRegistrar.create("UniformizeDensityDataPointsFilter", params);

    // Merge point clouds to map
    mapPointCloud.concatenate(newPointCloud);
    mapPointCloud = uniformSubsample->filter(mapPointCloud);

    // Control the size of the point cloud
    const double probToKeep = maxMapPointCount/(double)mapPointCloud.features.cols();
    if(probToKeep < 1.0)
    {
      PM::DataPointsFilter* randSubsample;
      params = PM::Parameters({{"prob", toParam(probToKeep)}});
      randSubsample = pm.DataPointsFilterRegistrar.create("RandomSamplingDataPointsFilter", params);
      mapPointCloud = randSubsample->filter(mapPointCloud);
    }

    // Publish map point cloud
    stringstream nameStream;
    nameStream << "nifti_map_" << cloudMsg.header.seq;
    PM::saveVTK(mapPointCloud, nameStream.str());
    mapPub.publish(pointMatcherCloudToRosMsg(mapPointCloud, mapFrame));
  }
  catch (const PM::ConvergenceError& error)
  {
    icpWasSuccess = false;
    ROS_WARN_STREAM("ICP failed to converge: " << error.what());
  }
}
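The size control above keeps the merged map near maxMapPointCount in expectation: each point survives the random subsampling independently with probability probToKeep. A tiny numeric sketch with hypothetical values, separate from the listing:

#include <cstdio>

int main()
{
  // Hypothetical numbers, only to illustrate the size-control arithmetic above.
  const int maxMapPointCount = 100000;  // assumed parameter value
  const int mapSize = 250000;           // points after concatenating the new scan
  const double probToKeep = maxMapPointCount / (double)mapSize; // = 0.4
  // RandomSamplingDataPointsFilter keeps each point independently with this
  // probability, so the expected map size returns to ~maxMapPointCount.
  std::printf("probToKeep = %.2f, expected size = %.0f\n", probToKeep, probToKeep * mapSize);
  return 0;
}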
void Mapper::processCloud(unique_ptr<DP> newPointCloud, const std::string& scannerFrame, const ros::Time& stamp, uint32_t seq)
{
  processingNewCloud = true;
  BoolSetter stopProcessingSetter(processingNewCloud, false);

  // if the future has completed, use the new map
  processNewMapIfAvailable();

  // IMPORTANT: We need to receive the point clouds in local coordinates (scanner or robot)
  timer t;

  // Convert point cloud
  const size_t goodCount(newPointCloud->features.cols());
  if (goodCount == 0)
  {
    ROS_ERROR("I found no good points in the cloud");
    return;
  }

  // Dimension of the point cloud, important since we handle 2D and 3D
  const int dimp1(newPointCloud->features.rows());

  ROS_INFO("Processing new point cloud");
  {
    timer t; // Print how long the algorithm takes

    // Apply filters to incoming cloud, in scanner coordinates
    inputFilters.apply(*newPointCloud);

    ROS_INFO_STREAM("Input filters took " << t.elapsed() << " [s]");
  }

  string reason;

  // Initialize the transformation to identity if empty
  if(!icp.hasMap())
  {
    // we need to know the dimensionality of the point cloud to initialize properly
    publishLock.lock();
    TOdomToMap = PM::TransformationParameters::Identity(dimp1, dimp1);
    publishLock.unlock();
  }

  // Fetch transformation from scanner to odom
  // Note: we don't need to wait for transform. It is already called in transformListenerToEigenMatrix()
  PM::TransformationParameters TOdomToScanner;
  try
  {
    TOdomToScanner = PointMatcher_ros::eigenMatrixToDim<float>(
      PointMatcher_ros::transformListenerToEigenMatrix<float>(
        tfListener,
        scannerFrame,
        odomFrame,
        stamp
      ), dimp1);
  }
  catch(const tf::ExtrapolationException& e)
  {
    ROS_ERROR_STREAM("Extrapolation Exception. stamp = " << stamp << " now = " << ros::Time::now() << " delta = " << ros::Time::now() - stamp);
    return;
  }

  ROS_DEBUG_STREAM("TOdomToScanner(" << odomFrame << " to " << scannerFrame << "):\n" << TOdomToScanner);
  ROS_DEBUG_STREAM("TOdomToMap(" << odomFrame << " to " << mapFrame << "):\n" << TOdomToMap);

  const PM::TransformationParameters TscannerToMap = TOdomToMap * TOdomToScanner.inverse();
  ROS_DEBUG_STREAM("TscannerToMap (" << scannerFrame << " to " << mapFrame << "):\n" << TscannerToMap);

  // Ensure a minimum amount of points after filtering
  const int ptsCount = newPointCloud->features.cols();
  if(ptsCount < minReadingPointCount)
  {
    ROS_ERROR_STREAM("Not enough points in newPointCloud: only " << ptsCount << " pts.");
    return;
  }

  // Initialize the map if empty
  if(!icp.hasMap())
  {
    ROS_INFO_STREAM("Creating an initial map");
    mapCreationTime = stamp;
    setMap(updateMap(newPointCloud.release(), TscannerToMap, false));
    // we must not delete newPointCloud because we just stored it in the mapPointCloud
    return;
  }

  // Check dimension
  if (newPointCloud->features.rows() != icp.getInternalMap().features.rows())
  {
    ROS_ERROR_STREAM("Dimensionality mismatch: incoming cloud is " << newPointCloud->features.rows()-1 << " while map is " << icp.getInternalMap().features.rows()-1);
    return;
  }

  try
  {
    // Apply ICP
    PM::TransformationParameters Ticp;
    Ticp = icp(*newPointCloud, TscannerToMap);

    ROS_DEBUG_STREAM("Ticp:\n" << Ticp);

    // Ensure minimum overlap between scans
    const double estimatedOverlap = icp.errorMinimizer->getOverlap();
    ROS_INFO_STREAM("Overlap: " << estimatedOverlap);
    if (estimatedOverlap < minOverlap)
    {
      ROS_ERROR_STREAM("Estimated overlap too small, ignoring ICP correction!");
      return;
    }

    // Compute tf
    publishStamp = stamp;
    publishLock.lock();
    TOdomToMap = Ticp * TOdomToScanner;

    // Publish tf
    tfBroadcaster.sendTransform(PointMatcher_ros::eigenMatrixToStampedTransform<float>(TOdomToMap, mapFrame, odomFrame, stamp));

    publishLock.unlock();
    processingNewCloud = false;

    ROS_DEBUG_STREAM("TOdomToMap:\n" << TOdomToMap);

    // Publish odometry
    if (odomPub.getNumSubscribers())
      odomPub.publish(PointMatcher_ros::eigenMatrixToOdomMsg<float>(Ticp, mapFrame, stamp));

    // Publish error on odometry
    if (odomErrorPub.getNumSubscribers())
      odomErrorPub.publish(PointMatcher_ros::eigenMatrixToOdomMsg<float>(TOdomToMap, mapFrame, stamp));

    // Publish outliers
    if (outlierPub.getNumSubscribers())
    {
      //DP outliers = PM::extractOutliers(transformation->compute(*newPointCloud, Ticp), *mapPointCloud, 0.6);
      //outlierPub.publish(PointMatcher_ros::pointMatcherCloudToRosMsg<float>(outliers, mapFrame, mapCreationTime));
    }

    // check if new points should be added to the map
    if (
      mapping &&
      ((estimatedOverlap < maxOverlapToMerge) || (icp.getInternalMap().features.cols() < minMapPointCount)) &&
      #if BOOST_VERSION >= 104100
      (!mapBuildingInProgress)
      #else // BOOST_VERSION >= 104100
      true
      #endif // BOOST_VERSION >= 104100
    )
    {
      // make sure we process the last available map
      mapCreationTime = stamp;
      #if BOOST_VERSION >= 104100
      ROS_INFO("Adding new points to the map in background");
      mapBuildingTask = MapBuildingTask(boost::bind(&Mapper::updateMap, this, newPointCloud.release(), Ticp, true));
      mapBuildingFuture = mapBuildingTask.get_future();
      mapBuildingThread = boost::thread(boost::move(boost::ref(mapBuildingTask)));
      mapBuildingInProgress = true;
      #else // BOOST_VERSION >= 104100
      ROS_INFO("Adding new points to the map");
      setMap(updateMap(newPointCloud.release(), Ticp, true));
      #endif // BOOST_VERSION >= 104100
    }
  }
  catch (const PM::ConvergenceError& error)
  {
    ROS_ERROR_STREAM("ICP failed to converge: " << error.what());
    return;
  }

  // Statistics about time and real-time capability
  int realTimeRatio = 100*t.elapsed() / (stamp.toSec()-lastPoinCloudTime.toSec());
  ROS_INFO_STREAM("[TIME] Total ICP took: " << t.elapsed() << " [s]");
  if(realTimeRatio < 80)
    ROS_INFO_STREAM("[TIME] Real-time capability: " << realTimeRatio << "%");
  else
    ROS_WARN_STREAM("[TIME] Real-time capability: " << realTimeRatio << "%");

  lastPoinCloudTime = stamp;
}
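The background map building above relies on a packaged task whose future is polled later by processNewMapIfAvailable(). The same deferred-update pattern can be sketched in isolation with the C++11 standard library (used here instead of the Boost types in the listing; expensiveUpdate is a hypothetical stand-in for Mapper::updateMap):

#include <chrono>
#include <functional>
#include <future>
#include <thread>

static int expensiveUpdate(int value) { return value * 2; } // stand-in for the map update

int main()
{
  std::packaged_task<int()> task(std::bind(&expensiveUpdate, 21));
  std::future<int> result = task.get_future();
  std::thread worker(std::move(task)); // build the new result in the background

  // ...keep handling incoming work; on each iteration, poll whether the result is
  // ready, analogous to what processNewMapIfAvailable() does with mapBuildingFuture:
  while (result.wait_for(std::chrono::milliseconds(0)) != std::future_status::ready)
    std::this_thread::yield();

  worker.join();
  return (result.get() == 42) ? 0 : 1;
}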