// Verify that all four synchronized inputs share the cloud's frame_id.
// Returns false (after logging which pair of topics disagrees) on the first
// mismatch; returns true when every header matches.
bool PrimitiveShapeClassifier::checkFrameId(const sensor_msgs::PointCloud2::ConstPtr& ros_cloud,
                                            const sensor_msgs::PointCloud2::ConstPtr& ros_normal,
                                            const jsk_recognition_msgs::ClusterPointIndices::ConstPtr& ros_indices,
                                            const jsk_recognition_msgs::PolygonArray::ConstPtr& ros_polygons)
{
  // Resolve the fully-qualified topic names up front so the error messages
  // name the topics this nodelet is actually subscribed to.
  const std::string cloud_topic = ros::names::resolve(sub_cloud_.getTopic());
  const std::string normal_topic = ros::names::resolve(sub_normal_.getTopic());
  const std::string indices_topic = ros::names::resolve(sub_indices_.getTopic());
  const std::string polygons_topic = ros::names::resolve(sub_polygons_.getTopic());

  // Compare each companion header against the cloud header, in the same
  // order as before, bailing out on the first mismatch.
  struct FrameCheck
  {
    const std_msgs::Header& header;
    const std::string& topic;
  };
  const FrameCheck checks[] = {
    { ros_normal->header, normal_topic },
    { ros_indices->header, indices_topic },
    { ros_polygons->header, polygons_topic }
  };
  for (size_t i = 0; i < sizeof(checks) / sizeof(checks[0]); ++i)
  {
    if (!jsk_recognition_utils::isSameFrameId(ros_cloud->header, checks[i].header))
    {
      NODELET_ERROR_STREAM(cloud_topic << " and " << checks[i].topic << " must have same frame_id");
      return false;
    }
  }
  NODELET_DEBUG_STREAM("Frame id is ok: " << ros_cloud->header.frame_id);
  return true;
}
void ControlMode::controlModeCmdCallback(const control_mode_cmdConstPtr& msg) { NODELET_INFO_STREAM("Heard command: " << msg->cmd); if (msg->cmd == "mode idle") { state = ControlModeTypes::IDLE; info = ""; } else if (msg->cmd == "mode standby") { state = ControlModeTypes::STANDBY; info = ""; } else if (msg->cmd == "mode active") { if (state == ControlModeTypes::STANDBY) { state = ControlModeTypes::ACTIVE; info = ""; } else { NODELET_ERROR("Invalid transition"); } } else { NODELET_ERROR_STREAM("Command unknown: " << msg->cmd); } }
/** * @brief Start capture/publish thread. */ virtual void onInit() { driver_.reset(new Driver(getPrivateNodeHandle(), getPrivateNodeHandle())); try { driver_->setup(); is_running_ = true; thread_ = boost::shared_ptr<boost::thread> (new boost::thread(boost::bind(&CvCameraNodelet::main, this))); } catch(cv_camera::DeviceError& e) { NODELET_ERROR_STREAM("failed to open device... do nothing: " << e.what()); } }
// Store the latest servo command from a known controller. Controllers are
// keyed by the message header's frame_id; only those listed in
// servoCommandPriorities.yaml have an entry in m_servoCommandMsgs.
void ServoInterface::servoMSGCallback(
  const autorally_msgs::servoMSGConstPtr& msg)
{
  std::map<std::string, autorally_msgs::servoMSG>::iterator found =
      m_servoCommandMsgs.find(msg->header.frame_id);
  if (found != m_servoCommandMsgs.end())
  {
    // Known controller: remember its most recent command.
    found->second = *msg;
  }
  else
  {
    NODELET_ERROR_STREAM("ServoInterface: Unknown controller " <<
                         msg->header.frame_id <<
                         " attempting to control servos, please add entry " <<
                         " to servoCommandPriorities.yaml");
  }
}
// Nodelet entry point: read configuration parameters, initialize the servo
// hardware interface, wire up publishers/subscribers, and start the periodic
// servo-command timer.
void ServoInterface::onInit()
{
  ros::NodeHandle nh = getNodeHandle();
  ros::NodeHandle nhPvt = getPrivateNodeHandle();
  std::string port;
  // Serial port is looked up in the public namespace under this nodelet's name.
  if(!nh.getParam(getName()+"/port", port))
  {
    NODELET_ERROR("ServoInterface: could not get servo interface port");
  }
  loadServoParams();
  loadServoCommandPriorities();
  m_maestro.init(nh, getName(), port);
  m_ss.init(nh);
  double servoCommandRate = 0.0;
  // NOTE(review): if servoCommandRate cannot be read it stays 0.0, yet the
  // timer below is still created with ros::Rate(0) -- confirm this is intended.
  if(!nhPvt.getParam("servoCommandRate", servoCommandRate) ||
     !nhPvt.getParam("throttleBrakeCoupled", m_brakeSetup.coupledWithThrottle) ||
     !nhPvt.getParam("servoCommandMaxAge", m_servoCommandMaxAge))
  {
    NODELET_ERROR_STREAM(getName() << " could not get all startup params");
  }
  // One subscription per controller entry loaded by loadServoCommandPriorities();
  // all of them funnel into the same servoMSGCallback.
  for (auto& mapIt : m_servoCommandMsgs)
  {
    ros::Subscriber sub = nh.subscribe(mapIt.first+"/servoCommand", 2,
                                       &ServoInterface::servoMSGCallback, this);
    m_servoSub[mapIt.first] = sub;
    NODELET_INFO_STREAM("ServoInterface: subscribed to srv cmd:" << mapIt.first+"/servoCommand");
  }
  m_speedCommandSub = nh.subscribe("vehicleSpeedCommand", 2,
                                   &ServoInterface::speedCallback, this);
  //m_servoStatusTimer = nh.createTimer(ros::Rate(servoStatusPubRate),
  //                    &ServoInterface::servoStatusTimerCallback, this);
  // Periodic timer that pushes the latest commands to the servos.
  m_throttleTimer = nh.createTimer(ros::Rate(servoCommandRate),
                                   &ServoInterface::setServos, this);
  m_servoMSGPub = nh.advertise<autorally_msgs::servoMSG> ("servoStatus", 2);
  //m_servoMSGStatus = autorally_msgs::servoMSGPtr(new autorally_msgs::servoMSG);
  //m_servoMSGStatus->header.frame_id = "servoController";
}
// Dynamic-reconfigure callback: (re)load the mux configuration from a YAML
// file. On the initial call config.yaml_cfg_file is empty and the path falls
// back to the private "yaml_cfg_file" parameter; later calls use whatever
// path was set through reconfigure.
void CmdVelMuxNodelet::reloadConfiguration(yocs_cmd_vel_mux::reloadConfig &config, uint32_t unused_level)
{
  std::string yaml_cfg_file;
  ros::NodeHandle &nh = this->getPrivateNodeHandle();
  if( config.yaml_cfg_file == "" )
  {
    // typically fired on startup, so look for a parameter to set a default
    nh.getParam("yaml_cfg_file", yaml_cfg_file);
  }
  else
  {
    yaml_cfg_file = config.yaml_cfg_file;
  }

  /*********************
  ** Yaml File Parsing
  **********************/
  std::ifstream ifs(yaml_cfg_file.c_str(), std::ifstream::in);
  if (ifs.good() == false)
  {
    NODELET_ERROR_STREAM("CmdVelMux : configuration file not found [" << yaml_cfg_file << "]");
    return;
  }
  // probably need to bring the try catches back here
  YAML::Node doc;
#ifdef HAVE_NEW_YAMLCPP
  // yaml-cpp >= 0.5: one-shot load of the whole document.
  doc = YAML::Load(ifs);
#else
  // Old yaml-cpp (< 0.5) API: explicit parser + document extraction.
  YAML::Parser parser(ifs);
  parser.GetNextDocument(doc);
#endif

  /*********************
  ** Output Publisher
  **********************/
  // Name of the muxed output topic; defaults to "output" when the YAML has
  // no "publisher" key.
  std::string output_name("output");
#ifdef HAVE_NEW_YAMLCPP
  if ( doc["publisher"] )
  {
    doc["publisher"] >> output_name;
  }
#else
  const YAML::Node *node = doc.FindValue("publisher");
  if ( node != NULL )
  {
    *node >> output_name;
  }
virtual void onInit() { ros::NodeHandle nh = this->getPrivateNodeHandle(); // resolve node(let) name std::string name = nh.getUnresolvedNamespace(); int pos = name.find_last_of('/'); name = name.substr(pos + 1); NODELET_INFO_STREAM("Initialising nodelet... [" << name << "]"); controller_.reset(new SafetyController(nh, name)); if (controller_->init()) { NODELET_INFO_STREAM("Kobuki initialised. Spinning up update thread ... [" << name << "]"); update_thread_.start(&SafetyControllerNodelet::update, *this); NODELET_INFO_STREAM("Nodelet initialised. [" << name << "]"); } else { NODELET_ERROR_STREAM("Couldn't initialise nodelet! Please restart. [" << name << "]"); } }
/**
 * @brief Register this control mode with the controller's mode service.
 *
 * Sleeps @p wait_time seconds, then blocks until the
 * "controller/control_modes" service is available and sends a
 * "register <mode_name> <nodelet name>" request. On a "success" response the
 * data flow is started; on any failure the mode transitions to ERROR and
 * @c info is set to a short human-readable reason.
 *
 * @param event     timer event that triggered this call (unused)
 * @param mode_name name under which this control mode registers itself
 * @param wait_time seconds to wait before attempting registration
 */
void ControlMode::requestRegistration2(const ros::TimerEvent& event,
                                       const std::string& mode_name,
                                       double wait_time)
{
  // Service Client
  NODELET_INFO("Waiting %f seconds before trying to register..", wait_time);
  ros::Duration d(wait_time);
  d.sleep();

  NODELET_INFO("Waiting for controller/control_modes service...");
  // Fixed: status string previously said "control_mode", which did not match
  // the actual service name ("controller/control_modes") used below.
  info = "Waiting for controller/control_modes service...";
  ros::service::waitForService("controller/control_modes"); // blocks until service available, no timeout

  info = "registering service...";
  control_modes register_service;
  register_service.request.request = "register " + mode_name + " " + getName(); //ros::this_node::getName();
  bool registered = ros::service::call("controller/control_modes", register_service);
  if (registered)
  {
    if (register_service.response.response == "success")
    {
      //NODELET_INFO("Successfully registered control mode");
      info = "control mode registered";
      startDataFlow();
    }
    else
    {
      NODELET_ERROR_STREAM("Control mode registration failed, response: '"
                           << register_service.response.response
                           << "', reason: '" << register_service.response.reason << "'");
      state = ControlModeTypes::ERROR;
      info = "registration failed";
    }
  }
  else
  {
    // The service call itself failed (e.g. server went away after waitForService).
    NODELET_ERROR("Control mode registration service call failed");
    state = ControlModeTypes::ERROR;
    info = "registration service call failed";
  }
}
// Synchronized stereo callback: runs the GPU pipeline (debayer, rectify,
// disparity, point cloud) on one image pair, producing only the outputs that
// currently have subscribers (tracked in connected_). Work is split across
// one CUDA stream per camera; the streams are joined where the disparity
// stage needs both rectified images.
void Stereoproc::imageCb(
    const sensor_msgs::ImageConstPtr& l_raw_msg,
    const sensor_msgs::CameraInfoConstPtr& l_info_msg,
    const sensor_msgs::ImageConstPtr& r_raw_msg,
    const sensor_msgs::CameraInfoConstPtr& r_info_msg)
{
    // Hold both the reconfigure lock and the connect/disconnect lock for the
    // whole callback so the processing graph cannot change mid-frame.
    boost::lock_guard<boost::recursive_mutex> config_lock(config_mutex_);
    boost::lock_guard<boost::mutex> connect_lock(connect_mutex_);
    int level = connected_.level();
    NODELET_DEBUG("got images, level %d", level);

    // Update the camera model
    model_.fromCameraInfo(l_info_msg, r_info_msg);

    cv::cuda::Stream l_strm, r_strm;
    cv::cuda::GpuMat l_raw, r_raw;
    // Keeps each in-flight GPUSender alive until its async send completes.
    std::vector<GPUSender::Ptr> senders;

    // Create cv::Mat views onto all buffers
    const cv::Mat l_cpu_raw = cv_bridge::toCvShare(
        l_raw_msg, l_raw_msg->encoding)->image;
    // Page-lock the host buffers so the async uploads below can use DMA.
    cv::cuda::registerPageLocked(const_cast<cv::Mat&>(l_cpu_raw));
    // NOTE(review): the right image is converted using l_raw_msg->encoding --
    // presumably both cameras share one encoding; confirm this is intended.
    const cv::Mat r_cpu_raw = cv_bridge::toCvShare(
        r_raw_msg, l_raw_msg->encoding)->image;
    cv::cuda::registerPageLocked(const_cast<cv::Mat&>(r_cpu_raw));
    l_raw.upload(l_cpu_raw, l_strm);
    r_raw.upload(r_cpu_raw, r_strm);

    // Mono debayer is required by every downstream mono/disparity product.
    cv::cuda::GpuMat l_mono;
    if(connected_.DebayerMonoLeft || connected_.RectifyMonoLeft ||
       connected_.Disparity || connected_.DisparityVis || connected_.Pointcloud)
    {
        cv::cuda::demosaicing(l_raw, l_mono, CV_BayerRG2GRAY, 1, l_strm);
    }
    if(connected_.DebayerMonoLeft)
    {
        GPUSender::Ptr t(new GPUSender(l_raw_msg, sensor_msgs::image_encodings::MONO8,
                                       &pub_mono_left_));
        senders.push_back(t);
        t->enqueueSend(l_mono, l_strm);
    }
    cv::cuda::GpuMat r_mono;
    if(connected_.DebayerMonoRight || connected_.RectifyMonoRight ||
       connected_.Disparity || connected_.DisparityVis || connected_.Pointcloud)
    {
        cv::cuda::demosaicing(r_raw, r_mono, CV_BayerRG2GRAY, 1, r_strm);
    }
    if(connected_.DebayerMonoRight)
    {
        GPUSender::Ptr t(new GPUSender(r_raw_msg, sensor_msgs::image_encodings::MONO8,
                                       &pub_mono_right_));
        senders.push_back(t);
        t->enqueueSend(r_mono, r_strm);
    }

    // Color debayer: left color is also needed for the RGB point cloud.
    cv::cuda::GpuMat l_color;
    if(connected_.DebayerColorLeft || connected_.RectifyColorLeft ||
       connected_.Pointcloud)
    {
        cv::cuda::demosaicing(l_raw, l_color, CV_BayerRG2BGR, 3, l_strm);
    }
    if(connected_.DebayerColorLeft)
    {
        GPUSender::Ptr t(new GPUSender(l_raw_msg, sensor_msgs::image_encodings::BGR8,
                                       &pub_color_left_));
        senders.push_back(t);
        t->enqueueSend(l_color, l_strm);
    }
    cv::cuda::GpuMat r_color;
    if(connected_.DebayerColorRight || connected_.RectifyColorRight)
    {
        cv::cuda::demosaicing(r_raw, r_color, CV_BayerRG2BGR, 3, r_strm);
    }
    if(connected_.DebayerColorRight)
    {
        GPUSender::Ptr t(new GPUSender(r_raw_msg, sensor_msgs::image_encodings::BGR8,
                                       &pub_color_right_));
        senders.push_back(t);
        t->enqueueSend(r_color, r_strm);
    }

    // Rectification (mono) -- prerequisite for stereo matching.
    cv::cuda::GpuMat l_rect_mono, r_rect_mono;
    if(connected_.RectifyMonoLeft || connected_.Disparity ||
       connected_.DisparityVis || connected_.Pointcloud)
    {
        model_.left().rectifyImageGPU(l_mono, l_rect_mono, cv::INTER_LINEAR, l_strm);
    }
    if(connected_.RectifyMonoRight || connected_.Disparity ||
       connected_.DisparityVis || connected_.Pointcloud)
    {
        model_.right().rectifyImageGPU(r_mono, r_rect_mono, cv::INTER_LINEAR, r_strm);
    }
    if(connected_.RectifyMonoLeft)
    {
        GPUSender::Ptr t(new GPUSender(l_raw_msg, sensor_msgs::image_encodings::MONO8,
                                       &pub_mono_rect_left_));
        senders.push_back(t);
        t->enqueueSend(l_rect_mono, l_strm);
    }
    if(connected_.RectifyMonoRight)
    {
        GPUSender::Ptr t(new GPUSender(r_raw_msg, sensor_msgs::image_encodings::MONO8,
                                       &pub_mono_rect_right_));
        senders.push_back(t);
        t->enqueueSend(r_rect_mono, r_strm);
    }

    // Rectification (color).
    cv::cuda::GpuMat l_rect_color, r_rect_color;
    if(connected_.RectifyColorLeft || connected_.Pointcloud)
    {
        model_.left().rectifyImageGPU(l_color, l_rect_color, cv::INTER_LINEAR, l_strm);
    }
    if(connected_.RectifyColorRight)
    {
        model_.right().rectifyImageGPU(r_color, r_rect_color, cv::INTER_LINEAR, r_strm);
    }
    if(connected_.RectifyColorLeft)
    {
        GPUSender::Ptr t(new GPUSender(l_raw_msg, sensor_msgs::image_encodings::BGR8,
                                       &pub_color_rect_left_));
        senders.push_back(t);
        t->enqueueSend(l_rect_color, l_strm);
    }
    if(connected_.RectifyColorRight)
    {
        GPUSender::Ptr t(new GPUSender(r_raw_msg, sensor_msgs::image_encodings::BGR8,
                                       &pub_color_rect_right_));
        senders.push_back(t);
        t->enqueueSend(r_rect_color, r_strm);
    }

    // Stereo matching + optional speckle/bilateral filtering. The matcher
    // output is scaled by 16 into CV_16SC1 for CPU-side speckle filtering,
    // then divided back so `disparity` ends up CV_32FC1 in disparity units.
    cv::cuda::GpuMat disparity;
    cv::cuda::GpuMat disparity_16s;
    cv::cuda::GpuMat disparity_filtered;
    if(connected_.Disparity || connected_.DisparityVis || connected_.Pointcloud)
    {
        // Both rectified images must be ready before matching.
        r_strm.waitForCompletion();
        block_matcher_->compute(l_rect_mono, r_rect_mono, disparity, l_strm);
        //allocate cpu-side resource
        filter_buf_.create(l_rect_mono.size(), CV_16SC1);
        //enqueueDownload
        disparity.convertTo(disparity_16s, CV_16SC1, 16, l_strm);
        disparity_16s.download(filter_buf_, l_strm);
        l_strm.waitForCompletion();
        // Speckle filtering operates on the CPU copy in filter_buf_.
        filterSpeckles();
        //enqueueUpload
        disparity_16s.upload(filter_buf_, l_strm);
        if(bilateral_filter_enabled_)
        {
            bilateral_filter_->apply(disparity_16s, l_rect_mono, disparity_filtered);
            disparity_filtered.convertTo(disparity, CV_32FC1, 1/16.);
        }
        else
        {
            disparity_16s.convertTo(disparity, CV_32FC1, 1/16.);
        }
    }
    if(connected_.Disparity)
    {
        cv::cuda::GpuMat disparity_float;
        if(disparity.type() != CV_32F)
            disparity.convertTo(disparity_float, CV_32FC1);
        else
            disparity_float = disparity;
        // Adjust for any x-offset between the principal points: d' = d - (cx_l - cx_r)
        double cx_l = model_.left().cx();
        double cx_r = model_.right().cx();
        if (cx_l != cx_r)
        {
            cv::cuda::subtract(disparity_float, cv::Scalar(cx_l - cx_r),
                               disparity_float, cv::cuda::GpuMat(), -1, l_strm);
        }
        // Allocate new disparity image message
        disp_msg_.reset(new stereo_msgs::DisparityImage());
        disp_msg_->header = l_info_msg->header;
        disp_msg_->image.header = l_info_msg->header;
        // Compute window of (potentially) valid disparities
        int border = block_matcher_->getBlockSize() / 2;
        int left = block_matcher_->getNumDisparities() + block_matcher_min_disparity_ + border - 1;
        int wtf = (block_matcher_min_disparity_ >= 0) ?
            border + block_matcher_min_disparity_ : std::max(border, -block_matcher_min_disparity_);
        // NOTE(review): image.width/height are read here but only assigned a
        // few lines below, so valid_window is computed from the freshly
        // zero-initialized message dimensions -- confirm this is intended.
        int right = disp_msg_->image.width - 1 - wtf;
        int top = border;
        int bottom = disp_msg_->image.height - 1 - border;
        disp_msg_->valid_window.x_offset = left;
        disp_msg_->valid_window.y_offset = top;
        disp_msg_->valid_window.width = right - left;
        disp_msg_->valid_window.height = bottom - top;
        disp_msg_->min_disparity = block_matcher_min_disparity_ + 1;
        disp_msg_->max_disparity = block_matcher_min_disparity_ + block_matcher_->getNumDisparities() - 1;
        disp_msg_->image.height = l_rect_mono.rows;
        disp_msg_->image.width = l_rect_mono.cols;
        disp_msg_->image.encoding = sensor_msgs::image_encodings::TYPE_32FC1;
        disp_msg_->image.step = l_rect_mono.cols * sizeof(float);
        disp_msg_->image.data.resize(disp_msg_->image.step*disp_msg_->image.height);
        // Wrap the message's byte buffer so the GPU downloads straight into it.
        disp_msg_data_ = cv::Mat_<float>
            (disp_msg_->image.height, disp_msg_->image.width,
             (float *)&disp_msg_->image.data[0], disp_msg_->image.step);
        cv::cuda::registerPageLocked(disp_msg_data_);
        disparity_float.download(disp_msg_data_, l_strm);
        // Publish from a host callback once the async download has finished.
        l_strm.enqueueHostCallback(
            [](int status, void *userData)
            {
                (void)status;
                static_cast<Stereoproc*>(userData)->sendDisparity();
            },
            (void*)this);
    }
    if(connected_.DisparityVis)
    {
        GPUSender::Ptr t(new GPUSender(l_raw_msg, sensor_msgs::image_encodings::BGRA8,
                                       &pub_disparity_vis_));
        senders.push_back(t);
        cv::cuda::GpuMat disparity_int(l_cpu_raw.size(), CV_16SC1);
        cv::cuda::GpuMat disparity_image(l_cpu_raw.size(), CV_8UC4);
        int ndisp = block_matcher_->getNumDisparities();
        if(disparity.type() == CV_32F)
        {
            // drawColorDisp expects fixed-point input; rescale range to match.
            disparity.convertTo(disparity_int, CV_16SC1, 16, 0, l_strm);
            ndisp *= 16;
        }
        else
            disparity_int = disparity;
        try
        {
            cv::cuda::drawColorDisp(disparity_int, disparity_image, ndisp, l_strm);
            t->enqueueSend(disparity_image, l_strm);
        }
        // NOTE(review): exception caught by value; prefer catch by const&.
        catch(cv::Exception e)
        {
            NODELET_ERROR_STREAM("Unable to draw color disparity: " << e.err <<
                                 "in " << e.file << ":" << e.func << " line " << e.line);
        }
    }

    // Project disparity into an XYZ + packed-RGB point cloud.
    cv::cuda::GpuMat xyz;
    if(connected_.Pointcloud)
    {
        model_.projectDisparityImageTo3dGPU(disparity, xyz, true, l_strm);
        cv::cuda::HostMem cuda_xyz(l_cpu_raw.size(), CV_32FC3);
        xyz.download(cuda_xyz, l_strm);
        cv::Mat l_cpu_color(l_cpu_raw.size(), CV_8UC3);
        l_color.download(l_cpu_color, l_strm);
        sensor_msgs::PointCloud2Ptr points_msg = boost::make_shared<sensor_msgs::PointCloud2>();
        points_msg->header = l_raw_msg->header;
        points_msg->height = l_cpu_raw.rows;
        points_msg->width = l_cpu_raw.cols;
        // Layout: x,y,z FLOAT32 plus a packed BGR "rgb" float, 16-byte stride.
        points_msg->fields.resize (4);
        points_msg->fields[0].name = "x";
        points_msg->fields[0].offset = 0;
        points_msg->fields[0].count = 1;
        points_msg->fields[0].datatype = sensor_msgs::PointField::FLOAT32;
        points_msg->fields[1].name = "y";
        points_msg->fields[1].offset = 4;
        points_msg->fields[1].count = 1;
        points_msg->fields[1].datatype = sensor_msgs::PointField::FLOAT32;
        points_msg->fields[2].name = "z";
        points_msg->fields[2].offset = 8;
        points_msg->fields[2].count = 1;
        points_msg->fields[2].datatype = sensor_msgs::PointField::FLOAT32;
        points_msg->fields[3].name = "rgb";
        points_msg->fields[3].offset = 12;
        points_msg->fields[3].count = 1;
        points_msg->fields[3].datatype = sensor_msgs::PointField::FLOAT32;
        //points_msg->is_bigendian = false; ???
        static const int STEP = 16;
        points_msg->point_step = STEP;
        points_msg->row_step = points_msg->point_step * points_msg->width;
        points_msg->data.resize (points_msg->row_step * points_msg->height);
        points_msg->is_dense = false; // there may be invalid points

        // Ensure cuda_xyz and l_cpu_color downloads are complete before the
        // CPU reads them below.
        l_strm.waitForCompletion();
        cv::Mat_<cv::Vec3f> cpu_xyz = cuda_xyz.createMatHeader();
        float bad_point = std::numeric_limits<float>::quiet_NaN ();
        int offset = 0;
        for (int v = 0; v < cpu_xyz.rows; ++v)
        {
            for (int u = 0; u < cpu_xyz.cols; ++u, offset += STEP)
            {
                if (isValidPoint(cpu_xyz(v,u)))
                {
                    // x,y,z,rgba
                    memcpy (&points_msg->data[offset + 0], &cpu_xyz(v,u)[0], sizeof (float));
                    memcpy (&points_msg->data[offset + 4], &cpu_xyz(v,u)[1], sizeof (float));
                    memcpy (&points_msg->data[offset + 8], &cpu_xyz(v,u)[2], sizeof (float));
                }
                else
                {
                    // Invalid disparity -> NaN coordinates (is_dense == false).
                    memcpy (&points_msg->data[offset + 0], &bad_point, sizeof (float));
                    memcpy (&points_msg->data[offset + 4], &bad_point, sizeof (float));
                    memcpy (&points_msg->data[offset + 8], &bad_point, sizeof (float));
                }
            }
        }

        // Fill in color
        offset = 0;
        const cv::Mat_<cv::Vec3b> color(l_cpu_color);
        for (int v = 0; v < cpu_xyz.rows; ++v)
        {
            for (int u = 0; u < cpu_xyz.cols; ++u, offset += STEP)
            {
                if (isValidPoint(cpu_xyz(v,u)))
                {
                    const cv::Vec3b& bgr = color(v,u);
                    int32_t rgb_packed = (bgr[2] << 16) | (bgr[1] << 8) | bgr[0];
                    memcpy (&points_msg->data[offset + 12], &rgb_packed, sizeof (int32_t));
                }
                else
                {
                    memcpy (&points_msg->data[offset + 12], &bad_point, sizeof (float));
                }
            }
        }
        pub_pointcloud_.publish(points_msg);
    }

    // Drain both streams, then undo the page-lock registrations made above.
    l_strm.waitForCompletion();
    r_strm.waitForCompletion();
    if(connected_.Disparity)
        cv::cuda::unregisterPageLocked(disp_msg_data_);
    //if(connected_.Disparity || connected_.DisparityVis || connected_.Pointcloud)
    //    cv::cuda::unregisterPageLocked(filter_buf_);
    cv::cuda::unregisterPageLocked( const_cast<cv::Mat&>(l_cpu_raw) );
    cv::cuda::unregisterPageLocked( const_cast<cv::Mat&>(r_cpu_raw) );
}
void PrimitiveShapeClassifier::process(const sensor_msgs::PointCloud2::ConstPtr& ros_cloud, const sensor_msgs::PointCloud2::ConstPtr& ros_normal, const jsk_recognition_msgs::ClusterPointIndices::ConstPtr& ros_indices, const jsk_recognition_msgs::PolygonArray::ConstPtr& ros_polygons) { boost::mutex::scoped_lock lock(mutex_); if (!checkFrameId(ros_cloud, ros_normal, ros_indices, ros_polygons)) return; pcl::PointCloud<PointT>::Ptr input(new pcl::PointCloud<PointT>); pcl::fromROSMsg(*ros_cloud, *input); pcl::PointCloud<pcl::Normal>::Ptr normal(new pcl::PointCloud<pcl::Normal>); pcl::fromROSMsg(*ros_normal, *normal); pcl::ExtractIndices<PointT> ext_input; ext_input.setInputCloud(input); pcl::ExtractIndices<pcl::Normal> ext_normal; ext_normal.setInputCloud(normal); std::vector<jsk_recognition_utils::Polygon::Ptr> polygons = jsk_recognition_utils::Polygon::fromROSMsg(*ros_polygons); jsk_recognition_msgs::ClassificationResult result; result.header = ros_cloud->header; result.classifier = "primitive_shape_classifier"; result.target_names.push_back("box"); result.target_names.push_back("circle"); result.target_names.push_back("other"); pcl::PointCloud<PointT>::Ptr projected_cloud(new pcl::PointCloud<PointT>); std::vector<pcl::PointIndices::Ptr> boundary_indices; NODELET_DEBUG_STREAM("Cluster num: " << ros_indices->cluster_indices.size()); for (size_t i = 0; i < ros_indices->cluster_indices.size(); ++i) { pcl::PointIndices::Ptr indices(new pcl::PointIndices); indices->indices = ros_indices->cluster_indices[i].indices; NODELET_DEBUG_STREAM("Estimating cluster #" << i << " (" << indices->indices.size() << " points)"); pcl::PointCloud<PointT>::Ptr cluster_cloud(new pcl::PointCloud<PointT>); ext_input.setIndices(indices); ext_input.filter(*cluster_cloud); pcl::PointCloud<pcl::Normal>::Ptr cluster_normal(new pcl::PointCloud<pcl::Normal>); ext_normal.setIndices(indices); ext_normal.filter(*cluster_normal); pcl::ModelCoefficients::Ptr support_plane(new pcl::ModelCoefficients); if 
(!getSupportPlane(cluster_cloud, polygons, support_plane)) { NODELET_ERROR_STREAM("cloud " << i << " has no support plane. skipped"); continue; } pcl::PointIndices::Ptr b(new pcl::PointIndices); pcl::PointCloud<PointT>::Ptr pc(new pcl::PointCloud<PointT>); float circle_likelihood, box_likelihood; estimate(cluster_cloud, cluster_normal, support_plane, b, pc, circle_likelihood, box_likelihood); boundary_indices.push_back(std::move(b)); *projected_cloud += *pc; if (circle_likelihood > circle_threshold_) { // circle result.labels.push_back(1); result.label_names.push_back("circle"); result.label_proba.push_back(circle_likelihood); } else if (box_likelihood > box_threshold_) { // box result.labels.push_back(0); result.label_names.push_back("box"); result.label_proba.push_back(box_likelihood); } else { // other result.labels.push_back(3); result.label_names.push_back("other"); result.label_proba.push_back(0.0); } } // publish results sensor_msgs::PointCloud2 ros_projected_cloud; pcl::toROSMsg(*projected_cloud, ros_projected_cloud); ros_projected_cloud.header = ros_cloud->header; pub_projected_cloud_.publish(ros_projected_cloud); jsk_recognition_msgs::ClusterPointIndices ros_boundary_indices; ros_boundary_indices.header = ros_cloud->header; for (size_t i = 0; i < boundary_indices.size(); ++i) { pcl_msgs::PointIndices ri; pcl_conversions::moveFromPCL(*boundary_indices[i], ri); ros_boundary_indices.cluster_indices.push_back(ri); } pub_boundary_indices_.publish(ros_boundary_indices); pub_class_.publish(result); }