int main(int argc, char** argv)
{
  ros::init(argc, argv, "base_planner_cu");
  ros::NodeHandle nh;
  ros::Rate loop_rate(100);

  // load globals from parameter server
  double param_input;
  bool bool_input;
  if(ros::param::get("base_planner_cu/obstacle_cost", param_input))
    OBSTACLE_COST = param_input;         // the cost of an obstacle
  if(ros::param::get("base_planner_cu/robot_radius", param_input))
    robot_radius = param_input;          // (m)
  if(ros::param::get("base_planner_cu/safety_distance", param_input))
    safety_distance = param_input;       // (m), distance that must be maintained between robot and obstacles
  if(ros::param::get("base_planner_cu/better_path_ratio", param_input))
    old_path_discount = param_input;     // if (current path length)*old_path_discount < (old path length) < (current path length)/old_path_discount, then stick with the old path
  if(ros::param::get("base_planner_cu/replan_jump", param_input))
    MAX_POSE_JUMP = param_input;         // (map grids) after which it is easier to replan from scratch
  if(ros::param::get("base_planner_cu/using_extra_safety_distance", bool_input))
    high_cost_safety_region = bool_input; // true: dilate obstacles by an extra extra_dilation but instead of lethal, multiply existing cost by extra_dilation_mult
  if(ros::param::get("base_planner_cu/extra_safety_distance", param_input))
    extra_dilation = param_input;        // (m)

  // print data about parameters
  printf("obstacle cost: %f, robot radius: %f, safety_distance: %f, extra_safety_distance: %f, \nbetter_path_ratio: %f, replan_jump: %f \n",
         OBSTACLE_COST, robot_radius, safety_distance, extra_dilation, old_path_discount, MAX_POSE_JUMP);
  if(high_cost_safety_region)
    printf("using extra safety distance \n");
  else
    printf("not using extra safety distance \n");

  // wait until the map service is provided (we need its tf /world_cu -> /map_cu to be broadcast)
  ros::service::waitForService("/cu/get_map_cu", -1);

  // set up ROS topic subscriber callbacks
  pose_sub = nh.subscribe("/cu/pose_cu", 1, pose_callback);
  goal_sub = nh.subscribe("/cu/goal_cu", 1, goal_callback);
  map_changes_sub = nh.subscribe("/cu/map_changes_cu", 10, map_changes_callback);

  // set up ROS topic publishers
  global_path_pub = nh.advertise<nav_msgs::Path>("/cu/global_path_cu", 1);
  system_update_pub = nh.advertise<std_msgs::Int32>("/cu/system_update_cu", 10);

  // spin ros once
  ros::spinOnce();
  loop_rate.sleep();

  // wait for a map
  while(!load_map() && ros::ok())
  {
    ros::spinOnce();
    loop_rate.sleep();
  }

  // init map
  buildGraphAndMap(raw_map->height, raw_map->width);
  populate_map_from_raw_map();

  // wait until we have a goal and a robot pose (these arrive via callbacks)
  while((goal_pose == NULL || robot_pose == NULL) && ros::ok())
  {
    ros::spinOnce();
    loop_rate.sleep();
  }

  // initialize search and related data structures (this calculates initial cost field)
  if(ros::ok())
  {
    initialize_search(true); // true := use old map, (raw_map was populated in a pre-processing step)
    new_goal = false;

    ros::spinOnce(); // check in on callbacks

    // added to give map changes a chance to propagate before first real search
    ros::Rate one_time_sleep(2);
    one_time_sleep.sleep();

    ros::spinOnce(); // check in on callbacks
  }

  // main planning loop
  int lr, ud;
  int time_counter = 0;
  MapPath* old_path = NULL;
  while (ros::ok())
  {
    while(change_token_used)
    {
      printf("change token used, main \n");
    }
    change_token_used = true;

    time_counter++;
    //printf(" This is the base planner %d\n", time_counter); // heartbeat for debugging

    load_goal();

    if(new_goal || reload_map)
    {
      if(reload_map)
      {
        printf("reinitializing map and searchtree \n");

        change_token_used = false;

        // wait for a map
        while(!load_map() && ros::ok())
        {
          ros::spinOnce();
          loop_rate.sleep();
        }
        change_token_used = true;

        initialize_search(false); // false := reset map based on what is in raw_map (raw_map was just re-populated with info from the map server)
      }
      else // new_goal
      {
        printf("reinitializing search tree, reusing map \n");
        initialize_search(true); // true := reuse the old map
      }
      new_goal = false;
      reload_map = false;

      if(old_path != NULL)
      {
        deleteMapPath(old_path);
        old_path = NULL;
      }

      // get best estimate of pose
      while(!load_pose())
      {
        // wait for most up to date pose
      }
      printf(" received pose via service \n");
    }
    else
      load_pose(); // if services lag then this is a bad idea, but have had problems on netbooks never getting new pose

    // find the new exact coordinates of the robot
    robot_pos_x = robot_pose->x/raw_map->resolution; // need to add global offset ability
    if(robot_pos_x > raw_map->width)
      robot_pos_x = raw_map->width;
    if(robot_pos_x < 0)
      robot_pos_x = 0;

    robot_pos_y = robot_pose->y/raw_map->resolution; // need to add global offset ability
    if(robot_pos_y > raw_map->height)
      robot_pos_y = raw_map->height;
    if(robot_pos_y < 0)
      robot_pos_y = 0;

    // do replanning, note that we need to make sure that all neighboring nodes of the cell(s) containing the robot are updated
    //printf("replanning %d \n", time_counter);

    if(robot_pos_x == (double)((int)robot_pos_x)) // then on a vertical edge, need to plan to nodes on cells to the left and right of the robot
      lr = -1;
    else // only need to plan to nodes on cells with x == int_pos_x
      lr = 0;

    while(lr < 2)
    {
      if(robot_pos_y == (double)((int)robot_pos_y)) // then on a horizontal edge, need to plan to nodes on cells to the top and bottom of the robot
        ud = -1;
      else // only need to plan to nodes on cells with y == int_pos_y
        ud = 0;

      if((int)robot_pos_x + lr < 0 || (int)robot_pos_x + lr > WIDTH)
      {
        lr++;
        continue;
      }

      while(ud < 2)
      {
        if((int)robot_pos_y + ud < 0 || (int)robot_pos_y + ud > HEIGHT)
        {
          ud++;
          continue;
        }

        s_start = &graph[(int)robot_pos_y + ud][(int)robot_pos_x + lr];
        computeShortestPath(); // this updates the cost field to this node

        ud++;
      }
      lr++;
    }

    // extract the path, this will be used to figure out where to move the robot
    //printf("extract path1 %d \n", time_counter);
    MapPath* pathToGoalWithLookAhead = extractPathOneLookahead();
    double path1_max_single_grid;
    double path1_cost = calculatePathCost(pathToGoalWithLookAhead, path1_max_single_grid);

    //printf("extract path2 %d \n", time_counter);
    MapPath* pathToGoalMine = extractPathMine(0); // this uses gradient descent where possible
    double path2_max_single_grid;
    double path2_cost = calculatePathCost(pathToGoalMine, path2_max_single_grid);

    double old_path_cost = LARGE;
    double old_path_max_single_grid;
    if(old_path != NULL)
      old_path_cost = calculatePathCost(old_path, old_path_max_single_grid);

    change_token_used = false;

    // use better path of the two to move the robot (or retain the old path if it is still better)
    if(old_path != NULL &&
       path1_cost*old_path_discount <= old_path_cost && old_path_cost <= path1_cost/old_path_discount &&
       path2_cost*old_path_discount <= old_path_cost && old_path_cost <= path2_cost/old_path_discount &&
       old_path_max_single_grid < OBSTACLE_COST)
    {
      publish_global_path(old_path);
      //printf("old path is about the same or better %d [%f vs %f] %f \n", time_counter, old_path_cost, (path1_cost+path2_cost)/2, old_path_max_single_grid);

      deleteMapPath(pathToGoalMine);
      deleteMapPath(pathToGoalWithLookAhead);
    }
    else if(path1_cost < path2_cost && path1_max_single_grid < OBSTACLE_COST)
    {
      publish_global_path(pathToGoalWithLookAhead);
      //printf("path1 is better %d %f %f\n", time_counter, path1_cost, path1_max_single_grid);

      if(old_path != NULL)
      {
        deleteMapPath(old_path);
        old_path = NULL;
      }
      old_path = pathToGoalWithLookAhead;

      deleteMapPath(pathToGoalMine);
    }
    else if(path2_max_single_grid < OBSTACLE_COST)
    {
      publish_global_path(pathToGoalMine);
      //printf("path2 is better %d %f %f\n", time_counter, path2_cost, path2_max_single_grid);

      if(old_path != NULL)
      {
        deleteMapPath(old_path);
        old_path = NULL;
      }
      old_path = pathToGoalMine;

      deleteMapPath(pathToGoalWithLookAhead);
    }
    else // all paths go through obstacles
    {
      printf("Base Planner: No safe path exists to goal %f %f %f\n", old_path_cost, path1_cost, path2_cost);
      publish_system_update(1);
      reload_map = true;
    }

    ros::spinOnce();
    loop_rate.sleep();
    //printf("done %d \n", time_counter);
  }

  if(old_path != NULL)
    deleteMapPath(old_path);

  pose_sub.shutdown();
  goal_sub.shutdown();
  map_changes_sub.shutdown();
  global_path_pub.shutdown();
  system_update_pub.shutdown();

  destroy_pose(robot_pose);
  destroy_pose(goal_pose);

  cpDeleteHeap(primaryCellPathHeap);
  cpDeleteHeap(secondaryCellPathHeap);
  deleteHeap();
  deleteGraphAndMap();
}
~BaseMotionController()
{
  base_odom.shutdown();
  base_velocities_publisher.shutdown();
}
// Main loop.
int main(int argc, char **argv)
{
  ros::init(argc, argv, "new_tagmapper_cu");
  ros::NodeHandle n;

  kill_pub = n.advertise<std_msgs::Int8>("/cu/killsg", 1000);
  pose_sub = n.subscribe("/cu/pose_cu", 1, pose_callback);
  marker_sub = n.subscribe("/cu/stargazer_marker_cu", 1, marker_callback);
  speeds_sub = n.subscribe("/speeds_bus", 1, speeds_callback);

  ros::Rate loop_rate(1000);

  isStopped = false;
  restart = false;
  poseSampleCount = 0;
  markerSampleCount = 0;
  lastTagLine = 0;
  memset(tagIDs, 0, PSEUDO_FILE_LINES*sizeof(int));
  tagCount = 0;
  currentTag = 0;

  // Open the file, read it in, extract the tag IDs, and determine the place to insert the new tag.
  int ifline = 0;
  std::ifstream psin;
  psin.open(pseudo_file, std::ifstream::in);
  if (!psin.good())
  {
    return 1;
  }
  while (!psin.eof())
  {
    psin.getline(pseudo_text[ifline], PSEUDO_FILE_LINE_WIDTH);
    ifline++;
  }
  psin.close();

  for (int i = 0; i < PSEUDO_FILE_LINES; i++)
  {
    if (strstr(pseudo_text[i], "/PseudoLiteMap"))
    {
      lastTagLine = i;
      break;
    }
  }
  for (int i = 0; i < lastTagLine; i++)
  {
    if (strstr(pseudo_text[i], "PseudoLite id"))
    {
      sscanf(pseudo_text[i], " <PseudoLite id=\"%d\"", &tagIDs[tagCount]);
      tagCount++;
    }
  }

  // Run the main loop. If the new tag samples have all been retrieved and the robot is stopped,
  // save the tag, kill Stargazer, and then shut down.
  int count = 0;
  while (ros::ok())
  {
    if (poseSampleCount == SAMPLES && markerSampleCount == SAMPLES && isStopped)
    {
      saveTagInXml();
      publish_kill();
      break;
    }
    ros::spinOnce();
    loop_rate.sleep();
    ++count;
  }

  kill_pub.shutdown();
  pose_sub.shutdown();
  marker_sub.shutdown();
  speeds_sub.shutdown();

  return 0;
}
void jointCallback(const sensor_msgs::JointState & msg)
{
  ROS_INFO("RECEIVED JOINT VALUES");
  if(itHappened)
  {
    sub.shutdown();
    cout << "not again" << endl;
    return;
  }
  if (msg.name.size() == 6)
  {
    itHappened = true;
    for (int i = 0; i < 6; i++)
    {
      joint_pos[i] = msg.position[i];
      name[i] = msg.name[i];
      // cout<<" , "<<name[i]<<":"<<joint_pos[i];
    }
    sub.shutdown();
  }
}
void camerainfoCb(const sensor_msgs::CameraInfoConstPtr& info_msg)
{
  ROS_INFO("infocallback :shutting down camerainfosub");
  cam_model_.fromCameraInfo(info_msg);
  camera_topic = info_msg->header.frame_id;
  camerainfosub_.shutdown();
}
bool getNearestObject(hbrs_srvs::GetPose::Request &req, hbrs_srvs::GetPose::Response &res)
{
  // if not yet subscribed to the laser scan topic, subscribe now
  if(!subscribed_to_topic)
  {
    sub_scan = nh_ptr->subscribe<sensor_msgs::LaserScan>("/scan", 1, laserScanCallback);
    subscribed_to_topic = true;
  }

  // wait until new laser scan data has arrived
  scan_received = false;
  while(!scan_received)
    ros::spinOnce();

  // calculate nearest object pose
  geometry_msgs::PoseStamped pose;
  pose = calculateNearestObject(laser_scan);
  res.pose = pose.pose;

  // unsubscribe from topic to save computational resources
  sub_scan.shutdown();
  subscribed_to_topic = false;

  return true;
}
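// Several snippets in this section subscribe only long enough to grab one message and
// then call shutdown(). Where a single message is all that is needed, roscpp's
// ros::topic::waitForMessage gives the same result without managing the subscriber by
// hand. The sketch below is not taken from the project above; the node name, topic
// name, and timeout are assumptions for illustration.
#include <ros/ros.h>
#include <ros/topic.h>
#include <sensor_msgs/LaserScan.h>

int main(int argc, char **argv)
{
  ros::init(argc, argv, "wait_for_scan_example");
  ros::NodeHandle nh;

  // Block until one LaserScan arrives on /scan, or give up after two seconds.
  sensor_msgs::LaserScan::ConstPtr scan =
      ros::topic::waitForMessage<sensor_msgs::LaserScan>("/scan", nh, ros::Duration(2.0));

  if (scan)
    ROS_INFO("received a scan with %zu ranges", scan->ranges.size());
  else
    ROS_WARN("no scan received before the timeout");
  return 0;
}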
void dynamicReconfigureCallback(pcl_filters::passthroughConfig &config, uint32_t level)
{
  ros::NodeHandle nh("~");

  if (axis_.compare(config.filteration_axis.c_str()) != 0)
  {
    axis_ = config.filteration_axis.c_str();
  }
  if (min_range_ != config.min_range)
  {
    min_range_ = config.min_range;
  }
  if (max_range_ != config.max_range)
  {
    max_range_ = config.max_range;
  }

  std::string temp_str = config.passthrough_sub.c_str();
  if (!config.passthrough_sub.empty() && passthrough_sub_.compare(temp_str) != 0)
  {
    sub_.shutdown();
    passthrough_sub_ = config.passthrough_sub.c_str();
    sub_ = nh.subscribe(passthrough_sub_, 1, cloudCallback);
  }

  ROS_INFO("Reconfigure Request: %s %f %f", axis_.c_str(), min_range_, max_range_);
}
void cam_info_callback(const sensor_msgs::CameraInfo &msg)
{
  tgCamParams = TomGine::tgCamera::Parameter(msg);
  ROS_INFO("Camera parameters received.");
  camera_info = msg;
  need_cam_init = false;
  cam_info_sub.shutdown();
}
void XYOriginCallback(const topic_tools::ShapeShifter::ConstPtr msg)
{
  try
  {
    const gps_common::GPSFixConstPtr origin = msg->instantiate<gps_common::GPSFix>();
    xy_wgs84_util_.reset(
        new swri_transform_util::LocalXyWgs84Util(
            origin->latitude,
            origin->longitude,
            origin->track,
            origin->altitude));
    origin_sub_.shutdown();
    return;
  }
  catch (...) {}

  try
  {
    const geometry_msgs::PoseStampedConstPtr origin = msg->instantiate<geometry_msgs::PoseStamped>();
    xy_wgs84_util_.reset(
        new swri_transform_util::LocalXyWgs84Util(
            origin->pose.position.y,    // Latitude
            origin->pose.position.x,    // Longitude
            0.0,                        // Heading
            origin->pose.position.z));  // Altitude
    origin_sub_.shutdown();
    return;
  }
  catch(...) {}

  try
  {
    const geographic_msgs::GeoPoseConstPtr origin = msg->instantiate<geographic_msgs::GeoPose>();
    xy_wgs84_util_.reset(
        new swri_transform_util::LocalXyWgs84Util(
            origin->position.latitude,
            origin->position.longitude,
            tf::getYaw(origin->orientation),
            origin->position.altitude));
    origin_sub_.shutdown();
    return;
  }
  catch(...) {}

  ROS_ERROR_THROTTLE(1.0, "Unsupported message type received for local_xy_origin.");
}
void wallDetectCB(std_msgs::Bool)
{
  ROS_INFO("Drag Race Controller got wall.");
  racecar_set_speed(0);
  race_end_subscriber.shutdown();
  system("rosnode kill iarrc_lane_detection &\n");
  system("rosnode kill drag_race_end_detector &\n");
  exit(0);
}
void unsubscribe()
{
  if (use_indices_)
  {
    sub_input_.unsubscribe();
    sub_indices_.unsubscribe();
  }
  else
  {
    sub_.shutdown();
  }
}
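// The unsubscribe() above mixes two different APIs: a message_filters::Subscriber is
// paused with unsubscribe() and can be re-armed later with subscribe(), while a plain
// ros::Subscriber is torn down with shutdown() and must be re-created by assigning a
// new subscribe() result. A minimal sketch of both, with placeholder topic names:
#include <ros/ros.h>
#include <message_filters/subscriber.h>
#include <sensor_msgs/PointCloud2.h>

void cloudCb(const sensor_msgs::PointCloud2::ConstPtr &) { /* process cloud */ }

int main(int argc, char **argv)
{
  ros::init(argc, argv, "unsubscribe_example");
  ros::NodeHandle nh;

  // message_filters variant: keeps the object alive, drops only the connection.
  message_filters::Subscriber<sensor_msgs::PointCloud2> filter_sub;
  filter_sub.subscribe(nh, "points", 1);
  filter_sub.registerCallback(cloudCb);
  filter_sub.unsubscribe();            // subscribe(nh, ...) can be called again later

  // plain roscpp variant: shutdown() invalidates the handle entirely.
  ros::Subscriber plain_sub = nh.subscribe("points", 1, cloudCb);
  plain_sub.shutdown();                // to listen again, assign a fresh subscribe()

  return 0;
}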
void stop()
{
  if (!running_) return;
  cam_->stop(); // Must stop camera before streaming_pub_.
  poll_srv_.shutdown();
  trigger_sub_.shutdown();
  streaming_pub_.shutdown();
  running_ = false;
}
void stoplightCB(std_msgs::Bool)
{
  ROS_INFO("Drag Race Controller got stoplight.");
  racecar_set_speed(14);
  //racecar_set_speed(0);
  stoplight_subscriber.shutdown();
  system("rosnode kill stoplight_watcher &\n");
  system("roslaunch iarrc_launch lane_detection.launch &\n");
  system("roslaunch iarrc_launch race_end_detector.launch &\n");
  race_end_subscriber = nh->subscribe(race_end_topic, 1, wallDetectCB);
}
// service callback:
// pause the topic model
void pause(bool p)
{
  if(p)
  {
    ROS_INFO("stopped listening to words");
    word_sub.shutdown();
  }
  else
  {
    ROS_INFO("started listening to words");
    word_sub = nh->subscribe("words", 10, words_callback);
  }
  rost->pause(p);
  paused = p;
}
bool stop(std_srvs::Empty::Request &req, std_srvs::Empty::Response &res)
{
  if(subscribed_to_topic)
  {
    sub_scan.shutdown();
    subscribed_to_topic = false;
  }
  continues_mode_enabled = false;
  ROS_INFO("nearest object detector DISABLED");
  return true;
}
bool stop(camera_srv_definitions::start_tracker::Request & req,
          camera_srv_definitions::start_tracker::Response & response)
{
#ifdef USE_PCL_GRABBER
  if(interface.get())
    interface->stop();
#else
  camera_topic_subscriber_.shutdown();
#endif
  if (!camtracker.empty())
    camtracker->stopObjectManagement();
  return true;
}
// gets camera intrinsic parameters
void camInfoCB(const sensor_msgs::CameraInfoConstPtr& camInfoMsg)
{
  // get camera info
  image_geometry::PinholeCameraModel cam_model;
  cam_model.fromCameraInfo(camInfoMsg);
  camMat = cv::Mat(cam_model.fullIntrinsicMatrix());
  camMat.convertTo(camMat, CV_32FC1);
  cam_model.distortionCoeffs().convertTo(distCoeffs, CV_32FC1);

  // unregister subscriber
  ROS_DEBUG("Got camera intrinsic parameters!");
  camInfoSub.shutdown();
  gotCamParam = true;
}
// wait for one camerainfo, then shut down that subscriber
void cam_info_callback(const sensor_msgs::CameraInfo &msg)
{
  camParam = aruco_ros::rosCameraInfo2ArucoCamParams(msg, useRectifiedImages);

  // handle cartesian offset between stereo pairs
  // see the sensor_msgs/CameraInfo documentation for details
  rightToLeft.setIdentity();
  rightToLeft.setOrigin(
      tf::Vector3(
          -msg.P[3]/msg.P[0],
          -msg.P[7]/msg.P[5],
          0.0));

  cam_info_received = true;
  cam_info_sub.shutdown();
}
// The real initialization is done after receiving the camera info.
void cam_info_callback(const sensor_msgs::CameraInfo &msg)
{
  if(parent_->tracker == 0)
  {
    ROS_INFO("Camera parameters received, ready to run.");
    cam_info_sub.shutdown();
    parent_->tracker = new blort_ros::GLTracker(msg, parent_->root_, true);
    image_sub = parent_->it_.subscribe("/blort_image", 10, &TrackerNode::imageCb, parent_);
    parent_->control_service = parent_->nh_.advertiseService("tracker_control", &TrackerNode::trackerControlServiceCb, parent_);
    parent_->server_ = std::auto_ptr<dynamic_reconfigure::Server<blort_ros::TrackerConfig> >
        (new dynamic_reconfigure::Server<blort_ros::TrackerConfig>());
    parent_->f_ = boost::bind(&TrackerNode::TrackingMode::reconf_callback, this, _1, _2);
    parent_->server_->setCallback(parent_->f_);
    parent_->recovery_client = parent_->nh_.serviceClient<blort_ros::RecoveryCall>("/blort_detector/pose_service");
  }
}
bool stopServiceCallBack(qbo_video_record::StopRecord::Request &req, qbo_video_record::StopRecord::Response &res)
{
  ROS_INFO("Service Called");
  ROS_INFO("Received: Stop");
  // cvReleaseVideoWriter(&writer);
  if (status == 1)
  {
    status = 0;
    kill( pID, SIGKILL );
    sub.shutdown();
    thread thread_1 = thread(combineAudioVideo);
    res.result = true;
  }
  else
  {
    res.result = false;
  }
  return true;
}
void cam_info_callback(const sensor_msgs::CameraInfo &msg)
{
  if(detector == 0)
  {
    ROS_INFO("Camera parameters received, ready to run.");
    cam_info_sub.shutdown();
    detector = new blort_ros::GLDetector(msg, root_);
    if(nn_match_threshold != 0.0)
      detector->setNNThreshold(nn_match_threshold);
    if(ransac_n_points_to_match != 0)
      detector->setRansacNPointsToMatch(ransac_n_points_to_match);
    pose_service = nh_.advertiseService("pose_service", &DetectorNode::recovery, this);

    // lines for dynamic_reconfigure
    server_ = std::auto_ptr<dynamic_reconfigure::Server<blort_ros::DetectorConfig> >
        (new dynamic_reconfigure::Server<blort_ros::DetectorConfig>());
    f_ = boost::bind(&DetectorNode::reconf_callback, this, _1, _2);
    server_->setCallback(f_);
  }
}
void pr2_marker_cb_old(ar_track_alvar_msgs::AlvarMarkersConstPtr markers)
{
  static int state = 0;
  geometry_msgs::PointStamped point;
  point.header.frame_id = "/base_link";
  point.point.x = 0.3;
  point.point.z = 1;
  if (state == 0)
  {
    point.point.y = -0.5;
    head_look_at_pub.publish(point);
    ros::Duration(2).sleep();
    state = 1;
  }
  else if (state == 1 && pr2_poses.size() >= POSES_COUNT/2)
  {
    point.point.y = 0.2;
    head_look_at_pub.publish(point);
    ros::Duration(2).sleep();
    state = 2;
  }
  else if (state == 2 && pr2_poses.size() >= POSES_COUNT)
  {
    tr_pr2_ = create_transform_from_poses_vector(pr2_poses, robot_frame_);
    pr2_marker_sub.shutdown();
    pr2_calibration_done_ = true;
    tr_timer_ = nh_.createTimer(ros::Duration(0.01), &ArtCalibration::trCallback, this);
  }

  geometry_msgs::Pose pose;
  try
  {
    pose = get_main_marker_pose(*markers);
    pr2_poses.push_back(pose);
  }
  catch (NoMainMarker& e)
  {
    std::cout << e.what() << std::endl;
    return;
  }
}
// Connection callback that unsubscribes from the tracker if no one is subscribed.
void connectCallback(ros::Subscriber &sub_msg,
                     ros::NodeHandle &n,
                     string gp_topic,
                     string img_topic,
                     Subscriber<GroundPlane> &sub_gp,
                     Subscriber<CameraInfo> &sub_cam,
                     image_transport::SubscriberFilter &sub_col,
                     image_transport::ImageTransport &it)
{
  if(!pub_message.getNumSubscribers()
     && !pub_result_image.getNumSubscribers()
     && !pub_detected_persons.getNumSubscribers())
  {
    ROS_DEBUG("HOG: No subscribers. Unsubscribing.");
    sub_msg.shutdown();
    sub_gp.unsubscribe();
    sub_cam.unsubscribe();
    sub_col.unsubscribe();
  }
  else
  {
    ROS_DEBUG("HOG: New subscribers. Subscribing.");
    if(strcmp(gp_topic.c_str(), "") == 0)
    {
      sub_msg = n.subscribe(img_topic.c_str(), 1, &imageCallback);
    }
    sub_cam.subscribe();
    sub_gp.subscribe();
    sub_col.subscribe(it, sub_col.getTopic().c_str(), 1);
  }
}
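// The connection callback above implements "lazy" input handling: the node only
// subscribes to its inputs while someone is subscribed to its outputs. A minimal,
// self-contained version of that idea is sketched below using roscpp's
// connect/disconnect callbacks on advertise(); the relay topics and node name are
// placeholders, not taken from the detector above.
#include <ros/ros.h>
#include <std_msgs/String.h>

ros::Publisher pub;
ros::Subscriber sub;
ros::NodeHandle *nh_ptr = NULL;
bool input_subscribed = false;

void inputCb(const std_msgs::String::ConstPtr &msg)
{
  pub.publish(*msg);                       // trivial pass-through
}

// Called whenever a subscriber connects to or disconnects from the output topic.
void connectionCb(const ros::SingleSubscriberPublisher &)
{
  if (pub.getNumSubscribers() > 0 && !input_subscribed)
  {
    sub = nh_ptr->subscribe("input", 1, inputCb);
    input_subscribed = true;
  }
  else if (pub.getNumSubscribers() == 0 && input_subscribed)
  {
    sub.shutdown();                        // nobody listens, so stop doing work
    input_subscribed = false;
  }
}

int main(int argc, char **argv)
{
  ros::init(argc, argv, "lazy_relay_example");
  ros::NodeHandle nh;
  nh_ptr = &nh;
  pub = nh.advertise<std_msgs::String>("output", 1, connectionCb, connectionCb);
  ros::spin();
  return 0;
}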
void pr2_marker_cb(ar_track_alvar_msgs::AlvarMarkersConstPtr markers)
{
  static int state = 0;
  geometry_msgs::PointStamped point;
  point.header.frame_id = "/base_link";
  point.point.x = 0.3;
  point.point.z = 1;
  if (state == 0)
  {
    point.point.y = -0.5;
    head_look_at_pub.publish(point);
    ros::Duration(5).sleep();
    state = 1;
  }
  else if (state == 1 && pr2_looking_for_marker_id_ == 12)
  {
    point.point.y = 0.5;
    head_look_at_pub.publish(point);
    ros::Duration(5).sleep();
    state = 2;
  }
  else if (state == 2 && pr2_looking_for_marker_id_ > 20)
  {
    tf::Vector3 pp1110 = pr2_position10_ - pr2_position11_;
    tf::Vector3 pp1112 = pr2_position12_ - pr2_position11_;
    pp1110.normalize();
    pp1112.normalize();
    tf::Vector3 n = pp1112.cross(pp1110);
    n.normalize();
    ROS_INFO_STREAM(pp1110.dot(pp1112));
    ROS_INFO_STREAM(pp1110.dot(n));
    ROS_INFO_STREAM(pp1112.dot(pp1110));
    ROS_INFO_STREAM(pp1112.dot(n));
    ROS_INFO_STREAM(n.dot(pp1110));
    ROS_INFO_STREAM(n.dot(pp1112));
    //tf::Matrix3x3 m(pp1110.getX(), pp1110.getY(), pp1110.getZ(), pp1112.getX(), pp1112.getY(), pp1112.getZ(), n.getX(), n.getY(), n.getZ());
    tf::Matrix3x3 m(pp1112.getX(), pp1110.getX(), n.getX(),
                    pp1112.getY(), pp1110.getY(), n.getY(),
                    pp1112.getZ(), pp1110.getZ(), n.getZ());
    tf::Transform tr = tf::Transform(m, pr2_position11_);
    tr_pr2_ = tf::StampedTransform(tr.inverse(), ros::Time::now(), world_frame_, robot_frame_);
    pr2_marker_sub.shutdown();
    pr2_calibration_done_ = true;
    tr_timer_ = nh_.createTimer(ros::Duration(0.01), &ArtCalibration::trCallback, this);
  }

  try
  {
    switch (pr2_looking_for_marker_id_)
    {
      case 10:
        pr2_position10_ += get_marker_position_by_id(*markers, 10);
        ++count;
        if (count == 10)
        {
          pr2_position10_ /= count;
          count = 0;
          pr2_looking_for_marker_id_ += 1;
        }
        break;
      case 11:
        pr2_position11_ += get_marker_position_by_id(*markers, 11);
        ++count;
        if (count == 10)
        {
          pr2_position11_ /= count;
          count = 0;
          pr2_looking_for_marker_id_ += 1;
        }
        break;
      case 12:
        pr2_position12_ += get_marker_position_by_id(*markers, 12);
        ++count;
        if (count == 10)
        {
          pr2_position12_ /= count;
          count = 0;
          pr2_looking_for_marker_id_ += 100;
        }
        break;
      case 13:
        pr2_position13_ = get_marker_position_by_id(*markers, 13);
        break;
    }
  }
  catch (MissingMarker& e)
  {
    ;
  }
}
void close() { sub.shutdown(); }
void octomapCallback(const octomap_msgs::Octomap::ConstPtr& msg)
{
  octomap_msg = msg;
  octomap_sub.shutdown();
}
// wait for one camerainfo, then shut down that subscriber
void cam_info_callback(const sensor_msgs::CameraInfo &msg)
{
  camParam = aruco_ros::rosCameraInfo2ArucoCamParams(msg, useRectifiedImages);
  cam_info_received = true;
  cam_info_sub.shutdown();
}
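// Distilled form of the one-shot pattern used by the camera-info callbacks in this
// section: subscribe, cache the first message, then call shutdown() from inside the
// callback so it never fires again. The node and topic names below are illustrative
// assumptions, not taken from any of the projects above.
#include <ros/ros.h>
#include <sensor_msgs/CameraInfo.h>

ros::Subscriber info_sub;              // global so the callback can shut it down
sensor_msgs::CameraInfo latest_info;   // cached copy of the first message
bool info_received = false;

void camInfoOnce(const sensor_msgs::CameraInfo &msg)
{
  latest_info = msg;                   // keep the data we need
  info_received = true;
  info_sub.shutdown();                 // unsubscribe; this callback will not run again
}

int main(int argc, char **argv)
{
  ros::init(argc, argv, "one_shot_camera_info_example");
  ros::NodeHandle nh;
  info_sub = nh.subscribe("camera_info", 1, camInfoOnce);

  ros::Rate rate(10);
  while (ros::ok() && !info_received)  // spin until the single message arrives
  {
    ros::spinOnce();
    rate.sleep();
  }
  return 0;
}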
bool start(camera_srv_definitions::start_tracker::Request & req,
           camera_srv_definitions::start_tracker::Response & response)
{
  cameras_.clear();
  keyframes_.clear();
  saved_clouds_ = 0;
  conf_ = 0;
  pose_ = Eigen::Matrix4f::Identity();
  initializeVisualizationMarkers();

#ifdef USE_PCL_GRABBER
  try
  {
    interface.reset( new pcl::io::OpenNI2Grabber() );
  }
  catch (pcl::IOException e)
  {
    std::cout << "PCL threw error " << e.what() << ". Could not start camera..." << std::endl;
    return false;
  }

  cv::Mat_<double> distCoeffs;
  cv::Mat_<double> intrinsic = cv::Mat::zeros(3, 3, CV_64F);
  intrinsic.at<double>(0,0) = 525.f;
  intrinsic.at<double>(1,1) = 525.f;
  intrinsic.at<double>(0,2) = 320.f;
  intrinsic.at<double>(1,2) = 240.f;
  intrinsic.at<double>(2,2) = 1.f;
  std::cout << intrinsic << std::endl << std::endl;

  camtracker.reset( new v4r::KeypointSlamRGBD2(param) );
  camtracker->setCameraParameter(intrinsic, distCoeffs);

  boost::function<void (const pcl::PointCloud<pcl::PointXYZRGBA>::ConstPtr&)> f =
      boost::bind (&CamTracker::cloud_cb_, this, _1);
  interface->registerCallback (f);
  interface->start ();
  std::cout << "Camera started..." << std::endl;
#else
  camera_info_subscriber_ = n_->subscribe(camera_topic_ + "/camera_info", 1, &CamTracker::camera_info_cb, this);
  ROS_INFO_STREAM("Waiting for camera info...topic=" << camera_topic_ << "/camera_info...");
  while (!got_camera_info_)
  {
    ros::Duration(0.1).sleep();
    ros::spinOnce();
  }
  ROS_INFO("got it.");
  camera_info_subscriber_.shutdown();

  cv::Mat_<double> distCoeffs = cv::Mat(4, 1, CV_64F, camera_info_.D.data());
  cv::Mat_<double> intrinsic = cv::Mat(3, 3, CV_64F, camera_info_.K.data());

  camtracker.reset( new v4r::KeypointSlamRGBD2(param) );
  camtracker->setCameraParameter(intrinsic, distCoeffs);

  confidence_publisher_ = n_->advertise<std_msgs::Float32>("cam_tracker_confidence", 1);
  camera_topic_subscriber_ = n_->subscribe(camera_topic_ + "/points", 1, &CamTracker::getCloud, this);
#endif

  last_cloud_ = boost::posix_time::microsec_clock::local_time ();
  last_cloud_ros_time_ = ros::Time::now();
  return true;
}
void sendGoalFromTopic(const geometry_msgs::PoseArrayConstPtr& msg)
{
  ROS_INFO("[pick and place] Got goal from topic! %s", goal_->topic.c_str());
  pickAndPlace(msg->poses[0], msg->poses[1]);
  pick_and_place_sub_.shutdown();
}
void saveImagesToDisk(ros::NodeHandle& nh, ros::Subscriber& sub)
{
  //pcl::ScopeTime t1 ("save images");
  {
    boost::mutex::scoped_lock lock(rgb_mutex_);
    memcpy( cv_rgb_.data, &rgb_data_[0], 3*rows_*cols_*sizeof(unsigned char));
    new_rgb_ = false;
  }

  capture_->stop();
  sub.shutdown();
  sub = nh.subscribe("/camera/depth/image_raw", 1, &OpenNIShoter::frameDepthCallback, this);
  new_depth_ = false;
  capture_->start();
  do
  {
    if (new_depth_ == true)
    {
      boost::mutex::scoped_lock lock(depth_mutex_);
      memcpy( cv_depth_.data, &depth_data_[0], rows_*cols_*sizeof(uint16_t));
      new_depth_ = false;
      break;
    }
    boost::this_thread::sleep (boost::posix_time::millisec (10));
  } while (1);

  capture_->stop();
  sub.shutdown();
  sub = nh.subscribe("/camera/ir/image", 1, &OpenNIShoter::frameIRCallback, this);
  new_ir_ = false;
  capture_->start();
  do
  {
    if (new_ir_ == true)
    {
      boost::mutex::scoped_lock lock(ir_mutex_);
      memcpy( cv_ir_raw_.data, &ir_data_[0], rows_*cols_*sizeof(uint16_t));
      cv_ir_raw_.convertTo(cv_ir_, CV_8U);
      cv::equalizeHist( cv_ir_, cv_ir_ );
      int max = 0;
      for (int i = 0; i < rows_; i++)
      {
        for (int j = 0; j < cols_; j++)
        {
          if (ir_data_[i+j*rows_] > max)
            max = ir_data_[i+j*rows_];
        }
      }
      std::cout << "max IR val: " << max << std::endl;
      new_ir_ = false;
      break;
    }
    boost::this_thread::sleep (boost::posix_time::millisec (10));
  } while (1);

  capture_->stop();
  sub.shutdown();
  sub = nh.subscribe("/camera/rgb/image_raw", 1, &OpenNIShoter::frameRGBCallback, this);
  capture_->start();

  sprintf(depth_file, "/Depth/%018ld.png", timestamp_depth_ );
  sprintf(rgb_file, "/RGB/%018ld.png", timestamp_rgb_);
  sprintf(ir_file, "/IR/%018ld.png", timestamp_ir_);

  cv::imwrite( write_folder_ + depth_file, cv_depth_);
  cv::imwrite( write_folder_ + rgb_file, cv_rgb_);
  cv::imwrite( write_folder_ + ir_file, cv_ir_);

  sprintf(depth_file_PNG, "/Depth/%018ld.png", timestamp_depth_);
  sprintf(rgb_file_PNG, "/RGB/%018ld.png", timestamp_rgb_);
  sprintf(ir_file_PNG, "/IR/%018ld.png", timestamp_ir_);

  off_rgb_.width(18);
  off_rgb_.fill('0');
  off_rgb_ << timestamp_rgb_;
  off_rgb_ << " " << rgb_file_PNG << std::endl;

  off_depth_.width(18);
  off_depth_.fill('0');
  off_depth_ << timestamp_depth_;
  off_depth_ << " " << depth_file_PNG << std::endl;

  off_ir_.width(18);
  off_ir_.fill('0');
  off_ir_ << timestamp_ir_;
  off_ir_ << " " << ir_file_PNG << std::endl;
}