Example #1
 ~BaseMotionController()
 {
      base_odom.shutdown(); 
      base_velocities_publisher.shutdown(); 
 }
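The destructor above is only the teardown half of the pattern. A minimal sketch of the surrounding class (the constructor, the odometry callback, and the topic names "odom"/"cmd_vel" are assumptions, not taken from the original project):

#include <ros/ros.h>
#include <nav_msgs/Odometry.h>
#include <geometry_msgs/Twist.h>

class BaseMotionController
{
public:
  explicit BaseMotionController(ros::NodeHandle& nh)
  {
    // Handles acquired here are released explicitly in the destructor.
    base_odom = nh.subscribe("odom", 10, &BaseMotionController::odomCallback, this);
    base_velocities_publisher = nh.advertise<geometry_msgs::Twist>("cmd_vel", 10);
  }

  ~BaseMotionController()
  {
    base_odom.shutdown();                  // stop receiving odometry
    base_velocities_publisher.shutdown();  // unadvertise the velocity topic
  }

private:
  void odomCallback(const nav_msgs::Odometry& msg) { /* assumed callback body */ }

  ros::Subscriber base_odom;
  ros::Publisher base_velocities_publisher;
};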
Example #2
// Main loop.
int main(int argc, char **argv)
{
  ros::init(argc, argv, "new_tagmapper_cu");
  ros::NodeHandle n;
  kill_pub = n.advertise<std_msgs::Int8>("/cu/killsg", 1000);
  pose_sub = n.subscribe("/cu/pose_cu", 1, pose_callback);
  marker_sub = n.subscribe("/cu/stargazer_marker_cu", 1, marker_callback);
  speeds_sub = n.subscribe("/speeds_bus", 1, speeds_callback);
  ros::Rate loop_rate(1000);

  isStopped = false;
  restart = false;
  poseSampleCount = 0;
  markerSampleCount = 0;
  lastTagLine = 0;
  memset(tagIDs, 0, PSEUDO_FILE_LINES*sizeof(int));
  tagCount = 0;
  currentTag = 0;

  // Open the file, read it in, extract the tag IDs, and determine the place to insert the new tag.
  int ifline = 0;
  std::ifstream psin;
  psin.open(pseudo_file, std::ifstream::in);
  if (!psin.good()) {
    return 1;
  }
  while (ifline < PSEUDO_FILE_LINES &&
         psin.getline(pseudo_text[ifline], PSEUDO_FILE_LINE_WIDTH)) {
    ifline++;
  }
  psin.close();
  for (int i = 0; i < PSEUDO_FILE_LINES; i++) {
    if (strstr(pseudo_text[i], "/PseudoLiteMap")) {
      lastTagLine = i;
      break;
    }
  }
  for (int i = 0; i < lastTagLine; i++) {
    if (strstr(pseudo_text[i], "PseudoLite id")) {
      sscanf(pseudo_text[i], "    <PseudoLite id=\"%d\"", &tagIDs[tagCount]);
      tagCount++;
    }
  }

  // Run the main loop.  If the new tag samples have all been retrieved and the robot is stopped, save the tag, kill Stargazer, and then shut down.
  int count = 0;
  while (ros::ok())
  {
    if (poseSampleCount == SAMPLES && markerSampleCount == SAMPLES && isStopped) {
      saveTagInXml();
      publish_kill();
      break;
    }
    ros::spinOnce();
    loop_rate.sleep();
    ++count;
  }

  kill_pub.shutdown();
  pose_sub.shutdown();
  marker_sub.shutdown();
  speeds_sub.shutdown();

  return 0;
}
Example #3
File: cob.cpp Project: krter12/COB-Servo
void jointCallback(const sensor_msgs::JointState & msg){
	ROS_INFO("RECEIVED JOINT VALUES");
	if(itHappened){
		sub.shutdown();
		cout<<"not again"<<endl;
		return;
	}
	if(msg.name.size() == 6){
		itHappened = true;
		for(int i = 0; i < 6; i++){
			joint_pos[i] = msg.position[i];
			name[i] = msg.name[i];
			// cout<<" , "<<name[i]<<":"<<joint_pos[i];
		}
		sub.shutdown();
	}
}
void dynamicReconfigureCallback(pcl_filters::passthroughConfig &config, uint32_t level) 
{
	ros::NodeHandle nh("~");
	if (axis_.compare(config.filteration_axis.c_str()) != 0)
	{
		axis_ = config.filteration_axis.c_str();
	}
	if (min_range_ != config.min_range)
	{
		min_range_ = config.min_range;
	}
	if (max_range_ != config.max_range)
	{
		max_range_ = config.max_range;
	}
	
	std::string temp_str = config.passthrough_sub.c_str();
	if (!config.passthrough_sub.empty() && passthrough_sub_.compare(temp_str) != 0)
	{
		sub_.shutdown();
		passthrough_sub_ = config.passthrough_sub.c_str();
		sub_ = nh.subscribe (passthrough_sub_, 1, cloudCallback);
	}
	ROS_INFO("Reconfigure Request: %s %f %f",axis_.c_str(), min_range_,max_range_);
}
Example #5
 void camerainfoCb(const sensor_msgs::CameraInfoConstPtr& info_msg)
 {
     ROS_INFO("infocallback :shutting down camerainfosub");
     cam_model_.fromCameraInfo(info_msg);
     camera_topic = info_msg->header.frame_id;
     camerainfosub_.shutdown();
 }
bool getNearestObject(hbrs_srvs::GetPose::Request &req, hbrs_srvs::GetPose::Response &res)
{
	// if we are not yet subscribed to the laser scan topic, subscribe now
	if(!subscribed_to_topic)
	{
		sub_scan = nh_ptr->subscribe<sensor_msgs::LaserScan>("/scan", 1, laserScanCallback);
		subscribed_to_topic = true;
	}

	// wait until a fresh laser scan has been received (spin to service the callback)
	scan_received = false;
	while(!scan_received)
		ros::spinOnce();

	// calculate nearest object pose
	geometry_msgs::PoseStamped pose;
	pose = calculateNearestObject(laser_scan);

	res.pose = pose.pose;


	// unsubscribe from topic to save computational resources
	sub_scan.shutdown();
	subscribed_to_topic = false;

	return true;
}
Example #7
void cam_info_callback(const sensor_msgs::CameraInfo &msg)
{
    tgCamParams = TomGine::tgCamera::Parameter(msg);
    ROS_INFO("Camera parameters received.");
    camera_info = msg;
    need_cam_init = false;
    cam_info_sub.shutdown();
}
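Several of these examples share the same one-shot idiom: subscribe, cache the first message, then shut the subscriber down from inside its own callback. A self-contained sketch of that idiom (the node name "cam_info_once" and the topic "camera_info" are assumptions):

#include <ros/ros.h>
#include <sensor_msgs/CameraInfo.h>

ros::Subscriber cam_info_sub;        // file scope so the callback can shut it down
sensor_msgs::CameraInfo camera_info;
bool cam_info_received = false;

void camInfoOnce(const sensor_msgs::CameraInfo& msg)
{
    camera_info = msg;
    cam_info_received = true;
    cam_info_sub.shutdown();         // no further CameraInfo callbacks after this returns
}

int main(int argc, char** argv)
{
    ros::init(argc, argv, "cam_info_once");
    ros::NodeHandle nh;
    cam_info_sub = nh.subscribe("camera_info", 1, camInfoOnce);

    // spin until the single message has arrived
    while (ros::ok() && !cam_info_received)
    {
        ros::spinOnce();
        ros::Duration(0.05).sleep();
    }
    return 0;
}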
Example #8
    void XYOriginCallback(const topic_tools::ShapeShifter::ConstPtr msg)
    {
      try
      {
        const gps_common::GPSFixConstPtr origin = msg->instantiate<gps_common::GPSFix>();
        xy_wgs84_util_.reset(
            new swri_transform_util::LocalXyWgs84Util(
                origin->latitude,
                origin->longitude,
                origin->track,
                origin->altitude));
        origin_sub_.shutdown();
        return;
      }
      catch (...) {}

      try
      {
        const geometry_msgs::PoseStampedConstPtr origin = msg->instantiate<geometry_msgs::PoseStamped>();
        xy_wgs84_util_.reset(
            new swri_transform_util::LocalXyWgs84Util(
                origin->pose.position.y,    // Latitude
                origin->pose.position.x,    // Longitude
                0.0,                        // Heading
                origin->pose.position.z));  // Altitude
        origin_sub_.shutdown();
        return;
      }
      catch(...) {}

      try
      {
        const geographic_msgs::GeoPoseConstPtr origin = msg->instantiate<geographic_msgs::GeoPose>();
        xy_wgs84_util_.reset(
            new swri_transform_util::LocalXyWgs84Util(
                origin->position.latitude,
                origin->position.longitude,
                tf::getYaw(origin->orientation),
                origin->position.altitude));
        origin_sub_.shutdown();
        return;
      }
      catch(...) {}

      ROS_ERROR_THROTTLE(1.0, "Unsupported message type received for local_xy_origin.");
    }
void wallDetectCB(std_msgs::Bool)
{
	ROS_INFO("Drag Race Controller got wall.");
	racecar_set_speed(0);
	race_end_subscriber.shutdown();
	system("rosnode kill iarrc_lane_detection &\n");
	system("rosnode kill drag_race_end_detector &\n");
	exit(0);
}
 void unsubscribe()
 {
   if (use_indices_) {
     sub_input_.unsubscribe();
     sub_indices_.unsubscribe();
   }
   else {
     sub_.shutdown();
   }
 }
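The helper above mixes two different teardown calls: a message_filters subscriber is paused with unsubscribe() (the object stays alive and can re-subscribe later), while a plain ros::Subscriber is released with shutdown(). A minimal sketch of a class around it (the class name and message types are assumptions):

#include <ros/ros.h>
#include <message_filters/subscriber.h>
#include <sensor_msgs/PointCloud2.h>
#include <pcl_msgs/PointIndices.h>

class CloudFilterNode
{
public:
  void unsubscribe()
  {
    if (use_indices_) {
      // message_filters: only the underlying subscription is dropped,
      // subscribe() can re-attach the same objects later
      sub_input_.unsubscribe();
      sub_indices_.unsubscribe();
    } else {
      sub_.shutdown();   // plain subscriber: release the subscription entirely
    }
  }

private:
  bool use_indices_ = false;
  ros::Subscriber sub_;
  message_filters::Subscriber<sensor_msgs::PointCloud2> sub_input_;
  message_filters::Subscriber<pcl_msgs::PointIndices> sub_indices_;
};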
Example #11
  void stop()
  {
    if (!running_) return;

    cam_->stop(); // Must stop camera before streaming_pub_.
    poll_srv_.shutdown();
    trigger_sub_.shutdown();
    streaming_pub_.shutdown();

    running_ = false;
  }
void stoplightCB(std_msgs::Bool)
{
	ROS_INFO("Drag Race Controller got stoplight.");
	racecar_set_speed(14);
	//racecar_set_speed(0);
	stoplight_subscriber.shutdown();
	system("rosnode kill stoplight_watcher &\n");
	system("roslaunch iarrc_launch lane_detection.launch &\n");
	system("roslaunch iarrc_launch race_end_detector.launch &\n");
	race_end_subscriber = nh->subscribe(race_end_topic, 1, wallDetectCB);
}
Example #13
//service callback:
//pause the topic model
void pause(bool p){
  if(p){
    ROS_INFO("stopped listening to words");
    word_sub.shutdown();
  }
  else{
    ROS_INFO("started listening to words");
    word_sub = nh->subscribe("words", 10, words_callback);
  }
  rost->pause(p);
  paused=p;
}
bool stop(std_srvs::Empty::Request &req, std_srvs::Empty::Response &res)
{
	if(subscribed_to_topic)
	{
		sub_scan.shutdown();
		subscribed_to_topic = false;
	}

	continues_mode_enabled = false;

	ROS_INFO("nearest object detector DISABLED");
	return true;
}
    bool
    stop (camera_srv_definitions::start_tracker::Request & req,
          camera_srv_definitions::start_tracker::Response & response)
    {
#ifdef USE_PCL_GRABBER
        if(interface.get())
            interface->stop();
#else
        camera_topic_subscriber_.shutdown();
#endif
        if (!camtracker.empty())
          camtracker->stopObjectManagement();
        return true;
    }
Example #16
 // gets camera intrinsic parameters
 void camInfoCB(const sensor_msgs::CameraInfoConstPtr& camInfoMsg)
 {
     //get camera info
     image_geometry::PinholeCameraModel cam_model;
     cam_model.fromCameraInfo(camInfoMsg);
     camMat = cv::Mat(cam_model.fullIntrinsicMatrix());
     camMat.convertTo(camMat,CV_32FC1);
     cam_model.distortionCoeffs().convertTo(distCoeffs,CV_32FC1);
     
     //unregister subscriber
     ROS_DEBUG("Got camera intrinsic parameters!");
     camInfoSub.shutdown();
     gotCamParam = true;
 }
Example #17
    // wait for one camerainfo, then shut down that subscriber
    void cam_info_callback(const sensor_msgs::CameraInfo &msg)
    {
        camParam = aruco_ros::rosCameraInfo2ArucoCamParams(msg, useRectifiedImages);

        // handle cartesian offset between stereo pairs
        // see the sensor_msgs/CameraInfo documentation for details
        rightToLeft.setIdentity();
        rightToLeft.setOrigin(
            tf::Vector3(
                -msg.P[3]/msg.P[0],
                -msg.P[7]/msg.P[5],
                0.0));

        cam_info_received = true;
        cam_info_sub.shutdown();
    }
Example #18
//MAIN
int main(int argc, char** argv)
{

	ros::init(argc, argv, "wheel_motor_node"); //Initialize node

	ros::NodeHandle n; //Create nodehandle object

	sub = n.subscribe("wheel_motor_rc", 1000, callBack); //Create object to subscribe to topic "wheel_motor_rc"
	
	pub = n.advertise<motor_controller::AdaCmd>("adaFruit",1000); //Create object to publish to topic "I2C"
	while(pub.getNumSubscribers() == 0); // Block until the publisher is fully connected to at least one subscriber before sending.
	
	//ros::Rate loop_rate(10); //Set frequency of looping. 10 Hz
	
	setGPIOWrite(33,1); //Motor enable

	while(ros::ok())
	{
		//Update time
		current_time = ros::Time::now();
		//Check if interval has passed
		if(current_time - last_time > update_rate)
		{
			//Reset time
			last_time = current_time;
			if(sub.getNumPublishers() == 0) //In case of loss of connection to publisher, set controller outputs to 0
			{
				ROS_WARN("Loss of wheel motor controller input!");
				leftFrontWheel.setU(0); //Set controller inputs
				leftRearWheel.setU(0);
				rightRearWheel.setU(0);
				rightFrontWheel.setU(0);
			}

			controlFunction(); //Motor controller function
			pub.publish(generateMessage());
		}
		
		ros::spinOnce();
	}


	stopAllMotors();

	return 0;
}
Example #19
        // The real initialization is being done after receiving the camerainfo.
        void cam_info_callback(const sensor_msgs::CameraInfo &msg)
        {
            if(parent_->tracker == 0)
            {
                ROS_INFO("Camera parameters received, ready to run.");
                cam_info_sub.shutdown();
                parent_->tracker = new blort_ros::GLTracker(msg, parent_->root_, true);

                image_sub = parent_->it_.subscribe("/blort_image", 10, &TrackerNode::imageCb, parent_);
                parent_->control_service = parent_->nh_.advertiseService("tracker_control", &TrackerNode::trackerControlServiceCb, parent_);
                parent_->server_ = std::auto_ptr<dynamic_reconfigure::Server<blort_ros::TrackerConfig> >
                                   (new dynamic_reconfigure::Server<blort_ros::TrackerConfig>());
                parent_->f_ = boost::bind(&TrackerNode::TrackingMode::reconf_callback, this, _1, _2);
                parent_->server_->setCallback(parent_->f_);
                parent_->recovery_client = parent_->nh_.serviceClient<blort_ros::RecoveryCall>("/blort_detector/pose_service");
            }
        }
Example #20
bool stopServiceCallBack(qbo_video_record::StopRecord::Request  &req,
         qbo_video_record::StopRecord::Response &res )
{
  ROS_INFO("Service Called");
  ROS_INFO("Recived:Stop");
//  cvReleaseVideoWriter(&writer);
  if (status==1)
  {
    status=0;
    kill( pID, SIGKILL );
    sub.shutdown();
    thread thread_1 = thread(combineAudioVideo);
    thread_1.detach(); // otherwise a joinable std::thread would terminate the program when it goes out of scope
    res.result=true;
  }
  else{
    res.result=false;
  }
  return true;
}
Example #21
 void cam_info_callback(const sensor_msgs::CameraInfo &msg)
 {
     if(detector == 0)
     {
         ROS_INFO("Camera parameters received, ready to run.");
         cam_info_sub.shutdown();
         detector = new blort_ros::GLDetector(msg, root_);
         if(nn_match_threshold != 0.0)
             detector->setNNThreshold(nn_match_threshold);
         if(ransac_n_points_to_match != 0)
             detector->setRansacNPointsToMatch(ransac_n_points_to_match);
         pose_service = nh_.advertiseService("pose_service", &DetectorNode::recovery, this);
         // lines for dynamic_reconfigure
         server_ = std::auto_ptr<dynamic_reconfigure::Server<blort_ros::DetectorConfig> >
                   (new dynamic_reconfigure::Server<blort_ros::DetectorConfig>());
         f_ = boost::bind(&DetectorNode::reconf_callback, this, _1, _2);
         server_->setCallback(f_);
     }
 }
Example #22
		MyLittleTurtle(ros::NodeHandle& nodeHandle): m_nodeHandle(nodeHandle)
		{
			ROS_INFO("Publishing topic...");
			m_turtlesimPublisher = m_nodeHandle.advertise<geometry_msgs::Twist>("/turtle1/cmd_vel", 10);
			
			ROS_INFO("Subscribing to turtlesim topic...");
			m_turtlesimSubscriber = m_nodeHandle.subscribe("/turtle1/pose", 10, &MyLittleTurtle::updateTurtleStatus, this);

			ROS_INFO("Waiting for turtlesim...");
			ros::Rate loopRate(10);
			while (ros::ok() && (m_turtlesimPublisher.getNumSubscribers() <= 0 || m_turtlesimSubscriber.getNumPublishers() <= 0))
				loopRate.sleep();
			checkRosOk_v();
			
			ROS_INFO("Retrieving initial status...");
			ros::spinOnce();
			ROS_INFO("Initial status: x=%.3f, y=%.3f, z=%.3f", m_status.x, m_status.y, m_status.z);
			
			ROS_INFO("Everything's ready.");
		}
    // move platform server receives a new goal
    void moveGoalCB()
    {
        set_terminal_state_ = true;

        move_goal_.nav_goal = as_.acceptNewGoal()->nav_goal;
        ROS_INFO_STREAM_NAMED(logger_name_, "Received Goal #" <<move_goal_.nav_goal.header.seq);

        if (as_.isPreemptRequested() ||!ros::ok())
        {
            ROS_WARN_STREAM_NAMED(logger_name_, "Preempt Requested on goal #" << move_goal_.nav_goal.header.seq);
            if (planning_)
                ac_planner_.cancelGoalsAtAndBeforeTime(ros::Time::now());
            if (controlling_)
                ac_control_.cancelGoalsAtAndBeforeTime(ros::Time::now());
            move_result_.result_state = 0;
            move_result_.error_string = "Preempt Requested!!!";
            as_.setPreempted(move_result_);
            return;
        }

        // Convert move goal to plan goal
        pose_goal_.pose_goal = move_goal_.nav_goal;

        if (planner_state_sub.getNumPublishers()==0)
        {
            ROS_WARN_STREAM_NAMED(logger_name_, "Goal #" << move_goal_.nav_goal.header.seq << " not sent - planner is down");
            planning_ = false;
            move_result_.result_state = 0;
            move_result_.error_string = "Planner is down";
            as_.setAborted(move_result_);
        }
        else
        {
            ac_planner_.sendGoal(pose_goal_, boost::bind(&MovePlatformAction::planningDoneCB, this, _1, _2),
                                 actionlib::SimpleActionClient<oea_planner::planAction>::SimpleActiveCallback(),
                                 actionlib::SimpleActionClient<oea_planner::planAction>::SimpleFeedbackCallback());
            planning_ = true;
            ROS_DEBUG_STREAM_NAMED(logger_name_, "Goal #" << move_goal_.nav_goal.header.seq << " sent to planner");
        }
        return;
    }
    void planningDoneCB(const actionlib::SimpleClientGoalState& state, const oea_planner::planResultConstPtr &result)
    {
        planning_ = false;
        ROS_DEBUG_STREAM_NAMED(logger_name_, "Plan Action finished: " << state.toString());

        move_result_.result_state = result->result_state;

        if (move_result_.result_state) //if plan OK
        {
            planned_path_goal_.plan_goal = result->planned_path; // goal for the controller is result of the planner

            if (ctrl_state_sub.getNumPublishers()==0)
            {
                ROS_WARN_STREAM_NAMED(logger_name_, "Goal #" << move_goal_.nav_goal.header.seq << " not sent - Controller is down");
                controlling_ = false;
                move_result_.result_state = 0;
                move_result_.error_string = "Controller is down!!!";
                as_.setAborted(move_result_);
            }
            else
            {
                ac_control_.sendGoal(planned_path_goal_, boost::bind(&MovePlatformAction::ControlDoneCB, this, _1, _2),
                                     actionlib::SimpleActionClient<oea_controller::controlPlatformAction>::SimpleActiveCallback(),
                                     actionlib::SimpleActionClient<oea_controller::controlPlatformAction>::SimpleFeedbackCallback()); //boost::bind(&MovePlatformAction::ControlFeedbackCB, this,_1));
                controlling_ = true;
                ROS_DEBUG_STREAM_NAMED(logger_name_,"Goal #" << move_goal_.nav_goal.header.seq << " sent to Controller");
            }
        }
        else //if plan NOT OK
        {
            ac_control_.cancelGoalsAtAndBeforeTime(ros::Time::now());

            move_result_.error_string = "Planning Failed: " + result->error_string;
            ROS_WARN_STREAM_NAMED(logger_name_, "Aborting because " << move_result_.error_string);

            as_.setAborted(move_result_);
        }
        return;

    }
Example #25
    void pr2_marker_cb_old(ar_track_alvar_msgs::AlvarMarkersConstPtr markers) {
        static int state = 0;
        geometry_msgs::PointStamped point;
        point.header.frame_id = "/base_link";
        point.point.x = 0.3;

        point.point.z = 1;
        if (state == 0) {
            point.point.y = -0.5;
            head_look_at_pub.publish(point);
            ros::Duration(2).sleep();

            state = 1;
        } else if (state == 1 && pr2_poses.size() >= POSES_COUNT/2) {
            point.point.y = 0.2;
            head_look_at_pub.publish(point);
            ros::Duration(2).sleep();
            state = 2;
        } else if (state == 2 && pr2_poses.size() >= POSES_COUNT) {
            tr_pr2_ = create_transform_from_poses_vector(pr2_poses, robot_frame_);
            pr2_marker_sub.shutdown();
            pr2_calibration_done_ = true;
            tr_timer_ = nh_.createTimer(ros::Duration(0.01), &ArtCalibration::trCallback, this);

        }

        geometry_msgs::Pose pose;
        try {
            pose = get_main_marker_pose(*markers);
            pr2_poses.push_back(pose);
        }
        catch (NoMainMarker& e) {
            std::cout << e.what() << std::endl;
            return;
        }

    }
Example #26
// Connection callback that unsubscribes from the tracker if no one is subscribed.
void connectCallback(ros::Subscriber &sub_msg,
                     ros::NodeHandle &n,
                     string gp_topic,
                     string img_topic,
                     Subscriber<GroundPlane> &sub_gp,
                     Subscriber<CameraInfo> &sub_cam,
                     image_transport::SubscriberFilter &sub_col,
                     image_transport::ImageTransport &it){
    if(!pub_message.getNumSubscribers() && !pub_result_image.getNumSubscribers() && !pub_detected_persons.getNumSubscribers()) {
        ROS_DEBUG("HOG: No subscribers. Unsubscribing.");
        sub_msg.shutdown();
        sub_gp.unsubscribe();
        sub_cam.unsubscribe();
        sub_col.unsubscribe();
    } else {
        ROS_DEBUG("HOG: New subscribers. Subscribing.");
        if(strcmp(gp_topic.c_str(), "") == 0) {
            sub_msg = n.subscribe(img_topic.c_str(), 1, &imageCallback);
        }
        sub_cam.subscribe();
        sub_gp.subscribe();
        sub_col.subscribe(it,sub_col.getTopic().c_str(),1);
    }
}
Example #27
    void saveImagesToDisk(ros::NodeHandle& nh, ros::Subscriber& sub)
    {	 
    	//pcl::ScopeTime t1 ("save images");
    	  {  
          boost::mutex::scoped_lock lock(rgb_mutex_);   
          memcpy( cv_rgb_.data, &rgb_data_[0], 3*rows_*cols_*sizeof(unsigned char));
          
          new_rgb_ = false;
        }
        
        capture_->stop();        
        sub.shutdown();        
        sub = nh.subscribe("/camera/depth/image_raw", 1, &OpenNIShoter::frameDepthCallback, this);        
        new_depth_ = false;
        capture_->start();
        
        do
        {         
          if (new_depth_ == true )
          {
            boost::mutex::scoped_lock lock(depth_mutex_);   
            memcpy( cv_depth_.data, &depth_data_[0], rows_*cols_*sizeof(uint16_t));
          
            new_depth_ = false;
            break;
          }
          
          boost::this_thread::sleep (boost::posix_time::millisec (10)); 
             
        } while (1);
        
        
        capture_->stop();        
        sub.shutdown();        
        sub = nh.subscribe("/camera/ir/image", 1, &OpenNIShoter::frameIRCallback, this);   
        new_ir_ = false;         
        capture_->start();
        
        do
        {         
          if (new_ir_ == true )
          {            
            boost::mutex::scoped_lock lock(ir_mutex_);   
            memcpy( cv_ir_raw_.data, &ir_data_[0], rows_*cols_*sizeof(uint16_t));            
            
            cv_ir_raw_.convertTo(cv_ir_, CV_8U); 
            cv::equalizeHist( cv_ir_, cv_ir_ );
            
            int max = 0;
            
            for (int i=0; i< rows_; i++)
            {
              for (int j=0; j< cols_; j++)
              {
               if (ir_data_[i+j*rows_] > max)
                max = ir_data_[i+j*rows_];
              }
            }
            
            std::cout << "max IR val: " << max << std::endl;
            
          
            new_ir_ = false;
            break;
          }
          
          boost::this_thread::sleep (boost::posix_time::millisec (10)); 
             
        } while (1);
        
        capture_->stop();        
        sub.shutdown();        
        sub = nh.subscribe("/camera/rgb/image_raw", 1, &OpenNIShoter::frameRGBCallback, this);         
        capture_->start();

	      sprintf(depth_file, "/Depth/%018ld.png", timestamp_depth_ );
	      sprintf(rgb_file, "/RGB/%018ld.png", timestamp_rgb_);  
        sprintf(ir_file, "/IR/%018ld.png", timestamp_ir_);  
        
	      cv::imwrite( write_folder_ + depth_file, cv_depth_);
	      cv::imwrite( write_folder_ + rgb_file, cv_rgb_);
        cv::imwrite( write_folder_ + ir_file, cv_ir_);
	      
	      sprintf(depth_file_PNG, "/Depth/%018ld.png", timestamp_depth_);
	      sprintf(rgb_file_PNG, "/RGB/%018ld.png", timestamp_rgb_);
        sprintf(ir_file_PNG, "/IR/%018ld.png", timestamp_ir_);

	      off_rgb_.width(18);
	      off_rgb_.fill('0');
	      off_rgb_ << timestamp_rgb_;
	      off_rgb_ <<  " " << rgb_file_PNG << std::endl;
        
        off_depth_.width(18);
	      off_depth_.fill('0');
	      off_depth_ << timestamp_depth_;
	      off_depth_ <<  " " << depth_file_PNG << std::endl;
        
        off_ir_.width(18);
	      off_ir_.fill('0');
	      off_ir_ << timestamp_ir_;
	      off_ir_ <<  " " << ir_file_PNG << std::endl;
	  }
    bool
    start (camera_srv_definitions::start_tracker::Request & req,
           camera_srv_definitions::start_tracker::Response & response)
    {
        cameras_.clear();
        keyframes_.clear();
        saved_clouds_ = 0;
        conf_=0;
        pose_ = Eigen::Matrix4f::Identity();

        initializeVisualizationMarkers();

#ifdef USE_PCL_GRABBER
        try
        {
            interface.reset( new pcl::io::OpenNI2Grabber() );
        }
        catch (const pcl::IOException& e)
        {
            std::cout << "PCL threw error " << e.what()
                      << ". Could not start camera..." << std::endl;
            return false;
        }

        cv::Mat_<double> distCoeffs;
        cv::Mat_<double> intrinsic = cv::Mat::zeros(3, 3, CV_64F);
        intrinsic.at<double>(0,0) = 525.f;
        intrinsic.at<double>(1,1) = 525.f;
        intrinsic.at<double>(0,2) = 320.f;
        intrinsic.at<double>(1,2) = 240.f;
        intrinsic.at<double>(2,2) = 1.f;
        std::cout << intrinsic << std::endl << std::endl;

        camtracker.reset( new v4r::KeypointSlamRGBD2(param) );
        camtracker->setCameraParameter(intrinsic,distCoeffs);

        boost::function<void (const pcl::PointCloud<pcl::PointXYZRGBA>::ConstPtr&)> f =
          boost::bind (&CamTracker::cloud_cb_, this, _1);
        interface->registerCallback (f);
        interface->start ();

        std::cout << "Camera started..." << std::endl;
#else
        camera_info_subscriber_ = n_->subscribe(camera_topic_ +"/camera_info", 1, &CamTracker::camera_info_cb, this);

        ROS_INFO_STREAM("Wating for camera info...topic=" << camera_topic_ << "/camera_info...");
        while (!got_camera_info_) {
            ros::Duration(0.1).sleep();
            ros::spinOnce();
        }
        ROS_INFO("got it.");
        camera_info_subscriber_.shutdown();

        cv::Mat_<double> distCoeffs = cv::Mat(4, 1, CV_64F, camera_info_.D.data());
        cv::Mat_<double> intrinsic = cv::Mat(3, 3, CV_64F, camera_info_.K.data());

        camtracker.reset( new v4r::KeypointSlamRGBD2(param) );
        camtracker->setCameraParameter(intrinsic,distCoeffs);

        confidence_publisher_ = n_->advertise<std_msgs::Float32>("cam_tracker_confidence", 1);
        camera_topic_subscriber_ = n_->subscribe(camera_topic_ +"/points", 1, &CamTracker::getCloud, this);
#endif
        last_cloud_ = boost::posix_time::microsec_clock::local_time ();
        last_cloud_ros_time_ = ros::Time::now();
        return true;
    }
 void sendGoalFromTopic(const geometry_msgs::PoseArrayConstPtr& msg)
 {
   ROS_INFO("[pick and place] Got goal from topic! %s", goal_->topic.c_str());
   pickAndPlace(msg->poses[0], msg->poses[1]);
   pick_and_place_sub_.shutdown();
 }
Example #30
 // wait for one camerainfo, then shut down that subscriber
 void cam_info_callback(const sensor_msgs::CameraInfo &msg)
 {
   camParam = aruco_ros::rosCameraInfo2ArucoCamParams(msg, useRectifiedImages);
   cam_info_received = true;
   cam_info_sub.shutdown();
 }