// Appends one CSV row per frame: "<stamp_ns>,<stamp_ns>.png", i.e. the timestamp
// in nanoseconds and the file name of the image saved under that stamp.
void writeCSVCamera(std::shared_ptr<std::ofstream> file, ros::Time stamp)
{
  std::stringstream ss;
  ss << stamp.toNSec() << "," << stamp.toNSec() << ".png";

  *file << ss.str() << std::endl;
}
Transform OdometryROS::getTransform(const std::string & fromFrameId, const std::string & toFrameId, const ros::Time & stamp) const
{
	// TF ready?
	Transform transform;
	try
	{
		if(waitForTransform_ && !stamp.isZero() && waitForTransformDuration_ > 0.0)
		{
			//if(!tfBuffer_.canTransform(fromFrameId, toFrameId, stamp, ros::Duration(1)))
			if(!tfListener_.waitForTransform(fromFrameId, toFrameId, stamp, ros::Duration(waitForTransformDuration_)))
			{
				ROS_WARN("odometry: Could not get transform from %s to %s (stamp=%f) after %f seconds (\"wait_for_transform_duration\"=%f)!",
						fromFrameId.c_str(), toFrameId.c_str(), stamp.toSec(), waitForTransformDuration_, waitForTransformDuration_);
				return transform;
			}
		}

		tf::StampedTransform tmp;
		tfListener_.lookupTransform(fromFrameId, toFrameId, stamp, tmp);
		transform = rtabmap_ros::transformFromTF(tmp);
	}
	catch(tf::TransformException & ex)
	{
		ROS_WARN("%s",ex.what());
	}
	return transform;
}
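A minimal usage sketch of the function above (hypothetical frame names; rtabmap's Transform default-constructs to a null transform, so the caller checks isNull() to detect the failure path):

// Hypothetical call site inside OdometryROS; "odom" and "base_link" are example frames.
Transform t = getTransform("odom", "base_link", ros::Time(0));
if(t.isNull())
{
	ROS_WARN("odometry: TF not available yet, skipping this update.");
}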
/**
 * @brief TimeManipulator::str
 * Converts a ros::Time into a human-readable "MM/DD/YYYY HH:MM[:SS.frac]" date/time string.
 * @param time the time to convert (seconds since the Unix epoch, UTC)
 * @return the formatted date/time string
 */
std::string TimeManipulator::str(const ros::Time& time)
{
  double tenths = time.toSec() - floor(time.toSec());
  long minutes_unix = MathManipulator::getUnsignedDivision((long) floor(time.toSec()), 60);
  int seconds = MathManipulator::getUnsignedRest((long) floor(time.toSec()), 60);
  long hours_unix = MathManipulator::getUnsignedDivision(minutes_unix, 60);
  int minutes = MathManipulator::getUnsignedRest(minutes_unix, 60);
  long days_unix = MathManipulator::getUnsignedDivision(hours_unix, 24);
  int hours = MathManipulator::getUnsignedRest(hours_unix, 24);
  long periods_of_400_years = MathManipulator::getUnsignedDivision(days_unix, 146097);
  int days_in_400_years_period = MathManipulator::getUnsignedRest(days_unix, 146097);
  if (days_in_400_years_period >= 32 * 1461 + 789)
  {
    days_in_400_years_period++;
  }
  if (days_in_400_years_period >= 57 * 1461 + 789)
  {
    days_in_400_years_period++;
  }
  if (days_in_400_years_period >= 82 * 1461 + 789)
  {
    days_in_400_years_period++;
  }
  int periods_of_4_years = days_in_400_years_period / 1461;
  int days_in_4_years_period = days_in_400_years_period % 1461;
  if (days_in_4_years_period >= 59)
  {
    days_in_4_years_period++;
  }
  if (days_in_4_years_period >= 425)
  {
    days_in_4_years_period++;
  }
  if (days_in_4_years_period >= 1157)
  {
    days_in_4_years_period++;
  }
  int year_in_4_years_period = days_in_4_years_period / 366;
  int year_days = days_in_4_years_period % 366;
  int year = year_in_4_years_period + periods_of_4_years * 4 + periods_of_400_years * 400 + 1970;
  int months_table[] = {31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};
  int month_counter = 0;
  while(year_days > months_table[month_counter])
  {
    year_days -= months_table[month_counter++];
  }
  int month = month_counter + 1;
  int day = year_days + 1;
  std::stringstream ss;
  ss << (month < 10 ? "0" : "") << month << (day < 10 ? "/0" : "/") << day << (year < 10 ? "/0" : "/") << year;
  ss << (hours < 10 ? " 0" : " ") << hours << (minutes < 10 ? ":0" : ":") << minutes;
  if (seconds + tenths > 0)
  {
    ss << (seconds < 10 ? ":0" : ":") << seconds + tenths;
  }
  return ss.str();
}
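For comparison, a minimal sketch of the same Unix-time-to-calendar conversion using the C library (gmtime_r), assuming UTC and the same MM/DD/YYYY HH:MM:SS layout; the hand-rolled arithmetic above avoids this dependency.

#include <ros/time.h>
#include <cstdio>
#include <ctime>
#include <string>

std::string strUsingGmtime(const ros::Time& time)
{
  std::time_t secs = static_cast<std::time_t>(time.sec);  // whole seconds since the epoch
  std::tm utc;
  gmtime_r(&secs, &utc);                                   // Unix time -> calendar fields (UTC)
  char buf[32];
  std::snprintf(buf, sizeof(buf), "%02d/%02d/%04d %02d:%02d:%02d",
                utc.tm_mon + 1, utc.tm_mday, utc.tm_year + 1900,
                utc.tm_hour, utc.tm_min, utc.tm_sec);
  return std::string(buf);
}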
Example #4
void VoNode::imgCb(const sensor_msgs::ImageConstPtr& msg)
{
  cv::Mat img;
  try {
    img = cv_bridge::toCvShare(msg, "mono8")->image;
  } catch (cv_bridge::Exception& e) {
    ROS_ERROR("cv_bridge exception: %s", e.what());
    return;  // without a valid image there is nothing to process
  }
  processUserActions();

  double diff1 = msg->header.stamp.toSec() - time_start.toSec();
//  std::cout<<"diff1: "<< diff1 <<std::endl;

  if((diff1 > time_window.toSec()) && state == SSTART){
      vo_->reset();
      vo_->start();
      state = SGOT_KEY_1;
  }
  vo_->addImage(img, msg->header.stamp.toSec());
  svo_scale_ = vo_->svo_scale_;

  if((vo_->stage() == FrameHandlerMono::STAGE_SECOND_FRAME) && state == SGOT_KEY_1)
  {
       std::cout<<"the first frame at time: "<<msg->header.stamp.toSec()<<std::endl;
       time_first = msg->header.stamp;
       state = SGOT_KEY_2;
  }

  if((vo_->stage() == FrameHandlerMono::STAGE_DEFAULT_FRAME) && state == SGOT_KEY_2)
  {
       //std::cout<<"the second frame at time: "<<msg->header.stamp.toSec()<<std::endl;
       time_second = msg->header.stamp;
       state = SGETTING_SCALE;
  }

  double diff2 = msg->header.stamp.toSec() - time_second.toSec();
//  std::cout<<"diff2: "<< diff2 <<std::endl;
//  std::cout<<"state: "<< state <<std::endl;

  if((diff2 > time_window.toSec()) && state == SGETTING_SCALE){
      std::cout<<"time_first: "<< time_first <<std::endl;
      std::cout<<"time_second: "<< time_second <<std::endl;
      if(VoNode::initCb()){
          state = SDEFAULT;
      }
  }

  visualizer_.publishMinimal(img, vo_->lastFrame(), *vo_, msg->header.stamp.toSec());

  if(publish_markers_){
      visualizer_.visualizeMarkers(vo_->lastFrame(), vo_->coreKeyframes(), vo_->map(), state == SDEFAULT, svo_scale_, our_scale_);
  }

  if(publish_dense_input_)
    visualizer_.exportToDense(vo_->lastFrame());

  if(vo_->stage() == FrameHandlerMono::STAGE_PAUSED)
    usleep(100000);
}
void printTime()
{
  ros::Duration d = current_time - start_time;

  if (paused)
    printf("\r [PAUSED]   Log Time: %13.6f   Duration: %.6f            \r", current_time.toSec(), d.toSec());
  else
    printf("\r [RUNNING]  Log Time: %13.6f   Duration: %.6f            \r", current_time.toSec(), d.toSec());
  fflush(stdout);
}
void encoderTickCallback(const sek_drive::encoders::ConstPtr& msg)
{
    renc = - msg->right_wheel;
    lenc = msg->left_wheel;
    current_time = ros::Time::now();
    if ((current_time.toSec() - enc_loop_time.toSec()) >= 0.05)
    {
        calcOdom();
        enc_loop_time = current_time;   // reset the 50 ms timer that the condition above reads
    }
}
void currentCallback(const std_msgs::Float32::ConstPtr& current){
	last_time = curr_time;
	curr_time = ros::Time::now(); 
	if(last_time.toSec() != 0.0){
		inst_power = current->data*22.0;	
		float inst_coulombs = current->data*(curr_time.toSec() - last_time.toSec());
		//ROS_INFO("inst_coulombs = %f", inst_coulombs);
		//ROS_INFO("curr coulombs, before addition: %f", curr_coulombs);
		curr_coulombs = curr_coulombs + inst_coulombs;
		//ROS_INFO("curr_coulombs, after addition:  %f", curr_coulombs);
	} //else, ignore the first value
}
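A minimal sketch (hypothetical battery capacity) of how the accumulated curr_coulombs from currentCallback() could be turned into a remaining-charge estimate; the 22.0 factor above is presumably a nominal pack voltage in volts.

#include <algorithm>

// Hypothetical helper: fraction of charge left given coulombs drawn so far and
// a nominal capacity in amp-hours (1 Ah = 3600 C).
double stateOfCharge(double coulombs_used, double capacity_Ah)
{
  double capacity_C = capacity_Ah * 3600.0;
  return std::max(0.0, 1.0 - coulombs_used / capacity_C);
}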
  void callback(const tPointCloud::ConstPtr& rgb_cloud, 
                const tImage::ConstPtr& rgb_image, 
                const tCameraInfo::ConstPtr& rgb_caminfo,
                const tImage::ConstPtr& depth_image, 
                const tPointCloud::ConstPtr& cloud
                )
  {
    if (max_update_rate_ > 0.0)
    {
      NODELET_DEBUG("update set to %f", max_update_rate_);
      if ( last_update_ + ros::Duration(1.0/max_update_rate_) > ros::Time::now())
      {
        NODELET_DEBUG("throttle last update at %f skipping", last_update_.toSec());
        return;
      }
    }
    else
      NODELET_DEBUG("update_rate unset continuing");

    last_update_ = ros::Time::now();

    rgb_cloud_pub_.publish(rgb_cloud);
    rgb_image_pub_.publish(rgb_image);
    rgb_caminfo_pub_.publish(rgb_caminfo);
    depth_image_pub_.publish(depth_image);
    cloud_pub_.publish(cloud);
  }
Example #9
void revert_applied_readings_since(const ros::Time& time)
{
    int msec = (int)(time.toNSec()/1000l);  // note: /1000 gives microseconds; this must match the unit of enc.timestamp

    int k = 0;

    for(std::vector<ras_arduino_msgs::Encoders>::reverse_iterator it = _ringbuffer.rbegin(); it != _ringbuffer.rend(); )
    {
        ras_arduino_msgs::Encoders& enc = *it;
        if (enc.timestamp >= msec)
        {
            ++k;

            double dist_l = (2.0*M_PI*robot::dim::wheel_radius) * (enc.delta_encoder1 / robot::prop::ticks_per_rev);
            double dist_r = (2.0*M_PI*robot::dim::wheel_radius) * (enc.delta_encoder2 / robot::prop::ticks_per_rev);

            _theta += (dist_r - dist_l) / robot::dim::wheel_distance;

            double dist = (dist_r + dist_l) / 2.0;

            _x += dist * cos(_theta);
            _y += dist * sin(_theta);

            // Remove the reading. erase() invalidates the reverse iterator, so rebuild it
            // from the forward iterator returned by erase().
            it = std::vector<ras_arduino_msgs::Encoders>::reverse_iterator(
                    _ringbuffer.erase(std::next(it).base()));
        }
        else
        {
            ++it;
        }
    }

    ROS_ERROR("[PoseGenerator::revertReadingsSince] Reverted %d readings.", k);
}
Example #10
	/**
	 * Send transform to FCU position controller
	 *
	 * Note: send only XYZ, Yaw
	 */
	void send_setpoint_transform(const tf::Transform &transform, const ros::Time &stamp) {
		// ENU frame
		tf::Vector3 origin = transform.getOrigin();
		tf::Quaternion q = transform.getRotation();

		/* Documentation starts from bit 1 instead of 0;
		 * ignore the velocity and acceleration vectors and the yaw rate.
		 */
		uint16_t ignore_all_except_xyz_y = (1 << 11) | (7 << 6) | (7 << 3);

		if (uas->is_px4()) {
			/**
			 * Current PX4 has a bug: it cuts throttle if there is no velocity setpoint.
			 * Issue #273.
			 *
			 * @todo Revisit this quirk later. Should be fixed in firmware.
			 */
			ignore_all_except_xyz_y = (1 << 11) | (7 << 6);
		}

		// ENU->NED. Issue #49.
		set_position_target_local_ned(stamp.toNSec() / 1000000,
				MAV_FRAME_LOCAL_NED,
				ignore_all_except_xyz_y,
				origin.y(), origin.x(), -origin.z(),
				0.0, 0.0, 0.0,
				0.0, 0.0, 0.0,
				tf::getYaw(q), 0.0);
	}
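A sketch of how the ignore mask above decomposes, assuming the standard MAVLink POSITION_TARGET_TYPEMASK bit layout (bits 0-2 position, 3-5 velocity, 6-8 acceleration, bit 10 yaw, bit 11 yaw rate):

#include <cstdint>

constexpr uint16_t IGNORE_VX_VY_VZ = 7 << 3;   // 0x0038: drop the velocity setpoint
constexpr uint16_t IGNORE_AX_AY_AZ = 7 << 6;   // 0x01C0: drop the acceleration setpoint
constexpr uint16_t IGNORE_YAW_RATE = 1 << 11;  // 0x0800: drop the yaw-rate setpoint
// Equivalent to (1 << 11) | (7 << 6) | (7 << 3) used above: only XYZ and yaw remain.
constexpr uint16_t ignore_all_except_xyz_y =
    IGNORE_YAW_RATE | IGNORE_AX_AY_AZ | IGNORE_VX_VY_VZ;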
Example #11
int wait_for_iob_signal()
{
  if (iob_synchronized) {
    //std::cerr << "wait>" << std::endl;
    if (start_robothw) {
      // no wait
    }
    //std::cerr << "wait<" << std::endl;
    return 0;
  } else {
    //
    ros::Time rnow;
    ros::Duration tm = ros::Duration(0, g_period_ns);
    ros::WallDuration wtm = ros::WallDuration(0, 100000); // 0.1 ms
    while ((rnow = ros::Time::now()) < rg_ts) {
      wtm.sleep();
    }

    rg_ts += tm;
    if ((rg_ts - rnow).toSec() <= 0) {
      fprintf(stderr, "iob::overrun (%f[ms]), w:%f -> %f\n",
              (rnow - rg_ts).toSec()*1000, rnow.toSec(), rg_ts.toSec());
      do {
        rg_ts += tm;
      } while ((rg_ts - rnow).toSec() <= 0);
    }

    return 0;
  }

  return 0;
}
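The busy-wait on the ROS clock above could also be expressed with ros::Time::sleepUntil(), which works with both wall time and simulated /clock time; a minimal sketch (hypothetical helper, same period bookkeeping):

#include <ros/ros.h>

// Hypothetical helper: block until the next period boundary on the ROS clock,
// then advance the boundary for the following cycle.
void waitOnePeriod(ros::Time& next_tick, const ros::Duration& period)
{
  ros::Time::sleepUntil(next_tick);
  next_tick += period;
}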
bool ElevationMap::update(const grid_map::Matrix& varianceUpdate, const grid_map::Matrix& horizontalVarianceUpdateX,
                          const grid_map::Matrix& horizontalVarianceUpdateY,
                          const grid_map::Matrix& horizontalVarianceUpdateXY, const ros::Time& time)
{
  boost::recursive_mutex::scoped_lock scopedLock(rawMapMutex_);

  const auto& size = rawMap_.getSize();

  if (!((Index(varianceUpdate.rows(), varianceUpdate.cols()) == size).all()
      && (Index(horizontalVarianceUpdateX.rows(), horizontalVarianceUpdateX.cols()) == size).all()
      && (Index(horizontalVarianceUpdateY.rows(), horizontalVarianceUpdateY.cols()) == size).all()
      && (Index(horizontalVarianceUpdateXY.rows(), horizontalVarianceUpdateXY.cols()) == size).all())) {
    ROS_ERROR("The size of the update matrices does not match.");
    return false;
  }

  rawMap_.get("variance") += varianceUpdate;
  rawMap_.get("horizontal_variance_x") += horizontalVarianceUpdateX;
  rawMap_.get("horizontal_variance_y") += horizontalVarianceUpdateY;
  rawMap_.get("horizontal_variance_xy") += horizontalVarianceUpdateXY;
  clean();
  rawMap_.setTimestamp(time.toNSec());

  return true;
}
int NodeClass::requestBoardStatus() {
	int ret;	
	
	// Request Status of RelayBoard 
	ret = m_SerRelayBoard->sendRequest();
	if(ret != SerRelayBoard::NO_ERROR) {
		ROS_ERROR("Error in sending message to Relayboard over SerialIO, lost bytes during writing");
	}

	ret = m_SerRelayBoard->evalRxBuffer();
	if(ret==SerRelayBoard::NOT_INITIALIZED) {
		ROS_ERROR("Failed to read relayboard data over Serial, the device is not initialized");
		relayboard_online = false;
	} else if(ret==SerRelayBoard::NO_MESSAGES) {
		ROS_ERROR("For a long time, no messages from RelayBoard have been received, check com port!");
		if(time_last_message_received_.toSec() - ros::Time::now().toSec() > relayboard_timeout_) {relayboard_online = false;}
	} else if(ret==SerRelayBoard::TOO_LESS_BYTES_IN_QUEUE) {
		//ROS_ERROR("Relayboard: Too less bytes in queue");
	} else if(ret==SerRelayBoard::CHECKSUM_ERROR) {
		ROS_ERROR("A checksum error occurred while reading from relayboard data");
	} else if(ret==SerRelayBoard::NO_ERROR) {
		relayboard_online = true;
		relayboard_available = true;
		time_last_message_received_ = ros::Time::now();
	}

	return 0;
}
Example #14
	/**
	 * @brief Send velocity to FCU velocity controller
	 *
	 * @warning Send only VX VY VZ. ENU frame.
	 */
	void send_setpoint_velocity(const ros::Time &stamp, Eigen::Vector3d &vel_enu, double yaw_rate)
	{
		/**
		 * Documentation starts from bit 1 instead of 0;
		 * ignore the position and acceleration vectors and the yaw.
		 */
		uint16_t ignore_all_except_v_xyz_yr = (1 << 10) | (7 << 6) | (7 << 0);
		auto vel = [&] () {
			if (mav_frame == MAV_FRAME::BODY_NED || mav_frame == MAV_FRAME::BODY_OFFSET_NED) {
				return ftf::transform_frame_baselink_aircraft(vel_enu);
			} else {
				return ftf::transform_frame_enu_ned(vel_enu);
			}
		} ();

		auto yr = ftf::transform_frame_baselink_aircraft(Eigen::Vector3d(0.0, 0.0, yaw_rate));

		set_position_target_local_ned(stamp.toNSec() / 1000000,
					utils::enum_value(mav_frame),
					ignore_all_except_v_xyz_yr,
					Eigen::Vector3d::Zero(),
					vel,
					Eigen::Vector3d::Zero(),
					0.0, yr.z());
	}
Example #15
bool disable(Disable::Request& request, Disable::Response& response) {
  controller.enabled = false;
  controller.reset();
  lastTime.fromSec(0.0);

  return fadeToDefaultColor();
}
void BagFormat::writeData(const ros::Time& time, const QVector< Plot::LinkedBufferIterator* >& data)
{
	plot_msgs::Plot msg;
	msg.header.stamp = time;

	Q_FOREACH(Plot::LinkedBufferIterator* it, data)
	{
		if(!it)
			continue;

		const Plot::DataPoint& p = **it;
		const Plot* plot = it->plot();

		plot_msgs::PlotPoint point;
		point.name = plot->path().toStdString();
		point.value = p.value;
		msg.points.push_back(point);
	}

	try
	{
		m_bag.write("plot", time, msg);
	}
	catch(rosbag::BagException& e)
	{
		fprintf(stderr, "Bag exception: %s\n", e.what());
		fprintf(stderr, "Time was %lf\n", time.toSec());
		throw;
	}
}
double PositionCommand::chirp_command ()
{
    double max_freq = 5.0;
    double min_freq = 0.0;
    double max_amp  = 0.3;
    double min_amp  = 0.05;
    double duration = 15.0;
    double Freq, Amp, time, velocity ,f_slope , a_slope;
    ros::Time curr_time = ros::Time::now();

    f_slope = (max_freq - min_freq  ) / duration;
    a_slope = (max_amp  - min_amp   ) / duration;


    time = curr_time.toSec() - time0_chirp_.toSec();

    Amp  = time * a_slope + min_amp;
    Freq = time * f_slope + min_freq;

    velocity = -Amp * sin ( 2 * PI * Freq * time);

    std::cout   << "f = "           << Freq
                << "\ttime = "       << time
                << "\tvelocity = "   << velocity
                << "\tAmp = "        << Amp
                << std::endl;

    if (Freq >= max_freq)
    {
        chirp_enable_ = false;
        velocity = 0;
    }
    return velocity;
}
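chirp_command() above evaluates sin(2*PI*Freq*time) with a time-varying Freq, which is a common shortcut; a true linear chirp integrates the frequency ramp into the phase. A minimal sketch of that integrated-phase form, reusing the same parameter names:

#include <cmath>

// velocity(t) = -A(t) * sin(phase(t)) with phase(t) = 2*pi*(min_freq*t + 0.5*f_slope*t^2),
// so the instantaneous frequency d(phase)/dt / (2*pi) = min_freq + f_slope*t ramps linearly.
double linearChirp(double t, double min_freq, double f_slope,
                   double min_amp, double a_slope)
{
  double amp   = min_amp + a_slope * t;
  double phase = 2.0 * M_PI * (min_freq * t + 0.5 * f_slope * t * t);
  return -amp * std::sin(phase);
}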
int main(int argc, char **argv){

    ros::init(argc, argv, "remaining_memory");
	 ros::NodeHandle n;

    //Publisher part

    ros::Publisher remaining_memory_pub = n.advertise<mavros_msgs::Mavlink>("mavlink/to", 2000);
    ros::Rate loop_rate(10);
    
    FILE *in;
	 char buff[512];
    float remaining_memory = 0.0;

    int count = 0;
    while (ros::ok()) {

       // get the remaining memory using a shell command

	    if(!(in = popen("df -h /home/ | xargs | cut -d ' ' -f 14 | sed 's/%//g'", "r"))){
		     exit(1);
	    }

	    while(fgets(buff, sizeof(buff), in)!=NULL){
		     remaining_memory = (float) atoll(buff);
	    }
	    pclose(in);


        //message generation
        //1. make named value float with type mavlink_message_t
        mavlink::mavlink_message_t msg;
        mavlink::MsgMap map(msg);

        mavlink::common::msg::NAMED_VALUE_FLOAT mem_msg;

        std::array<char, 10> name = {"rem_mem"};

        ros::Time stamp = ros::Time::now();                // timestamp for the MAVLink message
        mem_msg.time_boot_ms = stamp.toNSec() / 1000000;   // nanoseconds -> milliseconds
        mem_msg.name = name;
        mem_msg.value = remaining_memory;

        mem_msg.serialize(map);

        auto rmsg = boost::make_shared<mavros_msgs::Mavlink>();

        rmsg->header.stamp = ros::Time::now();
        mavros_msgs::mavlink::convert(msg, *rmsg);

		  remaining_memory_pub.publish(rmsg);
		  ros::spinOnce();

		  loop_rate.sleep();
	 	  ++count;
	 }

    return 0;

}
Example #19
 void profilingLoggerCurrentTimeDuration(ros::Time currentTime,
                                         ros::Time startTime,
                                         LogThis &logger,
                                         std::string durationName)
 {
   logger.log(currentTime.toNSec(), "currentTime");
   logger.log((currentTime-startTime).toSec(), durationName);
 }
	bool getPoseAt(const ros::Time& time, StateType& pose, ros::Time& stamp)
	{
		std::deque<TimeState> past_poses_copy;

		{
			boost::mutex::scoped_lock past_poses_lock(past_poses_mutex_);
			past_poses_copy = past_poses_;
		}

		if (past_poses_copy.empty())
			return false;

		TimeState result = *(past_poses_copy.rbegin());
		if (result.first <= time)
		{
			typename std::deque<TimeState>::const_reverse_iterator iter =
					past_poses_copy.rbegin();
			while(iter != past_poses_copy.rend() && iter->first <= time)
			{
				iter++;
			}
			if (iter != past_poses_copy.rend())
			{
				// check which time is the nearest
				double delta_after = fabs(iter->first.toSec() - time.toSec());
				double delta_before = fabs(
						(iter - 1)->first.toSec() - time.toSec());
				if (delta_before < delta_after)
				{
					result = *(--iter);
				}
				else
				{
					result = *iter;
				}
			}
			else
			{
				result = *(--iter);
			}
		}

		pose = result.second;
		stamp = result.first;
		return true;
	}
	/**
	 * @brief Send vision speed estimate to FCU velocity controller
	 */
	void send_vision_speed(const geometry_msgs::Vector3 &vel_enu, const ros::Time &stamp) {
		Eigen::Vector3d vel_;
		tf::vectorMsgToEigen(vel_enu, vel_);
		auto vel = UAS::transform_frame_enu_ned(vel_);

		vision_speed_estimate(stamp.toNSec() / 1000,
				vel.x(), vel.y(), vel.z());
	}
	/**
	 * @brief Send vision speed estimate to FCU velocity controller
	 */
	void send_vision_speed(const geometry_msgs::Vector3 &vel_enu, const ros::Time &stamp)
	{
		Eigen::Vector3d vel_;
		tf::vectorMsgToEigen(vel_enu, vel_);
		//Transform from ENU to NED frame
		auto vel = ftf::transform_frame_enu_ned(vel_);

		vision_speed_estimate(stamp.toNSec() / 1000, vel);
	}
void TaskScheduler::enqueueAction(const ros::Time & when,  ActionType type,boost::shared_ptr<ThreadParameters> tp)
{
	ThreadAction ta;
	PRINTF(3,"ea:Locking\n");
	pthread_mutex_lock(&aqMutex);
	PRINTF(3,"ea:Locked\n");
	// if (!runScheduler) return;
	PRINTF(2,"Enqueing action %.3f %s -- %s\n",when.toSec(),actionString(type),
			tp?(tp->task->getName().c_str()):"none");

	ta.type = type;
	ta.tp = tp;
	actionQueue[when.toSec()] = ta;
	PRINTF(3,"ea:Signalling\n");
	pthread_cond_signal(&aqCond);
	PRINTF(3,"ea:Unlocking\n");
	pthread_mutex_unlock(&aqMutex);
}
Example #24
  void imageCb_rgb(const sensor_msgs::ImageConstPtr& msg)
  {

     sensor_msgs::CvBridge bridge;   // bridge object used to convert the ROS image to OpenCV
     IplImage* img = bridge.imgMsgToCv(msg,"bgr8");  // image converted from ROS to OpenCV via cv_bridge

     if (REC==1)
     {
         rgb_cur_time = ros::Time::now();
         cvShowImage( "RGB8 IMG",img);
         if(((rgb_cur_time.toSec())-(rgb_last_time.toSec()))>0.1)
         {
             time(&t);
             timer = (long)t;
             sstream << timer;
             path_ts = sstream.str();
             path_ts = path + path_ts + png;
             ros::Duration(0.01).sleep();
             printf("path %s\n", path_ts.c_str());
             const char* cstr = path_ts.c_str();
             // printf("lolol %s\n",cstr);
             //if(!(cvSaveImage(path_ts.c_str(),img)))
             if(!(cvSaveImage(cstr, img)))
             {
                 printf("Can't Capture RGB Image\n");
                 ros::Duration(0.01).sleep();
             }
             else
             {
                 printf("RGB Image Captured\n");
                 ros::Duration(0.01).sleep();
             }
             rgb_last_time = rgb_cur_time;
             rgb_cur_time = ros::Time::now();
             path_ts = "";
             sstream.str(std::string());
             sstream.clear();
             //printf("path %s\n",path_ts.c_str());
         }

     }
      cvWaitKey(2);
    }
  void scannerSignalCallback(const sensor_msgs::JointStateConstPtr& js)
  {

    double ang = fmod(js->position[0], 2 * M_PI);
    // ROS_DEBUG("ang = %lf, prev_angle = %lf, %lf", ang, prev_angle_, prev_signal_.toSec());

    if ( prev_angle_ < 0 ) {
      prev_angle_ = ang;
      return;
    }
    if ((ang - prev_angle_) >= - M_PI) {
      prev_angle_ = ang;
      return;
    }

    if (prev_signal_.toSec() == 0.0) {
      first_time_ = true;
    }

    ros::Time stmp = js->header.stamp;
    if (first_time_)
    {
      prev_signal_ = stmp;
      first_time_ = false;
    }
    else
    {
      if (num_skips_ > 0)
      {
        if (num_skips_left_ > 0)
        {
          num_skips_left_ -= 1;
          return;
        }
        else
        {
          num_skips_left_ = num_skips_;
        }
      }

      laser_assembler::AssembleScans2::Request req;
      laser_assembler::AssembleScans2::Response res;

      req.begin = prev_signal_;
      req.end   = stmp;

      if (!ros::service::call("assemble_scans2", req, res))
        ROS_ERROR("Failed to call service on point cloud assembler or laser scan assembler.");

      pub_.publish(res.cloud);
      ROS_INFO("Snapshotter::Published Cloud size=%u", res.cloud.width * res.cloud.height);

      prev_signal_ = stmp;
      prev_angle_ = -1;
    }
  }
void SensorData::imgCallback(const sensor_msgs::ImageConstPtr& img_in)
{
    static ros::Time baseTime=ros::Time::now();
    cv_bridge::toCvShare(img_in,"bgr8")->image.copyTo(img);
 //   std::cout<<curTime.toSec()<<endl;
    cv::imshow("img",img);
    ros::Time curTime=ros::Time::now();

// Save the image, using the timestamp as the file name
    double timeStamp=curTime.toSec()-baseTime.toSec();
    std::string timeStampStr = boost::lexical_cast<std::string>(timeStamp);
    timeStampStr.replace(timeStampStr.find("."),1,"_");
   // std::cout<<timeStampStr<<endl;
    // Use the yaw from the direction-finding GPS instead of the magnetic yaw from the IMU
    file<<timeStampStr<<"  "<<"gps:"<<setprecision(13)<<Current_x<<"  "<<Current_y<<"; yaw:"<<direction<<"\n";

    cv::imwrite(timeStampStr+".jpg",img);
    cv::waitKey(5);
}
Example #27
void imu_callback(std::string name, std_msgs::PoseWithRatesStamped* imu, ros::Time t, void* f)
{
  FILE* file = (FILE*)f;

  fprintf(file, "%.5f %.5f %.5f %.5f %.5f %.5f %.5f %.5f\n",
          t.to_double(),
          imu->header.stamp.to_double(),
          imu->acc.acc.ax, imu->acc.acc.ay, imu->acc.acc.az,
          imu->vel.ang_vel.vx, imu->vel.ang_vel.vy, imu->vel.ang_vel.vz);
}
Example #28
void imu_callback(std::string name, imu_node::ImuData* imu, ros::Time t, void* f)
{
  FILE* file = (FILE*)f;

  fprintf(file, "%.5f %.5f %.5f %.5f %.5f %.5f %.5f %.5f\n",
          t.to_double(),
          imu->header.stamp.to_double(),
          imu->accel.ax, imu->accel.ay, imu->accel.az,
          imu->angrate.vx, imu->angrate.vy, imu->angrate.vz);
}
void OdometryFromPose::poseCallback(const geometry_msgs::PoseWithCovarianceStamped::ConstPtr& msg)
{
  nav_msgs::Odometry odom;
  tf::Quaternion quat;
  geometry_msgs::Vector3 rpy;
  ros::Time current_time;

  double roll, pitch, yaw, dt;


  // transform quaternion to fixed axis angles

  tf::quaternionMsgToTF(msg->pose.pose.orientation, quat);

  // First we have to turn this quaternion to a rotation matrix and then use the accessor getRPY on this matrix.
  tf::Matrix3x3 m(quat);
  m.getRPY(roll, pitch, yaw);

  // Store and publish the angles
  rpy.x = roll;
  rpy.y = pitch;
  rpy.z = yaw;
  rpy_pub_.publish(rpy);

  //set the position to odometry msg
  odom.header.stamp = msg->header.stamp;
  odom.header.frame_id = "map";
  odom.pose.pose = msg->pose.pose;


  // calculate velocity by computing derivatives
  current_time = ros::Time::now();
  dt = current_time.toSec() - last_time_.toSec();

  //odom.child_frame_id = "base_link";
  odom.twist.twist.linear.x = (msg->pose.pose.position.x - previous_x_) / dt;
  odom.twist.twist.linear.y = (msg->pose.pose.position.y - previous_y_) / dt;
  odom.twist.twist.linear.z = (msg->pose.pose.position.z - previous_z_) / dt;

  //curr_yaw = tf::getYaw(msg->pose.pose.orientation);
  odom.twist.twist.angular.z = (yaw - previous_yaw_)/ dt;



  odom_pub_.publish(odom); // publish the complete odometry msg


  // Store the current values as previous for the next loop
  last_time_ = current_time;
  previous_x_ = msg->pose.pose.position.x;
  previous_y_ = msg->pose.pose.position.y;
  previous_z_ = msg->pose.pose.position.z;
  previous_yaw_ = yaw;

}
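The yaw-rate term above wraps at +/-pi (a pose crossing from +3.1 rad to -3.1 rad would look like a huge rotation); a minimal sketch of a wrap-safe version using the ROS angles package, assuming it is available as a dependency:

#include <angles/angles.h>

// Wrap-safe yaw rate: shortest_angular_distance() keeps the difference in (-pi, pi].
double yawRate(double previous_yaw, double yaw, double dt)
{
  return angles::shortest_angular_distance(previous_yaw, yaw) / dt;
}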
Example #30
void HeaderManipulation::publishMsg(const topic_tools::ShapeShifter &msg, const ros::Time &msg_in_time)
{
    ROS_DEBUG("HeaderManipulation publishMsg Thread started with timestamp %f", msg_in_time.toSec());
    ros::Time end_time(ros::Time::now());
    ros::Time last_time;
    config_mutex_.lock();
    ros::Time time_to_pub(msg_in_time + msg_delay_);
    config_mutex_.unlock();
    do
    {
        last_time = end_time;
        ROS_DEBUG("waiting to publish msg from %f at %f", msg_in_time.toSec(), time_to_pub.toSec());
        if (end_time > time_to_pub)
        {
            boost::mutex::scoped_lock pub_lock(pub_mutex_);
            ROS_DEBUG("publishing msg which should be held back till: %f", time_to_pub.toSec());
            generic_pub_.publish(msg);
            return;
        }
        boost::mutex::scoped_lock config_lock(config_mutex_);
        publish_retry_rate_.sleep();
        time_to_pub = msg_in_time + msg_delay_;
        end_time = ros::Time::now();
    } while (last_time <= end_time);
    ROS_WARN("Detected jump back in time. Dropping msg. last: %f, end %f",
             last_time.toSec(), end_time.toSec());
    return;
}