void writeCSVCamera(shared_ptr<ofstream> file, ros::Time stamp)
{
  std::stringstream ss;
  ss << stamp.toNSec() << "," << stamp.toNSec() << ".png";

  *file << ss.str() << endl;
}
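For example (illustrative values only), a stamp of exactly 1.5 s after the epoch produces the row below: the nanosecond timestamp, then the matching image filename.

1500000000,1500000000.png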
Example #2
	/**
	 * Send transform to FCU position controller
	 *
	 * Note: send only XYZ, Yaw
	 */
	void send_setpoint_transform(const tf::Transform &transform, const ros::Time &stamp) {
		// ENU frame
		tf::Vector3 origin = transform.getOrigin();
		tf::Quaternion q = transform.getRotation();

		/* Documentation starts numbering bits from 1 instead of 0;
		 * ignore the velocity and acceleration vectors and the yaw rate.
		 */
		uint16_t ignore_all_except_xyz_y = (1 << 11) | (7 << 6) | (7 << 3);

		if (uas->is_px4()) {
			/**
			 * Current PX4 firmware has a bug: it cuts throttle if there is no velocity setpoint.
			 * Issue #273.
			 *
			 * @todo Revisit this quirk later. Should be fixed in firmware.
			 */
			ignore_all_except_xyz_y = (1 << 11) | (7 << 6);
		}

		// ENU->NED. Issue #49.
		set_position_target_local_ned(stamp.toNSec() / 1000000,
				MAV_FRAME_LOCAL_NED,
				ignore_all_except_xyz_y,
				origin.y(), origin.x(), -origin.z(),
				0.0, 0.0, 0.0,
				0.0, 0.0, 0.0,
				tf::getYaw(q), 0.0);
	}
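For reference, this is how the ignore mask above decomposes, assuming the standard MAVLink POSITION_TARGET_TYPEMASK values (the constant names here are illustrative, not part of mavros):

#include <cstdint>

// Sketch only: the mask keeps position XYZ and yaw, and ignores velocity,
// acceleration and yaw rate.
constexpr uint16_t IGNORE_VX_VY_VZ = (1 << 3) | (1 << 4) | (1 << 5);   // 8 + 16 + 32   = 0x0038
constexpr uint16_t IGNORE_AX_AY_AZ = (1 << 6) | (1 << 7) | (1 << 8);   // 64 + 128 + 256 = 0x01C0
constexpr uint16_t IGNORE_YAW_RATE = (1 << 11);                        // 2048           = 0x0800
static_assert((IGNORE_VX_VY_VZ | IGNORE_AX_AY_AZ | IGNORE_YAW_RATE) == ((1 << 11) | (7 << 6) | (7 << 3)),
              "same value as ignore_all_except_xyz_y");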
int main(int argc, char **argv)
{
    ros::init(argc, argv, "remaining_memory");
    ros::NodeHandle n;

    // Publisher part
    ros::Publisher remaining_memory_pub = n.advertise<mavros_msgs::Mavlink>("mavlink/to", 2000);
    ros::Rate loop_rate(10);

    FILE *in;
    char buff[512];
    float remaining_memory = 0.0;

    int count = 0;
    while (ros::ok()) {

        // Get the remaining memory percentage via a df shell pipeline
        if (!(in = popen("df -h /home/ | xargs | cut -d ' ' -f 14 | sed 's/%//g'", "r"))) {
            exit(1);
        }

        while (fgets(buff, sizeof(buff), in) != NULL) {
            remaining_memory = (float) atoll(buff);
        }
        pclose(in);

        // Message generation:
        // 1. build a NAMED_VALUE_FLOAT and serialize it into a mavlink_message_t
        mavlink::mavlink_message_t msg;
        mavlink::MsgMap map(msg);

        mavlink::common::msg::NAMED_VALUE_FLOAT mem_msg;

        std::array<char, 10> name = {"rem_mem"};

        mem_msg.time_boot_ms = ros::Time::now().toNSec() / 1000000;  // ns -> ms
        mem_msg.name = name;
        mem_msg.value = remaining_memory;

        mem_msg.serialize(map);

        // 2. wrap it in a mavros_msgs::Mavlink ROS message and publish it
        auto rmsg = boost::make_shared<mavros_msgs::Mavlink>();

        rmsg->header.stamp = ros::Time::now();
        mavros_msgs::mavlink::convert(msg, *rmsg);

        remaining_memory_pub.publish(rmsg);
        ros::spinOnce();

        loop_rate.sleep();
        ++count;
    }

    return 0;
}
bool ElevationMap::update(const grid_map::Matrix& varianceUpdate, const grid_map::Matrix& horizontalVarianceUpdateX,
                          const grid_map::Matrix& horizontalVarianceUpdateY,
                          const grid_map::Matrix& horizontalVarianceUpdateXY, const ros::Time& time)
{
  boost::recursive_mutex::scoped_lock scopedLock(rawMapMutex_);

  const auto& size = rawMap_.getSize();

  if (!((Index(varianceUpdate.rows(), varianceUpdate.cols()) == size).all()
      && (Index(horizontalVarianceUpdateX.rows(), horizontalVarianceUpdateX.cols()) == size).all()
      && (Index(horizontalVarianceUpdateY.rows(), horizontalVarianceUpdateY.cols()) == size).all()
      && (Index(horizontalVarianceUpdateXY.rows(), horizontalVarianceUpdateXY.cols()) == size).all())) {
    ROS_ERROR("The size of the update matrices does not match.");
    return false;
  }

  rawMap_.get("variance") += varianceUpdate;
  rawMap_.get("horizontal_variance_x") += horizontalVarianceUpdateX;
  rawMap_.get("horizontal_variance_y") += horizontalVarianceUpdateY;
  rawMap_.get("horizontal_variance_xy") += horizontalVarianceUpdateXY;
  clean();
  rawMap_.setTimestamp(time.toNSec());

  return true;
}
Example #5
void revert_applied_readings_since(const ros::Time& time)
{
    int msec = (int)(time.toNSec()/1000000l);    // nanoseconds -> milliseconds

    int k = 0;

    for(std::vector<ras_arduino_msgs::Encoders>::reverse_iterator it = _ringbuffer.rbegin(); it != _ringbuffer.rend(); )
    {
        ras_arduino_msgs::Encoders& enc = *it;
        if (enc.timestamp >= msec)
        {
            ++k;

            double dist_l = (2.0*M_PI*robot::dim::wheel_radius) * (enc.delta_encoder1 / robot::prop::ticks_per_rev);
            double dist_r = (2.0*M_PI*robot::dim::wheel_radius) * (enc.delta_encoder2 / robot::prop::ticks_per_rev);

            _theta += (dist_r - dist_l) / robot::dim::wheel_distance;

            double dist = (dist_r + dist_l) / 2.0;

            _x += dist * cos(_theta);
            _y += dist * sin(_theta);

            // Remove the reading. vector::erase invalidates the old iterators,
            // so rebuild the reverse iterator from the one erase() returns.
            it = std::vector<ras_arduino_msgs::Encoders>::reverse_iterator(_ringbuffer.erase((it + 1).base()));
        }
        else
        {
            ++it;
        }
    }

    ROS_ERROR("[PoseGenerator::revertReadingsSince] Reverted %d readings.",k);
}
Example #6
	/**
	 * @brief Send velocity to FCU velocity controller
	 *
	 * @warning Send only VX VY VZ. ENU frame.
	 */
	void send_setpoint_velocity(const ros::Time &stamp, Eigen::Vector3d &vel_enu, double yaw_rate)
	{
		/**
		 * Documentation starts numbering bits from 1 instead of 0;
		 * ignore the position and acceleration vectors and the yaw.
		 */
		uint16_t ignore_all_except_v_xyz_yr = (1 << 10) | (7 << 6) | (7 << 0);
		auto vel = [&] () {
			if (mav_frame == MAV_FRAME::BODY_NED || mav_frame == MAV_FRAME::BODY_OFFSET_NED) {
				return ftf::transform_frame_baselink_aircraft(vel_enu);
			} else {
				return ftf::transform_frame_enu_ned(vel_enu);
			}
		} ();

		auto yr = ftf::transform_frame_baselink_aircraft(Eigen::Vector3d(0.0, 0.0, yaw_rate));

		set_position_target_local_ned(stamp.toNSec() / 1000000,
					utils::enum_value(mav_frame),
					ignore_all_except_v_xyz_yr,
					Eigen::Vector3d::Zero(),
					vel,
					Eigen::Vector3d::Zero(),
					0.0, yr.z());
	}
	/**
	 * @brief Send vision speed estimate to FCU velocity controller
	 */
	void send_vision_speed(const geometry_msgs::Vector3 &vel_enu, const ros::Time &stamp) {
		Eigen::Vector3d vel_;
		tf::vectorMsgToEigen(vel_enu, vel_);
		auto vel = UAS::transform_frame_enu_ned(vel_);

		vision_speed_estimate(stamp.toNSec() / 1000,
				vel.x(), vel.y(), vel.z());
	}
Example #8
 void profilingLoggerCurrentTimeDuration(ros::Time currentTime,
                                         ros::Time startTime,
                                         LogThis &logger,
                                         std::string durationName)
 {
   logger.log(currentTime.toNSec(), "currentTime");
   logger.log((currentTime-startTime).toSec(), durationName);
 }
Example #9
	/**
	 * @brief Send vision speed estimate to FCU velocity controller
	 */
	void send_vision_speed(const geometry_msgs::Vector3 &vel_enu, const ros::Time &stamp)
	{
		Eigen::Vector3d vel_;
		tf::vectorMsgToEigen(vel_enu, vel_);
		//Transform from ENU to NED frame
		auto vel = ftf::transform_frame_enu_ned(vel_);

		vision_speed_estimate(stamp.toNSec() / 1000, vel);
	}
	/**
	 * Send angular velocity setpoint to FCU attitude controller
	 *
	 * ENU frame.
	 */
	void send_attitude_ang_velocity(const ros::Time &stamp, const float vx, const float vy, const float vz) {
		// Q + thrust ignored; note that bit numbering starts from 1 in the docs
		const uint8_t ignore_all_except_rpy = (1<<7)|(1<<6);
		float q[4] = { 1.0, 0.0, 0.0, 0.0 };

		set_attitude_target(stamp.toNSec() / 1000000,
				ignore_all_except_rpy,
				q,
				vy, vx, -vz,
				0.0);
	}
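Read against the standard MAVLink ATTITUDE_TARGET_TYPEMASK values, the mask above ignores the attitude quaternion and the throttle, so only the body rates in the message are applied; a minimal sketch (the constant names are illustrative):

#include <cstdint>

constexpr uint8_t ATT_IGNORE_THROTTLE = 1 << 6;   // 64
constexpr uint8_t ATT_IGNORE_ATTITUDE = 1 << 7;   // 128
static_assert((ATT_IGNORE_THROTTLE | ATT_IGNORE_ATTITUDE) == ((1 << 7) | (1 << 6)),
              "same value as ignore_all_except_rpy");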
Example #11
	/**
	 * @brief Send angular velocity setpoint to FCU attitude controller
	 *
	 * @note ENU frame.
	 */
	void send_attitude_ang_velocity(const ros::Time &stamp, const Eigen::Vector3d &ang_vel) {
		/* Q + thrust ignored; note that bit numbering starts from 1 in the docs.
		 */
		const uint8_t ignore_all_except_rpy = (1 << 7) | (1 << 6);
		float q[4] = { 1.0, 0.0, 0.0, 0.0 };

		auto av = UAS::transform_frame_enu_ned(ang_vel);

		set_attitude_target(stamp.toNSec() / 1000000,
				ignore_all_except_rpy,
				q,
				av.x(), av.y(), av.z(),
				0.0);
	}
Example #12
void ValterJoyTeleop::joyCallback(const sensor_msgs::Joy::ConstPtr& joy)
{
  float linVel = joy->axes[1];
  float angVel = joy->axes[2];

  linVel = linVel * 0.12;
  angVel = angVel * 0.587;

  if (fabs(prevLin - linVel) > 0.005 || fabs(prevAng - angVel) > 0.005 || (joy->axes[0] == 0 && joy->axes[1] == 0 && joy->axes[2] == 0 && joy->axes[3] == 0 && joy->axes[4] == 0 && joy->axes[5] == 0))
  {

    //ROS_INFO("TIME: %f", (ros::Time::now().toNSec() * 1e-6) - (prevSentTime.toNSec() * 1e-6));
    //ROS_INFO("linVel = %f, angVel = %f", linVel, angVel);

    if ((ros::Time::now().toNSec() * 1e-6 - prevSentTime.toNSec() * 1e-6) > 50 || (joy->axes[0] == 0 && joy->axes[1] == 0 && joy->axes[2] == 0 && joy->axes[3] == 0 && joy->axes[4] == 0 && joy->axes[5] == 0))
    {

      std::string cmdVelTaskScriptLine = format("T_PCP1_CmdVelTask_%.2f_%.2f", linVel, angVel);

      ROS_INFO("%s", cmdVelTaskScriptLine.c_str());

      int sock = socket(AF_INET , SOCK_STREAM , 0);
      if (sock < 0)
      {
        perror("socket failed. Error");
        return;
      }

      struct sockaddr_in server;
      server.sin_addr.s_addr = inet_addr("192.168.0.248");
      server.sin_family = AF_INET;
      server.sin_port = htons(55555);

      //Connect to remote server
      if (connect(sock , (struct sockaddr *)&server , sizeof(server)) < 0)
      {
        perror("connect failed. Error");
      }
      else
      {
        //Send some data
        if( send(sock , cmdVelTaskScriptLine.c_str() , strlen( cmdVelTaskScriptLine.c_str() ) , 0) < 0)
        {
            perror("Send failed : ");
        }
      }
      // Close the socket in both branches so the descriptor is not leaked.
      close(sock);


      prevSentTime = ros::Time::now();
      prevLin = linVel;
      prevAng = angVel;
    }
  }
}
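The toNSec() * 1e-6 arithmetic in the guard above is simply elapsed time in milliseconds, i.e. a 50 ms rate limit on outgoing commands; an equivalent formulation using ros::Duration directly would be (a sketch, not a change to the node):

// 50 ms have passed since the last command was sent (20 Hz cap).
bool ready_to_send = (ros::Time::now() - prevSentTime) > ros::Duration(0.050);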
Example #13
	/**
	 * Send velocity to FCU velocity controller
	 *
	 * Note: send only VX VY VZ. ENU frame.
	 */
	void send_setpoint_velocity(const ros::Time &stamp, float vx, float vy, float vz, float yaw_rate) {
		/* Documentation starts numbering bits from 1 instead of 0;
		 * ignore the position and acceleration vectors and the yaw.
		 */
		uint16_t ignore_all_except_v_xyz_yr = (1 << 10) | (7 << 6) | (7 << 0);

		// ENU->NED. Issue #49.
		set_position_target_local_ned(stamp.toNSec() / 1000000,
				MAV_FRAME_LOCAL_NED,
				ignore_all_except_v_xyz_yr,
				0.0, 0.0, 0.0,
				vy, vx, -vz,
				0.0, 0.0, 0.0,
				0.0, yaw_rate);
	}
Example #14
	/**
	 * @brief Send angular velocity setpoint and thrust to FCU attitude controller
	 */
	void send_attitude_ang_velocity(const ros::Time &stamp, const Eigen::Vector3d &ang_vel, const float thrust)
	{
		/**
		 * @note Q ignored; bit numbering starts from 1 in the docs.
		 */
		const uint8_t ignore_all_except_rpy = (1 << 7);

		auto av = ftf::transform_frame_baselink_aircraft(ang_vel);

		set_attitude_target(stamp.toNSec() / 1000000,
					ignore_all_except_rpy,
					Eigen::Quaterniond::Identity(),
					av,
					thrust);
	}
	/**
	 * @brief Send attitude setpoint to FCU attitude controller
	 *
	 * @note ENU frame.
	 */
	void send_attitude_target(const ros::Time &stamp, const Eigen::Affine3d &tr) {
		/* Thrust + RPY ignored; bit numbering starts from 1 in the docs.
		 */
		const uint8_t ignore_all_except_q = (1 << 6) | (7 << 0);
		float q[4];

		UAS::quaternion_to_mavlink(
				UAS::transform_orientation_enu_ned(
					UAS::transform_orientation_baselink_aircraft(Eigen::Quaterniond(tr.rotation()))),q);

		set_attitude_target(stamp.toNSec() / 1000000,
				ignore_all_except_q,
				q,
				0.0, 0.0, 0.0,
				0.0);
	}
Example #16
	/**
	 * @brief Send attitude setpoint to FCU attitude controller
	 *
	 * @note ENU frame.
	 */
	void send_attitude_target(const ros::Time &stamp, const Eigen::Affine3d &tr) {
		/* Thrust + RPY ignored; bit numbering starts from 1 in the docs.
		 */
		const uint8_t ignore_all_except_q = (1 << 6) | (7 << 0);
		float q[4];

		// MAVLink expects q as [w, x, y, z], while Eigen stores quaternion
		// coefficients as [x, y, z, w], so copy the components explicitly.
		Eigen::Quaterniond q_ned = UAS::transform_frame_enu_ned(Eigen::Quaterniond(tr.rotation()));
		q[0] = q_ned.w();
		q[1] = q_ned.x();
		q[2] = q_ned.y();
		q[3] = q_ned.z();

		set_attitude_target(stamp.toNSec() / 1000000,
				ignore_all_except_q,
				q,
				0.0, 0.0, 0.0,
				0.0);
	}
Example #17
	/**
	 * @brief Send attitude setpoint and thrust to FCU attitude controller
	 */
	void send_attitude_quaternion(const ros::Time &stamp, const Eigen::Affine3d &tr, const float thrust)
	{
		/**
		 * @note RPY body rates ignored; bit numbering starts from 1 in the docs.
		 */
		const uint8_t ignore_all_except_q_and_thrust = (7 << 0);

		auto q = ftf::transform_orientation_enu_ned(
					ftf::transform_orientation_baselink_aircraft(Eigen::Quaterniond(tr.rotation()))
					);

		set_attitude_target(stamp.toNSec() / 1000000,
					ignore_all_except_q_and_thrust,
					q,
					Eigen::Vector3d::Zero(),
					thrust);
	}
Example #18
	/**
	 * @brief Send vision estimate transform to FCU position controller
	 */
	void send_vision_estimate(const ros::Time &stamp, const Eigen::Affine3d &tr)
	{
		/**
		 * @warning Issue #60.
		 * This now affects pose callbacks too.
		 */
		if (last_transform_stamp == stamp) {
			ROS_DEBUG_THROTTLE_NAMED(10, "vision_pose", "Vision: Same transform as last one, dropped.");
			return;
		}
		last_transform_stamp = stamp;

		auto position = ftf::transform_frame_enu_ned(Eigen::Vector3d(tr.translation()));
		auto rpy = ftf::quaternion_to_rpy(
				ftf::transform_orientation_enu_ned(Eigen::Quaterniond(tr.rotation())));

		vision_position_estimate(stamp.toNSec() / 1000, position, rpy);
	}
	/**
	 * Send attitude setpoint to FCU attitude controller
	 *
	 * ENU frame.
	 */
	void send_attitude_transform(const tf::Transform &transform, const ros::Time &stamp) {
		// Thrust + RPY ignored; bit numbering starts from 1 in the docs
		const uint8_t ignore_all_except_q = (1<<6)|(7<<0);
		float q[4];

		// ENU->NED, description in #49.
		tf::Quaternion tf_q = transform.getRotation();
		q[0] = tf_q.w();
		q[1] = tf_q.y();
		q[2] = tf_q.x();
		q[3] = -tf_q.z();

		set_attitude_target(stamp.toNSec() / 1000000,
				ignore_all_except_q,
				q,
				0.0, 0.0, 0.0,
				0.0);
	}
Example #20
	/**
	 * Send transform to FCU position controller
	 *
	 * Note: send only XYZ, Yaw
	 */
	void send_setpoint_transform(const tf::Transform &transform, const ros::Time &stamp) {
		// ENU frame
		tf::Vector3 origin = transform.getOrigin();
		tf::Quaternion q = transform.getRotation();

		/* Documentation starts numbering bits from 1 instead of 0;
		 * ignore the velocity and acceleration vectors and the yaw rate.
		 */
		uint16_t ignore_all_except_xyzy = (1<<11)|(7<<6)|(7<<3);

		// ENU->NED. Issue #49.
		set_position_target_local_ned(stamp.toNSec() / 1000000,
				MAV_FRAME_LOCAL_NED,
				ignore_all_except_xyzy,
				origin.y(), origin.x(), -origin.z(),
				0.0, 0.0, 0.0,
				0.0, 0.0, 0.0,
				tf::getYaw(q), 0.0);
	}
	/**
	 * @brief Send setpoint to FCU position controller.
	 *
	 * @warning Send only XYZ, Yaw. ENU frame.
	 */
	void send_position_target(const ros::Time &stamp, const Eigen::Affine3d &tr) {
		/* Documentation starts numbering bits from 1 instead of 0;
		 * ignore the velocity and acceleration vectors and the yaw rate.
		 *
		 * Past PX4 versions had the bug described in #273.
		 * If you hit a similar issue, try updating the firmware first.
		 */
		const uint16_t ignore_all_except_xyz_y = (1 << 11) | (7 << 6) | (7 << 3);

		auto p = UAS::transform_frame_enu_ned(Eigen::Vector3d(tr.translation()));
		auto q = UAS::transform_frame_enu_ned(Eigen::Quaterniond(tr.rotation()));

		set_position_target_local_ned(stamp.toNSec() / 1000000,
				MAV_FRAME_LOCAL_NED,
				ignore_all_except_xyz_y,
				p.x(), p.y(), p.z(),
				0.0, 0.0, 0.0,
				0.0, 0.0, 0.0,
				UAS::quaternion_get_yaw(q), 0.0);
	}
Example #22
	/**
	 * Send vision estimate transform to FCU position controller
	 */
	void send_vision_transform(const tf::Transform &transform, const ros::Time &stamp) {
		// origin and RPY in ENU frame
		tf::Vector3 position = transform.getOrigin();
		double roll, pitch, yaw;
		tf::Matrix3x3 orientation(transform.getBasis());
		orientation.getRPY(roll, pitch, yaw);

		/* Issue #60.
		 * Note: this now affects pose callbacks too, but I think it's not a big deal.
		 */
		if (last_transform_stamp == stamp) {
			ROS_DEBUG_THROTTLE_NAMED(10, "position", "Vision: Same transform as last one, dropped.");
			return;
		}
		last_transform_stamp = stamp;

		// TODO: check conversion. Issue #49.
		vision_position_estimate(stamp.toNSec() / 1000,
				position.y(), position.x(), -position.z(),
				roll, -pitch, -yaw); // ??? please check!
	}
Example #23
	/**
	 * @brief Send fake GPS coordinates through HIL_GPS Mavlink msg
	 */
	void send_fake_gps(const ros::Time &stamp, const Eigen::Vector3d &ecef_offset) {
		// Throttle incoming messages to 5hz
		if ((ros::Time::now() - last_pos_time) < ros::Duration(gps_rate)) {
			return;
		}
		last_pos_time = ros::Time::now();

		/**
		 * @note HIL_GPS messages are accepted by PX4 firmware outside of HIL mode
		 * if the use_hil_gps flag is set (param MAV_USEHILGPS = 1).
		 * @todo Add the GPS_INPUT msg as an alternative, as ArduPilot already supports it.
		 */
		mavlink::common::msg::HIL_GPS fix {};

		Eigen::Vector3d geodetic;
		Eigen::Vector3d current_ecef(ecef_origin.x() + ecef_offset.x(),
					ecef_origin.y() + ecef_offset.y(),
					ecef_origin.z() + ecef_offset.z());

		try {
			earth.Reverse(current_ecef.x(), current_ecef.y(), current_ecef.z(),
						geodetic.x(), geodetic.y(), geodetic.z());
		}
		catch (const std::exception& e) {
			ROS_INFO_STREAM("FGPS: Caught exception: " << e.what() << std::endl);
		}

		Eigen::Vector3d vel = ((old_ecef - current_ecef) / (stamp.toSec() - old_stamp)) * 1e2;// [cm/s]

		// compute course over ground
		double cog;
		if (vel.x() == 0 && vel.y() == 0) {
			cog = 0;
		}
		else if (vel.x() >= 0 && vel.y() < 0) {
			cog = M_PI * 5 / 2 - atan2(vel.x(), vel.y());
		}
		else {
			cog = M_PI / 2 - atan2(vel.x(), vel.y());
		}

		// Fill in and send message
		fix.time_usec = stamp.toNSec() / 1000;	// [useconds]
		fix.lat = geodetic.x() * 1e7;		// [degrees * 1e7]
		fix.lon = geodetic.y() * 1e7;		// [degrees * 1e7]
		fix.alt = (geodetic.z() + GeographicLib::Geoid::ELLIPSOIDTOGEOID *
			  (*m_uas->egm96_5)(geodetic.x(), geodetic.y())) * 1e3;	// [meters * 1e3]
		fix.vel = vel.block<2, 1>(0, 0).norm();	// [cm/s]
		fix.vn = vel.x();			// [cm/s]
		fix.ve = vel.y();			// [cm/s]
		fix.vd = vel.z();			// [cm/s]
		fix.cog = cog * 1e2;			// [degrees * 1e2]
		fix.eph = eph * 1e2;			// [cm]
		fix.epv = epv * 1e2;			// [cm]
		fix.fix_type = utils::enum_value(fix_type);
		fix.satellites_visible = satellites_visible;

		// store old values
		old_stamp = stamp.toSec();
		old_ecef = current_ecef;

		UAS_FCU(m_uas)->send_message_ignore_drop(fix);
	}
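The integer scalings in HIL_GPS are easy to misread; here is a small worked sketch with arbitrary sample coordinates, assuming the units noted in the comments above (degrees * 1e7, millimeters):

#include <cmath>
#include <cstdint>

// Sketch only: scaling a hypothetical geodetic fix into HIL_GPS integer fields.
double lat_deg = 47.3977419, lon_deg = 8.5455938, alt_m = 535.4;
int32_t lat = static_cast<int32_t>(std::llround(lat_deg * 1e7));   // 473977419  [deg * 1e7]
int32_t lon = static_cast<int32_t>(std::llround(lon_deg * 1e7));   // 85455938   [deg * 1e7]
int32_t alt = static_cast<int32_t>(std::llround(alt_m * 1e3));     // 535400     [mm]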
Example #24
	/**
	 * @brief Send vision speed estimate to FCU velocity controller
	 */
	void send_vision_speed(float vx, float vy, float vz, const ros::Time &stamp) {
		auto vel = UAS::transform_frame_enu_ned_xyz(vx, vy, vz);

		vision_speed_estimate(stamp.toNSec() / 1000,
				vel.x(), vel.y(), vel.z());
	}
Example #25
void ros_convertions::fromROS( ::ros_test::Time& value, ros::Time const& ros )
{
    value.milliseconds = ros.toNSec() / 1000000;
}
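Several of the examples on this page divide toNSec() by different constants; a small sketch collecting those conversions (the helper names are illustrative, not part of any quoted API):

#include <cstdint>
#include <ros/time.h>

// ros::Time::toNSec() returns nanoseconds, so:
inline uint64_t to_usec(const ros::Time &t) { return t.toNSec() / 1000; }      // e.g. vision_position_estimate
inline uint64_t to_msec(const ros::Time &t) { return t.toNSec() / 1000000; }   // e.g. set_position_target_local_ned
inline double   to_sec (const ros::Time &t) { return t.toSec(); }              // fractional seconds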
int64_t PlaceDatabase::add(ros::Time stamp, const tf::Pose& pose_in_map, const tf::Transform& optical_transform,
                           const frame_common::Frame& frame, int64_t id)
{
  assert((int)frame.kpts.size() == frame.dtors.rows);

  // Add quantized image descriptor to document database
  vt::Document words(frame.dtors.rows);
  for (int i = 0; i < frame.dtors.rows; ++i)
    words[i] = voctree_.quantize(frame.dtors.row(i));
  vt::DocId doc_id = document_db_.insert(words);

  // Record doc id -> place id mapping
  if (id == AUTO_ID)
    id = doc_id;
  doc_to_place_id_[doc_id] = id;
  
  /// @todo Move persistent db stuff to separate function
  checkErr(sqlite3_bind_int64(insert_stmt_, 1, id), "Couldn't bind row id");
  
  // Bind time stamp encoded as 64-bit integer
  sqlite3_int64 time = stamp.toNSec();
  checkErr(sqlite3_bind_int64(insert_stmt_, 2, time), "Couldn't bind stamp");

  // Bind current map pose and camera transform
  bindTransform(pose_in_map, map_pose_param_index_);
  bindTransform(optical_transform, optical_transform_param_index_);

  // Bind camera parameters
  bindCameraParams(frame.cam);

  // Bind disparities and "good points" list
  int disp_index = disparities_param_index_;
  bindVector(insert_stmt_, disp_index, frame.disps);
  bindVector(insert_stmt_, disp_index + 1, frame.goodPts);
  
  // Bind keypoint data
  int keypt_index = keypoints_param_index_;
  checkErr(sqlite3_bind_int(insert_stmt_, keypt_index, frame.kpts.size()), "Couldn't bind num_keypoints");
  bindVector(insert_stmt_, keypt_index + 1, frame.kpts);

  // Bind descriptor data. step of cv::Mat is implicit, blob length / num keypoints.
  checkErr(sqlite3_bind_int(insert_stmt_, keypt_index + 2, frame.dtors.cols), "Couldn't bind descriptor_length");
  checkErr(sqlite3_bind_blob(insert_stmt_, keypt_index + 3, frame.dtors.data,
                             frame.dtors.rows * frame.dtors.step, SQLITE_TRANSIENT),
           "Couldn't bind descriptor data");

  // Bind document vector data
  bindVector(insert_stmt_, keypt_index + 4, words);

  // Save images if present
  bindImage(keypt_index + 5, frame.img);
  bindImage(keypt_index + 6, frame.imgRight);

  // Execute INSERT statement
  checkErr(sqlite3_step(insert_stmt_), "INSERT statement not done", SQLITE_DONE);
  
  // Reset prepared statement to reuse on next call
  checkErr(sqlite3_reset(insert_stmt_), "Couldn't reset INSERT statement");
  checkErr(sqlite3_clear_bindings(insert_stmt_), "Couldn't clear bindings on INSERT statement");

  // Return row id of newly-added place
  return id;
}
bool ElevationMap::add(const pcl::PointCloud<pcl::PointXYZRGB>::Ptr pointCloud, Eigen::VectorXf& pointCloudVariances, const ros::Time& timestamp, const Eigen::Affine3d& transformationSensorToMap)
{
  if (pointCloud->size() != pointCloudVariances.size()) {
    ROS_ERROR("ElevationMap::add: Size of point cloud (%i) and variances (%i) do not agree.",
              (int) pointCloud->size(), (int) pointCloudVariances.size());
    return false;
  }

  // Initialization for time calculation.
  const ros::WallTime methodStartTime(ros::WallTime::now());
  boost::recursive_mutex::scoped_lock scopedLockForRawData(rawMapMutex_);

  // Update initial time if it is not initialized.
  if (initialTime_.toSec() == 0) {
    initialTime_ = timestamp;
  }
  const double scanTimeSinceInitialization = (timestamp - initialTime_).toSec();

  for (unsigned int i = 0; i < pointCloud->size(); ++i) {
    auto& point = pointCloud->points[i];
    Index index;
    Position position(point.x, point.y);
    if (!rawMap_.getIndex(position, index)) continue; // Skip this point if it does not lie within the elevation map.

    auto& elevation = rawMap_.at("elevation", index);
    auto& variance = rawMap_.at("variance", index);
    auto& horizontalVarianceX = rawMap_.at("horizontal_variance_x", index);
    auto& horizontalVarianceY = rawMap_.at("horizontal_variance_y", index);
    auto& horizontalVarianceXY = rawMap_.at("horizontal_variance_xy", index);
    auto& color = rawMap_.at("color", index);
    auto& time = rawMap_.at("time", index);
    auto& lowestScanPoint = rawMap_.at("lowest_scan_point", index);
    auto& sensorXatLowestScan = rawMap_.at("sensor_x_at_lowest_scan", index);
    auto& sensorYatLowestScan = rawMap_.at("sensor_y_at_lowest_scan", index);
    auto& sensorZatLowestScan = rawMap_.at("sensor_z_at_lowest_scan", index);

    const float& pointVariance = pointCloudVariances(i);

    if (!rawMap_.isValid(index)) {
      // No prior information in elevation map, use measurement.
      elevation = point.z;
      variance = pointVariance;
      horizontalVarianceX = minHorizontalVariance_;
      horizontalVarianceY = minHorizontalVariance_;
      horizontalVarianceXY = 0.0;
      colorVectorToValue(point.getRGBVector3i(), color);
      continue;
    }

    // Deal with multiple heights in one cell.
    const double mahalanobisDistance = fabs(point.z - elevation) / sqrt(variance);
    if (mahalanobisDistance > mahalanobisDistanceThreshold_) {
      if (scanTimeSinceInitialization - time <= scanningDuration_ && elevation > point.z) {
        // Ignore the point if the measurement is from the same point cloud (time comparison)
        // and is lower than the elevation in the map.
      } else if (scanTimeSinceInitialization - time <= scanningDuration_) {
        // If point is higher.
        elevation = point.z;
        variance = pointVariance;
      } else {
        variance += multiHeightNoise_;
      }
      continue;
    }

    // Store lowest points from scan for visibility checking.
    const float pointHeightPlusUncertainty = point.z + 3.0 * sqrt(pointVariance); // 3 sigma.
    if (std::isnan(lowestScanPoint) || pointHeightPlusUncertainty < lowestScanPoint){
      lowestScanPoint = pointHeightPlusUncertainty;
      const Position3 sensorTranslation(transformationSensorToMap.translation());
      sensorXatLowestScan = sensorTranslation.x();
      sensorYatLowestScan = sensorTranslation.y();
      sensorZatLowestScan = sensorTranslation.z();
    }

    // Fuse measurement with elevation map data.
    elevation = (variance * point.z + pointVariance * elevation) / (variance + pointVariance);
    variance = (pointVariance * variance) / (pointVariance + variance);
    // TODO Add color fusion.
    colorVectorToValue(point.getRGBVector3i(), color);
    time = scanTimeSinceInitialization;

    // Horizontal variances are reset.
    horizontalVarianceX = minHorizontalVariance_;
    horizontalVarianceY = minHorizontalVariance_;
    horizontalVarianceXY = 0.0;
  }

  clean();
  rawMap_.setTimestamp(timestamp.toNSec()); // grid_map timestamps are in nanoseconds.

  const ros::WallDuration duration = ros::WallTime::now() - methodStartTime;
  ROS_INFO("Raw map has been updated with a new point cloud in %f s.", duration.toSec());
  return true;
}
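The per-cell fusion above is the standard one-dimensional Kalman (product-of-Gaussians) update; a minimal standalone sketch of the same arithmetic, with hypothetical names:

// Sketch only: fuse a cell estimate (elevation, variance) with a measurement
// (z, pointVariance), exactly as in the loop above.
struct Cell { double elevation; double variance; };

Cell fuse(const Cell &prior, double z, double pointVariance)
{
  Cell post;
  post.elevation = (prior.variance * z + pointVariance * prior.elevation)
                   / (prior.variance + pointVariance);    // precision-weighted mean
  post.variance = (pointVariance * prior.variance)
                  / (pointVariance + prior.variance);     // combined variance
  return post;
}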
void ros_convertions::fromROS( ::base::Time& value, ros::Time const& ros )
{
    value = base::Time::fromMicroseconds(ros.toNSec() / 1000);
}
Example #29
	/**
	 * Send vision speed estimate to FCU velocity controller
	 */
	void send_vision_speed(float vx, float vy, float vz, const ros::Time &stamp) {
		// TODO: check conversion. Issue #49.
		vision_speed_estimate(stamp.toNSec() / 1000,
				vy, vx, -vz);
	}