Example #1
tf::Matrix3x3 GetTfRotationMatrix(tf::Vector3 xaxis, tf::Vector3 zaxis) {
    tf::Matrix3x3 m;
    tf::Vector3 yaxis = zaxis.cross(xaxis);
    m.setValue(xaxis.getX(), yaxis.getX(), zaxis.getX(),
            xaxis.getY(), yaxis.getY(), zaxis.getY(),
            xaxis.getZ(), yaxis.getZ(), zaxis.getZ());
    return m;
}
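A minimal usage sketch (an assumption, not part of the original example): build an orthonormal frame from an x direction and a z direction, then extract the equivalent quaternion with tf::Matrix3x3::getRotation.

// Hypothetical usage of GetTfRotationMatrix; the axes are assumed to be unit length and orthogonal.
tf::Vector3 xaxis(0, 1, 0);   // frame whose x axis points along world +Y (a 90 degree yaw)
tf::Vector3 zaxis(0, 0, 1);
tf::Matrix3x3 rot = GetTfRotationMatrix(xaxis, zaxis);
tf::Quaternion q;
rot.getRotation(q);           // the same orientation expressed as a quaternion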
Example #2
tf::Vector3 JPCT_Math::rotate(tf::Vector3 vec, tf::Matrix3x3& mat) {
    float xr = vec.getX() * mat[0][0] + vec.getY() * mat[1][0] + vec.getZ() * mat[2][0];
    float yr = vec.getX() * mat[0][1] + vec.getY() * mat[1][1] + vec.getZ() * mat[2][1];
    float zr = vec.getX() * mat[0][2] + vec.getY() * mat[1][2] + vec.getZ() * mat[2][2];
    vec.setX(xr);
    vec.setY(yr);
    vec.setZ(zr);

    return vec;
}
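A short note on the convention above (my reading of the code, not stated in the original): the vector is multiplied as a row vector, v' = v · mat, which in tf's usual column-vector convention corresponds to multiplying by the transpose of mat; for a pure rotation matrix that is the inverse rotation. A minimal check, assuming the function above is available:

// Hypothetical sanity check: rotate() should match tf's transpose multiplication.
tf::Matrix3x3 mat;
mat.setRPY(0.0, 0.0, M_PI / 2);            // 90 degree yaw
tf::Vector3 v(1, 0, 0);
tf::Vector3 a = JPCT_Math::rotate(v, mat); // row-vector convention
tf::Vector3 b = mat.transpose() * v;       // equivalent column-vector form
// a and b should be (approximately) equal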
Example #3
    float getVel(tf::Vector3 & pose1, tf::Vector3 & pose2, const ros::Duration & dur)
    {
      float vel;
      float delta_x = fabs(pose1.getX() - pose2.getX());
      float delta_y = fabs(pose1.getY() - pose2.getY());
      float dist = sqrt(delta_x*delta_x + delta_y*delta_y);

      vel = dist / dur.toSec();

      return vel;
    }
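A minimal usage sketch (the values are hypothetical): getVel returns the planar speed between two poses over the elapsed duration.

// Hypothetical call: a 3-4-5 triangle traversed in 2 seconds gives 2.5 units per second.
tf::Vector3 p1(0.0, 0.0, 0.0);
tf::Vector3 p2(3.0, 4.0, 0.0);
ros::Duration dt(2.0);
float v = getVel(p1, p2, dt);  // 5.0 / 2.0 = 2.5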
Example #4
const float Utility::findAngleFromAToB(const tf::Vector3 a, const tf::Vector3 b) const {
  std::vector<float> a_vec;
  a_vec.push_back(a.getX());
  a_vec.push_back(a.getY());

  std::vector<float> b_vec;
  b_vec.push_back(b.getX());
  b_vec.push_back(b.getY());

  return findAngleFromAToB(a_vec, b_vec);
}
Example #5
static tf::Transform twoPointTransform(const tf::Vector3 &a1, const tf::Vector3 &a2, const tf::Vector3 &b1, const tf::Vector3 &b2)
{
  double tha, thb, theta;

  // tha is the angle of the line segment from a1 to a2, in frame {A}
  tha = atan2(a2.getY() - a1.getY(), a2.getX() - a1.getX());
  // thb is the angle of the line segment from b1 to b2, in frame {B}
  thb = atan2(b2.getY() - b1.getY(), b2.getX() - b1.getX());
  // their difference is the rotation from {A} to {B}
  theta = thb - tha;
  // normalize it, [-pi .. pi]
  while (theta > M_PI) theta -= (M_PI + M_PI);
  while (theta < -M_PI) theta += (M_PI + M_PI);
  // make it a rotation matrix, where theta is rotation about Z
  tf::Matrix3x3 B_R_A;
  B_R_A.setRPY(0, 0, theta);

  // ac is the vector to the centroid of a1 and a2, in {A}
  tf::Vector3 ac((a1.getX() + a2.getX()) * 0.5, (a1.getY() + a2.getY()) * 0.5, 0);
  // bc is the vector to the centroid of b1 and b2, in {B}
  tf::Vector3 bc((b1.getX() + b2.getX()) * 0.5, (b1.getY() + b2.getY()) * 0.5, 0);
  // get the vector from {A} to {B} in frame {B} by rotating ac into {B}
  // and subtracting from bc
  tf::Vector3 B_P_A(bc - B_R_A * ac);

  // convert it to a complete transform
  return tf::Transform(B_R_A, B_P_A);
}
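A minimal usage sketch (an assumption, not part of the original source): given two corresponding point pairs, the returned transform maps points expressed in {A} into {B}, so applying it to a1 should reproduce b1.

// Hypothetical point pairs: the same two physical points seen in frames {A} and {B},
// where {B} is rotated 90 degrees and translated with respect to {A}.
tf::Vector3 a1(0, 0, 0), a2(1, 0, 0);
tf::Vector3 b1(2, 1, 0), b2(2, 2, 0);
tf::Transform B_T_A = twoPointTransform(a1, a2, b1, b2);
tf::Vector3 check = B_T_A * a1;   // equals b1 = (2, 1, 0); B_T_A * a2 equals b2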
Example #6
geometry_msgs::Point tfVector3ToPoint(tf::Vector3 vector) {
	geometry_msgs::Point position;
	position.x = vector.getX();
	position.y = vector.getY();
	position.z = vector.getZ();
	return position;
}
Example #7
math::Vector3 RayTracePluginUtils::vectorTFToGazebo(const tf::Vector3 t)
{
  math::Vector3 v;
  v.x = t.getX();
  v.y = t.getY();
  v.z = t.getZ();
  return v;
}
Example #8
void HapticsPSM::compute_force_in_tip_frame(geometry_msgs::Wrench &wrench){
    rot_mat6wrt0.setRPY(group->getCurrentRPY().at(0),
                        group->getCurrentRPY().at(1),
                        group->getCurrentRPY().at(2));
    tf_vec3.setValue(wrench.force.x,wrench.force.y,wrench.force.z);
    tf_vec3 = rot_mat6wrt0.transpose() * tf_vec3;
    wrench.force.x = tf_vec3.getX();
    wrench.force.y = tf_vec3.getY();
    wrench.force.z = tf_vec3.getZ();
}
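A note on the convention above (my reading of the code, not stated in the original): rot_mat6wrt0, built from the group's current RPY, is the rotation of the tool tip with respect to the base, so multiplying by its transpose applies the inverse rotation and re-expresses a base-frame force in the tip frame, which matches the function name.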
Example #9
		/********** callback for the cmd velocity from the autonomy **********/
		void cmd_vel_callback(const geometry_msgs::Twist& msg)
		{
			watchdogTimer.stop();
			
			error.setValue(msg.linear.x - body_vel.linear.x, msg.linear.y - body_vel.linear.y, msg.linear.z - body_vel.linear.z);
			//std::cout << "error x: " << error.getX() << " y: " << error.getY() << " z: " << error.getZ() << std::endl;
			//std::cout << std::abs(curr_body_vel_time.toSec() - last_body_vel_time.toSec()) << std::endl;
			error_yaw = msg.angular.z - body_vel.angular.z;
			//std::cout << "error yaw: " << error_yaw << std::endl;
			
			// if some time has passed since the last body velocity measurement, compute the feed-forward PD terms
			if (std::abs(curr_body_vel_time.toSec() - last_body_vel_time.toSec()) > 0.00001)
			{	
				errorDot = (1/(curr_body_vel_time - last_body_vel_time).toSec()) * (error - last_error);
				//std::cout << "errordot x: " << errorDot.getX() << " y: " << errorDot.getY() << " z: " << errorDot.getZ() << std::endl;
				
				errorDot_yaw = (1/(curr_body_vel_time - last_body_vel_time).toSec()) * (error_yaw - last_error_yaw);
				//std::cout << "error dot yaw " << errorDot_yaw << std::endl;
				velocity_command.linear.x = cap_vel_auton(kx*msg.linear.x + (kp*error).getX() + (kd*errorDot).getX());
				velocity_command.linear.y = cap_vel_auton(ky*msg.linear.y + (kp*error).getY() + (kd*errorDot).getY());
				velocity_command.linear.z = cap_vel_auton(kz*msg.linear.z + (kp*error).getZ() + (kd*errorDot).getZ());
				velocity_command.angular.z = -1*cap_vel_auton(kyaw*msg.angular.z + kp_yaw*error_yaw + kd_yaw*errorDot_yaw); // z must be switched because bebop driver http://bebop-autonomy.readthedocs.org/en/latest/piloting.html
			}
			
			last_body_vel_time = curr_body_vel_time; // update last time body velocity was received
			last_error = error;
			last_error_yaw = error_yaw;
			
			error_gm.linear.x = error.getX(); error_gm.linear.y = error.getY(); error_gm.linear.z = error.getZ(); error_gm.angular.z = error_yaw;
			errorDot_gm.linear.x = errorDot.getX(); errorDot_gm.linear.y = errorDot.getY(); errorDot_gm.linear.z = errorDot.getZ(); errorDot_gm.angular.z = kyaw*msg.angular.z + kp_yaw*error_yaw + kd_yaw*errorDot_yaw;
			error_pub.publish(error_gm);
			errorDot_pub.publish(errorDot_gm);
			
			if (start_autonomous)
			{
				recieved_command_from_tracking = true;
			}
			
			watchdogTimer.start();
		}
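The helper cap_vel_auton is referenced above but not shown in this example; a plausible saturation helper might look like the sketch below (an assumption about its behavior, not the original implementation; MAX_AUTON_VEL is a hypothetical limit).

// Hypothetical saturation helper: clamp a commanded velocity to a symmetric limit.
double cap_vel_auton(double cmd)
{
    const double MAX_AUTON_VEL = 1.0;  // assumed limit in m/s (rad/s for yaw)
    if (cmd > MAX_AUTON_VEL)  return MAX_AUTON_VEL;
    if (cmd < -MAX_AUTON_VEL) return -MAX_AUTON_VEL;
    return cmd;
}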
Example #10
GridRayTrace::GridRayTrace(tf::Vector3 start, tf::Vector3 end, const OccupancyGrid& grid_ref)
	: _x_store(), _y_store()
{
	//Transform (x,y) into map frame
	start = grid_ref.toGridFrame(start);
	end = grid_ref.toGridFrame(end);
	
	double x0 = start.getX(), y0 = start.getY();
	double x1 = end.getX(), y1 = end.getY();
	
	//ROS_WARN("start: (%f, %f) -> end: (%f, %f)", x0, y0, x1, y1);
	
	bresenham
		(
			floor(x0), floor(y0), 
			floor(x1), floor(y1), 
			_x_store, _y_store
		);
	
	_cur_idx = 0;
	_max_idx = std::min(_x_store.size(), _y_store.size());
}
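The bresenham helper called above is not part of this example; below is a sketch of a standard integer Bresenham rasterizer with a compatible call shape, assuming _x_store and _y_store are std::vector<int> members (that type is an assumption).

// Hypothetical line rasterizer: appends every grid cell on the segment (x0,y0)->(x1,y1).
#include <vector>
#include <cstdlib>

void bresenham(int x0, int y0, int x1, int y1,
               std::vector<int>& x_store, std::vector<int>& y_store)
{
    int dx = std::abs(x1 - x0), sx = (x0 < x1) ? 1 : -1;
    int dy = -std::abs(y1 - y0), sy = (y0 < y1) ? 1 : -1;
    int err = dx + dy;

    while (true)
    {
        x_store.push_back(x0);
        y_store.push_back(y0);
        if (x0 == x1 && y0 == y1) break;
        int e2 = 2 * err;
        if (e2 >= dy) { err += dy; x0 += sx; }
        if (e2 <= dx) { err += dx; y0 += sy; }
    }
}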
Example #11
double getMinimumDistance(tf::Vector3 position, nav_msgs::GridCells grid) {

  double distance_sq=-1;
  int size = grid.cells.size();
  double my_x=position.getX(), my_y=position.getY();
  double stop_radius_sq = stop_radius*stop_radius;
  for (int i=0; i<size; i++) {
    double d_sq = pow(grid.cells[i].x-my_x,2)+pow(grid.cells[i].y-my_y,2);
    if (distance_sq==-1 || d_sq<distance_sq) distance_sq=d_sq;
    if (d_sq<=stop_radius_sq) return 0;
  }

  return sqrt(distance_sq);
}
Example #12
bool
ArucoMapping::processImage(cv::Mat input_image,cv::Mat output_image)
{
  aruco::MarkerDetector Detector;
  std::vector<aruco::Marker> temp_markers;

  //Set visibility flag to false for all markers
  for(size_t i = 0; i < num_of_markers_; i++)
      markers_[i].visible = false;

  // Save previous marker count
  marker_counter_previous_ = marker_counter_;

  // Detect markers
  Detector.detect(input_image,temp_markers,aruco_calib_params_,marker_size_);
    
  // If no marker found, print statement
  if(temp_markers.size() == 0)
    ROS_DEBUG("No marker found!");

  //------------------------------------------------------
  // FIRST MARKER DETECTED
  //------------------------------------------------------
  if((temp_markers.size() > 0) && (first_marker_detected_ == false))
  {
    //Set flag
    first_marker_detected_=true;

    // Detect lowest marker ID
    lowest_marker_id_ = temp_markers[0].id;
    for(size_t i = 0; i < temp_markers.size();i++)
    {
      if(temp_markers[i].id < lowest_marker_id_)
        lowest_marker_id_ = temp_markers[i].id;
    }


    ROS_DEBUG_STREAM("The lowest Id marker " << lowest_marker_id_ );

    // Identify lowest marker ID with world's origin
    markers_[0].marker_id = lowest_marker_id_;

    markers_[0].geometry_msg_to_world.position.x = 0;
    markers_[0].geometry_msg_to_world.position.y = 0;
    markers_[0].geometry_msg_to_world.position.z = 0;

    markers_[0].geometry_msg_to_world.orientation.x = 0;
    markers_[0].geometry_msg_to_world.orientation.y = 0;
    markers_[0].geometry_msg_to_world.orientation.z = 0;
    markers_[0].geometry_msg_to_world.orientation.w = 1;

    // Relative position and Global position
    markers_[0].geometry_msg_to_previous.position.x = 0;
    markers_[0].geometry_msg_to_previous.position.y = 0;
    markers_[0].geometry_msg_to_previous.position.z = 0;

    markers_[0].geometry_msg_to_previous.orientation.x = 0;
    markers_[0].geometry_msg_to_previous.orientation.y = 0;
    markers_[0].geometry_msg_to_previous.orientation.z = 0;
    markers_[0].geometry_msg_to_previous.orientation.w = 1;

    // Transformation Pose to TF
    tf::Vector3 position;
    position.setX(0);
    position.setY(0);
    position.setZ(0);

    tf::Quaternion rotation;
    rotation.setX(0);
    rotation.setY(0);
    rotation.setZ(0);
    rotation.setW(1);

    markers_[0].tf_to_previous.setOrigin(position);
    markers_[0].tf_to_previous.setRotation(rotation);

    // Relative position of first marker equals Global position
    markers_[0].tf_to_world=markers_[0].tf_to_previous;

    // Increase count
    marker_counter_++;

    // Set sign of visibility of first marker
    markers_[0].visible=true;

    ROS_INFO_STREAM("First marker with ID: " << markers_[0].marker_id << " detected");

    //First marker does not have any previous marker
    markers_[0].previous_marker_id = THIS_IS_FIRST_MARKER;
  }

  //------------------------------------------------------
  // FOR EVERY MARKER DO
  //------------------------------------------------------
  for(size_t i = 0; i < temp_markers.size();i++)
  {
    int index;
    int current_marker_id = temp_markers[i].id;

    //Draw marker convex, ID, cube and axis
    temp_markers[i].draw(output_image, cv::Scalar(0,0,255),2);
    aruco::CvDrawingUtils::draw3dCube(output_image,temp_markers[i], aruco_calib_params_);
    aruco::CvDrawingUtils::draw3dAxis(output_image,temp_markers[i], aruco_calib_params_);

    // Existing marker ?
    bool existing = false;
    int temp_counter = 0;

    while((existing == false) && (temp_counter < marker_counter_))
    {
      if(markers_[temp_counter].marker_id == current_marker_id)
      {
        index = temp_counter;
        existing = true;
        ROS_DEBUG_STREAM("Existing marker with ID: " << current_marker_id << "found");
      }
        temp_counter++;
    }

    //New marker ?
    if(existing == false)
    {
      index = marker_counter_;
      markers_[index].marker_id = current_marker_id;
      existing = true;
      ROS_DEBUG_STREAM("New marker with ID: " << current_marker_id << " found");
    }

    // Change visibility flag of new marker
    for(size_t j = 0;j < marker_counter_; j++)
    {
      for(size_t k = 0;k < temp_markers.size(); k++)
      {
        if(markers_[j].marker_id == temp_markers[k].id)
          markers_[j].visible = true;
      }
    }

    //------------------------------------------------------
    // For existing marker do
    //------------------------------------------------------
    if((index < marker_counter_) && (first_marker_detected_ == true))
    {
      markers_[index].current_camera_tf = arucoMarker2Tf(temp_markers[i]);
      markers_[index].current_camera_tf = markers_[index].current_camera_tf.inverse();

      const tf::Vector3 marker_origin = markers_[index].current_camera_tf.getOrigin();
      markers_[index].current_camera_pose.position.x = marker_origin.getX();
      markers_[index].current_camera_pose.position.y = marker_origin.getY();
      markers_[index].current_camera_pose.position.z = marker_origin.getZ();

      const tf::Quaternion marker_quaternion = markers_[index].current_camera_tf.getRotation();
      markers_[index].current_camera_pose.orientation.x = marker_quaternion.getX();
      markers_[index].current_camera_pose.orientation.y = marker_quaternion.getY();
      markers_[index].current_camera_pose.orientation.z = marker_quaternion.getZ();
      markers_[index].current_camera_pose.orientation.w = marker_quaternion.getW();
    }

    //------------------------------------------------------
    // For new marker do
    //------------------------------------------------------
    if((index == marker_counter_) && (first_marker_detected_ == true))
    {
      markers_[index].current_camera_tf=arucoMarker2Tf(temp_markers[i]);

      tf::Vector3 marker_origin = markers_[index].current_camera_tf.getOrigin();
      markers_[index].current_camera_pose.position.x = marker_origin.getX();
      markers_[index].current_camera_pose.position.y = marker_origin.getY();
      markers_[index].current_camera_pose.position.z = marker_origin.getZ();

      tf::Quaternion marker_quaternion = markers_[index].current_camera_tf.getRotation();
      markers_[index].current_camera_pose.orientation.x = marker_quaternion.getX();
      markers_[index].current_camera_pose.orientation.y = marker_quaternion.getY();
      markers_[index].current_camera_pose.orientation.z = marker_quaternion.getZ();
      markers_[index].current_camera_pose.orientation.w = marker_quaternion.getW();

      // Naming - TFs
      std::stringstream camera_tf_id;
      std::stringstream camera_tf_id_old;
      std::stringstream marker_tf_id_old;

      camera_tf_id << "camera_" << index;

      // Flag to keep track of whether any known marker is visible in the current image
      bool any_known_marker_visible = false;

      // ID of the known marker relative to which the new marker's position is calculated
      int last_marker_id;

      // Test whether the position of the new marker can be calculated relative to an already known marker
      for(int k = 0; k < index; k++)
      {
        if((markers_[k].visible == true) && (any_known_marker_visible == false))
        {
          if(markers_[k].previous_marker_id != -1)
          {
            any_known_marker_visible = true;
            camera_tf_id_old << "camera_" << k;
            marker_tf_id_old << "marker_" << k;
            markers_[index].previous_marker_id = k;
            last_marker_id = k;
           }
         }
       }

     // New position can be calculated
     if(any_known_marker_visible == true)
     {
       // Generating TFs for listener
       for(char k = 0; k < 10; k++)
       {
         // TF from old marker and its camera
         broadcaster_.sendTransform(tf::StampedTransform(markers_[last_marker_id].current_camera_tf,ros::Time::now(),
                                                         marker_tf_id_old.str(),camera_tf_id_old.str()));

         // TF from old camera to new camera
         broadcaster_.sendTransform(tf::StampedTransform(markers_[index].current_camera_tf,ros::Time::now(),
                                                         camera_tf_id_old.str(),camera_tf_id.str()));

         ros::Duration(BROADCAST_WAIT_INTERVAL).sleep();
       }

        // Calculate TF between two markers
        listener_->waitForTransform(marker_tf_id_old.str(),camera_tf_id.str(),ros::Time(0),
                                    ros::Duration(WAIT_FOR_TRANSFORM_INTERVAL));
        try
        {
          broadcaster_.sendTransform(tf::StampedTransform(markers_[last_marker_id].current_camera_tf,ros::Time::now(),
                                                          marker_tf_id_old.str(),camera_tf_id_old.str()));

          broadcaster_.sendTransform(tf::StampedTransform(markers_[index].current_camera_tf,ros::Time::now(),
                                                          camera_tf_id_old.str(),camera_tf_id.str()));

          listener_->lookupTransform(marker_tf_id_old.str(),camera_tf_id.str(),ros::Time(0),
                                     markers_[index].tf_to_previous);
        }
        catch(tf::TransformException &e)
        {
          ROS_ERROR("Not able to lookup transform");
        }

        // Save origin and quaternion of calculated TF
        marker_origin = markers_[index].tf_to_previous.getOrigin();
        marker_quaternion = markers_[index].tf_to_previous.getRotation();

        // If the plane space type is selected, roll, pitch and the Z coordinate are zeroed
        if(space_type_ == "plane")
        {
          double roll, pitch, yaw;
          tf::Matrix3x3(marker_quaternion).getRPY(roll,pitch,yaw);
          roll = 0;
          pitch = 0;
          marker_origin.setZ(0);
          marker_quaternion.setRPY(roll,pitch,yaw);
        }

        markers_[index].tf_to_previous.setRotation(marker_quaternion);
        markers_[index].tf_to_previous.setOrigin(marker_origin);

        marker_origin = markers_[index].tf_to_previous.getOrigin();
        markers_[index].geometry_msg_to_previous.position.x = marker_origin.getX();
        markers_[index].geometry_msg_to_previous.position.y = marker_origin.getY();
        markers_[index].geometry_msg_to_previous.position.z = marker_origin.getZ();

        marker_quaternion = markers_[index].tf_to_previous.getRotation();
        markers_[index].geometry_msg_to_previous.orientation.x = marker_quaternion.getX();
        markers_[index].geometry_msg_to_previous.orientation.y = marker_quaternion.getY();
        markers_[index].geometry_msg_to_previous.orientation.z = marker_quaternion.getZ();
        markers_[index].geometry_msg_to_previous.orientation.w = marker_quaternion.getW();

        // increase marker count
        marker_counter_++;

        // Invert the transform of the new marker to compute the camera pose relative to it
        markers_[index].current_camera_tf = markers_[index].current_camera_tf.inverse();

        marker_origin = markers_[index].current_camera_tf.getOrigin();
        markers_[index].current_camera_pose.position.x = marker_origin.getX();
        markers_[index].current_camera_pose.position.y = marker_origin.getY();
        markers_[index].current_camera_pose.position.z = marker_origin.getZ();

        marker_quaternion = markers_[index].current_camera_tf.getRotation();
        markers_[index].current_camera_pose.orientation.x = marker_quaternion.getX();
        markers_[index].current_camera_pose.orientation.y = marker_quaternion.getY();
        markers_[index].current_camera_pose.orientation.z = marker_quaternion.getZ();
        markers_[index].current_camera_pose.orientation.w = marker_quaternion.getW();

        // Publish all TFs and markers
        publishTfs(false);
      }
    }

    //------------------------------------------------------
    // Compute global position of new marker
    //------------------------------------------------------
    if((marker_counter_previous_ < marker_counter_) && (first_marker_detected_ == true))
    {
      // Publish all TF five times for listener
      for(char k = 0; k < 5; k++)
        publishTfs(false);

      std::stringstream marker_tf_name;
      marker_tf_name << "marker_" << index;

      listener_->waitForTransform("world",marker_tf_name.str(),ros::Time(0),
                                  ros::Duration(WAIT_FOR_TRANSFORM_INTERVAL));
      try
      {
        listener_->lookupTransform("world",marker_tf_name.str(),ros::Time(0),
                                   markers_[index].tf_to_world);
      }
      catch(tf::TransformException &e)
      {
        ROS_ERROR("Not able to lookup transform");
      }

      // Saving TF to Pose
      const tf::Vector3 marker_origin = markers_[index].tf_to_world.getOrigin();
      markers_[index].geometry_msg_to_world.position.x = marker_origin.getX();
      markers_[index].geometry_msg_to_world.position.y = marker_origin.getY();
      markers_[index].geometry_msg_to_world.position.z = marker_origin.getZ();

      tf::Quaternion marker_quaternion=markers_[index].tf_to_world.getRotation();
      markers_[index].geometry_msg_to_world.orientation.x = marker_quaternion.getX();
      markers_[index].geometry_msg_to_world.orientation.y = marker_quaternion.getY();
      markers_[index].geometry_msg_to_world.orientation.z = marker_quaternion.getZ();
      markers_[index].geometry_msg_to_world.orientation.w = marker_quaternion.getW();
    }
  }

  //------------------------------------------------------
  // Compute which of visible markers is the closest to the camera
  //------------------------------------------------------
  bool any_markers_visible=false;
  int num_of_visible_markers=0;

  if(first_marker_detected_ == true)
  {
    double minimal_distance = INIT_MIN_SIZE_VALUE;
    for(int k = 0; k < num_of_markers_; k++)
    {
      double a,b,c,size;

      // If marker is visible, distance is calculated
      if(markers_[k].visible==true)
      {
        a = markers_[k].current_camera_pose.position.x;
        b = markers_[k].current_camera_pose.position.y;
        c = markers_[k].current_camera_pose.position.z;
        size = std::sqrt((a * a) + (b * b) + (c * c));
        if(size < minimal_distance)
        {
          minimal_distance = size;
          closest_camera_index_ = k;
        }

        any_markers_visible = true;
        num_of_visible_markers++;
      }
    }
  }

  //------------------------------------------------------
  // Publish all known markers
  //------------------------------------------------------
  if(first_marker_detected_ == true)
    publishTfs(true);

  //------------------------------------------------------
  // Compute global camera pose
  //------------------------------------------------------
  if((first_marker_detected_ == true) && (any_markers_visible == true))
  {
    std::stringstream closest_camera_tf_name;
    closest_camera_tf_name << "camera_" << closest_camera_index_;

    listener_->waitForTransform("world",closest_camera_tf_name.str(),ros::Time(0),
                                ros::Duration(WAIT_FOR_TRANSFORM_INTERVAL));
    try
    {
      listener_->lookupTransform("world",closest_camera_tf_name.str(),ros::Time(0),
                                 world_position_transform_);
    }
    catch(tf::TransformException &ex)
    {
      ROS_ERROR("Not able to lookup transform");
    }

    // Saving TF to Pose
    const tf::Vector3 marker_origin = world_position_transform_.getOrigin();
    world_position_geometry_msg_.position.x = marker_origin.getX();
    world_position_geometry_msg_.position.y = marker_origin.getY();
    world_position_geometry_msg_.position.z = marker_origin.getZ();

    tf::Quaternion marker_quaternion = world_position_transform_.getRotation();
    world_position_geometry_msg_.orientation.x = marker_quaternion.getX();
    world_position_geometry_msg_.orientation.y = marker_quaternion.getY();
    world_position_geometry_msg_.orientation.z = marker_quaternion.getZ();
    world_position_geometry_msg_.orientation.w = marker_quaternion.getW();
  }

  //------------------------------------------------------
  // Publish all known markers
  //------------------------------------------------------
  if(first_marker_detected_ == true)
    publishTfs(true);

  //------------------------------------------------------
  // Publish custom marker message
  //------------------------------------------------------
  aruco_mapping::ArucoMarker marker_msg;

  if((any_markers_visible == true))
  {
    marker_msg.header.stamp = ros::Time::now();
    marker_msg.header.frame_id = "world";
    marker_msg.marker_visibile = true;
    marker_msg.num_of_visible_markers = num_of_visible_markers;
    marker_msg.global_camera_pose = world_position_geometry_msg_;
    marker_msg.marker_ids.clear();
    marker_msg.global_marker_poses.clear();
    for(size_t j = 0; j < marker_counter_; j++)
    {
      if(markers_[j].visible == true)
      {
        marker_msg.marker_ids.push_back(markers_[j].marker_id);
        marker_msg.global_marker_poses.push_back(markers_[j].geometry_msg_to_world);       
      }
    }
  }
  else
  {
    marker_msg.header.stamp = ros::Time::now();
    marker_msg.header.frame_id = "world";
    marker_msg.num_of_visible_markers = num_of_visible_markers;
    marker_msg.marker_visibile = false;
    marker_msg.marker_ids.clear();
    marker_msg.global_marker_poses.clear();
  }

  // Publish custom marker msg
  marker_msg_pub_.publish(marker_msg);

  return true;
}
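The arucoMarker2Tf conversion used throughout processImage is not shown here; a common way to build a tf::Transform from the marker's Rodrigues rotation and translation is sketched below (an assumption about this project's implementation, which may additionally re-orient axes; the upstream aruco::Marker exposes Rvec and Tvec as single-precision cv::Mat).

// Hypothetical conversion: aruco marker pose (Rvec/Tvec) -> tf::Transform.
tf::Transform arucoMarker2Tf(const aruco::Marker& marker)
{
  cv::Mat rvec64, tvec64, rot;
  marker.Rvec.convertTo(rvec64, CV_64FC1);
  marker.Tvec.convertTo(tvec64, CV_64FC1);
  cv::Rodrigues(rvec64, rot);                      // axis-angle -> 3x3 rotation matrix

  tf::Matrix3x3 tf_rot(rot.at<double>(0,0), rot.at<double>(0,1), rot.at<double>(0,2),
                       rot.at<double>(1,0), rot.at<double>(1,1), rot.at<double>(1,2),
                       rot.at<double>(2,0), rot.at<double>(2,1), rot.at<double>(2,2));
  tf::Vector3 tf_orig(tvec64.at<double>(0,0), tvec64.at<double>(1,0), tvec64.at<double>(2,0));
  return tf::Transform(tf_rot, tf_orig);
}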
Example #13
void get_veclocity(arma::colvec3& u,const tf::Vector3& origin, const tf::Vector3& origin_tmp)
{
    u(0) = origin.getX() - origin_tmp.getX();
    u(1) = origin.getY() - origin_tmp.getY();
    u(2) = origin.getZ() - origin_tmp.getZ();
}
Example #14
void HapticsPSM::compute_deflection_force(geometry_msgs::Wrench &wrench, tf::Vector3 &d_along_n){

    wrench.force.x = d_along_n.getX();
    wrench.force.y = d_along_n.getY();
    wrench.force.z = d_along_n.getZ();
}
Example #15
double calcEuklideanDistance(tf::Vector3 origin, tf::Vector3 oldOrigin) {
	double dx = origin.getX() - oldOrigin.getX();
	double dy = origin.getY() - oldOrigin.getY();
	double dz = origin.getZ() - oldOrigin.getZ();
	return sqrt(dx*dx + dy*dy + dz*dz);
}
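For reference, tf::Vector3 already provides an equivalent member function, so the same value can be obtained directly (a usage note, not part of the original example):

double same = origin.distance(oldOrigin);  // equivalent to calcEuklideanDistance(origin, oldOrigin)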
Example #16
		// callback for the complete message
		void complete_message_callback(const homog_track::HomogComplete& msg)
		{
			/********** Begin splitting up the incoming message *********/
			// getting boolean indicating the reference has been set
			reference_set = msg.reference_set;

			// if the reference is set then will break out the points
			if (reference_set)
			{
				// initialize temp scalar to zero
				temp_scalar = cv::Mat::zeros(1,1,CV_64F);
			
				// getting the current marker points
				circles_curr = msg.current_points;
				
				// getting the reference marker points
				circles_ref = msg.reference_points;
				
				// setting the current points to the point vector
				curr_red_p.x = circles_curr.red_circle.x;
				curr_green_p.x = circles_curr.green_circle.x;
				curr_cyan_p.x = circles_curr.cyan_circle.x;
				curr_purple_p.x = circles_curr.purple_circle.x;
				curr_red_p.y = circles_curr.red_circle.y;
				curr_green_p.y = circles_curr.green_circle.y;
				curr_cyan_p.y = circles_curr.cyan_circle.y;
				curr_purple_p.y = circles_curr.purple_circle.y;
				curr_points_p.push_back(curr_red_p);
				curr_points_p.push_back(curr_green_p);
				curr_points_p.push_back(curr_cyan_p);
				curr_points_p.push_back(curr_purple_p);
				
				
				
				// converting the points to be the projective coordinates
				for (int ii = 0; ii < curr_points_m.size(); ii++)
				{
					curr_points_m[ii] = K.inv(cv::DECOMP_LU)*curr_points_m[ii];
					std::cout << "currpoints at " << ii << " is: " << curr_points_m[ii] << std::endl;
					
				}
				
				// setting the reference points to the point vector
				ref_red_p.x = circles_ref.red_circle.x;
				ref_green_p.x = circles_ref.green_circle.x;
				ref_cyan_p.x = circles_ref.cyan_circle.x;
				ref_purple_p.x = circles_ref.purple_circle.x;
				ref_red_p.y = circles_ref.red_circle.y;
				ref_green_p.y = circles_ref.green_circle.y;
				ref_cyan_p.y = circles_ref.cyan_circle.y;
				ref_purple_p.y = circles_ref.purple_circle.y;
				ref_points_p.push_back(ref_red_p);
				ref_points_p.push_back(ref_green_p);
				ref_points_p.push_back(ref_cyan_p);
				ref_points_p.push_back(ref_purple_p);
				
				
				// setting the reference points to the matrix vector; don't need to set the last element because it is already 1
				ref_red_m.at<double>(0,0) = ref_red_p.x;
				ref_red_m.at<double>(1,0) = ref_red_p.y;
				ref_green_m.at<double>(0,0) = ref_green_p.x;
				ref_green_m.at<double>(1,0) = ref_green_p.y;
				ref_cyan_m.at<double>(0,0) = ref_cyan_p.x;
				ref_cyan_m.at<double>(1,0) = ref_cyan_p.y;
				ref_purple_m.at<double>(0,0) = ref_purple_p.x;
				ref_purple_m.at<double>(1,0) = ref_purple_p.y;
				ref_points_m.push_back(ref_red_m);
				ref_points_m.push_back(ref_green_m);
				ref_points_m.push_back(ref_cyan_m);
				ref_points_m.push_back(ref_purple_m);
				
				
				// converting the points to be the projective coordinates
				for (int ii = 0; ii < ref_points_m.size(); ii++)
				{
					ref_points_m[ii] = K.inv(cv::DECOMP_LU)*ref_points_m[ii];
					//std::cout << "refpoints at " << ii << " is: " << ref_points_m[ii] << std::endl;
				}
				
				// if any of the points are -1, skip the homography
				if (curr_red_p.x != -1 && curr_green_p.x != -1 && curr_cyan_p.x != -1 && curr_purple_p.x != -1)
				{
					//std::cout << "hi" << std::endl;
					
					// finding the perspective homography
					G = cv::findHomography(curr_points_p,ref_points_p,0);
					//G = cv::findHomography(ref_points_p,ref_points_p,0);
					
					std::cout << "G: " << G << std::endl;
					
					// decomposing the homography into the four solutions
					// G and K are 3x3
					// R is 3x3
					// 3x1
					// 3x1
					// successful_decomp is the number of solutions found
					successful_decomp = cv::decomposeHomographyMat(G,K,R,T,n);
					
					std::cout << "successful_decomp: " << successful_decomp << std::endl;
					
					
					// if the decomp is successful will find the best matching
					if (successful_decomp > 0)
					{
						
						std::cout << std::endl << std::endl << " begin check for visibility" << std::endl;
						
						// finding the alphas
						alpha_red.data = 1/(G.at<double>(2,0)*ref_red_p.x + G.at<double>(2,1)*ref_red_p.y + 1);
						alpha_green.data = 1/(G.at<double>(2,0)*ref_green_p.x + G.at<double>(2,1)*ref_green_p.y + 1);
						alpha_cyan.data = 1/(G.at<double>(2,0)*ref_cyan_p.x + G.at<double>(2,1)*ref_cyan_p.y + 1);
						alpha_purple.data = 1/(G.at<double>(2,0)*ref_purple_p.x + G.at<double>(2,1)*ref_purple_p.y + 1);
						
						// finding the solutions that give the positive results
						for (int ii = 0; ii < successful_decomp; ii++)
						{
							
							std::cout << "solution set number " << ii << std::endl;
							
							// performing the operation transpose(m)*R*n to check if greater than 0 later
							// order operating on is red green cyan purple
							for (int jj = 0; jj < 4; jj++)
							{
								
								//std::cout << " T size: " << T[ii].size() << std::endl;
								//std::cout << " T type: " << T[ii].type() << std::endl;
								std::cout << " T value: " << T[ii] << std::endl;
								
								//std::cout << " temp scalar 1 " << std::endl;
								//std::cout << " temp scalar size: " << temp_scalar.size() << std::endl;
								//std::cout << " temp scalar type: " << temp_scalar.type() << std::endl;
								//std::cout << " temp scalar value " << temp_scalar <<std::endl;
								temp_scalar = curr_points_m[jj].t();
								
								//std::cout << " temp scalar 2 " << std::endl;
								//std::cout << " temp scalar size: " << temp_scalar.size() << std::endl;
								//std::cout << " temp scalar type: " << temp_scalar.type() << std::endl;
								//std::cout << " temp scalar value " << temp_scalar <<std::endl;
								
								//std::cout << " R size: " << R[ii].size() << std::endl;
								//std::cout << " R type: " << R[ii].type() << std::endl;
								//std::cout << " R value: " << R[ii] << std::endl;
								temp_scalar = temp_scalar*R[ii];
								
								//std::cout << " temp scalar 3 " << std::endl;
								//std::cout << " temp scalar size: " << temp_scalar.size() << std::endl;
								//std::cout << " temp scalar type: " << temp_scalar.type() << std::endl;
								//std::cout << " temp scalar value " << temp_scalar <<std::endl;
								
								//std::cout << " n size: " << n[ii].size() << std::endl;
								//std::cout << " n type: " << n[ii].type() << std::endl;
								std::cout << " n value: " << n[ii] << std::endl;
								temp_scalar = temp_scalar*n[ii];
								
								//std::cout << " temp scalar size: " << temp_scalar.size() << std::endl;
								//std::cout << " temp scalar type: " << temp_scalar.type() << std::endl;
								//std::cout << " temp scalar value " << temp_scalar <<std::endl;
								//std::cout << " temp scalar value at 0,0" << temp_scalar.at<double>(0,0) << std::endl;
								
								scalar_value_check.push_back(temp_scalar.at<double>(0,0));
								
								////std::cout << " scalar value check size: " << scalar_value_check.size() << std::endl;
								//std::cout << " \tthe value for the " << jj << " visibility check is: " << scalar_value_check[4*ii+jj] << std::endl;
								
							}
						}
						
						std::cout << " end check for visibility" << std::endl << std::endl;
						
						// resetting first solution found and second solution found
						first_solution_found = false;
						second_solution_found = false;
						fc_found = false;
						
						// getting the two solutions or only one if there are not two
						for (int ii = 0; ii < successful_decomp; ii++)
						{
							// getting the values onto the temporary vector
							// getting the start and end of the next solution
							temp_solution_start = scalar_value_check.begin() + 4*ii;
							temp_solution_end = scalar_value_check.begin() + 4*ii+4;
							temp_solution.assign(temp_solution_start,temp_solution_end);
							
							// checking if all the values are positive
							all_positive = true;
							current_temp_index = 0;
							while (all_positive && current_temp_index < 4)
							{
								if (temp_solution[current_temp_index] >= 0)
								{
									current_temp_index++;
								}
								else
								{
									all_positive = false;
								}
							}
							
							// if all values are positive and no first solution has been found, assign to the
							// first solution; if all positive and a first solution already exists, assign to
							// the second solution; otherwise do nothing
							if (all_positive && first_solution_found && !second_solution_found)
							{
								// setting it to indicate a solution has been found
								second_solution_found = true;
								
								// setting the rotation, translation, and normal to be the second set
								second_R = R[ii];
								second_T = T[ii];
								second_n = n[ii];
								
								// setting the projected values
								second_solution = temp_solution;
							}
							else if (all_positive && !first_solution_found)
							{
								// setting it to indicate a solution has been found
								first_solution_found = true;
								
								// setting the rotation, translation, and normal to be the first set
								first_R = R[ii];
								first_T = T[ii];
								first_n = n[ii];
								
								// setting the projected values
								first_solution = temp_solution;
							}
							
							// erasing all the values from the temp solution
							temp_solution.erase(temp_solution.begin(),temp_solution.end());
						}
						
						// erasing all the scalar values from the check
						scalar_value_check.erase(scalar_value_check.begin(),scalar_value_check.end());
					
					
						// displaying the first solution if it was found
						if (first_solution_found)
						{
							std::cout << std::endl << "first R: " << first_R << std::endl;
							std::cout << "first T: " << first_T << std::endl;
							std::cout << "first n: " << first_n << std::endl;
							for (double ii : first_solution)
							{
								std::cout << ii << " ";
							}
							std::cout << std::endl;
							
						}
						
						// displaying the second solution if it was found
						if (second_solution_found)
						{
							std::cout << std::endl << "second R: " << second_R << std::endl;
							std::cout << "second T: " << second_T << std::endl;
							std::cout << "second n: " << second_n << std::endl;
							for (double ii : second_solution)
							{
								std::cout << ii << " ";
							}
							std::cout << std::endl;
						}
						
						// because the reference is set up so that n should have only a z component, the correct
						// choice is the one closest to n_ref = [0,0,1]^T, i.e. the one with the greatest dot product with n_ref
						if (first_solution_found && second_solution_found)
						{
							if (first_n.dot(n_ref) >= second_n.dot(n_ref))
							{
								R_fc = first_R;
								T_fc = first_T;
							}
							else
							{
								R_fc = second_R;
								T_fc = second_T;
							}
							fc_found = true;
						}
						else if(first_solution_found)
						{
							R_fc = first_R;
							T_fc = first_T;
							fc_found = true;
						}
						
						//if a solution was found will publish
						// need to convert to pose message so use
						if (fc_found)
						{
							// converting the rotation from a cv matrix to quaternion, first need it as a matrix3x3
							R_fc_tf[0][0] = R_fc.at<double>(0,0);
							R_fc_tf[0][1] = R_fc.at<double>(0,1);
							R_fc_tf[0][2] = R_fc.at<double>(0,2);
							R_fc_tf[1][0] = R_fc.at<double>(1,0);
							R_fc_tf[1][1] = R_fc.at<double>(1,1);
							R_fc_tf[1][2] = R_fc.at<double>(1,2);
							R_fc_tf[2][0] = R_fc.at<double>(2,0);
							R_fc_tf[2][1] = R_fc.at<double>(2,1);
							R_fc_tf[2][2] = R_fc.at<double>(2,2);
							std::cout << "Final R:\n" << R_fc << std::endl;
							
							// converting the translation to a vector 3
							T_fc_tf.setX(T_fc.at<double>(0,0));
							T_fc_tf.setY(T_fc.at<double>(1,0));
							T_fc_tf.setZ(T_fc.at<double>(2,0));
							std::cout << "Final T:\n" << T_fc << std::endl;
							
							// getting the rotation as a quaternion
							R_fc_tf.getRotation(Q_fc_tf);
							
							std::cout << "current orientation:" << "\n\tx:\t" << Q_fc_tf.getX() 
																<< "\n\ty:\t" << Q_fc_tf.getY() 
																<< "\n\tz:\t" << Q_fc_tf.getZ() 
																<< "\n\tw:\t" << Q_fc_tf.getW() 
																<< std::endl;
				
							std::cout << "norm of quaternion:\t" << Q_fc_tf.length() << std::endl;
							
							// getting the negated version of the quaternion for the check
							Q_fc_tf_negated = tf::Quaternion(-Q_fc_tf.getX(),-Q_fc_tf.getY(),-Q_fc_tf.getZ(),-Q_fc_tf.getW());
							
							std::cout << "negated orientation:" << "\n\tx:\t" << Q_fc_tf_negated.getX() 
																<< "\n\ty:\t" << Q_fc_tf_negated.getY() 
																<< "\n\tz:\t" << Q_fc_tf_negated.getZ() 
																<< "\n\tw:\t" << Q_fc_tf_negated.getW() 
																<< std::endl;
																
							std::cout << "norm of negated quaternion:\t" << Q_fc_tf_negated.length() << std::endl;
							
							// showing the last orientation
							std::cout << "last orientation:" << "\n\tx:\t" << Q_fc_tf_last.getX() 
															 << "\n\ty:\t" << Q_fc_tf_last.getY() 
															 << "\n\tz:\t" << Q_fc_tf_last.getZ() 
															 << "\n\tw:\t" << Q_fc_tf_last.getW() 
															 << std::endl;
																
							std::cout << "norm of last quaternion:\t" << Q_fc_tf_last.length() << std::endl;
							
							// checking if the quaternion has flipped
							Q_norm_current_diff = std::sqrt(std::pow(Q_fc_tf.getX() - Q_fc_tf_last.getX(),2.0)
														  + std::pow(Q_fc_tf.getY() - Q_fc_tf_last.getY(),2.0) 
														  + std::pow(Q_fc_tf.getZ() - Q_fc_tf_last.getZ(),2.0) 
														  + std::pow(Q_fc_tf.getW() - Q_fc_tf_last.getW(),2.0));
							
							std::cout << "current difference:\t" << Q_norm_current_diff << std::endl;
							
							Q_norm_negated_diff = std::sqrt(std::pow(Q_fc_tf_negated.getX() - Q_fc_tf_last.getX(),2.0)
														  + std::pow(Q_fc_tf_negated.getY() - Q_fc_tf_last.getY(),2.0) 
														  + std::pow(Q_fc_tf_negated.getZ() - Q_fc_tf_last.getZ(),2.0) 
														  + std::pow(Q_fc_tf_negated.getW() - Q_fc_tf_last.getW(),2.0));
							
							std::cout << "negated difference:\t" << Q_norm_negated_diff << std::endl;
							
							if (Q_norm_current_diff > Q_norm_negated_diff)
							{
								Q_fc_tf = Q_fc_tf_negated;
							}
							
							// updating the last
							Q_fc_tf_last = Q_fc_tf;
							
							// converting the tf quaternion to a geometry message quaternion
							Q_fc_gm.x = Q_fc_tf.getX();
							Q_fc_gm.y = Q_fc_tf.getY();
							Q_fc_gm.z = Q_fc_tf.getZ();
							Q_fc_gm.w = Q_fc_tf.getW();
							
							// converting the tf vector3 to a point
							P_fc_gm.x = T_fc_tf.getX();
							P_fc_gm.y = T_fc_tf.getY();
							P_fc_gm.z = T_fc_tf.getZ();
							
							// setting the transform with the values
							fc_tf.setOrigin(T_fc_tf);
							fc_tf.setRotation(Q_fc_tf);
							tf_broad.sendTransform(tf::StampedTransform(fc_tf, msg.header.stamp,"f_star","f_current"));
							
							// setting the decomposed message
							pose_fc_gm.position = P_fc_gm;
							pose_fc_gm.orientation = Q_fc_gm;
							decomposed_msg.pose = pose_fc_gm;
							decomposed_msg.header.stamp = msg.header.stamp;
							decomposed_msg.header.frame_id = "current_frame_normalized";
							decomposed_msg.alpha_red = alpha_red;
							decomposed_msg.alpha_green = alpha_green;
							decomposed_msg.alpha_cyan = alpha_cyan;
							decomposed_msg.alpha_purple = alpha_purple;
							homog_decomp_pub.publish(decomposed_msg);
							
							std::cout << "complete message\n" << decomposed_msg << std::endl << std::endl;
							
							// publish the marker
							marker.pose = pose_fc_gm;
							marker_pub.publish(marker);
							
						}
					}
				}

				// erasing all the temporary points
				if (first_solution_found || second_solution_found)
				{
					// erasing all the point vectors and matrix vectors
					curr_points_p.erase(curr_points_p.begin(),curr_points_p.end());
					ref_points_p.erase(ref_points_p.begin(),ref_points_p.end());
					curr_points_m.erase(curr_points_m.begin(),curr_points_m.end());
					ref_points_m.erase(ref_points_m.begin(),ref_points_m.end());
				}
			}
			/********** End splitting up the incoming message *********/
			
		}