// Calculate the centroid of multiple small tags
Point2f tag_centroid(const std::vector<AprilTags::TagDetection> &detections, Point2f &centr)
{
  centr = Point2f(0.f, 0.f);
  if (detections.empty())
    return centr;
  for (size_t i = 0; i < detections.size(); i++)
  {
    centr.x += detections[i].cxy.first;
    centr.y += detections[i].cxy.second;
  }
  centr.x /= detections.size();
  centr.y /= detections.size();
  return centr;
}
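For context, a minimal usage sketch feeding detector output into the tag_centroid above. The image path and the 36h11 tag family are placeholder assumptions; extractTags is the detector entry point of the AprilTags C++ library.

#include <iostream>
#include <vector>
#include <opencv2/opencv.hpp>
#include "AprilTags/TagDetector.h"
#include "AprilTags/Tag36h11.h"

using namespace cv;

int main() {
  // Placeholder input frame; any grayscale image containing tags works.
  Mat gray = imread("frame.png", IMREAD_GRAYSCALE);
  AprilTags::TagDetector detector(AprilTags::tagCodes36h11);
  std::vector<AprilTags::TagDetection> detections = detector.extractTags(gray);

  Point2f centroid;
  tag_centroid(detections, centroid);
  std::cout << "centroid: " << centroid.x << ", " << centroid.y << std::endl;
  return 0;
}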
Example #2
Apriltag DetectorNode::DetectionToApriltagMsg(
    const AprilTags::TagDetection &detection) {
  Apriltag tag;
  // Gather basic information
  tag.id = detection.id;
  tag.family = tag_family_;
  tag.hamming_distance = detection.hammingDistance;
  tag.center.x = detection.cxy.first;
  tag.center.y = detection.cxy.second;
  tag.size = tag_size_;
  std::for_each(begin(detection.p), end(detection.p),
                [&](const AprilTags::Pointf &corner) {
    geometry_msgs::Point point;
    point.x = corner.first;
    point.y = corner.second;
    tag.corners.push_back(point);
  });
  if (!cam_calibrated_) return tag;
  // Get rotation and translation of tag in camera frame, only if we have cinfo!
  Eigen::Quaterniond c_Q_b;
  Eigen::Vector3d c_T_b;
  /// @todo: Need to decide whether to undistort points here!
  detection.getRelativeQT(tag_size_, model_.fullIntrinsicMatrix(),
                          model_.distortionCoeffs(), c_Q_b, c_T_b);
  SetPose(&tag.pose, c_Q_b, c_T_b);
  return tag;
}
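SetPose is a helper not included in this snippet; a minimal sketch of what it presumably does, assuming tag.pose is a geometry_msgs::Pose:

#include <Eigen/Geometry>
#include <geometry_msgs/Pose.h>

// Hypothetical sketch of the SetPose helper called above: copy an Eigen
// quaternion/translation pair into a geometry_msgs::Pose.
void SetPose(geometry_msgs::Pose *pose,
             const Eigen::Quaterniond &q, const Eigen::Vector3d &t) {
  pose->position.x = t(0);
  pose->position.y = t(1);
  pose->position.z = t(2);
  pose->orientation.w = q.w();
  pose->orientation.x = q.x();
  pose->orientation.y = q.y();
  pose->orientation.z = q.z();
}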
Example #3
  void print_detection(AprilTags::TagDetection& detection) const {
    cout << "  Id: " << detection.id
         << " (Hamming: " << detection.hammingDistance << ")";

    // recovering the relative pose of a tag:

    // NOTE: for this to be accurate, it is necessary to use the
    // actual camera parameters here as well as the actual tag size
    // (m_fx, m_fy, m_px, m_py, m_tagSize)

    Eigen::Vector3d translation;
    Eigen::Matrix3d rotation;
    detection.getRelativeTranslationRotation(m_tagSize, m_fx, m_fy, m_px, m_py,
                                             translation, rotation);

    Eigen::Matrix3d F;
    F <<
      1,  0,  0,
      0, -1,  0,
      0,  0,  1;
    Eigen::Matrix3d fixed_rot = F*rotation;
    double yaw, pitch, roll;
    wRo_to_euler(fixed_rot, yaw, pitch, roll);

    cout << "  distance=" << translation.norm()
         << "m, x=" << translation(0)
         << ", y=" << translation(1)
         << ", z=" << translation(2)
         << ", yaw=" << yaw
         << ", pitch=" << pitch
         << ", roll=" << roll
         << endl;


    const size_t buffsize = 60;
    char str2[buffsize];

    if (stream) {
        // snprintf avoids overflowing the fixed-size buffer
        snprintf(str2, buffsize, "ID %d x %f y %f ptch %f ",
                 detection.id, translation(0), translation(1), pitch);
        stream->send(str2, buffsize);
        printf("sent - %s\n", str2);
        //len = stream->receive(line, sizeof(line));
        //line[len] = 0;
        //printf("received - %s\n", line);
    }
    // Also note that for SLAM/multi-view application it is better to
    // use reprojection error of corner points, because the noise in
    // this relative pose is very non-Gaussian; see iSAM source code
    // for suitable factors.
  }
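wRo_to_euler is a utility from the AprilTags C++ demo code; for reference, a sketch along the lines of that implementation, together with the standardRad angle-wrapping helper it relies on:

#include <cmath>
#include <Eigen/Dense>

// Wrap an angle to (-pi, pi].
double standardRad(double t) {
  if (t >= 0.) t = fmod(t + M_PI, 2 * M_PI) - M_PI;
  else         t = fmod(t - M_PI, -2 * M_PI) + M_PI;
  return t;
}

// Extract yaw/pitch/roll from a world-to-body rotation matrix.
void wRo_to_euler(const Eigen::Matrix3d& wRo,
                  double& yaw, double& pitch, double& roll) {
  yaw   = standardRad(atan2(wRo(1,0), wRo(0,0)));
  double c = cos(yaw), s = sin(yaw);
  pitch = standardRad(atan2(-wRo(2,0), wRo(0,0)*c + wRo(1,0)*s));
  roll  = standardRad(atan2(wRo(0,2)*s - wRo(1,2)*c, -wRo(0,1)*s + wRo(1,1)*c));
}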
Example #4
  /* Convert AprilTags::TagDetection to duckietown_msgs::TagDetection */
  duckietown_msgs::TagDetection toMsg (const AprilTags::TagDetection& detection)
  {
    duckietown_msgs::TagDetection msg;
    msg.good = detection.good;
    msg.id = detection.id;
    // detection.p holds the tag's four corner points; iterate over those
    // (msg.p is still empty here, so msg.p.size() would skip the loop)
    for (int i = 0; i < 4; ++i){
      msg.p.push_back(detection.p[i].first);
      msg.p.push_back(detection.p[i].second);
    }
   
    msg.cxy.push_back(detection.cxy.first);
    msg.cxy.push_back(detection.cxy.second);
   
    msg.observedPerimeter = detection.observedPerimeter;

    msg.homography.push_back(detection.homography(0,0));
    msg.homography.push_back(detection.homography(0,1));
    msg.homography.push_back(detection.homography(0,2));
    msg.homography.push_back(detection.homography(1,0));
    msg.homography.push_back(detection.homography(1,1));
    msg.homography.push_back(detection.homography(1,2));
    msg.homography.push_back(detection.homography(2,0));
    msg.homography.push_back(detection.homography(2,1));
    msg.homography.push_back(detection.homography(2,2));

    msg.orientation = detection.getXYOrientation();

    msg.hxy.push_back(detection.hxy.first);
    msg.hxy.push_back(detection.hxy.second);

    /* Extract focal length and principal point */
    double fx = camera_info_.K[0];
    double fy = camera_info_.K[4];
    double px = camera_info_.K[2];
    double py = camera_info_.K[5];

    /* Extract relative translation rotation */
    Eigen::Vector3d trans;
    Eigen::Matrix3d rot;
    detection.getRelativeTranslationRotation(tag_size_,fx,fy,px,py,trans,rot);
    msg.transform.translation.x = trans(0);
    msg.transform.translation.y = trans(1);
    msg.transform.translation.z = trans(2);
    tf::Matrix3x3 tf_matrix3x3;
    tf::matrixEigenToTF(rot,tf_matrix3x3);
    tf::Quaternion quat;
    tf_matrix3x3.getRotation(quat);
    msg.transform.rotation.x = quat.x();
    msg.transform.rotation.y = quat.y();
    msg.transform.rotation.z = quat.z();
    msg.transform.rotation.w = quat.w();

    return msg;
  }
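The nine push_back calls above flatten the homography in row-major order. A receiver-side sketch to rebuild the 3x3 matrix (homographyFromMsg is a hypothetical helper name, assuming the generated duckietown_msgs headers):

#include <Eigen/Dense>
#include <duckietown_msgs/TagDetection.h>

Eigen::Matrix3d homographyFromMsg(const duckietown_msgs::TagDetection &msg) {
  Eigen::Matrix3d H;
  for (int r = 0; r < 3; ++r)
    for (int c = 0; c < 3; ++c)
      H(r, c) = msg.homography[3 * r + c];  // row-major layout from toMsg()
  return H;
}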
Example #5
  void print_detection(AprilTags::TagDetection& detection) /*const*/ {
    cout << "  Id: " << detection.id
         << " (Hamming: " << detection.hammingDistance << ")";

    // recovering the relative pose of a tag:

    // NOTE: for this to be accurate, it is necessary to use the
    // actual camera parameters here as well as the actual tag size
    // (m_fx, m_fy, m_px, m_py, m_tagSize)

    Eigen::Vector3d translation;
    Eigen::Matrix3d rotation;
    detection.getRelativeTranslationRotation(m_tagSize, m_fx, m_fy, m_px, m_py,
                                             translation, rotation);

    Eigen::Matrix3d F;
    F <<
      1,  0,  0,
      0, -1,  0,
      0,  0,  1;
    Eigen::Matrix3d fixed_rot = F*rotation;
    double yaw, pitch, roll;
    wRo_to_euler(fixed_rot, yaw, pitch, roll);

    cout << "  distance=" << translation.norm()
         << "m, x=" << translation(0)
         << ", y=" << translation(1)
         << ", z=" << translation(2)
         << ", yaw=" << yaw
         << ", pitch=" << pitch
         << ", roll=" << roll
         << endl;
    /******** log detection data to ic.msg ********/
    ic.msg.pose.pose.position.x = translation(0);
    ic.msg.pose.pose.position.y = translation(1);
    ic.msg.pose.pose.position.z = translation(2);
    // NOTE: raw Euler angles are packed into the quaternion fields here
    ic.msg.pose.pose.orientation.x = yaw;
    ic.msg.pose.pose.orientation.y = pitch;
    ic.msg.pose.pose.orientation.z = roll;
    ic.msg.header.seq ++;
    ros::Time temp_t = ros::Time::now();
    ic.msg.header.stamp.sec = temp_t.sec;
    ic.msg.header.stamp.nsec = temp_t.nsec;
    ic.publish(ic.msg);
    // Also note that for SLAM/multi-view application it is better to
    // use reprojection error of corner points, because the noise in
    // this relative pose is very non-Gaussian; see iSAM source code
    // for suitable factors.
  }
Example #6
void MultiAprilTagsTracker::imageDrawnCallback( const sensor_msgs::ImageConstPtr& msg) {
  cv_bridge::CvImagePtr cv_ptr;
  try {
    cv_ptr = cv_bridge::toCvCopy( msg, "bgr8" );
  }
  catch( cv_bridge::Exception& e ) {
    ROS_ERROR( "cv_bridge exception: %s", e.what() );
    return;
  }
  if( tags.size() > 0) {
    multi_apriltags_tracker::april_tag_pos msg;
    for( unsigned int i=0; i<tags.size(); i++ ){
      AprilTags::TagDetection tag = tags[i];
      circle( cv_ptr->image, Point2f( tag.cxy.first, tag.cxy.second ), 2, Scalar(0,255,0), 4 );
      line( cv_ptr->image, Point( tag.p[0].first, tag.p[0].second ), Point( tag.p[1].first, tag.p[1].second ), Scalar(0,255,0), 2 );
      line( cv_ptr->image, Point( tag.p[1].first, tag.p[1].second ), Point( tag.p[2].first, tag.p[2].second ), Scalar(0,255,0), 2 );
      line( cv_ptr->image, Point( tag.p[2].first, tag.p[2].second ), Point( tag.p[3].first, tag.p[3].second ), Scalar(0,255,0), 2 );
      line( cv_ptr->image, Point( tag.p[3].first, tag.p[3].second ), Point( tag.p[0].first, tag.p[0].second ), Scalar(0,255,0), 2 );
      double orientation_length = sqrt( pow(tag.p[0].first-tag.p[1].first,2) + pow(tag.p[0].second-tag.p[1].second,2) );
      line( cv_ptr->image, Point( tag.cxy.first, tag.cxy.second ), Point( tag.cxy.first+orientation_length*cos(tag.getXYOrientation()), tag.cxy.second+orientation_length*sin(tag.getXYOrientation()) ), Scalar(0,255,0), 2 );


      msg.id.push_back( tag.id );
      geometry_msgs::Pose2D pose;
      pose.x = tag.cxy.first;
      pose.y = tag.cxy.second;
      pose.theta = convRadius( tag.getXYOrientation() );
      msg.pose.push_back( pose );

    }
    m_pos_pub.publish(msg);
  }
  cv::imshow(MULTI_APRIL_TAGS_TRACKER_VIEW, cv_ptr->image );
  cv::waitKey(3);  // give HighGUI a chance to render the frame
  if( false == ros::ok() ) {
    ros::shutdown();
  }
}
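convRadius is defined elsewhere in the tracker; as a labeled guess, it likely normalizes the raw getXYOrientation() angle into a fixed range, e.g.:

#include <cmath>

// Hypothetical stand-in for the convRadius helper used above:
// wrap an angle into [-pi, pi).
double convRadius(double theta) {
  while (theta >= M_PI) theta -= 2.0 * M_PI;
  while (theta < -M_PI) theta += 2.0 * M_PI;
  return theta;
}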
Example #7
void Demo::print_detection(AprilTags::TagDetection& detection) {
    cout << "  Id: " << detection.id
         << " (Hamming: " << detection.hammingDistance << ")";

    // recovering the relative pose of a tag:

    // NOTE: for this to be accurate, it is necessary to use the
    // actual camera parameters here as well as the actual tag size
    // (m_fx, m_fy, m_px, m_py, m_tagSize)

    Eigen::Vector3d translation;
    Eigen::Matrix3d rotation;
    detection.getRelativeTranslationRotation(m_tagSize, m_fx, m_fy, m_px, m_py,
                                             translation, rotation);

    Eigen::Matrix3d F;
    F <<
        1,  0,  0,
        0, -1,  0,
        0,  0,  1;

    Eigen::Matrix3d fixed_rot = F * rotation;


    double yaw, pitch, roll;
    wRo_to_euler(fixed_rot, yaw, pitch, roll);

    cout << "  distance=" << translation.norm()
         << "m, x=" << translation(0)
         << ", y=" << translation(1)
         << ", z=" << translation(2)
         << ", yaw=" << yaw
         << ", pitch=" << pitch
         << ", roll=" << roll
         << endl;

    // Also note that for SLAM/multi-view application it is better to
    // use reprojection error of corner points, because the noise in
    // this relative pose is very non-Gaussian; see iSAM source code
    // for suitable factors.
}
Example #8
void AprilAnalysis::printDetection(AprilTags::TagDetection& detection) const {
    std::cout << "  Id: " << detection.id
              << " (Hamming: " << detection.hammingDistance << ")";
    Eigen::Vector3d translation;
    Eigen::Matrix3d rotation;
    detection.getRelativeTranslationRotation(m_tagSize, m_fx, m_fy, m_px, m_py,
                                            translation, rotation);
    Eigen::Matrix3d F;
    F << 1,  0,  0,
         0, -1,  0,
         0,  0,  1;
    Eigen::Matrix3d fixed_rot = F*rotation;
    double yaw, pitch, roll;
    wRo_to_euler(fixed_rot, yaw, pitch, roll);

    std::cout << "  distance = " << translation.norm()
        << "m,\nx = " << translation(0)
        << "\ny = " << translation(1)
        << "\nz = " << translation(2)
        << "\nyaw = " << yaw
        << ", pitch = " << pitch
        << ", roll = " << roll
        << std::endl;
}
Example #9
void print_detection(AprilTags::TagDetection& detection) {
  
  if(detection.id != TAG_ID)
    return;

  // recovering the relative pose of a tag:
  // NOTE: for this to be accurate, it is necessary to use the
  // actual camera parameters here as well as the actual tag size
  // (m_fx, m_fy, m_px, m_py, m_tagSize)

  Eigen::Vector3d tag_translation; //in camera frame
  Eigen::Matrix3d tag_rotation;    //in camera frame
  detection.getRelativeTranslationRotation(m_tagSize, m_fx, m_fy, m_px, m_py,
                                           tag_translation, tag_rotation);

  double tag_roll, tag_pitch, tag_yaw;
  wRo_to_euler(tag_rotation, tag_yaw, tag_pitch, tag_roll);

  // Note: Roll measures 180 degrees at a neutral attitude for some reason.
  // I know this isn't yaw, but the yawWrap function does the bounding we want.
  tag_roll = wave::yawWrap(tag_roll + M_PI);

  Eigen::Vector3d qr_frame_tag_angles(tag_roll, tag_pitch, tag_yaw);

  Eigen::Matrix3d rotation_roll_pi = wave::createEulerRot(M_PI, 0, 0);
  qr_frame_tag_angles = rotation_roll_pi * qr_frame_tag_angles;

  Eigen::Matrix3d rotation_yaw_half_pi = wave::createEulerRot(0, 0, M_PI/2);
  qr_frame_tag_angles = rotation_yaw_half_pi * qr_frame_tag_angles;

  tf::Quaternion tag_quaternion;
  tag_quaternion.setEulerZYX(qr_frame_tag_angles(2), qr_frame_tag_angles(1),
                        qr_frame_tag_angles(0));

  tf::Matrix3x3 tag_dcm(tag_quaternion);
  Eigen::Vector3d copterTranslation = cToM*tag_translation;
  double measured_x_position = -copterTranslation(2);
  double measured_y_position = -copterTranslation(1);
  double measured_z_position = -copterTranslation(0);


  Eigen::Matrix4d measured_pose;
  measured_pose <<
    tag_dcm[0][0], tag_dcm[0][1], tag_dcm[0][2], measured_x_position,
    tag_dcm[1][0], tag_dcm[1][1], tag_dcm[1][2], measured_y_position,
    tag_dcm[2][0], tag_dcm[2][1], tag_dcm[2][2], measured_z_position,
    0, 0, 0, 1;
    

  // Correct for origin offset.
  Eigen::Matrix4d origin_pose;
  origin_pose <<
    __origin_rotation_matrix[0][0], __origin_rotation_matrix[0][1], __origin_rotation_matrix[0][2], __origin_position(0),
    __origin_rotation_matrix[1][0], __origin_rotation_matrix[1][1], __origin_rotation_matrix[1][2], __origin_position(1),
    __origin_rotation_matrix[2][0], __origin_rotation_matrix[2][1], __origin_rotation_matrix[2][2], __origin_position(2),
    0,                              0,                              0,                              1;

  Eigen::Matrix4d measured_pose_with_origin_correction;
  measured_pose_with_origin_correction = origin_pose.inverse() * measured_pose;

  tf::Matrix3x3 rotation_with_origin_correction(measured_pose_with_origin_correction(0,0),
    measured_pose_with_origin_correction(0,1), measured_pose_with_origin_correction(0,2),
    measured_pose_with_origin_correction(1,0), measured_pose_with_origin_correction(1,1),
    measured_pose_with_origin_correction(1,2), measured_pose_with_origin_correction(2,0),
    measured_pose_with_origin_correction(2,1), measured_pose_with_origin_correction(2,2));

  tf::Quaternion final_quaternion;

  rotation_with_origin_correction.getRotation(final_quaternion);

  geometry_msgs::Pose to_publish;
  to_publish.position.x = measured_pose_with_origin_correction(0, 3);
  to_publish.position.y = measured_pose_with_origin_correction(1, 3);
  to_publish.position.z = measured_pose_with_origin_correction(2, 3);
  tf::quaternionTFToMsg(final_quaternion, to_publish.orientation);

  cout << "x: " << to_publish.orientation.x << ", y: " << to_publish.orientation.y
       << ", z: " << to_publish.orientation.z << ", w: " << to_publish.orientation.w << endl;

  prev_t = ros::Time::now();
  crop_image = true;

  april_tag_one_target_ips::ips_msg curpose;
  curpose.header.stamp = ros::Time::now();
  curpose.tag_id = 0;
  curpose.hamming_distance = 0;
  curpose.header.frame_id="/map";
  curpose.X = to_publish.position.x;
  curpose.Y = to_publish.position.y;
  curpose.Z = to_publish.position.z;
  curpose.Roll = 0;   // tf::getRoll(to_publish.orientation);
  curpose.Pitch = 0;  // tf::getPitch(to_publish.orientation);
  curpose.Yaw = tf::getYaw(to_publish.orientation);  // robot yaw
  //republish pose for rviz

  // send transform (allocate the broadcaster and transform once;
  // re-newing them on every detection would leak memory)
  if (!br) br = new tf::TransformBroadcaster;
  if (!tform) tform = new tf::Transform;
  tform->setOrigin( tf::Vector3(to_publish.position.x, to_publish.position.y, 0) );
  tf::Quaternion q;
  q.setEulerZYX(tf::getYaw(to_publish.orientation), 0,0);
  tform->setRotation( q );
  *tform = tform->inverse();
  br->sendTransform(tf::StampedTransform(*tform, ros::Time::now(), "base_footprint", "map"));

  pose_pub.publish(curpose);

/*
#ifdef DEBUG_ROS_APRIL
  double RAD2DEG = 180.0 / M_PI;
  tf::Matrix3x3 m(q);
  double roll, pitch, yaw;
  m.getRPY(roll, pitch, yaw);
  ROS_INFO("Euler Angles: %f, %f, %f", roll*RAD2DEG, pitch*RAD2DEG, yaw*RAD2DEG);
#endif
*/
}
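The origin correction above is a composition of rigid transforms, measured_pose_with_origin_correction = origin_pose^-1 * measured_pose. The same math expressed with Eigen's isometry type (a sketch; correctForOrigin is a hypothetical helper name):

#include <Eigen/Geometry>

// Sketch: the 4x4 origin correction above, using Eigen::Isometry3d.
Eigen::Isometry3d correctForOrigin(const Eigen::Isometry3d &origin_pose,
                                   const Eigen::Isometry3d &measured_pose) {
  return origin_pose.inverse() * measured_pose;
}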