Example #1
  /* Convert AprilTags::TagDetection to duckietown_msgs::TagDetection */
  duckietown_msgs::TagDetection toMsg (const AprilTags::TagDetection& detection)
  {
    duckietown_msgs::TagDetection msg;
    msg.good = detection.good;
    msg.id = detection.id;
    /* Four corner points of the detection, flattened as x,y pairs.
       (Looping over msg.p here would never run, since msg.p starts empty;
       detection.p is a fixed array of four corners.) */
    for (int i = 0; i < 4; ++i){
      msg.p.push_back(detection.p[i].first);
      msg.p.push_back(detection.p[i].second);
    }
   
    msg.cxy.push_back(detection.cxy.first);
    msg.cxy.push_back(detection.cxy.second);
   
    msg.observedPerimeter = detection.observedPerimeter;

    /* Flatten the 3x3 homography in row-major order */
    for (int r = 0; r < 3; ++r){
      for (int c = 0; c < 3; ++c){
        msg.homography.push_back(detection.homography(r,c));
      }
    }

    msg.orientation = detection.getXYOrientation();

    msg.hxy.push_back(detection.hxy.first);
    msg.hxy.push_back(detection.hxy.second);

    /* Extract focal length and principal point */
    double fx = camera_info_.K[0];
    double fy = camera_info_.K[4];
    double px = camera_info_.K[2];
    double py = camera_info_.K[5];

    /* Extract relative translation rotation */
    Eigen::Vector3d trans;
    Eigen::Matrix3d rot;
    detection.getRelativeTranslationRotation(tag_size_, fx, fy, px, py, trans, rot);
    msg.transform.translation.x = trans(0);
    msg.transform.translation.y = trans(1);
    msg.transform.translation.z = trans(2);
    tf::Matrix3x3 tf_matrix3x3;
    tf::matrixEigenToTF(rot,tf_matrix3x3);
    tf::Quaternion quat;
    tf_matrix3x3.getRotation(quat);
    msg.transform.rotation.x = quat.x();
    msg.transform.rotation.y = quat.y();
    msg.transform.rotation.z = quat.z();
    msg.transform.rotation.w = quat.w();

    return msg;
  }
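
A minimal sketch of how this converter might be driven, assuming a node class that owns tag_detector_, camera_info_, tag_size_, and a publisher (all names here are hypothetical, including the duckietown_msgs::AprilTags container message):

  void TagDetectorNode::imageCallback(const sensor_msgs::ImageConstPtr& image_in)
  {
    /* Convert the ROS image to grayscale OpenCV and extract tags */
    cv_bridge::CvImagePtr cv_ptr =
        cv_bridge::toCvCopy(image_in, sensor_msgs::image_encodings::MONO8);
    std::vector<AprilTags::TagDetection> detections =
        tag_detector_->extractTags(cv_ptr->image);

    /* Convert each detection and publish the batch */
    duckietown_msgs::AprilTags tags_msg;
    tags_msg.header = image_in->header;
    for (size_t i = 0; i < detections.size(); ++i) {
      tags_msg.detections.push_back(toMsg(detections[i]));
    }
    pub_detections_.publish(tags_msg);
  }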
Example #2
  void print_detection(AprilTags::TagDetection& detection) const {
    cout << "  Id: " << detection.id
         << " (Hamming: " << detection.hammingDistance << ")";

    // recovering the relative pose of a tag:

    // NOTE: for this to be accurate, it is necessary to use the
    // actual camera parameters here as well as the actual tag size
    // (m_fx, m_fy, m_px, m_py, m_tagSize)

    Eigen::Vector3d translation;
    Eigen::Matrix3d rotation;
    detection.getRelativeTranslationRotation(m_tagSize, m_fx, m_fy, m_px, m_py,
                                             translation, rotation);

    Eigen::Matrix3d F;
    F << 1,  0,  0,
         0, -1,  0,
         0,  0,  1;
    Eigen::Matrix3d fixed_rot = F*rotation;
    double yaw, pitch, roll;
    wRo_to_euler(fixed_rot, yaw, pitch, roll);

    cout << "  distance=" << translation.norm()
         << "m, x=" << translation(0)
         << ", y=" << translation(1)
         << ", z=" << translation(2)
         << ", yaw=" << yaw
         << ", pitch=" << pitch
         << ", roll=" << roll
         << endl;


    const size_t buffsize = 60;
    char str2[buffsize];

    if (stream) {
        // snprintf bounds the write so a long message cannot overflow str2
        snprintf(str2, buffsize, "ID %d x %f y %f ptch %f ", detection.id, translation(0), translation(1), pitch);
        stream->send(str2, buffsize);
        printf("sent - %s\n", str2);
        //len = stream->receive(line, sizeof(line));
        //line[len] = 0;
        //printf("received - %s\n", line);
    }
    // Also note that for SLAM/multi-view application it is better to
    // use reprojection error of corner points, because the noise in
    // this relative pose is very non-Gaussian; see iSAM source code
    // for suitable factors.
  }
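
Every example here calls wRo_to_euler without showing it. In the stock AprilTags C++ demo it looks roughly like the following sketch, where standardRad wraps an angle into (-pi, pi]:

  #include <cmath>
  #include <Eigen/Dense>

  // Wrap an angle into the range (-pi, pi].
  inline double standardRad(double t) {
    if (t >= 0.) {
      t = std::fmod(t + M_PI, 2.0 * M_PI) - M_PI;
    } else {
      t = std::fmod(t - M_PI, -2.0 * M_PI) + M_PI;
    }
    return t;
  }

  // Recover yaw/pitch/roll from a rotation matrix (world-from-body convention).
  void wRo_to_euler(const Eigen::Matrix3d& wRo,
                    double& yaw, double& pitch, double& roll) {
    yaw = standardRad(std::atan2(wRo(1,0), wRo(0,0)));
    double c = std::cos(yaw);
    double s = std::sin(yaw);
    pitch = standardRad(std::atan2(-wRo(2,0), wRo(0,0)*c + wRo(1,0)*s));
    roll  = standardRad(std::atan2(wRo(0,2)*s - wRo(1,2)*c,
                                   -wRo(0,1)*s + wRo(1,1)*c));
  }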
Example #3
  void print_detection(AprilTags::TagDetection& detection) /*const*/ {
    cout << "  Id: " << detection.id
         << " (Hamming: " << detection.hammingDistance << ")";

    // recovering the relative pose of a tag:

    // NOTE: for this to be accurate, it is necessary to use the
    // actual camera parameters here as well as the actual tag size
    // (m_fx, m_fy, m_px, m_py, m_tagSize)

    Eigen::Vector3d translation;
    Eigen::Matrix3d rotation;
    detection.getRelativeTranslationRotation(m_tagSize, m_fx, m_fy, m_px, m_py,
                                             translation, rotation);

    Eigen::Matrix3d F;
    F << 1,  0,  0,
         0, -1,  0,
         0,  0,  1;
    Eigen::Matrix3d fixed_rot = F*rotation;
    double yaw, pitch, roll;
    wRo_to_euler(fixed_rot, yaw, pitch, roll);

    cout << "  distance=" << translation.norm()
         << "m, x=" << translation(0)
         << ", y=" << translation(1)
         << ", z=" << translation(2)
         << ", yaw=" << yaw
         << ", pitch=" << pitch
         << ", roll=" << roll
         << endl;
    /******** log detection data to ic.msg ********/
    ic.msg.pose.pose.position.x = translation(0);
    ic.msg.pose.pose.position.y = translation(1);
    ic.msg.pose.pose.position.z = translation(2);
    // NOTE: yaw is stored directly in orientation.x as a logging slot, so
    // this is not a valid quaternion; the remaining fields stay at zero.
    ic.msg.pose.pose.orientation.x = yaw;
    ic.msg.header.seq++;
    ic.msg.header.stamp = ros::Time::now();
    ic.publish(ic.msg);
    // Also note that for SLAM/multi-view application it is better to
    // use reprojection error of corner points, because the noise in
    // this relative pose is very non-Gaussian; see iSAM source code
    // for suitable factors.
  }
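
If a consumer of ic.msg expects a real orientation rather than a raw yaw value, the recovered Euler angles can be converted with tf before publishing; a minimal sketch using the same yaw, pitch, and roll variables:

  #include <tf/transform_datatypes.h>

  // Build a proper quaternion from the recovered Euler angles
  // (argument order is roll, pitch, yaw).
  ic.msg.pose.pose.orientation =
      tf::createQuaternionMsgFromRollPitchYaw(roll, pitch, yaw);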
Example #4
void Demo::print_detection(AprilTags::TagDetection& detection) {
    cout << "  Id: " << detection.id
         << " (Hamming: " << detection.hammingDistance << ")";

    // recovering the relative pose of a tag:

    // NOTE: for this to be accurate, it is necessary to use the
    // actual camera parameters here as well as the actual tag size
    // (m_fx, m_fy, m_px, m_py, m_tagSize)

    Eigen::Vector3d translation;
    Eigen::Matrix3d rotation;
    detection.getRelativeTranslationRotation(m_tagSize, m_fx, m_fy, m_px, m_py,
                                             translation, rotation);

    Eigen::Matrix3d F;
    F << 1,  0,  0,
         0, -1,  0,
         0,  0,  1;

    Eigen::Matrix3d fixed_rot = F * rotation;

    double yaw, pitch, roll;
    wRo_to_euler(fixed_rot, yaw, pitch, roll);

    cout << "  distance=" << translation.norm()
         << "m, x=" << translation(0)
         << ", y=" << translation(1)
         << ", z=" << translation(2)
         << ", yaw=" << yaw
         << ", pitch=" << pitch
         << ", roll=" << roll
         << endl;

    // Also note that for SLAM/multi-view application it is better to
    // use reprojection error of corner points, because the noise in
    // this relative pose is very non-Gaussian; see iSAM source code
    // for suitable factors.
}
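
The constant matrix F used in all of these demos is a diagonal axis flip: it negates the camera's y axis (which points down in the image convention) before the Euler conversion, apparently so the printed angles follow a y-up convention. An equivalent, more self-documenting construction:

  // diag(1, -1, 1): flip the y axis of the camera-frame rotation
  Eigen::Matrix3d F = Eigen::Vector3d(1.0, -1.0, 1.0).asDiagonal();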
void AprilAnalysis::printDetection(AprilTags::TagDetection& detection) const {
    std::cout << "  Id: " << detection.id
              << " (Hamming: " << detection.hammingDistance << ")";
    Eigen::Vector3d translation;
    Eigen::Matrix3d rotation;
    detection.getRelativeTranslationRotation(m_tagSize, m_fx, m_fy, m_px, m_py,
                                             translation, rotation);
    Eigen::Matrix3d F;
    F << 1,  0,  0,
         0, -1,  0,
         0,  0,  1;
    Eigen::Matrix3d fixed_rot = F*rotation;
    double yaw, pitch, roll;
    wRo_to_euler(fixed_rot, yaw, pitch, roll);

    std::cout << "  distance = " << translation.norm()
        << "m,\nx = " << translation(0)
        << "\ny = " << translation(1)
        << "\nz = " << translation(2)
        << "\nyaw = " << yaw
        << ", pitch = " << pitch
        << ", roll = " << roll
        << std::endl;
}
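
The m_fx/m_fy/m_px/m_py intrinsics that the NOTEs above insist on usually come from camera calibration. A sketch of pulling them from a ROS sensor_msgs::CameraInfo, whose K field is the row-major 3x3 camera matrix (the same layout Example #1 reads):

  void setIntrinsics(const sensor_msgs::CameraInfo& info) {
    // K = [fx 0 px; 0 fy py; 0 0 1], stored row-major
    m_fx = info.K[0];  // focal length x
    m_fy = info.K[4];  // focal length y
    m_px = info.K[2];  // principal point x
    m_py = info.K[5];  // principal point y
  }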
void print_detection(AprilTags::TagDetection& detection) {
  
  if(detection.id != TAG_ID)
    return;

  // recovering the relative pose of a tag:
  // NOTE: for this to be accurate, it is necessary to use the
  // actual camera parameters here as well as the actual tag size
  // (m_fx, m_fy, m_px, m_py, m_tagSize)

  Eigen::Vector3d tag_translation; // in camera frame
  Eigen::Matrix3d tag_rotation;    // in camera frame
  detection.getRelativeTranslationRotation(m_tagSize, m_fx, m_fy, m_px, m_py,
                                           tag_translation, tag_rotation);

  double tag_roll, tag_pitch, tag_yaw;
  wRo_to_euler(tag_rotation, tag_yaw, tag_pitch, tag_roll);

  // Note: Roll measures 180 degrees at a neutral attitude for some reason.
  // I know this isn't yaw, but the yawWrap function does the bounding we want.
  tag_roll = wave::yawWrap(tag_roll + M_PI);

  Eigen::Vector3d qr_frame_tag_angles(tag_roll, tag_pitch, tag_yaw);

  Eigen::Matrix3d rotation_roll_pi = wave::createEulerRot(M_PI, 0, 0);
  qr_frame_tag_angles = rotation_roll_pi * qr_frame_tag_angles;

  Eigen::Matrix3d rotation_yaw_half_pi = wave::createEulerRot(0, 0, M_PI/2);
  qr_frame_tag_angles = rotation_yaw_half_pi * qr_frame_tag_angles;

  tf::Quaternion tag_quaternion;
  tag_quaternion.setEulerZYX(qr_frame_tag_angles(2), qr_frame_tag_angles(1),
                             qr_frame_tag_angles(0));

  tf::Matrix3x3 tag_dcm(tag_quaternion);
  Eigen::Vector3d copterTranslation = cToM*tag_translation;
  double measured_x_position = -copterTranslation(2);
  double measured_y_position = -copterTranslation(1);
  double measured_z_position = -copterTranslation(0);


  Eigen::Matrix4d measured_pose;
  measured_pose <<
    tag_dcm[0][0], tag_dcm[0][1], tag_dcm[0][2], measured_x_position,
    tag_dcm[1][0], tag_dcm[1][1], tag_dcm[1][2], measured_y_position,
    tag_dcm[2][0], tag_dcm[2][1], tag_dcm[2][2], measured_z_position,
    0, 0, 0, 1;
    

  // Correct for origin offset.
  Eigen::Matrix4d origin_pose;
  origin_pose <<
    __origin_rotation_matrix[0][0], __origin_rotation_matrix[0][1], __origin_rotation_matrix[0][2], __origin_position(0),
    __origin_rotation_matrix[1][0], __origin_rotation_matrix[1][1], __origin_rotation_matrix[1][2], __origin_position(1),
    __origin_rotation_matrix[2][0], __origin_rotation_matrix[2][1], __origin_rotation_matrix[2][2], __origin_position(2),
    0,                              0,                              0,                              1;

  Eigen::Matrix4d measured_pose_with_origin_correction;
  measured_pose_with_origin_correction = origin_pose.inverse() * measured_pose;

  tf::Matrix3x3 rotation_with_origin_correction(measured_pose_with_origin_correction(0,0),
    measured_pose_with_origin_correction(0,1), measured_pose_with_origin_correction(0,2),
    measured_pose_with_origin_correction(1,0), measured_pose_with_origin_correction(1,1),
    measured_pose_with_origin_correction(1,2), measured_pose_with_origin_correction(2,0),
    measured_pose_with_origin_correction(2,1), measured_pose_with_origin_correction(2,2));

  tf::Quaternion final_quaternion;

  rotation_with_origin_correction.getRotation(final_quaternion);

  geometry_msgs::Pose to_publish;
  to_publish.position.x = measured_pose_with_origin_correction(0, 3);
  to_publish.position.y = measured_pose_with_origin_correction(1, 3);
  to_publish.position.z = measured_pose_with_origin_correction(2, 3);
  tf::quaternionTFToMsg(final_quaternion, to_publish.orientation);

  cout << "x: " << to_publish.orientation.x << ", y: " << to_publish.orientation.y
       << ", z: " << to_publish.orientation.z << ", w: " << to_publish.orientation.w << endl;

  prev_t = ros::Time::now();
  crop_image = true;

  april_tag_one_target_ips::ips_msg curpose;
  curpose.header.stamp = ros::Time::now();
  curpose.tag_id = 0;
  curpose.hamming_distance = 0;
  curpose.header.frame_id="/map";
  curpose.X = to_publish.position.x;
  curpose.Y = to_publish.position.y;
  curpose.Z = to_publish.position.z;
  curpose.Roll = 0;  // tf::getRoll(to_publish.orientation);
  curpose.Pitch = 0; // tf::getPitch(to_publish.orientation);
  curpose.Yaw = tf::getYaw(to_publish.orientation); // robot yaw
  //republish pose for rviz

  // send transform; avoid leaking a new broadcaster/transform pair on every
  // detection (assumes br and tform are pointers initialized to NULL elsewhere)
  if (!br)    br = new tf::TransformBroadcaster;
  if (!tform) tform = new tf::Transform;
  tform->setOrigin( tf::Vector3(to_publish.position.x, to_publish.position.y, 0) );
  tf::Quaternion q;
  q.setEulerZYX(tf::getYaw(to_publish.orientation), 0,0);
  tform->setRotation( q );
  *tform = tform->inverse();
  br->sendTransform(tf::StampedTransform(*tform, ros::Time::now(), "base_footprint", "map"));

  pose_pub.publish(curpose);

/*
#ifdef DEBUG_ROS_APRIL
  double RAD2DEG = 180.0 / M_PI;  // converts radians to degrees
  tf::Matrix3x3 m(q);
  double roll, pitch, yaw;
  m.getRPY(roll, pitch, yaw);
  ROS_INFO("Euler Angles: %f, %f, %f", roll*RAD2DEG, pitch*RAD2DEG, yaw*RAD2DEG);
#endif
*/
}
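
For completeness, a sketch of the loop that typically drives print_detection, following the stock AprilTags demo (the tag family and the processImage name are assumptions):

  #include <iostream>
  #include <vector>
  #include <opencv2/opencv.hpp>
  #include <AprilTags/TagDetector.h>
  #include <AprilTags/Tag36h11.h>

  void processImage(const cv::Mat& image_gray) {
    // One detector instance, reused across frames
    static AprilTags::TagDetector tag_detector(AprilTags::tagCodes36h11);

    std::vector<AprilTags::TagDetection> detections =
        tag_detector.extractTags(image_gray);

    std::cout << detections.size() << " tags detected:" << std::endl;
    for (size_t i = 0; i < detections.size(); i++) {
      print_detection(detections[i]);
    }
  }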