Example #1
0
// Checks FindFarthestTrajectory() against a single obstacle point while the
// body-to-local transform is (1) translation only, (2) translation plus yaw,
// and (3) translation plus full roll/pitch/yaw.  Expected trajectory numbers
// and distances are precomputed offline — presumably from MATLAB, as in the
// sibling tests — TODO confirm.
TEST_F(TrajectoryLibraryTest, ManyTrajectoriesWithTransform) {
    StereoOctomap octomap(bot_frames_);

    TrajectoryLibrary lib(0);
    lib.LoadLibrary("trajtest/full", true);

    double altitude = 30;

    // single obstacle near the (soon-to-be-translated) vehicle position
    double point[3] = {18, 12, 0};
    AddPointToOctree(&octomap, point, altitude);

    // translation-only transform: body at (17, 11, altitude), identity rotation
    BotTrans trans;
    bot_trans_set_identity(&trans);
    trans.trans_vec[0] = 17;
    trans.trans_vec[1] = 11;
    trans.trans_vec[2] = altitude;

    // search the library for the trajectory farthest from any obstacle

    double dist;
    const Trajectory *best_traj;

    std::tie(dist, best_traj) = lib.FindFarthestTrajectory(octomap, trans, 5.0);

    DrawOriginLcmGl(lcm_);

    // draw the winning trajectory in blue for visual debugging
    bot_lcmgl_color3f(lcmgl_, 0, 0, 1);
    best_traj->Draw(lcmgl_, &trans);


    EXPECT_EQ_ARM(best_traj->GetTrajectoryNumber(), 3);
    EXPECT_NEAR(dist, 1.025243, TOLERANCE);

    // now add a yaw
    trans.rot_quat[0] = 0.642787609686539;
    trans.rot_quat[1] = 0;
    trans.rot_quat[2] = 0;
    trans.rot_quat[3] = 0.766044443118978;

    // redraw the previous winner in red under the new transform, then flush
    bot_lcmgl_color3f(lcmgl_, 1, 0, 0);
    best_traj->Draw(lcmgl_, &trans);
    bot_lcmgl_switch_buffer(lcmgl_);

    std::tie(dist, best_traj) = lib.FindFarthestTrajectory(octomap, trans, 5.0);
    //lib.Draw(lcm_, &trans);

    EXPECT_EQ_ARM(best_traj->GetTrajectoryNumber(), 2);
    EXPECT_NEAR(dist, 1.174604, TOLERANCE);

    // now have a transform with roll, pitch, and yaw
    trans.rot_quat[0] = 0.863589399067779;
    trans.rot_quat[1] = -0.004581450790098;
    trans.rot_quat[2] = 0.298930259006064;
    trans.rot_quat[3] = 0.405996379758463;

    std::tie(dist, best_traj) = lib.FindFarthestTrajectory(octomap, trans, 5.0);

    EXPECT_EQ_ARM(best_traj->GetTrajectoryNumber(), 4);
    EXPECT_NEAR(dist, 0.327772, TOLERANCE);
}
int main(int argc, char** argv)
{

    lcm_t* lcm_ = lcm_create(NULL);
    bot_lcmgl_t* lcmgl_ = bot_lcmgl_init(lcm_, "LCMGL_DEMO");

    // lcmgl setup
    bot_lcmgl_translated(lcmgl_, 0, 0, 0);
    bot_lcmgl_line_width(lcmgl_, 2.0f);
    bot_lcmgl_point_size(lcmgl_, 12.0f);
    bot_lcmgl_begin(lcmgl_, GL_POINTS);

    // setup color and draw a point
    bot_lcmgl_color3f(lcmgl_, 1.0, 0, 0);
    bot_lcmgl_vertex3f(lcmgl_, 0, 0, 1);

    // end and refresh to viewer
    bot_lcmgl_end(lcmgl_);
    bot_lcmgl_switch_buffer(lcmgl_);

    printf("Dosvedanya!\n");
    return 0;
}
Example #3
0
/**
 * Tests an obstacle appearing during a trajectory execution.
 *
 * Drives the state machine onto a first avoidance trajectory, replays that
 * trajectory's own state back as simulated pose messages until it times out,
 * then injects a second obstacle and verifies the FSM switches trajectories.
 * Expected trajectory numbers come from an offline MATLAB computation.
 *
 * Fixes vs. original: removed a duplicated TrajectoryLibrary Draw() call and
 * corrected the misspelled lcmgl debug channel name ("Trjaectory").
 */
TEST_F(StateMachineControlTest, TrajectoryInterrupt) {

    StateMachineControl *fsm_control = new StateMachineControl(lcm_, "../TrajectoryLibrary/trajtest/full", "tvlqr-action-out", "state-machine-state", "altitude-reset", false);
    //fsm_control->GetFsmContext()->setDebugFlag(true);

    SubscribeLcmChannels(fsm_control);

    ForceAutonomousMode();

    float altitude = 100;

    // send an obstacle to get the FSM to transition to a new trajectory

    mav::pose_t msg = GetDefaultPoseMsg();

    lcm_->publish(pose_channel_, &msg);
    ProcessAllLcmMessages(fsm_control);

    float point[3] = { 24, 0, 0+altitude };
    SendStereoPointTriple(point);
    ProcessAllLcmMessages(fsm_control);

    lcm_->publish(pose_channel_, &msg);
    ProcessAllLcmMessages(fsm_control);


    // ensure that we have changed trajectories
    EXPECT_EQ_ARM(fsm_control->GetCurrentTrajectory().GetTrajectoryNumber(), 2); // from matlab

    Trajectory running_traj = fsm_control->GetCurrentTrajectory();

    // wait for that trajectory to time out, feeding the FSM poses sampled
    // from the trajectory itself so it believes it is tracking perfectly
    int64_t t_start = GetTimestampNow();
    double t = 0;
    while (t < 1.0) {
        usleep(7142); // 1/140 of a second

        msg.utime = GetTimestampNow();

        t = (msg.utime - t_start) / 1000000.0;

        Eigen::VectorXd state_t = running_traj.GetState(t);

        // position (z offset by the test altitude)
        msg.pos[0] = state_t(0);
        msg.pos[1] = state_t(1);
        msg.pos[2] = state_t(2) + altitude;

        // velocity
        msg.vel[0] = state_t(6);
        msg.vel[1] = state_t(7);
        msg.vel[2] = state_t(8);

        // attitude: trajectory stores roll/pitch/yaw, pose message wants a quaternion
        double rpy[3];
        rpy[0] = state_t(3);
        rpy[1] = state_t(4);
        rpy[2] = state_t(5);

        double quat[4];
        bot_roll_pitch_yaw_to_quat(rpy, quat);

        msg.orientation[0] = quat[0];
        msg.orientation[1] = quat[1];
        msg.orientation[2] = quat[2];
        msg.orientation[3] = quat[3];

        // angular rates
        msg.rotation_rate[0] = state_t(9);
        msg.rotation_rate[1] = state_t(10);
        msg.rotation_rate[2] = state_t(11);


        lcm_->publish(pose_channel_, &msg);
        ProcessAllLcmMessages(fsm_control);
    }

    // now add a new obstacle right in front!
    std::cout << "NEW POINT" << std::endl;
    lcm_->publish(pose_channel_, &msg);
    ProcessAllLcmMessages(fsm_control);

    float point2[3] = { 18, 12, 0+altitude };
    SendStereoPointTriple(point2);
    ProcessAllLcmMessages(fsm_control);

    lcm_->publish(pose_channel_, &msg);
    ProcessAllLcmMessages(fsm_control);

    // visual debugging output: octomap, trajectory library, and current trajectory
    fsm_control->GetOctomap()->Draw(lcm_->getUnderlyingLCM());
    BotTrans body_to_local;
    bot_frames_get_trans(bot_frames_, "body", "local", &body_to_local);
    fsm_control->GetTrajectoryLibrary()->Draw(lcm_->getUnderlyingLCM(), &body_to_local);

    bot_lcmgl_t *lcmgl = bot_lcmgl_init(lcm_->getUnderlyingLCM(), "Trajectory");
    bot_lcmgl_color3f(lcmgl, 0, 1, 0);
    fsm_control->GetCurrentTrajectory().Draw(lcmgl, &body_to_local);
    bot_lcmgl_switch_buffer(lcmgl);
    bot_lcmgl_destroy(lcmgl);


    EXPECT_EQ_ARM(fsm_control->GetCurrentTrajectory().GetTrajectoryNumber(), 3); // from matlab

    delete fsm_control;

    UnsubscribeLcmChannels();
}
Example #4
0
void
Visualization::draw_pyramid_level_matches(const VisualOdometry* odom, int level_num)
{
  const OdometryFrame* ref_frame = odom->getReferenceFrame();
  const OdometryFrame* target_frame = odom->getTargetFrame();
  const PyramidLevel* ref_level = ref_frame->getLevel(level_num);
  const PyramidLevel* target_level = target_frame->getLevel(level_num);

  int width = ref_level->getWidth();
  int height = ref_level->getHeight();

  const MotionEstimator* estimator = odom->getMotionEstimator();
  const FeatureMatch* matches = estimator->getMatches();
  int num_matches = estimator->getNumMatches();

  // previous image
  bot_lcmgl_color3f(_lcmgl, 1,1,1);
  const uint8_t* ref_gray = ref_level->getGrayscaleImage();
  int ref_gray_stride = ref_level->getGrayscaleImageStride();
  int prev_gray_texid = bot_lcmgl_texture2d(_lcmgl, ref_gray,
      width, height, ref_gray_stride,
      BOT_LCMGL_LUMINANCE, BOT_LCMGL_UNSIGNED_BYTE, BOT_LCMGL_COMPRESS_NONE);

  bot_lcmgl_push_matrix(_lcmgl);
  bot_lcmgl_translated(_lcmgl, 0, height + 10, 0);
  bot_lcmgl_texture_draw_quad(_lcmgl, prev_gray_texid,
      0, 0, 0,
      0, height, 0,
      width, height, 0,
      width, 0, 0);

  // draw features in reference frame
  bot_lcmgl_color3f(_lcmgl, 1, 0, 1);
  bot_lcmgl_point_size(_lcmgl, 1.5f);
  bot_lcmgl_begin(_lcmgl, GL_POINTS);
  for(int i=0, nfeatures=ref_level->getNumKeypoints(); i<nfeatures; i++) {
    const KeyPoint& kp = ref_level->getKeypoint(i);
    bot_lcmgl_vertex2f(_lcmgl, kp.u, kp.v);
  }
  bot_lcmgl_end(_lcmgl);
  bot_lcmgl_pop_matrix(_lcmgl);

  // current image
  bot_lcmgl_color3f(_lcmgl, 1,1,1);
  const uint8_t* target_gray = target_level->getGrayscaleImage();
  int target_gray_stride = target_level->getGrayscaleImageStride();
  int gray_texid = bot_lcmgl_texture2d(_lcmgl, target_gray,
      width, height, target_gray_stride,
      BOT_LCMGL_LUMINANCE, BOT_LCMGL_UNSIGNED_BYTE, BOT_LCMGL_COMPRESS_NONE);
  bot_lcmgl_texture_draw_quad(_lcmgl, gray_texid,
      0, 0, 0,
      0, height, 0,
      width, height, 0,
      width, 0, 0);

  // draw features
  bot_lcmgl_color3f(_lcmgl, 0, 1, 0);
  bot_lcmgl_point_size(_lcmgl, 3.0f);
  bot_lcmgl_begin(_lcmgl, GL_POINTS);
  for(int i=0, nfeatures=target_level->getNumKeypoints(); i<nfeatures; i++) {
    const KeyPoint& kp = target_level->getKeypoint(i);
    bot_lcmgl_vertex2f(_lcmgl, kp.u, kp.v);
  }
  bot_lcmgl_end(_lcmgl);

  // draw matches that are not in the maximal clique
  bot_lcmgl_color3f(_lcmgl, 0.3, 0, 0);
  bot_lcmgl_begin(_lcmgl, GL_LINES);
  for(int i=0; i<num_matches; i++) {
    const FeatureMatch& match = matches[i];
    if(match.inlier || match.in_maximal_clique || match.target_keypoint->pyramid_level != level_num)
        continue;
    int cur_x = match.target_keypoint->kp.u;
    int cur_y = match.target_keypoint->kp.v;
    int prev_x = match.ref_keypoint->kp.u;
    int prev_y = match.ref_keypoint->kp.v;
    bot_lcmgl_vertex2f(_lcmgl, cur_x, cur_y);
    bot_lcmgl_vertex2f(_lcmgl, prev_x, prev_y + height + 10);
  }
  bot_lcmgl_end(_lcmgl);

  // draw inliers
  bot_lcmgl_color3f(_lcmgl, 0, 0, 1);
  bot_lcmgl_line_width(_lcmgl, 2.0);
  bot_lcmgl_begin(_lcmgl, GL_LINES);
  for(int i=0; i<num_matches; i++) {
    const FeatureMatch& match = matches[i];
    if(!match.inlier || match.target_keypoint->pyramid_level != level_num)
        continue;
    int cur_x = match.target_keypoint->kp.u;
    int cur_y = match.target_keypoint->kp.v;
    int prev_x = match.ref_keypoint->kp.u;
    int prev_y = match.ref_keypoint->kp.v;
    bot_lcmgl_vertex2f(_lcmgl, cur_x, cur_y);
    bot_lcmgl_vertex2f(_lcmgl, prev_x, prev_y + height + 10);
  }
  bot_lcmgl_end(_lcmgl);

  // draw matches that are in the maximal clique but failed the projection test
  bot_lcmgl_line_width(_lcmgl, 1.0);
  for(int i=0; i<num_matches; i++) {
    const FeatureMatch& match = matches[i];
    if(match.in_maximal_clique && !match.inlier && match.target_keypoint->pyramid_level == level_num) {
      int cur_x = match.target_keypoint->kp.u;
      int cur_y = match.target_keypoint->kp.v;
      int prev_x = match.ref_keypoint->kp.u;
      int prev_y = match.ref_keypoint->kp.v;
      bot_lcmgl_color3f(_lcmgl, 1, 0, 0);
      bot_lcmgl_begin(_lcmgl, GL_LINES);
      bot_lcmgl_vertex2f(_lcmgl, cur_x, cur_y);
      bot_lcmgl_vertex2f(_lcmgl, prev_x, prev_y + height + 10);
      bot_lcmgl_end(_lcmgl);

      bot_lcmgl_color3f(_lcmgl, 1, 1, 1);
      double cur_xyz[] = { cur_x, cur_y + 10, 0 };
      char txt[500];
      snprintf(txt, 80, "%.3f", match.reprojection_error);
      bot_lcmgl_text(_lcmgl, cur_xyz, txt);
    }
  }

  if (level_num ==0){
    //draw the ESM homography estimate
    bot_lcmgl_line_width(_lcmgl, 2.0);
    bot_lcmgl_color3f(_lcmgl, 1, 1, 0);
    bot_lcmgl_begin(_lcmgl,GL_LINE_STRIP);
    const Eigen::Matrix3d & H = odom->getInitialHomography();
    Eigen::MatrixXd vertices(5, 3);
    vertices <<
        0     , 0      , 1  ,
        width , 0      , 1  ,
        width , height , 1  ,
        0     , height , 1  ,
        0     , 0      , 1;
    Eigen::MatrixXd warpedPoints = H*vertices.transpose();
    warpedPoints.row(0) = warpedPoints.row(0).array()/warpedPoints.row(2).array();
    warpedPoints.row(1) = warpedPoints.row(1).array()/warpedPoints.row(2).array();
    for (int i=0;i<warpedPoints.cols();i++){
      bot_lcmgl_vertex2f(_lcmgl, warpedPoints(0, i), warpedPoints(1, i));
    }
    bot_lcmgl_end(_lcmgl);
  }

}
Example #5
0
/// Renders, via lcmgl, the optical-flow style view for one pyramid level:
/// the target image, inlier match points colored by depth, and ref-to-target
/// "flow" lines.  At level 0 the initial ESM homography estimate is drawn
/// in yellow.  Two additional visualizations (target features, 9x9 keypoint
/// boxes) are compiled out with #if 0.
///
/// @param odom       odometry object providing frames, matches, homography
/// @param level_num  pyramid level to visualize
///
/// Fix vs. original: the disabled 9x9-box block referenced an undefined
/// identifier `kp` (should be `kpdata.kp`), which would not compile if the
/// block were ever re-enabled.
void
Visualization::draw_pyramid_level_flow(const VisualOdometry* odom, int level_num)
{
  const OdometryFrame* ref_frame = odom->getReferenceFrame();
  const OdometryFrame* target_frame = odom->getTargetFrame();
  const PyramidLevel* ref_level = ref_frame->getLevel(level_num);
  const PyramidLevel* target_level = target_frame->getLevel(level_num);

  int width = ref_level->getWidth();
  int height = ref_level->getHeight();

  const MotionEstimator* estimator = odom->getMotionEstimator();
  const FeatureMatch* matches = estimator->getMatches();
  int num_matches = estimator->getNumMatches();

  // current (target) image
  bot_lcmgl_color3f(_lcmgl, 1, 1, 1);
  const uint8_t* target_gray = target_level->getGrayscaleImage();
  int target_gray_stride = target_level->getGrayscaleImageStride();
  int gray_texid = bot_lcmgl_texture2d(_lcmgl, target_gray, width, height,
                                       target_gray_stride, BOT_LCMGL_LUMINANCE,
                                       BOT_LCMGL_UNSIGNED_BYTE,
                                       BOT_LCMGL_COMPRESS_NONE);
  bot_lcmgl_texture_draw_quad(_lcmgl, gray_texid,
      0     , 0      , 0   ,
      0     , height , 0   ,
      width , height , 0   ,
      width , 0      , 0);

  float rgb[3];

#if 0
  // draw target features, colored by depth
  bot_lcmgl_color3f(_lcmgl, 0, 1, 0);
  bot_lcmgl_point_size(_lcmgl, 3.0f);
  bot_lcmgl_begin(_lcmgl, GL_POINTS);
  for(int i=0, nfeatures=target_level->getNumKeypoints(); i<nfeatures; ++i) {
    const KeypointData& kpdata(*target_level->getKeypointData(i));
    colormap(kpdata.xyz.z(), rgb);
    bot_lcmgl_color3f(_lcmgl, rgb[0], rgb[1], rgb[2]);
    bot_lcmgl_vertex2f(_lcmgl, kpdata.kp.u, kpdata.kp.v);
  }
  bot_lcmgl_end(_lcmgl);
#endif

#if 0
  // draw 9x9 boxes around keypoints
  bot_lcmgl_line_width(_lcmgl, 1.0);
  bot_lcmgl_color3f(_lcmgl, .5, .5, 1);
  bot_lcmgl_begin(_lcmgl, GL_LINES);
  for(int i=0, num_kp=target_level->getNumKeypoints();
      i < num_kp;
      ++i) {
    const KeypointData& kpdata(*target_level->getKeypointData(i));
    colormap(kpdata.xyz.z(), rgb);
    bot_lcmgl_color3f(_lcmgl, rgb[0], rgb[1], rgb[2]);

    // four sides of the box as GL_LINES segment pairs
    bot_lcmgl_vertex2f(_lcmgl, kpdata.kp.u-4, kpdata.kp.v-4);
    bot_lcmgl_vertex2f(_lcmgl, kpdata.kp.u-4, kpdata.kp.v+4);

    bot_lcmgl_vertex2f(_lcmgl, kpdata.kp.u-4, kpdata.kp.v+4);
    bot_lcmgl_vertex2f(_lcmgl, kpdata.kp.u+4, kpdata.kp.v+4);

    bot_lcmgl_vertex2f(_lcmgl, kpdata.kp.u+4, kpdata.kp.v+4);
    bot_lcmgl_vertex2f(_lcmgl, kpdata.kp.u+4, kpdata.kp.v-4);

    bot_lcmgl_vertex2f(_lcmgl, kpdata.kp.u+4, kpdata.kp.v-4);
    bot_lcmgl_vertex2f(_lcmgl, kpdata.kp.u-4, kpdata.kp.v-4);
  }
  bot_lcmgl_end(_lcmgl);
#endif

#if 1
  // draw inliers as points, colored by depth
  bot_lcmgl_point_size(_lcmgl, 4.0f);
  bot_lcmgl_begin(_lcmgl, GL_POINTS);
  for (int i=0; i<num_matches; i++) {
    const FeatureMatch& match = matches[i];
    if (!match.inlier ||
        match.target_keypoint->pyramid_level != level_num)
      continue;
    int cur_x = match.target_keypoint->kp.u;
    int cur_y = match.target_keypoint->kp.v;
    colormap(match.target_keypoint->xyz(2), rgb);
    bot_lcmgl_color3f(_lcmgl, rgb[0], rgb[1], rgb[2]);
    bot_lcmgl_vertex2f(_lcmgl, cur_x, cur_y);
  }
  bot_lcmgl_end(_lcmgl);
#endif

#if 1
  // draw ref-to-target 'flow' lines for inliers, colored by depth
  //bot_lcmgl_color3f(_lcmgl, 0, 1, 0);
  bot_lcmgl_line_width(_lcmgl, 2.0f);
  bot_lcmgl_begin(_lcmgl, GL_LINES);
  for (int i=0; i<num_matches; i++) {
    const FeatureMatch& match = matches[i];
    if (!match.inlier ||
        match.target_keypoint->pyramid_level != level_num)
      continue;
    int cur_x = match.target_keypoint->kp.u;
    int cur_y = match.target_keypoint->kp.v;
    int prev_x = match.ref_keypoint->kp.u;
    int prev_y = match.ref_keypoint->kp.v;
    colormap(match.target_keypoint->xyz(2), rgb);
    bot_lcmgl_color3f(_lcmgl, rgb[0], rgb[1], rgb[2]);
    bot_lcmgl_vertex2f(_lcmgl, cur_x, cur_y);
    bot_lcmgl_vertex2f(_lcmgl, prev_x, prev_y);
  }
  bot_lcmgl_end(_lcmgl);
#endif

  if (level_num == 0) {
    //draw the ESM homography estimate
    bot_lcmgl_line_width(_lcmgl, 2.0);
    bot_lcmgl_color3f(_lcmgl, 1, 1, 0);
    bot_lcmgl_begin(_lcmgl, GL_LINE_STRIP);
    const Eigen::Matrix3d & H = odom->getInitialHomography();
    // homogeneous corners of the image, closed loop (first corner repeated)
    Eigen::MatrixXd vertices(5, 3);
    vertices <<
        0     , 0      , 1,
        width , 0      , 1,
        width , height , 1,
        0     , height , 1,
        0     , 0      , 1;
    Eigen::MatrixXd warpedPoints = H*vertices.transpose();
    // perspective divide
    warpedPoints.row(0) = warpedPoints.row(0).array()/warpedPoints.row(2).array();
    warpedPoints.row(1) = warpedPoints.row(1).array()/warpedPoints.row(2).array();
    for (int i=0;i<warpedPoints.cols();i++) {
      bot_lcmgl_vertex2f(_lcmgl, warpedPoints(0, i) ,warpedPoints(1, i));
    }
    bot_lcmgl_end(_lcmgl);
  }

}