Example #1
// A 'halo' camera: a circular ring of poses, all looking at a center point
// @param focus_center: the center point that every pose looks at
// @param halo_r:       radius of the ring
// @param halo_dz:      elevation of the camera above/below focus_center's z value
// @param n_poses:      number of generated poses
void
generate_halo(
  std::vector<Eigen::Isometry3d, Eigen::aligned_allocator<Eigen::Isometry3d> > &poses,
  Eigen::Vector3d focus_center, double halo_r, double halo_dz, int n_poses){

  // Use an integer loop counter so exactly n_poses poses are generated;
  // accumulating a floating-point step can overshoot and add an extra pose.
  for (int i = 0; i < n_poses; i++){
    double t = i*(2*M_PI) / ((double) n_poses);
    double x = halo_r*cos(t);
    double y = halo_r*sin(t);
    double z = halo_dz;
    double pitch = atan2(halo_dz, halo_r);   // tilt towards the focus point
    double yaw   = atan2(-y, -x);            // face back towards the ring's center
   
    Eigen::Isometry3d pose;
    pose.setIdentity();
    Eigen::Matrix3d m;
    m = Eigen::AngleAxisd(yaw, Eigen::Vector3d::UnitZ())
      * Eigen::AngleAxisd(pitch, Eigen::Vector3d::UnitY())
      * Eigen::AngleAxisd(0, Eigen::Vector3d::UnitZ());   // yaw-pitch-roll

    pose *= m;
    Eigen::Vector3d v(x, y, z);
    v += focus_center;
    pose.translation() = v;
    poses.push_back(pose);
  }
}
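Example #2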
//==============================================================================
BodyNodeInfo readBodyNodeInfo(tinyxml2::XMLElement* bodyNodeEle)
{
  assert(bodyNodeEle != nullptr);
  // bodyNodeEle should never be nullptr; that is guaranteed because this
  // function is only used internally. If we ever hit the nullptr case, the
  // caller should be fixed.

  BodyNodeInfo bodyNodeInfo;

  dart::dynamics::BodyNode::Properties properties;

  // Name attribute
  properties.mName = DEFAULT_KINBODY_ROOT_BODYNODE_NAME;
  if (dart::utils::hasAttribute(bodyNodeEle, "name"))
    properties.mName = dart::utils::getAttributeString(bodyNodeEle, "name");

  // transformation
  Eigen::Isometry3d initTransform = Eigen::Isometry3d::Identity();
  if (dart::utils::hasElement(bodyNodeEle, "transformation"))
  {
    initTransform
        = dart::utils::getValueIsometry3d(bodyNodeEle, "transformation");
  }
  else
  {
    initTransform.setIdentity();
  }

  bodyNodeInfo.properties = std::move(properties);
  bodyNodeInfo.initTransform = initTransform;

  return bodyNodeInfo;
}
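// A minimal, hypothetical sketch of how readBodyNodeInfo() above might be
// driven from a raw XML string. The "name" attribute and "transformation"
// element names come from the function itself; the concrete document and the
// assumption that <transformation> holds "x y z roll pitch yaw" values (DART's
// usual convention) are illustrative only.
// (Needs <tinyxml2.h>, <iostream>, and the helpers above.)
void readBodyNodeInfoExample()
{
  tinyxml2::XMLDocument doc;
  doc.Parse(
      "<body name=\"base\">"
      "  <transformation>0 0 0.5 0 0 0</transformation>"
      "</body>");

  BodyNodeInfo info = readBodyNodeInfo(doc.FirstChildElement("body"));
  std::cout << "name: " << info.properties.mName << "\n"
            << info.initTransform.matrix() << std::endl;
}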
Example #3
Eigen::Isometry3d matrixdToIsometry3d(Eigen::Matrix<double,7,1> input){
  Eigen::Isometry3d output;
  output.setIdentity();
  output.translation() = Eigen::Vector3d( input[0], input[1], input[2] );
  // elements 3..6 hold the quaternion as w, x, y, z
  Eigen::Quaterniond quat(input[3], input[4], input[5], input[6]);
  output.rotate(quat);
  return output;
}
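// A short usage sketch with made-up values: pack a pose as [x y z qw qx qy qz]
// and convert it with the helper above.
void matrixdToIsometry3dExample(){
  Eigen::Matrix<double,7,1> pose_vec;
  pose_vec << 1.0, 2.0, 0.5,        // translation x, y, z
              1.0, 0.0, 0.0, 0.0;   // identity quaternion w, x, y, z
  Eigen::Isometry3d pose = matrixdToIsometry3d(pose_vec);
  std::cout << pose.matrix() << std::endl;
}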
Example #4
 // Packs the camera intrinsics (focal lengths and principal point) into the
 // linear part of an Eigen::Isometry3d, using the transform purely as a 4x4
 // container for the K matrix.
 Eigen::Isometry3d intrinsicsTransform() const
 {
   Eigen::Isometry3d m;
   m.setIdentity();
   m(0,0) = iface->m_focal_x;
   m(0,2) = iface->m_image_center_x;
   m(1,1) = iface->m_focal_y;
   m(1,2) = iface->m_image_center_y;
   return m;
 }
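// A minimal sketch of how such a K-as-Isometry3d can be applied: multiplying a
// camera-frame point by it performs K * p (the translation part is zero), and
// dividing by the third component gives pixel coordinates. The intrinsic values
// below are made up for illustration.
void projectPointExample()
{
  Eigen::Isometry3d K = Eigen::Isometry3d::Identity();
  K(0,0) = 528.0;  K(0,2) = 320.0;   // fx, cx
  K(1,1) = 528.0;  K(1,2) = 240.0;   // fy, cy

  Eigen::Vector3d p_camera(0.1, -0.05, 2.0);   // point in the camera frame (meters)
  Eigen::Vector3d uvw = K * p_camera;
  double u = uvw[0] / uvw[2];
  double v = uvw[1] / uvw[2];
  std::cout << "pixel: " << u << ", " << v << std::endl;
}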
Example #5
Eigen::Isometry3d setIsometry3dFromBotTrans(BotTrans *bt){
  Eigen::Isometry3d tf;
  tf.setIdentity();
  tf.translation()  << bt->trans_vec[0], bt->trans_vec[1],bt->trans_vec[2];
  Eigen::Quaterniond q = Eigen::Quaterniond(bt->rot_quat[0], bt->rot_quat[1], bt->rot_quat[2], bt->rot_quat[3]);  
  tf.rotate(q); 

  return tf;
}
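// The reverse direction, sketched under the same assumptions about libbot's
// BotTrans layout as the function above (trans_vec = x y z, rot_quat = w x y z).
// This is an illustrative helper, not part of the original API.
void setBotTransFromIsometry3d(BotTrans *bt, const Eigen::Isometry3d &tf){
  bt->trans_vec[0] = tf.translation().x();
  bt->trans_vec[1] = tf.translation().y();
  bt->trans_vec[2] = tf.translation().z();
  Eigen::Quaterniond q(tf.rotation());
  bt->rot_quat[0] = q.w();
  bt->rot_quat[1] = q.x();
  bt->rot_quat[2] = q.y();
  bt->rot_quat[3] = q.z();
}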
Example #6
void randomize_transform(Eigen::Isometry3d& tf,
                         double translation_limit=100,
                         double rotation_limit=100)
{
  Eigen::Vector3d r = random_vec<3>(translation_limit);
  Eigen::Vector3d theta = random_vec<3>(rotation_limit);

  tf.setIdentity();
  tf.translate(r);

  if(theta.norm()>0)
    tf.rotate(Eigen::AngleAxisd(theta.norm(), theta.normalized()));
}
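// A short usage sketch; the limits below (1 m of translation, up to pi radians
// of rotation) are illustrative, and random_vec<3>() is the external helper the
// function above already relies on.
void randomizeTransformExample(){
  Eigen::Isometry3d tf;
  randomize_transform(tf, 1.0, M_PI);
  std::cout << tf.matrix() << std::endl;
}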
Example #7
std::vector<ImageFeature> RegApp::getFeaturesFromLCM(const reg::features_t* msg, Eigen::Isometry3d &pose){
  std::vector<ImageFeature> features;

  // The camera pose is the same for every feature in the message, so set it
  // once here rather than re-setting it on every loop iteration.
  pose.setIdentity();
  pose.translation() << msg->pos[0], msg->pos[1], msg->pos[2];
  Eigen::Quaterniond quat(msg->quat[0], msg->quat[1], msg->quat[2], msg->quat[3]);
  pose.rotate(quat);

  for (int i = 0; i < msg->nfeatures; i++){
    ImageFeature f;
    
    f.track_id = msg->features[i].track_id;
    f.uv[0] = msg->features[i].uv[0];
    f.uv[1] = msg->features[i].uv[1];
    f.base_uv[0] = msg->features[i].base_uv[0];
    f.base_uv[1] = msg->features[i].base_uv[1];
    f.uvd[0] = msg->features[i].uvd[0];
    f.uvd[1] = msg->features[i].uvd[1];
    f.uvd[2] = msg->features[i].uvd[2];
    f.xyz[0] = msg->features[i].xyz[0];
    f.xyz[1] = msg->features[i].xyz[1];
    f.xyz[2] = msg->features[i].xyz[2];
    f.xyzw[0] = msg->features[i].xyzw[0];
    f.xyzw[1] = msg->features[i].xyzw[1];
    f.xyzw[2] = msg->features[i].xyzw[2];
    f.xyzw[3] = msg->features[i].xyzw[3];
    // color left out for now
    
    /*
    cout << line << " is line\n";
    cout << "i: " << i <<"\n";
    cout << "f.track_id: " << f.track_id <<"\n";
    cout << "f.uv: " << f.uv[0] << " "<< f.uv[1] <<"\n";
    cout << "f.base_uv: " << f.base_uv[0] << " "<< f.base_uv[1] <<"\n";
    cout << "f.uvd: " << f.uvd[0] << " "<< f.uvd[1]<< " "<< f.uvd[2]<<"\n";
    cout << "f.xyz: " << f.xyz[0] << " "<< f.xyz[1]<< " "<< f.xyz[2]<<"\n";
    cout << "f.xyzw: " << f.xyzw[0] << " "<< f.xyzw[1]<< " "<< f.xyzw[2]<< " "<< f.xyzw[3]<<"\n";
    cout << "f.color: " << (int)f.color[0] << " "<< (int)f.color[1] << " "<< (int)f.color[2] <<"\n";
      */

    features.push_back(f);
  }
  
  //cout << "in: " << msg->nfeatures << " | extracted: "<< features.size() << "\n"; 
  return features;  
}
Example #8
int
main (int argc, char** argv)
{
  // 1. Parse arguments:
  print_info ("Manually generate a simulated RGB-D point cloud using pcl::simulation. For more information, use: %s -h\n", argv[0]);
  if (argc < 3)
  {
    printHelp (argc, argv);
    return (-1);
  }  
  int mode=atoi(argv[1]);
  
  // 2. Construct the simulation method:
  int width = 640;
  int height = 480;
  simexample = SimExample::Ptr (new SimExample (argc, argv, height,width ));
  
  // 3. Generate a series of simulation poses:
  //    mode 0: 100 copies of a single fixed pose
  //    mode 1: a 'halo' camera ring around a focus point
  //    mode 2: slerp between two different poses
  std::vector<Eigen::Isometry3d, Eigen::aligned_allocator<Eigen::Isometry3d> > poses;
  if (mode==0){
    // Create a pose:
    Eigen::Isometry3d pose;
    pose.setIdentity();
    Eigen::Matrix3d m;
    // yaw-pitch-roll:
    m = Eigen::AngleAxisd(-9.14989, Eigen::Vector3d::UnitZ())
      * Eigen::AngleAxisd(0.20944, Eigen::Vector3d::UnitY())
      * Eigen::AngleAxisd(0, Eigen::Vector3d::UnitX());
    pose *= m;
    Eigen::Vector3d v;
    v << 1.31762, 0.382931, 1.89533;
    pose.translation() = v;  
    for (int i=0;i< 100;i++){ // duplicate the pose 100 times
      poses.push_back(pose); 
    }
  }else if(mode==1){
    Eigen::Vector3d focus_center(0,0,1.3);
    double halo_r = 4;
    double halo_dz = 2;
    int n_poses=16;
    generate_halo(poses,focus_center,halo_r,halo_dz,n_poses);
  }else if (mode==2){
    Eigen::Isometry3d pose1;
    pose1.setIdentity();
    pose1.translation() << 1,0.75,2;
    Eigen::Matrix3d rot1;
    rot1 = Eigen::AngleAxisd(M_PI, Eigen::Vector3d::UnitZ())
         * Eigen::AngleAxisd(M_PI/10, Eigen::Vector3d::UnitY())
         * Eigen::AngleAxisd(0.0, Eigen::Vector3d::UnitZ()); // ypr
    pose1.rotate(rot1);  
  
    Eigen::Isometry3d pose2;
    pose2.setIdentity();
    pose2.translation() << 1,-1,3;
    Eigen::Matrix3d rot2;
    rot2 = Eigen::AngleAxisd(3*M_PI/4, Eigen::Vector3d::UnitZ())
         * Eigen::AngleAxisd(M_PI/4, Eigen::Vector3d::UnitY())
         * Eigen::AngleAxisd(0.0, Eigen::Vector3d::UnitZ()); // ypr
    pose2.rotate(rot2);  
  
    int n_poses = 20;
    for (int k = 0; k < n_poses; k++){
      // Interpolation parameter in [0,1]; an integer counter avoids the
      // floating-point accumulation that can skip the final pose.
      double s = (double) k / (double) (n_poses - 1);
      Eigen::Quaterniond r1(pose1.rotation());
      Eigen::Quaterniond r2(pose2.rotation());
      Eigen::Quaterniond rot3 = r1.slerp(s, r2);
      Eigen::Isometry3d pose;
      pose.setIdentity();
      Eigen::Vector3d trans3 = (1-s)*pose1.translation() + s*pose2.translation();
      pose.translation() = trans3;
      pose.rotate(rot3);
      poses.push_back(pose);
    }
  }

  // 4. Do the simulation and write the output:
  double tic_main = getTime();
  for (size_t i=0;i < poses.size();i++){
    std::stringstream ss;
    ss.precision(20);
    ss << "simcloud_" << i;// << ".pcd";
    double tic = getTime();
    simexample->doSim(poses[i]);
    
    write_sim_output(ss.str());
    cout << (getTime() -tic) << " sec\n";
  }
  cout << poses.size() << " poses simulated in " << (getTime() -tic_main) << " seconds\n";
  cout << (poses.size()/ (getTime() -tic_main) ) << " Hz on average\n";
  
  return 0;
}
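Example #9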
Eigen::Isometry3d pose_estimate(FrameMatchPtr match,
                                std::vector<char> & inliers,
                                Eigen::Isometry3d & motion,
                                Eigen::MatrixXd & motion_covariance,
                                Eigen::Matrix<double, 3, 4> & proj_matrix) {
    using namespace pose_estimator;

    PoseEstimator pe(proj_matrix);

    if (match->featuresA_indices.size() != match->featuresB_indices.size())
        cout << "Number of features doesn't match\n";

    size_t num_matches = match->featuresA_indices.size();

    if (num_matches < 3)
        cout << "Need at least three matches to estimate pose\n";

    motion.setIdentity();
    motion_covariance.setIdentity();

    Eigen::Matrix<double, 4, Eigen::Dynamic, Eigen::ColMajor> src_xyzw(4, num_matches);
    Eigen::Matrix<double, 4, Eigen::Dynamic, Eigen::ColMajor> dst_xyzw(4, num_matches);
    for (size_t i=0; i < num_matches; ++i) {

        //    const ImageFeature& featureA(int i) const {
        //      assert (frameA);
        //    int ix = featuresA_indices.at(i);
        //      return frameA->features().at(ix);
        //    }
        //src_xyzw.col(i) = match->featureA(i).xyzw;
        //dst_xyzw.col(i) = match->featureB(i).xyzw;

        int ixA = match->featuresA_indices.at(i);
        int ixB = match->featuresB_indices.at(i);
        //cout << ixA << " | " << ixB << "\n";
        //cout << match->featuresA.size() << " fA size\n";
        //cout <<  match->featuresA[ixA].xyzw[0] << "\n";
        //cout <<  match->featuresA[ixA].xyzw[0] << "\n";

        src_xyzw.col(i) = match->featuresA[ixA].xyzw;
        dst_xyzw.col(i) = match->featuresB[ixB].xyzw;
    }

    // PoseEstimateStatus status = pe.estimate(src_xyzw, dst_xyzw, &inliers, &motion, &motion_covariance);
    Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic, Eigen::ColMajor> motion_covariance_col_major;

    pose_estimator::PoseEstimateStatus status = pe.estimate(src_xyzw, dst_xyzw, &inliers, &motion, &motion_covariance_col_major); //&motion_covariance);
    motion_covariance = motion_covariance_col_major;

    match->status = status;
    match->delta = motion;

    /*
    int num_inliers = std::accumulate(inliers.begin(), inliers.end(), 0);
    const char* str_status = PoseEstimateStatusStrings[status];
    std::cerr << "Motion: " << str_status << " feats: " << match->featuresA_indices.size()
              << " inliers: " << num_inliers
              << " Pose: " << motion
              << " Delta: " << match->delta
              //<< " Cov:\n" << motion_covariance << "\n"
              << " " << match->timeA << " " << match->timeB << std::endl;
     */

    return motion;
}
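// A small sketch of one way the returned motion could be used: composing the
// frame-to-frame delta onto an accumulated world pose. The name
// `world_to_frameA` and the composition order are assumptions here; the actual
// convention depends on PoseEstimator.
void accumulateMotionExample(const Eigen::Isometry3d &motion,
                             Eigen::Isometry3d &world_to_frameA){
  world_to_frameA = world_to_frameA * motion;
}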