Example #1
opengv::transformations_t
opengv::absolute_pose::gp3p(
    const AbsoluteAdapterBase & adapter,
    const std::vector<int> & indices )
{
  assert(indices.size()>2);

  Eigen::Matrix3d f;
  Eigen::Matrix3d v;
  Eigen::Matrix3d p;

  for(size_t i = 0; i < 3; i++)
  {
    f.col(i) = adapter.getBearingVector(indices[i]);
    rotation_t R = adapter.getCamRotation(indices[i]);
    //unrotate the bearingVectors already so the camera rotation doesn't appear
    //in the problem
    f.col(i) = R * f.col(i);
    v.col(i) = adapter.getCamOffset(indices[i]);
    p.col(i) = adapter.getPoint(indices[i]);
  }

  transformations_t solutions;

  modules::gp3p_main(f,v,p,solutions);
  return solutions;
}
Example #2
void mrpt::vision::pnp::rpnp::calcampose(Eigen::MatrixXd& XXc, Eigen::MatrixXd& XXw, Eigen::Matrix3d& R2, Eigen::Vector3d& t2)
{
	Eigen::MatrixXd X = XXc;
	Eigen::MatrixXd Y = XXw;
	Eigen::MatrixXd K = Eigen::MatrixXd::Identity(n, n) - Eigen::MatrixXd::Ones(n, n) * 1 / n;
	Eigen::VectorXd ux, uy;
	uy = X.rowwise().mean();
	ux = Y.rowwise().mean();

	// Need to verify sigmax2
	double sigmax2 = (((X*K).array() * (X*K).array()).colwise().sum()).mean();

	Eigen::MatrixXd SXY = Y*K*(X.transpose()) / n;

	Eigen::JacobiSVD<Eigen::MatrixXd> svd(SXY, Eigen::ComputeThinU | Eigen::ComputeThinV);

	Eigen::Matrix3d S = Eigen::MatrixXd::Identity(3, 3);
	if (SXY.determinant() <0)
		S(2, 2) = -1;

	R2 = svd.matrixV()*S*svd.matrixU().transpose();

	double c2 = (svd.singularValues().asDiagonal()*S).trace() / sigmax2;
	t2 = uy - c2*R2*ux;
	 
	Eigen::Vector3d x, y, z;
	x = R2.col(0);
	y = R2.col(1);
	z = R2.col(2);

	if ((x.cross(y) - z).norm()>0.02)
		R2.col(2) = -R2.col(2);
}
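Note: calcampose is essentially a closed-form similarity alignment (Kabsch/Umeyama). A minimal sketch of the same idea using Eigen's built-in helper, assuming 3xN point matrices XXw (world) and XXc (camera); the function name alignUmeyama is hypothetical:

#include <Eigen/Dense>
#include <Eigen/Geometry>

// Returns the 4x4 homogeneous transform [sR | t; 0 1] minimising the
// least-squares error || XXc - (s*R*XXw + t) || over all point columns.
Eigen::Matrix4d alignUmeyama(const Eigen::Matrix3Xd& XXw, const Eigen::Matrix3Xd& XXc)
{
	return Eigen::umeyama(XXw, XXc, /*with_scaling=*/true);
}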
Example #3
void tangent_and_bitangent(const Eigen::Vector3d & n_,
    Eigen::Vector3d & t_, Eigen::Vector3d & b_)
{
  double rmin = 0.99;
  double n0 = n_(0), n1 = n_(1), n2 = n_(2);
  if (std::abs(n0) <= rmin) {
    rmin = std::abs(n0);
    t_(0) = 0.0;
    t_(1) = - n2 / std::sqrt(1.0 - std::pow(n0, 2));
    t_(2) =   n1 / std::sqrt(1.0 - std::pow(n0, 2));
  }
  if (std::abs(n1) <= rmin) {
    rmin = std::abs(n1);
    t_(0) =   n2 / std::sqrt(1.0 - std::pow(n1, 2));
    t_(1) =   0.0;
    t_(2) = - n0 / std::sqrt(1.0 - std::pow(n1, 2));
  }
  if (std::abs(n2) <= rmin) {
    rmin = std::abs(n2);
    t_(0) =  n1 / std::sqrt(1.0 - std::pow(n2, 2));
    t_(1) = -n0 / std::sqrt(1.0 - std::pow(n2, 2));
    t_(2) =  0.0;
  }
  b_ = n_.cross(t_);
  // Check that the calculated Frenet-Serret frame is right-handed by checking
  // that the determinant of the matrix whose columns are the normal, tangent
  // and bitangent vectors is +1 (the triad is orthonormal)
  Eigen::Matrix3d M;
  M.col(0) = n_;
  M.col(1) = t_;
  M.col(2) = b_;
  if (boost::math::sign(M.determinant()) != 1) {
    PCMSOLVER_ERROR("Frenet-Serret local frame is not right-handed!", BOOST_CURRENT_FUNCTION);
  }
}
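A minimal usage sketch for the routine above (assuming it is in scope, plus <cassert> and <cmath>): build the frame for an arbitrary unit normal and confirm the triad (n, t, b) is orthonormal with determinant +1. Eigen's n.unitOrthogonal() is a built-in alternative for picking the tangent.

Eigen::Vector3d n = Eigen::Vector3d(0.3, -0.4, 0.866).normalized();
Eigen::Vector3d t, b;
tangent_and_bitangent(n, t, b);

Eigen::Matrix3d M;
M.col(0) = n;
M.col(1) = t;
M.col(2) = b;
assert(std::abs(M.determinant() - 1.0) < 1e-12); // orthonormal, right-handed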
Example #4
    /*
     * Creates a rotation matrix which describes how a point in an auxiliary
     * coordinate system, whose x axis is described by vec_along_x_axis and
     * which has a point on its xz plane given by vec_on_xz_plane, rotates
     * into the real coordinate system.
     */
    void Cornea::createRotationMatrix(const Eigen::Vector3d &vec_along_x_axis,
                                      const Eigen::Vector3d &vec_on_xz_plane,
                                      Eigen::Matrix3d &R) {

        // normalise pw
        Eigen::Vector3d vec_on_xz_plane_n = vec_on_xz_plane.normalized();

        // define helper variables x, y and z
        // x
        Eigen::Vector3d xn = vec_along_x_axis.normalized();

        // y
        Eigen::Vector3d tmp = vec_on_xz_plane_n.cross(xn);
        Eigen::Vector3d yn = tmp.normalized();

        // z
        tmp = xn.cross(yn);
        Eigen::Vector3d zn = tmp.normalized();

        // create the rotation matrix
        R.col(0) << xn(0), xn(1), xn(2);
        R.col(1) << yn(0), yn(1), yn(2);
        R.col(2) << zn(0), zn(1), zn(2);

    }
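A quick sanity check, assuming a Cornea instance named cornea (hypothetical): any x axis plus a non-parallel vector on the xz plane should yield an orthonormal rotation.

Eigen::Vector3d x_axis(1.0, 0.0, 0.0);
Eigen::Vector3d on_xz(0.5, 0.0, 2.0);
Eigen::Matrix3d R;
cornea.createRotationMatrix(x_axis, on_xz, R);
// A valid rotation satisfies R * R^T = I and det(R) = +1:
assert((R * R.transpose() - Eigen::Matrix3d::Identity()).norm() < 1e-12);
assert(std::abs(R.determinant() - 1.0) < 1e-12);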
Example #5
Eigen::Quaterniond mesh_core::PlaneProjection::getOrientation() const
{
  Eigen::Matrix3d m;
  m.col(0) = x_axis_;
  m.col(1) = y_axis_;
  m.col(2) = normal_;
  return Eigen::Quaterniond(m);
}
Example #6
Eigen::Matrix3d Reaching::reorderHandAxes(const Eigen::Matrix3d& Q)
{
  Eigen::Matrix3d R = Eigen::MatrixXd::Zero(3, 3);
  R.col(params_.axis_order_[0]) = Q.col(0); // grasp approach vector
  R.col(params_.axis_order_[1]) = Q.col(1); // hand axis
  R.col(params_.axis_order_[2]) = Q.col(2); // hand binormal
  return R;
}
Example #7
void Reconstruction3D::RQdecomposition(Eigen::MatrixXd A, Eigen::Matrix3d &R, Eigen::Matrix3d &Q)
{
	float c, s;
	Eigen::Matrix3d Qx, Qy, Qz;
	Eigen::Matrix3d M;
	M << A(0, 0), A(0, 1), A(0, 2),
		A(1, 0), A(1, 1), A(1, 2),
		A(2, 0), A(2, 1), A(2, 2);

	R = M;
	Reconstruction3D::solveCS(c, s, R(2, 1), R(2, 2));
	Qx << 1, 0, 0,
		0, c, -s,
		0, s, c;
	R *= Qx;

	Reconstruction3D::solveCS(c, s, R(2, 0), -R(2, 2));
	Qy << c, 0, s,
		0, 1, 0,
		-s, 0, c;
	R *= Qy;

	Reconstruction3D::solveCS(c, s, R(1, 0), R(1, 1));
	Qz << c, -s, 0,
		s, c, 0,
		0, 0, 1;
	R *= Qz;

	if (std::abs(R(1, 0)) > 0.00005 || std::abs(R(2, 0)) > 0.00005 || std::abs(R(2, 1)) > 0.00005)
		std::cerr << "PROBLEM WITH RQdecomposition" << std::endl;;

	//    if(R(1,0)!= 0) R(1,0) = 0;
	//    if(R(2,0)!= 0) R(2,0) = 0;
	//    if(R(2,1)!= 0) R(2,1) = 0;


	Q = Qz.transpose()  * Qy.transpose() * Qx.transpose();

	if (R(0, 0) < 0)
	{
		R.col(0) *= -1;
		Q.row(0) *= -1;
	}
	if (R(1, 1) < 0)
	{
		R.col(1) *= -1;
		Q.row(1) *= -1;
	}
	if (R(2, 2) < 0)
	{
		R.col(2) *= -1;
		Q.row(2) *= -1;
	}
}
Example #8
bool determinantCorrection(Eigen::Matrix3d &Rot)
{
    double det1 = Rot.determinant();
    if( det1 < -0.9999999 && det1 > -1.0000001 )
    {
        // When a rotation matrix is a reflection because its determinant is -1
        Rot.col(2) = -1.0*Rot.col(2); // Corrects the third column sign
        std::cout << "Rotation matrix has det = -1, correct procedure to achieve det = 1\n";
        return true;
    }
    return false;
}
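Usage sketch for the helper above: a reflection (determinant -1) gets its third column flipped back into a proper rotation.

Eigen::Matrix3d Rot = Eigen::Matrix3d::Identity();
Rot(2, 2) = -1.0; // a reflection across the xy plane, det = -1
bool corrected = determinantCorrection(Rot);
assert(corrected && std::abs(Rot.determinant() - 1.0) < 1e-9);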
Example #9
Eigen::Matrix3d MerryMotionplanner::compute_orientation(Eigen::Vector3f plane_normal, Eigen::Vector3f major_axis) {
    Eigen::Vector3d xvec_des, yvec_des, zvec_des;
    Eigen::Matrix3d Rmat;
    for (int i = 0; i < 3; i++) {
        zvec_des[i] = -plane_normal[i]; //want tool z pointing OPPOSITE surface normal
        xvec_des[i] = major_axis[i];
    }
    yvec_des = zvec_des.cross(xvec_des); //construct consistent right-hand triad
    Rmat.col(0)= xvec_des;
    Rmat.col(1)= yvec_des;
    Rmat.col(2)= zvec_des;
    return Rmat;
}
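One caveat with compute_orientation: the triad is only orthonormal if major_axis is perpendicular to plane_normal. A hedged sketch that projects the x axis onto the plane first (not part of the original code; assumes plane_normal is unit length):

Eigen::Vector3d z = -plane_normal.cast<double>();   // tool z opposes the surface normal
Eigen::Vector3d x = major_axis.cast<double>();
x = (x - x.dot(z) * z).normalized();                // project x onto the plane
Eigen::Vector3d y = z.cross(x);                     // right-handed triad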
Example #10
void
NurbsTools::pca (const vector_vec3d &data, ON_3dVector &mean, Eigen::Matrix3d &eigenvectors,
		 Eigen::Vector3d &eigenvalues)
{
  if (data.empty ())
  {
    printf ("[NurbsTools::pca] Error, data is empty\n");
    abort ();
  }

  mean = computeMean (data);

  unsigned s = data.size ();

  ON_Matrix Q(3, s);

  for (unsigned i = 0; i < s; i++) {
    Q[0][i] = data[i].x - mean.x;
    Q[1][i] = data[i].y - mean.y;
    Q[2][i] = data[i].z - mean.z;
  }

  ON_Matrix Qt = Q;
  Qt.Transpose();

  ON_Matrix oC;
  oC.Multiply(Q,Qt);

  Eigen::Matrix3d C(3,3);
  for (unsigned i = 0; i < 3; i++) {
      for (unsigned j = 0; j < 3; j++) {
	  C(i,j) = oC[i][j];
      }
  }

  Eigen::SelfAdjointEigenSolver < Eigen::Matrix3d > eigensolver (C);
  if (eigensolver.info () != Eigen::Success)
  {
    printf ("[NurbsTools::pca] Can not find eigenvalues.\n");
    abort ();
  }

  for (int i = 0; i < 3; ++i)
  {
    eigenvalues (i) = eigensolver.eigenvalues () (2 - i);
    if (i == 2)
      eigenvectors.col (2) = eigenvectors.col (0).cross (eigenvectors.col (1));
    else
      eigenvectors.col (i) = eigensolver.eigenvectors ().col (2 - i);
  }
}
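For reference, Eigen::SelfAdjointEigenSolver returns eigenvalues in increasing order; the loop above reverses them to descending and rebuilds the third axis as a cross product so the eigenvector basis is guaranteed right-handed. A compact equivalent sketch:

Eigen::SelfAdjointEigenSolver<Eigen::Matrix3d> es(C);
Eigen::Vector3d evals = es.eigenvalues().reverse();   // descending order
Eigen::Matrix3d evecs;
evecs.col(0) = es.eigenvectors().col(2);
evecs.col(1) = es.eigenvectors().col(1);
evecs.col(2) = evecs.col(0).cross(evecs.col(1));      // force a right-handed basis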
Example #11
  void CEViewOptionsWidget::updateMillerPlane()
  {
    // View into a Miller plane
    Camera *camera = m_glWidget->camera();
    Eigen::Transform3d modelView;
    modelView.setIdentity();

    // OK, so we want to rotate to look along the normal at the plane
    // So we convert <h k l> into a Cartesian normal
    Eigen::Matrix3d cellMatrix = m_ext->unconvertLength(m_ext->currentCellMatrix()).transpose();
    // Get miller indices:
    const Eigen::Vector3d millerIndices
      (static_cast<double>(ui.spin_mi_h->value()),
       static_cast<double>(ui.spin_mi_k->value()),
       static_cast<double>(ui.spin_mi_l->value()));
    // Check to see if we have 0,0,0
    //  in which case, we do nothing
    if (millerIndices.squaredNorm() < 0.5)
      return;

    const Eigen::Vector3d normalVector ((cellMatrix * millerIndices).normalized());

    Eigen::Matrix3d rotation;
    rotation.row(2) = normalVector;
    rotation.row(0) = rotation.row(2).unitOrthogonal();
    rotation.row(1) = rotation.row(2).cross(rotation.row(0));

    // Translate camera to the center of the cell
    const Eigen::Vector3d cellDiagonal =
        cellMatrix.col(0) * m_glWidget->aCells() +
        cellMatrix.col(1) * m_glWidget->bCells() +
        cellMatrix.col(2) * m_glWidget->cCells();

    modelView.translate(-cellDiagonal*0.5);

    // Prerotate the camera to look down the specified normal
    modelView.prerotate(rotation);


    // Pretranslate in the negative Z direction
    modelView.pretranslate(Eigen::Vector3d(0.0, 0.0,
                                           -1.5 * cellDiagonal.norm()));

    camera->setModelview(modelView);

    // Call for a redraw
    m_glWidget->update();
  }
Example #12
 void msEntity::computeEigenInertia(Eigen::Matrix3d& evect,Eigen::Vector3d& eval) const {
     
     evect=Eigen::Matrix3d::Zero();
     eval=Eigen::Vector3d::Zero();
     
     if(Elements.size()==0) return;
     
     Eigen::Matrix3d Inertia=Matrix3d::Zero();
     
     double A,B,C,D,E,F;A=B=C=D=E=F=0; double x,y,z,m;
     
     msChildren<msElement>::const_iterator it=Elements.begin();
     
     for(;it!=Elements.end();++it)
     {
         m=(*it)->getMass(getUnits()->getMass());
         x=(*it)->getPosition()[0];
         y=(*it)->getPosition()[1];
         z=(*it)->getPosition()[2];
         A+=m*(y*y+z*z);B+=m*(x*x+z*z);C+=m*(y*y+x*x);
         D-=m*(z*y);E-=m*(x*z);F-=m*(x*y);
     }
     Inertia<<A,F,E,F,B,D,E,D,C;
     Eigen::EigenSolver<Eigen::Matrix3d> eigensolver(Inertia);
     for( size_t i=0; i<3 ;i++ ){
         
         eval[i]= eigensolver.eigenvalues()[i].real();
         for( size_t j=0; j<3 ; j++ ) evect.col(i)[j] = eigensolver.eigenvectors().col(i)[j].real();
     }
 }
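The inertia tensor built above is symmetric, so Eigen::SelfAdjointEigenSolver would give real, ascending-sorted eigenpairs directly instead of taking .real() from the general solver; a sketch under that assumption:

Eigen::SelfAdjointEigenSolver<Eigen::Matrix3d> es(Inertia);
eval = es.eigenvalues();    // real, ascending
evect = es.eigenvectors();  // orthonormal columns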
Example #13
void update_arrows() {
    geometry_msgs::Point origin, arrow_x_tip, arrow_y_tip, arrow_z_tip;
    Eigen::Matrix3d R;
    Eigen::Quaterniond quat;
    quat.x() = g_stamped_pose.pose.orientation.x;
    quat.y() = g_stamped_pose.pose.orientation.y;
    quat.z() = g_stamped_pose.pose.orientation.z;
    quat.w() = g_stamped_pose.pose.orientation.w;
    R = quat.toRotationMatrix();
    Eigen::Vector3d x_vec, y_vec, z_vec;
    double veclen = 0.2; //make the arrows this long
    x_vec = R.col(0) * veclen;
    y_vec = R.col(1) * veclen;
    z_vec = R.col(2) * veclen;

    //update the arrow markers w/ new pose:
    origin = g_stamped_pose.pose.position;
    arrow_x_tip = origin;
    arrow_x_tip.x += x_vec(0);
    arrow_x_tip.y += x_vec(1);
    arrow_x_tip.z += x_vec(2);
    arrow_marker_x.points.clear();
    arrow_marker_x.points.push_back(origin);
    arrow_marker_x.points.push_back(arrow_x_tip);
    arrow_marker_x.header = g_stamped_pose.header;

    arrow_y_tip = origin;
    arrow_y_tip.x += y_vec(0);
    arrow_y_tip.y += y_vec(1);
    arrow_y_tip.z += y_vec(2);

    arrow_marker_y.points.clear();
    arrow_marker_y.points.push_back(origin);
    arrow_marker_y.points.push_back(arrow_y_tip);
    arrow_marker_y.header = g_stamped_pose.header;

    arrow_z_tip = origin;
    arrow_z_tip.x += z_vec(0);
    arrow_z_tip.y += z_vec(1);
    arrow_z_tip.z += z_vec(2);

    arrow_marker_z.points.clear();
    arrow_marker_z.points.push_back(origin);
    arrow_marker_z.points.push_back(arrow_z_tip);
    arrow_marker_z.header = g_stamped_pose.header;
}
Example #14
ON_NurbsSurface
FittingSurface::initNurbsPCA (int order, NurbsDataSurface *m_data, ON_3dVector z)
{
  ON_3dVector mean;
  Eigen::Matrix3d eigenvectors;
  Eigen::Vector3d eigenvalues;

  unsigned s = m_data->interior.size ();

  NurbsTools::pca (m_data->interior, mean, eigenvectors, eigenvalues);

  m_data->mean = mean;
  //m_data->eigenvectors = (*eigenvectors);

  bool flip (false);
  Eigen::Vector3d ez(z[0],z[1],z[2]);
  if (eigenvectors.col (2).dot (ez) < 0.0)
     flip = true;

  eigenvalues = eigenvalues / s; // seems that the eigenvalues are dependent on the number of points (???)

  ON_3dVector sigma(sqrt(eigenvalues[0]), sqrt(eigenvalues[1]), sqrt(eigenvalues[2]));

  ON_NurbsSurface nurbs (3, false, order, order, order, order);
  nurbs.MakeClampedUniformKnotVector (0, 1.0);
  nurbs.MakeClampedUniformKnotVector (1, 1.0);

  // +- 2 sigma -> covers 95.45 % of all measurements
  double dcu = (4.0 * sigma[0]) / (nurbs.Order (0) - 1);
  double dcv = (4.0 * sigma[1]) / (nurbs.Order (1) - 1);

  ON_3dVector cv_t, cv;
  Eigen::Vector3d ecv_t, ecv;
  Eigen::Vector3d emean(mean[0], mean[1], mean[2]);
  for (int i = 0; i < nurbs.Order (0); i++)
  {
    for (int j = 0; j < nurbs.Order (1); j++)
    {
      cv[0] = -2.0 * sigma[0] + dcu * i;
      cv[1] = -2.0 * sigma[1] + dcv * j;
      cv[2] = 0.0;
      ecv (0) = -2.0 * sigma[0] + dcu * i;
      ecv (1) = -2.0 * sigma[1] + dcv * j;
      ecv (2) = 0.0;
      ecv_t = eigenvectors * ecv + emean;
      cv_t[0] = ecv_t (0);
      cv_t[1] = ecv_t (1);
      cv_t[2] = ecv_t (2);
      if (flip)
	nurbs.SetCV (nurbs.Order (0) - 1 - i, j, ON_3dPoint (cv_t[0], cv_t[1], cv_t[2]));
      else
	nurbs.SetCV (i, j, ON_3dPoint (cv_t[0], cv_t[1], cv_t[2]));
    }
  }
  return nurbs;
}
Example #15
void MarkerArrayVisualizer::drawGaussianDistributions(const char* ns, const std::vector<Eigen::Vector3d>& mu, const std::vector<Eigen::Matrix3d>& sigma, double probability, const std::vector<double>& offset)
{
    visualization_msgs::MarkerArray marker_array;

    const int size = mu.size();

    for (int i=0; i<size; i++)
    {
        visualization_msgs::Marker marker;

        marker.header.frame_id = "/world";
        marker.header.stamp = ros::Time::now();

        marker.ns = ns;
        marker.id = i;
        marker.type = visualization_msgs::Marker::SPHERE;
        marker.action = visualization_msgs::Marker::ADD;

        // axis: eigenvectors
        // radius: eigenvalues
        Eigen::JacobiSVD<Eigen::MatrixXd> svd(sigma[i], Eigen::ComputeThinU | Eigen::ComputeThinV);
        const Eigen::VectorXd& r = svd.singularValues();
        Eigen::Matrix3d Q = svd.matrixU();

        // to make determinant 1
        if (Q.determinant() < 0)
            Q.col(2) *= -1.;
        const Eigen::Quaterniond q(Q);

        marker.pose.position.x = mu[i](0);
        marker.pose.position.y = mu[i](1);
        marker.pose.position.z = mu[i](2);
        marker.pose.orientation.x = q.x();
        marker.pose.orientation.y = q.y();
        marker.pose.orientation.z = q.z();
        marker.pose.orientation.w = q.w();

        const double probability_radius = gaussianDistributionRadius3D(probability);

        marker.scale.x = 2. * (probability_radius * std::sqrt(r[0]) + offset[i]);
        marker.scale.y = 2. * (probability_radius * std::sqrt(r[1]) + offset[i]);
        marker.scale.z = 2. * (probability_radius * std::sqrt(r[2]) + offset[i]);

        marker.color.r = 1.0;
        marker.color.g = 0.0;
        marker.color.b = 0.0;
        marker.color.a = 0.25;

        marker.lifetime = ros::Duration();

        marker_array.markers.push_back(marker);
    }

    publish(marker_array);
}
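Since a covariance matrix is symmetric positive semi-definite, an eigendecomposition yields the same axes and radii as the SVD above; a sketch (sigma_i stands for sigma[i]):

Eigen::SelfAdjointEigenSolver<Eigen::Matrix3d> es(sigma_i);
Eigen::Vector3d radii = es.eigenvalues().cwiseMax(0.0).cwiseSqrt(); // guard tiny negatives
Eigen::Matrix3d Q = es.eigenvectors();
if (Q.determinant() < 0)
    Q.col(2) *= -1.0; // proper rotation before building the quaternion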
Example #16
void MarkerArrayVisualizer::drawEllipsoids(const char* ns, const std::vector<Eigen::Vector3d>& center, const std::vector<Eigen::Matrix3d>& A)
{
    visualization_msgs::MarkerArray marker_array;

    const int size = center.size();

    for (int i=0; i<size; i++)
    {
        visualization_msgs::Marker marker;

        marker.header.frame_id = "/world";
        marker.header.stamp = ros::Time::now();

        marker.ns = ns;
        marker.id = i;
        marker.type = visualization_msgs::Marker::SPHERE;
        marker.action = visualization_msgs::Marker::ADD;

        // axis: eigenvectors
        // radius: eigenvalues
        Eigen::JacobiSVD<Eigen::MatrixXd> svd(A[i], Eigen::ComputeThinU | Eigen::ComputeThinV);
        const Eigen::VectorXd& r = svd.singularValues();
        Eigen::Matrix3d Q = svd.matrixU();

        // to make determinant 1
        if (Q.determinant() < 0)
            Q.col(2) *= -1.;
        const Eigen::Quaterniond q(Q);

        marker.pose.position.x = center[i](0);
        marker.pose.position.y = center[i](1);
        marker.pose.position.z = center[i](2);
        marker.pose.orientation.x = q.x();
        marker.pose.orientation.y = q.y();
        marker.pose.orientation.z = q.z();
        marker.pose.orientation.w = q.w();

        marker.scale.x = 2. * r(0);
        marker.scale.y = 2. * r(1);
        marker.scale.z = 2. * r(2);

        marker.color.r = 1.0;
        marker.color.g = 0.0;
        marker.color.b = 0.0;
        marker.color.a = 0.2;

        marker.lifetime = ros::Duration();

        marker_array.markers.push_back(marker);
    }

    publish(marker_array);
}
Example #17
std::vector<tf::Quaternion> Reaching::calculateHandOrientations(const GraspEigen& grasp)
{
  // calculate first hand orientation
  Eigen::Matrix3d R = Eigen::MatrixXd::Zero(3, 3);
  R.col(0) = -1.0 * grasp.approach_;
  R.col(1) = grasp.axis_;
  R.col(2) << R.col(0).cross(R.col(1));
	
  // rotate by 180deg around the grasp approach vector to get the "opposite" hand orientation
  Eigen::Transform<double, 3, Eigen::Affine> T(Eigen::AngleAxis<double>(M_PI, grasp.approach_));

  // calculate second hand orientation
  Eigen::Matrix3d Q = Eigen::MatrixXd::Zero(3, 3);
  Q.col(0) = T * grasp.approach_;
  Q.col(1) = T * grasp.axis_;
  Q.col(2) << Q.col(0).cross(Q.col(1));

  // reorder rotation matrix columns according to axes ordering of the robot hand
  Eigen::Matrix3d R1 = reorderHandAxes(R);
  Eigen::Matrix3d R2 = reorderHandAxes(Q);

  // convert Eigen rotation matrices to TF quaternions and normalize them
  tf::Matrix3x3 TF1, TF2;
  tf::matrixEigenToTF(R1, TF1);
  tf::matrixEigenToTF(R2, TF2);
  tf::Quaternion quat1, quat2;
  TF1.getRotation(quat1);
  TF2.getRotation(quat2);
  quat1.normalize();
  quat2.normalize();

  std::vector<tf::Quaternion> quats;
  quats.push_back(quat1);
  quats.push_back(quat2);
  
  // for debugging
  //tf::TransformBroadcaster br;
  //tf::Transform transform;
  //transform.setOrigin( tf::Vector3(grasp.center_(0), grasp.center_(1), grasp.center_(2)) );
  //transform.setRotation( quat1 );
  //br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "base", "carrot1"));
  //ros::Duration(1.0).sleep();
  //transform.setRotation( quat2 );
  //br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "base", "carrot2"));
  //ros::Duration(1.0).sleep();
  //std::cout << "transform:\n" << transform.getOrigin() << std::endl;
  
  return quats;
}
Example #18
 /**
  * \brief create a marker from a chart and stores it into markers array
  */
 virtual void createNodeMarker(const Chart &c)
 {
     if (!markers){
         ROS_WARN_THROTTLE(30,"[ExplorerBase::%s]\tNo marker array to update is provided, visualization is disabled.",__func__);
         return;
     }
     visualization_msgs::Marker disc;
     disc.header.frame_id = mark_frame;
     disc.header.stamp = ros::Time();
     disc.lifetime = ros::Duration(0.5);
     disc.ns = "Atlas Nodes";
     disc.id = c.getId();
     disc.type = visualization_msgs::Marker::CYLINDER;
     disc.action = visualization_msgs::Marker::ADD;
     disc.scale.x = c.getRadius()*2;
     disc.scale.y = c.getRadius()*2;
     disc.scale.z = 0.001;
     disc.color.a = 0.75;
     disc.color.r = 0.3;
     disc.color.b = 0.35;
     disc.color.g = 0.5;
     Eigen::Matrix3d rot;
     rot.col(0) =  c.getTanBasisOne();
     rot.col(1) =  c.getTanBasisTwo();
     rot.col(2) =  c.getNormal();
     Eigen::Quaterniond q(rot);
     q.normalize();
     disc.pose.orientation.x = q.x();
     disc.pose.orientation.y = q.y();
     disc.pose.orientation.z = q.z();
     disc.pose.orientation.w = q.w();
     disc.pose.position.x = c.getCenter()[0];
     disc.pose.position.y = c.getCenter()[1];
     disc.pose.position.z = c.getCenter()[2];
     std::lock_guard<std::mutex> guard(*mtx_ptr);
     markers->markers.push_back(disc);
 }
Example #19
void
NurbsTools::pca (const vector_vec3d &data, Eigen::Vector3d &mean, Eigen::Matrix3d &eigenvectors,
                 Eigen::Vector3d &eigenvalues)
{
  if (data.empty ())
  {
    printf ("[NurbsTools::pca] Error, data is empty\n");
    abort ();
  }

  mean = computeMean (data);

  unsigned s = unsigned (data.size ());

  Eigen::MatrixXd Q (3, s);

  for (unsigned i = 0; i < s; i++)
    Q.col (i) << (data[i] - mean);

  Eigen::Matrix3d C = Q * Q.transpose ();

  Eigen::SelfAdjointEigenSolver<Eigen::Matrix3d> eigensolver (C);
  if (eigensolver.info () != Success)
  {
    printf ("[NurbsTools::pca] Can not find eigenvalues.\n");
    abort ();
  }

  for (int i = 0; i < 3; ++i)
  {
    eigenvalues (i) = eigensolver.eigenvalues () (2 - i);
    if (i == 2)
      eigenvectors.col (2) = eigenvectors.col (0).cross (eigenvectors.col (1));
    else
      eigenvectors.col (i) = eigensolver.eigenvectors ().col (2 - i);
  }
}
Example #20
ON_NurbsSurface
FittingSurface::initNurbsPCA (int order, NurbsDataSurface *m_data, Eigen::Vector3d z)
{
  Eigen::Vector3d mean;
  Eigen::Matrix3d eigenvectors;
  Eigen::Vector3d eigenvalues;

  unsigned s = m_data->interior.size ();

  NurbsTools::pca (m_data->interior, mean, eigenvectors, eigenvalues);

  m_data->mean = mean;
  m_data->eigenvectors = eigenvectors;

  bool flip (false);
  if (eigenvectors.col (2).dot (z) < 0.0)
    flip = true;

  eigenvalues = eigenvalues / s; // seems that the eigenvalues are dependent on the number of points (???)

  Eigen::Vector3d sigma (sqrt (eigenvalues (0)), sqrt (eigenvalues (1)), sqrt (eigenvalues (2)));

  ON_NurbsSurface nurbs (3, false, order, order, order, order);
  nurbs.MakeClampedUniformKnotVector (0, 1.0);
  nurbs.MakeClampedUniformKnotVector (1, 1.0);

  // +- 2 sigma -> covers 95.45 % of all measurements
  double dcu = (4.0 * sigma (0)) / (nurbs.Order (0) - 1);
  double dcv = (4.0 * sigma (1)) / (nurbs.Order (1) - 1);

  Eigen::Vector3d cv_t, cv;
  for (int i = 0; i < nurbs.Order (0); i++)
  {
    for (int j = 0; j < nurbs.Order (1); j++)
    {
      cv (0) = -2.0 * sigma (0) + dcu * i;
      cv (1) = -2.0 * sigma (1) + dcv * j;
      cv (2) = 0.0;
      cv_t = eigenvectors * cv + mean;
      if (flip)
        nurbs.SetCV (nurbs.Order (0) - 1 - i, j, ON_3dPoint (cv_t (0), cv_t (1), cv_t (2)));
      else
        nurbs.SetCV (i, j, ON_3dPoint (cv_t (0), cv_t (1), cv_t (2)));
    }
  }
  return nurbs;
}
Example #21
template<typename PointT> void
pcl::GeneralizedIterativeClosestPoint<PointSource, PointTarget>::computeCovariances(typename pcl::PointCloud<PointT>::ConstPtr cloud, 
                                                                                    const typename pcl::search::KdTree<PointT>::Ptr kdtree,
                                                                                    std::vector<Eigen::Matrix3d>& cloud_covariances)
{
  if (k_correspondences_ > int (cloud->size ()))
  {
    PCL_ERROR ("[pcl::GeneralizedIterativeClosestPoint::computeCovariances] Number of points in cloud (%lu) is less than k_correspondences_ (%lu)!\n", cloud->size (), k_correspondences_);
    return;
  }

  Eigen::Vector3d mean;
  std::vector<int> nn_indecies; nn_indecies.reserve (k_correspondences_);
  std::vector<float> nn_dist_sq; nn_dist_sq.reserve (k_correspondences_);

  // We should never get there but who knows
  if(cloud_covariances.size () < cloud->size ())
    cloud_covariances.resize (cloud->size ());

  typename pcl::PointCloud<PointT>::const_iterator points_iterator = cloud->begin ();
  std::vector<Eigen::Matrix3d>::iterator matrices_iterator = cloud_covariances.begin ();
  for(;
      points_iterator != cloud->end ();
      ++points_iterator, ++matrices_iterator)
  {
    const PointT &query_point = *points_iterator;
    Eigen::Matrix3d &cov = *matrices_iterator;
    // Zero out the cov and mean
    cov.setZero ();
    mean.setZero ();

    // Search for the K nearest neighbours
    kdtree->nearestKSearch(query_point, k_correspondences_, nn_indecies, nn_dist_sq);
    
    // Find the covariance matrix
    for(int j = 0; j < k_correspondences_; j++) {
      const PointT &pt = (*cloud)[nn_indecies[j]];
      
      mean[0] += pt.x;
      mean[1] += pt.y;
      mean[2] += pt.z;
      
      cov(0,0) += pt.x*pt.x;
      
      cov(1,0) += pt.y*pt.x;
      cov(1,1) += pt.y*pt.y;
      
      cov(2,0) += pt.z*pt.x;
      cov(2,1) += pt.z*pt.y;
      cov(2,2) += pt.z*pt.z;    
    }
  
    mean /= static_cast<double> (k_correspondences_);
    // Get the actual covariance
    for (int k = 0; k < 3; k++)
      for (int l = 0; l <= k; l++) 
      {
        cov(k,l) /= static_cast<double> (k_correspondences_);
        cov(k,l) -= mean[k]*mean[l];
        cov(l,k) = cov(k,l);
      }
    
    // Compute the SVD (covariance matrix is symmetric so U = V')
    Eigen::JacobiSVD<Eigen::Matrix3d> svd(cov, Eigen::ComputeFullU);
    cov.setZero ();
    Eigen::Matrix3d U = svd.matrixU ();
    // Reconstitute the covariance matrix with modified singular values using the column vectors in U.
    for(int k = 0; k < 3; k++) {
      Eigen::Vector3d col = U.col(k);
      double v = 1.; // biggest 2 singular values replaced by 1
      if(k == 2)   // smallest singular value replaced by gicp_epsilon
        v = gicp_epsilon_;
      cov+= v * col * col.transpose(); 
    }
  }
}
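The reconstruction loop above implements the GICP plane-to-plane regularisation: each covariance is rebuilt as U * diag(1, 1, epsilon) * U^T. An equivalent compact sketch:

Eigen::JacobiSVD<Eigen::Matrix3d> svd(cov, Eigen::ComputeFullU);
Eigen::Vector3d vals(1.0, 1.0, gicp_epsilon_); // keep the two dominant directions, flatten the third
cov = svd.matrixU() * vals.asDiagonal() * svd.matrixU().transpose();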
Example #22
void EKFOA::process(const double delta_t, cv::Mat & frame, Eigen::Vector3d & rW, Eigen::Vector4d & qWR, Eigen::Matrix3d & axes_orientation_and_confidence, std::vector<Point3d> (& XYZs)[3], Delaunay & triangulation, Point3d & closest_point){
	double time_total;
	std::vector<cv::Point2f> features_to_add;
	std::vector<Features_extra> features_extra;

	/*
	 * EKF prediction (state and measurement prediction)
	 */
	time_total = (double)cv::getTickCount();
	double time_prediction = (double)cv::getTickCount();
	filter.predict_state_and_covariance(delta_t);
	filter.compute_features_h(cam, features_extra);
	time_prediction = (double)cv::getTickCount() - time_prediction;
//	std::cout << "predict = " << time_prediction/((double)cvGetTickFrequency()*1000.) << "ms" << std::endl;

	/*
	 * Sense and map management (delete features from EKF)
	 */
	double time_tracker = (double)cv::getTickCount();
	motion_tracker.process(frame, features_extra, features_to_add);
	//TODO: Why is optical flow returning points outside the image???
	time_tracker = (double)cv::getTickCount() - time_tracker;

	time_total = time_total + time_tracker; //do not count the time spent by the tracker

	//Delete no longer seen features from the state, covariance matrix and the features_extra:
	double time_del = (double)cv::getTickCount();
	filter.delete_features(features_extra);
	time_del = (double)cv::getTickCount() - time_del;
//	std::cout << "delete  = " << time_del/((double)cvGetTickFrequency()*1000.) << "ms" << std::endl;

	/*
	 * EKF Update step and map management (add new features to EKF)
	 */
	double time_update = (double)cv::getTickCount();
	filter.update(cam, features_extra);
	time_update = (double)cv::getTickCount() - time_update;
//	std::cout << "update  = " << time_update/((double)cvGetTickFrequency()*1000.) << "ms" << std::endl;


	//Add new features
	double time_add = (double)cv::getTickCount();
	filter.add_features_inverse_depth(cam, features_to_add);
	time_add = (double)cv::getTickCount() - time_add;
//	std::cout << "add_fea = " << time_add/((double)cvGetTickFrequency()*1000.) << "ms" << std::endl;


	/*
	 * Triangulation, surface and GUI data setting:
	 */
	double time_triangulation = (double)cv::getTickCount();

	std::vector< std::pair<Point2d, size_t> > triangle_list;
	std::list<Triangle> triangles_list_3d;

	const Eigen::VectorXd & x_k_k = filter.x_k_k();
	const Eigen::MatrixXd & p_k_k = filter.p_k_k();

	//Set the position, so the GUI can draw it:
	rW = x_k_k.segment<3>(0);//current position

	//Set the axes orientation and confidence:
	axes_orientation_and_confidence.setIdentity();//axes_orientation_and_confidence stores in each column one axis (X, Y, Z)
	axes_orientation_and_confidence *= 5; //make the lines larger, so they are actually informative
	//Apply rotation matrix:
	Eigen::Matrix3d qWR_R;//Rotation matrix of current orientation quaternion
	qWR = x_k_k.segment<4>(3);
	MotionModel::quaternion_matrix(qWR, qWR_R);
	axes_orientation_and_confidence.applyOnTheLeft(qWR_R); // == R * axes_orientation_and_confidence
	for (int axis=0 ; axis<axes_orientation_and_confidence.cols() ; axis++){
		//Set the length to be 3*sigma:
		axes_orientation_and_confidence.col(axis) *= 3*std::sqrt(p_k_k(axis, axis)); //the first 3 positions of the cov matrix define the confidence for the position
		//Translate origin:
		axes_orientation_and_confidence.col(axis) += rW;
	}

	int num_features = (x_k_k.rows()-13)/6;
	XYZs[0].resize(num_features);
	XYZs[1].resize(num_features);
	XYZs[2].resize(num_features);


	//Compute the 3d positions and inverse depth variances of all the points in the state
	int i=0; //Feature counter
	for (int start_feature=13 ; start_feature<x_k_k.rows() ; start_feature+=6){
		const int feature_inv_depth_index = start_feature + 5;

		//As with any normal distribution, nearly all (99.73%) of the possible depths lie within three standard deviations of the mean!
		const double sigma_3 = std::sqrt(p_k_k(feature_inv_depth_index, feature_inv_depth_index)); //sqrt(depth_variance)

		const Eigen::VectorXd & yi = x_k_k.segment(start_feature, 6);
		Eigen::VectorXd point_close(x_k_k.segment(start_feature, 6));
		Eigen::VectorXd point_far(x_k_k.segment(start_feature, 6));

		//Change the depth of the feature copy, so that it is possible to represent the range between -3*sigma and 3*sigma:
		point_close(5) += sigma_3;
		point_far(5) -= sigma_3;

		Eigen::Vector3d XYZ_mu = (Feature::compute_cartesian(yi)); //mu (mean)
		Eigen::Vector3d XYZ_close = (Feature::compute_cartesian(point_close)); //mean + 3*sigma. (since inverted signs are also inverted)
		Eigen::Vector3d XYZ_far = (Feature::compute_cartesian(point_far)); //mean - 3*sigma

		//The center of the model is ALWAYS the current position of the camera/robot, so have to 'cancel' the current orientation (R_inv) and translation (rWC = x_k_k.head(3)):
		//Note: It is nicer to do this in the GUI class, as it is only a presention/perspective change. But due to the structure, it was easier to do it here.
		XYZs[0][i] = Point3d(XYZ_mu(0), XYZ_mu(1), XYZ_mu(2)); //mu (mean)
		XYZs[1][i] = Point3d(XYZ_close(0), XYZ_close(1), XYZ_close(2)); //mean + 3*sigma. (since inverted signs are also inverted)
		XYZs[2][i] = Point3d(XYZ_far(0), XYZ_far(1), XYZ_far(2)); //mean - 3*sigma

		//If the size that contains the 99.73% of the inverse depth distribution is smaller than the current inverse depth, add it to the surface:
		const double size_sigma_3 = std::abs(1.0/(x_k_k(feature_inv_depth_index)-sigma_3) - 1.0/(x_k_k(feature_inv_depth_index)+sigma_3));
		if (size_sigma_3 < 1/x_k_k(feature_inv_depth_index)){
			triangle_list.push_back(std::make_pair(Point2d(features_extra[i].z(0), features_extra[i].z(1)), i));
		}

		if (x_k_k(feature_inv_depth_index) < 0 ){
			std::cout << "feature behind the camera!!! : idx=" << i << ", value=" << x_k_k(feature_inv_depth_index) << std::endl;
		}
		i++;
	}

	triangulation.insert(triangle_list.begin(), triangle_list.end());

	cv::Scalar delaunay_color = cv::Scalar(255, 0, 0); //blue
	for(Delaunay::Finite_faces_iterator fit = triangulation.finite_faces_begin(); fit != triangulation.finite_faces_end(); ++fit) {
		const Delaunay::Face_handle & face = fit;
		//face->vertex(i)->info() = index of the point in the observation list.
		line(frame, features_extra[face->vertex(0)->info()].z_cv, features_extra[face->vertex(1)->info()].z_cv, delaunay_color, 1);
		line(frame, features_extra[face->vertex(1)->info()].z_cv, features_extra[face->vertex(2)->info()].z_cv, delaunay_color, 1);
		line(frame, features_extra[face->vertex(2)->info()].z_cv, features_extra[face->vertex(0)->info()].z_cv, delaunay_color, 1);

		//Add the face of the linked 3d points of this 2d triangle:
		triangles_list_3d.push_back(Triangle(XYZs[1][face->vertex(0)->info()], XYZs[1][face->vertex(1)->info()], XYZs[1][face->vertex(2)->info()])); //XYZs[1] == close
	}

	// constructs AABB tree
	Tree tree(triangles_list_3d.begin(), triangles_list_3d.end());

	if (tree.size()>0){
		// compute closest point and squared distance
		Point3d point_query(rW[0], rW[1], rW[2]);
		closest_point = tree.closest_point(point_query);
//		FT sqd = tree.squared_distance(point_query);

		Eigen::Vector3d last_displacement_vector = last_position - rW;

//		double repealing_force = 0;
//		if (std::sqrt(sqd) < 0.2){
//			std::cout << "can crash! " << std::endl;
//			repealing_force = 1/std::sqrt(sqd);
//		}
//		std::cout << "distance = [distance, " << std::sqrt(sqd) << "];" << std::endl;
//		std::cout << "repealing_force = [repealing_force, " << repealing_force << "];" << std::endl;
	}


	//remember this position
	last_position = rW;
//	std::cout << "certaint= " << p_k_k.diagonal().sum() << std::endl;

	time_triangulation = (double)cv::getTickCount() - time_triangulation;
//	std::cout << "Triang  = " << time_triangulation/((double)cvGetTickFrequency()*1000.) << "ms" << std::endl;

	time_total = (double)cv::getTickCount() - time_total;
//	std::cout << "EKF     = " << time_total/((double)cvGetTickFrequency()*1000.) << "ms" << std::endl;

//	std::cout << "tracker = " << time_tracker/((double)cvGetTickFrequency()*1000.) << "ms" << std::endl;
}
Example #23
 void apply_small_rotation(Eigen::Matrix3d &rotation, double h, int i, int j)
 {
     const Eigen::Vector3d col_l = rotation.col(j) - rotation.col(i)*h;
     rotation.col(i) += rotation.col(j) * h;
     rotation.col(j) = col_l;
 }
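apply_small_rotation mixes columns i and j by a small angle h, which is correct only to first order, so repeated calls drift off the rotation group. A hedged re-orthonormalisation sketch via a quaternion round-trip:

Eigen::Quaterniond q(rotation);               // nearest-quaternion projection
rotation = q.normalized().toRotationMatrix(); // back to a proper rotation matrix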
Example #24
void init_poses() {
    ROS_INFO("getting transforms from camera to PSMs");
    //tf::TransformListener tfListener;
    tf::StampedTransform tfResult_one, tfResult_two;
    g_tfListener_ptr = new tf::TransformListener;
    bool tferr = true;
    int ntries = 0;
    ROS_INFO("waiting for tf between base and camera...");
    while (tferr) {
        if (ntries > 5) break; //give up and accept default after this many tries
        tferr = false;
        try {
            //The direction of the transform returned will be from the target_frame to the source_frame.
            //Which if applied to data, will transform data in the source_frame into the target_frame. See tf/CoordinateFrameConventions#Transform_Direction
            g_tfListener_ptr->lookupTransform("left_camera_optical_frame", "one_psm_base_link", ros::Time(0), tfResult_one);
            g_tfListener_ptr->lookupTransform("left_camera_optical_frame", "two_psm_base_link", ros::Time(0), tfResult_two);
        } catch (tf::TransformException &exception) {
            ROS_WARN("%s", exception.what());
            tferr = true;
            ros::Duration(0.5).sleep(); // sleep for half a second
            ros::spinOnce();
            ntries++;
        }
    }
    //default transform: need to match this up to camera calibration!
    if (tferr) {
        g_affine_lcamera_to_psm_one.translation() << -0.155, -0.03265, 0.0;
        Eigen::Vector3d nvec, tvec, bvec;
        nvec << -1, 0, 0;
        tvec << 0, 1, 0;
        bvec << 0, 0, -1;
        Eigen::Matrix3d R;
        R.col(0) = nvec;
        R.col(1) = tvec;
        R.col(2) = bvec;
        g_affine_lcamera_to_psm_one.linear() = R;
        g_affine_lcamera_to_psm_two.linear() = R;
        g_affine_lcamera_to_psm_two.translation() << 0.145, -0.03265, 0.0;
        ROS_WARN("using default transform");
    } else {

        ROS_INFO("tf is good");

        //g_affine_lcamera_to_psm_one is the position/orientation of psm1 base frame w/rt left camera link frame
        // need to extend this to camera optical frame
        g_affine_lcamera_to_psm_one = transformTFToEigen(tfResult_one);
        g_affine_lcamera_to_psm_two = transformTFToEigen(tfResult_two);
    }
    ROS_INFO("transform from left camera to psm one:");
    cout << g_affine_lcamera_to_psm_one.linear() << endl;
    cout << g_affine_lcamera_to_psm_one.translation().transpose() << endl;
    ROS_INFO("transform from left camera to psm two:");
    cout << g_affine_lcamera_to_psm_two.linear() << endl;
    cout << g_affine_lcamera_to_psm_two.translation().transpose() << endl;

    //now get initial poses:
    ROS_INFO("waiting for tf between base and grippers...");
    while (tferr) {
        if (ntries > 5) break; //give up and accept default after this many tries
        tferr = false;
        try {
            //The direction of the transform returned will be from the target_frame to the source_frame.
            //Which if applied to data, will transform data in the source_frame into the target_frame. See tf/CoordinateFrameConventions#Transform_Direction
            g_tfListener_ptr->lookupTransform("left_camera_optical_frame", "one_tool_tip_link", ros::Time(0), tfResult_one);
            g_tfListener_ptr->lookupTransform("left_camera_optical_frame", "two_tool_tip_link", ros::Time(0), tfResult_two);
        } catch (tf::TransformException &exception) {
            ROS_WARN("%s", exception.what());
            tferr = true;
            ros::Duration(0.5).sleep(); // sleep for half a second
            ros::spinOnce();
            ntries++;
        }
    }
    //default start pose, if can't get tf:
    if (tferr) {
        g_psm1_start_pose.translation() << -0.02, 0, 0.04;
        g_psm2_start_pose.translation() << 0.02, 0, 0.04;
        Eigen::Vector3d nvec, tvec, bvec;
        nvec << -1, 0, 0;
        tvec << 0, 1, 0;
        bvec << 0, 0, -1;
        Eigen::Matrix3d R;
        R.col(0) = nvec;
        R.col(1) = tvec;
        R.col(2) = bvec;
        g_psm1_start_pose.linear() = R;
        g_psm2_start_pose.linear() = R;
        ROS_WARN("using default start poses");
    } else {

        ROS_INFO("tf is good");

        //g_affine_lcamera_to_psm_one is the position/orientation of psm1 base frame w/rt left camera link frame
        // need to extend this to camera optical frame
        g_psm1_start_pose = transformTFToEigen(tfResult_one);
        g_psm2_start_pose = transformTFToEigen(tfResult_two);
    }
    ROS_INFO("psm1 gripper start pose:");
    cout << g_psm1_start_pose.linear() << endl;
    cout << g_psm1_start_pose.translation().transpose() << endl;
    ROS_INFO("psm2 gripper start pose:");
    cout << g_psm2_start_pose.linear() << endl;
    cout << g_psm2_start_pose.translation().transpose() << endl;

    //just to be safe:
    g_O_entry_point(0) = 0.0;
    g_O_entry_point(1) = 0.0;
    g_O_entry_point(2) = 0.12;
    g_O_exit_point = g_O_entry_point;
    g_O_exit_point(0) += 0.02;


}
Example #25
ON_NurbsSurface
FittingSurface::initNurbsPCABoundingBox (int order, NurbsDataSurface *m_data, ON_3dVector z)
{
  ON_3dVector mean;
  Eigen::Matrix3d eigenvectors;
  Eigen::Vector3d eigenvalues;

  unsigned s = m_data->interior.size ();
  m_data->interior_param.clear ();

  NurbsTools::pca (m_data->interior, mean, eigenvectors, eigenvalues);

  m_data->mean = mean;
  //m_data->eigenvectors = (*eigenvectors);

  bool flip (false);
  Eigen::Vector3d ez(z[0],z[1],z[2]);
  if (eigenvectors.col (2).dot (ez) < 0.0)
    flip = true;

  eigenvalues = eigenvalues / s; // seems that the eigenvalues are dependent on the number of points (???)
  Eigen::Matrix3d eigenvectors_inv = eigenvectors.inverse ();

  ON_3dVector v_max(0.0, 0.0, 0.0);
  ON_3dVector v_min(DBL_MAX, DBL_MAX, DBL_MAX);
  Eigen::Vector3d emean(mean[0], mean[1], mean[2]);
  for (unsigned i = 0; i < s; i++)
  {
    Eigen::Vector3d eint(m_data->interior[i][0], m_data->interior[i][1], m_data->interior[i][2]);
    Eigen::Vector3d ep = eigenvectors_inv * (eint - emean);
    ON_3dPoint p(ep (0), ep (1), ep(2));
    m_data->interior_param.push_back (ON_2dPoint(p[0], p[1]));

    if (p[0] > v_max[0])
      v_max[0] = p[0];
    if (p[1] > v_max[1])
      v_max[1] = p[1];
    if (p[2] > v_max[2])
      v_max[2] = p[2];

    if (p[0] < v_min[0])
      v_min[0] = p[0];
    if (p[1] < v_min[1])
      v_min[1] = p[1];
    if (p[2] < v_min[2])
      v_min[2] = p[2];
  }

  for (unsigned i = 0; i < s; i++)
  {
    ON_2dVector &p = m_data->interior_param[i];
    if (v_max[0] > v_min[0] && v_max[1] > v_min[1])
    {
      p[0] = (p[0] - v_min[0]) / (v_max[0] - v_min[0]);
      p[1] = (p[1] - v_min[1]) / (v_max[1] - v_min[1]);
    }
    else
    {
      throw std::runtime_error ("[NurbsTools::initNurbsPCABoundingBox] Error: v_max <= v_min");
    }
  }

  ON_NurbsSurface nurbs (3, false, order, order, order, order);

  nurbs.MakeClampedUniformKnotVector (0, 1.0);
  nurbs.MakeClampedUniformKnotVector (1, 1.0);

  double dcu = (v_max[0] - v_min[0]) / (nurbs.Order (0) - 1);
  double dcv = (v_max[1] - v_min[1]) / (nurbs.Order (1) - 1);

  ON_3dPoint cv_t, cv;
  Eigen::Vector3d ecv_t2, ecv2;
  Eigen::Vector3d emean2(mean[0],mean[1],mean[2]);
  for (int i = 0; i < nurbs.Order (0); i++)
  {
    for (int j = 0; j < nurbs.Order (1); j++)
    {
      cv[0] = v_min[0] + dcu * i;
      cv[1] = v_min[1] + dcv * j;
      cv[2] = 0.0;
      ecv2 (0) = cv[0];
      ecv2 (1) = cv[1];
      ecv2 (2) = cv[2];
      ecv_t2 = eigenvectors * ecv2 + emean2;
      cv_t[0] = ecv_t2 (0);
      cv_t[1] = ecv_t2 (1);
      cv_t[2] = ecv_t2 (2);
      if (flip)
	nurbs.SetCV (nurbs.Order (0) - 1 - i, j, cv_t);
      else
	nurbs.SetCV (i, j, cv_t);
    }
  }
  return nurbs;
}
Example #26
int P3P::computePoses(const Eigen::Matrix3d & feature_vectors, const Eigen::Matrix3d & world_points,
                      Eigen::Matrix<Eigen::Matrix<double, 3, 4>, 4, 1> & solutions)
{
  // Extraction of world points
  Eigen::Vector3d P1 = world_points.col(0);
  Eigen::Vector3d P2 = world_points.col(1);
  Eigen::Vector3d P3 = world_points.col(2);

  // Verification that world points are not colinear
  Eigen::Vector3d temp1 = P2 - P1;
  Eigen::Vector3d temp2 = P3 - P1;

  if (temp1.cross(temp2).norm() == 0)
  {
    return -1;
  }

  // Extraction of feature vectors
  Eigen::Vector3d f1 = feature_vectors.col(0);
  Eigen::Vector3d f2 = feature_vectors.col(1);
  Eigen::Vector3d f3 = feature_vectors.col(2);

  // Creation of intermediate camera frame
  Eigen::Vector3d e1 = f1;
  Eigen::Vector3d e3 = f1.cross(f2);
  e3 = e3 / e3.norm();
  Eigen::Vector3d e2 = e3.cross(e1);

  Eigen::Matrix3d T;
  T.row(0) = e1.transpose();
  T.row(1) = e2.transpose();
  T.row(2) = e3.transpose();

  f3 = T * f3;

  // Reinforce that f3(2,0) > 0 for having theta in [0;pi]
  if (f3(2, 0) > 0)
  {
    f1 = feature_vectors.col(1);
    f2 = feature_vectors.col(0);
    f3 = feature_vectors.col(2);

    e1 = f1;
    e3 = f1.cross(f2);
    e3 = e3 / e3.norm();
    e2 = e3.cross(e1);

    T.row(0) = e1.transpose();
    T.row(1) = e2.transpose();
    T.row(2) = e3.transpose();

    f3 = T * f3;

    P1 = world_points.col(1);
    P2 = world_points.col(0);
    P3 = world_points.col(2);
  }

  // Creation of intermediate world frame
  Eigen::Vector3d n1 = P2 - P1;
  n1 = n1 / n1.norm();
  Eigen::Vector3d n3 = n1.cross(P3 - P1);
  n3 = n3 / n3.norm();
  Eigen::Vector3d n2 = n3.cross(n1);

  Eigen::Matrix3d N;
  N.row(0) = n1.transpose();
  N.row(1) = n2.transpose();
  N.row(2) = n3.transpose();

  // Extraction of known parameters
  P3 = N * (P3 - P1);

  double d_12 = (P2 - P1).norm();
  double f_1 = f3(0, 0) / f3(2, 0);
  double f_2 = f3(1, 0) / f3(2, 0);
  double p_1 = P3(0, 0);
  double p_2 = P3(1, 0);

  double cos_beta = f1.dot(f2);
  double b = 1 / (1 - pow(cos_beta, 2)) - 1;

  if (cos_beta < 0)
  {
    b = -sqrt(b);
  }
  else
  {
    b = sqrt(b);
  }

  // Definition of temporary variables for avoiding multiple computation
  double f_1_pw2 = pow(f_1, 2);
  double f_2_pw2 = pow(f_2, 2);
  double p_1_pw2 = pow(p_1, 2);
  double p_1_pw3 = p_1_pw2 * p_1;
  double p_1_pw4 = p_1_pw3 * p_1;
  double p_2_pw2 = pow(p_2, 2);
  double p_2_pw3 = p_2_pw2 * p_2;
  double p_2_pw4 = p_2_pw3 * p_2;
  double d_12_pw2 = pow(d_12, 2);
  double b_pw2 = pow(b, 2);

  // Computation of factors of 4th degree polynomial
  Eigen::Matrix<double, 5, 1> factors;

  factors(0) = -f_2_pw2 * p_2_pw4 - p_2_pw4 * f_1_pw2 - p_2_pw4;

  factors(1) = 2 * p_2_pw3 * d_12 * b + 2 * f_2_pw2 * p_2_pw3 * d_12 * b - 2 * f_2 * p_2_pw3 * f_1 * d_12;

  factors(2) = -f_2_pw2 * p_2_pw2 * p_1_pw2 - f_2_pw2 * p_2_pw2 * d_12_pw2 * b_pw2 - f_2_pw2 * p_2_pw2 * d_12_pw2
      + f_2_pw2 * p_2_pw4 + p_2_pw4 * f_1_pw2 + 2 * p_1 * p_2_pw2 * d_12 + 2 * f_1 * f_2 * p_1 * p_2_pw2 * d_12 * b
      - p_2_pw2 * p_1_pw2 * f_1_pw2 + 2 * p_1 * p_2_pw2 * f_2_pw2 * d_12 - p_2_pw2 * d_12_pw2 * b_pw2
      - 2 * p_1_pw2 * p_2_pw2;

  factors(3) = 2 * p_1_pw2 * p_2 * d_12 * b + 2 * f_2 * p_2_pw3 * f_1 * d_12 - 2 * f_2_pw2 * p_2_pw3 * d_12 * b
      - 2 * p_1 * p_2 * d_12_pw2 * b;

  factors(4) = -2 * f_2 * p_2_pw2 * f_1 * p_1 * d_12 * b + f_2_pw2 * p_2_pw2 * d_12_pw2 + 2 * p_1_pw3 * d_12
      - p_1_pw2 * d_12_pw2 + f_2_pw2 * p_2_pw2 * p_1_pw2 - p_1_pw4 - 2 * f_2_pw2 * p_2_pw2 * p_1 * d_12
      + p_2_pw2 * f_1_pw2 * p_1_pw2 + f_2_pw2 * p_2_pw2 * d_12_pw2 * b_pw2;

  // Computation of roots
  Eigen::Matrix<double, 4, 1> realRoots;

  P3P::solveQuartic(factors, realRoots);

  // Backsubstitution of each solution
  for (int i = 0; i < 4; ++i)
  {
    double cot_alpha = (-f_1 * p_1 / f_2 - realRoots(i) * p_2 + d_12 * b)
        / (-f_1 * realRoots(i) * p_2 / f_2 + p_1 - d_12);

    double cos_theta = realRoots(i);
    double sin_theta = sqrt(1 - pow((double)realRoots(i), 2));
    double sin_alpha = sqrt(1 / (pow(cot_alpha, 2) + 1));
    double cos_alpha = sqrt(1 - pow(sin_alpha, 2));

    if (cot_alpha < 0)
    {
      cos_alpha = -cos_alpha;
    }

    Eigen::Vector3d C;
    C(0) = d_12 * cos_alpha * (sin_alpha * b + cos_alpha);
    C(1) = cos_theta * d_12 * sin_alpha * (sin_alpha * b + cos_alpha);
    C(2) = sin_theta * d_12 * sin_alpha * (sin_alpha * b + cos_alpha);

    C = P1 + N.transpose() * C;

    Eigen::Matrix3d R;
    R(0, 0) = -cos_alpha;
    R(0, 1) = -sin_alpha * cos_theta;
    R(0, 2) = -sin_alpha * sin_theta;
    R(1, 0) = sin_alpha;
    R(1, 1) = -cos_alpha * cos_theta;
    R(1, 2) = -cos_alpha * sin_theta;
    R(2, 0) = 0;
    R(2, 1) = -sin_theta;
    R(2, 2) = cos_theta;

    R = N.transpose() * R.transpose() * T;

    Eigen::Matrix<double, 3, 4> solution;
    solution.block<3, 3>(0, 0) = R;
    solution.col(3) = C;

    solutions(i) = solution;
  }

  return 0;
}
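A minimal call sketch: fill the columns of F with unit-norm bearing vectors and W with the matching world points (the matrices here are placeholders), then inspect each [R | C] candidate.

Eigen::Matrix3d F, W; // columns: f1, f2, f3 and P1, P2, P3 (must be filled with real data)
Eigen::Matrix<Eigen::Matrix<double, 3, 4>, 4, 1> solutions;
if (P3P::computePoses(F, W, solutions) == 0)
{
  Eigen::Matrix3d R = solutions(0).block<3, 3>(0, 0); // camera rotation
  Eigen::Vector3d C = solutions(0).col(3);            // camera centre in world frame
}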
Example #27
visualization_msgs::MarkerArray PCTrackContainer::toMarkerGMMs()
{

    double margin = 0.1;
    double stepsize = 0.01;

    visualization_msgs::MarkerArray gmmMarkers;
    int cnt = 0;
    for(int i=0;i<numTracks();i++){
//        if(tracks.at(i)->lastFrame().time == currentT){
            PCObject object;
            object = tracks.at(i)->lastFrame().object;
            int id = tracks.at(i)->id;
            // make a gmm eval points of the object
            for(int j=0;j<object.gmm.size();j++){
                cnt ++;
                visualization_msgs::Marker gmmMarker;
                gmmMarker.header.frame_id = "/origin";
                gmmMarker.header.stamp = ros::Time();
                gmmMarker.ns = "gmm";
                gmmMarker.id = cnt;

                Gaussian gmm = object.gmm.at(j);
                gmmMarker.type = visualization_msgs::Marker::SPHERE;
                gmmMarker.action = visualization_msgs::Marker::ADD;
                gmmMarker.lifetime = ros::Duration(0);
                gmmMarker.pose.position.x = gmm.mean[0];
                gmmMarker.pose.position.y = gmm.mean[1];
                gmmMarker.pose.position.z = gmm.mean[2];


                Eigen::SelfAdjointEigenSolver<Eigen::Matrix3d> eigensolver(gmm.covariance);
                Eigen::Vector3d eigenvalues = eigensolver.eigenvalues();
                Eigen::Matrix3d eigenvectors = eigensolver.eigenvectors();

                Eigen::Matrix3d rotation = eigenvectors;

                double r11 = rotation.col(0)[0];
                double r21 = rotation.col(0)[1];
                double r31 = rotation.col(0)[2];
                double r12 = rotation.col(1)[0];
                double r22 = rotation.col(1)[1];
                double r32 = rotation.col(1)[2];
                double r13 = rotation.col(2)[0];
                double r23 = rotation.col(2)[1];
                double r33 = rotation.col(2)[2];


                double q0 = ( r11 + r22 + r33 + 1.0f) / 4.0f;
                double q1 = ( r11 - r22 - r33 + 1.0f) / 4.0f;
                double q2 = (-r11 + r22 - r33 + 1.0f) / 4.0f;
                double q3 = (-r11 - r22 + r33 + 1.0f) / 4.0f;
                if(q0 < 0.0f) q0 = 0.0f;
                if(q1 < 0.0f) q1 = 0.0f;
                if(q2 < 0.0f) q2 = 0.0f;
                if(q3 < 0.0f) q3 = 0.0f;
                q0 = sqrt(q0);
                q1 = sqrt(q1);
                q2 = sqrt(q2);
                q3 = sqrt(q3);
                if(q0 >= q1 && q0 >= q2 && q0 >= q3) {
                    q0 *= +1.0f;
                    q1 *= SIGN(r32 - r23);
                    q2 *= SIGN(r13 - r31);
                    q3 *= SIGN(r21 - r12);
                } else if(q1 >= q0 && q1 >= q2 && q1 >= q3) {
                    q0 *= SIGN(r32 - r23);
                    q1 *= +1.0f;
                    q2 *= SIGN(r21 + r12);
                    q3 *= SIGN(r13 + r31);
                } else if(q2 >= q0 && q2 >= q1 && q2 >= q3) {
                    q0 *= SIGN(r13 - r31);
                    q1 *= SIGN(r21 + r12);
                    q2 *= +1.0f;
                    q3 *= SIGN(r32 + r23);
                } else if(q3 >= q0 && q3 >= q1 && q3 >= q2) {
                    q0 *= SIGN(r21 - r12);
                    q1 *= SIGN(r31 + r13);
                    q2 *= SIGN(r32 + r23);
                    q3 *= +1.0f;
                } else {
                    printf("coding error\n");
                }
                float rq = NORM(q0, q1, q2, q3);
                q0 /= rq;
                q1 /= rq;
                q2 /= rq;
                q3 /= rq;

                gmmMarker.pose.orientation.w = q0;
                gmmMarker.pose.orientation.x = q1;
                gmmMarker.pose.orientation.y = q2;
                gmmMarker.pose.orientation.z = q3;

                gmmMarker.frame_locked = 0;

                gmmMarker.scale.x = sqrt(eigenvalues[0])*2;
                gmmMarker.scale.y = sqrt(eigenvalues[1])*2;
                gmmMarker.scale.z = sqrt(eigenvalues[2])*2;

//                std::cout<<" scalex "<<gmmMarker.scale.x;
//                std::cout<<" scaley "<<gmmMarker.scale.y;
//                std::cout<<" scalez "<<gmmMarker.scale.z;
//                std::cout<<std::endl;
//                gmmMarker.scale.x = 0.02;
//                gmmMarker.scale.y = 0.02;
//                gmmMarker.scale.z = 0.02;

                gmmMarker.color.a = gmm.weight*object.gmm.size()/2;
//                gmmMarker.color.a = 1.0;
                gmmMarker.color.r = ((double)r[id])/256;
                gmmMarker.color.g = ((double)g[id])/256;
                gmmMarker.color.b = ((double)b[id])/256;

//                gmmMarker.color.r = 1.;
//                gmmMarker.color.g = 1.;
//                gmmMarker.color.b = 1.;

                gmmMarkers.markers.push_back(gmmMarker);

            }
            // delete old marker
            if(cnt < oldCnt){
                for(int j=cnt+1;j<=oldCnt;j++){
                    visualization_msgs::Marker gmmMarker;
                    gmmMarker.header.frame_id = "/origin";
                    gmmMarker.header.stamp = ros::Time();
                    gmmMarker.ns = "gmm";
                    gmmMarker.id = j;
                    gmmMarker.type = visualization_msgs::Marker::SPHERE;
                    gmmMarker.action = visualization_msgs::Marker::DELETE;
                    gmmMarker.lifetime = ros::Duration(0);
                    gmmMarkers.markers.push_back(gmmMarker);
                }
            }
            oldCnt = cnt;
//        }
    }
    return gmmMarkers;
}
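The hand-rolled matrix-to-quaternion conversion above can be replaced by Eigen's constructor, provided the eigenvector basis is first made a proper rotation; a sketch:

if (rotation.determinant() < 0)
    rotation.col(2) *= -1.0;            // eigenvectors may form a reflection
Eigen::Quaterniond q(rotation);
q.normalize();
gmmMarker.pose.orientation.w = q.w();
gmmMarker.pose.orientation.x = q.x();
gmmMarker.pose.orientation.y = q.y();
gmmMarker.pose.orientation.z = q.z();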
Example #28
ON_NurbsSurface
FittingSurface::initNurbsPCABoundingBox (int order, NurbsDataSurface *m_data, Eigen::Vector3d z)
{
  Eigen::Vector3d mean;
  Eigen::Matrix3d eigenvectors;
  Eigen::Vector3d eigenvalues;

  unsigned s = m_data->interior.size ();
  m_data->interior_param.clear ();

  NurbsTools::pca (m_data->interior, mean, eigenvectors, eigenvalues);

  m_data->mean = mean;
  m_data->eigenvectors = eigenvectors;

  bool flip (false);
  if (eigenvectors.col (2).dot (z) < 0.0)
    flip = true;

  eigenvalues = eigenvalues / s; // seems that the eigenvalues are dependent on the number of points (???)
  Eigen::Matrix3d eigenvectors_inv = eigenvectors.inverse ();

  Eigen::Vector3d v_max (0.0, 0.0, 0.0);
  Eigen::Vector3d v_min (DBL_MAX, DBL_MAX, DBL_MAX);
  for (unsigned i = 0; i < s; i++)
  {
    Eigen::Vector3d p = eigenvectors_inv * (m_data->interior[i] - mean);
    m_data->interior_param.push_back (Eigen::Vector2d (p (0), p (1)));

    if (p (0) > v_max (0))
      v_max (0) = p (0);
    if (p (1) > v_max (1))
      v_max (1) = p (1);
    if (p (2) > v_max (2))
      v_max (2) = p (2);

    if (p (0) < v_min (0))
      v_min (0) = p (0);
    if (p (1) < v_min (1))
      v_min (1) = p (1);
    if (p (2) < v_min (2))
      v_min (2) = p (2);
  }

  for (unsigned i = 0; i < s; i++)
  {
    Eigen::Vector2d &p = m_data->interior_param[i];
    if (v_max (0) > v_min (0) && v_max (1) > v_min (1))
    {
      p (0) = (p (0) - v_min (0)) / (v_max (0) - v_min (0));
      p (1) = (p (1) - v_min (1)) / (v_max (1) - v_min (1));
    }
    else
    {
      throw std::runtime_error ("[NurbsTools::initNurbsPCABoundingBox] Error: v_max <= v_min");
    }
  }

  ON_NurbsSurface nurbs (3, false, order, order, order, order);
  nurbs.MakeClampedUniformKnotVector (0, 1.0);
  nurbs.MakeClampedUniformKnotVector (1, 1.0);

  double dcu = (v_max (0) - v_min (0)) / (nurbs.Order (0) - 1);
  double dcv = (v_max (1) - v_min (1)) / (nurbs.Order (1) - 1);

  Eigen::Vector3d cv_t, cv;
  for (int i = 0; i < nurbs.Order (0); i++)
  {
    for (int j = 0; j < nurbs.Order (1); j++)
    {
      cv (0) = v_min (0) + dcu * i;
      cv (1) = v_min (1) + dcv * j;
      cv (2) = 0.0;
      cv_t = eigenvectors * cv + mean;
      if (flip)
        nurbs.SetCV (nurbs.Order (0) - 1 - i, j, ON_3dPoint (cv_t (0), cv_t (1), cv_t (2)));
      else
        nurbs.SetCV (i, j, ON_3dPoint (cv_t (0), cv_t (1), cv_t (2)));
    }
  }
  return nurbs;
}
Example #29
int main(int argc, char** argv) {
    // ROS set-ups:
    ros::init(argc, argv, "needle_planner_test_main"); //node name

    ros::NodeHandle nh; // create a node handle; need to pass this to the class constructor
    Eigen::Vector3d entrance_pt, exit_pt, tissue_normal;
    tissue_normal << 0, 0, -1; //antiparallel to optical axis
    entrance_pt << -0.1, 0.05, 0.1; //100mm under camera; slightly forward, to avoid jnt lims should be OK
    exit_pt << -0.09, 0.05, 0.1; // exit pt is shifted along camera-frame +x axis relative to entrance pt
    vector <Eigen::Affine3d> gripper_affines_wrt_camera; //put answers here 
    vector <Eigen::Affine3d> gripper2_affines_wrt_camera;
    
    vector <Eigen::Affine3d> vetted_gripper_affines_wrt_camera;

    ROS_INFO("instantiating  forward solver and an ik_solver");
    Davinci_fwd_solver davinci_fwd_solver; //instantiate a forward-kinematics solver    
    Davinci_IK_solver ik_solver;
    
    ofstream outfile;    
    outfile.open ("best_poses.dat");

    ROS_INFO("main: instantiating an object of type NeedlePlanner");
    NeedlePlanner needlePlanner;


    //compute the tissue frame in camera coords, based on point-cloud selections:
    needlePlanner.compute_tissue_frame_wrt_camera(entrance_pt, exit_pt, tissue_normal);
    //optional: manually set the needle grasp pose, else accept default from constructor
    //needlePlanner.set_affine_needle_frame_wrt_gripper_frame(affine);
    //optional: set the initial needle frame w/rt tissue frame (or accept default in constructor)
    //needlePlanner.set_affine_needle_frame_wrt_tissue(Eigen::Affine3d affine)
    // given grasp transform, tissue transform and initial needle pose w/rt tissue,
    // compute needle-drive path as rotation about needle-z axis:

    ROS_INFO("getting transforms from camera to PSMs");
    tf::TransformListener tfListener;
    tf::StampedTransform tfResult_one, tfResult_two;
    Eigen::Affine3d affine_lcamera_to_psm_one, affine_lcamera_to_psm_two, affine_gripper_wrt_base;
    bool tferr = true;
    int ntries = 0;
    ROS_INFO("waiting for tf between the left camera optical frame and the PSM base frames...");
    while (tferr) {
        if (ntries > 5) break; //give up and accept default after this many tries
        tferr = false;
        try {
            //The direction of the transform returned will be from the target_frame to the source_frame. 
            //Which if applied to data, will transform data in the source_frame into the target_frame. See tf/CoordinateFrameConventions#Transform_Direction
            tfListener.lookupTransform("left_camera_optical_frame", "one_psm_base_link", ros::Time(0), tfResult_one);
            tfListener.lookupTransform("left_camera_optical_frame", "two_psm_base_link", ros::Time(0), tfResult_two);
        } catch (tf::TransformException &exception) {
            ROS_WARN("%s", exception.what());
            tferr = true;
            ros::Duration(0.5).sleep(); // sleep for half a second
            ros::spinOnce();
            ntries++;
        }
    }
    //default transform: need to match this up to camera calibration!
    if (tferr) {
        affine_lcamera_to_psm_one.translation() << -0.155, -0.03265, 0.0;
        Eigen::Vector3d nvec, tvec, bvec;
        nvec << -1, 0, 0;
        tvec << 0, 1, 0;
        bvec << 0, 0, -1;
        Eigen::Matrix3d R;
        R.col(0) = nvec;
        R.col(1) = tvec;
        R.col(2) = bvec;
        affine_lcamera_to_psm_one.linear() = R;
        affine_lcamera_to_psm_two.linear() = R;
        affine_lcamera_to_psm_two.translation() << 0.145, -0.03265, 0.0;
        ROS_WARN("using default transform");
    } else {

        ROS_INFO("tf is good");

        //affine_lcamera_to_psm_one is the position/orientation of the psm1 base frame w/rt the left camera optical frame
        //(the lookups above target left_camera_optical_frame directly, so no further extension is needed)
        affine_lcamera_to_psm_one = transformTFToEigen(tfResult_one);
        affine_lcamera_to_psm_two = transformTFToEigen(tfResult_two);
    }
    ROS_INFO("transform from left camera to psm one:");
    cout << affine_lcamera_to_psm_one.linear() << endl;
    cout << affine_lcamera_to_psm_one.translation().transpose() << endl;
    ROS_INFO("transform from left camera to psm two:");
    cout << affine_lcamera_to_psm_two.linear() << endl;
    cout << affine_lcamera_to_psm_two.translation().transpose() << endl;

    //try grabbing needle w/ bvec_needle = -bvec_gripper
    //needlePlanner.set_grab_needle_plus_minus_z(GRASP_W_NEEDLE_NEGATIVE_GRIPPER_Z);
    //needlePlanner.compute_grasp_transform();

    double ang_to_exit, phi_x, phi_y, tilt;
    Eigen::Vector3d nvec_tissue;
    vector <int> valid_ik_samps;
    double best_phi_x = 0.0;
    double best_phi_y = 0.0;
    double best_tilt = 0.0;
    int best_drive_score = 0;
    int drive_score = 0;
    int best_run = 0;

    Eigen::VectorXi ik_ok_array(40); //Added by DLC 5-26-2016; also in test_main, test_main_v2
    int ik_score=0;
    
    //while (ros::ok()) 
    {
        cout << "entrance point at: " << entrance_pt.transpose() << endl;
        cout << "enter angle to exit point (0-2pi): ";
        cin >> ang_to_exit;

        //cout << "enter needle-grasp phi_x: ";

        //cin >> phi_x;
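        // exhaustive grid search over needle-grasp angles (phi_x, phi_y) and needle-axis tilt w/rt tissue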
        for (phi_x = 0.0; phi_x < 6.28; phi_x += 0.1) {
            for (phi_y = 0.0; phi_y < 6.28; phi_y += 0.1) {
                ROS_INFO("phi_x, phi_y = %f, %f",phi_x,phi_y);
                for (tilt = -1.2; tilt < 1.21; tilt += 0.1) {
                    drive_score = 0;

                    //cout << "enter needle-grasp phi_y: ";
                    //cin >> phi_y;
                    needlePlanner.compute_grasp_transform(phi_x, phi_y);
                    //cout<< "enter tilt of needle z-axis w/rt tissue: ";
                    //cin>> tilt;
                    needlePlanner.set_psi_needle_axis_tilt_wrt_tissue(tilt);

                    //for (ang_to_exit = 0; ang_to_exit < 6.28; ang_to_exit += 1.0) 
                    {
                        //cout << "angle to exit pt on tissue surface: " << ang_to_exit << endl;
                        //cin>>ang_to_exit;
                        nvec_tissue << cos(ang_to_exit), sin(ang_to_exit), 0.0;
                        exit_pt = entrance_pt + nvec_tissue;
                        needlePlanner.compute_tissue_frame_wrt_camera(entrance_pt, exit_pt, tissue_normal);
                        gripper_affines_wrt_camera.clear();
                        gripper2_affines_wrt_camera.clear();

                        vetted_gripper_affines_wrt_camera.clear();

                        needlePlanner.compute_needle_drive_gripper_affines(gripper_affines_wrt_camera, gripper2_affines_wrt_camera, ik_ok_array, ik_score);
                        int nposes = gripper_affines_wrt_camera.size();
                        //ROS_INFO("computed %d gripper poses w/rt camera", nposes);
                        Eigen::Affine3d affine_pose, affine_gripper_wrt_base_frame, affine_gripper_wrt_base_fk;
                        Eigen::Vector3d origin_err;
                        Eigen::Matrix3d R_err;
                        Vectorq7x1 q_vec1;
                        q_vec1.resize(7);
                        valid_ik_samps.clear();
                        for (int i = 0; i < nposes; i++) {
                            if (test_debug) ROS_INFO("pose %d", i);
                            affine_pose = gripper_affines_wrt_camera[i];
                            if (test_debug) cout << affine_pose.linear() << endl;
                            if (test_debug) cout << "origin: " << affine_pose.translation().transpose() << endl;
                            affine_gripper_wrt_base_frame = affine_lcamera_to_psm_one.inverse() * affine_pose;
                            if (test_debug) ROS_INFO("pose %d w/rt PSM1 base:", i);
                            if (test_debug) cout << affine_gripper_wrt_base_frame.linear() << endl;
                            if (test_debug) cout << "origin: " << affine_gripper_wrt_base_frame.translation().transpose() << endl;
                            if (ik_solver.ik_solve(affine_gripper_wrt_base_frame)) { //convert desired pose into equiv joint displacements
                                valid_ik_samps.push_back(1);
                                drive_score++;
                                vetted_gripper_affines_wrt_camera.push_back(affine_pose); //accept this one
                                q_vec1 = ik_solver.get_soln();
                                q_vec1(6) = 0.0;
                                if (test_debug) cout << "qvec1: " << q_vec1.transpose() << endl;
                                if (test_debug) ROS_INFO("FK of IK soln: ");
                                affine_gripper_wrt_base_fk = davinci_fwd_solver.fwd_kin_solve(q_vec1);
                                if (test_debug) cout << "affine linear (R): " << endl;
                                if (test_debug) cout << affine_gripper_wrt_base_fk.linear() << endl;
                                if (test_debug) cout << "origin: ";
                                if (test_debug) cout << affine_gripper_wrt_base_fk.translation().transpose() << endl;
                                origin_err = affine_gripper_wrt_base_frame.translation() - affine_gripper_wrt_base_fk.translation();
                                R_err = affine_gripper_wrt_base_frame.linear() - affine_gripper_wrt_base_fk.linear();
                                if (test_debug) ROS_WARN("error test: %f", origin_err.norm());
                                if (test_debug) cout << "IK/FK origin err: " << origin_err.transpose() << endl;
                                if (test_debug) cout << "R err: " << endl;
                                if (test_debug) cout << R_err << endl;
                            } else {
                                if (test_debug) ROS_WARN("GRIPPER 1: NO VALID IK SOLN!!");
                                valid_ik_samps.push_back(0);
                            }
                        } //end loop through needle-drive poses
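                        // score this (phi_x, phi_y, tilt) candidate by the longest contiguous run of
                        // IK-feasible poses; see the runlength_ones sketch after this example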
                        drive_score = runlength_ones(valid_ik_samps);
                        ROS_INFO("drive_score: %d",drive_score);
                    } //end ang_to_exit block (the loop itself is commented out above)
                    if (drive_score > 0 && drive_score == best_drive_score) { //tied the best nonzero score so far
                        best_drive_score = drive_score;
                        best_phi_x = phi_x;
                        best_phi_y = phi_y;
                        best_tilt = tilt;
                        cout << "valid samples status:" << endl;
                        for (int i = 0; i < valid_ik_samps.size(); i++) {
                            cout << valid_ik_samps[i] << "  ";
                        }
                        cout << endl;
                        ROS_INFO("phi_x, phi_y, tilt = %f, %f, %f",phi_x,phi_y,tilt);                        
                        ROS_WARN("tied best strategy; score = %d; enter 1: ",best_drive_score);

                        //int ans; cin >> ans; // optional pause, disabled
                        //needlePlanner.write_needle_drive_affines_to_file(vetted_gripper_affines_wrt_camera);
                    }                               
                    if (drive_score > best_drive_score) {
                        best_drive_score = drive_score;
                        best_phi_x = phi_x;
                        best_phi_y = phi_y;
                        best_tilt = tilt;
                        cout << "valid samples status:" << endl;
                        for (int i = 0; i < valid_ik_samps.size(); i++) {
                            cout << valid_ik_samps[i] << "  ";
                        }
                        cout << endl;
                        ROS_INFO("phi_x, phi_y, tilt = %f, %f, %f",phi_x,phi_y,tilt);                        
                        ROS_WARN("new best strategy; score = %d; enter 1: ",best_drive_score);

                        //int ans; cin >> ans; // optional pause, disabled
                        needlePlanner.write_needle_drive_affines_to_file(vetted_gripper_affines_wrt_camera);
                    }
                    if (drive_score==21) { // save best solns to file
                        outfile<<phi_x<<", "<<phi_y<<", "<<tilt<<endl;
                    }
         
                }


            }
        }
    }
    //needlePlanner.write_needle_drive_affines_to_file(vetted_gripper_affines_wrt_camera);

    ROS_INFO("best drive score: %d", best_drive_score);
    ROS_INFO("best phi_x, phi_y, tilt = %f, %f, %f", best_phi_x, best_phi_y, best_tilt);
    outfile.close();
    return 0;

}
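
The scoring helper runlength_ones used above is not shown in this excerpt; from its name and usage it appears to return the length of the longest contiguous run of 1s in the IK-validity vector. A plausible sketch under that assumption:

#include <vector>

// Sketch only: longest contiguous run of 1s (assumed semantics of the
// runlength_ones helper called in the example above).
int runlength_ones(const std::vector<int> &flags) {
    int best = 0, run = 0;
    for (int flag : flags) {
        run = (flag == 1) ? run + 1 : 0;
        if (run > best)
            best = run;
    }
    return best;
}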
int main(int argc, char** argv)
{
    ros::init(argc, argv, "vision_guided_motion_action_client");
    ros::NodeHandle nh;

    // create an instance of arm commander class and cwru_pcl_utils class
    ArmMotionCommander arm_motion_commander(&nh);
    CwruPclUtils cwru_pcl_utils(&nh);

    // wait to receive point cloud
    while (!cwru_pcl_utils.got_kinect_cloud())
    {
        ROS_INFO("did not receive pointcloud");
        ros::spinOnce();
        ros::Duration(1.0).sleep();
    }
    ROS_INFO("got a pointcloud");

    tf::StampedTransform tf_sensor_frame_to_torso_frame;  // use this to transform sensor frame to torso frame
    tf::TransformListener tf_listener;  // start a transform listener

    // warm up the tf_listener (this is to make sure it gets all the transforms it needs, to avoid crashing)
    bool tferr = true;
    ROS_INFO("waiting for tf between kinect_pc_frame and torso...");
    while (tferr)
    {
        tferr = false;
        try
        {
            // The direction of the transform returned will be from the target_frame to the source_frame
            // which if applied to data, will transform data in the source_frame into the target_frame
            // See tf/CoordinateFrameConventions#Transform_Direction
            tf_listener.lookupTransform("torso", "kinect_pc_frame", ros::Time(0), tf_sensor_frame_to_torso_frame);
        }
        catch (tf::TransformException &exception)
        {
            ROS_ERROR("%s", exception.what());
            tferr = true;
            ros::Duration(0.5).sleep();
            ros::spinOnce();
        }
    }
    ROS_INFO("tf is good");  // tf_listener found a complete chain from sensor to world



    Eigen::Affine3f A_sensor_wrt_torso;
    Eigen::Affine3d Affine_des_gripper;
    Eigen::Vector3d xvec_des, yvec_des, zvec_des, origin_des;
    geometry_msgs::PoseStamped rt_tool_pose;
    Eigen::Vector3f plane_normal, major_axis, centroid;
    Eigen::Matrix3d Rmat;
    int rtn_val;
    double plane_dist;

    // convert the tf to an Eigen::Affine
    A_sensor_wrt_torso = cwru_pcl_utils.transformTFToEigen(tf_sensor_frame_to_torso_frame);

    // send a command to plan and then execute a joint-space move to pre-defined pose
    rtn_val = arm_motion_commander.plan_move_to_pre_pose();
    rtn_val = arm_motion_commander.rt_arm_execute_planned_path();

    // repeatedly scan for a new patch of selected points
    while (ros::ok())
    {
        if (cwru_pcl_utils.got_selected_points())
        {
            ROS_INFO("transforming selected points");
            cwru_pcl_utils.transform_selected_points_cloud(A_sensor_wrt_torso);

            // fit a plane to these selected points
            cwru_pcl_utils.fit_xformed_selected_pts_to_plane(plane_normal, plane_dist);
            ROS_INFO_STREAM("normal: " << plane_normal.transpose() << "\ndist = " << plane_dist);

            major_axis = cwru_pcl_utils.get_major_axis();
            centroid = cwru_pcl_utils.get_centroid();
            cwru_pcl_utils.reset_got_selected_points();  // reset the selected-points trigger

            // construct a goal affine pose
            for (int i = 0; i < 3; i++) {
                origin_des[i] = centroid[i];  // convert to double precision
                zvec_des[i] = -plane_normal[i];  // want tool z pointing OPPOSITE surface normal
                xvec_des[i] = major_axis[i];
            }

            origin_des[2] += 0.02;  // raise up 2cm
            yvec_des = zvec_des.cross(xvec_des);  // construct consistent right-hand triad
            Rmat.col(0)= xvec_des;
            Rmat.col(1)= yvec_des;
            Rmat.col(2)= zvec_des;
            Affine_des_gripper.linear() = Rmat;
            Affine_des_gripper.translation() = origin_des;
            ROS_INFO_STREAM("des origin: " << Affine_des_gripper.translation().transpose() << endl);
            ROS_INFO_STREAM("orientation: " << endl);
            ROS_INFO_STREAM(Rmat << endl);

            // convert des pose from Eigen::Affine to geometry_msgs::PoseStamped
            rt_tool_pose.pose = arm_motion_commander.transformEigenAffine3dToPose(Affine_des_gripper);

            // send move plan request
            rtn_val = arm_motion_commander.rt_arm_plan_path_current_to_goal_pose(rt_tool_pose);
            if (rtn_val == cwru_action::cwru_baxter_cart_moveResult::SUCCESS) {
                // send command to execute planned motion
                rtn_val = arm_motion_commander.rt_arm_execute_planned_path();

                /* New code added to enable wiping motion */
                geometry_msgs::PoseStamped rt_tool_wipe_table;
                origin_des[1] += 0.1;
                Affine_des_gripper.translation() = origin_des;
                rt_tool_wipe_table.pose = arm_motion_commander.transformEigenAffine3dToPose(Affine_des_gripper);

                rtn_val = arm_motion_commander.rt_arm_plan_path_current_to_goal_pose(rt_tool_wipe_table);
                if (rtn_val == cwru_action::cwru_baxter_cart_moveResult::SUCCESS) {
                    rtn_val = arm_motion_commander.rt_arm_execute_planned_path();
                } else {
                    ROS_WARN("Cartesian path to desired pose not achievable");
                }

                origin_des[1] -= 0.2;
                Affine_des_gripper.translation() = origin_des;
                rt_tool_wipe_table.pose = arm_motion_commander.transformEigenAffine3dToPose(Affine_des_gripper);

                rtn_val = arm_motion_commander.rt_arm_plan_path_current_to_goal_pose(rt_tool_wipe_table);
                if (rtn_val == cwru_action::cwru_baxter_cart_moveResult::SUCCESS){
                    rtn_val = arm_motion_commander.rt_arm_execute_planned_path();
                } else {
                    ROS_WARN("Cartesian path to desired pose not achievable");
                }

                origin_des[1] += 0.2;
                Affine_des_gripper.translation() = origin_des;
                rt_tool_wipe_table.pose = arm_motion_commander.transformEigenAffine3dToPose(Affine_des_gripper);

                rtn_val = arm_motion_commander.rt_arm_plan_path_current_to_goal_pose(rt_tool_wipe_table);
                if (rtn_val == cwru_action::cwru_baxter_cart_moveResult::SUCCESS){
                    rtn_val = arm_motion_commander.rt_arm_execute_planned_path();
                } else {
                    ROS_WARN("Cartesian path to desired pose not achievable");
                }

                origin_des[1] -= 0.1;
                Affine_des_gripper.translation() = origin_des;
                rt_tool_wipe_table.pose = arm_motion_commander.transformEigenAffine3dToPose(Affine_des_gripper);

                rtn_val = arm_motion_commander.rt_arm_plan_path_current_to_goal_pose(rt_tool_wipe_table);
                if (rtn_val == cwru_action::cwru_baxter_cart_moveResult::SUCCESS){
                    rtn_val = arm_motion_commander.rt_arm_execute_planned_path();
                } else {
                    ROS_WARN("Cartesian path to desired pose not achievable");
                }
                /* end of new code */
            } else {
                ROS_WARN("Cartesian path to desired pose not achievable");
            }
        }
        ros::Duration(0.5).sleep();  // sleep for half a second
        ros::spinOnce();
    }

    return 0;
}
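
Both examples convert a tf transform to an Eigen affine (transformTFToEigen as a free function in the first, as a CwruPclUtils method in the second); neither body appears in this excerpt. A minimal sketch of the usual element-by-element conversion, assuming that is all the helper does:

#include <tf/transform_datatypes.h>
#include <Eigen/Dense>

// Sketch only: copy the 3x3 rotation basis and the origin of a tf::Transform
// into an Eigen::Affine3d; the actual helpers may differ in detail.
Eigen::Affine3d transformTFToEigen(const tf::Transform &t) {
    Eigen::Affine3d e;
    for (int i = 0; i < 3; i++) {
        e.matrix()(i, 3) = t.getOrigin()[i];
        for (int j = 0; j < 3; j++)
            e.matrix()(i, j) = t.getBasis()[i][j];
    }
    e.matrix().row(3) << 0.0, 0.0, 0.0, 1.0; // homogeneous bottom row
    return e;
}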