Example #1
/**
 * @function heuristicCost
 * @brief Weighted workspace heuristic: goal distance plus end-effector axis-alignment terms
 */
double M_RRT::heuristicCost( Eigen::VectorXd node )
{

  Eigen::Transform<double, 3, Eigen::Affine> T;

  // Calculate the EE position
  robinaLeftArm_fk( node, TWBase, Tee, T );

  Eigen::VectorXd trans_ee = T.translation();
  Eigen::VectorXd x_ee = T.rotation().col(0);
  Eigen::VectorXd y_ee = T.rotation().col(1);
  Eigen::VectorXd z_ee = T.rotation().col(2);

  Eigen::VectorXd GH = ( goalPosition - trans_ee );

  double fx1 = GH.norm() ;

  GH = GH/GH.norm();

  double fx2 = std::fabs( GH.dot( x_ee ) - 1 );

  double fx3 = std::fabs( GH.dot( z_ee ) );

  double heuristic = w1*fx1 + w2*fx2 + w3*fx3;     

  return heuristic;
}
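A minimal, self-contained sketch of the three heuristic terms above. The pose, goal, and weights here are placeholders; in M_RRT they come from robinaLeftArm_fk and class members:

#include <Eigen/Geometry>
#include <cmath>
#include <iostream>

int main() {
  // Hypothetical end-effector pose standing in for the forward-kinematics result.
  Eigen::Transform<double, 3, Eigen::Affine> T = Eigen::Transform<double, 3, Eigen::Affine>::Identity();
  T.translate(Eigen::Vector3d(0.4, 0.1, 0.9));
  T.rotate(Eigen::AngleAxisd(0.3, Eigen::Vector3d::UnitZ()));

  Eigen::Vector3d goalPosition(0.5, 0.0, 1.0);  // placeholder goal
  double w1 = 1.0, w2 = 0.1, w3 = 0.1;          // placeholder weights

  Eigen::Vector3d GH = goalPosition - T.translation();
  double fx1 = GH.norm();                                     // distance to goal
  GH.normalize();
  double fx2 = std::fabs(GH.dot(T.rotation().col(0)) - 1.0);  // x axis aligned with goal direction
  double fx3 = std::fabs(GH.dot(T.rotation().col(2)));        // z axis orthogonal to goal direction

  std::cout << "heuristic = " << w1*fx1 + w2*fx2 + w3*fx3 << "\n";
}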
Example #2
Eigen::Matrix<Scalar, HOMOGENEOUS_TRANSFORM_SIZE, DerivedQdotToV::ColsAtCompileTime> dHomogTrans(
    const Eigen::Transform<Scalar, 3, Eigen::Isometry>& T,
    const Eigen::MatrixBase<DerivedS>& S,
    const Eigen::MatrixBase<DerivedQdotToV>& qdot_to_v) {
  const int nq_at_compile_time = DerivedQdotToV::ColsAtCompileTime;
  int nq = qdot_to_v.cols();
  auto qdot_to_twist = (S * qdot_to_v).eval();

  const int numel = HOMOGENEOUS_TRANSFORM_SIZE;
  Eigen::Matrix<Scalar, numel, nq_at_compile_time> ret(numel, nq);

  const auto& Rx = T.linear().col(0);
  const auto& Ry = T.linear().col(1);
  const auto& Rz = T.linear().col(2);

  const auto& qdot_to_omega_x = qdot_to_twist.row(0);
  const auto& qdot_to_omega_y = qdot_to_twist.row(1);
  const auto& qdot_to_omega_z = qdot_to_twist.row(2);

  ret.template middleRows<3>(0) = -Rz * qdot_to_omega_y + Ry * qdot_to_omega_z;
  ret.row(3).setZero();

  ret.template middleRows<3>(4) = Rz * qdot_to_omega_x - Rx * qdot_to_omega_z;
  ret.row(7).setZero();

  ret.template middleRows<3>(8) = -Ry * qdot_to_omega_x + Rx * qdot_to_omega_y;
  ret.row(11).setZero();

  ret.template middleRows<3>(12) = T.linear() * qdot_to_twist.bottomRows(3);
  ret.row(15).setZero();

  return ret;
}
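The 16 rows of ret hold the column-major entries of the 4x4 homogeneous matrix, so column i of the result is dT/dq_i. A small sketch of reading the gradient back out, assuming Scalar is double and HOMOGENEOUS_TRANSFORM_SIZE is 16:

#include <Eigen/Dense>
#include <iostream>

int main() {
  // Stand-in for a dHomogTrans result with nq = 2 generalized coordinates.
  Eigen::Matrix<double, 16, Eigen::Dynamic> dT(16, 2);
  dT.setRandom();
  for (int i = 0; i < dT.cols(); ++i) {
    // Reinterpret column i as the 4x4 matrix dT/dq_i (column-major layout).
    Eigen::Map<const Eigen::Matrix4d> dT_dqi(dT.col(i).data());
    std::cout << "dT/dq" << i << ":\n" << dT_dqi << "\n";
  }
}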
Example #3
Eigen::Matrix<Scalar, HOMOGENEOUS_TRANSFORM_SIZE, DerivedDT::ColsAtCompileTime> dHomogTransInv(
    const Eigen::Transform<Scalar, 3, Eigen::Isometry>& T,
    const Eigen::MatrixBase<DerivedDT>& dT) {
  const int nq_at_compile_time = DerivedDT::ColsAtCompileTime;
  int nq = dT.cols();

  const auto& R = T.linear();
  const auto& p = T.translation();

  std::array<int, 3> rows {0, 1, 2};
  std::array<int, 3> R_cols {0, 1, 2};
  std::array<int, 1> p_cols {3};

  auto dR = getSubMatrixGradient(dT, rows, R_cols, T.Rows);
  auto dp = getSubMatrixGradient(dT, rows, p_cols, T.Rows);

  auto dinvT_R = transposeGrad(dR, R.rows());
  auto dinvT_p = (-R.transpose() * dp - matGradMult(dinvT_R, p)).eval();

  const int numel = HOMOGENEOUS_TRANSFORM_SIZE;
  Eigen::Matrix<Scalar, numel, nq_at_compile_time> ret(numel, nq);
  setSubMatrixGradient(ret, dinvT_R, rows, R_cols, T.Rows);
  setSubMatrixGradient(ret, dinvT_p, rows, p_cols, T.Rows);

  // zero out gradient of elements in last row:
  const int last_row = 3;
  for (int col = 0; col < T.HDim; col++) {
    ret.row(last_row + col * T.Rows).setZero();
  }

  return ret;
}
Example #4
void TranslationRotation3D::setF(const std::vector<double> &F_in) {
  if (F_in.size() != 16)
    throw std::runtime_error(
        "TranslationRotation3D::setF: F_in requires 16 elements");

  if ((F_in.at(12) != 0.0) || (F_in.at(13) != 0.0) || (F_in.at(14) != 0.0) ||
      (F_in.at(15) != 1.0))
    throw std::runtime_error(
        "TranslationRotation3D::setF: bottom row of F_in should be [0 0 0 1]");

  Eigen::Map<const Eigen::Matrix<double, 4, 4, Eigen::RowMajor> > F_in_eig(
      F_in.data());

  Eigen::Transform<double, 3, Eigen::Affine> F;
  F = F_in_eig;

  double tmpT[3];
  Eigen::Map<Eigen::Vector3d> tra_eig(tmpT);
  tra_eig = F.translation();

  double tmpR_mat[9];
  Eigen::Map<Eigen::Matrix<double, 3, 3, Eigen::RowMajor> > rot_eig(tmpR_mat);
  rot_eig = F.rotation();

  setT(tmpT);
  setR_mat(tmpR_mat);
  updateR_mat(); // for stability
}
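A hypothetical call, assuming the class shown above is default-constructible: F_in is the 4x4 pose flattened row-major into 16 doubles, and setF rejects anything whose bottom row is not [0 0 0 1].

#include <vector>

void setF_example() {
  std::vector<double> F_in{1, 0, 0, 0.1,
                           0, 1, 0, 0.2,
                           0, 0, 1, 0.3,
                           0, 0, 0, 1.0};
  TranslationRotation3D pose;
  pose.setF(F_in);  // throws std::runtime_error on malformed input
}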
Example #5
void X3DConverter::startShape(const std::array<float, 12>& matrix) {

    // Finding axis/angle from the matrix using Eigen for its bulletproof implementation.
    Eigen::Transform<float, 3, Eigen::Affine> t;
    t.setIdentity();
    for (unsigned int i = 0; i < 3; i++) {
        for (unsigned int j = 0; j < 4; j++) {
            t(i, j) = matrix[i+j*3];
        }
    }

    Eigen::Matrix3f rotationMatrix;
    Eigen::Matrix3f scaleMatrix;
    t.computeRotationScaling(&rotationMatrix, &scaleMatrix);
    Eigen::Quaternionf q;
    Eigen::AngleAxisf aa;
    q = rotationMatrix;
    aa = q;

    Eigen::Vector3f scale = scaleMatrix.diagonal();
    Eigen::Vector3f translation = t.translation();

    startNode(ID::Transform);
    m_writers.back()->setSFVec3f(ID::translation, translation.x(), translation.y() , translation.z());
    m_writers.back()->setSFRotation(ID::rotation, aa.axis().x(), aa.axis().y(), aa.axis().z(), aa.angle());
    m_writers.back()->setSFVec3f(ID::scale, scale.x(), scale.y(), scale.z());
    startNode(ID::Shape);
    startNode(ID::Appearance);
    startNode(ID::Material);
    m_writers.back()->setSFColor<vector<float> >(ID::diffuseColor, RVMColorHelper::color(m_materials.back()));
    endNode(ID::Material); // Material
    endNode(ID::Appearance); // Appearance

}
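A self-contained sketch of the decomposition used above: computeRotationScaling splits the linear part into rotation times scaling, and the quaternion-to-AngleAxis chain recovers the axis/angle pair that X3D expects. The values here are made up:

#include <Eigen/Geometry>
#include <iostream>

int main() {
  Eigen::Transform<float, 3, Eigen::Affine> t = Eigen::Transform<float, 3, Eigen::Affine>::Identity();
  t.translate(Eigen::Vector3f(1.f, 2.f, 3.f));
  t.rotate(Eigen::AngleAxisf(0.5f, Eigen::Vector3f::UnitY()));
  t.scale(2.f);

  Eigen::Matrix3f rotation, scaling;
  t.computeRotationScaling(&rotation, &scaling);

  Eigen::AngleAxisf aa(Eigen::Quaternionf(rotation));
  std::cout << "axis: " << aa.axis().transpose()
            << "  angle: " << aa.angle()
            << "  scale: " << scaling.diagonal().transpose() << "\n";
}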
Example #6
/**
 * @function wsDiff
 */
Eigen::VectorXd JG_RRT::wsDiff( Eigen::VectorXd q )
{
    Eigen::Transform<double, 3, Eigen::Affine> T;
    robinaLeftArm_fk( q, TWBase, Tee, T );
    Eigen::VectorXd ws_diff = ( goalPosition - T.translation() );

    return ws_diff;
}
Example #7
	operator Eigen::Transform<double, 3, Eigen::Affine, _Options>() const
	{
	    Eigen::Transform<double, 3, Eigen::Affine, _Options> ret;
	    ret.setIdentity();
	    ret.rotate(this->orientation);
	    ret.translation() = this->position;
	    return ret;
	}
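A minimal sketch of the pose type such an operator might live in: position plus orientation, implicitly convertible to an Eigen transform. The struct and field names are illustrative, not from the original source:

#include <Eigen/Geometry>

template <int _Options = Eigen::AutoAlign>
struct Pose {
  Eigen::Vector3d position = Eigen::Vector3d::Zero();
  Eigen::Quaterniond orientation = Eigen::Quaterniond::Identity();

  operator Eigen::Transform<double, 3, Eigen::Affine, _Options>() const {
    Eigen::Transform<double, 3, Eigen::Affine, _Options> ret;
    ret.setIdentity();
    ret.rotate(this->orientation);
    ret.translation() = this->position;
    return ret;
  }
};

int main() {
  Pose<> p;
  p.position = Eigen::Vector3d(1, 2, 3);
  Eigen::Affine3d T = p;  // implicit conversion through the operator
  return T.translation().x() == 1.0 ? 0 : 1;
}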
Example #8
/**
 * @function wsDistance
 */
double goWSOrient::wsDistance( Eigen::VectorXd q )
{
  Eigen::Transform<double, 3, Eigen::Affine> T;
  robinaLeftArm_fk( q, TWBase, Tee, T );
  Eigen::VectorXd ws_diff = ( goalPos - T.translation() );
  double ws_dist = ws_diff.norm();
    
  return ws_dist;
}
Example #9
void multiKinectCalibration::getData()
{
    iterationStep = 0;
//    std::string relativeTo = listOfTFnames.at(0);
    kinect2kinectTransform.resize(listOfTFnames.size() - 1);
    ros::Rate r(2);
    
    Eigen::Transform<double,3,Eigen::Affine> kinectReference;
    ros::spinOnce();
    if (!useTFonly)
    {
        while ( iterationStep < listOfTFnames.size() && ros::ok())
        {
            std::cerr << "Waiting for point clouds... \n";
            r.sleep();
            ros::spinOnce();
        }
        showPCL();
        std::cerr << "Subscribe pcl done" << std::endl;
    }
    sleep(1.0);
    std::cerr << listener.allFramesAsString() << std::endl;
    // listen to all tf Frames
    
    // get the transformation between kinects
    for (std::size_t i = 0; i < listOfTFnames.size(); i++)
    {
        tf::StampedTransform transform;
        std::string parentFrame;
        listener.getParent(listOfTFnames.at(i),ros::Time(0),parentFrame);
        listOfPointCloudnameHeaders.push_back(parentFrame);
        
        std::cerr << "Lookup transform: "<< listOfTFnames.at(i) << " with parent: "<< parentFrame <<std::endl;
        listener.waitForTransform(parentFrame,listOfTFnames.at(i),ros::Time(0),ros::Duration(5.0));
        listener.lookupTransform(parentFrame,listOfTFnames.at(i),ros::Time(0),transform);
        Eigen::Transform<double,3,Eigen::Affine> tmpTransform;
        tf::transformTFToEigen(transform,tmpTransform);
        
//        geometry_msgs::TransformStamped msg;
//        transformStampedTFToMsg(transform, msg);
//        Eigen::Translation<float,3> translation(msg.transform.translation.x, msg.transform.translation.y, msg.transform.translation.z);
//        Eigen::Quaternion<float> rotation(msg.transform.rotation.w,
//                                          msg.transform.rotation.x,
//                                          msg.transform.rotation.y,
//                                          msg.transform.rotation.z);
//        std::cerr << "tmp:\n" << tmpTransform.matrix() << std::endl;

        if (i == 0) kinectReference = tmpTransform;
        else kinect2kinectTransform[i-1] = kinectReference * tmpTransform.inverse();
    }
//    std::cerr << "Kinect Ref:\n" << kinectReference.matrix() << std::endl;
}
Example #10
void toGeometryMsg(geometry_msgs::Transform& out, const Eigen::Transform<double, 3, TransformType>& pose) {
    // convert accumulated_pose_ to transformStamped
    Eigen::Quaterniond rot_quat(pose.rotation());

    out.translation.x = pose.translation().x();
    out.translation.y = pose.translation().y();
    out.translation.z = pose.translation().z();

    out.rotation.x = rot_quat.x();
    out.rotation.y = rot_quat.y();
    out.rotation.z = rot_quat.z();
    out.rotation.w = rot_quat.w();
}
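A hedged usage sketch, assuming the function above is instantiated with TransformType = Eigen::Isometry and the ROS geometry_msgs headers are available:

#include <cmath>
#include <Eigen/Geometry>
#include <geometry_msgs/Transform.h>

void toGeometryMsg_example() {
  Eigen::Isometry3d pose = Eigen::Translation3d(1, 0, 0) *
                           Eigen::AngleAxisd(M_PI / 2, Eigen::Vector3d::UnitZ());
  geometry_msgs::Transform msg;
  toGeometryMsg(msg, pose);  // fills msg.translation and msg.rotation
}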
Example #11
Eigen::Matrix<Scalar,4,4> perspective(Scalar fovy, Scalar aspect, Scalar zNear, Scalar zFar){
    Eigen::Transform<Scalar,3,Eigen::Projective> tr;
    tr.matrix().setZero();
    assert(aspect > 0);
    assert(zFar > zNear);
    Scalar radf = M_PI * fovy / 180.0;
    Scalar tan_half_fovy = std::tan(radf / 2.0);
    tr(0,0) = 1.0 / (aspect * tan_half_fovy);
    tr(1,1) = 1.0 / (tan_half_fovy);
    tr(2,2) = - (zFar + zNear) / (zFar - zNear);
    tr(3,2) = - 1.0;
    tr(2,3) = - (2.0 * zFar * zNear) / (zFar - zNear);
    return tr.matrix();
}
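A quick check of the conventions (the matrix mirrors gluPerspective: fovy in degrees, camera looking down -z, NDC depth in [-1, 1]), assuming the template <typename Scalar> header that the snippet omits:

#include <Eigen/Geometry>
#include <iostream>

int main() {
  Eigen::Matrix4d proj = perspective<double>(45.0, 16.0 / 9.0, 0.1, 100.0);
  // A point on the near plane maps to NDC z = -1 after the perspective divide.
  Eigen::Vector4d p = proj * Eigen::Vector4d(0, 0, -0.1, 1);
  std::cout << (p / p.w()).transpose() << "\n";
}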
Example #12
template <typename PointT> void
pcl::people::GroundBasedPeopleDetectionApp<PointT>::applyTransformationGround ()
{
  if (transformation_set_ && ground_coeffs_set_)
  {
    Eigen::Transform<float, 3, Eigen::Affine> transform;
    transform = transformation_;
    ground_coeffs_transformed_ = transform.matrix() * ground_coeffs_;
  }
  else
  {
    ground_coeffs_transformed_ = ground_coeffs_;
  }
}
Example #13
/**
 * @function workspaceDist
 * @brief Workspace position difference between the node's end effector and ws_target
 */
Eigen::VectorXd M_RRT::workspaceDist( Eigen::VectorXd node, Eigen::VectorXd ws_target )
{
  Eigen::Transform<double, 3, Eigen::Affine> T;
  Eigen::VectorXd diff;

  // Calculate the EE position
  robinaLeftArm_fk( node, TWBase, Tee, T );
  Eigen::VectorXd ws_node = T.translation();

  // Calculate the workspace distance to goal
  diff = ( ws_target - ws_node );

  return diff;
}
Example #14
/**
   Transforms this lifting surface.
   
   @param[in]   transformation   Affine transformation.
*/
void
LiftingSurface::transform(const Eigen::Transform<double, 3, Eigen::Affine> &transformation)
{
    // Call super:
    this->Surface::transform(transformation);
    
    // Transform bisectors and wake normals:
    for (int i = 0; i < n_spanwise_nodes(); i++) {
        Vector3d trailing_edge_bisector = trailing_edge_bisectors.row(i);
        trailing_edge_bisectors.row(i) = transformation.linear() * trailing_edge_bisector;
        
        Vector3d wake_normal = wake_normals.row(i);
        wake_normals.row(i) = transformation.linear() * wake_normal;
    }
}
Example #15
 void renderSprites() {
     glUseProgram(spriteShaderProgramId);
     entityManagerRef->visitEntitiesWithTypeMask(componentMask, [&](Entity<EntityManagerTypes...> &entity){
         auto &aabbComponent = entity.template getComponent<AABBComponent>();
         auto &transformComponent = entity.template getComponent<TransformComponent>();
         
         Eigen::Translation<GLfloat, 3> translationMat((transformComponent.x - HALF_SCREEN_WIDTH) / HALF_SCREEN_WIDTH,
                                                       (transformComponent.y - HALF_SCREEN_HEIGHT) / HALF_SCREEN_HEIGHT,
                                                       0);
         Eigen::DiagonalMatrix<GLfloat, 3> scaleMat(aabbComponent.width / SCREEN_WIDTH,
                                                    aabbComponent.height / SCREEN_HEIGHT,
                                                    1);
         
         Eigen::Transform<GLfloat, 3, Eigen::Affine> transformMatrix = translationMat * scaleMat;
         
         boundsSprite.render(transformMatrix.matrix());
     });
 }
Example #16
template <typename PointT, typename Scalar> inline PointT
pcl::transformPointWithNormal (const PointT &point, 
                     const Eigen::Transform<Scalar, 3, Eigen::Affine> &transform)
{
  PointT ret = point;
  ret.getVector3fMap () = transform * point.getVector3fMap ();
  ret.getNormalVector3fMap () = transform.rotation () * point.getNormalVector3fMap ();

  return (ret);
}
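Note that transform.rotation() extracts only the rotational part, which is exact for rigid transforms. If the transform carried non-uniform scaling, normals would instead need the inverse transpose of the linear part, along the lines of this sketch:

#include <Eigen/Geometry>

Eigen::Vector3f transformNormal(const Eigen::Affine3f &transform,
                                const Eigen::Vector3f &normal) {
  // The inverse transpose maps normals correctly under any invertible affine map.
  return (transform.linear().inverse().transpose() * normal).normalized();
}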
Example #17
typename Gradient<DerivedX, DerivedDX::ColsAtCompileTime>::type dTransformAdjointTranspose(
    const Eigen::Transform<Scalar, 3, Eigen::Isometry>& T,
    const Eigen::MatrixBase<DerivedX>& X,
    const Eigen::MatrixBase<DerivedDT>& dT,
    const Eigen::MatrixBase<DerivedDX>& dX) {
  assert(dT.cols() == dX.cols());
  int nq = dT.cols();

  const auto& R = T.linear();
  const auto& p = T.translation();

  std::array<int, 3> rows {0, 1, 2};
  std::array<int, 3> R_cols {0, 1, 2};
  std::array<int, 1> p_cols {3};

  auto dR = getSubMatrixGradient(dT, rows, R_cols, T.Rows);
  auto dp = getSubMatrixGradient(dT, rows, p_cols, T.Rows);

  auto Rtranspose = R.transpose();
  auto dRtranspose = transposeGrad(dR, R.rows());

  typename Gradient<DerivedX, DerivedDX::ColsAtCompileTime>::type ret(X.size(), nq);
  std::array<int, 3> Xomega_rows {0, 1, 2};
  std::array<int, 3> Xv_rows {3, 4, 5};
  for (int col = 0; col < X.cols(); col++) {
    auto Xomega_col = X.template block<3, 1>(0, col);
    auto Xv_col = X.template block<3, 1>(3, col);

    std::array<int, 1> col_array {col};
    auto dXomega_col = getSubMatrixGradient(dX, Xomega_rows, col_array, X.rows());
    auto dXv_col = getSubMatrixGradient(dX, Xv_rows, col_array, X.rows());

    auto dp_hatXv_col = (dp.colwise().cross(Xv_col) - dXv_col.colwise().cross(p)).eval();
    auto Xomega_col_minus_p_cross_Xv_col = (Xomega_col - p.cross(Xv_col)).eval();
    auto dXomega_transformed_col = (Rtranspose * (dXomega_col - dp_hatXv_col) + matGradMult(dRtranspose, Xomega_col_minus_p_cross_Xv_col)).eval();
    auto dRtransposeXv_col = (Rtranspose * dXv_col + matGradMult(dRtranspose, Xv_col)).eval();

    setSubMatrixGradient(ret, dXomega_transformed_col, Xomega_rows, col_array, X.rows());
    setSubMatrixGradient(ret, dRtransposeXv_col, Xv_rows, col_array, X.rows());
  }
  return ret;
}
Example #18
/**
 * @function plan
 * @brief Plans a path from _startConfig toward _goalPose, seeded with guiding nodes
 */
bool JG_RRT::plan( const Eigen::VectorXd &_startConfig,
                   const Eigen::Transform<double, 3, Eigen::Affine> &_goalPose,
                   const std::vector< Eigen::VectorXd > &_guidingNodes,
                   std::vector<Eigen::VectorXd> &path )
{
    /** Store information */
    this->startConfig = _startConfig;
    this->goalPose = _goalPose;
    this->goalPosition = _goalPose.translation();


    //-- Initialize the search tree
    addNode( startConfig, -1 );

    //-- Add the guiding nodes
    addGuidingNodes( _guidingNodes );

    double p;
    while( goalDistVector[activeNode] > distanceThresh )
    {
        //-- Generate the probability
        p = RANDNM( 0.0, 1.0 );

        //-- Apply either extension to goal (J) or random connection
        if( p < p_goal )
        {
            if( extendGoal() == true )
            {
                break;
            }
        }
        else
        {
            tryStep(); /*extendRandom();*/
        }

        // Check if we are still inside
        if( configVector.size() > maxNodes )
        {   cout<<"-- Exceeded "<<maxNodes<<" allowed - ws_dist: "<<goalDistVector[rankingVector[0]]<<endl;
            break;
        }
    }

    //-- If a path is found
    if( goalDistVector[activeNode] < distanceThresh )
    {   tracePath( activeNode, path );
        cout<<"JG - Got a path! - nodes: "<<path.size()<<" tree size: "<<configVector.size()<<endl;
        return true;
    }
    else
    {   cout<<"--(!) JG :No successful path found! "<<endl;
        return false;
    }
}
Example #19
 CalibrateKinectCheckerboard()
   : nh_("~"), it_(nh_), calibrated(false)
 {
   // Load parameters from the server.
   nh_.param<std::string>("fixed_frame", fixed_frame, "/base_link");
   nh_.param<std::string>("camera_frame", camera_frame, "/camera_link");
   nh_.param<std::string>("target_frame", target_frame, "/calibration_pattern");
   nh_.param<std::string>("tip_frame", tip_frame, "/gripper_link");
   
   nh_.param<int>("checkerboard_width", checkerboard_width, 6);
   nh_.param<int>("checkerboard_height", checkerboard_height, 7);
   nh_.param<double>("checkerboard_grid", checkerboard_grid, 0.027);
   
   // Set pattern detector sizes
   pattern_detector_.setPattern(cv::Size(checkerboard_width, checkerboard_height), checkerboard_grid, CHESSBOARD);
   
   transform_.translation().setZero();
   transform_.matrix().topLeftCorner<3, 3>() = Quaternionf().setIdentity().toRotationMatrix();
   
   // Create subscriptions
   info_sub_ = nh_.subscribe("/camera/rgb/camera_info", 1, &CalibrateKinectCheckerboard::infoCallback, this);
   
   // Also publishers
   pub_ = it_.advertise("calibration_pattern_out", 1);
   detector_pub_ = nh_.advertise<pcl::PointCloud<pcl::PointXYZ> >("detector_cloud", 1);
   physical_pub_ = nh_.advertise<pcl::PointCloud<pcl::PointXYZ> >("physical_points_cloud", 1);
   
   // Create ideal points
   ideal_points_.push_back( pcl::PointXYZ(0, 0, 0) );
   ideal_points_.push_back( pcl::PointXYZ((checkerboard_width-1)*checkerboard_grid, 0, 0) );
   ideal_points_.push_back( pcl::PointXYZ(0, (checkerboard_height-1)*checkerboard_grid, 0) );
   ideal_points_.push_back( pcl::PointXYZ((checkerboard_width-1)*checkerboard_grid, (checkerboard_height-1)*checkerboard_grid, 0) );
   
   // Create proper gripper tip point
   nh_.param<double>("gripper_tip_x", gripper_tip.point.x, 0.0);
   nh_.param<double>("gripper_tip_y", gripper_tip.point.y, 0.0);
   nh_.param<double>("gripper_tip_z", gripper_tip.point.z, 0.0);
   gripper_tip.header.frame_id = tip_frame;
   
   ROS_INFO("[calibrate] Initialized.");
 }
Example #20
template <typename PointT, typename Scalar> double
pcl::getPrincipalTransformation (const pcl::PointCloud<PointT> &cloud, 
                                 Eigen::Transform<Scalar, 3, Eigen::Affine> &transform)
{
  EIGEN_ALIGN16 Eigen::Matrix<Scalar, 3, 3> covariance_matrix;
  Eigen::Matrix<Scalar, 4, 1> centroid;
  
  pcl::computeMeanAndCovarianceMatrix (cloud, covariance_matrix, centroid);

  EIGEN_ALIGN16 Eigen::Matrix<Scalar, 3, 3> eigen_vects;
  Eigen::Matrix<Scalar, 3, 1> eigen_vals;
  pcl::eigen33 (covariance_matrix, eigen_vects, eigen_vals);

  double rel1 = eigen_vals.coeff (0) / eigen_vals.coeff (1);
  double rel2 = eigen_vals.coeff (1) / eigen_vals.coeff (2);
  
  transform.translation () = centroid.head (3);
  transform.linear () = eigen_vects;
  
  return (std::min (rel1, rel2));
}
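A hedged usage sketch: applying the inverse of the returned transform aligns a cloud with its principal axes, and the return value is the smaller of the two eigenvalue ratios computed above. Header locations may vary across PCL versions:

#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/common/transforms.h>

void alignToPrincipalAxes(const pcl::PointCloud<pcl::PointXYZ> &cloud,
                          pcl::PointCloud<pcl::PointXYZ> &aligned) {
  Eigen::Affine3f principal;
  double ratio = pcl::getPrincipalTransformation(cloud, principal);
  (void)ratio;  // min of the two eigenvalue ratios, as returned above
  pcl::transformPointCloud(cloud, aligned, principal.inverse());
}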
Example #21
/**
 * @function BasicPlan
 * @brief Grows the tree until the workspace heuristic drops below threshold
 */
bool M_RRT::BasicPlan( std::vector<Eigen::VectorXd> &path, 
			Eigen::VectorXd &_startConfig, 
			Eigen::Transform<double, 3, Eigen::Affine> &_goalPose, 
                        Eigen::Transform<double, 3, Eigen::Affine> _TWBase,
                        Eigen::Transform<double, 3, Eigen::Affine> _Tee )
{
  printf("Basic Plan \n");

  /** Store information */
  this->startConfig = _startConfig;
  this->goalPose = _goalPose;
  this->goalPosition = _goalPose.translation();
  this->TWBase = _TWBase;
  this->Tee = _Tee;

  //-- Initialize the search tree
  addNode( startConfig, -1 );

  //-- Calculate the heuristicThreshold
  heuristicThresh = w1*distThresh + w2*fabs( cos( xAlignThresh ) - 1 ) + w3*fabs( cos( yAlignThresh ) );

  //-- Let's start the loop
  double p;
  double heuristic = heuristicVector[0];
  int gc = 0; int rc = 0;
  while( heuristic > heuristicThresh )
   { 
     //-- Probability
     p = rand()%100; 

     //-- Either extends towards goal or random
     if( p < pGoal )
       { printf("Goal \n");extendGoal(); gc++; }
     else
       { printf("Random \n"); extendRandom(); rc++;}

     //-- If bigger than maxNodes, get out loop 
     if( maxNodes > 0 && configVector.size() > maxNodes )
       { 
         cout<<"** Exceeded "<<maxNodes<<" MinCost: "<<heuristicVector[minHeuristicIndex()]<<"MinRankingCost: "<<heuristicVector[rankingVector[0]]<<endl;
         printf("Goal counter: %d, random counter: %d \n", gc, rc );
         return false; }

     heuristic = heuristicVector[ rankingVector[0] ];
   }
  printf("Goal counter: %d, random counter: %d \n", gc, rc );
  printf( "-- Plan successfully generated with %d nodes \n", configVector.size() );
  tracePath( activeNode, path );
  return true;
}
Example #22
//DEPRECATED???
double
NDTMatcherFeatureD2D::scoreNDT(std::vector<NDTCell*> &sourceNDT, NDTMap &targetNDT,
        Eigen::Transform<double,3,Eigen::Affine,Eigen::ColMajor>& T)
{
    NUMBER_OF_ACTIVE_CELLS = 0;
    double score_here = 0;
    double det = 0;
    bool exists = false;
    NDTCell *cell;
    Eigen::Matrix3d covCombined, icov;
    Eigen::Vector3d meanFixed;
    Eigen::Vector3d meanMoving;
    Eigen::Matrix3d R = T.rotation();
    std::vector<std::pair<unsigned int, double> > scores;
    for(unsigned int j=0; j<_corr.size(); j++)
    {
        unsigned int i = _corr[j].second;
        if (_corr[j].second >= (int)sourceNDT.size())
        {
            std::cout << "second correspondance : " << _corr[j].second << ", " << sourceNDT.size() << std::endl;
        }
        if (sourceNDT[i] == NULL)
        {
            std::cout << "sourceNDT[i] == NULL!" << std::endl;
        }
        meanMoving = T*sourceNDT[i]->getMean();

        cell = targetNDT.getCellIdx(_corr[j].first);
        {

            if(cell == NULL)
            {
                std::cout << "cell== NULL!!!" << std::endl;
            }
            else
            {
                if(cell->hasGaussian_)
                {
                    meanFixed = cell->getMean();
                    covCombined = cell->getCov() + R.transpose()*sourceNDT[i]->getCov()*R;
                    covCombined.computeInverseAndDetWithCheck(icov,det,exists);
                    if(!exists) continue;
                    double l = (meanMoving-meanFixed).dot(icov*(meanMoving-meanFixed));
                    if(l*0 != 0) continue;
                    if(l > 120) continue;

                    double sh = -lfd1*(exp(-lfd2*l/2));

                    if(fabsf(sh) > 1e-10)
                    {
                        NUMBER_OF_ACTIVE_CELLS++;
                    }
                    scores.push_back(std::pair<unsigned int, double>(j, sh));
                    score_here += sh;
                    //score_here += l;
                }
            }
        }
    }

    if (_trimFactor == 1.)
    {
        return score_here;
    }
    else
    {
        // Determine the score value
        if (scores.empty()) // TODO, this happens(!), why??!??
            return score_here;

        score_here = 0.;
        unsigned int index = static_cast<unsigned int>(_trimFactor * (scores.size() - 1));
        //	std::nth_element (scores.begin(), scores.begin()+index, scores.end(), sort_scores()); //boost::bind(&std::pair<unsigned int, double>::second, _1) < boost::bind(&std::pair<unsigned int, double>::second, _2));
        std::nth_element (scores.begin(), scores.begin()+index, scores.end(), boost::bind(&std::pair<unsigned int, double>::second, _1) < boost::bind(&std::pair<unsigned int, double>::second, _2));
        std::fill(_goodCorr.begin(), _goodCorr.end(), false);
        //	std::cout << "_goodCorr.size() : " << _goodCorr.size() << " scores.size() : " << scores.size() << " index : " << index << std::endl;
        for (unsigned int i = 0; i < _goodCorr.size(); i++)
        {
            if (i <= index)
            {
                score_here += scores[i].second;
                _goodCorr[scores[i].first] = true;
            }
        }
        return score_here;
    }
}
Example #23
int main(int argc, char** argv){
  ros::init(argc, argv, "workspace_transformation");

  ros::NodeHandle node;
  ros::Rate loop_rate(loop_rate_in_hz);


  // Topics
  //  Publishers
  ros::Publisher pub = node.advertise<geometry_msgs::PoseStamped>("poseSlaveWorkspace", 1);
  ros::Publisher pub_poseSlaveWSOrigin = node.advertise<geometry_msgs::PoseStamped>("poseSlaveWorkspaceOrigin", 1);
  ros::Publisher pub_set_camera_pose = node.advertise<geometry_msgs::PoseStamped>("Set_ActiveCamera_Pose", 1);
  pub_OmniForceFeedback = node.advertise<geometry_msgs::Vector3>("set_forces", 1);
  //  Subscribers
  ros::Subscriber sub_PoseMasterWS = node.subscribe("poseMasterWorkspace", 1, &PoseMasterWSCallback);
  ros::Subscriber sub_BaseMasterWS = node.subscribe("baseMasterWorkspace", 1, &BaseMasterWSCallback);
  ros::Subscriber sub_BaseSlaveWS = node.subscribe("baseSlaveWorkspace", 1, &BaseSlaveWSCallback);
  ros::Subscriber sub_OriginSlaveWS = node.subscribe("originSlaveWorkspace", 1, &OriginSlaveWSCallback); 
  ros::Subscriber sub_scale = node.subscribe("scale", 1, &scaleCallback);
  ros::Subscriber subOmniButtons = node.subscribe("button", 1, &omniButtonCallback);
  ros::Subscriber sub_HapticAngles = node.subscribe("angles", 1, &HapticAnglesCallback);
  //  Services
  ros::ServiceServer service_server_algorithm = node.advertiseService("set_algorithm", algorithmSrvCallback);

  
  // INITIALIZATION ------------------------------------------------------------------------
  
  //  Haptic
  omni_white_button_pressed = omni_grey_button_pressed = first_haptic_angle_read = false;  
  
  //  Algorithm
  algorithm_type = 0;

  for (unsigned int i=0; i<3; i++)	scale[i] = 1.0;  
  
  m_init = s_init = mi_init = s0_init = algorithm_set = false;

  dm << 0,0,0;
  ds << 0,0,0;
  pm_im1 << 0,0,0;
  ps_im1 << 0,0,0;
  vm_i << 0,0,0;
  
  ps_0 << 0.0, 0.0, 0.0;
  quats_0.x() = quats_0.y() = quats_0.z() = 0.0;
  quats_0.w() = 1.0;
  Rs_0 = quats_0.toRotationMatrix();  

  //   Auxiliary pose
  geometry_msgs::PoseStamped outputPose;  
  outputPose.pose.position.x = outputPose.pose.position.y = outputPose.pose.position.z = 0.0;
  outputPose.pose.orientation.x = outputPose.pose.orientation.y = outputPose.pose.orientation.z = 0.0;
  outputPose.pose.orientation.w = 1.0;  
  
  //   Workspace boundaries
  Xmin = -5.0;
  Ymin = -5.0;
  Zmin = 0.0;
  Xmax = 5.0;
  Ymax = 5.0;
  Zmax = 2.0;
  
  // Default camera pose
  geometry_msgs::PoseStamped cameraPose;  
  cameraPose.pose.position.x = cameraPose.pose.position.y = cameraPose.pose.position.z = 0.0;
  cameraPose.pose.orientation.x = cameraPose.pose.orientation.y = cameraPose.pose.orientation.z = 0.0;
  cameraPose.pose.orientation.w = 1.0;
  
  Eigen::Vector3d origin_cam = 10.0 * Eigen::Vector3d(1.0, 0.0, 0.5);
  T_camPose_S.translation()  = Eigen::Vector3d(0.0, 0.0, 1.0) + origin_cam;

  Eigen::Vector3d eigen_cam_axis_z = origin_cam.normalized();
  Eigen::Vector3d eigen_cam_axis_x = ( Eigen::Vector3d::UnitZ().cross( eigen_cam_axis_z ) ).normalized();
  Eigen::Vector3d eigen_cam_axis_y = ( eigen_cam_axis_z.cross( eigen_cam_axis_x ) ).normalized();

  T_camPose_S.linear() << eigen_cam_axis_x(0), eigen_cam_axis_y(0), eigen_cam_axis_z(0), 
			  eigen_cam_axis_x(1), eigen_cam_axis_y(1), eigen_cam_axis_z(1),
			  eigen_cam_axis_x(2), eigen_cam_axis_y(2), eigen_cam_axis_z(2);
  
  // Time management
  period = 1.0/(double)loop_rate_in_hz;
  timeval past_time_, current_time_;  
  gettimeofday(&current_time_, NULL);  
  time_increment_ = 0;
  
  // File management
  std::ofstream WTdataRecord;  
  WTdataRecord.open("/home/users/josep.a.claret/data/WTdataRecord.txt", std::ofstream::trunc);
    
  
  // UPDATE -------------------------------------------------------------------------------
  while (ros::ok())
  {
    if (m_init && s_init && mi_init)
    {
      // Time management
//       past_time_ = current_time_;
//       gettimeofday(&current_time_, NULL);
//       time_increment_ = ( (current_time_.tv_sec*1e6 + current_time_.tv_usec) - (past_time_.tv_sec*1e6 + past_time_.tv_usec) ) / 1e6;
      time_increment_ = period;
      
      // Velocity computation
      vm_i = (pm_i - pm_im1)/time_increment_; 
      
      ws_tf_alg_scaling_Bubble_smoothposrotRateControl_camRateControl_FormatJAC();
            
            
// 	std::cout	<< "-- VISUALIZATION DATA ---------------------------------------------------" << std::endl;
//	std::cout	<< "alg: " << algorithm_type 
// 	std::cout	<< "    time inc: " << time_increment_*1000 << std::endl;
// 	std::cout	<< "    pm_0:     " << 1000*pm_0.transpose()<< std::endl;
// 	std::cout	<< "    pm_im1:   " << 1000*pm_im1.transpose()<< std::endl;
// 	std::cout	<< "    pm_i:     " << 1000*pm_i.transpose()<< std::endl;
// 	std::cout	<< "    ps_0:     " << 1000*ps_0.transpose()<< std::endl;
// 	std::cout	<< "    ps_im1:   " << 1000*ps_im1.transpose()<< std::endl;
// 	std::cout	<< "    ps_i:     " << 1000*ps_i.transpose()<< std::endl;
// 	std::cout	<< "    dm:       " << 1000*dm.transpose()<< std::endl;
// 	std::cout	<< "    ds:       " << 1000*ds.transpose()<< std::endl;
// 	std::cout	<< "    vm_i:     " << 1000*vm_i.transpose()<< std::endl;
      
//       std::cout << "Rm_0" << std::endl;
//       std::cout << Rm_0 << std::endl;
//       std::cout << "Rm" << std::endl;
//       std::cout << Rm << std::endl;
//       std::cout << "Rm * ds" << std::endl;
//       std::cout << Rm * ds << std::endl;      
//       std::cout << "Rs.transpose() * Rm * ds" << std::endl;
//       std::cout << Rs.transpose() * Rm * ds << std::endl;        
//       
//       std::cout << "Rm_i" << std::endl;
//       std::cout << Rm_i << std::endl; 
//       std::cout << "Rs_i" << std::endl;
//       std::cout << Rs_i << std::endl;
//       std::cout << "quat Rs_i" << std::endl;
//       std::cout << quats_i.x() << " " << quats_i.y() << " " << quats_i.z() << " " << quats_i.w() << std::endl; 

      pm_im1 = pm_i;
      ps_im1 = ps_i;

     
      // Send data ***********************************************
      //  Slave pose
      outputPose.header.stamp = ros::Time::now();
      outputPose.pose.position.x = ps_i(0,0);
      outputPose.pose.position.y = ps_i(1,0);
      outputPose.pose.position.z = ps_i(2,0);
      outputPose.pose.orientation.x = quats_i.x();
      outputPose.pose.orientation.y = quats_i.y();
      outputPose.pose.orientation.z = quats_i.z();
      outputPose.pose.orientation.w = quats_i.w();
      pub.publish(outputPose);
      
      //  Slave origin pose
      outputPose.header.stamp = ros::Time::now();
      outputPose.pose.position.x = ps_0(0,0);
      outputPose.pose.position.y = ps_0(1,0);
      outputPose.pose.position.z = ps_0(2,0);
      outputPose.pose.orientation.x = quats_0.x();
      outputPose.pose.orientation.y = quats_0.y();
      outputPose.pose.orientation.z = quats_0.z();
      outputPose.pose.orientation.w = quats_0.w();      
      pub_poseSlaveWSOrigin.publish(outputPose);
    }
    
    // Camera pose
    cameraPose.header.stamp = ros::Time::now();
    cameraPose.pose.position.x = T_camPose_S.translation()(0);
    cameraPose.pose.position.y = T_camPose_S.translation()(1);
    cameraPose.pose.position.z = T_camPose_S.translation()(2);
    cameraPose.pose.orientation.x = Quaternion<double>(T_camPose_S.linear()).x();
    cameraPose.pose.orientation.y = Quaternion<double>(T_camPose_S.linear()).y();
    cameraPose.pose.orientation.z = Quaternion<double>(T_camPose_S.linear()).z();
    cameraPose.pose.orientation.w = Quaternion<double>(T_camPose_S.linear()).w();
    pub_set_camera_pose.publish(cameraPose);    
    

    ros::spinOnce();
    loop_rate.sleep();
  }
  
  WTdataRecord.close();
  
  return 0;
}
Example #24
File: views.cpp  Project: cheind/aam
#include <iostream>

TEST_CASE("views")
{
    aam::MatrixX shapes(2, 4);
    shapes << 1, 2, 3, 4, 
              5, 6, 7, 8;

    aam::MatrixX shapesSeparated(2, 4);
    shapesSeparated << 1, 2, 5, 6,
                       3, 4, 7, 8;

    REQUIRE(shapesSeparated.block(0, 0, 2, 2).isApprox(aam::toSeparatedView<aam::Scalar>(shapes.row(0))));
    REQUIRE(shapesSeparated.block(0, 2, 2, 2).isApprox(aam::toSeparatedView<aam::Scalar>(shapes.row(1))));

    // Test with affine transforms
    Eigen::Transform<aam::Scalar, 2, Eigen::AffineCompact> t;
    t = Eigen::Translation<aam::Scalar, 2>(0.5, 0.5) * Eigen::Scaling(aam::Scalar(2));
    
    auto x = aam::toSeparatedView<aam::Scalar>(shapes.row(0)).rowwise().homogeneous();
    aam::MatrixX r = x * t.matrix().transpose();
    
    aam::MatrixX shouldBe(2, 2);
    shouldBe << 2.5, 4.5, 6.5, 8.5;

    REQUIRE(r.isApprox(shouldBe));

    aam::MatrixX shouldBeArray(1, 4);
    shouldBeArray << 2.5, 4.5, 6.5, 8.5;
    REQUIRE(aam::toInterleavedView<aam::Scalar>(r).isApprox(shouldBeArray));
}
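Why the transpose trick in this test works: with Eigen::AffineCompact the stored matrix is the compact [linear | translation] block (2x3 in 2D), so row-vector points in homogeneous form (N x 3) times its transpose come out as transformed points (N x 2). A standalone sketch:

#include <Eigen/Geometry>
#include <iostream>

int main() {
  Eigen::Transform<double, 2, Eigen::AffineCompact> t;
  t = Eigen::Translation2d(0.5, 0.5) * Eigen::Scaling(2.0);

  Eigen::Matrix2d pts;  // one point per row
  pts << 1, 5,
         2, 6;
  Eigen::MatrixXd out = pts.rowwise().homogeneous() * t.matrix().transpose();
  std::cout << out << "\n";  // rows: (2.5, 10.5) and (4.5, 12.5)
}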
Example #25
//TODO: This is ugly and hacky and needs to get refactored
void OverlayManager::asyncUpdate()
{
	boost::lock_guard<boost::mutex> guard(mtx_);

	vr::TrackedDeviceIndex_t controller1 = -1;
	vr::TrackedDeviceIndex_t controller2 = -1;

	vr::VRControllerState_t hmdState;
	vr::VRControllerState_t controller1State;
	vr::VRControllerState_t controller2State;

	vr::TrackedDevicePose_t hmdPose;
	vr::TrackedDevicePose_t controller1Pose;
	vr::TrackedDevicePose_t controller2Pose;

	vr::HmdMatrix34_t overlayTransform;
	vr::HmdVector2_t overlayCenter;

	vr::ETrackingUniverseOrigin origin;

	

	unsigned int width, height;

	

	//Find the controllers
	vr::IVRSystem* vrSys = vr::VRSystem();
	vr::IVRCompositor* vrComp = vr::VRCompositor();
	vr::IVROverlay* vrOvrly = vr::VROverlay();

	for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++)
	{
		switch (vrSys->GetTrackedDeviceClass(i))
		{
		case vr::TrackedDeviceClass_Controller:
			if (controller1 == -1)
			{
				controller1 = i;
			}
			if (controller1 >= 0 && i !=controller1)
			{
				controller2 = i;
			}
			if (controller2 >= 0)
			{
				break;
			}
		}
	}


	int count = 0;
	for (std::vector<std::shared_ptr<Overlay>>::iterator it = m_overlayVec.begin(); it != m_overlayVec.end(); ++it)
	{
		if (vrSys && controller1 >= 0 && (*it)->isVisible())
		{
			//Set padding of the overlay based on scale
			float padding = 0.5f * ((float)(*it)->getScale() / 100.0f);
			float z_padding = 0.1f;

			//Get the controller pose information relative to tracking space
			vrSys->GetControllerStateWithPose(vrComp->GetTrackingSpace(), controller1, &controller1State, sizeof(controller1State), &controller1Pose);
			vrSys->GetControllerStateWithPose(vrComp->GetTrackingSpace(), controller2, &controller2State, sizeof(controller2State), &controller2Pose);
			vrSys->GetControllerStateWithPose(vrComp->GetTrackingSpace(), vr::k_unTrackedDeviceIndex_Hmd, &hmdState, sizeof(hmdState), &hmdPose);
			
			//Center of the overlay adjusted for scale
			overlayCenter.v[0] = 0.5f;// * ((float)(*it)->getScale() / 100.0f);
			overlayCenter.v[1] = 0.5f;// * ((float)(*it)->getScale() / 100.0f);

			//Get the overlay transform relative to tracking space
			vr::EVROverlayError err = vr::VROverlayError_None;
			if (err = vrOvrly->GetTransformForOverlayCoordinates((*it)->getOverlayHandle(), vrComp->GetTrackingSpace(), overlayCenter, &overlayTransform))
			{
				DBOUT("Error with overlay!!" << err << std::endl);
			}

			//Converts Controller world tracking transform matrix to a transform matrix relative to the overlay
			Eigen::Transform<float, 3, Eigen::Affine> controller1Transform;
			Eigen::Transform<float, 3, Eigen::Affine> controller2Transform;
			Eigen::Transform<float, 3, Eigen::Affine> overlayTrans;

			for (int i = 0; i < 3; ++i)
			{
				for (int j = 0; j < 4; ++j)
				{
					controller1Transform(i, j) = controller1Pose.mDeviceToAbsoluteTracking.m[i][j];
					controller2Transform(i, j) = controller2Pose.mDeviceToAbsoluteTracking.m[i][j];
					overlayTrans(i, j) = overlayTransform.m[i][j];
				}
			}
			Eigen::Matrix<float, 4, 4> overlayInverse = overlayTrans.matrix().inverse();
			Eigen::Matrix<float, 4, 4> controller1OverlayTransform = overlayInverse * controller1Transform.matrix();
			Eigen::Matrix<float, 4, 4> controller2OverlayTransform = overlayInverse * controller2Transform.matrix();

			//Boolean values for if the controller is within the bounds of the overlay based on the padding
			//z-padding is used for the depth across the face of the overlay
			bool controller1InOverlay = (controller1OverlayTransform(0, 3) < padding && controller1OverlayTransform(0, 3) > -padding) &&
										(controller1OverlayTransform(1, 3) < padding && controller1OverlayTransform(1, 3) > -padding) &&
										(controller1OverlayTransform(2, 3) < z_padding && controller1OverlayTransform(2, 3) > -z_padding);
			
			bool controller2InOverlay = (controller2OverlayTransform(0, 3) < padding && controller2OverlayTransform(0, 3) > -padding) &&
										(controller2OverlayTransform(1, 3) < padding && controller2OverlayTransform(1, 3) > -padding) &&
										(controller2OverlayTransform(2, 3) < z_padding && controller2OverlayTransform(2, 3) > -z_padding);;

			//If the controller is within the bounds the center of the overlay
			if (controller1InOverlay || controller2InOverlay)
			{
				//Buzz controller  -- Not working fix
				vr::VRSystem()->TriggerHapticPulse(controller1, 2, 2000);

				if (controller1InOverlay && (*it)->getTracking() != 2)
				{
					//If controller1 is not currently tracking and controller2 isn't tracking overlay
					if(controller1State.rAxis[1].x > 0.75f && (m_controller1Tracking == NULL || m_controller1Tracking == (*it).get()) && m_controller2Tracking != (*it).get())
					{
						
						TrackingUpdate(it, controller1State, controller1Pose, controller1InOverlay, vrSys, vrComp);
						m_controller1Tracking = (*it).get();
						emit textureUpdated(count);
						
					}

					//If trigger is released and was tracking, reset pointer to null;
					if (controller1State.rAxis[1].x < 0.75f && m_controller1Tracking != NULL)
					{
						
						m_controller1Tracking = NULL;
					}


					//If touchpad is pressed in overlay w/ debounce check
					if (((controller1State.ulButtonPressed & vr::ButtonMaskFromId(vr::k_EButton_SteamVR_Touchpad)) == vr::ButtonMaskFromId(vr::k_EButton_SteamVR_Touchpad)) &&
						!m_controller1TouchPressed)
					{
						m_controller1TouchPressed = true;
						//bottom left - decrease opacity
						if (controller1State.rAxis[0].x < 0 && controller1State.rAxis[0].y < 0)
							(*it)->setTransparancy((*it)->getTransparancy() - 5);
						//bottom right - increase opacity
						if (controller1State.rAxis[0].x > 0 && controller1State.rAxis[0].y < 0)
							(*it)->setTransparancy((*it)->getTransparancy() + 5);
						//top left - decrease scale
						if (controller1State.rAxis[0].x < 0 && controller1State.rAxis[0].y > 0)
							(*it)->setScale((*it)->getScale() - 5);
						//top right - increase scale
						if (controller1State.rAxis[0].x > 0 && controller1State.rAxis[0].y > 0)
							(*it)->setScale((*it)->getScale() + 5);

						emit textureUpdated(count);
					}
					else if (((controller1State.ulButtonPressed & vr::ButtonMaskFromId(vr::k_EButton_SteamVR_Touchpad)) != vr::ButtonMaskFromId(vr::k_EButton_SteamVR_Touchpad)) &&
						m_controller1TouchPressed)
					{
						m_controller1TouchPressed = false;
					}

					//If SideButton is pressed
					else if (((controller1State.ulButtonPressed & vr::ButtonMaskFromId(vr::k_EButton_Grip)) == vr::ButtonMaskFromId(vr::k_EButton_Grip)) &&
						!m_controller1GripPressed)
					{
						m_controller1GripPressed = true;
						(*it)->setTracking(((*it)->getTracking() + 1) % 4);
						emit textureUpdated(count);
					}

					else if (((controller1State.ulButtonPressed & vr::ButtonMaskFromId(vr::k_EButton_Grip)) != vr::ButtonMaskFromId(vr::k_EButton_Grip)) &&
						m_controller1GripPressed)
					{
						m_controller1GripPressed = false;
					}

					
				}
				if (controller2InOverlay && (*it)->getTracking() != 3)
				{
					//If controller2 is not currently tracking and controller1 isn't tracking overlay			
					if (controller2State.rAxis[1].x > 0.75f && (m_controller2Tracking == NULL || m_controller2Tracking == (*it).get()) && m_controller1Tracking != (*it).get())
					{
						
						TrackingUpdate(it, controller2State, controller2Pose, controller2InOverlay, vrSys, vrComp);
						m_controller2Tracking = (*it).get();
						emit textureUpdated(count);
					}
					if (controller2State.rAxis[1].x < 0.75f && m_controller2Tracking != NULL)
					{
						m_controller2Tracking = NULL;
					}

					//If touchpad is pressed in overlay w/ debounce check
					if (((controller2State.ulButtonPressed & vr::ButtonMaskFromId(vr::k_EButton_SteamVR_Touchpad)) == vr::ButtonMaskFromId(vr::k_EButton_SteamVR_Touchpad)) &&
						!m_controller2TouchPressed)
					{
						m_controller2TouchPressed = true;
						//bottom left - decrease opacity
						if (controller2State.rAxis[0].x < 0 && controller2State.rAxis[0].y < 0)
							(*it)->setTransparancy((*it)->getTransparancy() - 5);
						//bottom right - increase opacity
						if (controller2State.rAxis[0].x > 0 && controller2State.rAxis[0].y < 0)
							(*it)->setTransparancy((*it)->getTransparancy() + 5);
						//top left - decrease scale
						if (controller2State.rAxis[0].x < 0 && controller2State.rAxis[0].y > 0)
							(*it)->setScale((*it)->getScale() - 5);
						//top right - increase scale
						if (controller2State.rAxis[0].x > 0 && controller2State.rAxis[0].y > 0)
							(*it)->setScale((*it)->getScale() + 5);

						emit textureUpdated(count);
					}
					else if (((controller2State.ulButtonPressed & vr::ButtonMaskFromId(vr::k_EButton_SteamVR_Touchpad)) != vr::ButtonMaskFromId(vr::k_EButton_SteamVR_Touchpad)) &&
						m_controller2TouchPressed)
					{
						m_controller2TouchPressed = false;
					}
					//If SideButton is pressed
					else if (((controller2State.ulButtonPressed & vr::ButtonMaskFromId(vr::k_EButton_Grip)) == vr::ButtonMaskFromId(vr::k_EButton_Grip)) &&
						!m_controller2GripPressed)
					{
						m_controller2GripPressed = true;
						(*it)->setTracking(((*it)->getTracking() + 1) % 4);
					}
					else if (((controller2State.ulButtonPressed & vr::ButtonMaskFromId(vr::k_EButton_Grip)) != vr::ButtonMaskFromId(vr::k_EButton_Grip)) &&
						m_controller2GripPressed)
					{
						m_controller2GripPressed = false;
					}


				}

				
			} //end controller in overlay if check

		} //end VR check if
		count++;
	} //end iterator loop for overlays 



	m_timer.expires_from_now(boost::posix_time::milliseconds(5));
	m_timer.async_wait(boost::bind(&OverlayManager::asyncUpdate, this));
}
Example #26
void OverlayManager::TrackingUpdate(std::vector<std::shared_ptr<Overlay>>::iterator it, vr::VRControllerState_t controllerState, vr::TrackedDevicePose_t controllerPose, bool controllerInOverlay, vr::IVRSystem* vrSys, vr::IVRCompositor* vrComp)
{
	vr::TrackedDeviceIndex_t controller1 = -1;
	vr::TrackedDeviceIndex_t controller2 = -1;

	vr::VRControllerState_t hmdState;
	vr::VRControllerState_t controller1State;
	vr::VRControllerState_t controller2State;

	vr::TrackedDevicePose_t hmdPose;
	vr::TrackedDevicePose_t controller1Pose;
	vr::TrackedDevicePose_t controller2Pose;

	

	for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++)
	{
		switch (vrSys->GetTrackedDeviceClass(i))
		{
		case vr::TrackedDeviceClass_Controller:
			if (controller1 == -1)
			{
				controller1 = i;
			}
			if (controller1 >= 0 && i != controller1)
			{
				controller2 = i;
			}
			if (controller2 >= 0)
			{
				break;
			}
		}
	}

	vrSys->GetControllerStateWithPose(vrComp->GetTrackingSpace(), controller1, &controller1State, sizeof(controller1State), &controller1Pose);
	vrSys->GetControllerStateWithPose(vrComp->GetTrackingSpace(), controller2, &controller2State, sizeof(controller2State), &controller2Pose);
	vrSys->GetControllerStateWithPose(vrComp->GetTrackingSpace(), vr::k_unTrackedDeviceIndex_Hmd,  &hmdState, sizeof(hmdState), &hmdPose);


	if (controllerInOverlay) //controller trigger squeezed, in overlay and not being tracked to controller1
	{
		if ((*it)->getTracking() == 0)
		{
			(*it)->setOverlayMatrix(controllerPose.mDeviceToAbsoluteTracking);
		}
		else
		{
			//Must be same sized for matrix inverse calculation
			Eigen::Transform<float, 3, Eigen::Affine> trackedSource;
			Eigen::Transform<float, 3, Eigen::Affine> invertedSource;
			Eigen::Transform<float, 3, Eigen::Affine> controllerTransform;
			//Eigen::Transform<float, 3, Eigen::Affine> newTransform;
			vr::HmdMatrix34_t newPosition;
			memset(&newPosition, 0, sizeof(vr::HmdMatrix34_t));

			//HMD Calculation


			//Populate Eigen matrices
			for (unsigned i = 0; i < 3; ++i)
			{
				for (unsigned j = 0; j < 4; ++j)
				{
					
						if ((*it)->getTracking() == 1)
						{
							trackedSource(i, j) = hmdPose.mDeviceToAbsoluteTracking.m[i][j];
						}
						if ((*it)->getTracking() == 2)
						{
							trackedSource(i, j) = controller1Pose.mDeviceToAbsoluteTracking.m[i][j];
						}
						if ((*it)->getTracking() == 3)
						{
							trackedSource(i, j) = controller2Pose.mDeviceToAbsoluteTracking.m[i][j];
						}
						controllerTransform(i, j) = controllerPose.mDeviceToAbsoluteTracking.m[i][j];
				
					
				} //End Column loop
			}  //End Row loop

			
			Eigen::Matrix<float, 4, 4> invMatrix = trackedSource.matrix().inverse();
			Eigen::Matrix<float, 4, 4> newTransform;
			newTransform = invMatrix * controllerTransform.matrix();



			//Copy values from the new matrix into the openVR matrix
			for (unsigned i = 0; i < 3; ++i)
			{
				for (unsigned j = 0; j < 4; ++j)
				{
					if (i < 3)
					{
						newPosition.m[i][j] = newTransform(i, j);
					}
				} // end column loop
			} //end row loop

			//Maintain previous rotation


			(*it)->setOverlayMatrix(newPosition);
		} // end else (tracking check for non-spacial tracking)

	} //end controller check
}
Example #27
void IterativeClosestPoint::execute() {
    float error = std::numeric_limits<float>::max(), previousError;
    uint iterations = 0;

    // Get access to the two point sets
    PointSetAccess::pointer accessFixedSet = ((PointSet::pointer)getStaticInputData<PointSet>(0))->getAccess(ACCESS_READ);
    PointSetAccess::pointer accessMovingSet = ((PointSet::pointer)getStaticInputData<PointSet>(1))->getAccess(ACCESS_READ);

    // Get transformations of point sets
    AffineTransformation::pointer fixedPointTransform2 = SceneGraph::getAffineTransformationFromData(getStaticInputData<PointSet>(0));
    Eigen::Affine3f fixedPointTransform;
    fixedPointTransform.matrix() = fixedPointTransform2->matrix();
    AffineTransformation::pointer initialMovingTransform2 = SceneGraph::getAffineTransformationFromData(getStaticInputData<PointSet>(1));
    Eigen::Affine3f initialMovingTransform;
    initialMovingTransform.matrix() = initialMovingTransform2->matrix();

    // These matrices are Nx3
    MatrixXf fixedPoints = accessFixedSet->getPointSetAsMatrix();
    MatrixXf movingPoints = accessMovingSet->getPointSetAsMatrix();

    Eigen::Affine3f currentTransformation = Eigen::Affine3f::Identity();

    // Want to choose the smallest one as moving
    bool invertTransform = false;
    if(false && fixedPoints.cols() < movingPoints.cols()) {
        reportInfo() << "switching fixed and moving" << Reporter::end;
        // Switch fixed and moving
        MatrixXf temp = fixedPoints;
        fixedPoints = movingPoints;
        movingPoints = temp;
        invertTransform = true;

        // Apply initial transformations
        //currentTransformation = fixedPointTransform.getTransform();
        movingPoints = fixedPointTransform*movingPoints.colwise().homogeneous();
        fixedPoints = initialMovingTransform*fixedPoints.colwise().homogeneous();
    } else {
        // Apply initial transformations
        //currentTransformation = initialMovingTransform.getTransform();
        movingPoints = initialMovingTransform*movingPoints.colwise().homogeneous();
        fixedPoints = fixedPointTransform*fixedPoints.colwise().homogeneous();
    }
    do {
        previousError = error;
        MatrixXf movedPoints = currentTransformation*(movingPoints.colwise().homogeneous());

        // Match closest points using current transformation
        MatrixXf rearrangedFixedPoints = rearrangeMatrixToClosestPoints(
                fixedPoints, movedPoints);

        // Get centroids
        Vector3f centroidFixed = getCentroid(rearrangedFixedPoints);
        //reportInfo() << "Centroid fixed: " << Reporter::end;
        //reportInfo() << centroidFixed << Reporter::end;
        Vector3f centroidMoving = getCentroid(movedPoints);
        //reportInfo() << "Centroid moving: " << Reporter::end;
        //reportInfo() << centroidMoving << Reporter::end;

        Eigen::Transform<float, 3, Eigen::Affine> updateTransform = Eigen::Transform<float, 3, Eigen::Affine>::Identity();

        if(mTransformationType == IterativeClosestPoint::RIGID) {
            // Create correlation matrix H of the deviations from centroid
            MatrixXf H = (movedPoints.colwise() - centroidMoving)*
                    (rearrangedFixedPoints.colwise() - centroidFixed).transpose();

            // Do SVD on H
            Eigen::JacobiSVD<Eigen::MatrixXf> svd(H, Eigen::ComputeFullU | Eigen::ComputeFullV);

            // Estimate rotation as R=V*U.transpose()
            MatrixXf temp = svd.matrixV()*svd.matrixU().transpose();
            Matrix3f d = Matrix3f::Identity();
            d(2,2) = sign(temp.determinant());
            Matrix3f R = svd.matrixV()*d*svd.matrixU().transpose();

            // Estimate translation
            Vector3f T = centroidFixed - R*centroidMoving;

            updateTransform.linear() = R;
            updateTransform.translation() = T;
        } else {
            // Only translation
            Vector3f T = centroidFixed - centroidMoving;
            updateTransform.translation() = T;
        }

        // Update current transformation
        currentTransformation = updateTransform*currentTransformation;

        // Calculate RMS error
        MatrixXf distance = rearrangedFixedPoints - currentTransformation*(movingPoints.colwise().homogeneous());
        error = 0;
        for(uint i = 0; i < distance.cols(); i++) {
            error += pow(distance.col(i).norm(),2);
        }
        error = sqrt(error / distance.cols());

        iterations++;
        reportInfo() << "Error: " << error << Reporter::end;
        // To continue, change in error has to be above min error change and nr of iterations less than max iterations
    } while(previousError-error > mMinErrorChange && iterations < mMaxIterations);


    if(invertTransform){
        currentTransformation = currentTransformation.inverse();
    }

    mError = error;
    mTransformation->matrix() = currentTransformation.matrix();
}
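A self-contained sketch of the rigid (Kabsch/SVD) update used in the RIGID branch above, run on synthetic correspondences with a known ground-truth motion:

#include <Eigen/Dense>
#include <iostream>

int main() {
  // Four matched points; fixed = R_gt * moving + T_gt.
  Eigen::Matrix3Xf moving(3, 4);
  moving << 0, 1, 0, 0,
            0, 0, 1, 0,
            0, 0, 0, 1;
  Eigen::Matrix3f R_gt = Eigen::AngleAxisf(0.4f, Eigen::Vector3f::UnitZ()).toRotationMatrix();
  Eigen::Matrix3Xf fixed = (R_gt * moving).colwise() + Eigen::Vector3f(0.1f, 0.2f, 0.3f);

  // Centroids and correlation matrix of the centered point sets.
  Eigen::Vector3f cm = moving.rowwise().mean(), cf = fixed.rowwise().mean();
  Eigen::Matrix3f H = (moving.colwise() - cm) * (fixed.colwise() - cf).transpose();

  // SVD with the determinant correction, as in the example above.
  Eigen::JacobiSVD<Eigen::Matrix3f> svd(H, Eigen::ComputeFullU | Eigen::ComputeFullV);
  Eigen::Matrix3f d = Eigen::Matrix3f::Identity();
  d(2, 2) = (svd.matrixV() * svd.matrixU().transpose()).determinant() > 0 ? 1.f : -1.f;
  Eigen::Matrix3f R = svd.matrixV() * d * svd.matrixU().transpose();
  Eigen::Vector3f T = cf - R * cm;

  std::cout << "R (should match R_gt):\n" << R << "\nT: " << T.transpose() << "\n";
}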
Example #28
/**
 * @function balancePath
 * @brief Explores null-space motions that keep the end-effector position (nearly) fixed
 */
bool goWSOrient::balancePath( const Eigen::VectorXd &_startConfig, 
		     	      const Eigen::Transform<double, 3, Eigen::Affine> &_goalPose,
	          	      std::vector<Eigen::VectorXd> &path )
{

  srand( time(NULL) );
  //-- Copy input 
  startConfig = _startConfig;
  goalPose = _goalPose;
  goalPos = _goalPose.translation();

  //-- Create needed variables
  Eigen::VectorXd q; Eigen::VectorXd wsPos;
  Eigen::Transform<double, 3, Eigen::Affine> T;
  Eigen::MatrixXd Jc(3, 7); Eigen::MatrixXd Jcinv(7, 3);

  Eigen::MatrixXd delta_q;
  double ws_dist;

  //-- Initialize variables
  q = startConfig; path.push_back( q );
  ws_dist = DBL_MAX;

  //-- Loop
  int trials = 0;
    //-- Get ws position
    robinaLeftArm_fk( q, TWBase, Tee, T );
    wsPos = T.translation();

    Eigen::VectorXd wsOrient(3); 
    getRPY( T, wsOrient(0), wsOrient(1), wsOrient(2) );


    Eigen::VectorXd phi(7);
    Eigen::VectorXd dq(7);   
    Eigen::VectorXd qorig = q;   

      //-- Get the Jacobian
      robinaLeftArm_j( q, TWBase, Tee, Jc );

      //-- Get the pseudo-inverse(easy way)
      pseudoInv( 3, 7, Jc, Jcinv );
    int count = 0;
    for( int i = 0; i < 1000; i++ )
    {

     for( int j = 0; j < 7; j++ )
     { phi[j] = rand()%6400 / 10000.0 ; }

      //-- Get a null space projection displacement that does not affect the thing
      dq = ( Eigen::MatrixXd::Identity(7,7) - Jcinv*Jc )*phi;

      //-- Add to q 
      //q = qorig + dq;
      Eigen::VectorXd newq;
      newq = qorig + dq;

     //-- Calculate distance
     Eigen::Transform<double, 3, Eigen::Affine> Torig;
     Eigen::Transform<double, 3, Eigen::Affine> Tphi;
     robinaLeftArm_fk( newq, TWBase, Tee, Tphi );
     robinaLeftArm_fk( qorig, TWBase, Tee, Torig );
     double dist = ( Tphi.translation() - Torig.translation() ).norm();

     if( dist < 0.015 )
     { 
       count++;
       q = newq;
       printf("--> [%d] Distance to the original xyz is: %.3f \n", count, dist );
       //-- Push it 
       path.push_back( q );
     }

    }

  return true;
}
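A self-contained sketch of the null-space projection used above: dq = (I - pinv(J)*J) * phi moves the joints while, to first order, leaving the end-effector position unchanged, since J * dq is (numerically) zero:

#include <Eigen/Dense>
#include <iostream>

int main() {
  Eigen::MatrixXd J = Eigen::MatrixXd::Random(3, 7);  // stand-in position Jacobian
  Eigen::MatrixXd Jinv = J.completeOrthogonalDecomposition().pseudoInverse();
  Eigen::VectorXd phi = Eigen::VectorXd::Random(7);   // arbitrary joint-space motion
  Eigen::VectorXd dq = (Eigen::MatrixXd::Identity(7, 7) - Jinv * J) * phi;
  std::cout << "||J * dq|| = " << (J * dq).norm() << "\n";  // ~0
}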
Example #29
/**
 * @function straightPath
 * @brief Builds a straight path -- easy
 */
bool goWSOrient::straightPath( const Eigen::VectorXd &_startConfig, 
		     	       const Eigen::Transform<double, 3, Eigen::Affine> &_goalPose,
	          	       std::vector<Eigen::VectorXd> &path )
{
  //-- Copy input 
  startConfig = _startConfig;
  goalPose = _goalPose;
  goalPos = _goalPose.translation();

  //-- Create needed variables
  Eigen::VectorXd q; Eigen::VectorXd wsPos;
  Eigen::Transform<double, 3, Eigen::Affine> T;
  Eigen::MatrixXd Jc(6, 7); Eigen::MatrixXd Jcinv(7, 6);

  Eigen::MatrixXd delta_q;
  double ws_dist;

  //-- Initialize variables
  q = startConfig; path.push_back( q );
  ws_dist = DBL_MAX;

  //-- Loop
  int trials = 0;
  while( ws_dist > goalThresh )
  {
    //-- Get ws position
    robinaLeftArm_fk( q, TWBase, Tee, T );
    wsPos = T.translation();

    Eigen::VectorXd wsOrient(3); 
    getRPY( T, wsOrient(0), wsOrient(1), wsOrient(2) );

    //-- Get the Jacobian

    robinaLeftArm_jc( q, TWBase, Tee, Jc );

    std::cout<< " JC: " << std::endl << Jc << std::endl;

    for( int i = 3; i < 6;i++ )
    { for( int j = 0; j < 7; j++ )
      {
        Jc(i,j) = Jc(i,j)*0.001;
      }
    }

    std::cout<< " JC switch: " << std::endl << Jc << std::endl;

    //-- Get the pseudo-inverse(easy way)
    pseudoInv( 6, 7, Jc, Jcinv );

    std::cout<< " JCW Inv: " << std::endl << Jcinv << std::endl;

    Eigen::VectorXd goalOrient(3);
    getRPY( goalPose, goalOrient(0), goalOrient(1), goalOrient(2) ); 

    //-- Get delta (orientation + position)
    Eigen::VectorXd delta(6);
    delta.head(3) = (goalPos - wsPos); delta.tail(3) = (goalOrient - wsOrient);

    //-- Get delta_q (jointspace)   
    delta_q = Jcinv*delta;


    //-- Scale
    double scal = stepSize/delta_q.norm();
    delta_q = delta_q*scal;

    //-- Add to q
    q = q + delta_q; 

    //-- Push it 
    path.push_back( q );

    //-- Check distance to goal
    ws_dist = (goalPos - wsPos).norm();
    printf(" -- Ws dist: %.3f \n", ws_dist );
    trials++;      
    
    if( trials > maxTrials )
     { break; }
  }  


  if( trials >= maxTrials )
   { path.clear();
     printf(" --(!) Did not get to the goal . Thresh: %.3f Trials: %d \n", ws_dist, trials ); 
     return false;}
  else
   { printf(" -- Got to the goal . Thresh: %.3f Trials: %d \n", ws_dist, trials ); 
     return true; }

}
Example #30
void
PointCloudProjector::synchronized_callback(const sensor_msgs::PointCloud2ConstPtr& points_msg,
        const samplereturn_msgs::PatchArrayConstPtr& patches_msg)
{
    // create patch output message
    samplereturn_msgs::PatchArray positioned_patches;
    positioned_patches.header = patches_msg->header;
    positioned_patches.cam_info = patches_msg->cam_info;

    // create marker array debug message
    visualization_msgs::MarkerArray vis_markers;

    // create camera model object
    image_geometry::PinholeCameraModel model;
    model.fromCameraInfo(patches_msg->cam_info);

    // ensure tf is ready
    if(!listener_.canTransform(clipping_frame_id_, patches_msg->header.frame_id,
                patches_msg->header.stamp))
    {
        patches_out.publish( positioned_patches );
        return;
    }

    // get camera origin in clipping frame
    tf::StampedTransform camera;
    listener_.lookupTransform(clipping_frame_id_, patches_msg->header.frame_id,
            patches_msg->header.stamp, camera);
    Eigen::Vector3d camera_origin;
    tf::vectorTFToEigen(camera.getOrigin(), camera_origin);

    // scale and transform pointcloud into clipping frame
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr colorpoints(new pcl::PointCloud<pcl::PointXYZRGB>),
        points_native(new pcl::PointCloud<pcl::PointXYZRGB>),
        points_scaled(new pcl::PointCloud<pcl::PointXYZRGB>);
    pcl::fromROSMsg(*points_msg, *points_native);
    // this scale is a horrible hack to fix the manipulator point clouds
    Eigen::Transform<float, 3, Eigen::Affine> trans;
    trans.setIdentity();
    trans.scale(config_.pointcloud_scale);
    pcl::transformPointCloud(*points_native, *points_scaled, trans);
    pcl_ros::transformPointCloud(clipping_frame_id_, *points_scaled, *colorpoints, listener_);
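    // (the scale is applied in the camera-native frame first; only then is the
    // cloud transformed into the clipping frame via tf)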

    // id counter for debug markers
    int mid = 0;
    for(const auto& patch : patches_msg->patch_array)
    {
        samplereturn_msgs::Patch positioned_patch(patch);
        cv_bridge::CvImagePtr cv_ptr_mask;
        try {
            cv_ptr_mask = cv_bridge::toCvCopy(patch.mask, "mono8");
        }
        catch (cv_bridge::Exception& e) {
            ROS_ERROR("cv_bridge mask exception: %s", e.what());
            continue;
        }

        cv::Point2f roi_offset(patch.image_roi.x_offset, patch.image_roi.y_offset);
        Eigen::Vector4d ground_plane;
        // assume ground plane at z=0, in base_link xy plane for manipulators
        ground_plane << 0,0,1,0;
        float dimension, angle;
        tf::Stamped<tf::Point> world_point;
        if(samplereturn::computeMaskPositionAndSize(listener_,
                    cv_ptr_mask->image, roi_offset,
                    model, patches_msg->header.stamp, patches_msg->header.frame_id,
                    ground_plane, "base_link",
                    &dimension, &angle, &world_point,
                    NULL))
        {
            // if sample size is outside bounds, skip this patch
            if ((dimension > config_.max_major_axis) ||
                    (dimension < config_.min_major_axis)) {
                continue;
            }
        }
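        // note: if computeMaskPositionAndSize() fails, the patch falls through
        // with no size filtering applied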


        // find bounding box of mask
        cv::Rect rect;
        samplereturn::computeBoundingBox(cv_ptr_mask->image, &rect);

        // turn image space bounding box into 4 3d rays
        cv::Point2d patch_origin(patch.image_roi.x_offset,
                patch.image_roi.y_offset);
        std::vector<cv::Point2d> rpoints;
        rpoints.push_back(cv::Point2d(rect.x,            rect.y) +
                patch_origin);
        rpoints.push_back(cv::Point2d(rect.x,            rect.y+rect.height) +
                patch_origin);
        rpoints.push_back(cv::Point2d(rect.x+rect.width, rect.y+rect.height) +
                patch_origin);
        rpoints.push_back(cv::Point2d(rect.x+rect.width, rect.y) +
                patch_origin);
        std::vector<Eigen::Vector3d> rays;
        rays.resize(rpoints.size());
        std::transform(rpoints.begin(), rpoints.end(), rays.begin(),
                [model, patches_msg, this](cv::Point2d uv) -> Eigen::Vector3d
                {
                    cv::Point3d xyz = model.projectPixelTo3dRay(uv);
                    tf::Stamped<tf::Vector3> vect(tf::Vector3(xyz.x, xyz.y, xyz.z),
                        patches_msg->header.stamp,
                        patches_msg->header.frame_id);
                    tf::Stamped<tf::Vector3> vect_t;
                    listener_.transformVector(clipping_frame_id_, vect, vect_t);
                    Eigen::Vector3d ray;
                    tf::vectorTFToEigen(vect_t, ray);
                    return ray;
                });

        // clip the point cloud by the planes of the bounding volume
        // described by the rays above.
        // Add one more horizontal clipping plane (z >= -bottom_clipping_depth
        // in the clipping frame) to remove noise points below the ground
        pcl::PointIndices::Ptr clipped(new pcl::PointIndices);
        for(size_t i=0;i<rays.size()+1;i++)
        {
            Eigen::Vector4f plane;
            if(i<rays.size())
            {
                plane.segment<3>(0) = -rays[i].cross(rays[(i+1)%4]).cast<float>();
                plane[3] = -plane.segment<3>(0).dot(camera_origin.cast<float>());
            }
            else
            {
                plane << 0,0,1, config_.bottom_clipping_depth;
            }
            pcl::PlaneClipper3D<pcl::PointXYZRGB> clip(plane);
            std::vector<int> newclipped;
            // keep only the indices inside this half-space; an empty index list
            // on the first pass means the whole cloud, so the surviving set is
            // the intersection of all clipping planes
            clip.clipPointCloud3D(*colorpoints, newclipped, clipped->indices);
            clipped->indices = std::move(newclipped);
        }

        // bail if the clipped pointcloud is empty
        if(clipped->indices.size() == 0)
        {
            continue;
        }

        // publish clipped pointcloud if anybody is listening
        if(debug_points_out.getNumSubscribers()>0)
        {
            pcl::PointCloud<pcl::PointXYZRGB> clipped_pts;
            pcl::ExtractIndices<pcl::PointXYZRGB> extract;
            extract.setInputCloud(colorpoints);
            extract.setIndices(clipped);
            extract.filter(clipped_pts);
            sensor_msgs::PointCloud2 clipped_msg;
            pcl::toROSMsg(clipped_pts, clipped_msg);
            debug_points_out.publish( clipped_msg );
        }

        // compute suitable z value for this patch
        // First, find min, max and sum
        typedef std::tuple<float, float, float> stats_tuple;
        stats_tuple z_stats = std::accumulate(
                clipped->indices.begin(), clipped->indices.end(),
                std::make_tuple(std::numeric_limits<float>::max(),
                                // lowest(), not min(): min() is the smallest
                                // positive float and breaks the max tracking
                                std::numeric_limits<float>::lowest(),
                                0.0f),
                [colorpoints](stats_tuple sum, int idx) -> stats_tuple
                {
                    return std::make_tuple(
                        std::min(std::get<0>(sum), colorpoints->points[idx].z),
                        std::max(std::get<1>(sum), colorpoints->points[idx].z),
                        std::get<2>(sum) + colorpoints->points[idx].z);
                });
        // use sum to find mean
        float z_min = std::get<0>(z_stats);
        float z_max = std::get<1>(z_stats);
        float z_mean = std::get<2>(z_stats)/float(clipped->indices.size());
        // build a histogram of the values above hist_min, a blend of the
        // minimum and the mean controlled by hist_min_scale
        float hist_min = z_min + (z_mean-z_min)*config_.hist_min_scale;
        ROS_DEBUG("z_min: %04.3f z_mean: %04.3f z_max: %04.3f z_sum: %4.2f hist_min:%04.3f",
                z_min, z_mean, z_max, std::get<2>(z_stats), hist_min);
        const int NHIST = 20;
        int z_hist[NHIST] = {0};
        std::accumulate(clipped->indices.begin(), clipped->indices.end(), z_hist,
                [colorpoints, hist_min, z_max](int *z_hist, int idx) -> int *
                {
                    float z = colorpoints->points[idx].z;
                    if(z>hist_min)
                    {
                        // clamp so that z == z_max lands in the top bin
                        int zidx = std::min(NHIST - 1,
                                int(floor((z-hist_min)*NHIST/(z_max-hist_min))));
                        z_hist[zidx]++;
                    }
                    return z_hist;
                });
        char debughist[2048];
        int pos=0;
        for(int i=0;i<NHIST;i++)
        {
            pos += snprintf(debughist+pos, 2048-pos, "%d, ", z_hist[i]);
        }
        debughist[pos] = '\x00';
        ROS_DEBUG("hist: %s", debughist);
        // select the most populated bin (the most common height above hist_min)
        int * argmax = std::max_element( z_hist, z_hist + NHIST );
        ROS_DEBUG("argmax: %d", int(argmax - z_hist));
        float z_peak = (argmax - z_hist)*(z_max - hist_min)/NHIST + hist_min;
        if(z_peak>config_.maximum_patch_height)
        {
            ROS_INFO("Clipping z to max, was %f", z_peak);
            z_peak = config_.maximum_patch_height;
        }

        // project center of patch until it hits z_peak
        cv::Point2d uv = cv::Point2d(rect.x + rect.width/2, rect.y + rect.height/2) + patch_origin;
        cv::Point3d cvxyz = model.projectPixelTo3dRay(uv);
        tf::Stamped<tf::Vector3> patch_ray(
                tf::Vector3(
                    cvxyz.x,
                    cvxyz.y,
                    cvxyz.z),
                patches_msg->header.stamp,
                patches_msg->header.frame_id);
        tf::Stamped<tf::Vector3> clipping_ray;
        listener_.transformVector( clipping_frame_id_, patch_ray, clipping_ray);
        clipping_ray.normalize();
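        // intersect the unit view ray with the horizontal plane z = z_peak:
        // camera_z + r * ray_z = z_peak  =>  r = (z_peak - camera_z) / ray_z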
        double r = (z_peak - camera_origin.z())/clipping_ray.z();
        // finally, compute expected object position
        tf::Stamped<tf::Vector3> stamped_camera_origin(
                tf::Vector3(camera_origin.x(),
                    camera_origin.y(),
                    camera_origin.z()),
                patches_msg->header.stamp,
                clipping_frame_id_);

        tf::Vector3 object_position = stamped_camera_origin + r*clipping_ray;

        ROS_DEBUG_STREAM("patch_ray: (" <<
                patch_ray.x() << ", " << patch_ray.y() << ", " << patch_ray.z() << ") ");
        ROS_DEBUG_STREAM("clipping_ray: (" <<
                clipping_ray.x() << ", " << clipping_ray.y() << ", " << clipping_ray.z() << ") " << " z_peak: " << z_peak << " r: " << r);

        // put corresponding point_stamped in output message
        tf::Stamped<tf::Vector3> point(
                object_position,
                patches_msg->header.stamp,
                clipping_frame_id_);
        if(listener_.canTransform(output_frame_id_, point.frame_id_, point.stamp_))
        {
            tf::Stamped<tf::Vector3> output_point;
            listener_.transformPoint(output_frame_id_, point, output_point);
            tf::pointStampedTFToMsg(output_point, positioned_patch.world_point);
        }
        else
        {
            tf::pointStampedTFToMsg(point, positioned_patch.world_point);
        }
        positioned_patches.patch_array.push_back(positioned_patch);

        if(debug_marker_out.getNumSubscribers()>0)
        {
            visualization_msgs::Marker marker;
            marker.id = mid++;
            marker.type = visualization_msgs::Marker::SPHERE;
            marker.action = visualization_msgs::Marker::ADD;
            marker.header = positioned_patch.world_point.header;
            marker.pose.position = positioned_patch.world_point.point;
            marker.scale.x = 0.02;
            marker.scale.y = 0.02;
            marker.scale.z = 0.02;
            marker.color.r = 0.0;
            marker.color.g = 0.0;
            marker.color.b = 1.0;
            marker.color.a = 1.0;
            vis_markers.markers.push_back(marker);
        }
    }

    patches_out.publish( positioned_patches );

    if(debug_marker_out.getNumSubscribers()>0)
    {
        debug_marker_out.publish(vis_markers);
    }
}
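
The height-selection step above (a min/mean/max pass over the clipped points, a histogram of the values above a scaled minimum, then the most populated bin) is the core of this callback. A stripped-down sketch of the same estimator over a plain vector of z samples follows; histogramPeakHeight and its parameter are illustrative names, not part of the PointCloudProjector API.

#include <algorithm>
#include <cmath>
#include <vector>

// Pick a representative "top surface" height from a set of z samples:
// histogram the values above a blended minimum and return the lower edge
// of the most populated bin (mirrors the callback above).
float histogramPeakHeight( const std::vector<float> &zs, float hist_min_scale )
{
    const int NHIST = 20;
    if (zs.empty())
        return 0.0f;
    float z_min = *std::min_element(zs.begin(), zs.end());
    float z_max = *std::max_element(zs.begin(), zs.end());
    float z_sum = 0.0f;
    for (float z : zs) z_sum += z;
    float z_mean = z_sum / float(zs.size());

    // ignore everything below a blend of min and mean (ground-level noise)
    float hist_min = z_min + (z_mean - z_min) * hist_min_scale;
    if (z_max <= hist_min)
        return z_max;  // degenerate: all samples at (or below) one height

    int hist[NHIST] = {0};
    for (float z : zs) {
        if (z > hist_min) {
            int idx = std::min(NHIST - 1,
                    int(std::floor((z - hist_min) * NHIST / (z_max - hist_min))));
            hist[idx]++;
        }
    }
    // return the lower edge of the most populated bin
    int peak = int(std::max_element(hist, hist + NHIST) - hist);
    return peak * (z_max - hist_min) / NHIST + hist_min;
}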