void
transform_cylinder(CylinderPtr & c_ptr, Eigen::Affine3f & trafo)
{
	Cylinder & c = *c_ptr;

	// transform the contour points
	for (int i = 0; i < (int) c.contours.size(); ++i) {
		for (int j = 0; j < (int) c.contours[i].size(); ++j) {
			c.contours[i][j] = trafo * c.contours[i][j];
		}
	}

	c.origin_ = trafo * c.origin_;
	std::cout << "transformed origin\n" << c.origin_ << std::endl;

	// rotate the local axes and the normal (rotation part only)
	for (int i = 0; i < 3; ++i) {
		c.axes_[i] = trafo.rotation() * c.axes_[i];
		//std::cout << "axis -" << i << " \n" << c.axes_[i] << std::endl;
	}
	c.normal = trafo.rotation() * c.normal;

	float roll, pitch, yaw, x, y, z;
	pcl::getTranslationAndEulerAngles(trafo, x, y, z, roll, pitch, yaw);
	//std::cout << " x= " << x << " y= " << y << " z= " << z << " roll= " << roll << " pitch= " << pitch << " yaw= " << yaw << std::endl;

	c.assignMembers(c.axes_[1], c.axes_[2], c.origin_);	// configure unrolled polygon
}
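For reference, a transform like `trafo` is usually assembled from a translation and a rotation. A minimal sketch (hypothetical values, requires Eigen/Geometry) of one that could be handed to transform_cylinder():

#include <Eigen/Geometry>

// Sketch only: compose an Eigen::Affine3f from a translation and a ~45-degree
// yaw about Z. The numeric values are purely illustrative.
Eigen::Affine3f makeExampleTrafo()
{
	Eigen::Affine3f trafo =
		Eigen::Translation3f(0.5f, 0.0f, 1.0f) *
		Eigen::AngleAxisf(0.785398f, Eigen::Vector3f::UnitZ());
	return trafo;
}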
Example #2
void ConvexPolygon::projectOnPlane(
    const Eigen::Affine3f& pose, Eigen::Affine3f& output)
{
    Eigen::Vector3f p(pose.translation());
    Eigen::Vector3f output_p;
    projectOnPlane(p, output_p);
    // ROS_INFO("[ConvexPolygon::projectOnPlane] p: [%f, %f, %f]",
    //          p[0], p[1], p[2]);
    // ROS_INFO("[ConvexPolygon::projectOnPlane] output_p: [%f, %f, %f]",
    //          output_p[0], output_p[1], output_p[2]);
    Eigen::Quaternionf rot;
    rot.setFromTwoVectors(pose.rotation() * Eigen::Vector3f::UnitZ(),
                          coordinates().rotation() * Eigen::Vector3f::UnitZ());
    Eigen::Quaternionf coords_rot(coordinates().rotation());
    Eigen::Quaternionf pose_rot(pose.rotation());
    // ROS_INFO("[ConvexPolygon::projectOnPlane] rot: [%f, %f, %f, %f]",
    //          rot.x(), rot.y(), rot.z(), rot.w());
    // ROS_INFO("[ConvexPolygon::projectOnPlane] coords_rot: [%f, %f, %f, %f]",
    //          coords_rot.x(), coords_rot.y(), coords_rot.z(), coords_rot.w());
    // ROS_INFO("[ConvexPolygon::projectOnPlane] pose_rot: [%f, %f, %f, %f]",
    //          pose_rot.x(), pose_rot.y(), pose_rot.z(), pose_rot.w());
    // ROS_INFO("[ConvexPolygon::projectOnPlane] normal: [%f, %f, %f]", normal_[0], normal_[1], normal_[2]);
    // Eigen::Affine3f::Identity() *
    // output.translation() = Eigen::Translation3f(output_p);
    // output.rotation() = rot * pose.rotation();
    //output = Eigen::Translation3f(output_p) * rot * pose.rotation();
    output = Eigen::Affine3f(rot * pose.rotation());
    output.pretranslate(output_p);
    // Eigen::Vector3f projected_point = output * Eigen::Vector3f(0, 0, 0);
    // ROS_INFO("[ConvexPolygon::projectOnPlane] output: [%f, %f, %f]",
    //          projected_point[0], projected_point[1], projected_point[2]);
}
Example #3
void setViewerPose (pcl::visualization::PCLVisualizer& viewer, const Eigen::Affine3f& viewer_pose) {
  Eigen::Vector3f pos_vector = viewer_pose * Eigen::Vector3f(10, 10, 10);
  Eigen::Vector3f look_at_vector = viewer_pose.rotation () * Eigen::Vector3f(0, 0, 0);
  Eigen::Vector3f up_vector = viewer_pose.rotation () * Eigen::Vector3f(0, -1, 0);
  viewer.setCameraPosition (pos_vector[0], pos_vector[1], pos_vector[2],
                            look_at_vector[0], look_at_vector[1], look_at_vector[2],
                            up_vector[0], up_vector[1], up_vector[2]);
}
Example #4
template <typename PointSource, typename PointTarget> bool
pcl::PPFRegistration<PointSource, PointTarget>::posesWithinErrorBounds (Eigen::Affine3f &pose1,
                                                                        Eigen::Affine3f &pose2)
{
  float position_diff = (pose1.translation () - pose2.translation ()).norm ();
  Eigen::AngleAxisf rotation_diff_mat (pose1.rotation ().inverse () * pose2.rotation ());

  float rotation_diff_angle = fabsf (rotation_diff_mat.angle ());

  if (position_diff < clustering_position_diff_threshold_ && rotation_diff_angle < clustering_rotation_diff_threshold_)
    return true;
  else return false;
}
Example #5
transformation objectModelSV::modelToScene(const int modelPointIndex, const Eigen::Affine3f & transformSceneToGlobal, const float alpha)
{
	Eigen::Vector3f modelPoint=modelCloud->points[modelPointIndex].getVector3fMap();
	Eigen::Vector3f modelNormal=modelCloud->points[modelPointIndex].getNormalVector3fMap ();

	// Get transformation from model local frame to scene local frame
    Eigen::Affine3f completeTransform = transformSceneToGlobal.inverse () * Eigen::AngleAxisf(alpha, Eigen::Vector3f::UnitX ()) * modelCloudTransformations[modelPointIndex];

	Eigen::Quaternion<float> rotationQ=Eigen::Quaternion<float>(completeTransform.rotation());

	// if object is symmetric remove yaw rotation (assume symmetry around z axis)
	if(symmetric)
	{
		Eigen::Vector3f eulerAngles;
		// [0] -> rotation around x (roll), [1] -> rotation around y (pitch), [2] -> rotation around z (yaw)
		quaternionToEuler(rotationQ, eulerAngles[0], eulerAngles[1], eulerAngles[2]);
		//pcl::getEulerAngles(completeTransform,eulerAngles[0], eulerAngles[1], eulerAngles[2]);
		//eulerAngles[2]=0.0;
		eulerToQuaternion(rotationQ, eulerAngles[0], eulerAngles[1], eulerAngles[2]);
		//quaternionToEuler(rotationQ, eulerAngles[2], eulerAngles[1], eulerAngles[2]);
		//std::cout << "EULER ANGLES: " << eulerAngles << std::endl;
	}
	//std::cout << "translation: " << completeTransform.rotation().matrix() << std::endl;
	return transformation(rotationQ, Eigen::Translation3f(completeTransform.translation()) );
}
Example #6
File: kinfu.cpp, Project: neeljp/pcl
void
pcl::gpu::kinfuLS::KinfuTracker::setInitialCameraPose (const Eigen::Affine3f& pose)
{
  init_Rcam_ = pose.rotation ();
  init_tcam_ = pose.translation ();
  //reset (); // (already called in constructor)
}
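A call site typically builds the pose from a translation and a rotation matrix. A sketch for illustration (the `tracker` object and the numeric values are assumptions, not part of the original):

// Sketch only: construct an initial camera pose and hand it to the tracker.
// `tracker` is assumed to be an existing pcl::gpu::kinfuLS::KinfuTracker.
Eigen::Vector3f t (1.5f, 1.5f, -0.3f);             // illustrative camera position
Eigen::Matrix3f R = Eigen::Matrix3f::Identity ();  // illustrative camera orientation
Eigen::Affine3f initial_pose = Eigen::Translation3f (t) * Eigen::AngleAxisf (R);
tracker.setInitialCameraPose (initial_pose);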
Example #7
  void FootstepGraph::setBasicSuccessors(
    std::vector<Eigen::Affine3f> left_to_right_successors)
  {
    successors_from_left_to_right_ = left_to_right_successors;
    for (size_t i = 0; i < successors_from_left_to_right_.size(); i++) {
      Eigen::Affine3f transform = successors_from_left_to_right_[i];
      float roll, pitch, yaw;
      pcl::getEulerAngles(transform, roll, pitch, yaw);
      Eigen::Vector3f translation = transform.translation();
      Eigen::Affine3f flipped_transform
        = Eigen::Translation3f(translation[0], -translation[1], translation[2])
        * Eigen::Quaternionf(Eigen::AngleAxisf(-yaw, Eigen::Vector3f::UnitZ()));
      successors_from_right_to_left_.push_back(flipped_transform);
    }

    // max_successor_distance_
    for (size_t i = 0; i < successors_from_left_to_right_.size(); i++) {
      Eigen::Affine3f transform = successors_from_left_to_right_[i];
      //double dist = transform.translation().norm();
      double dist = transform.translation()[0]; // Only consider x
      if (dist > max_successor_distance_) {
        max_successor_distance_ = dist;
      }
      double rot = Eigen::AngleAxisf(transform.rotation()).angle();
      if (rot > max_successor_rotation_) {
        max_successor_rotation_ = rot;
      }
    }
  }
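For context, the successor list passed in is usually a set of planar (x, y, yaw) foot displacements. A minimal sketch with made-up offsets (`graph` is assumed to be an existing FootstepGraph pointer, not part of the original):

    // Sketch only: build a small successor set from hypothetical (x, y, yaw) offsets.
    std::vector<Eigen::Affine3f> successors;
    const float offsets[][3] = { {0.20f, -0.20f, 0.0f},    // straight step
                                 {0.25f, -0.20f, 0.0f},    // longer step
                                 {0.20f, -0.20f, 0.3f} };  // step with yaw [rad]
    for (size_t i = 0; i < sizeof(offsets) / sizeof(offsets[0]); ++i) {
      Eigen::Affine3f step = Eigen::Translation3f(offsets[i][0], offsets[i][1], 0.0f)
        * Eigen::AngleAxisf(offsets[i][2], Eigen::Vector3f::UnitZ());
      successors.push_back(step);
    }
    graph->setBasicSuccessors(successors);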
Example #8
  bool FootstepGraph::isGoal(StatePtr state)
  {
    FootstepState::Ptr goal = getGoal(state->getLeg());
    if (publish_progress_) {
      jsk_footstep_msgs::FootstepArray msg;
      msg.header.frame_id = "odom"; // TODO fixed frame_id
      msg.header.stamp = ros::Time::now();
      msg.footsteps.push_back(*state->toROSMsg());
      pub_progress_.publish(msg);
    }
    Eigen::Affine3f pose = state->getPose();
    Eigen::Affine3f goal_pose = goal->getPose();
    Eigen::Affine3f transformation = pose.inverse() * goal_pose;

    if ((parameters_.goal_pos_thr > transformation.translation().norm()) &&
        (parameters_.goal_rot_thr > std::abs(Eigen::AngleAxisf(transformation.rotation()).angle()))) {
      // check collision
      if (state->getLeg() == jsk_footstep_msgs::Footstep::LEFT) {
        if (right_goal_state_->crossCheck(state)) {
          return true;
        }
      } else if (state->getLeg() == jsk_footstep_msgs::Footstep::RIGHT) {
        if (left_goal_state_->crossCheck(state)) {
          return true;
        }
      }
    }
    return false;
  }
Example #9
void Evaluation::saveAllPoses(const pcl::gpu::KinfuTracker& kinfu, int frame_number, const std::string& logfile) const
{   
  size_t total = accociations_.empty() ? depth_stamps_and_filenames_.size() : accociations_.size();

  if (frame_number < 0)
      frame_number = (int)total;

  frame_number = std::min(frame_number, (int)kinfu.getNumberOfPoses());

  cout << "Writing " << frame_number << " poses to " << logfile << endl;
  
  ofstream path_file_stream(logfile.c_str());
  path_file_stream.setf(ios::fixed,ios::floatfield);
  
  for(int i = 0; i < frame_number; ++i)
  {
    Eigen::Affine3f pose = kinfu.getCameraPose(i);
    Eigen::Quaternionf q(pose.rotation());
    Eigen::Vector3f t = pose.translation();

    double stamp = accociations_.empty() ? depth_stamps_and_filenames_[i].first : accociations_[i].time1;

    path_file_stream << stamp << " ";
    path_file_stream << t[0] << " " << t[1] << " " << t[2] << " ";
    path_file_stream << q.x() << " " << q.y() << " " << q.z() << " " << q.w() << endl;
  }
}
Example #10
bool reduce_measurement_g2o::find_transform(const color_keyframe::Ptr & fi,
		const color_keyframe::Ptr & fj, Sophus::SE3f & t) {

	std::vector<cv::KeyPoint> keypoints_i, keypoints_j;
	pcl::PointCloud<pcl::PointXYZ> keypoints3d_i, keypoints3d_j;
	cv::Mat descriptors_i, descriptors_j;

	compute_features(fi->get_i(0), fi->get_d(0), fi->get_intrinsics(0), fd, de,
			keypoints_i, keypoints3d_i, descriptors_i);

	compute_features(fj->get_i(0), fj->get_d(0), fj->get_intrinsics(0), fd, de,
			keypoints_j, keypoints3d_j, descriptors_j);

	std::vector<cv::DMatch> matches, matches_filtered;
	dm->match(descriptors_j, descriptors_i, matches);

	Eigen::Affine3f transform;
	std::vector<bool> inliers;

	bool res = estimate_transform_ransac(keypoints3d_j, keypoints3d_i, matches,
			100, 0.03 * 0.03, 20, transform, inliers);

	t = Sophus::SE3f(transform.rotation(), transform.translation());

	return res;
}
Example #11
File: kinfu.cpp, Project: BITVoyager/pcl
void
pcl::gpu::KinfuTracker::setInitalCameraPose (const Eigen::Affine3f& pose)
{
  init_Rcam_ = pose.rotation ();
  init_tcam_ = pose.translation ();
  reset ();
}
Example #12
template <typename PointT> void
pcl::transformPointCloudWithNormals (const pcl::PointCloud<PointT> &cloud_in, 
                                     pcl::PointCloud<PointT> &cloud_out,
                                     const Eigen::Affine3f &transform)
{
  if (&cloud_in != &cloud_out)
  {
    // Note: could be replaced by cloud_out = cloud_in
    cloud_out.header   = cloud_in.header;
    cloud_out.width    = cloud_in.width;
    cloud_out.height   = cloud_in.height;
    cloud_out.is_dense = cloud_in.is_dense;
    cloud_out.points.reserve (cloud_out.points.size ());
    cloud_out.points.assign (cloud_in.points.begin (), cloud_in.points.end ());
  }

  // If the data is dense, we don't need to check for NaN
  if (cloud_in.is_dense)
  {
    for (size_t i = 0; i < cloud_out.points.size (); ++i)
    {
      cloud_out.points[i].getVector3fMap() = transform * 
                                             cloud_in.points[i].getVector3fMap ();

      // Rotate normals
      cloud_out.points[i].getNormalVector3fMap() = transform.rotation () * 
                                                   cloud_in.points[i].getNormalVector3fMap ();
    }
  }
  // Dataset might contain NaNs and Infs, so check for them first.
  else
  {
    for (size_t i = 0; i < cloud_out.points.size (); ++i)
    {
      if (!pcl_isfinite (cloud_in.points[i].x) || 
          !pcl_isfinite (cloud_in.points[i].y) || 
          !pcl_isfinite (cloud_in.points[i].z))
        continue;
      cloud_out.points[i].getVector3fMap() = transform * 
                                             cloud_in.points[i].getVector3fMap ();

      // Rotate normals
      cloud_out.points[i].getNormalVector3fMap() = transform.rotation () * 
                                                   cloud_in.points[i].getNormalVector3fMap ();
    }
  }
}
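A minimal call-site sketch (the cloud contents and the transform values are assumptions for illustration):

// Sketch only: transform a cloud of points-with-normals by a rigid transform.
pcl::PointCloud<pcl::PointNormal> input, output;   // `input` assumed to be filled elsewhere
Eigen::Affine3f tf =
    Eigen::Translation3f (0.0f, 0.0f, 0.1f) *
    Eigen::AngleAxisf (0.1f, Eigen::Vector3f::UnitY ());
pcl::transformPointCloudWithNormals (input, output, tf);  // rotates the normals as well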
Example #13
File: elch.hpp, Project: 2php/pcl
template <typename PointT> void
pcl::registration::ELCH<PointT>::compute ()
{
  if (!initCompute ())
  {
    return;
  }

  LOAGraph grb[4];

  typename boost::graph_traits<LoopGraph>::edge_iterator edge_it, edge_it_end;
  for (boost::tuples::tie (edge_it, edge_it_end) = edges (*loop_graph_); edge_it != edge_it_end; edge_it++)
  {
    for (int j = 0; j < 4; j++)
      add_edge (source (*edge_it, *loop_graph_), target (*edge_it, *loop_graph_), 1, grb[j]);  //TODO add variance
  }

  double *weights[4];
  for (int i = 0; i < 4; i++)
  {
    weights[i] = new double[num_vertices (*loop_graph_)];
    loopOptimizerAlgorithm (grb[i], weights[i]);
  }

  //TODO use pose
  //Eigen::Vector4f cend;
  //pcl::compute3DCentroid (*((*loop_graph_)[loop_end_].cloud), cend);
  //Eigen::Translation3f tend (cend.head (3));
  //Eigen::Affine3f aend (tend);
  //Eigen::Affine3f aendI = aend.inverse ();

  //TODO iterate over loop_graph_
  //typename boost::graph_traits<LoopGraph>::vertex_iterator vertex_it, vertex_it_end;
  //for (boost::tuples::tie (vertex_it, vertex_it_end) = vertices (*loop_graph_); vertex_it != vertex_it_end; vertex_it++)
  for (size_t i = 0; i < num_vertices (*loop_graph_); i++)
  {
    Eigen::Vector3f t2;
    t2[0] = loop_transform_ (0, 3) * static_cast<float> (weights[0][i]);
    t2[1] = loop_transform_ (1, 3) * static_cast<float> (weights[1][i]);
    t2[2] = loop_transform_ (2, 3) * static_cast<float> (weights[2][i]);

    Eigen::Affine3f bl (loop_transform_);
    Eigen::Quaternionf q (bl.rotation ());
    Eigen::Quaternionf q2;
    q2 = Eigen::Quaternionf::Identity ().slerp (static_cast<float> (weights[3][i]), q);

    //TODO use rotation from branch start
    Eigen::Translation3f t3 (t2);
    Eigen::Affine3f a (t3 * q2);
    //a = aend * a * aendI;

    pcl::transformPointCloud (*(*loop_graph_)[i].cloud, *(*loop_graph_)[i].cloud, a);
    (*loop_graph_)[i].transform = a;
  }

  add_edge (loop_start_, loop_end_, *loop_graph_);

  deinitCompute ();
}
Example #14
void
setViewerPose (pcl::visualization::PCLVisualizer& viewer, const Eigen::Affine3f& viewer_pose)
{
    Eigen::Vector3f pos_vector = viewer_pose * Eigen::Vector3f (0, 0, 0);
    Eigen::Vector3f look_at_vector = viewer_pose.rotation () * Eigen::Vector3f (0, 0, 1) + pos_vector;
    Eigen::Vector3f up_vector = viewer_pose.rotation () * Eigen::Vector3f (0, -1, 0);
    viewer.camera_.pos[0] = pos_vector[0];
    viewer.camera_.pos[1] = pos_vector[1];
    viewer.camera_.pos[2] = pos_vector[2];
    viewer.camera_.focal[0] = look_at_vector[0];
    viewer.camera_.focal[1] = look_at_vector[1];
    viewer.camera_.focal[2] = look_at_vector[2];
    viewer.camera_.view[0] = up_vector[0];
    viewer.camera_.view[1] = up_vector[1];
    viewer.camera_.view[2] = up_vector[2];
    viewer.updateCamera ();
}
Example #15
 double footstepHeuristicStraightRotation(
   SolverNode<FootstepState, FootstepGraph>::Ptr node, FootstepGraph::Ptr graph)
 {
   FootstepState::Ptr state = node->getState();
   FootstepState::Ptr goal = graph->getGoal(state->getLeg());
   Eigen::Affine3f transform = state->getPose().inverse() * goal->getPose();
   return (std::abs(transform.translation().norm() / graph->maxSuccessorDistance()) +
              std::abs(Eigen::AngleAxisf(transform.rotation()).angle()) / graph->maxSuccessorRotation());
 }
Example #16
 void Plane::project(const Eigen::Affine3f& pose, Eigen::Affine3f& output)
 {
   Eigen::Vector3f p(pose.translation());
   Eigen::Vector3f output_p;
   project(p, output_p);
   Eigen::Quaternionf rot;
   rot.setFromTwoVectors(pose.rotation() * Eigen::Vector3f::UnitZ(),
                         coordinates().rotation() * Eigen::Vector3f::UnitZ());
   output = Eigen::Affine3f::Identity() * Eigen::Translation3f(output_p) * rot;
 }
Example #17
Eigen::Affine3f interpolateAffine(const Eigen::Affine3f &pose0, 
        const Eigen::Affine3f &pose1, float blend)
{
    /* interpolate translation */
    Eigen::Vector3f t0 = pose0.translation();
    Eigen::Vector3f t1 = pose1.translation();
    Eigen::Vector3f tIP = (t1 - t0)*blend;

    /* interpolate rotation */
    Eigen::Quaternionf r0(pose0.rotation());
    Eigen::Quaternionf r1(pose1.rotation());
    Eigen::Quaternionf rIP(r1.slerp(blend, r0));

    /* compose resulting pose */
    Eigen::Affine3f ipAff = pose0;
    ipAff.rotate(rIP);
    ipAff.translate(tIP);
    return ipAff;
}
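Note that the function above composes the blended rotation and translation onto pose0 in its local frame, and slerps from pose1 toward pose0. A more conventional blend rebuilds the pose directly from a lerped translation and a slerped rotation; a sketch for comparison (not the original author's method):

// Sketch only: direct pose blend, blend = 0 gives pose0 and blend = 1 gives pose1.
Eigen::Affine3f interpolateAffineDirect(const Eigen::Affine3f &pose0,
        const Eigen::Affine3f &pose1, float blend)
{
    Eigen::Vector3f t = (1.0f - blend) * pose0.translation() + blend * pose1.translation();
    Eigen::Quaternionf q0(pose0.rotation());
    Eigen::Quaternionf q1(pose1.rotation());
    Eigen::Quaternionf q = q0.slerp(blend, q1);
    Eigen::Affine3f result = Eigen::Translation3f(t) * q;
    return result;
}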
Example #18
  void PolygonArrayAngleLikelihood::likelihood(
    const jsk_recognition_msgs::PolygonArray::ConstPtr& msg)
  {
    boost::mutex::scoped_lock lock(mutex_);
    jsk_recognition_msgs::PolygonArray new_msg(*msg);

    try
    {
      // Resolve tf:
      // compute the position of target_frame_id
      // with respect to msg->header.frame_id
      tf::StampedTransform transform;
      tf_listener_->lookupTransform(
        target_frame_id_, msg->header.frame_id, msg->header.stamp, transform);
      Eigen::Affine3f pose;
      tf::transformTFToEigen(transform, pose);

      // Use x
      Eigen::Vector3f reference_axis = pose.rotation() * axis_;

      double min_distance = DBL_MAX;
      double max_distance = - DBL_MAX;
      std::vector<double> distances; 
      for (size_t i = 0; i < msg->polygons.size(); i++) {
        jsk_recognition_utils::Polygon::Ptr polygon
          = jsk_recognition_utils::Polygon::fromROSMsgPtr(msg->polygons[i].polygon);
        Eigen::Vector3f n = polygon->getNormal();
        double distance = std::abs(reference_axis.dot(n));
        min_distance = std::min(distance, min_distance);
        max_distance = std::max(distance, max_distance);
        distances.push_back(distance);
      }

      // Normalization
      for (size_t i = 0; i < distances.size(); i++) {
        // double normalized_distance
        //   = (distances[i] - min_distance) / (max_distance - min_distance);
        double likelihood = 1 / (1 + (distances[i] - 1) * (distances[i] - 1));
        
        if (msg->likelihood.size() == 0) {
          new_msg.likelihood.push_back(likelihood);
        }
        else {
          new_msg.likelihood[i] = new_msg.likelihood[i] * likelihood;
        }
      }
      pub_.publish(new_msg);
    }
    catch (...)
    {
      JSK_NODELET_ERROR("Unknown transform error");
    }
    
  }
Example #19
 bool FootstepGraph::isGoal(StatePtr state)
 {
   FootstepState::Ptr goal = getGoal(state->getLeg());
   if (publish_progress_) {
     jsk_footstep_msgs::FootstepArray msg;
     msg.header.frame_id = "odom";
     msg.header.stamp = ros::Time::now();
     msg.footsteps.push_back(*state->toROSMsg());
     pub_progress_.publish(msg);
   }
   Eigen::Affine3f pose = state->getPose();
   Eigen::Affine3f goal_pose = goal->getPose();
   Eigen::Affine3f transformation = pose.inverse() * goal_pose;
   return (pos_goal_thr_ > transformation.translation().norm()) &&
     (rot_goal_thr_ > std::abs(Eigen::AngleAxisf(transformation.rotation()).angle()));
 }
Example #20
void SLTrackerWorker::trackPointCloud(PointCloudConstPtr pointCloud){

    // Recursively call self until latest event is hit
    busy = true;
    QCoreApplication::sendPostedEvents(this, QEvent::MetaCall);
    bool result = busy;
    busy = false;
    if(!result){
        std::cerr << "SLTrackerWorker: dropped point cloud!" << std::endl;
        return;
    }

    if(!referenceSet){
        tracker->setReference(pointCloud);
        referenceSet = true;
        return;
    }

    performanceTime.start();

    Eigen::Affine3f T;
    bool converged;
    float RMS;
    tracker->determineTransformation(pointCloud, T, converged, RMS);

    // Emit result
    if(converged)
        emit newPoseEstimate(T);

//    std::cout << "Pose: " << T.matrix() << std::endl;

    std::cout << "Tracker: " << performanceTime.elapsed() << "ms" << std::endl;

    if(writeToDisk){
        Eigen::Vector3f t(T.translation());
        Eigen::Quaternionf q(T.rotation());

        (*ofStream) << trackingTime.elapsed() << ",";
        (*ofStream) << t.x() << "," << t.y() << "," << t.z() << ",";
        (*ofStream) << q.w() << "," << q.x() << "," << q.y() << "," << q.z() << "," << RMS;
        (*ofStream) << std::endl;
    }


}
Example #21
transformation objectModelSV::modelToScene(const pcl::PointNormal & pointModel, const Eigen::Affine3f & transformSceneToGlobal, const float alpha)
{
	Eigen::Vector3f modelPoint=pointModel.getVector3fMap();
	Eigen::Vector3f modelNormal=pointModel.getNormalVector3fMap ();

	// Get transformation from model local frame to global frame
	Eigen::Vector3f cross=modelNormal.cross (Eigen::Vector3f::UnitX ()).normalized ();
	Eigen::AngleAxisf rotationModelToGlobal(acosf (modelNormal.dot (Eigen::Vector3f::UnitX ())), cross);

	if (isnan(cross[0]))
	{
		rotationModelToGlobal=Eigen::AngleAxisf(0.0,Eigen::Vector3f::UnitX ());
	}		
	//std::cout<< "ola:" <<acosf (modelNormal.dot (Eigen::Vector3f::UnitX ()))<<std::endl;
	//std::cout <<"X:"<< Eigen::Translation3f( rotationModelToGlobal * ((-1) * modelPoint)).x() << std::endl;
	//std::cout <<"Y:"<< Eigen::Translation3f( rotationModelToGlobal * ((-1) * modelPoint)).y() << std::endl;
	//std::cout <<"Z:"<< Eigen::Translation3f( rotationModelToGlobal * ((-1) * modelPoint)).z() << std::endl;

    Eigen::Affine3f transformModelToGlobal = Eigen::Translation3f( rotationModelToGlobal * ((-1) * modelPoint)) * rotationModelToGlobal;

	// Get transformation from model local frame to scene local frame
    Eigen::Affine3f completeTransform = transformSceneToGlobal.inverse () * Eigen::AngleAxisf(alpha, Eigen::Vector3f::UnitX ()) * transformModelToGlobal;

	//std::cout << Eigen::AngleAxisf(alpha, Eigen::Vector3f::UnitX ()).matrix() << std::endl;

	Eigen::Quaternion<float> rotationQ=Eigen::Quaternion<float>(completeTransform.rotation());

	// if object is symmetric remove yaw rotation (assume symmetry around z axis)
	if(symmetric)
	{
		Eigen::Vector3f eulerAngles;
		// [0] -> rotation around x (roll), [1] -> rotation around y (pitch), [2] -> rotation around z (yaw)
		quaternionToEuler(rotationQ, eulerAngles[0], eulerAngles[1], eulerAngles[2]);
		//pcl::getEulerAngles(completeTransform,eulerAngles[0], eulerAngles[1], eulerAngles[2]);
		//eulerAngles[2]=0.0;
		eulerToQuaternion(rotationQ, eulerAngles[0], eulerAngles[1], eulerAngles[2]);
		//quaternionToEuler(rotationQ, eulerAngles[2], eulerAngles[1], eulerAngles[2]);
		//std::cout << "EULER ANGLES: " << eulerAngles << std::endl;
	}


	//std::cout << "rotation: " << rotationQ << std::endl;
	return transformation(rotationQ, Eigen::Translation3f(completeTransform.translation()) );
}
Example #22
gsl_vector* VelStereoMatcher::stereoToVec(const StereoProperties stereo)
{
  Eigen::Affine3f tform = stereo.getLeftCamera().tform;
  Eigen::Matrix4f intrinsics = stereo.getLeftCamera().intrinsics.matrix();

  Eigen::Vector3f tran;
  tran = tform.translation();
  float x = tran[0];
  float y = tran[1];
  float z = tran[2];

  Eigen::Matrix3f mat = tform.rotation();
  Eigen::AngleAxisf axis;
  axis = mat;
  float ax = axis.axis()[0] * axis.angle();
  float ay = axis.axis()[1] * axis.angle();
  float az = axis.axis()[2] * axis.angle();

  float fx = intrinsics(0,0);
  float fy = intrinsics(1,1);
  float cx = intrinsics(0,2);
  float cy = intrinsics(1,2);

  float baseline = stereo.baseline;

  gsl_vector* vec = gsl_vector_alloc(11);
  gsl_vector_set(vec, 0, x);
  gsl_vector_set(vec, 1, y);
  gsl_vector_set(vec, 2, z);
  gsl_vector_set(vec, 3, ax);
  gsl_vector_set(vec, 4, ay);
  gsl_vector_set(vec, 5, az);

  gsl_vector_set(vec, 6, fx);
  gsl_vector_set(vec, 7, fy);
  gsl_vector_set(vec, 8, cx);
  gsl_vector_set(vec, 9, cy);
  gsl_vector_set(vec, 10, baseline);

  return vec;

}
Example #23
void SLTrackerDialog::showPoseEstimate(const Eigen::Affine3f & T){

    if(ui->poseTab->isVisible()){
        ui->poseWidget->showPoseEstimate(T);
    } else if(ui->traceTab->isVisible()){
        Eigen::Vector3f t(T.translation());
        Eigen::Quaternionf q(T.rotation());

        ui->translationTraceWidget->addMeasurement("tx", t(0));
        ui->translationTraceWidget->addMeasurement("ty", t(1));
        ui->translationTraceWidget->addMeasurement("tz", t(2));
        ui->translationTraceWidget->draw();

        ui->rotationTraceWidget->addMeasurement("qw", q.w());
        ui->rotationTraceWidget->addMeasurement("qx", q.x());
        ui->rotationTraceWidget->addMeasurement("qy", q.y());
        ui->rotationTraceWidget->addMeasurement("qz", q.z());
        ui->rotationTraceWidget->draw();
    }
}
Example #24
void affine3fToTransRotVec(const Eigen::Affine3f &aff,
        cv::Mat &tvec, cv::Mat &rvec)
{   
    /* decompose matrix */
    Eigen::Vector3f pos = aff.translation();

    Eigen::AngleAxisf rot(aff.rotation());
    Eigen::Vector3f rotVec = rot.axis();
    float angle = rot.angle();
    rotVec *= angle;

    /* copy translation vector */
    tvec = cv::Mat::zeros(3, 1, CV_32F);
    tvec.at<float>(0,0) = pos[0];
    tvec.at<float>(1,0) = pos[1];
    tvec.at<float>(2,0) = pos[2];

    /* copy axis angle rotation */
    rvec = cv::Mat::zeros(3, 1, CV_32F);
    rvec.at<float>(0,0) = rotVec[0];
    rvec.at<float>(1,0) = rotVec[1];
    rvec.at<float>(2,0) = rotVec[2];
}
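The reverse conversion can be sketched with cv::Rodrigues (this assumes 3x1 CV_32F vectors as produced above and requires the OpenCV calib3d module; the function name here is hypothetical):

// Sketch only: rebuild an Eigen::Affine3f from OpenCV tvec / rvec.
Eigen::Affine3f transRotVecToAffine3f(const cv::Mat &tvec, const cv::Mat &rvec)
{
    /* axis-angle -> 3x3 rotation matrix */
    cv::Mat R;
    cv::Rodrigues(rvec, R);   // R keeps the depth of rvec (CV_32F here)

    Eigen::Matrix3f rot;
    for (int r = 0; r < 3; ++r)
        for (int c = 0; c < 3; ++c)
            rot(r, c) = R.at<float>(r, c);

    /* compose the affine transform */
    Eigen::Affine3f aff = Eigen::Affine3f::Identity();
    aff.linear() = rot;
    aff.translation() = Eigen::Vector3f(tvec.at<float>(0, 0),
                                        tvec.at<float>(1, 0),
                                        tvec.at<float>(2, 0));
    return aff;
}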
Example #25
void
  Cylinder::transform(Eigen::Affine3f & trafo)
  {
    //transform contours
    //this->TransformContours(trafo);
    pose_ = trafo * pose_;

    //  transform parameters
    sym_axis_ = trafo.rotation() * sym_axis_;

    //Eigen::Vector3f tf_axes_2 = trafo.rotation() * this->normal_;
    //this->normal_ = tf_axes_2;

    //Eigen::Vector3f tf_origin = trafo * this->origin_;
    //this->origin_ =  tf_origin;

    /*Eigen::Vector3f centroid3f;
  centroid3f<<  this->centroid[0], this->centroid[1], this->centroid[2];
  centroid3f = trafo * centroid3f;
  this->centroid << centroid3f[0], centroid3f[1], centroid3f[2], 0;*/
    //this->computeAttributes(sym_axis_,normal_,origin_);

  }
Example #26
/* ---[ */
int
main (int argc, char** argv)
{
  srand (static_cast<unsigned int> (time (0)));

  print_info ("The viewer window provides interactive commands; for help, press 'h' or 'H' from within the window.\n");

  if (argc < 2)
  {
    printHelp (argc, argv);
    return (-1);
  }

  bool debug = false;
  pcl::console::parse_argument (argc, argv, "-debug", debug);
  if (debug)
    pcl::console::setVerbosityLevel (pcl::console::L_DEBUG);

  // Parse the command line arguments for .pcd files
  std::vector<int> p_file_indices   = pcl::console::parse_file_extension_argument (argc, argv, ".pcd");
  std::vector<int> vtk_file_indices = pcl::console::parse_file_extension_argument (argc, argv, ".vtk");

  if (p_file_indices.size () == 0 && vtk_file_indices.size () == 0)
  {
    print_error ("No .PCD or .VTK file given. Nothing to visualize.\n");
    return (-1);
  }

  // Command line parsing
  double bcolor[3] = {0, 0, 0};
  pcl::console::parse_3x_arguments (argc, argv, "-bc", bcolor[0], bcolor[1], bcolor[2]);

  std::vector<double> fcolor_r, fcolor_b, fcolor_g;
  bool fcolorparam = pcl::console::parse_multiple_3x_arguments (argc, argv, "-fc", fcolor_r, fcolor_g, fcolor_b);

  std::vector<double> pose_x, pose_y, pose_z, pose_roll, pose_pitch, pose_yaw;
  bool poseparam = pcl::console::parse_multiple_3x_arguments (argc, argv, "-position", pose_x, pose_y, pose_z);
  poseparam &= pcl::console::parse_multiple_3x_arguments (argc, argv, "-orientation", pose_roll, pose_pitch, pose_yaw);

  std::vector<int> psize;
  pcl::console::parse_multiple_arguments (argc, argv, "-ps", psize);

  std::vector<double> opaque;
  pcl::console::parse_multiple_arguments (argc, argv, "-opaque", opaque);

  std::vector<std::string> shadings;
  pcl::console::parse_multiple_arguments (argc, argv, "-shading", shadings);

  int mview = 0;
  pcl::console::parse_argument (argc, argv, "-multiview", mview);

  int normals = 0;
  pcl::console::parse_argument (argc, argv, "-normals", normals);
  float normals_scale = NORMALS_SCALE;
  pcl::console::parse_argument (argc, argv, "-normals_scale", normals_scale);

  int pc = 0;
  pcl::console::parse_argument (argc, argv, "-pc", pc);
  float pc_scale = PC_SCALE;
  pcl::console::parse_argument (argc, argv, "-pc_scale", pc_scale);

  bool use_vbos = false;
  pcl::console::parse_argument (argc, argv, "-vbo_rendering", use_vbos);
  if (use_vbos) 
    print_highlight ("Vertex Buffer Object (VBO) visualization enabled.\n");

  bool use_pp   = pcl::console::find_switch (argc, argv, "-use_point_picking");
  if (use_pp) 
    print_highlight ("Point picking enabled.\n");

  bool use_optimal_l_colors = pcl::console::find_switch (argc, argv, "-optimal_label_colors");
  if (use_optimal_l_colors)
    print_highlight ("Optimal glasbey colors are being assigned to existing labels.\nNote: No static mapping between label ids and colors\n");

  // If VBOs are not enabled, then try to use immediate rendering
  bool use_immediate_rendering = false;
  if (!use_vbos)
  {
    pcl::console::parse_argument (argc, argv, "-immediate_rendering", use_immediate_rendering);
    if (use_immediate_rendering) 
      print_highlight ("Using immediate mode rendering.\n");
  }

  // Multiview enabled?
  int y_s = 0, x_s = 0;
  double x_step = 0, y_step = 0;
  if (mview)
  {
    print_highlight ("Multi-viewport rendering enabled.\n");

    y_s = static_cast<int>(floor (sqrt (static_cast<float>(p_file_indices.size () + vtk_file_indices.size ()))));
    x_s = y_s + static_cast<int>(ceil (double (p_file_indices.size () + vtk_file_indices.size ()) / double (y_s) - y_s));

    if (p_file_indices.size () != 0)
    {
      print_highlight ("Preparing to load "); print_value ("%d", p_file_indices.size ()); print_info (" pcd files.\n");
    }

    if (vtk_file_indices.size () != 0)
    {
      print_highlight ("Preparing to load "); print_value ("%d", vtk_file_indices.size ()); print_info (" vtk files.\n");
    }

    x_step = static_cast<double>(1.0 / static_cast<double>(x_s));
    y_step = static_cast<double>(1.0 / static_cast<double>(y_s));
    print_value ("%d", x_s);    print_info ("x"); print_value ("%d", y_s);
    print_info (" / ");      print_value ("%f", x_step); print_info ("x"); print_value ("%f", y_step);
    print_info (")\n");
  }

  // Fix invalid multiple arguments
  if (psize.size () != p_file_indices.size () && psize.size () > 0)
    for (size_t i = psize.size (); i < p_file_indices.size (); ++i)
      psize.push_back (1);
  if (opaque.size () != p_file_indices.size () && opaque.size () > 0)
    for (size_t i = opaque.size (); i < p_file_indices.size (); ++i)
      opaque.push_back (1.0);

  if (shadings.size () != p_file_indices.size () && shadings.size () > 0)
    for (size_t i = shadings.size (); i < p_file_indices.size (); ++i)
      shadings.push_back ("flat");

  // Create the PCLVisualizer object
#if VTK_MAJOR_VERSION==6 || (VTK_MAJOR_VERSION==5 && VTK_MINOR_VERSION>6)
  boost::shared_ptr<pcl::visualization::PCLPlotter> ph;
#endif  
  // Using min_p, max_p to set the global Y min/max range for the histogram
  float min_p = FLT_MAX; float max_p = -FLT_MAX;

  int k = 0, l = 0, viewport = 0;
  // Load the data files
  pcl::PCDReader pcd;
  pcl::console::TicToc tt;
  ColorHandlerPtr color_handler;
  GeometryHandlerPtr geometry_handler;

  // Go through VTK files
  for (size_t i = 0; i < vtk_file_indices.size (); ++i)
  {
    // Load file
    tt.tic ();
    print_highlight (stderr, "Loading "); print_value (stderr, "%s ", argv[vtk_file_indices.at (i)]);
    vtkPolyDataReader* reader = vtkPolyDataReader::New ();
    reader->SetFileName (argv[vtk_file_indices.at (i)]);
    reader->Update ();
    vtkSmartPointer<vtkPolyData> polydata = reader->GetOutput ();
    if (!polydata)
      return (-1);
    print_info ("[done, "); print_value ("%g", tt.toc ()); print_info (" ms : "); print_value ("%d", polydata->GetNumberOfPoints ()); print_info (" points]\n");

    // Create the PCLVisualizer object here on the first encountered XYZ file
    if (!p)
      p.reset (new pcl::visualization::PCLVisualizer (argc, argv, "PCD viewer"));

    // Multiview enabled?
    if (mview)
    {
      p->createViewPort (k * x_step, l * y_step, (k + 1) * x_step, (l + 1) * y_step, viewport);
      k++;
      if (k >= x_s)
      {
        k = 0;
        l++;
      }
    }

    // Add as actor
    std::stringstream cloud_name ("vtk-");
    cloud_name << argv[vtk_file_indices.at (i)] << "-" << i;
    p->addModelFromPolyData (polydata, cloud_name.str (), viewport);

    // Change the shape rendered color
    if (fcolorparam && fcolor_r.size () > i && fcolor_g.size () > i && fcolor_b.size () > i)
      p->setShapeRenderingProperties (pcl::visualization::PCL_VISUALIZER_COLOR, fcolor_r[i], fcolor_g[i], fcolor_b[i], cloud_name.str ());

    // Change the shape rendered point size
    if (psize.size () > 0)
      p->setShapeRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, psize.at (i), cloud_name.str ());

    // Change the shape rendered opacity
    if (opaque.size () > 0)
      p->setShapeRenderingProperties (pcl::visualization::PCL_VISUALIZER_OPACITY, opaque.at (i), cloud_name.str ());

    // Change the shape rendered shading
    if (shadings.size () > 0)
    {
      if (shadings[i] == "flat")
      {
        print_highlight (stderr, "Setting shading property for %s to FLAT.\n", argv[vtk_file_indices.at (i)]);
        p->setShapeRenderingProperties (pcl::visualization::PCL_VISUALIZER_SHADING, pcl::visualization::PCL_VISUALIZER_SHADING_FLAT, cloud_name.str ());
      }
      else if (shadings[i] == "gouraud")
      {
        print_highlight (stderr, "Setting shading property for %s to GOURAUD.\n", argv[vtk_file_indices.at (i)]);
        p->setShapeRenderingProperties (pcl::visualization::PCL_VISUALIZER_SHADING, pcl::visualization::PCL_VISUALIZER_SHADING_GOURAUD, cloud_name.str ());
      }
      else if (shadings[i] == "phong")
      {
        print_highlight (stderr, "Setting shading property for %s to PHONG.\n", argv[vtk_file_indices.at (i)]);
        p->setShapeRenderingProperties (pcl::visualization::PCL_VISUALIZER_SHADING, pcl::visualization::PCL_VISUALIZER_SHADING_PHONG, cloud_name.str ());
      }
    }
  }

  pcl::PCLPointCloud2::Ptr cloud;
  // Go through PCD files
  for (size_t i = 0; i < p_file_indices.size (); ++i)
  {
    tt.tic ();
    cloud.reset (new pcl::PCLPointCloud2);
    Eigen::Vector4f origin;
    Eigen::Quaternionf orientation;
    int version;

    print_highlight (stderr, "Loading "); print_value (stderr, "%s ", argv[p_file_indices.at (i)]);

    if (pcd.read (argv[p_file_indices.at (i)], *cloud, origin, orientation, version) < 0)
      return (-1);

    // Calculate transform if available.
    if (pose_x.size () > i && pose_y.size () > i && pose_z.size () > i &&
        pose_roll.size () > i && pose_pitch.size () > i && pose_yaw.size () > i)
    {
      Eigen::Affine3f pose =
        Eigen::Translation3f (Eigen::Vector3f (pose_x[i], pose_y[i], pose_z[i])) *
        Eigen::AngleAxisf (pose_yaw[i],   Eigen::Vector3f::UnitZ ()) *
        Eigen::AngleAxisf (pose_pitch[i], Eigen::Vector3f::UnitY ()) *
        Eigen::AngleAxisf (pose_roll[i],  Eigen::Vector3f::UnitX ());
      orientation = pose.rotation () * orientation;
      origin.block<3, 1> (0, 0) = (pose * Eigen::Translation3f (origin.block<3, 1> (0, 0))).translation ();
    }

    std::stringstream cloud_name;

    // ---[ Special check for 1-point multi-dimension histograms
    if (cloud->fields.size () == 1 && isMultiDimensionalFeatureField (cloud->fields[0]))
    {
      cloud_name << argv[p_file_indices.at (i)];

#if VTK_MAJOR_VERSION==6 || (VTK_MAJOR_VERSION==5 && VTK_MINOR_VERSION>6)
      if (!ph)
        ph.reset (new pcl::visualization::PCLPlotter);
#endif

      pcl::getMinMax (*cloud, 0, cloud->fields[0].name, min_p, max_p);
#if VTK_MAJOR_VERSION==6 || (VTK_MAJOR_VERSION==5 && VTK_MINOR_VERSION>6)
      ph->addFeatureHistogram (*cloud, cloud->fields[0].name, cloud_name.str ());
#endif
      print_info ("[done, "); print_value ("%g", tt.toc ()); print_info (" ms : "); print_value ("%d", cloud->fields[0].count); print_info (" points]\n");
      continue;
    }

    // ---[ Special check for 2D images
    if (cloud->fields.size () == 1 && isOnly2DImage (cloud->fields[0]))
    {
      print_info ("[done, "); print_value ("%g", tt.toc ()); print_info (" ms : "); print_value ("%u", cloud->width * cloud->height); print_info (" points]\n");
      print_info ("Available dimensions: "); print_value ("%s\n", pcl::getFieldsList (*cloud).c_str ());
      
      std::stringstream name;
      name << "PCD Viewer :: " << argv[p_file_indices.at (i)];
      pcl::visualization::ImageViewer::Ptr img (new pcl::visualization::ImageViewer (name.str ()));
      pcl::PointCloud<pcl::RGB> rgb_cloud;
      pcl::fromPCLPointCloud2 (*cloud, rgb_cloud);

      img->addRGBImage (rgb_cloud);
      imgs.push_back (img);

      continue;
    }

    cloud_name << argv[p_file_indices.at (i)] << "-" << i;

    // Create the PCLVisualizer object here on the first encountered XYZ file
    if (!p)
    {
      p.reset (new pcl::visualization::PCLVisualizer (argc, argv, "PCD viewer"));
      if (use_pp)   // Only enable the point picking callback if the command line parameter is enabled
        p->registerPointPickingCallback (&pp_callback, static_cast<void*> (&cloud));

      // Set whether or not we should be using the vtkVertexBufferObjectMapper
      p->setUseVbos (use_vbos);

      if (!p->cameraParamsSet () && !p->cameraFileLoaded ())
      {
        Eigen::Matrix3f rotation;
        rotation = orientation;
        p->setCameraPosition (origin [0]                  , origin [1]                  , origin [2],
                              origin [0] + rotation (0, 2), origin [1] + rotation (1, 2), origin [2] + rotation (2, 2),
                                           rotation (0, 1),              rotation (1, 1),              rotation (2, 1));
      }
    }

    // Multiview enabled?
    if (mview)
    {
      p->createViewPort (k * x_step, l * y_step, (k + 1) * x_step, (l + 1) * y_step, viewport);
      k++;
      if (k >= x_s)
      {
        k = 0;
        l++;
      }
    }

    if (cloud->width * cloud->height == 0)
    {
      print_error ("[error: no points found!]\n");
      return (-1);
    }

    // If no color was given, get random colors
    if (fcolorparam)
    {
      if (fcolor_r.size () > i && fcolor_g.size () > i && fcolor_b.size () > i)
        color_handler.reset (new pcl::visualization::PointCloudColorHandlerCustom<pcl::PCLPointCloud2> (cloud, fcolor_r[i], fcolor_g[i], fcolor_b[i]));
      else
        color_handler.reset (new pcl::visualization::PointCloudColorHandlerRandom<pcl::PCLPointCloud2> (cloud));
    }
    else
      color_handler.reset (new pcl::visualization::PointCloudColorHandlerRandom<pcl::PCLPointCloud2> (cloud));

    // Add the dataset with a XYZ and a random handler
    geometry_handler.reset (new pcl::visualization::PointCloudGeometryHandlerXYZ<pcl::PCLPointCloud2> (cloud));
    // Add the cloud to the renderer
    //p->addPointCloud<pcl::PointXYZ> (cloud_xyz, geometry_handler, color_handler, cloud_name.str (), viewport);
    p->addPointCloud (cloud, geometry_handler, color_handler, origin, orientation, cloud_name.str (), viewport);


    if (mview)
      // Add text with file name
      p->addText (argv[p_file_indices.at (i)], 5, 5, 10, 1.0, 1.0, 1.0, "text_" + std::string (argv[p_file_indices.at (i)]), viewport);

    // If normal lines are enabled
    if (normals != 0)
    {
      int normal_idx = pcl::getFieldIndex (*cloud, "normal_x");
      if (normal_idx == -1)
      {
        print_error ("Normal information requested but not available.\n");
        continue;
        //return (-1);
      }
      //
      // Convert from blob to pcl::PointCloud
      pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_xyz (new pcl::PointCloud<pcl::PointXYZ>);
      pcl::fromPCLPointCloud2 (*cloud, *cloud_xyz);
      cloud_xyz->sensor_origin_ = origin;
      cloud_xyz->sensor_orientation_ = orientation;

      pcl::PointCloud<pcl::Normal>::Ptr cloud_normals (new pcl::PointCloud<pcl::Normal>);
      pcl::fromPCLPointCloud2 (*cloud, *cloud_normals);
      std::stringstream cloud_name_normals;
      cloud_name_normals << argv[p_file_indices.at (i)] << "-" << i << "-normals";
      p->addPointCloudNormals<pcl::PointXYZ, pcl::Normal> (cloud_xyz, cloud_normals, normals, normals_scale, cloud_name_normals.str (), viewport);
    }
	/*
    // If principal curvature lines are enabled
    if (pc != 0)
    {
      if (normals == 0)
        normals = pc;

      int normal_idx = pcl::getFieldIndex (*cloud, "normal_x");
      if (normal_idx == -1)
      {
        print_error ("Normal information requested but not available.\n");
        continue;
        //return (-1);
      }
      int pc_idx = pcl::getFieldIndex (*cloud, "principal_curvature_x");
      if (pc_idx == -1)
      {
        print_error ("Principal Curvature information requested but not available.\n");
        continue;
        //return (-1);
      }
      //
      // Convert from blob to pcl::PointCloud
      pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_xyz (new pcl::PointCloud<pcl::PointXYZ>);
      pcl::fromPCLPointCloud2 (*cloud, *cloud_xyz);
      cloud_xyz->sensor_origin_ = origin;
      cloud_xyz->sensor_orientation_ = orientation;
      pcl::PointCloud<pcl::Normal>::Ptr cloud_normals (new pcl::PointCloud<pcl::Normal>);
      pcl::fromPCLPointCloud2 (*cloud, *cloud_normals);
      pcl::PointCloud<pcl::PrincipalCurvatures>::Ptr cloud_pc (new pcl::PointCloud<pcl::PrincipalCurvatures>);
      pcl::fromPCLPointCloud2 (*cloud, *cloud_pc);
      std::stringstream cloud_name_normals_pc;
      cloud_name_normals_pc << argv[p_file_indices.at (i)] << "-" << i << "-normals";
      int factor = (std::min)(normals, pc);
      p->addPointCloudNormals<pcl::PointXYZ, pcl::Normal> (cloud_xyz, cloud_normals, factor, normals_scale, cloud_name_normals_pc.str (), viewport);
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_COLOR, 1.0, 0.0, 0.0, cloud_name_normals_pc.str ());
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_LINE_WIDTH, 3, cloud_name_normals_pc.str ());
      cloud_name_normals_pc << "-pc";
      p->addPointCloudPrincipalCurvatures<pcl::PointXYZ, pcl::Normal> (cloud_xyz, cloud_normals, cloud_pc, factor, pc_scale, cloud_name_normals_pc.str (), viewport);
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_LINE_WIDTH, 3, cloud_name_normals_pc.str ());
    }
	*/
    // Add every dimension as a possible color
    if (!fcolorparam)
    {
      int rgb_idx = 0;
      int label_idx = 0;
      for (size_t f = 0; f < cloud->fields.size (); ++f)
      {
        if (cloud->fields[f].name == "rgb" || cloud->fields[f].name == "rgba")
        {
          rgb_idx = f + 1;
          color_handler.reset (new pcl::visualization::PointCloudColorHandlerRGBField<pcl::PCLPointCloud2> (cloud));
        }
        //else if (cloud->fields[f].name == "label")
        //{
         // label_idx = f + 1;
          //color_handler.reset (new pcl::visualization::PointCloudColorHandlerLabelField<pcl::PCLPointCloud2> (cloud, !use_optimal_l_colors));
        //}
        else
        {
          if (!isValidFieldName (cloud->fields[f].name))
            continue;
          color_handler.reset (new pcl::visualization::PointCloudColorHandlerGenericField<pcl::PCLPointCloud2> (cloud, cloud->fields[f].name));
        }
        // Add the cloud to the renderer
        //p->addPointCloud<pcl::PointXYZ> (cloud_xyz, color_handler, cloud_name.str (), viewport);
        p->addPointCloud (cloud, color_handler, origin, orientation, cloud_name.str (), viewport);
      }
      // Set RGB color handler or label handler as default
      p->updateColorHandlerIndex (cloud_name.str (), (rgb_idx ? rgb_idx : label_idx));
    }

    // Additionally, add normals as a handler
    geometry_handler.reset (new pcl::visualization::PointCloudGeometryHandlerSurfaceNormal<pcl::PCLPointCloud2> (cloud));
    if (geometry_handler->isCapable ())
      //p->addPointCloud<pcl::PointXYZ> (cloud_xyz, geometry_handler, cloud_name.str (), viewport);
      p->addPointCloud (cloud, geometry_handler, origin, orientation, cloud_name.str (), viewport);

    if (use_immediate_rendering)
      // Set immediate mode rendering ON
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_IMMEDIATE_RENDERING, 1.0, cloud_name.str ());

    // Change the cloud rendered point size
    if (psize.size () > 0)
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, psize.at (i), cloud_name.str ());

    // Change the cloud rendered opacity
    if (opaque.size () > 0)
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_OPACITY, opaque.at (i), cloud_name.str ());

    // Reset camera viewpoint to center of cloud if camera parameters were not passed manually and this is the first loaded cloud
    if (i == 0 && !p->cameraParamsSet () && !p->cameraFileLoaded ())
    {
      p->resetCameraViewpoint (cloud_name.str ());
      p->resetCamera ();
    }

    print_info ("[done, "); print_value ("%g", tt.toc ()); print_info (" ms : "); print_value ("%u", cloud->width * cloud->height); print_info (" points]\n");
    print_info ("Available dimensions: "); print_value ("%s\n", pcl::getFieldsList (*cloud).c_str ());
    if (p->cameraFileLoaded ())
      print_info ("Camera parameters restored from %s.\n", p->getCameraFile ().c_str ());
  }

  if (!mview && p)
  {
    std::string str;
    if (!p_file_indices.empty ())
      str = std::string (argv[p_file_indices.at (0)]);
    else if (!vtk_file_indices.empty ())
      str = std::string (argv[vtk_file_indices.at (0)]);

    for (size_t i = 1; i < p_file_indices.size (); ++i)
      str += ", " + std::string (argv[p_file_indices.at (i)]);

    for (size_t i = 1; i < vtk_file_indices.size (); ++i)
      str += ", " + std::string (argv[vtk_file_indices.at (i)]);

    p->addText (str, 5, 5, 10, 1.0, 1.0, 1.0, "text_allnames");
  }

  if (p)
    p->setBackgroundColor (bcolor[0], bcolor[1], bcolor[2]);
  // Read axes settings
  double axes  = 0.0;
  pcl::console::parse_argument (argc, argv, "-ax", axes);
  if (axes != 0.0 && p)
  {
    float ax_x = 0.0, ax_y = 0.0, ax_z = 0.0;
    pcl::console::parse_3x_arguments (argc, argv, "-ax_pos", ax_x, ax_y, ax_z);
    // Draw XYZ axes if command-line enabled
    p->addCoordinateSystem (axes, ax_x, ax_y, ax_z, "global");
  }

  // Clean up the memory used by the binary blob
  // Note: avoid resetting the cloud, otherwise the PointPicking callback will fail
  if (!use_pp)   // Only enable the point picking callback if the command line parameter is enabled
  {
    cloud.reset ();
    xyzcloud.reset ();
  }

  // If we have been given images, create our own loop so that we can spin each individually
  if (!imgs.empty ())
  {
    bool stopped = false;
    do
    {
#if VTK_MAJOR_VERSION==6 || (VTK_MAJOR_VERSION==5 && VTK_MINOR_VERSION>6)
      if (ph) ph->spinOnce ();
#endif

      for (int i = 0; i < int (imgs.size ()); ++i)
      {
        if (imgs[i]->wasStopped ())
        {
          stopped = true;
          break;
        }
        imgs[i]->spinOnce ();
      }
        
      if (p)
      {
        if (p->wasStopped ())
        {
          stopped = true;
          break;
        }
        p->spinOnce ();
      }
      boost::this_thread::sleep (boost::posix_time::microseconds (100));
    }
    while (!stopped);
  }
  else
  {
    // If no images, continue
#if VTK_MAJOR_VERSION==6 || (VTK_MAJOR_VERSION==5 && VTK_MINOR_VERSION>6)
    if (ph)
    {
      //print_highlight ("Setting the global Y range for all histograms to: "); print_value ("%f -> %f\n", min_p, max_p);
      //ph->setGlobalYRange (min_p, max_p);
      //ph->updateWindowPositions ();
      if (p)
        p->spin ();
      else
        ph->spin ();
    }
    else
#endif
      if (p)
        p->spin ();
  }
}
Example #27
int main(int argc, char **argv) {
  ros::init(argc, argv, "talker");
  ros::NodeHandle n;
  ros::Publisher pub = n.advertise<cob_3d_mapping_msgs::ShapeArray>("/segmentation/shape_array",1);

  ros::Rate loop_rate(1);

  int number_shapes=10;
  if(argc==2)
  {
      number_shapes = atoi(argv[1]);
      std::cout<<"Publishing map with "<<number_shapes<<" shapes.\n";
  }
  else
  {
      std::cout<<"WARNING: Use number of shapes in map as input argument.(Using default value 10) \n";
  }


  float x_shift = -0.5, y_shift = -0.5, z_shift = -0.5;

  int runner = 1;
  //while(ros::ok()) {
  for (int k = 0; k < 2; ++k) {

    cob_3d_mapping_msgs::ShapeArray sa;
    sa.header.frame_id="/map";

    for (int i = 0; i < number_shapes; i++)
    {
    // Transform the shape with a PCL transformation
    Eigen::Affine3f transform;
    Eigen::Vector3f u1, u2, o;

    u1 << 0, 1, 0;
    u2 << 0, 0, 1;
    o << i * x_shift, i * y_shift, i * z_shift;
    pcl::getTransformationFromTwoUnitVectorsAndOrigin(u1, u2, o, transform);

    cob_3d_mapping_msgs::Shape s;
    //transform parameters
    Eigen::Vector3f normal,centroid;
    double d;

    normal << 0, 0, 1;
    normal= transform.rotation() * normal;

    centroid << 1, 1, 1;
    centroid =transform * centroid;

    d= 0;
    d=fabs(centroid.dot(normal));


    // put params to shape msg
    s.params.push_back(normal[0]);
    s.params.push_back(normal[1]);
    s.params.push_back(normal[2]);
    s.params.push_back(d);
    s.centroid.x = centroid[0];
    s.centroid.y = centroid[1];
    s.centroid.z = centroid[2];
    s.header.frame_id="/map";

    s.color.r = 0;
    s.color.g = 1;
    s.color.b = 0;
    s.color.a = 1;

    pcl::PointCloud<pcl::PointXYZ> pc;
    pcl::PointXYZ pt;

    pt.x=0;
    pt.y=0;
    pt.z=0;
    pc.push_back(pt);

    pt.x=1;
    pt.y=0;
    pt.z=0;
    pc.push_back(pt);

    pt.x=1;
    pt.y=2;
    pt.z=0;
    pc.push_back(pt);

    pt.x=0;
    pt.y=2;
    pt.z=0;
    pc.push_back(pt);

    pt.x=0.5;
    pt.y=1;
    pt.z=0;
    pc.push_back(pt);


    
    // transform the point cloud
    pcl::getTransformedPointCloud(pc,transform,pc);

    sensor_msgs::PointCloud2 pc2;
    pcl::toROSMsg(pc,pc2);
    s.points.push_back(pc2);
    s.holes.push_back(false);
    sa.shapes.push_back(s);
  }

    pub.publish(sa);

    ros::spinOnce();
    loop_rate.sleep();
    runner++;
 }

  return 0;
}
Example #28
TEST (PCL, Matrix4Affine3Transform)
{
  float rot_x = 2.8827f;
  float rot_y = -0.31190f;
  float rot_z = -0.93058f;
  Eigen::Affine3f affine;
  pcl::getTransformation (0, 0, 0, rot_x, rot_y, rot_z, affine);

  EXPECT_NEAR (affine (0, 0),  0.56854731f, 1e-4); EXPECT_NEAR (affine (0, 1), -0.82217032f, 1e-4); EXPECT_NEAR (affine (0, 2), -0.028107658f, 1e-4);
  EXPECT_NEAR (affine (1, 0), -0.76327348f, 1e-4); EXPECT_NEAR (affine (1, 1), -0.51445758f, 1e-4); EXPECT_NEAR (affine (1, 2), -0.39082864f, 1e-4);
  EXPECT_NEAR (affine (2, 0),  0.30686751f, 1e-4); EXPECT_NEAR (affine (2, 1),  0.24365838f, 1e-4); EXPECT_NEAR (affine (2, 2), -0.920034f, 1e-4);

  // Approximative!!! Uses SVD internally! See http://eigen.tuxfamily.org/dox/Transform_8h_source.html
  Eigen::Matrix3f rotation = affine.rotation ();

  EXPECT_NEAR (rotation (0, 0),  0.56854731f, 1e-4); EXPECT_NEAR (rotation (0, 1), -0.82217032f, 1e-4); EXPECT_NEAR (rotation (0, 2), -0.028107658f, 1e-4);
  EXPECT_NEAR (rotation (1, 0), -0.76327348f, 1e-4); EXPECT_NEAR (rotation (1, 1), -0.51445758f, 1e-4); EXPECT_NEAR (rotation (1, 2), -0.39082864f, 1e-4);
  EXPECT_NEAR (rotation (2, 0),  0.30686751f, 1e-4); EXPECT_NEAR (rotation (2, 1),  0.24365838f, 1e-4); EXPECT_NEAR (rotation (2, 2), -0.920034f, 1e-4);

  float trans_x, trans_y, trans_z;
  pcl::getTransformation (0.1f, 0.2f, 0.3f, rot_x, rot_y, rot_z, affine);
  pcl::getTranslationAndEulerAngles (affine, trans_x, trans_y, trans_z, rot_x, rot_y, rot_z);
  EXPECT_FLOAT_EQ (trans_x, 0.1f);
  EXPECT_FLOAT_EQ (trans_y, 0.2f);
  EXPECT_FLOAT_EQ (trans_z, 0.3f);
  EXPECT_FLOAT_EQ (rot_x, 2.8827f);
  EXPECT_FLOAT_EQ (rot_y, -0.31190f);
  EXPECT_FLOAT_EQ (rot_z, -0.93058f);

  Eigen::Matrix4f transformation (Eigen::Matrix4f::Identity ());
  transformation.block<3, 3> (0, 0) = affine.rotation ();
  transformation.block<3, 1> (0, 3) = affine.translation ();

  PointXYZ p (1.f, 2.f, 3.f);
  Eigen::Vector3f v3 = p.getVector3fMap ();
  Eigen::Vector4f v4 = p.getVector4fMap ();

  Eigen::Vector3f v3t (affine * v3);
  Eigen::Vector4f v4t (transformation * v4);

  PointXYZ pt = pcl::transformPoint (p, affine);

  EXPECT_NEAR (pt.x, v3t.x (), 1e-4); EXPECT_NEAR (pt.x, v4t.x (), 1e-4);
  EXPECT_NEAR (pt.y, v3t.y (), 1e-4); EXPECT_NEAR (pt.y, v4t.y (), 1e-4);
  EXPECT_NEAR (pt.z, v3t.z (), 1e-4); EXPECT_NEAR (pt.z, v4t.z (), 1e-4);

  PointNormal pn;
  pn.getVector3fMap () = p.getVector3fMap ();
  pn.getNormalVector3fMap () = Eigen::Vector3f (0.60f, 0.48f, 0.64f);
  Eigen::Vector3f n3 = pn.getNormalVector3fMap ();
  Eigen::Vector4f n4 = pn.getNormalVector4fMap ();

  Eigen::Vector3f n3t (affine.rotation() * n3);
  Eigen::Vector4f n4t (transformation * n4);

  PointNormal pnt = pcl::transformPointWithNormal (pn, affine);

  EXPECT_NEAR (pnt.x, v3t.x (), 1e-4); EXPECT_NEAR (pnt.x, v4t.x (), 1e-4);
  EXPECT_NEAR (pnt.y, v3t.y (), 1e-4); EXPECT_NEAR (pnt.y, v4t.y (), 1e-4);
  EXPECT_NEAR (pnt.z, v3t.z (), 1e-4); EXPECT_NEAR (pnt.z, v4t.z (), 1e-4);
  EXPECT_NEAR (pnt.normal_x, n3t.x (), 1e-4); EXPECT_NEAR (pnt.normal_x, n4t.x (), 1e-4);
  EXPECT_NEAR (pnt.normal_y, n3t.y (), 1e-4); EXPECT_NEAR (pnt.normal_y, n4t.y (), 1e-4);
  EXPECT_NEAR (pnt.normal_z, n3t.z (), 1e-4); EXPECT_NEAR (pnt.normal_z, n4t.z (), 1e-4);

  PointCloud<PointXYZ> c, ct;
  c.push_back (p);
  pcl::transformPointCloud (c, ct, affine);
  EXPECT_FLOAT_EQ (pt.x, ct[0].x); 
  EXPECT_FLOAT_EQ (pt.y, ct[0].y); 
  EXPECT_FLOAT_EQ (pt.z, ct[0].z); 

  pcl::transformPointCloud (c, ct, transformation);
  EXPECT_FLOAT_EQ (pt.x, ct[0].x); 
  EXPECT_FLOAT_EQ (pt.y, ct[0].y); 
  EXPECT_FLOAT_EQ (pt.z, ct[0].z); 

  affine = transformation;

  std::vector<int> indices (1); indices[0] = 0;

  pcl::transformPointCloud (c, indices, ct, affine);
  EXPECT_FLOAT_EQ (pt.x, ct[0].x); 
  EXPECT_FLOAT_EQ (pt.y, ct[0].y); 
  EXPECT_FLOAT_EQ (pt.z, ct[0].z); 

  pcl::transformPointCloud (c, indices, ct, transformation);
  EXPECT_FLOAT_EQ (pt.x, ct[0].x); 
  EXPECT_FLOAT_EQ (pt.y, ct[0].y); 
  EXPECT_FLOAT_EQ (pt.z, ct[0].z); 
}
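For reference, a minimal sketch of converting between the two pose representations exercised by this test:

// Sketch only: round-trip between Eigen::Affine3f and Eigen::Matrix4f.
Eigen::Affine3f affine_pose = Eigen::Affine3f::Identity ();
Eigen::Matrix4f as_matrix = affine_pose.matrix ();   // full 4x4 homogeneous matrix
Eigen::Affine3f back_again (as_matrix);              // construct an Affine3f from a 4x4 matrix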
Example #29
File: sim_viewer.cpp, Project: hobu/pcl
void simulate_callback (const pcl::visualization::KeyboardEvent &event,
                        void* viewer_void)
{
  pcl::visualization::PCLVisualizer::Ptr viewer = *static_cast<pcl::visualization::PCLVisualizer::Ptr *> (viewer_void);
  // 'v' is used for "virtual", as 's' for "simulate" is already taken
  if (event.getKeySym () == "v" && event.keyDown ())
  {
    std::cout << "v was pressed => simulate cloud" << std::endl;

    std::vector<pcl::visualization::Camera> cams;
    viewer->getCameras(cams);
    
    if (cams.size() != 1) {
      std::cout << "n cams not 1 exiting\n"; // for now in case ...
      return;
    }
   // cout << "n cams: " << cams.size() << "\n";
    pcl::visualization::Camera cam = cams[0];
    


	      
	      Eigen::Affine3f pose;
	      pose = viewer->getViewerPose() ;
	      
	      
     std::cout << cam.pos[0] << " " 
               << cam.pos[1] << " " 
               << cam.pos[2] << " p\n";	      
	      
	     Eigen::Matrix3f m;
	     m =pose.rotation();
	     //All axies use right hand rule. x=red axis, y=green axis, z=blue axis z direction is point into the screen. z \ \ \ -----------> x | | | | | | y 
	      
 cout << pose(0,0)  << " " << pose(0,1) << " " << pose(0,2)  << " " << pose(0,3) << " x0\n";
 cout << pose(1,0) << " " << pose(1,1) << " " << pose(1,2) << " " << pose(1,3) << " x1\n";
  cout << pose(2,0) << " " << pose(2,1)  << " " << pose(2,2) << " " << pose(2,3)<< "x2\n";

  double yaw,pitch, roll;
  wRo_to_euler(m,yaw,pitch,roll);
  
 printf("RPY: %f %f %f\n", roll*180/M_PI,pitch*180/M_PI,yaw*180/M_PI);    

// matrix->GetElement(1,0);
  
 cout << m(0,0)  << " " << m(0,1) << " " << m(0,2)  << " "  << " x0\n";
 cout << m(1,0) << " " << m(1,1) << " " << m(1,2) << " "  << " x1\n";
  cout << m(2,0) << " " << m(2,1)  << " " << m(2,2) << " "<< "x2\n\n";
  
  Eigen::Quaternionf rf;
  rf = Eigen::Quaternionf(m);
  
  
   Eigen::Quaterniond r(rf.w(),rf.x(),rf.y(),rf.z());
  
   Eigen::Isometry3d init_pose;
   init_pose.setIdentity();
   init_pose.translation() << cam.pos[0], cam.pos[1], cam.pos[2];  
   //Eigen::Quaterniond m = euler_to_quat(-1.54, 0, 0);
   init_pose.rotate(r);  
//   
  
    std::stringstream ss;
  print_Isometry3d(init_pose,ss);
  std::cout << "init_pose: " << ss.str() << "\n";
  
  
  
  
  viewer->addCoordinateSystem (1.0,pose,"reference");
  
  
  
    double tic = getTime();
    std::stringstream ss2;
    ss2.precision(20);
    ss2 << "simulated_pcl_" << tic << ".pcd";
    capture(init_pose);
    cout << (getTime() -tic) << " sec\n";  
  }
}
Example #30
void simulate_callback (const pcl::visualization::KeyboardEvent &event,
                        void* viewer_void)
{
  boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer = *static_cast<boost::shared_ptr<pcl::visualization::PCLVisualizer> *> (viewer_void);
  // 'v' is used for "virtual", as 's' for "simulate" is already taken
  if (event.getKeySym () == "v" && event.keyDown ())
  {
    std::cout << "v was pressed => simulate cloud" << std::endl;

    std::vector<pcl::visualization::Camera> cams;
    viewer->getCameras(cams);
    
    if (cams.size() != 1) {
      std::cout << "n cams not 1 exiting\n"; // for now in case ...
      return;
    }
   // cout << "n cams: " << cams.size() << "\n";
    pcl::visualization::Camera cam = cams[0];
    


	      
	      Eigen::Affine3f pose;
	      pose = viewer->getViewerPose() ;
	      
	      
     std::cout << cam.pos[0] << " " 
               << cam.pos[1] << " " 
               << cam.pos[2] << " p\n";	      
	      
	     Eigen::Matrix3f m;
	     m =pose.rotation();
	     //All axies use right hand rule. x=red axis, y=green axis, z=blue axis z direction is point into the screen. z \ \ \ -----------> x | | | | | | y 
	      
 cout << pose(0,0)  << " " << pose(0,1) << " " << pose(0,2)  << " " << pose(0,3) << " x0\n";
 cout << pose(1,0) << " " << pose(1,1) << " " << pose(1,2) << " " << pose(1,3) << " x1\n";
  cout << pose(2,0) << " " << pose(2,1)  << " " << pose(2,2) << " " << pose(2,3)<< "x2\n";

  double yaw,pitch, roll;
  wRo_to_euler(m,yaw,pitch,roll);
  
 printf("RPY: %f %f %f\n", roll*180/M_PI,pitch*180/M_PI,yaw*180/M_PI);    

// matrix->GetElement(1,0);
  
 cout << m(0,0)  << " " << m(0,1) << " " << m(0,2)  << " "  << " x0\n";
 cout << m(1,0) << " " << m(1,1) << " " << m(1,2) << " "  << " x1\n";
  cout << m(2,0) << " " << m(2,1)  << " " << m(2,2) << " "<< "x2\n\n";
  
  Eigen::Quaternionf rf;
  rf = Eigen::Quaternionf(m);
  
  
   Eigen::Quaterniond r(rf.w(),rf.x(),rf.y(),rf.z());
  
   Eigen::Isometry3d init_pose;
   init_pose.setIdentity();
   init_pose.translation() << cam.pos[0], cam.pos[1], cam.pos[2];  
   //Eigen::Quaterniond m = euler_to_quat(-1.54, 0, 0);
   init_pose.rotate(r);  
//   
  
    std::stringstream ss;
  print_Isometry3d(init_pose,ss);
  std::cout << "init_pose: " << ss.str() << "\n";
  
  
  
  
  viewer->addCoordinateSystem (1.0,pose);
  
  
  
    double tic = getTime();
    std::stringstream ss2;
    ss2.precision(20);
    ss2 << "simulated_pcl_" << tic << ".pcd";
    capture(init_pose,ss2.str());
    cout << (getTime() -tic) << " sec\n";  
  
  
  
  // these three variables determine the position and orientation of
    // the camera.
	      
	      
//     double lookat[3]; - focal location
//     double eye[3]; - my location
//     double up[3]; - updirection
     
	      
	      
//    std::cout << view[0]  << "," << view[1]  << "," << view[2] 
    
//    cameras.back ().view[2] = renderer->GetActiveCamera ()->GetOrientationWXYZ()[2];    
    
//ViewTransform->GetOrientationWXYZ();    
    
  //  Superclass::OnKeyUp ();
    
//       vtkSmartPointer<vtkCamera> cam = event.Interactor->GetRenderWindow ()->GetRenderers ()->GetFirstRenderer ()->GetActiveCamera ();
//       double clip[2], focal[3], pos[3], view[3];
//       cam->GetClippingRange (clip);
/*      cam->GetFocalPoint (focal);
      cam->GetPosition (pos);
      cam->GetViewUp (view);
      int *win_pos = Interactor->GetRenderWindow ()->GetPosition ();
      int *win_size = Interactor->GetRenderWindow ()->GetSize ();
      std::cerr << clip[0]  << "," << clip[1]  << "/" << focal[0] << "," << focal[1] << "," << focal[2] << "/" <<
                   pos[0]   << "," << pos[1]   << "," << pos[2]   << "/" << view[0]  << "," << view[1]  << "," << view[2] << "/" <<
                   cam->GetViewAngle () / 180.0 * M_PI  << "/" << win_size[0] << "," << win_size[1] << "/" << win_pos[0] << "," << win_pos[1]
                << endl;    
  */  
  }
}