bool CartesianTrajectoryAction::checkTolerance(const Eigen::Affine3d& err, const cartesian_trajectory_msgs::CartesianTolerance& tol) {
  if ((tol.position.x > 0.0) && (fabs(err.translation().x()) > tol.position.x)) {
    return false;
  }

  if ((tol.position.y > 0.0) && (fabs(err.translation().y()) > tol.position.y)) {
    return false;
  }

  if ((tol.position.z > 0.0) && (fabs(err.translation().z()) > tol.position.z)) {
    return false;
  }

  Eigen::AngleAxisd ax(err.rotation());
  Eigen::Vector3d rot = ax.axis() * ax.angle();

  if ((tol.rotation.x > 0.0) && (fabs(rot(0)) > tol.rotation.x)) {
    return false;
  }

  if ((tol.rotation.y > 0.0) && (fabs(rot(1)) > tol.rotation.y)) {
    return false;
  }

  if ((tol.rotation.z > 0.0) && (fabs(rot(2)) > tol.rotation.z)) {
    return false;
  }

  return true;
}
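A minimal usage sketch for checkTolerance follows; desired_pose, actual_pose and path_tolerance are illustrative names, not identifiers from the original source, and the call is assumed to run inside CartesianTrajectoryAction since checkTolerance is a member function.

// Illustrative sketch only: all names below are hypothetical.
Eigen::Affine3d desired_pose = Eigen::Affine3d::Identity();
Eigen::Affine3d actual_pose(Eigen::Translation3d(0.002, 0.0, 0.0));

// The pose error is the relative transform between desired and actual.
Eigen::Affine3d pose_error = desired_pose.inverse() * actual_pose;

cartesian_trajectory_msgs::CartesianTolerance path_tolerance;
path_tolerance.position.x = 0.01;  // 1 cm; 0.0 disables the check for that axis
path_tolerance.position.y = 0.01;
path_tolerance.position.z = 0.01;

if (!checkTolerance(pose_error, path_tolerance)) {
  // Tolerance violated: abort the goal, publish feedback, etc.
}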
void UpperBodyPlanner::addPose_moveTo(const Eigen::Affine3d& start_pose, const Eigen::Affine3d& end_pose, const int& step_num, std::vector<geometry_msgs::Pose>& pose_sequence) {
    Eigen::Quaterniond start_q = Rmat2Quaternion(start_pose.rotation());
    Eigen::Quaterniond end_q = Rmat2Quaternion(end_pose.rotation());
    Eigen::Quaterniond step_q;

    std::cout << "**********************************" << std::endl;
    std::cout << "start_pose translation is: " << start_pose.translation().transpose() << std::endl;
    std::cout << "start_pose rotation is: " << std::endl;
    std::cout << start_pose.rotation() << std::endl;
    std::cout << "start_pose quaternion is: "<< std::endl;
    std::cout << "w = " << start_q.w() << ", x = " << start_q.x() << ", y = " << start_q.y() << ", z = " << start_q.z() << std::endl;
    std::cout << "end_pose translation is: " << end_pose.translation().transpose() << std::endl;
    std::cout << "end_pose rotation is: " << std::endl;
    std::cout << end_pose.rotation() << std::endl;
    std::cout << "Desired quaternion is: " << std::endl;
    std::cout << "w = " << end_q.w() << ", x = " << end_q.x() << ", y = " << end_q.y() << ", z = " << end_q.z() << std::endl;
    std::cout << "**********************************" << std::endl;

    Eigen::Vector3d step_translation_movement = (end_pose.translation() - start_pose.translation()) / step_num;
    for (int i = 0; i < step_num; i++) {
        Eigen::Affine3d step_target;
        step_target.translation() = start_pose.translation() + (i + 1) * step_translation_movement;
        step_q = start_q.slerp((i + 1.0) / step_num, end_q);
        geometry_msgs::Pose target_pose = Eigen2msgPose(step_target.translation(), step_q);
        pose_sequence.push_back(target_pose);
    }
}
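The same interpolation can be written with Eigen alone; below is a sketch that avoids the project-specific helpers Rmat2Quaternion and Eigen2msgPose (start_pose, end_pose and step_num are assumed to be in scope, as in the function above).

std::vector<geometry_msgs::Pose> waypoints;
Eigen::Quaterniond q0(start_pose.rotation());
Eigen::Quaterniond q1(end_pose.rotation());
for (int i = 1; i <= step_num; ++i) {
    double t = static_cast<double>(i) / step_num;
    // Linear interpolation for the translation, slerp for the rotation.
    Eigen::Vector3d pos = (1.0 - t) * start_pose.translation() + t * end_pose.translation();
    Eigen::Quaterniond q = q0.slerp(t, q1);
    geometry_msgs::Pose wp;
    wp.position.x = pos.x();
    wp.position.y = pos.y();
    wp.position.z = pos.z();
    wp.orientation.w = q.w();
    wp.orientation.x = q.x();
    wp.orientation.y = q.y();
    wp.orientation.z = q.z();
    waypoints.push_back(wp);
}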
Example #3
geometry_msgs::Pose PoseAffineToGeomMsg(const Eigen::Affine3d &e) {
    geometry_msgs::Pose m;
    m.position.x = e.translation().x();
    m.position.y = e.translation().y();
    m.position.z = e.translation().z();
    // This is a column-major vs row-major matrix faux pas!
#if 0
    MatrixEXd em = e.rotation();

    Eigen::Quaterniond q = EMatrix2Quaterion(em);
#endif
    Eigen::Quaterniond q(e.rotation());
    m.orientation.x = q.x();
    m.orientation.y = q.y();
    m.orientation.z = q.z();
    m.orientation.w = q.w();
#if 0
    if (m.orientation.w < 0) {
        m.orientation.x *= -1;
        m.orientation.y *= -1;
        m.orientation.z *= -1;
        m.orientation.w *= -1;
    }
#endif
    return m;
}
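For reference, ROS ships this conversion in the eigen_conversions package; a sketch, assuming that package is available as a dependency:

#include <eigen_conversions/eigen_msg.h>

Eigen::Affine3d e = Eigen::Translation3d(0.1, 0.0, 0.5)
                  * Eigen::AngleAxisd(M_PI / 2, Eigen::Vector3d::UnitZ());
geometry_msgs::Pose m;
tf::poseEigenToMsg(e, m);  // fills position and orientation in one call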
bool SensorProcessorBase::updateTransformations(const std::string& sensorFrameId,
                                                const ros::Time& timeStamp)
{
  try {
    transformListener_.waitForTransform(sensorFrameId, mapFrameId_, timeStamp, ros::Duration(1.0));

    tf::StampedTransform transformTf;
    transformListener_.lookupTransform(mapFrameId_, sensorFrameId, timeStamp, transformTf);
    poseTFToEigen(transformTf, transformationSensorToMap_);

    transformListener_.lookupTransform(robotBaseFrameId_, sensorFrameId, timeStamp, transformTf);  // TODO Why wrong direction?
    Eigen::Affine3d transform;
    poseTFToEigen(transformTf, transform);
    rotationBaseToSensor_.setMatrix(transform.rotation().matrix());
    translationBaseToSensorInBaseFrame_.toImplementation() = transform.translation();

    transformListener_.lookupTransform(mapFrameId_, robotBaseFrameId_, timeStamp, transformTf);  // TODO Why wrong direction?
    poseTFToEigen(transformTf, transform);
    rotationMapToBase_.setMatrix(transform.rotation().matrix());
    translationMapToBaseInMapFrame_.toImplementation() = transform.translation();

    return true;
  } catch (tf::TransformException &ex) {
    ROS_ERROR("%s", ex.what());
    return false;
  }
}
void MotionPlanningFrame::updateCollisionObjectPose(bool update_marker_position)
{
  QList<QListWidgetItem *> sel = ui_->collision_objects_list->selectedItems();
  if (sel.empty())
    return;
  planning_scene_monitor::LockedPlanningSceneRW ps = planning_display_->getPlanningSceneRW();
  if (ps)
  {
    collision_detection::CollisionWorld::ObjectConstPtr obj = ps->getWorld()->getObject(sel[0]->text().toStdString());
    if (obj && obj->shapes_.size() == 1)
    {
      Eigen::Affine3d p;
      p.translation()[0] = ui_->object_x->value();
      p.translation()[1] = ui_->object_y->value();
      p.translation()[2] = ui_->object_z->value();

      p = Eigen::Translation3d(p.translation()) *
        (Eigen::AngleAxisd(ui_->object_rx->value(), Eigen::Vector3d::UnitX()) *
         Eigen::AngleAxisd(ui_->object_ry->value(), Eigen::Vector3d::UnitY()) *
         Eigen::AngleAxisd(ui_->object_rz->value(), Eigen::Vector3d::UnitZ()));

      ps->getWorldNonConst()->moveShapeInObject(obj->id_, obj->shapes_[0], p);
      planning_display_->queueRenderSceneGeometry();

      // Update the interactive marker pose to match the manually introduced one
      if (update_marker_position && scene_marker_)
      {
        Eigen::Quaterniond eq(p.rotation());
        scene_marker_->setPose(Ogre::Vector3(ui_->object_x->value(), ui_->object_y->value(), ui_->object_z->value()),
                               Ogre::Quaternion(eq.w(), eq.x(), eq.y(), eq.z()), "");
      }
    }
  }
}
Example #6
/*!
* \brief Affine3d2UrdfPose converts an Eigen Affine3d (4x4 homogeneous matrix) representing a pose into a urdf pose.
* \param pose   Eigen Affine3d pose
* \return   urdf pose with position and rotation.
*/
RCS::Pose Affine3d2UrdfPose(const  Eigen::Affine3d &pose) {
    RCS::Pose p;
    p.getOrigin().setX(pose.translation().x());
    p.getOrigin().setY(pose.translation().y());
    p.getOrigin().setZ(pose.translation().z());

    Eigen::Quaterniond q (pose.rotation());
    tf::Quaternion qtf(q.x(),q.y(),q.z(),q.w());
    //std::cout <<  "Affine3d2UrdfPose Quaterion = \n" << q.x() << ":" << q.y() << ":" << q.z() << ":" << q.w() << std::endl;

    p.setRotation(qtf);
    //std::cout <<  "After Affine3d2UrdfPose Quaterion = \n" << p.getRotation().x() << ":" << p.getRotation().y() << ":" << p.getRotation().z() << ":" << p.getRotation().w() << std::endl;

#if 0
    MatrixEXd m = pose.rotation();
    Eigen::Quaterniond q = EMatrix2Quaterion(m);

    Eigen::Quaterniond q(pose.rotation());
    p.getRotation().setX(q.x());
    p.getRotation().setY(q.y());
    p.getRotation().setZ(q.z());
    p.getRotation().setW(q.w());
#endif
    return p;
}
Example #7
bool kinematic_constraints::VisibilityConstraint::equal(const KinematicConstraint &other, double margin) const
{
  if (other.getType() != type_)
    return false;
  const VisibilityConstraint &o = static_cast<const VisibilityConstraint&>(other);
  
  if (target_frame_id_ == o.target_frame_id_ && sensor_frame_id_ == o.sensor_frame_id_ &&
      cone_sides_ == o.cone_sides_ && sensor_view_direction_ == o.sensor_view_direction_)
  {
    if (fabs(max_view_angle_ - o.max_view_angle_) > margin ||
        fabs(target_radius_ - o.target_radius_) > margin)
      return false;
    Eigen::Affine3d diff = sensor_pose_.inverse() * o.sensor_pose_;
    if (diff.translation().norm() > margin)
      return false;
    if (!diff.rotation().isIdentity(margin))
      return false;
    diff = target_pose_.inverse() * o.target_pose_;
    if (diff.translation().norm() > margin)
      return false;
    if (!diff.rotation().isIdentity(margin))
      return false;
    return true;
  }
  return false;
}
Example #8
/* checks if a given scene is in static equilibrium or not */
bool SceneValidator::isValidScene(std::vector<string> modelnames, std::vector<Eigen::Affine3d> model_poses){
    //set all the Objects's positions
    num = modelnames.size();
    for (int i = 0; i < num; i++) {
       auto mappedObject = m.find(modelnames[i]);  // get the model from the hashmap
       Eigen::Affine3d a = model_poses[i];
       // Convert the affine transform into ODE's rotation matrix and position.
       // ODE's dMatrix3 holds 3 rows of 4 (the 4th entry of each row is
       // padding), so each row gets a trailing 0.
       const dMatrix3 R = {
         a(0,0), a(0,1), a(0,2), 0,
         a(1,0), a(1,1), a(1,2), 0,
         a(2,0), a(2,1), a(2,2), 0 };
       const dReal center[3] = {a.translation()[0], a.translation()[1], a.translation()[2]};
       translateObject(mappedObject->second, center, R);  // feed the mapped MyObject the position and rotation
    }

    // Run a series of checks to see whether the scene is still stable.
    if (!isStableStill(modelnames, STEP1)) return false;  // check #1
    if (!isStableStill(modelnames, STEP2)) return false;  // check #2
    if (!isStableStill(modelnames, STEP3)) return false;  // check #3
    if (!isStableStill(modelnames, STEP4)) return false;  // check #4
    return true;
}
void UpperBodyPlanner::pose_moveTo2(const std::string &link_name,
                                    const Eigen::Affine3d& current_pose,
                                    const Eigen::Affine3d& desired_pose, const int &step_num,
                                    std::vector<geometry_msgs::Pose> &pose_sequence) {
    //Eigen::Affine3d current_pose = kinematic_state->getGlobalLinkTransform(link_name);
    Eigen::Quaterniond current_q = Rmat2Quaternion(current_pose.rotation());
    Eigen::Quaterniond desired_q = Rmat2Quaternion(desired_pose.rotation());
    Eigen::Quaterniond step_q;

    std::cout << "**********************************" << std::endl;
    std::cout << "end_effector_name is: " << link_name << std::endl;
    std::cout << "Current translation is: " << current_pose.translation().transpose() << std::endl;
    std::cout << "current rotation is: " << std::endl;
    std::cout << current_pose.rotation() << std::endl;
    std::cout << "current quaternion is: "<< std::endl;
    std::cout << "w = " << current_q.w() << ", x = " << current_q.x() << ", y = " << current_q.y() << ", z = " << current_q.z() << std::endl;
    std::cout << "Desired translation is: " << desired_pose.translation().transpose() << std::endl;
    std::cout << "Desired rotation is: " << std::endl;
    std::cout << desired_pose.rotation() << std::endl;
    std::cout << "Desired quaternion is: " << std::endl;
    std::cout << "w = " << desired_q.w() << ", x = " << desired_q.x() << ", y = " << desired_q.y() << ", z = " << desired_q.z() << std::endl;
    std::cout << "**********************************" << std::endl;

    Eigen::Vector3d step_translation_movement = (desired_pose.translation() - current_pose.translation()) / step_num;
    for (int i = 0; i < step_num; i++) {
        Eigen::Affine3d step_target;
        step_target.translation() = current_pose.translation() + (i + 1) * step_translation_movement;
        step_q = current_q.slerp((i + 1.0) / step_num, desired_q);
        geometry_msgs::Pose target_pose = Eigen2msgPose(step_target.translation(), step_q);
        pose_sequence.push_back(target_pose);
    }
}
Example #10
// The input 3D points are stored as columns.
Eigen::Affine3d Find3DAffineTransform(Eigen::Matrix3Xd in, Eigen::Matrix3Xd out) {

  // Default output
  Eigen::Affine3d A;
  A.linear() = Eigen::Matrix3d::Identity(3, 3);
  A.translation() = Eigen::Vector3d::Zero();

  if (in.cols() != out.cols())
    throw "Find3DAffineTransform(): input data mis-match";

  // First find the scale, by finding the ratio of sums of some distances,
  // then bring the datasets to the same scale.
  double dist_in = 0, dist_out = 0;
  for (int col = 0; col < in.cols()-1; col++) {
    dist_in  += (in.col(col+1) - in.col(col)).norm();
    dist_out += (out.col(col+1) - out.col(col)).norm();
  }
  if (dist_in <= 0 || dist_out <= 0)
    return A;
  double scale = dist_out/dist_in;
  out /= scale;

  // Find the centroids then shift to the origin
  Eigen::Vector3d in_ctr = Eigen::Vector3d::Zero();
  Eigen::Vector3d out_ctr = Eigen::Vector3d::Zero();
  for (int col = 0; col < in.cols(); col++) {
    in_ctr  += in.col(col);
    out_ctr += out.col(col);
  }
  in_ctr /= in.cols();
  out_ctr /= out.cols();
  for (int col = 0; col < in.cols(); col++) {
    in.col(col)  -= in_ctr;
    out.col(col) -= out_ctr;
  }

  // SVD
  Eigen::MatrixXd Cov = in * out.transpose();
  Eigen::JacobiSVD<Eigen::MatrixXd> svd(Cov, Eigen::ComputeThinU | Eigen::ComputeThinV);

  // Find the rotation
  double d = (svd.matrixV() * svd.matrixU().transpose()).determinant();
  if (d > 0)
    d = 1.0;
  else
    d = -1.0;
  Eigen::Matrix3d I = Eigen::Matrix3d::Identity(3, 3);
  I(2, 2) = d;
  Eigen::Matrix3d R = svd.matrixV() * I * svd.matrixU().transpose();

  // The final transform
  A.linear() = scale * R;
  A.translation() = scale*(out_ctr - R*in_ctr);

  return A;
}
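Eigen ships its own implementation of the same Umeyama algorithm in the Geometry module; a sketch cross-checking the hand-rolled version (the data setup is elided):

#include <Eigen/Geometry>

// Eigen::umeyama solves the same similarity-transform problem (rotation,
// translation and optional uniform scale) and returns a 4x4 matrix.
Eigen::Matrix3Xd src(3, 100), dst(3, 100);
// ... fill src, and set dst = scale * R * src + t ...
Eigen::Matrix4d T = Eigen::umeyama(src, dst, /*with_scaling=*/true);
Eigen::Affine3d A_eigen(T);
// A_eigen should match Find3DAffineTransform(src, dst) up to numerical precision.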
void robot_model::FloatingJointModel::computeJointStateValues(const Eigen::Affine3d& transf, std::vector<double> &joint_values) const
{
  joint_values.resize(7);
  joint_values[0] = transf.translation().x();
  joint_values[1] = transf.translation().y();
  joint_values[2] = transf.translation().z();
  Eigen::Quaterniond q(transf.rotation());
  joint_values[3] = q.x();
  joint_values[4] = q.y();
  joint_values[5] = q.z();
  joint_values[6] = q.w();
}
Example #12
    void NDTFuserHMT::initialize(Eigen::Affine3d initPos, pcl::PointCloud<pcl::PointXYZ> &cloud, bool preLoad)
    {
	///Set the cloud to sensor frame with respect to base
	lslgeneric::transformPointCloudInPlace(sensor_pose, cloud);
	lslgeneric::transformPointCloudInPlace(initPos, cloud);
	Tnow = initPos;
	//#ifdef BASELINE
	//#else
	if(beHMT) {
	    map = new lslgeneric::NDTMapHMT(resolution,
		    Tnow.translation()(0),Tnow.translation()(1),Tnow.translation()(2),
		    map_size_x,map_size_y,map_size_z,sensor_range,hmt_map_dir,true);
	    if(preLoad) {
		lslgeneric::NDTMapHMT *map_hmt = dynamic_cast<lslgeneric::NDTMapHMT*> (map);
		std::cout<<"Trying to pre-load maps at "<<initPos.translation()<<std::endl;
		map_hmt->tryLoadPosition(initPos.translation());
	    }
	} else {
	    map = new lslgeneric::NDTMap(new lslgeneric::LazyGrid(resolution));
	    if(preLoad) {
		char fname[1000];
		snprintf(fname,999,"%s/%s_map.jff",hmt_map_dir.c_str(),prefix.c_str());
		std::cerr<<"Loading "<<fname<<std::endl;
		map->loadFromJFF(fname);
	    } else {
		// "map" was already allocated above; allocating again here would
		// leak the first NDTMap, so just initialize it.
		map->initialize(Tnow.translation()(0),Tnow.translation()(1),Tnow.translation()(2),map_size_x,map_size_y,map_size_z);
	    }
	}
	//#endif
	map->addPointCloud(Tnow.translation(),cloud, 0.1, 100.0, 0.1);
	//map->addPointCloudMeanUpdate(Tnow.translation(),cloud,localMapSize, 1e5, 1250, map_size_z/2, 0.06);
	//map->addPointCloudMeanUpdate(Tnow.translation(),cloud,localMapSize, 0.1, 100.0, 0.1);
	map->computeNDTCells(CELL_UPDATE_MODE_SAMPLE_VARIANCE, 1e5, 255, Tnow.translation(), 0.1);
	isInit = true;
	Tlast_fuse = Tnow;
	Todom = Tnow;
	if(visualize) 
	{
#ifndef NO_NDT_VIZ
      //      # error compiling with visualization
	    viewer->plotNDTSAccordingToOccupancy(-1,map); 
	    //viewer->plotLocalNDTMap(cloud,resolution);
	    viewer->addTrajectoryPoint(Tnow.translation()(0),Tnow.translation()(1),Tnow.translation()(2)+0.5,1,0,0);
	    viewer->addTrajectoryPoint(Todom.translation()(0),Todom.translation()(1),Todom.translation()(2)+0.5,0,1,0);
	    viewer->displayTrajectory();
	    viewer->setCameraPointing(Tnow.translation()(0),Tnow.translation()(1),Tnow.translation()(2)+3);
	    viewer->repaint();	
#endif
        }
    }
int ShapeGraspPlanner::createGrasp(const geometry_msgs::PoseStamped& pose,
                                   double gripper_opening,
                                   double gripper_pitch,
                                   double x_offset,
                                   double z_offset,
                                   double quality)
{
  moveit_msgs::Grasp grasp;
  grasp.grasp_pose = pose;

  // defaults
  grasp.pre_grasp_posture = makeGraspPosture(gripper_opening);
  grasp.grasp_posture = makeGraspPosture(0.0);
  grasp.pre_grasp_approach = makeGripperTranslation(approach_frame_,
                                                    approach_min_translation_,
                                                    approach_desired_translation_);
  grasp.post_grasp_retreat = makeGripperTranslation(retreat_frame_,
                                                    retreat_min_translation_,
                                                    retreat_desired_translation_,
                                                    -1.0);  // retreat is in negative x direction

  // initial pose
  Eigen::Affine3d p = Eigen::Translation3d(pose.pose.position.x,
                                           pose.pose.position.y,
                                           pose.pose.position.z) *
                        Eigen::Quaterniond(pose.pose.orientation.w,
                                           pose.pose.orientation.x,
                                           pose.pose.orientation.y,
                                           pose.pose.orientation.z);
  // translate by x_offset, 0, z_offset
  p = p * Eigen::Translation3d(x_offset, 0, z_offset);
  // rotate by 0, pitch, 0
  p = p * quaternionFromEuler(0.0, gripper_pitch, 0.0);
  // apply grasp point -> planning frame offset
  p = p * Eigen::Translation3d(-tool_offset_, 0, 0);

  grasp.grasp_pose.pose.position.x = p.translation().x();
  grasp.grasp_pose.pose.position.y = p.translation().y();
  grasp.grasp_pose.pose.position.z = p.translation().z();
  Eigen::Quaterniond q(p.linear());
  grasp.grasp_pose.pose.orientation.x = q.x();
  grasp.grasp_pose.pose.orientation.y = q.y();
  grasp.grasp_pose.pose.orientation.z = q.z();
  grasp.grasp_pose.pose.orientation.w = q.w();

  grasp.grasp_quality = quality;

  grasps_.push_back(grasp);
  return 1;
}
TEST_F(LoadPlanningModelsPr2, InitOK)
{
  ASSERT_TRUE(urdf_ok_);
  ASSERT_EQ(urdf_model_->getName(), "pr2_test");

  robot_model::RobotModelPtr kmodel(new robot_model::RobotModel(urdf_model_, srdf_model_));
  robot_state::RobotState ks(kmodel);
  ks.setToRandomValues();
  ks.setToDefaultValues();


  robot_state::Transforms tf(kmodel->getModelFrame());

  Eigen::Affine3d t1;
  t1.setIdentity();
  t1.translation() = Eigen::Vector3d(10.0, 1.0, 0.0);
  tf.setTransform(t1, "some_frame_1");

  Eigen::Affine3d t2(Eigen::Translation3d(10.0, 1.0, 0.0)*Eigen::AngleAxisd(0.5, Eigen::Vector3d::UnitY()));
  tf.setTransform(t2, "some_frame_2");

  Eigen::Affine3d t3;
  t3.setIdentity();
  t3.translation() = Eigen::Vector3d(0.0, 1.0, -1.0);
  tf.setTransform(t3, "some_frame_3");


  EXPECT_TRUE(tf.isFixedFrame("some_frame_1"));
  EXPECT_FALSE(tf.isFixedFrame("base_footprint"));
  EXPECT_TRUE(tf.isFixedFrame(kmodel->getModelFrame()));

  Eigen::Affine3d x;
  x.setIdentity();
  tf.transformPose(ks, "some_frame_2", x, x);
  
  EXPECT_TRUE(t2.translation() == x.translation());
  EXPECT_TRUE(t2.rotation() == x.rotation());

  tf.transformPose(ks, kmodel->getModelFrame(), x, x);
  EXPECT_TRUE(t2.translation() == x.translation());
  EXPECT_TRUE(t2.rotation() == x.rotation());

  x.setIdentity();
  tf.transformPose(ks, "r_wrist_roll_link", x, x);

  EXPECT_NEAR(x.translation().x(), 0.585315, 1e-4);
  EXPECT_NEAR(x.translation().y(), -0.188, 1e-4);
  EXPECT_NEAR(x.translation().z(), 1.24001, 1e-4);
}
Example #15
bool kinematic_constraints::PositionConstraint::equal(const KinematicConstraint &other, double margin) const
{
  if (other.getType() != type_)
    return false;
  const PositionConstraint &o = static_cast<const PositionConstraint&>(other);
  
  if (link_model_ == o.link_model_ && constraint_frame_id_ == o.constraint_frame_id_)
  {
    if ((offset_ - o.offset_).norm() > margin)
      return false;
    if (constraint_region_.size() != o.constraint_region_.size())
      return false;
    for (std::size_t i = 0 ; i < constraint_region_.size() ; ++i)
    {
      Eigen::Affine3d diff = constraint_region_pose_[i].inverse() * o.constraint_region_pose_[i];
      if (diff.translation().norm() > margin)
        return false;
      if (!diff.rotation().isIdentity(margin))
        return false;
      if (fabs(constraint_region_[i]->computeVolume() - o.constraint_region_[i]->computeVolume()) >= margin)
        return false;
    }
    return true;
  }
  return false;
}
Example #16
Eigen::Isometry3d toIsometry(const Eigen::Affine3d& pose)
{
  Eigen::Isometry3d p(pose.rotation());
  p.translation() = pose.translation();

  return p;
}
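Isometry3d is the preferred type in newer MoveIt and tf2 code because the Isometry mode lets Eigen invert a transform with a transpose instead of a general matrix inversion; a minimal check using the toIsometry helper above:

#include <cassert>

Eigen::Affine3d affine = Eigen::Translation3d(1, 2, 3)
                       * Eigen::AngleAxisd(0.5, Eigen::Vector3d::UnitY());
Eigen::Isometry3d iso = toIsometry(affine);

// With Mode == Isometry, inverse() reduces to (R^T, -R^T * t).
Eigen::Isometry3d back = iso.inverse();
assert((back * iso).matrix().isApprox(Eigen::Matrix4d::Identity()));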
Example #17
void 
RGBID_SLAM::VisualizationManager::getPointCloud(pcl::PointCloud<pcl::PointXYZRGB>::Ptr whole_point_cloud)
{  
  IdToPoseMap::iterator id2pose_map_it;        
  IdToPointCloudMap::const_iterator id2pointcloud_map_it;
  
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr aux_point_cloud;  
  aux_point_cloud.reset(new pcl::PointCloud<pcl::PointXYZRGB>);
  
  for (id2pose_map_it = id2pose_map_.begin(); 
       id2pose_map_it != id2pose_map_.end();
       id2pose_map_it++)
  {
    int kf_id = (*id2pose_map_it).first;
    Eigen::Affine3d pose = (*id2pose_map_it).second;
    
    id2pointcloud_map_it = id2pointcloud_map_.find(kf_id);
    
    if (id2pointcloud_map_it != id2pointcloud_map_.end())
    {
      aux_point_cloud->clear();
      copyPointCloud((*id2pointcloud_map_it).second, aux_point_cloud);  
      (*id2pointcloud_map_it).second->clear();
      alignPointCloud(aux_point_cloud, pose.linear(), pose.translation());   
      *whole_point_cloud += *aux_point_cloud;
    }          
  }
}
void myCallback1(const interaction_cursor_msgs::InteractionCursorUpdate poseSubscribed) {
    target1 = Eigen::Affine3d::Identity();

    Eigen::Quaterniond q(poseSubscribed.pose.pose.orientation.w,
            poseSubscribed.pose.pose.orientation.x,
            poseSubscribed.pose.pose.orientation.y,
            poseSubscribed.pose.pose.orientation.z);

    translation1 << poseSubscribed.pose.pose.position.x,
            poseSubscribed.pose.pose.position.y,
            poseSubscribed.pose.pose.position.z;
    rotation1 = q.toRotationMatrix();
    target1.translation() = translation1;
    target1.linear() = rotation1;

    std::cout << "please move hydra to a proper place and click the button." << std::endl;

    if (poseSubscribed.button_state == 1) {
        button1 = true;
    }
}
TEST_F(DistanceFieldCollisionDetectionTester, LinksInCollision)
{
  collision_detection::CollisionRequest req;
  collision_detection::CollisionResult res1;
  collision_detection::CollisionResult res2;
  collision_detection::CollisionResult res3;
  //req.contacts = true;
  //req.max_contacts = 100;
  req.group_name = "whole_body";

  planning_models::KinematicState kstate(kmodel_);
  kstate.setToDefaultValues();

  Eigen::Affine3d offset = Eigen::Affine3d::Identity();
  offset.translation().x() = .01;

  kstate.getLinkState("base_link")->updateGivenGlobalLinkTransform(Eigen::Affine3d::Identity());
  kstate.getLinkState("base_bellow_link")->updateGivenGlobalLinkTransform(offset);
  acm_->setEntry("base_link", "base_bellow_link", false);
  crobot_->checkSelfCollision(req, res1, kstate, *acm_);
  ASSERT_TRUE(res1.collision);

  acm_->setEntry("base_link", "base_bellow_link", true);
  crobot_->checkSelfCollision(req, res2, kstate, *acm_);
  ASSERT_FALSE(res2.collision);
  
  //  req.verbose = true;
  kstate.getLinkState("r_gripper_palm_link")->updateGivenGlobalLinkTransform(Eigen::Affine3d::Identity());
  kstate.getLinkState("l_gripper_palm_link")->updateGivenGlobalLinkTransform(offset);

  acm_->setEntry("r_gripper_palm_link", "l_gripper_palm_link", false);
  crobot_->checkSelfCollision(req, res3, kstate, *acm_);
  ASSERT_TRUE(res3.collision);
}
Example #20
void TestFind3DAffineTransform(){

  // Create datasets with known transform
  Eigen::Matrix3Xd in(3, 100), out(3, 100);
  Eigen::Quaternion<double> Q(1, 3, 5, 2);
  Q.normalize();
  Eigen::Matrix3d R = Q.toRotationMatrix();
  double scale = 2.0;
  for (int row = 0; row < in.rows(); row++) {
    for (int col = 0; col < in.cols(); col++) {
      in(row, col) = log(2*row + 10.0)/sqrt(1.0*col + 4.0) + sqrt(col*1.0)/(row + 1.0);
    }
  }
  Eigen::Vector3d S;
  S << -5, 6, -27;
  for (int col = 0; col < in.cols(); col++)
    out.col(col) = scale*R*in.col(col) + S;

  Eigen::Affine3d A = Find3DAffineTransform(in, out);

  // See if we got the transform we expected
  if ( (scale*R-A.linear()).cwiseAbs().maxCoeff() > 1e-13 ||
       (S-A.translation()).cwiseAbs().maxCoeff() > 1e-13)
    throw "Could not determine the affine transform accurately enough";
}
Example #21
bool
pcl::EnsensoGrabber::estimateCalibrationPatternPose (Eigen::Affine3d &pattern_pose) const
{
  if (!device_open_)
    return (false);

  if (running_)
    return (false);

  try
  {
    NxLibCommand estimate_pattern_pose (cmdEstimatePatternPose);
    estimate_pattern_pose.execute ();
    NxLibItem tf = estimate_pattern_pose.result ()[itmPatternPose];
    // Convert tf into a matrix
    if (!jsonTransformationToMatrix (tf.asJson (), pattern_pose))
      return (false);
    pattern_pose.translation () /= 1000.0;  // Convert translation to meters (the Ensenso API returns millimeters)
    return (true);
  }
  catch (NxLibException &ex)
  {
    ensensoExceptionHandling (ex, "estimateCalibrationPatternPose");
    return (false);
  }
}
Example #22
void mesh_core::PlaneProjection::getFrame(Eigen::Affine3d& frame) const
{
  frame.setIdentity();
  frame.translation() = origin_;
  frame.linear().col(0) = x_axis_;
  frame.linear().col(1) = y_axis_;
  frame.linear().col(2) = normal_;
}
Example #23
 inline std::string PrettyPrint(const Eigen::Affine3d& transform_to_print, const bool add_delimiters, const std::string& separator)
 {
     UNUSED(add_delimiters);
     UNUSED(separator);
     const Eigen::Vector3d vector_to_print = transform_to_print.translation();
     const Eigen::Quaterniond quaternion_to_print(transform_to_print.rotation());
     return "Affine3d <x: " + std::to_string(vector_to_print.x())
          + " y: " + std::to_string(vector_to_print.y())
          + " z: " + std::to_string(vector_to_print.z())
          + ">, <x: " + std::to_string(quaternion_to_print.x())
          + " y: " + std::to_string(quaternion_to_print.y())
          + " z: " + std::to_string(quaternion_to_print.z())
          + " w: " + std::to_string(quaternion_to_print.w()) + ">";
 }
Example #24
// Return the name of the handrail that intersects with the given pose.
std::string getHandrailAtPose(const ISSWorld& world, const Eigen::Affine3d& pose)
{
    unsigned int handrail_idx = world.findHandrail(pose.translation());
    if (handrail_idx < world.numHandrails())
        return world.getHandrail(handrail_idx)->name;

    return "";  // no handrail at pose
}
Example #25
/**
 * @function xform
 */
Eigen::Affine3d KState::xform() const {

  Eigen::Affine3d xform = Eigen::Affine3d::Identity();
  xform.linear() = body_rot.toRotationMatrix();
  xform.translation() = body_pos;
  
  return xform;
}
Example #26
void RockinPNPActionServer::tf2Affine(tf::StampedTransform& tf, Eigen::Affine3d& T)
{
  tf::Vector3 o=tf.getOrigin();
  tf::Quaternion q_tf=tf.getRotation();
  Eigen::Quaterniond q(q_tf[3],q_tf[0],q_tf[1],q_tf[2]);
  Eigen::Matrix3d R(q);
  Eigen::Vector3d t(o[0],o[1],o[2]);
  T.linear() = R;
  T.translation() = t;
}
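The tf_conversions package already provides this conversion as tf::transformTFToEigen; a sketch of the equivalent call, assuming tf_conversions is a dependency:

#include <tf_conversions/tf_eigen.h>

tf::StampedTransform tf_in;  // filled by a tf listener elsewhere
Eigen::Affine3d T;
tf::transformTFToEigen(tf_in, T);  // same result as the hand-rolled tf2Affine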
Example #27
void ICPLocalization::addScanLineToPointCloud(Eigen::Affine3d body2Odo, Eigen::Affine3d body2World, Eigen::Affine3d laser2Body, const ::base::samples::LaserScan &scan_reading)
{

    if(scansWithTransforms.size() == 0) 
    {
	addLaserScan(body2Odo,body2World,laser2Body,scan_reading); 
	return;
    }
    
    /*
    double max_rotation =  1.5 * conf_point_cloud.lines_per_point_cloud * conf_point_cloud.min_rotation_for_new_line;
    double max_translation = 1.5 * conf_point_cloud.lines_per_point_cloud * conf_point_cloud.min_distance_travelled_for_new_line;
    double max_head_movement = 1.5 * conf_point_cloud.lines_per_point_cloud * conf_point_cloud.min_rotation_head_for_new_line; 
    */
    bool add_laser_scan = true; 
    for( uint i = 0; i < scansWithTransforms.size(); i++) 
    {
	Eigen::Affine3d difference( body2Odo.inverse() * scansWithTransforms.at(i).body2Odo );

	Vector3d Ylaser2Body = laser2Body * Vector3d::UnitY() - laser2Body.translation();
	Ylaser2Body.normalize();
	Vector3d YlastLaser2Body = scansWithTransforms.at(i).laser2Body * Vector3d::UnitY() - scansWithTransforms.at(i).laser2Body.translation();
	YlastLaser2Body.normalize();

	double laserChange = acos(Ylaser2Body.dot(YlastLaser2Body));
	double translation = difference.translation().norm();
	double rotation = fabs(Eigen::AngleAxisd( difference.rotation() ).angle());

	add_laser_scan = add_laser_scan && ( rotation > conf_point_cloud.min_rotation_for_new_line || translation > conf_point_cloud.min_distance_travelled_for_new_line || laserChange > conf_point_cloud.min_rotation_head_for_new_line);

	// If the accumulated motion is too big, the old laser scan is no longer consistent.
// 	if( rotation > max_rotation|| translation > max_translation || laserChange > max_head_movement)
// 	{
// 	    scansWithTransforms.erase(scansWithTransforms.begin() + i); 
// 	    scanCount--; 
// 	    std::cout << " IcpLocalization.cpp erasing old laser scan " << std::endl; 
// 	}

/*	std::cout <<" add new scan " << add_laser_scan << std::endl; 
	std::cout << "\t translation " << (translation > conf_point_cloud.min_distance_travelled_for_new_line)<< " "  << translation << " > " << conf_point_cloud.min_distance_travelled_for_new_line << std::endl;
	std::cout << "\t rotation " << (rotation > conf_point_cloud.min_rotation_for_new_line) << " "  << rotation * 180 / M_PI << " > " << conf_point_cloud.min_rotation_for_new_line * 180 / M_PI << std::endl;
	std::cout << "\t head " << (laserChange >  conf_point_cloud.min_rotation_head_for_new_line) << "  "<< laserChange * 180 / M_PI << " > " << conf_point_cloud.min_rotation_head_for_new_line * 180 / M_PI<< std::endl; */
	
	if (!add_laser_scan) 
	    break; 
    }
    
    if ( add_laser_scan )
    {
	addLaserScan(body2Odo,body2World,laser2Body,scan_reading); 
    }
    
    return; 
  
}
Example #28
ICPInputData ICPLocalization::generatePointcloudSample(ICPInputData originalData, Eigen::Affine3d offset)
{
    ICPInputData newData;

    newData.pc2World = offset * originalData.pc2World; 
    newData.pc2World.translation() = offset.translation() + originalData.pc2World.translation();
    newData.pc = originalData.pc->clone(); 
    newData.pointCloudTime = originalData.pointCloudTime;
    
    return newData; 
}
Example #29
TEST(TfEigen, ConvertAffine3d)
{
  const Eigen::Affine3d v(Eigen::Translation3d(1,2,3) * Eigen::AngleAxis<double>(1, Eigen::Vector3d::UnitX()));

  Eigen::Affine3d v1;
  geometry_msgs::Pose p1;
  tf2::convert(v, p1);
  tf2::convert(p1, v1);

  EXPECT_EQ(v.translation(), v1.translation());
  EXPECT_EQ(v.rotation(), v1.rotation());
}
TEST_F(FclCollisionDetectionTester, ContactReporting)
{
  collision_detection::CollisionRequest req;
  req.contacts = true;
  req.max_contacts = 1;

  robot_state::RobotState kstate(kmodel_);
  kstate.setToDefaultValues();
  kstate.update();

  Eigen::Affine3d offset = Eigen::Affine3d::Identity();
  offset.translation().x() = .01;

  //  kstate.getLinkState("base_link")->updateGivenGlobalLinkTransform(Eigen::Affine3d::Identity());
  //  kstate.getLinkState("base_bellow_link")->updateGivenGlobalLinkTransform(offset);
  //  kstate.getLinkState("r_gripper_palm_link")->updateGivenGlobalLinkTransform(Eigen::Affine3d::Identity());
  //  kstate.getLinkState("l_gripper_palm_link")->updateGivenGlobalLinkTransform(offset);

  kstate.updateStateWithLinkAt("base_link", Eigen::Affine3d::Identity());
  kstate.updateStateWithLinkAt("base_bellow_link", offset);
  kstate.updateStateWithLinkAt("r_gripper_palm_link", Eigen::Affine3d::Identity());
  kstate.updateStateWithLinkAt("l_gripper_palm_link", offset);
  kstate.update();

  acm_->setEntry("base_link", "base_bellow_link", false);
  acm_->setEntry("r_gripper_palm_link", "l_gripper_palm_link", false);

  collision_detection::CollisionResult res;
  crobot_->checkSelfCollision(req, res, kstate, *acm_);
  ASSERT_TRUE(res.collision);
  EXPECT_EQ(res.contacts.size(),1);
  EXPECT_EQ(res.contacts.begin()->second.size(),1);

  res.clear();
  req.max_contacts = 2;
  req.max_contacts_per_pair = 1;
  //  req.verbose = true;
  crobot_->checkSelfCollision(req, res, kstate, *acm_);
  ASSERT_TRUE(res.collision);
  EXPECT_EQ(res.contacts.size(), 2);
  EXPECT_EQ(res.contacts.begin()->second.size(),1);

  res.contacts.clear();
  res.contact_count = 0;

  req.max_contacts = 10;
  req.max_contacts_per_pair = 2;
  acm_.reset(new collision_detection::AllowedCollisionMatrix(kmodel_->getLinkModelNames(), false));
  crobot_->checkSelfCollision(req, res, kstate, *acm_);
  ASSERT_TRUE(res.collision);
  EXPECT_LE(res.contacts.size(), 10);
  EXPECT_LE(res.contact_count, 10);
}