Code example #1
void mesh_core::PlaneProjection::getFrame(Eigen::Affine3d& frame) const
{
  frame.setIdentity();
  frame.translation() = origin_;
  frame.linear().col(0) = x_axis_;
  frame.linear().col(1) = y_axis_;
  frame.linear().col(2) = normal_;
}
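
A note on the pattern above: the columns of frame.linear() are the frame's basis vectors expressed in the parent frame, so a point given in plane coordinates maps to world coordinates by applying the affine directly. A minimal sketch (the plane_projection instance and the (u, v) coordinates are hypothetical):

#include <Eigen/Geometry>

Eigen::Affine3d frame;
plane_projection.getFrame(frame);            // origin plus (x_axis, y_axis, normal) columns
double u = 0.1, v = 0.2;                     // hypothetical plane coordinates
Eigen::Vector3d p_plane(u, v, 0.0);          // z = 0 lies on the plane
Eigen::Vector3d p_world = frame * p_plane;   // rotate by linear(), then add translation()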
Code example #2
File: Kabsch.cpp  Project: lood339/opencv_util
// The input 3D points are stored as columns.
Eigen::Affine3d Find3DAffineTransform(Eigen::Matrix3Xd in, Eigen::Matrix3Xd out) {

  // Default output: identity transform (also fully initializes the matrix)
  Eigen::Affine3d A = Eigen::Affine3d::Identity();

  if (in.cols() != out.cols())
    throw "Find3DAffineTransform(): input data mis-match";

  // First find the scale, by finding the ratio of sums of some distances,
  // then bring the datasets to the same scale.
  double dist_in = 0, dist_out = 0;
  for (int col = 0; col < in.cols()-1; col++) {
    dist_in  += (in.col(col+1) - in.col(col)).norm();
    dist_out += (out.col(col+1) - out.col(col)).norm();
  }
  if (dist_in <= 0 || dist_out <= 0)
    return A;
  double scale = dist_out/dist_in;
  out /= scale;

  // Find the centroids then shift to the origin
  Eigen::Vector3d in_ctr = Eigen::Vector3d::Zero();
  Eigen::Vector3d out_ctr = Eigen::Vector3d::Zero();
  for (int col = 0; col < in.cols(); col++) {
    in_ctr  += in.col(col);
    out_ctr += out.col(col);
  }
  in_ctr /= in.cols();
  out_ctr /= out.cols();
  for (int col = 0; col < in.cols(); col++) {
    in.col(col)  -= in_ctr;
    out.col(col) -= out_ctr;
  }

  // SVD
  Eigen::MatrixXd Cov = in * out.transpose();
  Eigen::JacobiSVD<Eigen::MatrixXd> svd(Cov, Eigen::ComputeThinU | Eigen::ComputeThinV);

  // Find the rotation
  double d = (svd.matrixV() * svd.matrixU().transpose()).determinant();
  if (d > 0)
    d = 1.0;
  else
    d = -1.0;
  Eigen::Matrix3d I = Eigen::Matrix3d::Identity(3, 3);
  I(2, 2) = d;
  Eigen::Matrix3d R = svd.matrixV() * I * svd.matrixU().transpose();

  // The final transform
  A.linear() = scale * R;
  A.translation() = scale*(out_ctr - R*in_ctr);

  return A;
}
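
A hedged usage sketch for the routine above (the point data is made up): corresponding 3D points go in as columns of two Eigen::Matrix3Xd matrices, and the rigid motion plus uniform scale come back in one Eigen::Affine3d.

Eigen::Matrix3Xd src(3, 4), dst(3, 4);
// ... fill src with measured points and dst with their transformed counterparts ...
Eigen::Affine3d T = Find3DAffineTransform(src, dst);
Eigen::Vector3d t = T.translation();
double scale = T.linear().col(0).norm();   // every column of scale*R has length scale
Eigen::Matrix3d R = T.linear() / scale;    // pure rotation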
Code example #3
File: Kabsch.cpp  Project: lood339/opencv_util
void TestFind3DAffineTransform(){

  // Create datasets with known transform
  Eigen::Matrix3Xd in(3, 100), out(3, 100);
  Eigen::Quaternion<double> Q(1, 3, 5, 2);
  Q.normalize();
  Eigen::Matrix3d R = Q.toRotationMatrix();
  double scale = 2.0;
  for (int row = 0; row < in.rows(); row++) {
    for (int col = 0; col < in.cols(); col++) {
      in(row, col) = log(2*row + 10.0)/sqrt(1.0*col + 4.0) + sqrt(col*1.0)/(row + 1.0);
    }
  }
  Eigen::Vector3d S;
  S << -5, 6, -27;
  for (int col = 0; col < in.cols(); col++)
    out.col(col) = scale*R*in.col(col) + S;

  Eigen::Affine3d A = Find3DAffineTransform(in, out);

  // See if we got the transform we expected
  if ( (scale*R-A.linear()).cwiseAbs().maxCoeff() > 1e-13 ||
       (S-A.translation()).cwiseAbs().maxCoeff() > 1e-13)
    throw "Could not determine the affine transform accurately enough";
}
Code example #4
void myCallback1(const interaction_cursor_msgs::InteractionCursorUpdate poseSubscribed) {
    // std::cout<<" i am in mycallback "<<std::endl;

    target1 = Eigen::Affine3d::Identity();

    Eigen::Quaterniond q(poseSubscribed.pose.pose.orientation.w,
            poseSubscribed.pose.pose.orientation.x,
            poseSubscribed.pose.pose.orientation.y,
            poseSubscribed.pose.pose.orientation.z);

    translation1 << poseSubscribed.pose.pose.position.x,
            poseSubscribed.pose.pose.position.y,
            poseSubscribed.pose.pose.position.z;

    rotation1 = q.toRotationMatrix();
    target1.translation() = translation1;
    target1.linear() = rotation1;

    std::cout << "please move hydra to a proper place and click the button." << std::endl;

    if (poseSubscribed.button_state == 1) {
        button1 = true;

    }
}
Code example #5
void 
RGBID_SLAM::VisualizationManager::getPointCloud(pcl::PointCloud<pcl::PointXYZRGB>::Ptr whole_point_cloud)
{  
  IdToPoseMap::iterator id2pose_map_it;        
  IdToPointCloudMap::const_iterator id2pointcloud_map_it;
  
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr aux_point_cloud;  
  aux_point_cloud.reset(new pcl::PointCloud<pcl::PointXYZRGB>);
  
  for (id2pose_map_it = id2pose_map_.begin(); 
       id2pose_map_it != id2pose_map_.end();
       id2pose_map_it++)
  {
    int kf_id = (*id2pose_map_it).first;
    Eigen::Affine3d pose = (*id2pose_map_it).second;
    
    id2pointcloud_map_it = id2pointcloud_map_.find(kf_id);
    
    if (id2pointcloud_map_it != id2pointcloud_map_.end())
    {
      aux_point_cloud->clear();
      copyPointCloud((*id2pointcloud_map_it).second, aux_point_cloud);  
      (*id2pointcloud_map_it).second->clear();
      alignPointCloud(aux_point_cloud, pose.linear(), pose.translation());   
      *whole_point_cloud += *aux_point_cloud;
    }          
  }
}
Code example #6
  void commandForce(const geometry_msgs::WrenchStamped::ConstPtr &msg)
  {
    F_des_ << msg->wrench.force.x, msg->wrench.force.y, msg->wrench.force.z, 
              msg->wrench.torque.x, msg->wrench.torque.y, msg->wrench.torque.z;
    if(msg->header.frame_id == root_name_)
      return;

    geometry_msgs::PoseStamped in_root;
    in_root.pose.orientation.w = 1.0;
    in_root.header.frame_id = msg->header.frame_id;
    in_root.header.stamp = msg->header.stamp; // transform at the same stamp we wait for below

    try {
      tf_.waitForTransform(root_name_, msg->header.frame_id, msg->header.stamp, ros::Duration(0.1));
      tf_.transformPose(root_name_, in_root, in_root);
    }
    catch (const tf::TransformException &ex)
    {
      ROS_ERROR("Failed to transform: %s", ex.what());
      return;
    }
    
    Eigen::Affine3d t;
    
    tf::poseMsgToEigen(in_root.pose, t);

    // Re-express the commanded wrench in the root frame: only the rotation
    // part of the transform is applied, to force and torque separately.
    F_des_.head<3>() = t.linear() * F_des_.head<3>();
    F_des_.tail<3>() = t.linear() * F_des_.tail<3>();
  }
Code example #7
/**
 * @function KState
 */
Eigen::Affine3d KState::xform() const {

  Eigen::Affine3d xform = Eigen::Affine3d::Identity();
  xform.linear() = body_rot.toRotationMatrix();
  xform.translation() = body_pos;
  
  return xform;
}
Code example #8
File: Actions.cpp  Project: ChefOtter/SPQR_at_work
void RockinPNPActionServer::tf2Affine(tf::StampedTransform& tf, Eigen::Affine3d& T)
{
  tf::Vector3 o=tf.getOrigin();
  tf::Quaternion q_tf=tf.getRotation();
  Eigen::Quaterniond q(q_tf[3],q_tf[0],q_tf[1],q_tf[2]);
  Eigen::Matrix3d R(q);
  Eigen::Vector3d t(o[0],o[1],o[2]);
  T.linear()=R; T.translation()=t;
  
}
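
For reference, the tf_conversions package ships an equivalent helper; a hedged sketch, assuming tf_eigen.h is available:

#include <tf_conversions/tf_eigen.h>

tf::StampedTransform tf;
// ... fill tf, e.g. from a lookupTransform call ...
Eigen::Affine3d T;
tf::transformTFToEigen(tf, T);   // copies rotation and origin in one call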
Code example #9
int ShapeGraspPlanner::createGrasp(const geometry_msgs::PoseStamped& pose,
                                   double gripper_opening,
                                   double gripper_pitch,
                                   double x_offset,
                                   double z_offset,
                                   double quality)
{
  moveit_msgs::Grasp grasp;
  grasp.grasp_pose = pose;

  // defaults
  grasp.pre_grasp_posture = makeGraspPosture(gripper_opening);
  grasp.grasp_posture = makeGraspPosture(0.0);
  grasp.pre_grasp_approach = makeGripperTranslation(approach_frame_,
                                                    approach_min_translation_,
                                                    approach_desired_translation_);
  grasp.post_grasp_retreat = makeGripperTranslation(retreat_frame_,
                                                    retreat_min_translation_,
                                                    retreat_desired_translation_,
                                                    -1.0);  // retreat is in negative x direction

  // initial pose
  Eigen::Affine3d p = Eigen::Translation3d(pose.pose.position.x,
                                           pose.pose.position.y,
                                           pose.pose.position.z) *
                        Eigen::Quaterniond(pose.pose.orientation.w,
                                           pose.pose.orientation.x,
                                           pose.pose.orientation.y,
                                           pose.pose.orientation.z);
  // translate by x_offset, 0, z_offset
  p = p * Eigen::Translation3d(x_offset, 0, z_offset);
  // rotate by 0, pitch, 0
  p = p * quaternionFromEuler(0.0, gripper_pitch, 0.0);
  // apply grasp point -> planning frame offset
  p = p * Eigen::Translation3d(-tool_offset_, 0, 0);

  grasp.grasp_pose.pose.position.x = p.translation().x();
  grasp.grasp_pose.pose.position.y = p.translation().y();
  grasp.grasp_pose.pose.position.z = p.translation().z();
  Eigen::Quaterniond q = (Eigen::Quaterniond)p.linear();
  grasp.grasp_pose.pose.orientation.x = q.x();
  grasp.grasp_pose.pose.orientation.y = q.y();
  grasp.grasp_pose.pose.orientation.z = q.z();
  grasp.grasp_pose.pose.orientation.w = q.w();

  grasp.grasp_quality = quality;

  grasps_.push_back(grasp);
  return 1;
}
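
A note on the composition order above: right-multiplying an Eigen::Affine3d applies each new offset in the gripper's current local frame, not in the planning frame. A minimal hedged illustration:

Eigen::Affine3d p = Eigen::Affine3d::Identity();
p = p * Eigen::Translation3d(0.1, 0, 0);                        // step along local x
p = p * Eigen::AngleAxisd(M_PI / 2, Eigen::Vector3d::UnitY());  // then pitch in place
// p.translation() is still (0.1, 0, 0): rotating last does not move the origin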
Code example #10
File: ensenso_grabber.cpp  Project: StevenHickson/pcl
bool
pcl::EnsensoGrabber::jsonTransformationToMatrix (const std::string transformation,
                                                 Eigen::Affine3d &matrix) const
{
  try
  {
    NxLibCommand convert_transformation (cmdConvertTransformation);
    convert_transformation.parameters ()[itmTransformation].setJson (transformation);
    convert_transformation.execute ();
    Eigen::Affine3d tmp (Eigen::Affine3d::Identity ());

    // Rotation
    tmp.linear ().col (0) = Eigen::Vector3d (convert_transformation.result ()[itmTransformation][0][0].asDouble (),
                                             convert_transformation.result ()[itmTransformation][0][1].asDouble (),
                                             convert_transformation.result ()[itmTransformation][0][2].asDouble ());

    tmp.linear ().col (1) = Eigen::Vector3d (convert_transformation.result ()[itmTransformation][1][0].asDouble (),
                                             convert_transformation.result ()[itmTransformation][1][1].asDouble (),
                                             convert_transformation.result ()[itmTransformation][1][2].asDouble ());

    tmp.linear ().col (2) = Eigen::Vector3d (convert_transformation.result ()[itmTransformation][2][0].asDouble (),
                                             convert_transformation.result ()[itmTransformation][2][1].asDouble (),
                                             convert_transformation.result ()[itmTransformation][2][2].asDouble ());

    // Translation
    tmp.translation () = Eigen::Vector3d (convert_transformation.result ()[itmTransformation][3][0].asDouble (),
                                          convert_transformation.result ()[itmTransformation][3][1].asDouble (),
                                          convert_transformation.result ()[itmTransformation][3][2].asDouble ());
    matrix = tmp;
    return (true);
  }
  catch (NxLibException &ex)
  {
    ensensoExceptionHandling (ex, "jsonTransformationToMatrix");
    return (false);
  }
}
Code example #11
//utility to convert a pose to an Eigen::Affine
Eigen::Affine3d transformPoseToEigenAffine3d(geometry_msgs::Pose pose) {
    
    Eigen::Affine3d e;
    Eigen::Vector3d Oe;
    Oe(0)=pose.position.x;
    Oe(1)=pose.position.y;
    Oe(2)=pose.position.z;

    Eigen::Quaterniond q;
    q.x()=pose.orientation.x;
    q.y()=pose.orientation.y;
    q.z()=pose.orientation.z;
    q.w()=pose.orientation.w;  
    Eigen::Matrix3d Re(q);
    e.linear()=Re;
    e.translation() = Oe;
    return e;
}
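
The same conversion exists in the eigen_conversions package (it is what code example #6 uses); a hedged sketch:

#include <eigen_conversions/eigen_msg.h>

geometry_msgs::Pose pose;
// ... fill pose ...
Eigen::Affine3d e;
tf::poseMsgToEigen(pose, e);   // equivalent to the hand-rolled version above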
Code example #12
bool ObjectManipulationProperties::get_object_info(int object_id, Eigen::Affine3d &grasp_transform, double &approach_dist, double &gripper_close_test) {
    bool got_info = false;
    Eigen::Vector3d object_origin_wrt_gripper_frame;
    //Eigen::Quaternion object_orientation_wrt_gripper_frame;
    Eigen::Matrix3d R_object_wrt_gripper;
    Eigen::Matrix3d R_gripper;
    Eigen::Vector3d x_axis, y_axis, z_axis;
    Eigen::Vector3d origin_object_wrt_gripper;
    switch (object_id) {
        case TOY_BLOCK_ID:
            //set approach distance and gripper-closure test val: MAGIC NUMBERS
            // appropriate ONLY for this object with Baxter right-hand gripper in simu
            approach_dist = 0.05; //old float64 TOY_BLOCK_APPROACH_DIST = 0.05
            gripper_close_test = 83.0; //old float64 TOY_BLOCK_GRIPPER_CLOSE_TEST_VAL = 80.0
            //derive gripper approach pose from block pose:
            //compute a gripper pose with z-axis anti-parallel to object z-axis,
            // and x-axis coincident with object x-axis

            origin_object_wrt_gripper<<0,0,0;
            x_axis<<1,0,0; // make block x-axis parallel to gripper x-axis;
            z_axis<<0,0,-1; // gripper z axis antiparallel to block z axis;
            y_axis = z_axis.cross(x_axis); //consistent triad

            R_gripper.col(0) = x_axis; //populate orientation matrix from axis directions
            R_gripper.col(1) = y_axis;
            R_gripper.col(2) = z_axis;
            //gripper_affine is defined to have origin coincident w/ block-frame origin, but z-axis antiparallel to block z-axis
            // and x-axis parallel to block-frame x-axis
            grasp_transform.linear() = R_gripper; //populate affine w/ orientation
            grasp_transform.translation() = origin_object_wrt_gripper; //and origin          

            return true;
            break;

            //case SOMETHING_ELSE:  //add more object cases here!
            //...
            //return true;
            //break;
        default:
            ROS_WARN("object ID not recognized");
            return false;
            break;
    }
}
Code example #13
geometry_msgs::Pose MotionPlanning::transformEigenAffine3dToPose(Eigen::Affine3d e) {
    Eigen::Vector3d Oe;
    Eigen::Matrix3d Re;
    geometry_msgs::Pose pose;
    Oe = e.translation();
    Re = e.linear();

    Eigen::Quaterniond q(Re); // convert rotation matrix Re to a quaternion, q
    pose.position.x = Oe(0);
    pose.position.y = Oe(1);
    pose.position.z = Oe(2);

    pose.orientation.x = q.x();
    pose.orientation.y = q.y();
    pose.orientation.z = q.z();
    pose.orientation.w = q.w();

    return pose;
}
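
The reverse helper is also available in eigen_conversions; a hedged sketch:

#include <eigen_conversions/eigen_msg.h>

Eigen::Affine3d e = Eigen::Affine3d::Identity();
geometry_msgs::Pose pose;
tf::poseEigenToMsg(e, pose);   // fills position and orientation, like the function above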
Code example #14
bool ArmMotionInterface::unpack_goal_pose(void) {
    geometry_msgs::PoseStamped poseStamped_goal = request_.poseStamped_goal;
    Eigen::Vector3d goal_origin;
    goal_origin[0] = poseStamped_goal.pose.position.x;
    goal_origin[1] = poseStamped_goal.pose.position.y;
    goal_origin[2] = poseStamped_goal.pose.position.z;
    a_tool_end_.translation() = goal_origin;

    Eigen::Quaterniond quat;
    quat.x() = poseStamped_goal.pose.orientation.x;
    quat.y() = poseStamped_goal.pose.orientation.y;
    quat.z() = poseStamped_goal.pose.orientation.z;
    quat.w() = poseStamped_goal.pose.orientation.w;
    Eigen::Matrix3d R_goal(quat);
    a_tool_end_.linear() = R_goal;
    a_flange_end_ = a_tool_end_ * A_tool_wrt_flange_.inverse();


    return true;
}
Code example #15
File: irb120_fk_ik.cpp  Project: TuZZiX/ros_workspace
int Irb120_IK_solver::ik_solve(Eigen::Affine3d const& desired_hand_pose) {
    q6dof_solns.clear();
    bool reachable = compute_q123_solns(desired_hand_pose, q6dof_solns);
    if (!reachable) {
        return 0;
    }
    reachable = false;
    //is at least one solution within joint range limits?
    q_solns_fit.clear();
    Vectorq6x1 q_soln;
    Eigen::Matrix3d R_des;
    R_des = desired_hand_pose.linear();
    int nsolns = q6dof_solns.size();
    bool fits;

    std::vector<Vectorq6x1> q_wrist_solns;
    for (int i=0;i<nsolns;i++) {
        q_soln = q6dof_solns[i];
        fits = fit_joints_to_range(q_soln); // force q_soln in to periodic range, if possible, and return if possible
        if (fits) { // if here, then have a valid 3dof soln; try to get wrist solutions
            // get wrist solutions; expect 2, though not checked for joint limits
            solve_spherical_wrist(q_soln,R_des, q_wrist_solns);  
            int n_wrist_solns = q_wrist_solns.size();
            for (int iwrist=0;iwrist<n_wrist_solns;iwrist++) {
                q_soln = q_wrist_solns[iwrist];
                if (fit_joints_to_range(q_soln)) {
                  q_solns_fit.push_back(q_soln);
                  reachable = true; // note that we have at least one reachable solution
                }
            }

        }
    }
    if (!reachable) {
        return 0;
    }
    
    //if here, have reachable solutions
    nsolns = q_solns_fit.size();
    return nsolns;
}
Code example #16
File: kinfu_tf_feeder.cpp  Project: caomw/ros_kinfu
  void Update()
    {
    Eigen::Affine3d epose;

    try
      {
      tf::StampedTransform tpose;
      m_transform_listener->lookupTransform(m_reference_frame,m_pose_frame,ros::Time(0),tpose);
      tf::poseTFToEigen(tpose,epose);
      }
    catch (const tf::TransformException &)
      {
      return;
      }

    kinfu_msgs::KinfuCommandPtr command(new kinfu_msgs::KinfuCommand);
    if (m_start_kinfu)
      {
      command->command_type = command->COMMAND_TYPE_RESUME;
      m_start_kinfu = false;
      ROS_INFO("kinfu_tf_feeder: got first pose, starting kinfu.");
      }
      else
        command->command_type = m_triggered ? uint(command->COMMAND_TYPE_TRIGGER) : uint(command->COMMAND_TYPE_NOOP);

    command->hint_expiration_time = ros::Time::now() + ros::Duration(m_pose_timeout);
    command->hint_forced = m_forced;

    for (uint i = 0; i < 3; i++)
      for (uint h = 0; h < 3; h++)
        command->pose_hint_rotation[i * 3 + h] = epose.linear()(i,h);
    for (uint i = 0; i < 3; i++)
      command->pose_hint_translation[i] = epose.translation()[i];

    m_command_publisher.publish(command);
    }
Code example #17
File: ensenso_grabber.cpp  Project: StevenHickson/pcl
bool
pcl::EnsensoGrabber::matrixTransformationToJson (const Eigen::Affine3d &matrix,
                                                 std::string &json,
                                                 const bool pretty_format) const
{
  try
  {
    NxLibCommand convert_transformation (cmdConvertTransformation);
    // Rotation
    convert_transformation.parameters ()[itmTransformation][0][0].set (matrix.linear ().col (0)[0]);
    convert_transformation.parameters ()[itmTransformation][0][1].set (matrix.linear ().col (0)[1]);
    convert_transformation.parameters ()[itmTransformation][0][2].set (matrix.linear ().col (0)[2]);
    convert_transformation.parameters ()[itmTransformation][0][3].set (0.0);

    convert_transformation.parameters ()[itmTransformation][1][0].set (matrix.linear ().col (1)[0]);
    convert_transformation.parameters ()[itmTransformation][1][1].set (matrix.linear ().col (1)[1]);
    convert_transformation.parameters ()[itmTransformation][1][2].set (matrix.linear ().col (1)[2]);
    convert_transformation.parameters ()[itmTransformation][1][3].set (0.0);

    convert_transformation.parameters ()[itmTransformation][2][0].set (matrix.linear ().col (2)[0]);
    convert_transformation.parameters ()[itmTransformation][2][1].set (matrix.linear ().col (2)[1]);
    convert_transformation.parameters ()[itmTransformation][2][2].set (matrix.linear ().col (2)[2]);
    convert_transformation.parameters ()[itmTransformation][2][3].set (0.0);

    // Translation
    convert_transformation.parameters ()[itmTransformation][3][0].set (matrix.translation ()[0]);
    convert_transformation.parameters ()[itmTransformation][3][1].set (matrix.translation ()[1]);
    convert_transformation.parameters ()[itmTransformation][3][2].set (matrix.translation ()[2]);
    convert_transformation.parameters ()[itmTransformation][3][3].set (1.0);

    convert_transformation.execute ();
    json = convert_transformation.result ()[itmTransformation].asJson (pretty_format);
    return (true);
  }
  catch (NxLibException &ex)
  {
    ensensoExceptionHandling (ex, "matrixTransformationToJson");
    return (false);
  }
}
Code example #18
File: irb120_fk_ik.cpp  Project: TuZZiX/ros_workspace
bool Irb120_IK_solver::compute_q123_solns(Eigen::Affine3d const& desired_hand_pose, std::vector<Vectorq6x1> &q_solns) {
    double L6 = DH_d_params[5];
    double r_goal;
    bool reachable;
    Eigen::Vector3d p_des = desired_hand_pose.translation();
    Eigen::Matrix3d R_des = desired_hand_pose.linear();
    Eigen::Vector3d z_des = R_des.col(2); // direction of desired z-vector
    Eigen::Vector3d w_des = p_des - L6*z_des; // desired wrist position w/rt frame0
    q_solns.clear();
    Vectorq6x1 q_soln;
    
    double q1a = atan2(w_des(1), w_des(0));
    double q1b = q1a + M_PI; // given q1, q1+pi is also a soln
    Eigen::Matrix4d A1a, A1b;
    //compute_A_of_DH(double a,double d,double q, double alpha); use q_vec(i) + DH_q_offsets(i)
    A1a = compute_A_of_DH(0, q1a); //compute_A_of_DH(DH_a_params[0],DH_d_params[0],DH_alpha_params[0], q1a+ DH_q_offsets[0] );
    A1b = compute_A_of_DH(0, q1b); //compute_A_of_DH(DH_a_params[0],DH_d_params[0],DH_alpha_params[0], q1b+ DH_q_offsets[0] );    
    double q2a_solns[2];
    double q2b_solns[2];

    Eigen::Matrix4d A10;
    A10 = compute_A_of_DH(0, q1a);
    Eigen::Matrix3d R10;
    Eigen::Vector3d p1_wrt_0, w_wrt_1a, w_wrt_1b;
    R10 = A10.block<3, 3>(0, 0);
    //std::cout << "compute_q123, R10: " << std::endl;
    //std::cout << R10 << std::endl;
    p1_wrt_0 = A10.col(3).head(3); ///origin of frame1 w/rt frame0
    //compute A10_inv * w_wrt_0, = R'*w_wrt_0 -R'*p1_wrt_0
    w_wrt_1a = R10.transpose() * w_des - R10.transpose() * p1_wrt_0; //desired wrist pos w/rt frame1
    //std::cout << "w_wrt_1a = " << w_wrt_1a.transpose() << std::endl;
    r_goal = sqrt(w_wrt_1a[0] * w_wrt_1a[0] + w_wrt_1a[1] * w_wrt_1a[1]);

    //ROS_INFO("r_goal = %f", r_goal);
    //is the desired wrist position reachable? if not, return false
    // does not yet consider joint limits
    if (r_goal >= L_humerus + L_forearm) {
        //ROS_INFO("goal is too far away!");
        return false;
    }
    // can also have problems if wrist goal is too close to shoulder
    if (r_goal <= fabs(L_humerus - L_forearm)) {
        //ROS_INFO("goal is too close!");
        return false;
    }

    reachable = solve_for_theta2(w_wrt_1a, r_goal, q2a_solns);

    if (!reachable) {
        ROS_WARN("logic error! desired wrist point is out of reach");
        return false;
    } else {
        //ROS_INFO("q2a solns: %f, %f", q2a_solns[0], q2a_solns[1]);
    }
    double q3a_solns[2];
    solve_for_theta3(w_wrt_1a, r_goal, q3a_solns);
    //ROS_INFO("q3a solns: %f, %f", q3a_solns[0], q3a_solns[1]);
    
    // now, get w_wrt_1b:
    A10 = compute_A_of_DH(0, q1b);
    R10 = A10.block<3, 3>(0, 0);
    p1_wrt_0 = A10.col(3).head(3); //origin of frame1 w/rt frame0; should be the same as above
    //compute A10_inv * w_wrt_0, = R'*w_wrt_0 -R'*p1_wrt_0
    w_wrt_1b = R10.transpose() * w_des - R10.transpose() * p1_wrt_0; //desired wrist pos w/rt frame1
    //std::cout << "w_wrt_1b = " << w_wrt_1b.transpose() << std::endl;
    //r_goal should be same as above...
    //r_goal = sqrt(w_wrt_1b[0] * w_wrt_1b[0] + w_wrt_1b[1] * w_wrt_1b[1]);
    //ROS_INFO("r_goal b: %f", r_goal);

    reachable = solve_for_theta2(w_wrt_1b, r_goal, q2b_solns);
    //ROS_INFO("q2b solns: %f, %f", q2b_solns[0], q2b_solns[1]);
    
    double q3b_solns[2];
    solve_for_theta3(w_wrt_1b, r_goal, q3b_solns);
    //ROS_INFO("q3b solns: %f, %f", q3b_solns[0], q3b_solns[1]);
    
    //now, assemble the 4 solutions:
    q_soln[0] = q1a;
    q_soln[1] = q2a_solns[0];
    q_soln[2] = q3a_solns[0];
    q_solns.push_back(q_soln);
    
    q_soln[0] = q1a;
    q_soln[1] = q2a_solns[1];
    q_soln[2] = q3a_solns[1];
    q_solns.push_back(q_soln);  
    
    q_soln[0] = q1b;
    q_soln[1] = q2b_solns[0];
    q_soln[2] = q3b_solns[0];
    q_solns.push_back(q_soln);
    
    q_soln[0] = q1b;
    q_soln[1] = q2b_solns[1];
    q_soln[2] = q3b_solns[1];
    q_solns.push_back(q_soln);      
    
    return true;

}
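
For context, the two range checks on r_goal above are the conditions that keep the law-of-cosines argument for the elbow angle inside [-1, 1]; a hedged sketch of the underlying relation (the real solve_for_theta3 presumably folds in DH offsets and sign conventions):

double cos_elbow = (r_goal * r_goal - L_humerus * L_humerus - L_forearm * L_forearm)
                   / (2.0 * L_humerus * L_forearm);   // in [-1, 1] iff |L_humerus - L_forearm| <= r_goal <= L_humerus + L_forearm
double elbow_a =  acos(cos_elbow);   // elbow-up candidate
double elbow_b = -acos(cos_elbow);   // elbow-down candidate, the second q3 per q1 branch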
Code example #19
int do_inits() {
    Eigen::Matrix3d R;
    Eigen::Vector3d nvec, tvec, bvec, tip_pos;
    bvec << 0, 0, 1; //default for testing: gripper points "down"
    nvec << 1, 0, 0;
    tvec = bvec.cross(nvec);
    R.col(0) = nvec;
    R.col(1) = tvec;
    R.col(2) = bvec;
    g_des_gripper_affine1.linear() = R;
    tip_pos << -0.15, -0.03, 0.07;
    g_des_gripper_affine1.translation() = tip_pos; //will change this, but start w/ something legal

    //hard-coded camera-to-base transform, useful for simple testing/debugging
    g_affine_lcamera_to_psm_one.translation() << -0.155, -0.03265, 0.0;
    nvec << -1, 0, 0;
    tvec << 0, 1, 0;
    bvec << 0, 0, -1;
    //Eigen::Matrix3d R;
    R.col(0) = nvec;
    R.col(1) = tvec;
    R.col(2) = bvec;
    g_affine_lcamera_to_psm_one.linear() = R;

    g_q_vec1_start.resize(7);
    g_q_vec1_goal.resize(7);
    g_q_vec2_start.resize(7);
    g_q_vec2_goal.resize(7);

    g_des_gripper1_wrt_base = g_affine_lcamera_to_psm_one.inverse() * g_des_gripper_affine1;
    g_ik_solver.ik_solve(g_des_gripper1_wrt_base); // compute IK:
    g_q_vec1_goal = g_ik_solver.get_soln();
    g_q_vec1_start = g_q_vec1_goal;
    g_q_vec2_start << 0, 0, 0, 0, 0, 0, 0; //just put gripper 2 at home position
    g_q_vec2_goal << 0, 0, 0, 0, 0, 0, 0;
    //repackage q's into a trajectory;
    //populate a goal message for action server; 
    // initialize with 2 poses that are identical
    g_trajectory_point1.positions.clear();
    g_trajectory_point2.positions.clear();
    //resize these:
    for (int i=0;i<14;i++)  {
        g_trajectory_point1.positions.push_back(0.0); 
        g_trajectory_point2.positions.push_back(0.0); 
    }
        
    for (int i = 0; i < 7; i++) { 
        g_trajectory_point1.positions[i] = g_q_vec1_start[i];
        g_trajectory_point1.positions[i + 7] = g_q_vec2_start[i];
        //should fix up jaw-opening values...do this later
    }

    g_trajectory_point1.time_from_start = ros::Duration(0.0);
    g_trajectory_point2.time_from_start = ros::Duration(2.0);


    g_des_trajectory.points.clear(); // can clear components, but not entire trajectory_msgs
    g_des_trajectory.joint_names.clear();
    //    des_trajectory.joint_names.push_back("right_s0"); //yada-yada should fill in names
    g_des_trajectory.header.stamp = ros::Time::now();

    g_des_trajectory.points.push_back(g_trajectory_point1); // first point of the trajectory 
    g_des_trajectory.points.push_back(g_trajectory_point2);

    // copy traj to goal:
    g_goal.trajectory = g_des_trajectory;
    g_got_new_pose = true; //send robot to start pose
    
     ROS_INFO("getting transforms from camera to PSMs");
    tf::TransformListener tfListener;
    tf::StampedTransform tfResult_one, tfResult_two;
    bool tferr = true;
    int ntries = 0;
    ROS_INFO("waiting for tf between base and right_hand...");
    while (tferr) {
        if (ntries > 5) break; //give up and accept default after this many tries
        tferr = false;
        try {
            //The direction of the transform returned will be from the target_frame to the source_frame. 
            //Which if applied to data, will transform data in the source_frame into the target_frame. See tf/CoordinateFrameConventions#Transform_Direction
            tfListener.lookupTransform("left_camera_optical_frame", "one_psm_base_link", ros::Time(0), tfResult_one);
            //tfListener.lookupTransform("left_camera_optical_frame", "two_psm_base_link", ros::Time(0), tfResult_two);
        } catch (tf::TransformException &exception) {
            ROS_WARN("%s", exception.what());
            tferr = true;
            ros::Duration(0.5).sleep(); // sleep for half a second
            ros::spinOnce();
            ntries++;
        }
    }
    //default transform: need to match this up to camera calibration!
    if (tferr) {
        g_affine_lcamera_to_psm_one.translation() << -0.155, -0.03265, 0.0;
        Eigen::Vector3d nvec, tvec, bvec;
        nvec << -1, 0, 0;
        tvec << 0, 1, 0;
        bvec << 0, 0, -1;
        Eigen::Matrix3d R;
        R.col(0) = nvec;
        R.col(1) = tvec;
        R.col(2) = bvec;
        g_affine_lcamera_to_psm_one.linear() = R;
        ROS_WARN("using default transform");
    } else {

        ROS_INFO("tf is good");

        //affine_lcamera_to_psm_one is the position/orientation of psm1 base frame w/rt left camera link frame
        // need to extend this to camera optical frame
        g_affine_lcamera_to_psm_one = transformTFToEigen(tfResult_one);
        //affine_lcamera_to_psm_two = transformTFToEigen(tfResult_two);
    }
    ROS_INFO("transform from left camera to psm one:");
    cout << g_affine_lcamera_to_psm_one.linear() << endl;
    cout << g_affine_lcamera_to_psm_one.translation().transpose() << endl;
    //ROS_INFO("transform from left camera to psm two:");
    //cout << affine_lcamera_to_psm_two.linear() << endl;
    //cout << affine_lcamera_to_psm_two.translation().transpose() << endl;    
    
    
    ROS_INFO("done w/ inits");

    return 0;
    
}
Code example #20
void init_poses() {
    ROS_INFO("getting transforms from camera to PSMs");
    //tf::TransformListener tfListener;
    tf::StampedTransform tfResult_one, tfResult_two;
    g_tfListener_ptr = new tf::TransformListener;
    bool tferr = true;
    int ntries = 0;
    ROS_INFO("waiting for tf between base and camera...");
    while (tferr) {
        if (ntries > 5) break; //give up and accept default after this many tries
        tferr = false;
        try {
            //The direction of the transform returned will be from the target_frame to the source_frame.
            //Which if applied to data, will transform data in the source_frame into the target_frame. See tf/CoordinateFrameConventions#Transform_Direction
            g_tfListener_ptr->lookupTransform("left_camera_optical_frame", "one_psm_base_link", ros::Time(0), tfResult_one);
            g_tfListener_ptr->lookupTransform("left_camera_optical_frame", "two_psm_base_link", ros::Time(0), tfResult_two);
        } catch (tf::TransformException &exception) {
            ROS_WARN("%s", exception.what());
            tferr = true;
            ros::Duration(0.5).sleep(); // sleep for half a second
            ros::spinOnce();
            ntries++;
        }
    }
    //default transform: need to match this up to camera calibration!
    if (tferr) {
        g_affine_lcamera_to_psm_one.translation() << -0.155, -0.03265, 0.0;
        Eigen::Vector3d nvec, tvec, bvec;
        nvec << -1, 0, 0;
        tvec << 0, 1, 0;
        bvec << 0, 0, -1;
        Eigen::Matrix3d R;
        R.col(0) = nvec;
        R.col(1) = tvec;
        R.col(2) = bvec;
        g_affine_lcamera_to_psm_one.linear() = R;
        g_affine_lcamera_to_psm_two.linear() = R;
        g_affine_lcamera_to_psm_two.translation() << 0.145, -0.03265, 0.0;
        ROS_WARN("using default transform");
    } else {

        ROS_INFO("tf is good");

        //g_affine_lcamera_to_psm_one is the position/orientation of psm1 base frame w/rt left camera link frame
        // need to extend this to camera optical frame
        g_affine_lcamera_to_psm_one = transformTFToEigen(tfResult_one);
        g_affine_lcamera_to_psm_two = transformTFToEigen(tfResult_two);
    }
    ROS_INFO("transform from left camera to psm one:");
    cout << g_affine_lcamera_to_psm_one.linear() << endl;
    cout << g_affine_lcamera_to_psm_one.translation().transpose() << endl;
    ROS_INFO("transform from left camera to psm two:");
    cout << g_affine_lcamera_to_psm_two.linear() << endl;
    cout << g_affine_lcamera_to_psm_two.translation().transpose() << endl;

    //now get initial poses:
    ROS_INFO("waiting for tf between base and grippers...");
    while (tferr) {
        if (ntries > 5) break; //give up and accept default after this many tries
        tferr = false;
        try {
            //The direction of the transform returned will be from the target_frame to the source_frame.
            //Which if applied to data, will transform data in the source_frame into the target_frame. See tf/CoordinateFrameConventions#Transform_Direction
            g_tfListener_ptr->lookupTransform("left_camera_optical_frame", "one_tool_tip_link", ros::Time(0), tfResult_one);
            g_tfListener_ptr->lookupTransform("left_camera_optical_frame", "two_tool_tip_link", ros::Time(0), tfResult_two);
        } catch (tf::TransformException &exception) {
            ROS_WARN("%s", exception.what());
            tferr = true;
            ros::Duration(0.5).sleep(); // sleep for half a second
            ros::spinOnce();
            ntries++;
        }
    }
    //default start pose, if can't get tf:
    if (tferr) {
        g_psm1_start_pose.translation() << -0.02, 0, 0.04;
        g_psm2_start_pose.translation() << 0.02, 0, 0.04;
        Eigen::Vector3d nvec, tvec, bvec;
        nvec << -1, 0, 0;
        tvec << 0, 1, 0;
        bvec << 0, 0, -1;
        Eigen::Matrix3d R;
        R.col(0) = nvec;
        R.col(1) = tvec;
        R.col(2) = bvec;
        g_psm1_start_pose.linear() = R;
        g_psm2_start_pose.linear() = R;
        ROS_WARN("using default start poses");
    } else {

        ROS_INFO("tf is good");

        //g_affine_lcamera_to_psm_one is the position/orientation of psm1 base frame w/rt left camera link frame
        // need to extend this to camera optical frame
        g_psm1_start_pose = transformTFToEigen(tfResult_one);
        g_psm2_start_pose = transformTFToEigen(tfResult_two);
    }
    ROS_INFO("psm1 gripper start pose:");
    cout << g_psm1_start_pose.linear() << endl;
    cout << g_psm1_start_pose.translation().transpose() << endl;
    ROS_INFO("psm2 gripper start pose:");
    cout << g_psm2_start_pose.linear() << endl;
    cout << g_psm2_start_pose.translation().transpose() << endl;

    //just to be safe:
    g_O_entry_point(0) = 0.0;
    g_O_entry_point(1) = 0.0;
    g_O_entry_point(2) = 0.12;
    g_O_exit_point = g_O_entry_point;
    g_O_exit_point(0) += 0.02;
}
Code example #21
bool CartTrajPlanner::cartesian_path_planner(Eigen::Affine3d a_flange_start,Eigen::Affine3d a_flange_end, std::vector<Eigen::VectorXd> &optimal_path) {
    std::vector<std::vector<Eigen::VectorXd> > path_options;
    path_options.clear();
    std::vector<Eigen::VectorXd> single_layer_nodes;
    Eigen::VectorXd node;
    Eigen::Affine3d a_flange_des;
    Eigen::Matrix3d R_des = a_flange_end.linear();
    a_flange_des.linear() = R_des;
    //a_flange_start = ur10FwdSolver_.fwd_kin_solve(q_start);
    //cout << "fwd kin from q_start: " << a_flange_start.translation().transpose() << endl;
    //cout << "fwd kin from q_start R: " << endl;
    //cout << a_flange_start.linear() << endl;
    //store a vector of Cartesian affine samples for desired path:
    cartesian_affine_samples_.clear();
    a_flange_des = a_flange_end;
    a_flange_start.linear() = R_des; // no interpolation of orientation; set goal orientation immediately   
    a_flange_des.linear() = R_des; //expected behavior: will try to achieve orientation first, before translating
            
    int nsolns;
    bool reachable_proposition;
    int nsteps = 0;
    Eigen::Vector3d p_des,dp_vec,del_p,p_start,p_end;
    p_start = a_flange_start.translation();
    p_end = a_flange_end.translation();
    del_p = p_end-p_start;
    double dp_scalar = CARTESIAN_PATH_SAMPLE_SPACING;
    nsteps = round(del_p.norm()/dp_scalar);
    if (nsteps<1) nsteps=1;
    dp_vec = del_p/nsteps;
    nsteps++; //account for pose at step 0


    std::vector<Eigen::VectorXd> q_solns;
    p_des = p_start;
    cartesian_affine_samples_.push_back(a_flange_start);
    for (int istep=0;istep<nsteps;istep++) 
    {
            a_flange_des.translation() = p_des;
            cartesian_affine_samples_.push_back(a_flange_des);
            cout<<"trying: "<<p_des.transpose()<<endl;
            //int ik_solve(Eigen::Affine3d const& desired_hand_pose,vector<Eigen::VectorXd> &q_ik_solns);
            nsolns = ur10IkSolver_.ik_solve(a_flange_des, q_solns);
            std::cout<<"cartesian step "<<istep<<" has = "<<nsolns<<" IK solns"<<endl;
            single_layer_nodes.clear();
            if (nsolns>0) {
                single_layer_nodes.resize(nsolns);
                for (int isoln = 0; isoln < nsolns; isoln++) {
                    // this is annoying: can't treat std::vector<Vectorq7x1> same as std::vector<Eigen::VectorXd> 
                    //node = q_solns[isoln];
                    single_layer_nodes[isoln] = q_solns[isoln]; //node;
                    //single_layer_nodes = q_solns; 
                }

                path_options.push_back(single_layer_nodes);
            }
            p_des += dp_vec;
    }

    //plan a path through the options:
    int nlayers = path_options.size();
    if (nlayers < 1) {
        ROS_WARN("no viable options: quitting");
        return false; // give up if no options
    }
    //std::vector<Eigen::VectorXd> optimal_path;
    optimal_path.resize(nlayers);
    double trip_cost;
    // set joint penalty weights for computing optimal path in joint space
    Eigen::VectorXd weights;
    weights.resize(NJNTS);
    for (int i = 0; i < NJNTS; i++) {
        weights(i) = 1.0;
    }

    // compute min-cost path, using Stagecoach algorithm
    cout << "instantiating a JointSpacePlanner:" << endl;
    { //limit the scope of jsp here:
        JointSpacePlanner jsp(path_options, weights);
        cout << "recovering the solution..." << endl;
        jsp.get_soln(optimal_path);
        trip_cost = jsp.get_trip_cost();
    }

    //now, jsp is deleted, but optimal_path lives on:
    cout << "resulting solution path: " << endl;
    for (int ilayer = 0; ilayer < nlayers; ilayer++) {
        cout << "ilayer: " << ilayer << " node: " << optimal_path[ilayer].transpose() << endl;
    }
    cout << "soln min cost: " << trip_cost << endl;
    return true;
}
Code example #22
File: compensator.cpp  Project: HemaZ/apollo
void Compensator::motion_compensation(sensor_msgs::PointCloud2::Ptr& msg,
                                      const double timestamp_min,
                                      const double timestamp_max,
                                      const Eigen::Affine3d& pose_min_time,
                                      const Eigen::Affine3d& pose_max_time) {
  using std::abs;
  using std::sin;
  using std::acos;

  Eigen::Vector3d translation =
      pose_min_time.translation() - pose_max_time.translation();
  Eigen::Quaterniond q_max(pose_max_time.linear());
  Eigen::Quaterniond q_min(pose_min_time.linear());
  Eigen::Quaterniond q1(q_max.conjugate() * q_min);
  Eigen::Quaterniond q0(Eigen::Quaterniond::Identity());
  q1.normalize();
  translation = q_max.conjugate() * translation;

  int total = msg->width * msg->height;

  double d = q0.dot(q1);
  double abs_d = abs(d);
  double f = 1.0 / (timestamp_max - timestamp_min);

  // Threshold for a "significant" rotation from min_time to max_time:
  // The LiDAR range accuracy is ~2 cm. Over 70 meters range, it means an angle
  // of 0.02 / 70 =
  // 0.0003 rad. So, we consider a rotation "significant" only if the scalar
  // part of quaternion is
  // less than cos(0.0003 / 2) = 1 - 1e-8.
  if (abs_d < 1.0 - 1.0e-8) {
    double theta = acos(abs_d);
    double sin_theta = sin(theta);
    double c1_sign = (d > 0) ? 1 : -1;
    for (int i = 0; i < total; ++i) {
      size_t offset = i * msg->point_step;
      Scalar* x_scalar =
          reinterpret_cast<Scalar*>(&msg->data[offset + x_offset_]);
      if (std::isnan(*x_scalar)) {
        ROS_DEBUG_STREAM("nan point do not need motion compensation");
        continue;
      }
      Scalar* y_scalar =
          reinterpret_cast<Scalar*>(&msg->data[offset + y_offset_]);
      Scalar* z_scalar =
          reinterpret_cast<Scalar*>(&msg->data[offset + z_offset_]);
      Eigen::Vector3d p(*x_scalar, *y_scalar, *z_scalar);

      double tp = 0.0;
      memcpy(&tp, &msg->data[i * msg->point_step + timestamp_offset_],
             timestamp_data_size_);
      double t = (timestamp_max - tp) * f;

      Eigen::Translation3d ti(t * translation);

      double c0 = sin((1 - t) * theta) / sin_theta;
      double c1 = sin(t * theta) / sin_theta * c1_sign;
      Eigen::Quaterniond qi(c0 * q0.coeffs() + c1 * q1.coeffs());

      Eigen::Affine3d trans = ti * qi;
      p = trans * p;
      *x_scalar = p.x();
      *y_scalar = p.y();
      *z_scalar = p.z();
    }
    return;
  }
  // Not a "significant" rotation. Do translation only.
  for (int i = 0; i < total; ++i) {
    Scalar* x_scalar =
        reinterpret_cast<Scalar*>(&msg->data[i * msg->point_step + x_offset_]);
    if (std::isnan(*x_scalar)) {
      ROS_DEBUG_STREAM("nan point do not need motion compensation");
      continue;
    }
    Scalar* y_scalar =
        reinterpret_cast<Scalar*>(&msg->data[i * msg->point_step + y_offset_]);
    Scalar* z_scalar =
        reinterpret_cast<Scalar*>(&msg->data[i * msg->point_step + z_offset_]);
    Eigen::Vector3d p(*x_scalar, *y_scalar, *z_scalar);

    double tp = 0.0;
    memcpy(&tp, &msg->data[i * msg->point_step + timestamp_offset_],
           timestamp_data_size_);
    double t = (timestamp_max - tp) * f;
    Eigen::Translation3d ti(t * translation);

    p = ti * p;
    *x_scalar = p.x();
    *y_scalar = p.y();
    *z_scalar = p.z();
  }
}
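
The per-point quaternion interpolation above is a hand-written SLERP from q0 (identity) to q1; Eigen offers the same operation directly, including the shortest-path sign handling done here via c1_sign. A hedged equivalent, reusing the names from the code above:

Eigen::Quaterniond qi = q0.slerp(t, q1);   // interpolates from q0 at t=0 to q1 at t=1
Eigen::Affine3d trans = Eigen::Translation3d(t * translation) * qi;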
Code example #23
void CartMoveActionServer::executeCB(const actionlib::SimpleActionServer<cwru_action::cart_moveAction>::GoalConstPtr& goal) {

    ROS_INFO("in executeCB of CartMoveActionServer");
   cart_result_.err_code=0;
   cart_move_as_.isActive();
   //unpack the necessary info:
   gripper_ang1_ = goal->gripper_jaw_angle1;
   gripper_ang2_ = goal->gripper_jaw_angle2;
   arrival_time_ = goal->move_time;
   // interpret the desired gripper poses:
   geometry_msgs::PoseStamped des_pose_gripper1 = goal->des_pose_gripper1;
   geometry_msgs::PoseStamped des_pose_gripper2 = goal->des_pose_gripper2;
   // convert the above to affine objects:
   des_gripper1_affine_wrt_lcamera_ = transformPoseToEigenAffine3d(des_pose_gripper1.pose);
   cout<<"gripper1 desired pose;  "<<endl;
   cout<<des_gripper1_affine_wrt_lcamera_.linear()<<endl;
   cout<<"origin: "<<des_gripper1_affine_wrt_lcamera_.translation().transpose()<<endl;

   des_gripper2_affine_wrt_lcamera_ = transformPoseToEigenAffine3d(des_pose_gripper2.pose);
   cout<<"gripper2 desired pose;  "<<endl;
   cout<<des_gripper2_affine_wrt_lcamera_.linear()<<endl;
   cout<<"origin: "<<des_gripper2_affine_wrt_lcamera_.translation().transpose()<<endl;

   //do IK to convert these to joint angles:
    //Eigen::VectorXd q_vec1,q_vec2;
    Vectorq7x1 q_vec1,q_vec2;
    q_vec1.resize(7);
    q_vec2.resize(7);

    
    trajectory_msgs::JointTrajectory des_trajectory; // an empty trajectory 
    des_trajectory.points.clear(); // can clear components, but not entire trajectory_msgs
    des_trajectory.joint_names.clear(); //could put joint names in...but I assume a fixed order and fixed size, so this is unnecessary
    // if using wsn's trajectory streamer action server
    des_trajectory.header.stamp = ros::Time::now();

    trajectory_msgs::JointTrajectoryPoint trajectory_point; //,trajectory_point2; 
    trajectory_point.positions.resize(14);
    
    ROS_INFO("\n");
    ROS_INFO("stored previous command to gripper one: ");
    cout<<gripper1_affine_last_commanded_pose_.linear()<<endl;
    cout<<"origin: "<<gripper1_affine_last_commanded_pose_.translation().transpose()<<endl;
   

    // first, reiterate previous command:
    // this could be easier, if saved previous joint-space trajectory point...
    des_gripper_affine1_ = affine_lcamera_to_psm_one_.inverse()*gripper1_affine_last_commanded_pose_; //previous pose
    ik_solver_.ik_solve(des_gripper_affine1_); //convert desired pose into equiv joint displacements
    q_vec1 = ik_solver_.get_soln(); 
    q_vec1(6) = last_gripper_ang1_; // include desired gripper opening angle
    
    
    des_gripper_affine2_ = affine_lcamera_to_psm_two_.inverse()*gripper2_affine_last_commanded_pose_; //previous pose
    ik_solver_.ik_solve(des_gripper_affine2_); //convert desired pose into equiv joint displacements
    q_vec2 = ik_solver_.get_soln(); 
    cout<<"q_vec1 of stored pose: "<<endl;
    for (int i=0;i<6;i++) {
        cout<<q_vec1[i]<<", ";
    }
    cout<<endl;
    q_vec2(6) = last_gripper_ang2_; // include desired gripper opening angle
    
       for (int i=0;i<7;i++) {
            trajectory_point.positions[i] = q_vec1(i);
            trajectory_point.positions[i+7] = q_vec2(i);  
        }
    cout<<"start traj pt: "<<endl;
    for (int i=0;i<14;i++) {
        cout<<trajectory_point.positions[i]<<", ";
    }
    cout<<endl;
      trajectory_point.time_from_start = ros::Duration(0.0); // start time set to 0
    // PUSH IN THE START POINT:
      des_trajectory.points.push_back(trajectory_point);            

    // compute and append the goal point, in joint space trajectory:
    des_gripper_affine1_ = affine_lcamera_to_psm_one_.inverse()*des_gripper1_affine_wrt_lcamera_;
    ROS_INFO("desired gripper one location in base frame: ");
    cout<<"gripper1 desired pose;  "<<endl;
    cout<<des_gripper_affine1_.linear()<<endl;
    cout<<"origin: "<<des_gripper_affine1_.translation().transpose()<<endl;

    ik_solver_.ik_solve(des_gripper_affine1_); //convert desired pose into equiv joint displacements
    q_vec1 = ik_solver_.get_soln(); 
    q_vec1(6) = gripper_ang1_; // include desired gripper opening angle

    des_gripper_affine2_ = affine_lcamera_to_psm_two_.inverse()*des_gripper2_affine_wrt_lcamera_;
    ik_solver_.ik_solve(des_gripper_affine2_); //convert desired pose into equiv joint displacements
    q_vec2 = ik_solver_.get_soln();  
    cout<<"q_vec1 of goal pose: "<<endl;
    for (int i=0;i<6;i++) {
        cout<<q_vec1[i]<<", ";
    }
    cout<<endl;
    q_vec2(6) = gripper_ang2_;
        for (int i=0;i<7;i++) {
            trajectory_point.positions[i] = q_vec1(i);
            trajectory_point.positions[i+7] = q_vec2(i);  
        }
      trajectory_point.time_from_start = ros::Duration(arrival_time_);
   cout<<"goal traj pt: "<<endl;
    for (int i=0;i<14;i++) {
        cout<<trajectory_point.positions[i]<<", ";
    }
    cout<<endl;
      des_trajectory.points.push_back(trajectory_point);

    js_goal_.trajectory = des_trajectory;

    // Need boost::bind to pass in the 'this' pointer
  // see example: http://library.isr.ist.utl.pt/docs/roswiki/actionlib_tutorials%282f%29Tutorials%282f%29Writing%2820%29a%2820%29Callback%2820%29Based%2820%29Simple%2820%29Action%2820%29Client.html
  //  ac.sendGoal(goal,
  //              boost::bind(&MyNode::doneCb, this, _1, _2),
  //              Client::SimpleActiveCallback(),
  //              Client::SimpleFeedbackCallback());

    js_action_client_.sendGoal(js_goal_, boost::bind(&CartMoveActionServer::js_doneCb_,this,_1,_2)); // we could also name additional callback functions here, if desired
    //    action_client.sendGoal(goal, &doneCb, &activeCb, &feedbackCb); //alt--more callback funcs possible
    
    double t_timeout=arrival_time_+2.0; //wait 2 sec longer than expected duration of move
    
    bool finished_before_timeout = js_action_client_.waitForResult(ros::Duration(t_timeout));
    //bool finished_before_timeout = action_client.waitForResult(); // wait forever...
    if (!finished_before_timeout) {
        ROS_WARN("joint-space interpolation result is overdue ");
    } else {
        ROS_INFO("finished before timeout");
    }

    ROS_INFO("completed callback" );
    cart_move_as_.setSucceeded(cart_result_); // tell the client that we were successful acting on the request, and return the "result" message 
    
    //let's remember the last pose commanded, so we can use it as start pose for next move
    gripper1_affine_last_commanded_pose_ = des_gripper1_affine_wrt_lcamera_;
    gripper2_affine_last_commanded_pose_ = des_gripper2_affine_wrt_lcamera_;    
    //and the jaw opening angles:
    last_gripper_ang1_=gripper_ang1_;
    last_gripper_ang2_=gripper_ang2_;
}
Code example #24
File: adv_registration.cpp  Project: ehuang3/apc_ros
icp_result sac_icp(PointCloudT::Ptr object, PointCloudT::Ptr scene, Eigen::Affine3d seed_pose) {
  /* Apply ICP at many different offsets and orientations to seek a more accurate pose estimate */
  // This is good, allow configurable distance-offset
  Eigen::Vector3d offsets[] = {
    Eigen::Vector3d(0., 0., 0. ),
    Eigen::Vector3d(-0.06, 0, 0),
    Eigen::Vector3d(0, -0.06, 0),
    Eigen::Vector3d(0, 0, -0.06),
    Eigen::Vector3d(0.06, 0, 0 ),
    Eigen::Vector3d(0, 0.06, 0 ),
    Eigen::Vector3d(0, 0, 0.06 )
  };

  // -> Do this better (try 45 deg increments, try auto-generating)
  orientation orientations[] = {
    {Eigen::Vector3d::UnitZ(), 0.0},
    {Eigen::Vector3d::UnitZ(), M_PI / 5},
    {Eigen::Vector3d::UnitZ(), 2 * M_PI / 5},
    {Eigen::Vector3d::UnitZ(), 3 * M_PI / 5},
    {Eigen::Vector3d::UnitZ(), 4 * M_PI / 5},
    {Eigen::Vector3d::UnitZ(), M_PI},

    {Eigen::Vector3d::UnitX(), M_PI / 5},
    {Eigen::Vector3d::UnitX(), 2 * M_PI / 5},
    {Eigen::Vector3d::UnitX(), 3 * M_PI / 5},
    {Eigen::Vector3d::UnitX(), 4 * M_PI / 5},
    {Eigen::Vector3d::UnitX(), M_PI},

    {Eigen::Vector3d::UnitY(), M_PI / 5},
    {Eigen::Vector3d::UnitY(), 2 * M_PI / 5},
    {Eigen::Vector3d::UnitY(), 3 * M_PI / 5},
    {Eigen::Vector3d::UnitY(), 4 * M_PI / 5},
    {Eigen::Vector3d::UnitY(), M_PI},
  };

  // void affine_cloud(Eigen::Vector3f axis, float theta, Eigen::Vector3f translation, pcl::PointCloud<pcl::PointXYZ>& input_cloud, 
  // pcl::PointCloud<pcl::PointXYZ>& destination_cloud) {
  pcl::PointCloud<pcl::PointXYZ>::Ptr xyz_scene(new pcl::PointCloud<pcl::PointXYZ>);
  pcl::copyPointCloud(*scene, *xyz_scene); // Try this with pointNormals (Can we do this?)

  pcl::visualization::PCLVisualizer viewer ("Point Cloud Visualization ENHANCED!");
  viewer.setSize (1280, 1024);  // Visualiser window size
  viewer.registerKeyboardCallback (&keyboardEventOccurred, (void*) NULL);

  pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> scene_color_h(xyz_scene, 255, 0, 0);

  viewer.addPointCloud(xyz_scene, scene_color_h, "scene");
  pcl::PointCloud<pcl::PointXYZ>::Ptr affined_cloud(new pcl::PointCloud<pcl::PointXYZ>);
  viewer.addPointCloud(affined_cloud, "object");

  // Should be using .reset() instead of new, right? I duno
  // Eigen::Affine3d initial_transform;
  // Eigen::Affine3d transform;
  Eigen::Matrix3d seed_rotation = seed_pose.linear();
  Eigen::Vector3d seed_translation = seed_pose.translation();

  icp_result result;  // keep the most recent fit so it can be returned below
  for (int k = 0; k < 7; k++) {
    for (int j = 0; j < 16; j++) {
      pcl::console::print_highlight ("At %i %i\n", k, j);

      affined_cloud.reset(new pcl::PointCloud<pcl::PointXYZ>);
      pcl::copyPointCloud(*object, *affined_cloud);

      Eigen::Matrix3d rotation = seed_rotation * (Eigen::AngleAxisd(orientations[j].theta, orientations[j].axis));
      Eigen::Vector3d translation = seed_translation + offsets[k];

      Eigen::Affine3d transform = Eigen::Affine3d::Identity();
      transform.rotate(rotation);
      transform.translation() = translation;
      pcl::transformPointCloud(*affined_cloud, *affined_cloud, transform);

      // affine_cloud(orientations[j].axis, orientations[j].theta, offsets[k], *affined_cloud, *affined_cloud);
      result = apply_icp(affined_cloud, xyz_scene, 20, 0.01);
      // result = apply_icp(affined_cloud, xyz_scene, 40, 0.01);
      // result = apply_icp(affined_cloud, xyz_scene, 40, 0.005);


      // affine_cloud(result.affine, *affined_cloud, *affined_cloud);

      viewer.updatePointCloud(affined_cloud, "object");
      viewer.updatePointCloud(xyz_scene, scene_color_h, "scene");
      next_iteration = false;

      // visualize(affined_cloud, xyz_scene, "Post-icp");
      while(next_iteration == false) {
        viewer.spinOnce();
      };
    }
  }
  return result;  // the function is declared to return an icp_result
}
Code example #25
File: Pose.hpp  Project: arneboe/base-types
 /** 
  * @brief set the pose based on a 4x4 matrix
  *
  * @param t 4x4 homogeneous transform matrix, for which the upper-left
  *	3x3 matrix needs to be a rotation matrix.
  */
 void fromTransform( const Eigen::Affine3d &t )
 {
     position = t.translation();
     orientation = t.linear();
 }
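
The reverse composition is a one-liner; a hedged sketch, assuming position and orientation are the Eigen vector and quaternion members set above:

Eigen::Affine3d t = Eigen::Translation3d(position) * orientation;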
Code example #26
//handy utility, just to print data to screen for Affine objects
void  ArmMotionInterface::display_affine(Eigen::Affine3d affine) {
    cout<<"Affine origin: "<<affine.translation().transpose()<<endl;
    cout<<"Affine rotation: "<<endl;
    cout<<affine.linear()<<endl;
}
Code example #27
BITBOTS_INLINE Eigen::Matrix4d transform_inline_multiplication(const Eigen::Affine3d& transform, double angle) {
    return (Eigen::Matrix4d()<<transform.linear() * Eigen::AngleAxisd(angle, Eigen::Vector3d::UnitY()), transform.translation(),0,0,0,1).finished();
}
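
An equivalent formulation, sketched here for comparison (the _alt name is hypothetical): composing the transform with the extra rotation on the right leaves the translation untouched and lets Eigen assemble the 4x4 matrix.

BITBOTS_INLINE Eigen::Matrix4d transform_inline_multiplication_alt(const Eigen::Affine3d& transform, double angle) {
    return (transform * Eigen::AngleAxisd(angle, Eigen::Vector3d::UnitY())).matrix();
}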
Code example #28
File: Optimizer.cpp  Project: rickytan/KALOFution
void Optimizer::optimizeUseG2O()
{


    // create the linear solver
    BlockSolverX::LinearSolverType * linearSolver = new LinearSolverCSparse<BlockSolverX::PoseMatrixType>();

    // create the block solver on top of the linear solver
    BlockSolverX* blockSolver = new BlockSolverX(linearSolver);

    // create the algorithm to carry out the optimization
    //OptimizationAlgorithmGaussNewton* optimizationAlgorithm = new OptimizationAlgorithmGaussNewton(blockSolver);
    OptimizationAlgorithmLevenberg* optimizationAlgorithm = new OptimizationAlgorithmLevenberg(blockSolver);

    // NOTE: We skip to fix a variable here, either this is stored in the file
    // itself or Levenberg will handle it.

    // create the optimizer to load the data and carry out the optimization
    SparseOptimizer optimizer;
    SparseOptimizer::initMultiThreading();
    optimizer.setVerbose(true);
    optimizer.setAlgorithm(optimizationAlgorithm);

    {
        pcl::ScopeTime time("G2O setup Graph vertices");
        for (size_t cloud_count = 0; cloud_count < m_pointClouds.size(); ++cloud_count)
        {
            VertexSE3 *vertex = new VertexSE3;
            vertex->setId(cloud_count);
            Isometry3D affine = Isometry3D::Identity();
            affine.linear() = m_pointClouds[cloud_count]->sensor_orientation_.toRotationMatrix().cast<Isometry3D::Scalar>();
            affine.translation() = m_pointClouds[cloud_count]->sensor_origin_.block<3, 1>(0, 0).cast<Isometry3D::Scalar>();
            vertex->setEstimate(affine);
            optimizer.addVertex(vertex);
        }
        optimizer.vertex(0)->setFixed(true);
    }

    {
        pcl::ScopeTime time("G2O setup Graph edges");
        double trans_noise = 0.5, rot_noise = 0.5235;
        EdgeSE3::InformationType information = EdgeSE3::InformationType::Zero();
        information.block<3, 3>(0, 0) << trans_noise * trans_noise, 0, 0,
                                        0, trans_noise * trans_noise, 0,
                                        0, 0, trans_noise * trans_noise;
        information.block<3, 3>(3, 3) << rot_noise * rot_noise, 0, 0,
                                        0, rot_noise * rot_noise, 0,
                                        0, 0, rot_noise * rot_noise;
        for (size_t pair_count = 0; pair_count < m_cloudPairs.size(); ++pair_count)
        {
            CloudPair pair = m_cloudPairs[pair_count];
		    int from = pair.corresIdx.first;
		    int to = pair.corresIdx.second;
            EdgeSE3 *edge = new EdgeSE3;
		    edge->vertices()[0] = optimizer.vertex(from);
		    edge->vertices()[1] = optimizer.vertex(to);

            Eigen::Matrix<double, 6, 6> ATA = Eigen::Matrix<double, 6, 6>::Zero();
            Eigen::Matrix<double, 6, 1> ATb = Eigen::Matrix<double, 6, 1>::Zero();
#pragma unroll 8
            for (size_t point_count = 0; point_count < pair.corresPointIdx.size(); ++point_count) {
                int point_p = pair.corresPointIdx[point_count].first;
                int point_q = pair.corresPointIdx[point_count].second;
                PointType P = m_pointClouds[from]->points[point_p];
                PointType Q = m_pointClouds[to]->points[point_q];

                Eigen::Vector3d p = P.getVector3fMap().cast<double>();
                Eigen::Vector3d q = Q.getVector3fMap().cast<double>();
                Eigen::Vector3d Np = P.getNormalVector3fMap().cast<double>();

                double b = (p - q).dot(Np);

                Eigen::Matrix<double, 6, 1> A_p;
                A_p.block<3, 1>(0, 0) = p.cross(Np);
                A_p.block<3, 1>(3, 0) = Np;

                ATA += A_p * A_p.transpose();
                ATb += A_p * b;
            }

            Eigen::Matrix<double, 6, 1> X = ATA.ldlt().solve(ATb);
            Isometry3D measure = Isometry3D::Identity();
            double beta = X[0];
            double gamma = X[1];
            double alpha = X[2];
            measure.linear() = (Eigen::Matrix3d)Eigen::AngleAxisd(alpha, Eigen::Vector3d::UnitZ()) *
                Eigen::AngleAxisd(gamma, Eigen::Vector3d::UnitY()) *
                Eigen::AngleAxisd(beta, Eigen::Vector3d::UnitX());
            measure.translation() = X.block<3, 1>(3, 0);

            edge->setMeasurement(measure);

		    edge->setInformation(information);
            
            optimizer.addEdge(edge);
        }
    }

    optimizer.save("debug_preOpt.g2o");
    {
        pcl::ScopeTime time("g2o optimizing");
        optimizer.initializeOptimization();
        optimizer.optimize(30);
    }
    optimizer.save("debug_postOpt.g2o");

    for (size_t cloud_count = 0; cloud_count < m_pointClouds.size(); ++cloud_count)
    {
        CloudTypePtr tmp(new CloudType);
        Isometry3D trans = ((VertexSE3 *)optimizer.vertices()[cloud_count])->estimate();
        Eigen::Affine3d affine;
        affine.linear() = trans.rotation();
        affine.translation() = trans.translation();
        pcl::transformPointCloudWithNormals(*m_pointClouds[cloud_count], *tmp, affine.cast<float>());
        pcl::copyPointCloud(*tmp, *m_pointClouds[cloud_count]);
    }

    PCL_WARN("Opitimization DONE!!!!\n");

    if (m_params.saveDirectory.length()) {
        if (boost::filesystem::exists(m_params.saveDirectory) && !boost::filesystem::is_directory(m_params.saveDirectory)) {
            boost::filesystem::remove(m_params.saveDirectory);
        }
        boost::filesystem::create_directories(m_params.saveDirectory);

        char filename[1024] = { 0 };
        for (size_t i = 0; i < m_pointClouds.size(); ++i) {
            sprintf(filename, "%s/cloud_%zu.ply", m_params.saveDirectory.c_str(), i);
            pcl::io::savePLYFileBinary(filename, *m_pointClouds[i]);
        }
    }
}
Code example #29
File: Pose.hpp  Project: Cirromulus/base-types
	/** 
	 * Test if the provided delta transformation is greater in 
	 * either distance or angle than the threshold
	 */
	bool test( const Eigen::Affine3d& pdelta )
	{
	    return test( pdelta.translation().norm(), Eigen::AngleAxisd( pdelta.linear() ).angle() );
	}
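
Here Eigen::AngleAxisd(pdelta.linear()).angle() recovers the magnitude of the rotation, so the overload compares translation distance and rotation angle against their thresholds. A hedged sketch of a caller (the pose pair is hypothetical):

Eigen::Affine3d delta = prev_pose.inverse() * curr_pose;
if (test(delta)) {
    // the pose moved or turned more than the configured threshold
}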