Code Example #1
    void MotionPlanningFrame::snapStateToFrame(robot_state::RobotState& state,
                                               const Eigen::Affine3d& T_frame_world,
                                               const std::string& link,
                                               const Eigen::Affine3d& T_frame_link,
                                               const std::string& group)
    {
        ROS_DEBUG_FUNCTION;

        // std::cout << "T_frame_world:\n" << T_frame_world.matrix() << std::endl;
        // std::cout << "T_frame_link:\n" << T_frame_link.matrix() << std::endl;

        // Back out the desired link transform in global coordinates.
        Eigen::Affine3d T_link_world = T_frame_world * T_frame_link.inverse();
        // Snap to the frame using IK.
        const moveit::core::JointModelGroup* jmg = state.getJointModelGroup(group);

        // std::cout << "group: " << group << std::endl;

        state.update();
        trajectory_msgs::JointTrajectoryPoint point;
        state.copyJointGroupPositions(jmg, point.positions);
        ROS_DEBUG_STREAM("pre-ik positions:\n" << point);

        geometry_msgs::Pose pose;
        tf::poseEigenToMsg(T_link_world, pose);
        state.setFromIK(jmg, pose, link);
        state.update();

        state.copyJointGroupPositions(jmg, point.positions);
        ROS_DEBUG_STREAM("post-ik positions:\n" << point);

        ROS_DEBUG_FUNCTION;
    }
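A minimal standalone sketch of the composition used above, assuming only Eigen (the pose values and names are made up for illustration): given the pose of a target frame in the world and the fixed pose of that frame relative to a link, the link pose in the world is recovered by right-multiplying with the inverse of the frame-to-link transform.

#include <Eigen/Geometry>
#include <cmath>
#include <iostream>

int main()
{
    // Hypothetical pose of the target frame in the world.
    Eigen::Affine3d T_frame_world = Eigen::Affine3d::Identity();
    T_frame_world.translation() << 1.0, 0.5, 0.2;
    T_frame_world.linear() =
        Eigen::AngleAxisd(M_PI / 4.0, Eigen::Vector3d::UnitZ()).toRotationMatrix();

    // Hypothetical fixed pose of that frame relative to the link.
    Eigen::Affine3d T_frame_link = Eigen::Affine3d::Identity();
    T_frame_link.translation() << 0.0, 0.0, 0.1;

    // Same composition as in snapStateToFrame(): back out the link pose in the world.
    Eigen::Affine3d T_link_world = T_frame_world * T_frame_link.inverse();
    std::cout << T_link_world.matrix() << std::endl;
    return 0;
}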
Code Example #2
//this is a pretty general function:
// goal contains a desired tool pose;
// path is planned from current joint state to some joint state that achieves desired tool pose
bool ArmMotionInterface::rt_arm_plan_path_current_to_goal_pose() {
    ROS_INFO("computing a cartesian trajectory to right-arm tool goal pose");
    //unpack the goal pose:
    goal_gripper_pose_right_ = cart_goal_.des_pose_gripper_right;
    
    goal_gripper_affine_right_= transformPoseToEigenAffine3d(goal_gripper_pose_right_.pose);
    ROS_INFO("tool frame goal: ");
    display_affine(goal_gripper_affine_right_);    
    goal_flange_affine_right_ = goal_gripper_affine_right_ * A_tool_wrt_flange_.inverse();
    ROS_INFO("flange goal");
    display_affine(goal_flange_affine_right_);
    Vectorq7x1 q_start;
    q_start = get_jspace_start_right_arm_(); // choose last cmd, or current joint angles
    path_is_valid_ = cartTrajPlanner_.cartesian_path_planner(q_start, goal_flange_affine_right_, optimal_path_);

    if (path_is_valid_) {
        baxter_traj_streamer_.stuff_trajectory(optimal_path_, des_trajectory_); //convert from vector of poses to trajectory message   
        computed_arrival_time_ = des_trajectory_.points.back().time_from_start.toSec();
        cart_result_.return_code = cwru_action::cwru_baxter_cart_moveResult::SUCCESS;
        cart_result_.computed_arrival_time = computed_arrival_time_;
        cart_move_as_.setSucceeded(cart_result_); 
    }
    else {
        cart_result_.return_code = cwru_action::cwru_baxter_cart_moveResult::RT_ARM_PATH_NOT_VALID;
        cart_result_.computed_arrival_time = -1.0; //impossible arrival time        
        cart_move_as_.setSucceeded(cart_result_); //the communication was a success, but not the computation 
    }   

    return path_is_valid_;    
}
Code Example #3
void Irp6pInverseKinematic::updateHook() {

  if (port_input_wrist_pose_.read(wrist_pose_) == RTT::NewData) {

    Eigen::Affine3d trans;
    tf::poseMsgToEigen(wrist_pose_, trans);

    port_current_joint_position_.read(local_current_joints_);

    inverse_kinematics_single_iteration(local_current_joints_, trans,
                                        &local_desired_joints_);

    port_output_joint_position_.write(local_desired_joints_);

  } else if (port_input_end_effector_pose_.read(end_effector_pose_)
      == RTT::NewData) {

    port_tool_.read(tool_msgs_);

    Eigen::Affine3d tool;
    Eigen::Affine3d trans;
    Eigen::Affine3d wrist_pose;
    tf::poseMsgToEigen(end_effector_pose_, trans);
    tf::poseMsgToEigen(tool_msgs_, tool);

    wrist_pose = trans * tool.inverse();

    port_current_joint_position_.read(local_current_joints_);

    inverse_kinematics_single_iteration(local_current_joints_, wrist_pose,
                                        &local_desired_joints_);

    port_output_joint_position_.write(local_desired_joints_);
  }
}
Code Example #4
template <typename PointT, typename NormalT> bool
cpu_tsdf::TSDFVolumeOctree::integrateCloud (
  const pcl::PointCloud<PointT> &cloud, 
  const pcl::PointCloud<NormalT> &normals,
  const Eigen::Affine3d &trans)
{
  Eigen::Affine3f trans_inv = trans.inverse ().cast<float> ();

  // First, sample a few points and force their containing voxels to split
  int px_step = 1;
  int nsplit = 0;
  for (size_t u = 0; u < cloud.width; u += px_step)
  {
    for (size_t v = 0; v < cloud.height; v += px_step)
    {
      const PointT &pt_surface_orig = cloud (u, v);
      if (pcl_isnan (pt_surface_orig.z))
        continue;
      // Look at surroundings
      int nstep = 0;
      Eigen::Vector3f ray = pt_surface_orig.getVector3fMap ().normalized ();
      for (int perm = 0; perm < num_random_splits_; perm++)
      {
        // Get containing voxels
        PointT pt_trans; 
        float scale = (float)rand () / (float)RAND_MAX * 0.03;
        Eigen::Vector3f noise = Eigen::Vector3f::Random ().normalized () * scale;
        if (perm == 0) noise *= 0;
        pt_trans.getVector3fMap () = trans.cast<float> () * (pt_surface_orig.getVector3fMap ()+ noise);
        OctreeNode* voxel = octree_->getContainingVoxel (pt_trans.x, pt_trans.y, pt_trans.z);
        if (voxel != NULL)
        {
          while (voxel->getMinSize () > xsize_ / xres_)
          {
            nsplit++;
            voxel->split ();
            voxel = voxel->getContainingVoxel (pt_trans.x, pt_trans.y, pt_trans.z);
            
          }
        }
      }
    }
  }
  
  // Do Frustum Culling to get rid of unseen voxels
  std::vector<cpu_tsdf::OctreeNode::Ptr> voxels_culled;
  getFrustumCulledVoxels(trans, voxels_culled);
#pragma omp parallel for
  for (size_t i = 0; i < voxels_culled.size (); i++)
  {
    updateVoxel (voxels_culled[i], cloud, normals, trans_inv);
  }
  // Cloud is no longer empty
  is_empty_ = false;
  return (true);
}
Code Example #5
void ICPLocalization::addScanLineToPointCloud(Eigen::Affine3d body2Odo, Eigen::Affine3d body2World, Eigen::Affine3d laser2Body, const ::base::samples::LaserScan &scan_reading)
{

    if(scansWithTransforms.size() == 0) 
    {
	addLaserScan(body2Odo,body2World,laser2Body,scan_reading); 
	return;
    }
    
    /*
    double max_rotation =  1.5 * conf_point_cloud.lines_per_point_cloud * conf_point_cloud.min_rotation_for_new_line;
    double max_translation = 1.5 * conf_point_cloud.lines_per_point_cloud * conf_point_cloud.min_distance_travelled_for_new_line;
    double max_head_movement = 1.5 * conf_point_cloud.lines_per_point_cloud * conf_point_cloud.min_rotation_head_for_new_line; 
    */
    bool add_laser_scan = true; 
    for( uint i = 0; i < scansWithTransforms.size(); i++) 
    {
	Eigen::Affine3d difference( body2Odo.inverse() * scansWithTransforms.at(i).body2Odo );

	Vector3d Ylaser2Body = laser2Body * Vector3d::UnitY() - laser2Body.translation();
	Ylaser2Body.normalize();
	Vector3d YlastLaser2Body = scansWithTransforms.back().laser2Body * Vector3d::UnitY() - scansWithTransforms.at(i).laser2Body.translation();
	YlastLaser2Body.normalize();
	
	double laserChange = acos(Ylaser2Body.dot(YlastLaser2Body));
	double translation = difference.translation().norm(); 
	double rotation = fabs(Eigen::AngleAxisd( difference.rotation() ).angle()); 
	
	add_laser_scan = add_laser_scan && ( rotation > conf_point_cloud.min_rotation_for_new_line || translation > conf_point_cloud.min_distance_travelled_for_new_line || laserChange >  conf_point_cloud.min_rotation_head_for_new_line);
	 
	// if the distance is too big, the old laser scan is no longer consistent. 
// 	if( rotation > max_rotation|| translation > max_translation || laserChange > max_head_movement)
// 	{
// 	    scansWithTransforms.erase(scansWithTransforms.begin() + i); 
// 	    scanCount--; 
// 	    std::cout << " IcpLocalization.cpp erasing old laser scan " << std::endl; 
// 	}

/*	std::cout <<" add new scan " << add_laser_scan << std::endl; 
	std::cout << "\t translation " << (translation > conf_point_cloud.min_distance_travelled_for_new_line)<< " "  << translation << " > " << conf_point_cloud.min_distance_travelled_for_new_line << std::endl;
	std::cout << "\t rotation " << (rotation > conf_point_cloud.min_rotation_for_new_line) << " "  << rotation * 180 / M_PI << " > " << conf_point_cloud.min_rotation_for_new_line * 180 / M_PI << std::endl;
	std::cout << "\t head " << (laserChange >  conf_point_cloud.min_rotation_head_for_new_line) << "  "<< laserChange * 180 / M_PI << " > " << conf_point_cloud.min_rotation_head_for_new_line * 180 / M_PI<< std::endl; */
	
	if (!add_laser_scan) 
	    break; 
    }
    
    if ( add_laser_scan )
    {
	addLaserScan(body2Odo,body2World,laser2Body,scan_reading); 
    }
    
    return; 
  
}
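The 'laserChange' test above measures the angle between the laser Y axes of two mounts; a minimal sketch of that quantity using only Eigen (the helper name is made up), with the dot product clamped before acos to avoid NaN from rounding:

#include <Eigen/Geometry>
#include <algorithm>
#include <cmath>

// Angle between the Y axes of two frames; equivalent to transforming UnitY()
// and subtracting the translation, as done in addScanLineToPointCloud().
double yAxisAngle(const Eigen::Affine3d& a, const Eigen::Affine3d& b)
{
    Eigen::Vector3d ya = (a.linear() * Eigen::Vector3d::UnitY()).normalized();
    Eigen::Vector3d yb = (b.linear() * Eigen::Vector3d::UnitY()).normalized();
    double c = std::max(-1.0, std::min(1.0, ya.dot(yb)));
    return std::acos(c);
}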
Code Example #6
void PolygonArrayTransformer::transformPolygon(const Eigen::Affine3d& transform,
        const geometry_msgs::PolygonStamped& polygon,
        geometry_msgs::PolygonStamped& result)
{
    result.header = polygon.header;
    result.header.frame_id = frame_id_;
    for (size_t i = 0; i < polygon.polygon.points.size(); i++) {
        Eigen::Vector4d point;
        point[0] = polygon.polygon.points[i].x;
        point[1] = polygon.polygon.points[i].y;
        point[2] = polygon.polygon.points[i].z;
        point[3] = 1;             // homogeneous
        Eigen::Vector4d transformed_point_eigen = transform.inverse() * point;
        geometry_msgs::Point32 transformed_point;
        transformed_point.x = transformed_point_eigen[0];
        transformed_point.y = transformed_point_eigen[1];
        transformed_point.z = transformed_point_eigen[2];
        result.polygon.points.push_back(transformed_point);
    }
}
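For reference, Eigen's Affine3d already applies the full homogeneous transform when multiplied with a Vector3d, so the explicit 4-vector above is optional. A minimal sketch under that assumption (the helper name is hypothetical):

#include <Eigen/Geometry>

// Equivalent per-point operation: rotate and translate in one multiplication.
Eigen::Vector3d transformPointIntoFrame(const Eigen::Affine3d& transform,
                                        const Eigen::Vector3d& point)
{
    return transform.inverse() * point;  // same result as transform.inverse() * (x, y, z, 1)
}

When looping over many points, as transformPolygon() does, it may also be worth computing transform.inverse() once outside the loop instead of once per point.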
Code Example #7
bool ArmMotionInterface::unpack_goal_pose(void) {
    geometry_msgs::PoseStamped poseStamped_goal = request_.poseStamped_goal;
    Eigen::Vector3d goal_origin;
    goal_origin[0] = poseStamped_goal.pose.position.x;
    goal_origin[1] = poseStamped_goal.pose.position.y;
    goal_origin[2] = poseStamped_goal.pose.position.z;
    a_tool_end_.translation() = goal_origin;

    Eigen::Quaterniond quat;
    quat.x() = poseStamped_goal.pose.orientation.x;
    quat.y() = poseStamped_goal.pose.orientation.y;
    quat.z() = poseStamped_goal.pose.orientation.z;
    quat.w() = poseStamped_goal.pose.orientation.w;
    Eigen::Matrix3d R_goal(quat);
    a_tool_end_.linear() = R_goal;
    a_flange_end_ = a_tool_end_ * A_tool_wrt_flange_.inverse();


    return true;
}
Code Example #8
 bool CaptureStereoSynchronizer::checkNearPose(
   const geometry_msgs::Pose& new_pose)
 {
   Eigen::Affine3d new_affine;
   tf::poseMsgToEigen(new_pose, new_affine);
   for (size_t i = 0; i < poses_.size(); i++) {
     // convert pose into eigen
     Eigen::Affine3d affine;
     tf::poseMsgToEigen(poses_[i], affine);
     // compute difference
     Eigen::Affine3d diff = affine.inverse() * new_affine;
     double positional_difference = diff.translation().norm();
     if (positional_difference < positional_bin_size_) {
       Eigen::AngleAxisd rotational_difference(diff.rotation());
       if (rotational_difference.angle() < rotational_bin_size_) {
         return true;
       }
     }
   }
   return false;
 }
Code Example #9
void inPointCallback(const geometry_msgs::Point& pt_msg) {
    Eigen::Affine3d des_gripper1_wrt_base;

    g_des_point(0) = pt_msg.x;
    g_des_point(1) = pt_msg.y;
    g_des_point(2) = pt_msg.z;
    cout << "received des point = " << g_des_point.transpose() << endl;
    g_des_gripper_affine1.translation() = g_des_point;
    //convert this to base coords:
    g_des_gripper1_wrt_base = g_affine_lcamera_to_psm_one.inverse() * g_des_gripper_affine1;
    //try computing IK:
    if (g_ik_solver.ik_solve(g_des_gripper1_wrt_base)) {
        ROS_INFO("found IK soln");
        g_got_new_pose = true;
        g_q_vec1_start = g_q_vec1_goal;
        g_q_vec1_goal = g_ik_solver.get_soln();
        cout << g_q_vec1_goal.transpose() << endl;
        g_trajectory_point1 = g_trajectory_point2; //former goal is new start

        for (int i = 0; i < 6; i++) {
            g_trajectory_point2.positions[i] = g_q_vec1_goal[i]; //leave psm2 angles alone; just update psm1 goal angles
        }
        //gripper_affines_wrt_camera.push_back(affine_gripper_frame_wrt_camera_frame_);
        //    des_trajectory.joint_names.push_back("right_s0"); //yada-yada should fill in names
        g_des_trajectory.header.stamp = ros::Time::now();

        g_des_trajectory.points[0] = g_des_trajectory.points[1]; //former goal is new start
        g_des_trajectory.points[1] = g_trajectory_point2;



        // copy traj to goal:
        g_goal.trajectory = g_des_trajectory;
    } else {
        ROS_WARN("NO IK SOLN FOUND");
    }

}
Code Example #10
// static BITBOTS_INLINE Eigen::Matrix4d inverse_chain_matrix(const Eigen::Matrix4d chain_matrix) {
//     Eigen::Matrix4d inverse;
//     inverse.block<3, 3>(0, 0) = chain_matrix.block<3, 3>(0, 0).inverse();
//     inverse.col(3).head<3>() = -(inverse.block<3, 3>(0, 0) * chain_matrix.col(3).head<3>());
//     inverse.row(3) = Eigen::RowVector4d(0, 0, 0, 1);
//     return inverse;
// }
static BITBOTS_INLINE Eigen::Affine3d inverse_chain_matrix(const Eigen::Affine3d& chain_matrix) {
    return chain_matrix.inverse();
}
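The commented-out version builds the inverse block-wise; for a rigid transform the closed form is R^T for the rotation and -R^T * t for the translation. A small sketch of that closed form (assuming the chain matrix really is rigid), which for such inputs matches what Affine3d::inverse() returns:

#include <Eigen/Geometry>

// Closed-form inverse of a rigid transform: [R | t]^-1 = [R^T | -R^T t].
static Eigen::Affine3d rigid_inverse(const Eigen::Affine3d& chain_matrix)
{
    Eigen::Affine3d inverse = Eigen::Affine3d::Identity();
    inverse.linear() = chain_matrix.linear().transpose();
    inverse.translation() = -(inverse.linear() * chain_matrix.translation());
    return inverse;
    // For rigid inputs this agrees with chain_matrix.inverse() and with
    // chain_matrix.inverse(Eigen::Isometry), which uses the transpose instead
    // of a general 3x3 inverse.
}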
Code Example #11
/**
 * This is where the localization is done... not really callback in the offline version
 */ 
void callback(const sensor_msgs::LaserScan &scan, tf::Transform odo_pose, tf::Transform state_pose)
{
	static int counter = 0;
	counter++;
	
	static tf::TransformListener tf_listener;
	
	double looptime = TT.Tac();
	TT.Tic();
	fprintf(stderr,"Lt( %.1lfms %.1lfHz seq:%d) -",looptime*1000,1.0/looptime,scan.header.seq);
	
	if(has_sensor_offset_set == false) return;
	
	double gx,gy,gyaw,x,y,yaw;
	
	///Get the ground truth
	gyaw = tf::getYaw(state_pose.getRotation());  
	gx = state_pose.getOrigin().x();
	gy = state_pose.getOrigin().y();
	
	///Get the odometry
	yaw = tf::getYaw(odo_pose.getRotation());  
	x = odo_pose.getOrigin().x();
	y = odo_pose.getOrigin().y();
		
	mrpt::utils::CTicTac	tictac;
	tictac.Tic();

	int N =(scan.angle_max - scan.angle_min)/scan.angle_increment; ///< number of scan lines
	
	/////
	/// Pose conversions
	////
	
	Eigen::Affine3d T = getAsAffine(x,y,yaw);
	Eigen::Affine3d Tgt = getAsAffine(gx,gy,gyaw);
	
	/**
	 * We are now using the information from the ground truth to initialize the filter
	 */ 
	if(isFirstLoad){
		fprintf(stderr,"Initializing to (%lf, %lf, %lf)\n",gx,gy,gyaw);
		///Initialize the filter with 1m^2 variance in position and 20deg in heading
		ndtmcl->initializeFilter(gx, gy,gyaw,1.0, 1.0, 20.0*M_PI/180.0, 450);
		Told = T;
		Todo = Tgt;
	}
	
	///Compute the differential motion between two time steps 
	Eigen::Affine3d Tmotion = Told.inverse() * T;
	Todo = Todo*Tmotion;
	Told = T;
	
	///Get the laser pose with respect to the base
	float dy =offy;
	float dx = offx;
	float alpha = atan2(dy,dx);
	float L = sqrt(dx*dx+dy*dy);
	
	///Laser pose in base frame
	float lpx = L * cos(alpha);
	float lpy = L * sin(alpha);
	float lpa = offa;
	
	
	
	///Laser scan to PointCloud transformed with respect to the base
	pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);	
	for (int j=0;j<N;j++){
		double r  = scan.ranges[j];
		if(r>=scan.range_min && r<scan.range_max && r>0.3 && r<20.0){
			double a  = scan.angle_min + j*scan.angle_increment;
			pcl::PointXYZ pt;
			pt.x = r*cos(a+lpa)+lpx;
			pt.y = r*sin(a+lpa)+lpy;
			pt.z = 0.1+0.02 * (double)rand()/(double)RAND_MAX;
			cloud->push_back(pt);
		}
	}
	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// Now we have the differential motion and pointcloud -- Lets do MCL
	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	ndtmcl->updateAndPredict(Tmotion, *cloud); ///< does the prediction, update and resampling steps (see ndt_mcl.hpp)
	
	Eigen::Vector3d dm = ndtmcl->getMean(); ///< Maximum a posteriori estimate of the pose
	Eigen::Matrix3d cov = ndtmcl->pf.getDistributionVariances(); ///distribution variances
	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	double Time = tictac.Tac();
	fprintf(stderr,"Time elapsed %.1lfms (%lf %lf %lf) \n",Time*1000,dm[0],dm[1],dm[2]);
	isFirstLoad = false;
	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	
	//If visualization desired
#ifdef USE_VISUALIZATION_DEBUG
	///The sensor pose in the global frame for visualization
	Eigen::Vector3d origin(dm[0] + L * cos(dm[2]+alpha),dm[1] + L * sin(dm[2]+alpha),0.1);

	Eigen::Affine3d ppos = getAsAffine(dm[0],dm[1],dm[2]);
	
	//lslgeneric::transformPointCloudInPlace(Tgt, *cloud);
	lslgeneric::transformPointCloudInPlace(ppos, *cloud);
	mrpt::opengl::COpenGLScenePtr &scene = win3D.get3DSceneAndLock();	
	win3D.setCameraPointingToPoint(gx,gy,1);
	if(counter%2000==0) gl_points->clear();
	scene->clear();
	scene->insert(plane);
	
	addMap2Scene(ndtmcl->map, origin, scene);
	addPoseCovariance(dm[0],dm[1],cov,scene);
	addScanToScene(scene, origin, cloud);
	addParticlesToWorld(ndtmcl->pf,Tgt.translation(),dm, Todo.translation());
	scene->insert(gl_points);
	scene->insert(gl_particles);
	win3D.unlockAccess3DScene();
	win3D.repaint();

	if (win3D.keyHit())
	{
		mrpt::gui::mrptKeyModifier kmods;
		int key = win3D.getPushedKey(&kmods);
	}
#endif
	
	

}
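The differential-motion update above is the standard pattern Tmotion = Told^-1 * T; composing the previous pose with Tmotion reproduces the new pose. A minimal sketch of just that step (plain Eigen, arbitrary values):

#include <Eigen/Geometry>
#include <iostream>

int main()
{
    Eigen::Affine3d Told = Eigen::Affine3d::Identity();
    Told.translation() << 1.0, 0.0, 0.0;

    Eigen::Affine3d T = Eigen::Affine3d::Identity();
    T.translation() << 1.2, 0.1, 0.0;
    T.linear() = Eigen::AngleAxisd(0.05, Eigen::Vector3d::UnitZ()).toRotationMatrix();

    // Relative motion between the two time steps, expressed in the old frame.
    Eigen::Affine3d Tmotion = Told.inverse() * T;

    // Composing the old pose with the motion recovers the new pose (difference ~ zero).
    std::cout << (Told * Tmotion).matrix() - T.matrix() << std::endl;
    return 0;
}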
Code Example #12
FusionResults RegistrationRandom::getTransform(Eigen::MatrixXd guess) {

    unsigned int s_nr_data = src->data.cols();//std::min(int(src->data.cols()),int(500000));
    unsigned int d_nr_data = dst->data.cols();
    refinement->allow_regularization = true;
    //printf("s_nr_data: %i d_nr_data: %i\n",s_nr_data,d_nr_data);

    int stepy = std::max(1,int(d_nr_data)/100000);

    Eigen::Matrix<double, 3, Eigen::Dynamic> Y;
    Eigen::Matrix<double, 3, Eigen::Dynamic> N;
    Y.resize(Eigen::NoChange,d_nr_data/stepy);
    N.resize(Eigen::NoChange,d_nr_data/stepy);
    unsigned int ycols = Y.cols();

    for(unsigned int i = 0; i < d_nr_data/stepy; i++) {
        Y(0,i)	= dst->data(0,i*stepy);
        Y(1,i)	= dst->data(1,i*stepy);
        Y(2,i)	= dst->data(2,i*stepy);
        N(0,i)	= dst->normals(0,i*stepy);
        N(1,i)	= dst->normals(1,i*stepy);
        N(2,i)	= dst->normals(2,i*stepy);
    }

    /// Build kd-tree
    //nanoflann::KDTreeAdaptor<Eigen::Matrix3Xd, 3, nanoflann::metric_L2_Simple>                  kdtree(Y);

    Eigen::VectorXd DST_INORMATION = Eigen::VectorXd::Zero(Y.cols());
    for(unsigned int i = 0; i < d_nr_data/stepy; i++) {
        DST_INORMATION(i) = dst->information(0,i*stepy);
    }

    double s_mean_x = 0;
    double s_mean_y = 0;
    double s_mean_z = 0;
    for(unsigned int i = 0; i < s_nr_data; i++) {
        s_mean_x += src->data(0,i);
        s_mean_y += src->data(1,i);
        s_mean_z += src->data(2,i);
    }
    s_mean_x /= double(s_nr_data);
    s_mean_y /= double(s_nr_data);
    s_mean_z /= double(s_nr_data);

    double d_mean_x = 0;
    double d_mean_y = 0;
    double d_mean_z = 0;
    for(unsigned int i = 0; i < d_nr_data; i++) {
        d_mean_x += dst->data(0,i);
        d_mean_y += dst->data(1,i);
        d_mean_z += dst->data(2,i);
    }
    d_mean_x /= double(d_nr_data);
    d_mean_y /= double(d_nr_data);
    d_mean_z /= double(d_nr_data);

    double stop		= 0.00001;

    Eigen::Affine3d Ymean = Eigen::Affine3d::Identity();
    Ymean(0,3) = d_mean_x;
    Ymean(1,3) = d_mean_y;
    Ymean(2,3) = d_mean_z;

    Eigen::Affine3d Xmean = Eigen::Affine3d::Identity();
    Xmean(0,3) = s_mean_x;
    Xmean(1,3) = s_mean_y;
    Xmean(2,3) = s_mean_z;

    std::vector< Eigen::Matrix<double, 3, Eigen::Dynamic> > all_X;
    std::vector< Eigen::Affine3d > all_res;
    std::vector< int > count_X;
    std::vector< float > score_X;
    std::vector< std::vector< Eigen::VectorXd > > all_starts;
    int stepxsmall = std::max(1,int(s_nr_data)/250);
    Eigen::VectorXd Wsmall (s_nr_data/stepxsmall);
    for(unsigned int i = 0; i < s_nr_data/stepxsmall; i++) {
        Wsmall(i) = src->information(0,i*stepxsmall);
    }


    double sumtime = 0;
    double sumtimeSum = 0;
    double sumtimeOK = 0;
    int r = 0;

    refinement->viewer = viewer;
    refinement->visualizationLvl = 0;
    //for(int r = 0; r < 1000; r++){
//	while(true){
//		double rx = 2.0*M_PI*0.0001*double(rand()%10000);
//		double ry = 2.0*M_PI*0.0001*double(rand()%10000);
//		double rz = 2.0*M_PI*0.0001*double(rand()%10000);
    //double stop = 0;
    double step = 0.1+2.0*M_PI/5;
    for(double rx = 0; rx < 2.0*M_PI; rx += step) {
        for(double ry = 0; ry < 2.0*M_PI; ry += step)
            for(double rz = 0; rz < 2.0*M_PI; rz += step) {
                printf("rx: %f ry: %f rz: %f\n",rx,ry,rz);

                double start = getTime();

                double meantime = 999999999999;
                if(r != 0) {
                    meantime = sumtimeSum/double(sumtimeOK+1.0);
                }
                refinement->maxtime = std::min(0.5,3*meantime);

                Eigen::VectorXd startparam = Eigen::VectorXd(3);
                startparam(0) = rx;
                startparam(1) = ry;
                startparam(2) = rz;

                Eigen::Affine3d randomrot = Eigen::Affine3d::Identity();

                randomrot =	Eigen::AngleAxisd(rx, Eigen::Vector3d::UnitX()) *
                            Eigen::AngleAxisd(ry, Eigen::Vector3d::UnitY()) *
                            Eigen::AngleAxisd(rz, Eigen::Vector3d::UnitZ());

                Eigen::Affine3d current_guess = Ymean*randomrot*Xmean.inverse();//*Ymean;
                refinement->target_points = 250;
                FusionResults fr = refinement->getTransform(current_guess.matrix());
//fr.timeout = timestopped;
                double stoptime = getTime();
                sumtime += stoptime-start;

                if(!fr.timeout) {
                    sumtimeSum += stoptime-start;
                    sumtimeOK++;
                }

                //printf("sumtime: %f\n",sumtime);
                stop = fr.score;
                Eigen::Matrix4d m = fr.guess;
                current_guess = m;//fr.guess;
                float m00 = current_guess(0,0);
                float m01 = current_guess(0,1);
                float m02 = current_guess(0,2);
                float m03 = current_guess(0,3);
                float m10 = current_guess(1,0);
                float m11 = current_guess(1,1);
                float m12 = current_guess(1,2);
                float m13 = current_guess(1,3);
                float m20 = current_guess(2,0);
                float m21 = current_guess(2,1);
                float m22 = current_guess(2,2);
                float m23 = current_guess(2,3);

                Eigen::Matrix<double, 3, Eigen::Dynamic> Xsmall;
                Xsmall.resize(Eigen::NoChange,s_nr_data/stepxsmall);
                for(unsigned int i = 0; i < s_nr_data/stepxsmall; i++) {
                    float x		= src->data(0,i*stepxsmall);
                    float y		= src->data(1,i*stepxsmall);
                    float z		= src->data(2,i*stepxsmall);
                    Xsmall(0,i)	= m00*x + m01*y + m02*z + m03;
                    Xsmall(1,i)	= m10*x + m11*y + m12*z + m13;
                    Xsmall(2,i)	= m20*x + m21*y + m22*z + m23;
                }

                bool exists = false;
                for(unsigned int ax = 0; ax < all_X.size(); ax++) {
                    Eigen::Matrix<double, 3, Eigen::Dynamic> axX = all_X[ax];
                    Eigen::Affine3d axT = all_res[ax];
                    double diff = (Xsmall-axX).colwise().norm().mean();
                    if(diff < 20*stop) {
                        count_X[ax]++;
                        all_starts[ax].push_back(startparam);
                        int count = count_X[ax];
                        float score = score_X[ax];
                        std::vector< Eigen::VectorXd > starts = all_starts[ax];
                        for(int bx = ax-1; bx >= 0; bx--) {
                            if(count_X[bx] < count_X[bx+1]) {
                                count_X[bx+1]		= count_X[bx];
                                score_X[bx+1]		= score_X[bx];
                                all_X[bx+1]			= all_X[bx];
                                all_starts[bx+1]	= all_starts[bx];
                                all_res[bx+1]		= all_res[bx];


                                all_X[bx] = axX;
                                count_X[bx] = count;
                                score_X[bx] = score;
                                all_starts[bx] = starts;
                                all_res[bx] = axT;
                            } else {
                                break;
                            }
                        }
                        exists = true;
                        break;
                    }
                }



                if(!exists) {
                    all_X.push_back(Xsmall);
                    count_X.push_back(1);
                    score_X.push_back(stop);
                    all_starts.push_back(std::vector< Eigen::VectorXd >());
                    all_starts.back().push_back(startparam);
                    all_res.push_back(current_guess);
                }
                r++;
            }
    }


    FusionResults fr = FusionResults();
    refinement->allow_regularization = false;

    int tpbef = refinement->target_points;
    refinement->target_points = 2000;
    for(unsigned int ax = 0; ax < all_X.size(); ax++) {
        Eigen::Matrix4d np = all_res[ax].matrix();
        refinement->visualizationLvl = visualizationLvl;
        if(ax < 25) {
            double start = getTime();
            FusionResults fr1 = refinement->getTransform(np);
            double stop = getTime();
            np = fr1.guess;
        }
        refinement->visualizationLvl = 0;
        fr.candidates.push_back(np);
        fr.counts.push_back(count_X[ax]);
        fr.scores.push_back(1.0/score_X[ax]);
    }
    refinement->target_points = tpbef;
    return fr;
}
Code Example #13
double transformationdiff(Eigen::Affine3d & A, Eigen::Affine3d & B, double rotationweight) {
    Eigen::Affine3d C = A.inverse()*B;
    double r = fabs(1-C(0,0))+fabs(C(0,1))+fabs(C(0,2))  +  fabs(C(1,0))+fabs(1-C(1,1))+fabs(C(1,2))  +  fabs(C(2,0))+fabs(C(2,1))+fabs(1-C(2,2));
    double t = sqrt(C(0,3)*C(0,3)+C(1,3)*C(1,3)+C(2,3)*C(2,3));
    return r*rotationweight+t;
}
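A short usage sketch for transformationdiff() (the poses here are made up): the returned value mixes the element-wise rotation residual against the identity, scaled by rotationweight, with the Euclidean translation distance.

#include <Eigen/Geometry>
#include <iostream>

// Assumes transformationdiff() from above is visible in this translation unit.
int main()
{
    Eigen::Affine3d A = Eigen::Affine3d::Identity();
    Eigen::Affine3d B = Eigen::Affine3d::Identity();
    B.translation() << 0.1, 0.0, 0.0;
    B.linear() = Eigen::AngleAxisd(0.2, Eigen::Vector3d::UnitZ()).toRotationMatrix();

    // Pure translation contributes 0.1; the rotation residual is weighted by 5.0.
    std::cout << transformationdiff(A, B, 5.0) << std::endl;
    return 0;
}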
Code Example #14
    void callback(const PointPlusConstPtr& robot_point, const PointPlusConstPtr& mass_point)
	{
	    ROS_DEBUG("Synchronized Callback triggered");
	    puppeteer_msgs::PointPlus r_pt, m_pt;

	    // if not in run or calibrate, just exit
	    if (run_system_logic())
		return;

	    // let's first correct for the radii of the objects:
	    r_pt = *robot_point;
	    r_pt = correct_vals(r_pt, DEFAULT_ROBOT_RADIUS);
	    m_pt = *mass_point;
	    m_pt = correct_vals(m_pt, DEFAULT_MASS_RADIUS);

	    if (calibrated_flag == false)
	    {
		if (calibrate_count == 0)
		{
		    calibrate_count++;
		    robot_cal_pos << 0, 0, 0;
		    mass_cal_pos << 0, 0, 0;
		    robot_start_pos << 0, 0, 0;
		    mass_start_pos << 0, 0, 0;
		    return;
		}
		else if (calibrate_count <= NUM_CALIBRATES)
		{
		    ROS_INFO_THROTTLE(1, "Calibrating...");
		    if (!m_pt.error && !r_pt.error)
		    {
			// then we got both objects successfully:
			robot_cal_pos(0) += r_pt.x;
			robot_cal_pos(1) += r_pt.y;
			robot_cal_pos(2) += r_pt.z;
			mass_cal_pos(0) += m_pt.x;
			mass_cal_pos(1) += m_pt.y;
			mass_cal_pos(2) += m_pt.z;
			calibrate_count++;
		    }
		    return;
		}
		else
		{
		    ROS_INFO("Calibration completed successfully!");
		    // if here, calibration is done!
		    // first find averages:
		    robot_cal_pos = (robot_cal_pos/((double) NUM_CALIBRATES));
		    mass_cal_pos = (mass_cal_pos/((double) NUM_CALIBRATES));
		    // now set start positions:
		    mass_start_pos << q0.xm, q0.ym, 0;
		    robot_start_pos << q0.xr, H0, 0;
		    // now get the transform:
		    mass_cal_pos -= mass_start_pos;
		    robot_cal_pos -= robot_start_pos;
		    // let's check if there is an error here:
		    ////////////////////////
                    // ERROR CHECK	  //
                    ////////////////////////
		    // if no error:
		    // cal_pos = (mass_cal_pos + robot_cal_pos)/2.0;
		    cal_pos = mass_cal_pos;

		    // reset vars:
		    calibrate_count = 0;
		    calibrated_flag = true;
		}
	    }
	    
	    // send global frame transforms:
	    send_global_transforms(m_pt.header.stamp);

	    // transform both of the points into the
	    // optimization_frame, build the output message, and
	    // publish
	    tf::Transform transform;
	    PlanarSystemConfig qmeas;
	    Eigen::Affine3d gwo;
	    Eigen::Vector3d robot_trans, mass_trans;
	    transform.setOrigin(tf::Vector3(cal_pos(0),
					    cal_pos(1), cal_pos(2)));
	    transform.setRotation(tf::Quaternion(0,0,0,1));
	    tf::TransformTFToEigen(transform, gwo);
	    gwo = gwo.inverse();
	    // transform robot:
	    robot_trans << r_pt.x, r_pt.y, r_pt.z;
	    robot_trans = gwo*robot_trans;
	    // transform mass:
	    mass_trans << m_pt.x, m_pt.y, m_pt.z;
	    mass_trans = gwo*mass_trans;
	    // calculate string length:
	    mass_trans(2) = 0; robot_trans(2) = 0;
	    double rad = (mass_trans - robot_trans).norm();
	    qmeas.xm = mass_trans(0);
	    qmeas.ym = mass_trans(1);
	    qmeas.xr = robot_trans(0);
	    qmeas.r = rad;
	    qmeas.mass_err = m_pt.error;
	    qmeas.robot_err = r_pt.error;
	    qmeas.header.stamp = m_pt.header.stamp;
	    qmeas.header.frame_id = "optimization_frame";
	    config_pub.publish(qmeas);
		    
	    return;
	}
Code Example #15
void CartMoveActionServer::executeCB(const actionlib::SimpleActionServer<cwru_action::cart_moveAction>::GoalConstPtr& goal) {

    ROS_INFO("in executeCB of CartMoveActionServer");
   cart_result_.err_code=0;
   cart_move_as_.isActive();
   //unpack the necessary info:
   gripper_ang1_ = goal->gripper_jaw_angle1;
   gripper_ang2_ = goal->gripper_jaw_angle2;
   arrival_time_ = goal->move_time;
   // interpret the desired gripper poses:
   geometry_msgs::PoseStamped des_pose_gripper1 = goal->des_pose_gripper1;
   geometry_msgs::PoseStamped des_pose_gripper2 = goal->des_pose_gripper2;
   // convert the above to affine objects:
   des_gripper1_affine_wrt_lcamera_ = transformPoseToEigenAffine3d(des_pose_gripper1.pose);
   cout<<"gripper1 desired pose;  "<<endl;
   cout<<des_gripper1_affine_wrt_lcamera_.linear()<<endl;
   cout<<"origin: "<<des_gripper1_affine_wrt_lcamera_.translation().transpose()<<endl;

   des_gripper2_affine_wrt_lcamera_ = transformPoseToEigenAffine3d(des_pose_gripper2.pose);
   cout<<"gripper2 desired pose;  "<<endl;
   cout<<des_gripper2_affine_wrt_lcamera_.linear()<<endl;
   cout<<"origin: "<<des_gripper2_affine_wrt_lcamera_.translation().transpose()<<endl;

   //do IK to convert these to joint angles:
    //Eigen::VectorXd q_vec1,q_vec2;
    Vectorq7x1 q_vec1,q_vec2;
    q_vec1.resize(7);
    q_vec2.resize(7);

    
    trajectory_msgs::JointTrajectory des_trajectory; // an empty trajectory 
    des_trajectory.points.clear(); // can clear components, but not entire trajectory_msgs
    des_trajectory.joint_names.clear(); //could put joint names in...but I assume a fixed order and fixed size, so this is unnecessary
    // if using wsn's trajectory streamer action server
    des_trajectory.header.stamp = ros::Time::now();

    trajectory_msgs::JointTrajectoryPoint trajectory_point; //,trajectory_point2; 
    trajectory_point.positions.resize(14);
    
    ROS_INFO("\n");
    ROS_INFO("stored previous command to gripper one: ");
    cout<<gripper1_affine_last_commanded_pose_.linear()<<endl;
    cout<<"origin: "<<gripper1_affine_last_commanded_pose_.translation().transpose()<<endl;
   

    // first, reiterate previous command:
    // this could be easier, if saved previous joint-space trajectory point...
    des_gripper_affine1_ = affine_lcamera_to_psm_one_.inverse()*gripper1_affine_last_commanded_pose_; //previous pose
    ik_solver_.ik_solve(des_gripper_affine1_); //convert desired pose into equiv joint displacements
    q_vec1 = ik_solver_.get_soln(); 
    q_vec1(6) = last_gripper_ang1_; // include desired gripper opening angle
    
    
    des_gripper_affine2_ = affine_lcamera_to_psm_two_.inverse()*gripper2_affine_last_commanded_pose_; //previous pose
    ik_solver_.ik_solve(des_gripper_affine2_); //convert desired pose into equiv joint displacements
    q_vec2 = ik_solver_.get_soln(); 
    cout<<"q_vec1 of stored pose: "<<endl;
    for (int i=0;i<6;i++) {
        cout<<q_vec1[i]<<", ";
    }
    cout<<endl;
    q_vec2(6) = last_gripper_ang2_; // include desired gripper opening angle
    
       for (int i=0;i<7;i++) {
            trajectory_point.positions[i] = q_vec1(i);
            trajectory_point.positions[i+7] = q_vec2(i);  
        }
    cout<<"start traj pt: "<<endl;
    for (int i=0;i<14;i++) {
        cout<<trajectory_point.positions[i]<<", ";
    }
    cout<<endl;
      trajectory_point.time_from_start = ros::Duration(0.0); // start time set to 0
    // PUSH IN THE START POINT:
      des_trajectory.points.push_back(trajectory_point);            

    // compute and append the goal point, in joint space trajectory:
    des_gripper_affine1_ = affine_lcamera_to_psm_one_.inverse()*des_gripper1_affine_wrt_lcamera_;
    ROS_INFO("desired gripper one location in base frame: ");
    cout<<"gripper1 desired pose;  "<<endl;
    cout<<des_gripper_affine1_.linear()<<endl;
    cout<<"origin: "<<des_gripper_affine1_.translation().transpose()<<endl;

    ik_solver_.ik_solve(des_gripper_affine1_); //convert desired pose into equiv joint displacements
    q_vec1 = ik_solver_.get_soln(); 
    q_vec1(6) = gripper_ang1_; // include desired gripper opening angle

    des_gripper_affine2_ = affine_lcamera_to_psm_two_.inverse()*des_gripper2_affine_wrt_lcamera_;
    ik_solver_.ik_solve(des_gripper_affine2_); //convert desired pose into equiv joint displacements
    q_vec2 = ik_solver_.get_soln();  
    cout<<"q_vec1 of goal pose: "<<endl;
    for (int i=0;i<6;i++) {
        cout<<q_vec1[i]<<", ";
    }
    cout<<endl;
    q_vec2(6) = gripper_ang2_;
        for (int i=0;i<7;i++) {
            trajectory_point.positions[i] = q_vec1(i);
            trajectory_point.positions[i+7] = q_vec2(i);  
        }
      trajectory_point.time_from_start = ros::Duration(arrival_time_);
   cout<<"goal traj pt: "<<endl;
    for (int i=0;i<14;i++) {
        cout<<trajectory_point.positions[i]<<", ";
    }
    cout<<endl;
      des_trajectory.points.push_back(trajectory_point);

    js_goal_.trajectory = des_trajectory;

    // Need boost::bind to pass in the 'this' pointer
  // see example: http://library.isr.ist.utl.pt/docs/roswiki/actionlib_tutorials%282f%29Tutorials%282f%29Writing%2820%29a%2820%29Callback%2820%29Based%2820%29Simple%2820%29Action%2820%29Client.html
  //  ac.sendGoal(goal,
  //              boost::bind(&MyNode::doneCb, this, _1, _2),
  //              Client::SimpleActiveCallback(),
  //              Client::SimpleFeedbackCallback());

    js_action_client_.sendGoal(js_goal_, boost::bind(&CartMoveActionServer::js_doneCb_,this,_1,_2)); // we could also name additional callback functions here, if desired
    //    action_client.sendGoal(goal, &doneCb, &activeCb, &feedbackCb); //alt--more callback funcs possible
    
    double t_timeout=arrival_time_+2.0; //wait 2 sec longer than expected duration of move
    
    bool finished_before_timeout = js_action_client_.waitForResult(ros::Duration(t_timeout));
    //bool finished_before_timeout = action_client.waitForResult(); // wait forever...
    if (!finished_before_timeout) {
        ROS_WARN("joint-space interpolation result is overdue ");
    } else {
        ROS_INFO("finished before timeout");
    }

    ROS_INFO("completed callback" );
    cart_move_as_.setSucceeded(cart_result_); // tell the client that we were successful acting on the request, and return the "result" message 
    
    //let's remember the last pose commanded, so we can use it as start pose for next move
    gripper1_affine_last_commanded_pose_ = des_gripper1_affine_wrt_lcamera_;
    gripper2_affine_last_commanded_pose_ = des_gripper2_affine_wrt_lcamera_;    
    //and the jaw opening angles:
    last_gripper_ang1_=gripper_ang1_;
    last_gripper_ang2_=gripper_ang2_;
}
Code Example #16
File: Pose.hpp  Project: Cirromulus/base-types
	/** 
	 * Test if the delta of the provided transformations is greater in 
	 * either distance or angle than the threshold.
	 *
	 * @param a2b the initial transformation from A to B
	 * @param aprime2b the next transformation from A' to B
	 *
	 * @result true if the transformation A' to A is greater than the stored thresholds
	 */
	bool test( const Eigen::Affine3d& a2b, const Eigen::Affine3d& aprime2b )
	{
	    return test( a2b.inverse() * aprime2b );
	}
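The single-argument test() overload is not shown here; presumably it compares the translation norm and rotation angle of the relative transform against the stored thresholds. A minimal sketch of such a check, with assumed member thresholds dist (meters) and angle (radians), not the base-types implementation:

#include <Eigen/Geometry>
#include <cmath>

// Sketch only: hypothetical thresholds and overloads mirroring the snippet above.
struct DeltaThreshold
{
    double dist;   // translation threshold in meters (assumed)
    double angle;  // rotation threshold in radians (assumed)

    bool test(const Eigen::Affine3d& pdelta) const
    {
        return pdelta.translation().norm() > dist
            || std::fabs(Eigen::AngleAxisd(pdelta.rotation()).angle()) > angle;
    }

    bool test(const Eigen::Affine3d& a2b, const Eigen::Affine3d& aprime2b) const
    {
        return test(a2b.inverse() * aprime2b);
    }
};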
Code Example #17
int do_inits() {
    Eigen::Matrix3d R;
    Eigen::Vector3d nvec, tvec, bvec, tip_pos;
    bvec << 0, 0, 1; //default for testing: gripper points "down"
    nvec << 1, 0, 0;
    tvec = bvec.cross(nvec);
    R.col(0) = nvec;
    R.col(1) = tvec;
    R.col(2) = bvec;
    g_des_gripper_affine1.linear() = R;
    tip_pos << -0.15, -0.03, 0.07;
    g_des_gripper_affine1.translation() = tip_pos; //will change this, but start w/ something legal

    //hard-coded camera-to-base transform, useful for simple testing/debugging
    g_affine_lcamera_to_psm_one.translation() << -0.155, -0.03265, 0.0;
    nvec << -1, 0, 0;
    tvec << 0, 1, 0;
    bvec << 0, 0, -1;
    //Eigen::Matrix3d R;
    R.col(0) = nvec;
    R.col(1) = tvec;
    R.col(2) = bvec;
    g_affine_lcamera_to_psm_one.linear() = R;

    g_q_vec1_start.resize(7);
    g_q_vec1_goal.resize(7);
    g_q_vec2_start.resize(7);
    g_q_vec2_goal.resize(7);

    g_des_gripper1_wrt_base = g_affine_lcamera_to_psm_one.inverse() * g_des_gripper_affine1;
    g_ik_solver.ik_solve(g_des_gripper1_wrt_base); // compute IK:
    g_q_vec1_goal = g_ik_solver.get_soln();
    g_q_vec1_start = g_q_vec1_goal;
    g_q_vec2_start << 0, 0, 0, 0, 0, 0, 0; //just put gripper 2 at home position
    g_q_vec2_goal << 0, 0, 0, 0, 0, 0, 0;
    //repackage q's into a trajectory;
    //populate a goal message for action server; 
    // initialize with 2 poses that are identical
    g_trajectory_point1.positions.clear();
    g_trajectory_point2.positions.clear();
    //resize these:
    for (int i=0;i<14;i++)  {
        g_trajectory_point1.positions.push_back(0.0); 
        g_trajectory_point2.positions.push_back(0.0); 
    }
        
    for (int i = 0; i < 7; i++) { 
        g_trajectory_point1.positions[i] = g_q_vec1_start[i];
        g_trajectory_point1.positions[i + 7] = g_q_vec2_start[i];
        //should fix up jaw-opening values...do this later
    }

    g_trajectory_point1.time_from_start = ros::Duration(0.0);
    g_trajectory_point2.time_from_start = ros::Duration(2.0);


    g_des_trajectory.points.clear(); // can clear components, but not entire trajectory_msgs
    g_des_trajectory.joint_names.clear();
    //    des_trajectory.joint_names.push_back("right_s0"); //yada-yada should fill in names
    g_des_trajectory.header.stamp = ros::Time::now();

    g_des_trajectory.points.push_back(g_trajectory_point1); // first point of the trajectory 
    g_des_trajectory.points.push_back(g_trajectory_point2);

    // copy traj to goal:
    g_goal.trajectory = g_des_trajectory;
    g_got_new_pose = true; //send robot to start pose
    
     ROS_INFO("getting transforms from camera to PSMs");
    tf::TransformListener tfListener;
    tf::StampedTransform tfResult_one, tfResult_two;
    bool tferr = true;
    int ntries = 0;
    ROS_INFO("waiting for tf between base and right_hand...");
    while (tferr) {
        if (ntries > 5) break; //give up and accept default after this many tries
        tferr = false;
        try {
            //The direction of the transform returned will be from the target_frame to the source_frame. 
            //Which if applied to data, will transform data in the source_frame into the target_frame. See tf/CoordinateFrameConventions#Transform_Direction
            tfListener.lookupTransform("left_camera_optical_frame", "one_psm_base_link", ros::Time(0), tfResult_one);
            //tfListener.lookupTransform("left_camera_optical_frame", "two_psm_base_link", ros::Time(0), tfResult_two);
        } catch (tf::TransformException &exception) {
            ROS_WARN("%s", exception.what());
            tferr = true;
            ros::Duration(0.5).sleep(); // sleep for half a second
            ros::spinOnce();
            ntries++;
        }
    }
    //default transform: need to match this up to camera calibration!
    if (tferr) {
        g_affine_lcamera_to_psm_one.translation() << -0.155, -0.03265, 0.0;
        Eigen::Vector3d nvec, tvec, bvec;
        nvec << -1, 0, 0;
        tvec << 0, 1, 0;
        bvec << 0, 0, -1;
        Eigen::Matrix3d R;
        R.col(0) = nvec;
        R.col(1) = tvec;
        R.col(2) = bvec;
        g_affine_lcamera_to_psm_one.linear() = R;
        g_affine_lcamera_to_psm_one.linear() = R;
        g_affine_lcamera_to_psm_one.translation() << 0.145, -0.03265, 0.0;
        ROS_WARN("using default transform");
    } else {

        ROS_INFO("tf is good");

        //affine_lcamera_to_psm_one is the position/orientation of psm1 base frame w/rt left camera link frame
        // need to extend this to camera optical frame
        g_affine_lcamera_to_psm_one = transformTFToEigen(tfResult_one);
        //affine_lcamera_to_psm_two = transformTFToEigen(tfResult_two);
    }
    ROS_INFO("transform from left camera to psm one:");
    cout << g_affine_lcamera_to_psm_one.linear() << endl;
    cout << g_affine_lcamera_to_psm_one.translation().transpose() << endl;
    //ROS_INFO("transform from left camera to psm two:");
    //cout << affine_lcamera_to_psm_two.linear() << endl;
    //cout << affine_lcamera_to_psm_two.translation().transpose() << endl;    
    
    
    ROS_INFO("done w/ inits");

    return 0;
    
}
Code Example #18
    // This function is responsible for converting the data from the
    // kinect into an odometry message, and transforming it to the same
    // frame that the ekf uses    
    void send_kinect_estimate(geometry_msgs::PointStamped pt,
			      geometry_msgs::PointStamped ptlast,
			      int index, int op)
	{
	    ROS_DEBUG("send_kinect_estimate triggered");

	    Eigen::Affine3d gwo;
	    Eigen::Vector3d tmp_point; 
	    tf::Transform transform;
	    geometry_msgs::PointStamped transpt, transptlast;
	    ros::Time tstamp = pt.header.stamp;
	    

	    // set transform parameters
	    transform.setOrigin(tf::Vector3(cal_pos(0),
					    cal_pos(1), cal_pos(2)));
	    transform.setRotation(tf::Quaternion(0,0,0,1));

	    // convert to Eigen:
	    tf::TransformTFToEigen(transform, gwo);
	    gwo = gwo.inverse();

	    tmp_point << pt.point.x, pt.point.y, pt.point.z;
	    tmp_point = gwo*tmp_point;
	    transpt.header.frame_id = "optimization_frame";
	    transpt.header.stamp = tstamp;
	    transpt.point.x = tmp_point(0);
	    transpt.point.y = tmp_point(1);
	    transpt.point.z = tmp_point(2);

	    tmp_point << ptlast.point.x, ptlast.point.y, ptlast.point.z;
	    tmp_point = gwo*tmp_point;
	    transptlast.header.frame_id = "optimization_frame";
	    transptlast.header.stamp = tstamp;
	    transptlast.point.x = tmp_point(0);
	    transptlast.point.y = tmp_point(1);
	    transptlast.point.z = tmp_point(2);
	    
	    ROS_DEBUG("Done transforming PointStamped messages");
	    // Let's first get the transform from /optimization_frame
	    // to /map
	    tf::StampedTransform trans_stamped;
	    geometry_msgs::PointStamped tmp;
	    try{
	    	tf.lookupTransform(
	    	    "map", "optimization_frame",
	    	    tstamp, trans_stamped);
	    	tf.transformPoint("map", transpt, tmp);

	    }
	    catch(tf::TransformException& ex){
	    	ROS_ERROR(
	    	    "Error trying to lookupTransform from /map "
	    	    "to /optimization_frame: %s", ex.what());
	    	return;
	    }
	    
	    // Now we can publish the Kinect's estimate of the robot's
	    // pose
	    std::stringstream ss;
	    ss << "base_footprint_kinect_robot_" << index;
	    kin_pose[index].header.stamp = ros::Time::now();
	    kin_pose[index].header.frame_id = "map";
	    kin_pose[index].child_frame_id = ss.str();
	    tmp.point.z = 0.0;
	    kin_pose[index].pose.pose.position = tmp.point;
	    double theta = 0.0;
	    if (op == 2)
	    {
	    	theta = atan2(transpt.point.x-
	    			     transptlast.point.x,
	    			     transpt.point.z-
	    			     transptlast.point.z);
	    	theta = clamp_angle(theta-M_PI/2.0);
		ROS_DEBUG("Calculated angle = %f",theta);					  
	    }
	    else
	    {
	    	theta = robot_start_ori[index];
	    	theta = clamp_angle(-theta);
		ROS_DEBUG("predetermined angle = %f",theta);
	    }
	    geometry_msgs::Quaternion quat = tf::createQuaternionMsgFromYaw(theta);
	    kin_pose[index].pose.pose.orientation = quat;

	    // Let's check if this is a "bad" point
	    if (bad_array[index])
	    {
		boost::array<double,36ul> tmpcov;
		for (int i=0; i<36; i++)
		    tmpcov[i] = kincov[i]*1000.0;
		kin_pose[index].pose.covariance = tmpcov;
	    }
	    else
		kin_pose[index].pose.covariance = kincov;
	    
	    ROS_DEBUG("Done filling in Odometry message");

	    // Now let's publish the estimated pose as a
	    // nav_msgs/Odometry message on a topic called /vo
	    ROS_DEBUG("publishing /vo for robot %d", index+1);
	    robots_pub[index].publish(kin_pose[index]);

	    // now, let's publish the transforms that goes along with it
	    geometry_msgs::TransformStamped kin_trans;
	    tf::Quaternion q1, q2;
	    q1 = tf::createQuaternionFromYaw(theta);
	    q2 = tf::Quaternion(1.0,0,0,0);
	    q1 = q1*q2;
	    tf::quaternionTFToMsg(q1, quat);

	    kin_trans.header.stamp = tstamp;
	    kin_trans.header.frame_id = kin_pose[index].header.frame_id;
	    kin_trans.child_frame_id = kin_pose[index].child_frame_id;
	    kin_trans.transform.translation.x = kin_pose[index].pose.pose.position.x;
	    kin_trans.transform.translation.y = kin_pose[index].pose.pose.position.y;
	    kin_trans.transform.translation.z = kin_pose[index].pose.pose.position.z;
	    kin_trans.transform.rotation = quat;

	    ROS_DEBUG("Sending transform for output of estimator node");
	    br.sendTransform(kin_trans);
	    
	    return;
	}
Code Example #19
    pcl::PointCloud<pcl::PointNormal>::Ptr TSDFVolumeOctree::renderView (const Eigen::Affine3d& trans, int downsampleBy) const {
        int new_width = image_width_ / downsampleBy;
        int new_height = image_height_ / downsampleBy;
        double new_fx = focal_length_x_ / downsampleBy;
        double new_fy = focal_length_y_ / downsampleBy;
        double new_cx = principal_point_x_ / downsampleBy;
        double new_cy = principal_point_y_ / downsampleBy;
        pcl::PointCloud<pcl::PointNormal>::Ptr cloud (new pcl::PointCloud<pcl::PointNormal> (new_width, new_height));
        cloud->is_dense = false;

        float min_step = max_dist_neg_ * 3/4.;
    #pragma omp parallel for
        for (size_t i = 0; i < cloud->size (); ++i) {
            size_t x = i % new_width;
            size_t y = i / new_width;
            //Raytrace
            pcl::PointNormal& pt = cloud->operator() (x,y);
            bool found_crossing = false;
            Eigen::Vector3f du ( (x - new_cx)/new_fx,
                                 (y - new_cy)/new_fy,
                                 1);
            du.normalize ();
            //Apply transform -- rotate the ray, start at the offset
            du = trans.rotation ().cast<float> () * du;
            Eigen::Vector3f p = trans.translation ().cast<float> ();
            //Preallocate
            int x_i, y_i, z_i;
            float d;
            float w;
            float last_w = 0;
            float last_d = 0;
            float t = min_sensor_dist_;
            p += t*du;
            float step = min_step;
            //Check if ray intersects cube
            //Initially we haven't ever hit a voxel
            bool hit_voxel = false;
            int niter = 0;
            while (t < max_sensor_dist_) {
                const OctreeNode* voxel = octree_->getContainingVoxel (p (0), p(1), p(2));
                if (voxel) {
                    hit_voxel = true;
                    voxel->getData (d, w);
                    if (((d < 0 && last_d > 0) || (d > 0 && last_d < 0)) && last_w && w) {
                        found_crossing = true;
                        float old_t = t - step;
                        step = (zsize_/zres_)/2;
                        float new_d;
                        float new_w;
                        float last_new_d = d;
                        float last_new_w = w;
                        while (t >= old_t) {
                            t -= step;
                            p -= step*du;
                            voxel = octree_->getContainingVoxel (p (0), p(1), p(2));
                            if (!voxel)
                                break;
                            voxel->getData (new_d, new_w);
                            if ((last_d > 0 && new_d > 0) || (last_d < 0 && new_d < 0)) {
                                last_d = new_d;
                                last_w = new_w;
                                d = last_new_d;
                                w = last_new_w;
                                t += step;
                                p += step*du;
                                break;
                            }
                            last_new_d = d;
                            last_new_w = w;
                        }
                        break;
                    }
                    last_d = d;
                    last_w = w;
                    //Update step
                    step = std::max (voxel->getMinSize () / 4., fabs (d)*max_dist_neg_);
                }
                else {
                    if (hit_voxel)
                        break;
                }
                t += step;
                p += step*du;
                niter++;
            }
            if (!found_crossing) {
                pt.x = pt.y = pt.z =std::numeric_limits<float>::quiet_NaN();
            }
            else {
                //Get optimal t
                bool has_data = true;
                float tcurr = t;
                float tprev = t-step;
                last_d = getTSDFValue (trans.translation ().cast<float> () + (tprev) * du,
                                       &has_data);
                d = getTSDFValue (trans.translation ().cast<float> () + tcurr * du, &has_data);
                if (!has_data || pcl_isnan (d) || pcl_isnan (last_d)) {
                    pt.x = pt.y = pt.z = std::numeric_limits<float>::quiet_NaN();
                }
                float t_star = t + step * (-1 + fabs (last_d / (last_d -d)));
                pt.getVector3fMap () = trans.translation().cast<float> () + tcurr * du;
                //Get normals by looking at adjacent voxels
                const OctreeNode* voxel = octree_->getContainingVoxel (pt.x, pt.y, pt.z);
                if (!voxel) {
                    pt.normal_x = pt.normal_y = pt.normal_z = std::numeric_limits<float>::quiet_NaN();
                    continue;
                }
                float xsize, ysize, zsize;
                voxel->getSize (xsize, ysize, zsize);
                bool valid = true;
                float d_xm = getTSDFValue (pt.x-xsize, pt.y, pt.z, &valid);
                float d_xp = getTSDFValue (pt.x+xsize, pt.y, pt.z, &valid);
                float d_ym = getTSDFValue (pt.x, pt.y-ysize, pt.z, &valid);
                float d_yp = getTSDFValue (pt.x, pt.y+ysize, pt.z, &valid);
                float d_zm = getTSDFValue (pt.x, pt.y, pt.z-zsize, &valid);
                float d_zp = getTSDFValue (pt.x, pt.y, pt.z+zsize, &valid);
                if (!valid) {
                    pt.normal_x = pt.normal_y = pt.normal_z = std::numeric_limits<float>::quiet_NaN();
                    continue;
                }
                Eigen::Vector3f dF;
                dF (0) = (d_xp - d_xm)*max_dist_neg_/ (2*xsize);
                dF (1) = (d_yp - d_ym)*max_dist_neg_/ (2*ysize);
                dF (2) = (d_zp - d_zm)*max_dist_neg_/ (2*zsize);
                dF.normalize();
                pt.normal_x = dF(0);
                pt.normal_y = dF(1);
                pt.normal_z = dF(2);
            }
        }
        pcl::transformPointCloudWithNormals (*cloud, *cloud, trans.inverse());
        return (cloud);
    }