Example 1
void doArcball(
	double * _viewMatrix, 
	double const * _rotationCenter, 
	double const * _projectionMatrix, 
	double const * _initialViewMatrix, 
	//double const * _currentViewMatrix,
	double const * _initialMouse, 
	double const * _currentMouse, 
	bool screenSpaceRadius,
	double radius)
{
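	// Map the raw pointers onto Eigen types, build an incremental rotation from
	// the initial and current mouse positions projected onto the arcball sphere,
	// and apply it to the initial view matrix about the rotation center.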
	Eigen::Map<Eigen::Vector3d const> rotationCenter(_rotationCenter);
	Eigen::Map<Eigen::Matrix4d const> initialViewMatrix(_initialViewMatrix);
	//Eigen::Map<Eigen::Matrix4d const> currentViewMatrix(_currentViewMatrix);
	Eigen::Map<Eigen::Matrix4d const> projectionMatrix(_projectionMatrix);
	Eigen::Map<Eigen::Matrix4d> viewMatrix(_viewMatrix);

	Eigen::Vector2d initialMouse(_initialMouse);
	Eigen::Vector2d currentMouse(_currentMouse);
	
	Eigen::Quaterniond q;
	Eigen::Vector3d Pa, Pc;
	if (screenSpaceRadius) {
		double aspectRatio = projectionMatrix(1, 1) / projectionMatrix(0, 0);

		initialMouse(0) *= aspectRatio;
		currentMouse(0) *= aspectRatio;

		Pa = mapToSphere(initialMouse, radius);
		Pc = mapToSphere(currentMouse, radius);

		q.setFromTwoVectors(Pa - rotationCenter, Pc - rotationCenter);
	}
	else {
		Pa = mapToSphere(projectionMatrix.inverse(), initialMouse, rotationCenter, radius);
		Pc = mapToSphere(projectionMatrix.inverse(), currentMouse, rotationCenter, radius);

		Eigen::Vector3d a = Pa - rotationCenter, b = Pc - rotationCenter;

#if 0
		std::cout << "Mouse Initial Radius = " << sqrt(a.dot(a)) << " Current Radius = " << sqrt(b.dot(b)) << std::endl;
#endif

		q.setFromTwoVectors(a, b);
	}	

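	// Compose the rotation about the rotation center: T(+c) * R * T(-c).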
	Eigen::Matrix4d qMat4;
	qMat4.setIdentity();
	qMat4.topLeftCorner<3, 3>() = q.toRotationMatrix();

	Eigen::Matrix4d translate;
	translate.setIdentity();
	translate.topRightCorner<3, 1>() = -rotationCenter;

	Eigen::Matrix4d invTranslate;
	invTranslate.setIdentity();
	invTranslate.topRightCorner<3, 1>() = rotationCenter;
	
	viewMatrix = invTranslate * qMat4 * translate * initialViewMatrix;
}
Example 2
/* ********************************************************************************************* */
TEST(UTILS, ROTATION) {
  using namespace dart;
  using namespace math;

  // Create Initial ExpMap
  Eigen::Vector3d axis(2.0, 1.0, 1.0);
  axis.normalize();
  double angle = 1.2;
  EXPECT_DOUBLE_EQ(axis.norm(), 1.0);
  Eigen::Vector3d expmap = axis * angle;


  // Test conversion between expmap and quaternion
  Eigen::Quaterniond q = expToQuat(expmap);
  Eigen::Vector3d expmap2 = quatToExp(q);

  EXPECT_NEAR((expmap - expmap2).norm(), 0.0, DART_EPSILON)
    << "Orig: " << expmap << " Reconstructed: " << expmap2;

  // Test conversion between matrix and euler
  Eigen::Matrix3d m = q.toRotationMatrix();
  Eigen::Vector3d e = matrixToEulerXYZ(m);
  Eigen::Matrix3d m2 = eulerXYZToMatrix(e);

  EXPECT_NEAR((m - m2).norm(), 0.0, DART_EPSILON)
    << "Orig: " << m << " Reconstructed: " << m2;
}
Example 3
 void BaseReferenceFrame::deserialize(ObjectData& data, IdContext& context){
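   // Restore the rotation (stored as quaternion coefficients) and the
   // translation, then rebuild the linear part of the transform.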
   Identifiable::deserialize(data,context);
   Eigen::Quaterniond q;
   q.coeffs().fromBOSS(data,"rotation");
   _transform.translation().fromBOSS(data,"translation");
   _transform.linear()=q.toRotationMatrix();
 }
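Example 4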
bool RosMessageContext::getOdomPose(Eigen::Isometry3d& _trans, double time){
  bool transformFound = true;
  _tfListener->waitForTransform(_odomReferenceFrameId, _baseReferenceFrameId,
				ros::Time(time), ros::Duration(1.0));
  try{
    tf::StampedTransform t;
    _tfListener->lookupTransform(_odomReferenceFrameId, _baseReferenceFrameId,
				 ros::Time(time), t);
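    // Copy the tf transform (origin + quaternion) into an Eigen isometry.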
    Eigen::Isometry3d transform = Eigen::Isometry3d::Identity();
    transform.translation().x()=t.getOrigin().x();
    transform.translation().y()=t.getOrigin().y();
    transform.translation().z()=t.getOrigin().z();
    Eigen::Quaterniond rot;
    rot.x()=t.getRotation().x();
    rot.y()=t.getRotation().y();
    rot.z()=t.getRotation().z();
    rot.w()=t.getRotation().w();
    transform.linear()=rot.toRotationMatrix();
    _trans = transform;
    transformFound = true;
  }
  catch (tf::TransformException ex){
    ROS_ERROR("%s",ex.what());
    transformFound = false;
  }
  return transformFound;
}
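Example 5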
 void rotate(const Eigen::Quaterniond& qrot)
 {
   Eigen::Matrix3d rotmat = qrot.toRotationMatrix();
   
   for (unsigned int i = 0; i < points.size(); i++)
   {
     points[i].head<3>() = rotmat*points[i].head<3>();
   }
   
   normal = rotmat*normal;
 }
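Example 6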
void UpperBodyPlanner::msgPose2Eigen(const geometry_msgs::Pose& input, Eigen::Vector3d& translation, Eigen::Matrix3d& rotation) {
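    // Split a ROS pose message into an Eigen translation vector and the
    // rotation matrix of its quaternion.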
    translation(0) = input.position.x;
    translation(1) = input.position.y;
    translation(2) = input.position.z;
    Eigen::Quaterniond convert;
    convert.w() = input.orientation.w;
    convert.x() = input.orientation.x;
    convert.y() = input.orientation.y;
    convert.z() = input.orientation.z;
    rotation = convert.toRotationMatrix();
}
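Example 7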
void robot_state::Transforms::setTransform(const geometry_msgs::TransformStamped &transform)
{
  if (transform.child_frame_id.rfind(target_frame_) == transform.child_frame_id.length() - target_frame_.length())
  {
    Eigen::Translation3d o(transform.transform.translation.x, transform.transform.translation.y, transform.transform.translation.z);
    Eigen::Quaterniond q;
    tf::quaternionMsgToEigen(transform.transform.rotation, q);
    setTransform(Eigen::Affine3d(o*q.toRotationMatrix()), transform.header.frame_id);
  } else {
    logError("Given transform is to frame '%s', but frame '%s' was expected.", transform.child_frame_id.c_str(), target_frame_.c_str());
  }
}
Example 8
void create_data(Eigen::MatrixXd &pa, Eigen::MatrixXd &pb) {
	int n_data = 7;
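	// Build a synthetic correspondence set: pb keeps the raw 3D points and
	// pa is the same cloud rotated by a known ground-truth quaternion.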
   	Eigen::MatrixXd pa0(3, n_data);	
   	Eigen::MatrixXd pb0(3, n_data);	
	
	pb0 << -0.7045189014502934,0.31652495664145264,-0.8913587885243552,0.4196143278053829,0.33125081405575785,-1.148712511573519,-0.7211957446166447,-0.4204243223315903,-0.8922857301575797,0.41556308950696674,-0.36760757371251074,-1.1630155401570457,-0.12535642300333297,0.26708755761917147,1.5095344824450356,0.9968448409012386,0.27593113974268946,1.2189108175890786,-0.28095118914331707,-0.40276257201497045,1.3669272703783852; 
	
	Eigen::Quaterniond q = Eigen::Quaterniond(2.86073, 0.0378363, 3.59752, 0.4211619).normalized();
	std::cout << "groundtruth-quaternion. w: " << q.w() << " x " << q.x() << " y: " << q.y() << " z " << q.z() << std::endl;
	pa = q.toRotationMatrix()*pb0;
	pb = pb0;
}
Example 9
void planning_models::Transforms::setTransform(const geometry_msgs::TransformStamped &transform)
{
  if (transform.child_frame_id.rfind(target_frame_) == transform.child_frame_id.length() - target_frame_.length())
  {
    Eigen::Translation3d o(transform.transform.translation.x, transform.transform.translation.y, transform.transform.translation.z);
    Eigen::Quaterniond q;
    quatFromMsg(transform.transform.rotation, q);
    setTransform(Eigen::Affine3d(o*q.toRotationMatrix()), transform.header.frame_id);
  } else {
    ROS_ERROR("Given transform is to frame '%s', but frame '%s' was expected.", transform.child_frame_id.c_str(), target_frame_.c_str());
  }
}
Example 10
double
Camera::reprojectionError(const Eigen::Vector3d& P,
                          const Eigen::Quaterniond& camera_q,
                          const Eigen::Vector3d& camera_t,
                          const Eigen::Vector2d& observed_p) const
{
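    // Transform the 3D point into the camera frame, project it onto the image
    // plane, and return the distance to the observed pixel.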
    Eigen::Vector3d P_cam = camera_q.toRotationMatrix() * P + camera_t;

    Eigen::Vector2d p;
    spaceToPlane(P_cam, p);

    return (p - observed_p).norm();
}
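Example 11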
		void rosSpin() {					
			
			double dt;
			
			Eigen::Vector3d t(0.0, 0.0, 0.0);
			Eigen::Vector3d t_new(0.0, 0.0, 0.0);
			Eigen::Vector3d t_vel(0.0, 0.0, 0.0);
			Eigen::Vector3d t_imu(0.0, 0.0, 0.0);
			Eigen::Vector3d t_arm(-0.19, 0.0, 0.02);
			Eigen::Quaterniond q_imu;
			
			ros::Rate rate(15);	
			
			ros::Time time_prev, time_now;
			
			time_prev = ros::Time::now();

			while(ros::ok()) {	
				
				ros::spinOnce();
				
				time_now = ros::Time::now();			
				dt = (time_now - time_prev).toSec();
				time_prev = time_now;		
				
				// Compute Rotation
				
				q_imu = w_imu.getQuaternionForAngularChange(dt);
								
				q = q * q_imu;
								
				// Compute Translation		
				
				t_imu = v_wheel.getRotationArm()*v_wheel.getTranslation() - q_imu.toRotationMatrix()*t_arm + t_arm;
				
				t_new = t + q.toRotationMatrix()*t_imu;
				
				t_vel = (t_new - t)/dt;
				
				t = t_new;
								
				setPose(t, q, t_new);		
				
				pub_data.publish(odometry);
						
				rate.sleep();
			}			
			
		};
Example 12
void update_arrows() {
    geometry_msgs::Point origin, arrow_x_tip, arrow_y_tip, arrow_z_tip;
    Eigen::Matrix3d R;
    Eigen::Quaterniond quat;
    quat.x() = g_stamped_pose.pose.orientation.x;
    quat.y() = g_stamped_pose.pose.orientation.y;
    quat.z() = g_stamped_pose.pose.orientation.z;
    quat.w() = g_stamped_pose.pose.orientation.w;
    R = quat.toRotationMatrix();
    Eigen::Vector3d x_vec, y_vec, z_vec;
    double veclen = 0.2; //make the arrows this long
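    // Each column of R is one of the pose's local axes; scale them to the
    // desired arrow length.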
    x_vec = R.col(0) * veclen;
    y_vec = R.col(1) * veclen;
    z_vec = R.col(2) * veclen;

    //update the arrow markers w/ new pose:
    origin = g_stamped_pose.pose.position;
    arrow_x_tip = origin;
    arrow_x_tip.x += x_vec(0);
    arrow_x_tip.y += x_vec(1);
    arrow_x_tip.z += x_vec(2);
    arrow_marker_x.points.clear();
    arrow_marker_x.points.push_back(origin);
    arrow_marker_x.points.push_back(arrow_x_tip);
    arrow_marker_x.header = g_stamped_pose.header;

    arrow_y_tip = origin;
    arrow_y_tip.x += y_vec(0);
    arrow_y_tip.y += y_vec(1);
    arrow_y_tip.z += y_vec(2);

    arrow_marker_y.points.clear();
    arrow_marker_y.points.push_back(origin);
    arrow_marker_y.points.push_back(arrow_y_tip);
    arrow_marker_y.header = g_stamped_pose.header;

    arrow_z_tip = origin;
    arrow_z_tip.x += z_vec(0);
    arrow_z_tip.y += z_vec(1);
    arrow_z_tip.z += z_vec(2);

    arrow_marker_z.points.clear();
    arrow_marker_z.points.push_back(origin);
    arrow_marker_z.points.push_back(arrow_z_tip);
    arrow_marker_z.header = g_stamped_pose.header;
}
Example 13
int main() {
	
    //-> create and fill synthetic data
	int n_params = 7;
    Eigen::VectorXd u(n_params);
    Eigen::MatrixXd pa, pb;
    create_data(pa, pb);
    // <-

    // -> cost function
	CMAES::cost_type fcost = [&](double *params, int n_params) {
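		// params[0..3] hold a quaternion (w, x, y, z) and params[4..6] a per-axis
		// scale; normalize/take absolute values and write them back so the
		// optimizer keeps sampling valid parameters.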
		Eigen::Quaterniond q = Eigen::Quaterniond(params[0], params[1], params[2], params[3]).normalized();
		Eigen::Vector3d s = Eigen::Vector3d(params[4], params[5], params[6]).cwiseAbs();

		params[0] = q.w();
		params[1] = q.x();
		params[2] = q.y();
		params[3] = q.z();
		params[4] = s(0);
		params[5] = s(1);
		params[6] = s(2);

		Eigen::Matrix3d rotation = q.toRotationMatrix();
		
		Eigen::Matrix3d scale = s.asDiagonal();
		
		Eigen::MatrixXd y = rotation*scale*pa;
		double cost = (pb - y).squaredNorm();
		return cost;
    };

	
	CMAES::Engine cmaes;
    Eigen::VectorXd x0(n_params);
	x0 << 1, 0, 0, 0, 1, 1, 1;
  	double c = fcost(x0.data(), n_params);
	std::cout << "x0: " << x0.transpose() << " fcost(x0): " << c << std::endl;
    double sigma0 = 1;
    Solution sol = cmaes.fmin(x0, n_params, sigma0, 6, 999, fcost);
	
	std::cout << "\nf_best: " << sol.f << "\nparams_best: " << sol.params.transpose() << std::endl;

    return 0;
}
Example 14
void compRollPitchCloud(pcl::PointCloud<PointXYZGD>::Ptr & cloud, double roll, double pitch)
{
	//map to points from perspective of body frame
    Eigen::Matrix4f trans = Eigen::Matrix4f::Identity();

#ifdef USE_QUATS
    trans.block(0,0,3,3) << curQuat.toRotationMatrix().cast<float>();
    trans(3,3) = 1;
#else
    trans << cos(roll), 0, sin(roll), 0,
             sin(roll)*sin(pitch), cos(pitch), -cos(roll)*sin(pitch), 0,
             -cos(pitch)*sin(roll), sin(pitch), cos(roll)*cos(pitch), 0,
             0, 0, 0, 1;
#endif

  //TODO: add translation
  //cout << trans << endl << endl;

  pcl::transformPointCloud(*cloud, *cloud_temp, trans);
  *cloud = *cloud_temp;
}
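Example 15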
void laserOdometryHandler(const nav_msgs::Odometry::ConstPtr& laserOdometry)
{
  //timeLaserOdometry = laserOdometry->header.stamp.toSec();

  double transformSum[6];
  double roll, pitch, yaw;
  geometry_msgs::Quaternion geoQuat = laserOdometry->pose.pose.orientation;

  //tf::Matrix3x3(tf::Quaternion(geoQuat.z, -geoQuat.x, -geoQuat.y, geoQuat.w)).getRPY(roll, pitch, yaw);

  // Get 4x4 Rotation Matrix
  Eigen::Quaterniond eigenQuat = Eigen::Quaternion<double>(geoQuat.w, geoQuat.x, geoQuat.y, geoQuat.z);
  Eigen::Matrix3d eigenRot = eigenQuat.toRotationMatrix();
  Eigen::Matrix4d eigenRot4 = Eigen::Matrix4d::Identity();
  eigenRot4.topLeftCorner<3, 3>() = eigenRot;


  // Get translation matrix
  Eigen::Affine3d trans(Eigen::Translation3d(
      laserOdometry->pose.pose.position.x,
      laserOdometry->pose.pose.position.y,
      laserOdometry->pose.pose.position.z));


  sensorTransform = trans.matrix();
  sensorTransform *= eigenRot4;

  ROS_INFO_STREAM(sensorTransform);

}
Example 16
  // translate kpts by 6DOF transformation of frame
  void Frame::setTKpts(Eigen::Vector4d trans, Eigen::Quaterniond rot)
  {
    Vector3d tr;
    tr = trans.head<3>();
    // set up 3x4 transformation from kpt+disp to kpt
    Matrix4d Q;
    Q << 1.0, 0.0, 0.0, -cam.cx, // fy should enter in here somewhere...
         0.0, 1.0, 0.0, -cam.cy,
         0.0, 0.0, 0.0, cam.fx,
         0.0, 0.0, 1.0/cam.tx, 0;
    Matrix<double,3,4> P;
    P << cam.fx, 0.0, cam.cx, 0.0,
         0.0, cam.fx, cam.cy, 0.0,
         0.0, 0.0, 1.0, 0.0;
    // 3D point transform - inverse of frame motion
    Matrix4d T;
    T.setZero();
    Matrix3d R = rot.toRotationMatrix();
    T.block<3,3>(0,0) = R.transpose();
    T.block<3,1>(0,3) = -R*tr;
    T(3,3) = 1.0;
    
    P = P*T*Q;

    // go through points and set up transformed ones
    tkpts.resize(kpts.size());
    Vector4d v(0.0,0.0,0.0,1.0);
    for (int i=0; i<(int)kpts.size(); i++)
      {
        Vector3d vk;
        v(0) = kpts[i].pt.x;
        v(1) = kpts[i].pt.y;
        v(2) = disps[i];
        vk = P*v;
        tkpts[i].pt.x = vk(0)/vk(2);
        tkpts[i].pt.y = vk(1)/vk(2);
      }
  }
Example 17
Eigen::Matrix4d state2Matrix(state s) {
	// take a state and return the equivalent 4x4 homogeneous transformation matrix
	Eigen::Matrix4d mat = Eigen::Matrix4d::Zero();
	// rotation part
	Eigen::Quaterniond q;
	q.x() = s.qx;
	q.y() = s.qy;
	q.z() = s.qz;
	q.w() = s.qw;
	q.normalize();
	Eigen::Matrix3d rot = q.toRotationMatrix();
	mat.block<3, 3>(0, 0) = rot;
	// translation part
	Eigen::Vector3d t;
	t[0] = s.x;
	t[1] = s.y;
	t[2] = s.z;
	mat.block<3, 1>(0, 3) = t;
	// homogeneous
	mat(3, 3) = 1;
	return mat;

}
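Example 18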
Eigen::MatrixXd MainWindow::quaternion2rpy( Eigen::Quaterniond quaternion )
{
  Eigen::MatrixXd _rpy = rotation2rpy( quaternion.toRotationMatrix() );

  return _rpy;
}
Example 19
void PartFeatureModel::func_zeroedyi_and_dzeroedyi_by_dxp_and_dzeroedyi_by_dyi(
    const Eigen::VectorXd &yi, const Eigen::VectorXd &xp)
{
  // Extract cartesian and quaternion components of xp
  motion_model_->func_r(xp);
  motion_model_->func_q(xp);

  // Extract ri and hhati components of yi = ypi
  func_ri(yi);
  func_hhati(yi);

  // ri part: transformation is just the same as in the normal point case
  // zeroedri = RRW(rWi - rW)

  // ri - r
  Eigen::Vector3d yWiminusrW = riRES_ - motion_model_->rRES_;

  Eigen::Quaterniond qRW = motion_model_->qRES_.inverse();
  Eigen::Matrix4d dqRW_by_dq = dqbar_by_dq();

  // Rotation RRW
  Eigen::Matrix3d RRW = qRW.toRotationMatrix();

  // RRW(rWi - rW)
  Eigen::Vector3d zeroedri = RRW * yWiminusrW;

  // Now calculate Jacobians
  // dzeroedri_by_dri is RRW
  // dzeroedri_by_dhhati = 0
  Eigen::Matrix3d dzeroedri_by_dri = RRW;

  // dzeroedyi_by_dxp:
  // dzeroedri_by_dr = -RRW
  // dzeroedri_by_dq = d_dq(RRW (ri - r))
  Eigen::Matrix3d dzeroedri_by_dr = RRW * -1.0;

  Eigen::Matrix<double, 3, 4> dzeroedri_by_dqRW = dRq_times_a_by_dq(qRW, yWiminusrW);
  Eigen::Matrix<double, 3, 4> dzeroedri_by_dq = dzeroedri_by_dqRW * dqRW_by_dq;

  // Now for the hhati part (easier...)
  // zeroedhhati = RRW hhati
  Eigen::Vector3d zeroedhhati = RRW * hhatiRES_;

  // Jacobians
  // dzeroedhhati_by_dr = 0
  // dzeroedhhati_by_dq = d_dq(RRW hhati)
  // dzeroedhhati_by_dhhati = RRW
  // dzeroedhhati_by_dri = 0
  Eigen::Matrix<double, 3, 4> dzeroedhhati_by_dqRW = dRq_times_a_by_dq(qRW, hhatiRES_);
  Eigen::Matrix<double, 3, 4> dzeroedhhati_by_dq = dzeroedhhati_by_dqRW * dqRW_by_dq;
  Eigen::Matrix<double, 3, 3> dzeroedhhati_by_dhhati = RRW;

  // And put it all together
  zeroedyiRES_ << zeroedri, zeroedhhati;

  dzeroedyi_by_dxpRES_.setZero();
  dzeroedyi_by_dxpRES_.block(0, 0, 3, 3) = dzeroedri_by_dr;
  dzeroedyi_by_dxpRES_.block(0, 3, 3, 4) = dzeroedri_by_dq;
  dzeroedyi_by_dxpRES_.block(3, 3, 3, 4) = dzeroedhhati_by_dq;

  dzeroedyi_by_dyiRES_.setZero();
  dzeroedyi_by_dyiRES_.block(0, 0, 3, 3) = dzeroedri_by_dri;
  dzeroedyi_by_dyiRES_.block(3, 3, 3, 3) = dzeroedhhati_by_dhhati;
}
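Example 20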
Eigen::MatrixXd MainWindow::quaternion2rotation( Eigen::Quaterniond quaternion )
{
  Eigen::MatrixXd _rotation = quaternion.toRotationMatrix();

  return _rotation;
}
Example 21
  void imageCb(const sensor_msgs::ImageConstPtr& l_image,
               const sensor_msgs::CameraInfoConstPtr& l_cam_info,
               const sensor_msgs::ImageConstPtr& r_image,
               const sensor_msgs::CameraInfoConstPtr& r_cam_info)
  {
    ROS_INFO("In callback, seq = %u", l_cam_info->header.seq);
    
    // Convert ROS messages for use with OpenCV
    cv::Mat left, right;
    try {
      left  = l_bridge_.imgMsgToCv(l_image, "mono8");
      right = r_bridge_.imgMsgToCv(r_image, "mono8");
    }
    catch (sensor_msgs::CvBridgeException& e) {
      ROS_ERROR("Conversion error: %s", e.what());
      return;
    }
    cam_model_.fromCameraInfo(l_cam_info, r_cam_info);

    frame_common::CamParams cam_params;
    cam_params.fx = cam_model_.left().fx();
    cam_params.fy = cam_model_.left().fy();
    cam_params.cx = cam_model_.left().cx();
    cam_params.cy = cam_model_.left().cy();
    cam_params.tx = cam_model_.baseline();

    if (vslam_system_.addFrame(cam_params, left, right)) {
      /// @todo Not rely on broken encapsulation of VslamSystem here
      int size = vslam_system_.sba_.nodes.size();
      if (size % 2 == 0) {
        // publish markers
        sba::drawGraph(vslam_system_.sba_, cam_marker_pub_, point_marker_pub_);
      }

      // Publish VO tracks
      if (vo_tracks_pub_.getNumSubscribers() > 0) {
        frame_common::drawVOtracks(left, vslam_system_.vo_.frames, vo_display_);
        IplImage ipl = vo_display_;
        sensor_msgs::ImagePtr msg = sensor_msgs::CvBridge::cvToImgMsg(&ipl);
        msg->header = l_cam_info->header;
        vo_tracks_pub_.publish(msg, l_cam_info);
      }
      
      // Refine large-scale SBA.
      const int LARGE_SBA_INTERVAL = 10;
      if (size > 4 && size % LARGE_SBA_INTERVAL == 0) {
        ROS_INFO("Running large SBA on %d nodes", size);
        vslam_system_.refine();
      }
      
      if (pointcloud_pub_.getNumSubscribers() > 0 && size % 2 == 0)
        publishRegisteredPointclouds(vslam_system_.sba_, vslam_system_.frames_, pointcloud_pub_);
      
      // Publish odometry data to tf.
      if (0) // TODO: Change this to parameter.
      {
        ros::Time stamp = l_cam_info->header.stamp;
        std::string image_frame = l_cam_info->header.frame_id;
        Eigen::Vector4d trans = -vslam_system_.sba_.nodes.back().trans;
        Eigen::Quaterniond rot = vslam_system_.sba_.nodes.back().qrot.conjugate();
        
        trans.head<3>() = rot.toRotationMatrix()*trans.head<3>(); 
        
        tf_transform_.setOrigin(tf::Vector3(trans(0), trans(1), trans(2)));
        tf_transform_.setRotation(tf::Quaternion(rot.x(), rot.y(), rot.z(), rot.w()) );
        
        tf::Transform simple_transform;
        simple_transform.setOrigin(tf::Vector3(0, 0, 0));
        simple_transform.setRotation(tf::Quaternion(.5, -.5, .5, .5));
        
        tf_broadcast_.sendTransform(tf::StampedTransform(tf_transform_, stamp, image_frame, "visual_odom"));
        tf_broadcast_.sendTransform(tf::StampedTransform(simple_transform, stamp, "visual_odom", "pgraph"));
      
      
        // Publish odometry data on topic.
        if (odom_pub_.getNumSubscribers() > 0)
        {
          tf::StampedTransform base_to_image;
          tf::Transform base_to_visodom;
         
          try
          {
            tf_listener_.lookupTransform(image_frame, "/base_footprint",
                                 stamp, base_to_image);
          }
          catch (tf::TransformException ex)
          {
              ROS_WARN("%s",ex.what());
              return;
          }
                                 
          base_to_visodom = tf_transform_.inverse() * base_to_image;
          
          geometry_msgs::PoseStamped pose;
          nav_msgs::Odometry odom;
          pose.header.frame_id = "/visual_odom";
          pose.pose.position.x = base_to_visodom.getOrigin().x();
          pose.pose.position.y = base_to_visodom.getOrigin().y();
          pose.pose.position.z = base_to_visodom.getOrigin().z();
          pose.pose.orientation.x = base_to_visodom.getRotation().x();
          pose.pose.orientation.y = base_to_visodom.getRotation().y();
          pose.pose.orientation.z = base_to_visodom.getRotation().z();
          pose.pose.orientation.w = base_to_visodom.getRotation().w();
          
          odom.header.stamp = stamp;
          odom.header.frame_id = "/visual_odom";
          odom.child_frame_id = "/base_footprint";
          odom.pose.pose = pose.pose;
          /* odom.pose.covariance[0] = 1;
          odom.pose.covariance[7] = 1;
          odom.pose.covariance[14] = 1;
          odom.pose.covariance[21] = 1;
          odom.pose.covariance[28] = 1;
          odom.pose.covariance[35] = 1; */
          odom_pub_.publish(odom);
        }
      }
    }
  }
Example 22
bool leatherman::poseFromMsg(const geometry_msgs::Pose &tmsg, Eigen::Affine3d &t)
{
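  // Convert a ROS pose message into an Eigen affine transform
  // (translation composed with the quaternion's rotation).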
  Eigen::Quaterniond q; bool r = quatFromMsg(tmsg.orientation, q);
  t = Eigen::Affine3d(Eigen::Translation3d(tmsg.position.x, tmsg.position.y, tmsg.position.z)*q.toRotationMatrix());
  return r;
}
Example 23
Eigen::Vector3d quaternion_to_rpy(const Eigen::Quaterniond &q)
{
	// YPR - ZYX
	return q.toRotationMatrix().eulerAngles(2, 1, 0).reverse();
}
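Example 24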
bool constraint_samplers::IKConstraintSampler::samplePose(Eigen::Vector3d &pos, Eigen::Quaterniond &quat,
                                                          const planning_models::KinematicState &ks,
                                                          unsigned int max_attempts)
{  
  if (sampling_pose_.position_constraint_)
  {
    const std::vector<bodies::BodyPtr> &b = sampling_pose_.position_constraint_->getConstraintRegions();
    if (!b.empty())
    {
      bool found = false;
      std::size_t k = random_number_generator_.uniformInteger(0, b.size() - 1);
      for (std::size_t i = 0 ; i < b.size() ; ++i)
        if (b[(i+k) % b.size()]->samplePointInside(random_number_generator_, max_attempts, pos))
        {
          found = true;
          break;
        }
      if (!found)
      {   
        ROS_ERROR("Unable to sample a point inside the constraint region");
        return false;
      }
    }
    else
    {   
      ROS_ERROR("Unable to sample a point inside the constraint region. Constraint region is empty when it should not be.");
      return false;
    }
    
    // if this constraint is with respect to a mobile frame, we need to convert this rotation to the root frame of the model
    if (sampling_pose_.position_constraint_->mobileReferenceFrame())
    {
      const planning_models::KinematicState::LinkState *ls = ks.getLinkState(sampling_pose_.position_constraint_->getReferenceFrame());
      pos = ls->getGlobalLinkTransform() * pos;
    }
  }
  else
  {
    // do FK for rand state
    planning_models::KinematicState tempState(ks);
    planning_models::KinematicState::JointStateGroup *tmp = tempState.getJointStateGroup(jmg_->getName());
    if (tmp)
    {
      tmp->setToRandomValues();
      pos = tempState.getLinkState(sampling_pose_.orientation_constraint_->getLinkModel()->getName())->getGlobalLinkTransform().translation();
    }
    else
    {
      ROS_ERROR("Passed a JointStateGroup for a mismatching JointModelGroup");
      return false;
    }
  }
  
  if (sampling_pose_.orientation_constraint_)
  {
    // sample a rotation matrix within the allowed bounds
    double angle_x = 2.0 * (random_number_generator_.uniform01() - 0.5) * sampling_pose_.orientation_constraint_->getXAxisTolerance();
    double angle_y = 2.0 * (random_number_generator_.uniform01() - 0.5) * sampling_pose_.orientation_constraint_->getYAxisTolerance();
    double angle_z = 2.0 * (random_number_generator_.uniform01() - 0.5) * sampling_pose_.orientation_constraint_->getZAxisTolerance();
    Eigen::Affine3d diff(Eigen::AngleAxisd(angle_x, Eigen::Vector3d::UnitX())
                         * Eigen::AngleAxisd(angle_y, Eigen::Vector3d::UnitY())
                         * Eigen::AngleAxisd(angle_z, Eigen::Vector3d::UnitZ()));
    Eigen::Affine3d reqr(sampling_pose_.orientation_constraint_->getDesiredRotationMatrix() * diff.rotation());
    quat = Eigen::Quaterniond(reqr.rotation());

    // if this constraint is with respect to a mobile frame, we need to convert this rotation to the root frame of the model
    if (sampling_pose_.orientation_constraint_->mobileReferenceFrame())
    {
      const planning_models::KinematicState::LinkState *ls = ks.getLinkState(sampling_pose_.orientation_constraint_->getReferenceFrame());
      Eigen::Affine3d rt(ls->getGlobalLinkTransform().rotation() * quat.toRotationMatrix());
      quat = Eigen::Quaterniond(rt.rotation());
    }
  }
  else
  {
    // sample a random orientation
    double q[4];
    random_number_generator_.quaternion(q);
    quat = Eigen::Quaterniond(q[3], q[0], q[1], q[2]);
  }
  
  // if there is an offset, we need to undo the induced rotation in the sampled transform origin (point)
  if (sampling_pose_.position_constraint_ && sampling_pose_.position_constraint_->hasLinkOffset())
    // the rotation matrix that corresponds to the desired orientation
    pos = pos - quat.toRotationMatrix() * sampling_pose_.position_constraint_->getLinkOffset();
  
  // we now have the transform we wish to perform IK for, in the planning frame
  
  if (transform_ik_)
  {
    // we need to convert this transform to the frame expected by the IK solver
    // both the planning frame and the frame for the IK are assumed to be robot links
    Eigen::Affine3d ikq(Eigen::Translation3d(pos) * quat.toRotationMatrix());
    
    const planning_models::KinematicState::LinkState *ls = ks.getLinkState(ik_frame_);
    ikq = ls->getGlobalLinkTransform().inverse() * ikq;
    
    pos = ikq.translation();
    quat = Eigen::Quaterniond(ikq.rotation());
  }
  
  return true;
}