Example #1
pcl::PointCloud<pcl::PointXYZRGBA>::Ptr Conversions::toPointCloudColor(const Eigen::Isometry3d &T, DepthImageDataPtr depth_data)
{
    pcl::PointCloud<pcl::PointXYZRGBA>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGBA>());
    cv::Mat depth_img;
    cv::Mat color_img;

    cv_bridge::CvImagePtr depth_bridge;
    cv_bridge::CvImagePtr rgb_bridge;
    try {
        depth_bridge = cv_bridge::toCvCopy(depth_data->depth_image_, sensor_msgs::image_encodings::TYPE_32FC1);
        depth_img = depth_bridge->image;
        rgb_bridge = cv_bridge::toCvCopy(depth_data->color_image_, sensor_msgs::image_encodings::BGR8);
        color_img = rgb_bridge->image;
    } catch (cv_bridge::Exception& e) {
        ROS_ERROR("cv_bridge exception: %s",e.what());
        return cloud;
    }

    // Get camera info for 3D point estimation.
    image_geometry::PinholeCameraModel cam = depth_data->camera_model_;

    cloud->header.frame_id = depth_data->sensor_frame_;
    cloud->is_dense = false;
    cloud->height = depth_img.rows;
    cloud->width = depth_img.cols;
    cloud->sensor_origin_ = Eigen::Vector4f( T.translation()(0), T.translation()(1), T.translation()(2), 1.f );
    cloud->sensor_orientation_ = Eigen::Quaternionf(T.rotation().cast<float>());
    cloud->points.resize( cloud->height * cloud->width );

    size_t idx = 0;
    const float* depthdata = reinterpret_cast<const float*>(&depth_img.data[0]);
    for(int v = 0; v < depth_img.rows; ++v) {
        for(int u = 0; u < depth_img.cols; ++u) {
            pcl::PointXYZRGBA& p = cloud->points[ idx ]; ++idx;

            float d = (*depthdata++);

            if (d > 0 && !std::isnan(d)/* && p.z <= 7.5*/) {
                p.z = d;
                p.x = (u - cam.cx()) * d / cam.fx();
                p.y = (v - cam.cy()) * d / cam.fy();

                cv::Point3_<uchar>* rgb = color_img.ptr<cv::Point3_<uchar> >(v,u);
                p.b = rgb->x;
                p.g = rgb->y;
                p.r = rgb->z;
                p.a = 255;
            } else {
                p.x = std::numeric_limits<float>::quiet_NaN();
                p.y = std::numeric_limits<float>::quiet_NaN();
                p.z = std::numeric_limits<float>::quiet_NaN();
                p.rgb = 0;
            }
        }
    }

    return cloud;
}
Example #2
/**
 * @function setRootTransform
 * @brief Set q (SCREW) from pose <x,y,z,r,p,y> 
 */
void InspectorTab::setRootTransform( dart::dynamics::Skeleton* robot, 
				     const Eigen::Matrix<double, 6, 1>& pose ) {
  dart::dynamics::Joint* joint = robot->getRootBodyNode()->getParentJoint();

  if(dynamic_cast<dart::dynamics::FreeJoint*>(joint)) {
    Eigen::Matrix<double, 6, 1> q;
    Eigen::Isometry3d transform = Eigen::Isometry3d::Identity();
    transform.translation() = pose.head<3>();
    transform.linear() = dart::math::eulerXYZToMatrix(pose.tail<3>());
    q = dart::math::logMap(transform);
    joint->set_q( q );
  
  }
  else {
    Eigen::Isometry3d transform = Eigen::Isometry3d::Identity();
    transform.linear() = dart::math::eulerXYZToMatrix(pose.tail<3>());
    transform.translation() = pose.head<3>();
    joint->setTransformFromParentBodyNode(transform);
  }
  
  for (int i = 0; i < robot->getNumBodyNodes(); ++i) {
    robot->getBodyNode(i)->updateTransform();
  }
}
Example #3
void PointMass::draw(renderer::RenderInterface* _ri,
                     const Eigen::Vector4d& _color,
                     bool _useDefaultColor) const
{
  if (_ri == NULL)
    return;

  _ri->pushMatrix();

  // render the self geometry
//  mParentJoint->applyGLTransform(_ri);
  Eigen::Isometry3d T = Eigen::Isometry3d::Identity();
  T.translation() = mX;
  _ri->transform(T);
  Eigen::Vector4d color1;
  color1 << 0.8, 0.3, 0.3, 1.0;
  mShape->draw(_ri, color1, false);
  _ri->popMatrix();

//  _ri->pushName((unsigned)mID);
  _ri->pushMatrix();
  T.translation() = mX0;
  _ri->transform(T);
  Eigen::Vector4d color2;
  color2 << 0.3, 0.8, 0.3, 1.0;
  mShape->draw(_ri, color2, false);
  _ri->popMatrix();
//  _ri->popName();

}
Example #4
pcl::PointCloud<pcl::PointXYZ>::Ptr Conversions::toPointCloud(const Eigen::Isometry3d &T, const image_geometry::PinholeCameraModel &cam, const cv::Mat &depth_img)
{
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>());
    cloud->header.frame_id = "cam";
    cloud->is_dense = false;
    cloud->height = depth_img.rows;
    cloud->width = depth_img.cols;
    cloud->sensor_origin_ = Eigen::Vector4f( T.translation()(0), T.translation()(1), T.translation()(2), 1.f );
    cloud->sensor_orientation_ = Eigen::Quaternionf(T.rotation().cast<float>());
    cloud->points.resize( cloud->height * cloud->width );

    size_t idx = 0;
    const float* depthdata = reinterpret_cast<const float*>(&depth_img.data[0]);
    for(int v = 0; v < depth_img.rows; ++v) {
        for(int u = 0; u < depth_img.cols; ++u) {
            pcl::PointXYZ& p = cloud->points[ idx ]; ++idx;

            float d = (*depthdata++);

            if (d > 0 && !std::isnan(d)) {
                p.z = d;
                p.x = (u - cam.cx()) * d / cam.fx();
                p.y = (v - cam.cy()) * d / cam.fy();
            } else {
                p.x = std::numeric_limits<float>::quiet_NaN();
                p.y = std::numeric_limits<float>::quiet_NaN();
                p.z = std::numeric_limits<float>::quiet_NaN();
            }
        }
    }
    return cloud;
}
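A hedged follow-up sketch: publishing the cloud returned above as a sensor_msgs/PointCloud2. This assumes pcl_conversions is available and that cloud_pub is a ros::Publisher advertised elsewhere (hypothetical name).

#include <sensor_msgs/PointCloud2.h>
#include <pcl_conversions/pcl_conversions.h>  // pcl::toROSMsg

// ... inside a node that already built `cloud` with Conversions::toPointCloud():
sensor_msgs::PointCloud2 msg;
pcl::toROSMsg(*cloud, msg);           // copy the PCL cloud into a ROS message (frame_id is already "cam")
msg.header.stamp = ros::Time::now();
cloud_pub.publish(msg);               // cloud_pub: hypothetical publisher advertised elsewhere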
Example #5
bool RosMessageContext::getOdomPose(Eigen::Isometry3d& _trans, double time){
  bool transformFound = true;
  _tfListener->waitForTransform(_odomReferenceFrameId, _baseReferenceFrameId,
				ros::Time(time), ros::Duration(1.0));
  try{
    tf::StampedTransform t;
    _tfListener->lookupTransform(_odomReferenceFrameId, _baseReferenceFrameId,
				 ros::Time(time), t);
    Eigen::Isometry3d transform = Eigen::Isometry3d::Identity();
    transform.translation().x()=t.getOrigin().x();
    transform.translation().y()=t.getOrigin().y();
    transform.translation().z()=t.getOrigin().z();
    Eigen::Quaterniond rot;
    rot.x()=t.getRotation().x();
    rot.y()=t.getRotation().y();
    rot.z()=t.getRotation().z();
    rot.w()=t.getRotation().w();
    transform.linear()=rot.toRotationMatrix();
    _trans = transform;
    transformFound = true;
  }
  catch (const tf::TransformException& ex){
    ROS_ERROR("%s",ex.what());
    transformFound = false;
  }
  return transformFound;
}
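A minimal call-site sketch for getOdomPose(), assuming a RosMessageContext instance named context (hypothetical) has already been configured with its TF listener and frame ids:

Eigen::Isometry3d odomPose = Eigen::Isometry3d::Identity();
if (context.getOdomPose(odomPose, ros::Time::now().toSec())) {
  ROS_INFO_STREAM("odom position: " << odomPose.translation().transpose());
} else {
  ROS_WARN("odom -> base transform not available yet");
}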
Example #6
Eigen::Vector6d BodyNode::getWorldVelocity(
    const Eigen::Vector3d& _offset, bool _isLocal) const {
  Eigen::Isometry3d T = mW;
  if (_isLocal)
    T.translation() = mW.linear() * -_offset;
  else
    T.translation() = -_offset;
  return math::AdT(T, mV);
}
Example #7
math::Jacobian BodyNode::getWorldJacobian(
    const Eigen::Vector3d& _offset, bool _isOffsetLocal) {
  Eigen::Isometry3d T = mW;
  if (_isOffsetLocal)
    T.translation() = mW.linear() * -_offset;
  else
    T.translation() = -_offset;
  return math::AdTJac(T, getBodyJacobian());
}
Example #8
bot_core::pose_t getPoseAsBotPose(Eigen::Isometry3d pose, int64_t utime){
  bot_core::pose_t pose_msg;
  pose_msg.utime =   utime;
  pose_msg.pos[0] = pose.translation().x();
  pose_msg.pos[1] = pose.translation().y();
  pose_msg.pos[2] = pose.translation().z();  
  Eigen::Quaterniond r_x(pose.rotation());
  pose_msg.orientation[0] =  r_x.w();  
  pose_msg.orientation[1] =  r_x.x();  
  pose_msg.orientation[2] =  r_x.y();  
  pose_msg.orientation[3] =  r_x.z();  
  return pose_msg;
}
Example #9
Eigen::Vector6d BodyNode::getWorldAcceleration(
    const Eigen::Vector3d& _offset, bool _isOffsetLocal) const {
  Eigen::Isometry3d T = mW;
  if (_isOffsetLocal)
    T.translation() = mW.linear() * -_offset;
  else
    T.translation() = -_offset;

  Eigen::Vector6d dV = mdV;
  dV.tail<3>() += mV.head<3>().cross(mV.tail<3>());

  return math::AdT(T, dV);
}
Example #10
// TODO: consider orientations other than identity
std::vector<reachability_t> TestReachability(double rotation)
{
	const double XMIN = -0.1, XMAX = 0.7, YMIN = -0.8, YMAX = 0.2;
	const double ZMIN = -1.0, ZMAX = 0.3, STEPSIZE = 0.05;
	HK::HuboKin hubo;
	std::vector<reachability_t> results;

	Eigen::VectorXd weight(7);
	weight << 1, 1, 1, 1, 1, 1, 1; //change this for weights!
	Vector6d q;
	int count = 0, valid = 0;
	for (double x = XMIN; x <= XMAX; x += STEPSIZE)
	{
		for (double y = YMIN; y <= YMAX; y += STEPSIZE)
		{
			for (double z = ZMIN; z <= ZMAX; z += STEPSIZE)
			{
				++count;
				Eigen::Isometry3d target = Eigen::Isometry3d::Identity();
				Eigen::Isometry3d result;
				target.translation().x() = x;
				target.translation().y() = y;
				target.translation().z() = z;
				target.rotate(Eigen::AngleAxisd(-(M_PI/6) * rotation, Eigen::Vector3d::UnitX()));
				hubo.armIK(q, target, Vector6d::Zero(), RIGHT);
				hubo.armFK(result, q, RIGHT);
				double ret = compareT(target, result, weight);
				//std::cout << ret << "\t";
				if (ret > 0.15)
				{
					continue;
				}
				else
				{
					valid++;
					reachability_t pointScore;
					pointScore.x = x;
					pointScore.y = y;
					pointScore.z = z;
					pointScore.error = ret;
					results.push_back(pointScore);
				}

			}

		}

	}

	return results;
}
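A hypothetical driver for TestReachability(), using only the reachability_t fields (x, y, z, error) that the function itself fills in:

std::vector<reachability_t> reach = TestReachability(1.0);
std::cout << reach.size() << " reachable right-arm targets\n";
for (size_t i = 0; i < reach.size(); ++i) {
  const reachability_t& r = reach[i];
  std::cout << r.x << " " << r.y << " " << r.z << " (error " << r.error << ")\n";
}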
Example #11
double compareT(const Eigen::Isometry3d& a, const Eigen::Isometry3d& b,
		const Eigen::VectorXd& weight)
{
	Eigen::Quaterniond qa(a.rotation());
	Eigen::Quaterniond qb(b.rotation());
	Eigen::Vector3d pa = a.translation();
	Eigen::Vector3d pb = b.translation();
	Eigen::VectorXd va(7), vb(7), verr(7), vScaled(7);
	va << pa, qa.x(), qa.y(), qa.z(), qa.w();
	vb << pb, qb.x(), qb.y(), qb.z(), qb.w();
	verr = vb - va;
	vScaled = weight.cwiseProduct(verr);
	return vScaled.squaredNorm();
}
Example #12
int collideSphereSphere(CollisionObject* o1, CollisionObject* o2, const double& _r0, const Eigen::Isometry3d& c0,
                        const double& _r1, const Eigen::Isometry3d& c1,
                        CollisionResult& result)
{
  double r0 = _r0;
  double r1 = _r1;
  double rsum = r0 + r1;
  Eigen::Vector3d normal = c0.translation() - c1.translation();
  double normal_sqr = normal.squaredNorm();

  if ( normal_sqr > rsum * rsum )
  {
    return 0;
  }

  r0 /= rsum;
  r1 /= rsum;

  Eigen::Vector3d point = r1 * c0.translation() + r0 * c1.translation();
  double penetration;

  if (normal_sqr < DART_COLLISION_EPS)
  {
    normal.setZero();
    penetration = rsum;

    Contact contact;
    contact.collisionObject1 = o1;
    contact.collisionObject2 = o2;
    contact.point = point;
    contact.normal = normal;
    contact.penetrationDepth = penetration;
    result.addContact(contact);
    return 1;
  }

  const double normal_len = std::sqrt(normal_sqr);
  normal *= (1.0 / normal_len);
  penetration = rsum - normal_len;

  Contact contact;
  contact.collisionObject1 = o1;
  contact.collisionObject2 = o2;
  contact.point = point;
  contact.normal = normal;
  contact.penetrationDepth = penetration;
  result.addContact(contact);
  return 1;

}
Example #13
std::string toString(const Eigen::Isometry3d& _v)
{
    std::ostringstream ostr;
    ostr.precision(6);

    Eigen::Vector3d xyz = math::matrixToEulerXYZ(_v.linear());

    ostr << _v.translation()(0) << " "
         << _v.translation()(1) << " "
         << _v.translation()(2) << " ";
    ostr << xyz[0] << " " << xyz[1] << " " << xyz[2];

    return ostr.str();
}
Example #14
void App::doWork(){

  std::vector<float> xA;
  std::vector<float> yA;
  readCSVFile(cl_cfg_.filenameA, xA, yA);
  std::cout << xA.size() << " points in File A\n";
  lidarOdom_->doOdometry(xA, yA, xA.size(), 0);

  // Match second scan without a prior
  std::string i;
  cout << "Match B: Continue? ";
  cin >> i;

  std::vector<float> xB;
  std::vector<float> yB;
  readCSVFile(cl_cfg_.filenameB, xB, yB);
  std::cout << xB.size() << " points in File B\n";
  lidarOdom_->doOdometry(xB, yB, xB.size(), 1);

  // Match third scan giving a prior rotation for the heading
  // this alignment would otherwise fail:
  cout << "Match C: Continue? ";
  cin >> i;

  std::vector<float> xC;
  std::vector<float> yC;
  readCSVFile(cl_cfg_.filenameC, xC, yC);
  std::cout << xC.size() << " points in File C\n";

  ScanTransform prior;
  prior.x = 0;
  prior.y = 0;
  prior.theta = 1.7;
  lidarOdom_->doOdometry(xC, yC, xC.size(), 2, &prior);

  // 2. Determine the body position using the LIDAR motion estimate:
  Eigen::Isometry3d pose = lidarOdom_->getCurrentPose();
  Eigen::Quaterniond orientation(pose.rotation());

  Eigen::Vector3d rpy = orientation.matrix().eulerAngles(0,1,2);

  std::cout << "\n";
  std::cout << "x,y,yaw (m,m,deg): "<< pose.translation().x() << ", " << pose.translation().y()
            << ", "
            << rpy[2]*180/M_PI << "\n";
}
Example #15
// A 'halo' camera - a circular ring of poses all pointing at a center point
// @param: focus_center: the center points
// @param: halo_r: radius of the ring
// @param: halo_dz: elevation of the camera above/below focus_center's z value
// @param: n_poses: number of generated poses
void
generate_halo(
  std::vector<Eigen::Isometry3d, Eigen::aligned_allocator<Eigen::Isometry3d> > &poses,
  Eigen::Vector3d focus_center,double halo_r,double halo_dz,int n_poses){
  
  for (double t = 0; t < 2*M_PI; t += (2*M_PI) / (double) n_poses) {
    double x = halo_r*cos(t);
    double y = halo_r*sin(t);
    double z = halo_dz;
    double pitch = atan2(halo_dz, halo_r);
    double yaw = atan2(-y, -x);

    Eigen::Isometry3d pose;
    pose.setIdentity();
    Eigen::Matrix3d m;
    m = Eigen::AngleAxisd(yaw, Eigen::Vector3d::UnitZ())
      * Eigen::AngleAxisd(pitch, Eigen::Vector3d::UnitY())
      * Eigen::AngleAxisd(0, Eigen::Vector3d::UnitZ());

    pose *= m;
    Eigen::Vector3d v(x, y, z);
    v += focus_center;
    pose.translation() = v;
    poses.push_back(pose);
  }
}
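A minimal usage sketch for generate_halo(), grounded only in the signature above:

std::vector<Eigen::Isometry3d, Eigen::aligned_allocator<Eigen::Isometry3d> > poses;
Eigen::Vector3d focus_center(0.0, 0.0, 1.0);   // the point the cameras look at
generate_halo(poses, focus_center, /*halo_r=*/2.0, /*halo_dz=*/0.5, /*n_poses=*/16);
// poses now holds 16 camera poses on a ring of radius 2 m, 0.5 m above the focus,
// each oriented so it looks back toward focus_center.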
Example #16
Eigen::Isometry3d toIsometry3d(const std::string& _str)
{
    Eigen::Isometry3d T = Eigen::Isometry3d::Identity();
    Eigen::Vector6d elements = Eigen::Vector6d::Zero();
    std::vector<std::string> pieces;
    std::string trimmedStr = boost::trim_copy(_str);
    boost::split(pieces, trimmedStr, boost::is_any_of(" "), boost::token_compress_on);
    assert(pieces.size() == 6);

    for (size_t i = 0; i < pieces.size(); ++i)
    {
        if (pieces[i] != "")
        {
            try
            {
                elements(i) = boost::lexical_cast<double>(pieces[i].c_str());
            }
            catch(boost::bad_lexical_cast& e)
            {
                std::cerr << "value ["
                          << pieces[i]
                          << "] is not a valid double for SE3["
                          << i
                          << "]"
                          << std::endl;
            }
        }
    }

    T.linear() = math::eulerXYZToMatrix(elements.tail<3>());
    T.translation() = elements.head<3>();
    return T;
}
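This parser is the inverse of toString() in Example #13: both use the "x y z roll pitch yaw" text form. A minimal round-trip sketch, assuming both free functions are visible in the current namespace:

std::string s = "0.1 0.2 0.3 0 0 0.785398";   // x y z r p y
Eigen::Isometry3d T = toIsometry3d(s);
std::cout << toString(T) << std::endl;        // prints back approximately the same six values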
Example #17
//==============================================================================
Eigen::Isometry3d State::getCOMFrame() const
{
  Eigen::Isometry3d T = Eigen::Isometry3d::Identity();

  // Y-axis
  Eigen::Vector3d yAxis = Eigen::Vector3d::UnitY();

  // X-axis
  Eigen::Vector3d pelvisXAxis = mPelvis->getTransform().linear().col(0);
  double mag = yAxis.dot(pelvisXAxis);
  pelvisXAxis -= mag * yAxis;
  Eigen::Vector3d xAxis = pelvisXAxis.normalized();

  // Z-axis
  Eigen::Vector3d zAxis = xAxis.cross(yAxis);

  T.translation() = getCOM();

  T.linear().col(0) = xAxis;
  T.linear().col(1) = yAxis;
  T.linear().col(2) = zAxis;

  return T;
}
Example #18
  void Line3D::jacobian(Matrix7x6d& Jp, Matrix7x6d& Jl, const Eigen::Isometry3d& x, const Line3D& l){
    Jp.setZero();
    Jl.setZero();
    Matrix6d A=Matrix6d::Zero();
    A.block<3,3>(0,0)=x.linear();
    A.block<3,3>(0,3)= _skew(x.translation())*x.linear();
    A.block<3,3>(3,3)=x.linear();
    Vector6d v=(Vector6d)l;
    Line3D lRemapped(v);

    Matrix6d D = Matrix6d::Zero();
    D.block<3,3>(0,0) = -_skew(l.d());
    D.block<3,3>(0,3) = -2*_skew(l.w());
    D.block<3,3>(3,3) = -2*_skew(l.d());
    Jp.block<6,6>(0,0) = A*D;

    Vector3d d = l.d();
    Vector3d w = l.w();
    double ln = d.norm();
    double iln = 1./ln;
    double iln3 = std::pow(iln,3);
    Matrix6d Jll;
    Jll <<
      iln,0,0,-(w.x()*d.x())*iln3,-(w.x()*d.y())*iln3,-(w.x()*d.z())*iln3,
      0,iln,0,-(w.y()*d.x())*iln3,-(w.y()*d.y())*iln3,-(w.y()*d.z())*iln3,
      0,0,iln,-(w.z()*d.x())*iln3,-(w.z()*d.y())*iln3,-(w.z()*d.z())*iln3,
      0,0,0,-(d.x()*d.x()-ln*ln)*iln3,-(d.x()*d.y())*iln3,-(d.x()*d.z())*iln3,
      0,0,0,-(d.x()*d.y())*iln3,-(d.y()*d.y()-ln*ln)*iln3,-(d.y()*d.z())*iln3,
      0,0,0,-(d.x()*d.z())*iln3,-(d.y()*d.z())*iln3,-(d.z()*d.z()-ln*ln)*iln3;
    Jl.block<6,6>(0,0) = A*Jll;
    Jl.block<1,6>(6,0) << 2*d.x(),2*d.y(),2*d.z();
  }
Example #19
///////////////////////////////////
//time x y z qx qy qz qw - all floating points
void read_poses_csv(std::string poses_files, std::vector<Isometry3dTime>& poses){
  int counter = 0;
  string line;
  ifstream myfile(poses_files.c_str());
  if (myfile.is_open()){
    while (myfile.good()){
      getline(myfile, line);
      if (line.size() > 4){
        double quat[4];
        double pos[3];
        int64_t dtime;
        // SCNd64 (from <cinttypes>) keeps the int64_t conversion portable
        sscanf(line.c_str(), "%" SCNd64 ",%lf,%lf,%lf,%lf,%lf,%lf,%lf",
               &dtime,
               &pos[0], &pos[1], &pos[2],
               &quat[1], &quat[2], &quat[3], &quat[0]); // NB: file stores qx qy qz qw, so w is read last
        Eigen::Quaterniond quat2(quat[0], quat[1], quat[2], quat[3]);
        Eigen::Isometry3d pose;
        pose.setIdentity();
        pose.translation() << pos[0], pos[1], pos[2];
        pose.rotate(quat2);
        Isometry3dTime poseT(dtime, pose);
        counter++;
        poses.push_back(poseT);
      }
    }
    myfile.close();
  } else {
    printf("Unable to open poses file: %s\n", poses_files.c_str());
    return;
  }
}
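A hypothetical driver for the CSV reader above; Isometry3dTime is assumed to be the (utime, pose) pair type the function already constructs, and "poses.csv" is a placeholder filename:

std::vector<Isometry3dTime> poses;
read_poses_csv("poses.csv", poses);
printf("read %zu poses from poses.csv\n", poses.size());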
Example #20
Eigen::Isometry3d getRelativeTransform(TagMatch const& match, Eigen::Matrix3d const & camera_matrix, const Eigen::Vector4d &distortion_coefficients, double tag_size) 
{
  std::vector<cv::Point3f> objPts;
  std::vector<cv::Point2f> imgPts;
  double s = tag_size/2.;
  objPts.push_back(cv::Point3f(-s,-s, 0));
  objPts.push_back(cv::Point3f( s,-s, 0));
  objPts.push_back(cv::Point3f( s, s, 0));
  objPts.push_back(cv::Point3f(-s, s, 0));


  imgPts.push_back(match.p0);
  imgPts.push_back(match.p1);
  imgPts.push_back(match.p2);
  imgPts.push_back(match.p3);

  cv::Mat rvec, tvec;
  cv::Matx33f cameraMatrix(
                           camera_matrix(0,0), 0, camera_matrix(0,2),
                           0, camera_matrix(1,1), camera_matrix(1,2),
                           0,  0,  1);

  cv::Vec4f distParam(distortion_coefficients(0), distortion_coefficients(1), distortion_coefficients(2), distortion_coefficients(3)); 
  cv::solvePnP(objPts, imgPts, cameraMatrix, distParam, rvec, tvec);
  cv::Matx33d r;
  cv::Rodrigues(rvec, r);
  Eigen::Matrix3d wRo;
  wRo << r(0,0), r(0,1), r(0,2), r(1,0), r(1,1), r(1,2), r(2,0), r(2,1), r(2,2);

  Eigen::Isometry3d T = Eigen::Isometry3d::Identity();
  T.linear() = wRo;
  T.translation() << tvec.at<double>(0), tvec.at<double>(1), tvec.at<double>(2);
  return T;
}
Example #21
void print_Isometry3d(Eigen::Isometry3d pose, std::stringstream &ss){
  Eigen::Vector3d t(pose.translation());
  Eigen::Quaterniond r(pose.rotation());
  ss <<t[0]<<", "<<t[1]<<", "<<t[2]<<" | " 
       <<r.w()<<", "<<r.x()<<", "<<r.y()<<", "<<r.z() ;
  //  std::cout << ss.str() << "q\n";
}
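A minimal usage sketch for print_Isometry3d():

std::stringstream ss;
Eigen::Isometry3d pose = Eigen::Isometry3d::Identity();
pose.translation() = Eigen::Vector3d(1.0, 2.0, 3.0);
print_Isometry3d(pose, ss);
std::cout << ss.str() << std::endl;   // "1, 2, 3 | 1, 0, 0, 0"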
Example #22
//==============================================================================
void ShapeNode::setRelativeTranslation(const Eigen::Vector3d& translation)
{
  Eigen::Isometry3d transform = getRelativeTransform();
  transform.translation() = translation;

  setRelativeTransform(transform);
}
Example #23
void VideoIMUFusion::RunningData::handleVideoTrackerReport(
    const OSVR_TimeValue &timestamp, const OSVR_PoseReport &report) {
    Eigen::Isometry3d roomPose = takeCameraPoseToRoom(report.pose);

#ifdef OSVR_FPE
    FPExceptionEnabler fpe;
#endif
    Eigen::Quaterniond orientation(roomPose.rotation());
    auto oriChange = state().getQuaternion().angularDistance(orientation);
    if (std::abs(oriChange) > M_PI / 2.) {
        OSVR_DEV_VERBOSE("Throwing out a bad video pose");
        return;
    }
    if (preReport(timestamp)) {
        m_cameraMeasPos.setMeasurement(roomPose.translation());
        osvr::kalman::correct(state(), processModel(), m_cameraMeasPos);
        m_cameraMeasOri.setMeasurement(orientation);
        osvr::kalman::correct(state(), processModel(), m_cameraMeasOri);

#if 0
    OSVR_DEV_VERBOSE(
        "State: " << state().stateVector().transpose() << "\n"
                  << "Quat: "
                  << state().getQuaternion().coeffs().transpose()
                  << "\n"
                     "Error:\n"
                  << state().errorCovariance());
#endif
    }
}
Example #24
void joints2frames::publishRigidTransform(Eigen::Isometry3d pose, int64_t utime, std::string channel){
  if (!shouldPublish(utime, channel) )
    return; 

  bot_core::rigid_transform_t tf;
  tf.utime = utime;
  tf.trans[0] = pose.translation().x();
  tf.trans[1] = pose.translation().y();
  tf.trans[2] = pose.translation().z();
  Eigen::Quaterniond quat(pose.rotation());
  tf.quat[0] = quat.w();
  tf.quat[1] = quat.x();
  tf.quat[2] = quat.y();
  tf.quat[3] = quat.z();
  lcm_->publish(channel, &tf);    
}
Example #25
    bool FLOATING_T::run(robot::robot_state_t& target, control_data_t* data)
    {
        // 1. Setup
        Eigen::Isometry3d body;
        target.get_body(body);

        // 2. Variables
        Eigen::Quaterniond save_rotation(data->joy_rotation);
        Eigen::Vector3d save_position = data->joy_position;
        
        Eigen::Quaterniond rotation(data->joy_rotation);
        Eigen::Vector3d& position = data->joy_position;

        // 3. Clamp to planar motion: keep only x, y and yaw
        position[2] = 0;
        rotation.y() = 0;
        rotation.x() = 0;
        rotation.normalize(); // re-normalize so the clamped quaternion is still a valid rotation

        body.linear() = body.linear() * rotation.matrix();
        body.translation() += position;

        // 4. Apply the updated body transform
        target.set_body(body);

        return true;
    }    
Example #26
//==============================================================================
Eigen::Vector6d FreeJoint::convertToPositions(const Eigen::Isometry3d& _tf)
{
  Eigen::Vector6d x;
  x.head<3>() = math::logMap(_tf.linear());
  x.tail<3>() = _tf.translation();
  return x;
}
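convertToPositions() maps a world transform into FreeJoint generalized coordinates (three exponential-map rotation values followed by the translation). A round-trip sketch, assuming this DART version also provides the companion FreeJoint::convertToTransform():

Eigen::Isometry3d tf = Eigen::Isometry3d::Identity();
tf.translation() = Eigen::Vector3d(0.5, 0.0, 1.0);
Eigen::Vector6d q = dart::dynamics::FreeJoint::convertToPositions(tf);
Eigen::Isometry3d tf2 = dart::dynamics::FreeJoint::convertToTransform(q);  // tf2 == tf (up to numerics)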
Example #27
void joints2frames::publishPose(Eigen::Isometry3d pose, int64_t utime, std::string channel){
  if (!shouldPublish(utime, channel) )
    return; 
  
  bot_core::pose_t pose_msg;
  pose_msg.utime =   utime;
  pose_msg.pos[0] = pose.translation().x();
  pose_msg.pos[1] = pose.translation().y();
  pose_msg.pos[2] = pose.translation().z();  
  Eigen::Quaterniond r_x(pose.rotation());
  pose_msg.orientation[0] =  r_x.w();  
  pose_msg.orientation[1] =  r_x.x();  
  pose_msg.orientation[2] =  r_x.y();  
  pose_msg.orientation[3] =  r_x.z();  
  lcm_->publish( channel, &pose_msg);
}
Example #28
//==============================================================================
btTransform convertTransform(const Eigen::Isometry3d& _T)
{
  btTransform trans;
  trans.setOrigin(convertVector3(_T.translation()));
  trans.setBasis(convertMatrix3x3(_T.linear()));
  return trans;
}
Example #29
static Eigen::Affine3d toAffine(const Eigen::Isometry3d& pose)
{
  Eigen::Affine3d p(pose.rotation());
  p.translation() = pose.translation();

  return p;
}
int GraphOptimizer_G2O::addVertex(Eigen::Matrix4d& vertexPose, int id, bool isFixed)
{

    g2o::Vector3d t(vertexPose(0,3),vertexPose(1,3),vertexPose(2,3));

    Eigen::Matrix3d rot = vertexPose.block(0,0,3,3);
    g2o::Quaterniond q(rot);
    q.normalize();

    // set up node
    g2o::VertexSE3 *vc = new g2o::VertexSE3();
    Eigen::Isometry3d cam; // camera pose
    cam = q;
    cam.translation() = t;

    vc->setEstimate(cam);
    vc->setId(id);      // vertex id

    //set pose fixed
    if (isFixed) {
        vc->setFixed(true);
    }

    // add to optimizer
    optimizer.addVertex(vc);
    vertexIdVec.push_back(id);

    return id;
}
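A hypothetical call-site sketch for addVertex(), assuming a default-constructed (or otherwise already configured) GraphOptimizer_G2O instance:

GraphOptimizer_G2O graph;
Eigen::Matrix4d firstPose = Eigen::Matrix4d::Identity();
graph.addVertex(firstPose, /*id=*/0, /*isFixed=*/true);   // anchor the first camera pose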