// Apply a rigid-body transform (rotation `rot`, then translation `trans`)
// to `pose` in place: the position is rotated and shifted, and the
// orientation quaternion is pre-multiplied by `rot`.
void Visualization::transformPose(geometry_msgs::Pose& pose,
    const Vector3d& trans, const Quaterniond& rot)
{
  // Position: p' = R * p + t
  const Vector3d moved =
      rot * Vector3d(pose.position.x, pose.position.y, pose.position.z) + trans;
  pose.position.x = moved.x();
  pose.position.y = moved.y();
  pose.position.z = moved.z();

  // Orientation: q' = R * q  (note Eigen's (w, x, y, z) constructor order)
  Quaterniond q(pose.orientation.w, pose.orientation.x,
                pose.orientation.y, pose.orientation.z);
  q = rot * q;
  pose.orientation.x = q.x();
  pose.orientation.y = q.y();
  pose.orientation.z = q.z();
  pose.orientation.w = q.w();
}
Exemplo n.º 2
0
// Madgwick-style gradient-descent attitude update:
//   qEst <- qEst + (qDot - beta * normalize(gradF)) * deltaT
// inputs:  qEst, w, a, deltaT, beta
// output:  qEst (updated in place)
void UpdateAttitude( 	Quaterniond& qEst, 		// Reference to the current estimate - will be updated to reflect the new estimate
						const Quaterniond& w,	// [0, wx, wy, wz] rad/s
						const Quaterniond& a,	// [0, ax, ay, az] m/s/s
						const double deltaT_sec,// sample period (seconds)
						const double beta ) 	// Gain factor to account for all zero mean noise (sqrt(3/4)*[0, wx_noise, wy_noise, wz_noise])
{
	// Rate of change of orientation from the gyro: qDot = 0.5 * qEst * w
	Quaterniond qDot;
	qDot = (qEst*w).coeffs() * 0.5;
	
	// Jacobian of the objective function F.
	// Its columns follow the (w, x, y, z) component order of the derivation.
	MatrixXd J(3,4);
	J << 	-2*qEst.y(), 2*qEst.z(), -2*qEst.w(), 2*qEst.x(),
			 2*qEst.x(), 2*qEst.w(),  2*qEst.z(), 2*qEst.y(),
			 0,			-4*qEst.x(), -4*qEst.y(), 0;
	
	cout << J << endl;
	// The objective function F minimising a measured gravitational field with an assumed gravity vector of 0,0,1
	MatrixXd F(3,1);
	F << 	2*(qEst.x()*qEst.z() - qEst.w()*qEst.y()) - a.x(),
			2*(qEst.w()*qEst.x() + qEst.y()*qEst.z()) - a.y(),
			2*(0.5 - qEst.x()*qEst.x() - qEst.y()*qEst.y()) - a.z();		 
	
	cout << F << endl;
	// The gradient of the solution surface, ordered (w, x, y, z) like J's columns.
	MatrixXd gradF(4,1);
	gradF = J.transpose() * F;

	// BUG FIX: Quaterniond::coeffs() is stored (x, y, z, w), while gradF is
	// ordered (w, x, y, z). Reorder the normalized gradient so each correction
	// term is subtracted from the matching quaternion component.
	MatrixXd gradFn = gradF.normalized();
	Vector4d step;
	step << gradFn(1), gradFn(2), gradFn(3), gradFn(0);

	//qEst = qEst + (qDot - beta*step)*deltaT
	qEst.coeffs() += (qDot.coeffs() - beta*step)*deltaT_sec;	
}
void pubOdometry()
{
    nav_msgs::Odometry odom;
    {
        odom.header.stamp = _last_imu_stamp;
        odom.header.frame_id = "map";

        odom.pose.pose.position.x = x(0);
        odom.pose.pose.position.y = x(1);
        odom.pose.pose.position.z = x(2);

        Quaterniond q;
        q = RPYtoR(x(3), x(4), x(5)).block<3,3>(0, 0);
        odom.pose.pose.orientation.x = q.x();
        odom.pose.pose.orientation.y = q.y();
        odom.pose.pose.orientation.z = q.z();
        odom.pose.pose.orientation.w = q.w();
    }

    //ROS_WARN("[update] publication done");
    ROS_WARN_STREAM("[final] b_g = " << x.segment(_B_G_BEG, _B_G_LEN).transpose());
    ROS_WARN_STREAM("[final] b_a = " << x.segment(_B_A_BEG, _B_A_LEN).transpose() << endl);
    ///ROS_WARN_STREAM("[final] cov_x = " << endl << cov_x << endl);
    odom_pub.publish(odom);
}
Exemplo n.º 4
0
// Propagate the IMU preintegration of the frame at the sliding-window tail
// over one sample of duration dt, given body-frame linear acceleration and
// angular velocity. Updates the error-state covariance P_k, the
// preintegrated position/velocity terms alpha_c_k / beta_c_k, the
// incremental rotation R_k1_k, and the accumulated integration time.
void SlamSystem::processIMU(double dt, const Vector3d&linear_acceleration, const Vector3d &angular_velocity)
{
    // Small-angle incremental rotation over dt, as a quaternion.
    Quaterniond dq;
    dq.x() = angular_velocity(0)*dt*0.5;
    dq.y() = angular_velocity(1)*dt*0.5;
    dq.z() = angular_velocity(2)*dt*0.5;
    // BUG FIX: the unit-norm completion is w = sqrt(1 - (x^2 + y^2 + z^2)).
    // The original multiplied the squared terms (SQ(x)*SQ(y)*SQ(z)), which
    // makes w ~= 1 regardless of the rotation magnitude, so dq was never
    // a unit quaternion for non-trivial rotations.
    dq.w() = sqrt(1 - (SQ(dq.x()) + SQ(dq.y()) + SQ(dq.z())));

    Matrix3d deltaR(dq);
    Frame *current = slidingWindow[tail].get();

    // Continuous-time error-state Jacobian (position / velocity / attitude).
    Matrix<double, 9, 9> F = Matrix<double, 9, 9>::Zero();
    F.block<3, 3>(0, 3) = Matrix3d::Identity();
    F.block<3, 3>(3, 6) = -current->R_k1_k* vectorToSkewMatrix(linear_acceleration);
    F.block<3, 3>(6, 6) = -vectorToSkewMatrix(angular_velocity);

    // IMU measurement noise covariance (accelerometer, gyroscope).
    Matrix<double, 6, 6> Q = Matrix<double, 6, 6>::Zero();
    Q.block<3, 3>(0, 0) = acc_cov;
    Q.block<3, 3>(3, 3) = gyr_cov;

    // Noise input matrix.
    Matrix<double, 9, 6> G = Matrix<double, 9, 6>::Zero();
    G.block<3, 3>(3, 0) = -current->R_k1_k;
    G.block<3, 3>(6, 3) = -Matrix3d::Identity();

    // First-order discrete covariance propagation.
    current->P_k = (Matrix<double, 9, 9>::Identity() + dt * F) * current->P_k * (Matrix<double, 9, 9>::Identity() + dt * F).transpose() + (dt * G) * Q * (dt * G).transpose();

    // Preintegrated position (alpha) and velocity (beta) increments, then
    // fold the incremental rotation into R_k1_k and account the elapsed time.
    current->alpha_c_k += current->beta_c_k*dt + current->R_k1_k*linear_acceleration * dt * dt * 0.5 ;
    current->beta_c_k += current->R_k1_k*linear_acceleration*dt;
    current->R_k1_k = current->R_k1_k*deltaR;
    current->timeIntegral += dt;
}
Exemplo n.º 5
0
void KeyFrame::addConnection(int index, KeyFrame* connected_kf, Vector3d relative_t, Quaterniond relative_q, double relative_yaw)
{
    Eigen::Matrix<double, 8, 1> connected_info;
    connected_info <<relative_t.x(), relative_t.y(), relative_t.z(),
    relative_q.w(), relative_q.x(), relative_q.y(), relative_q.z(),
    relative_yaw;
    connection_list.push_back(make_pair(index, connected_info));
}
Exemplo n.º 6
0
// Overwrite this keyframe's loop-closure information with the given relative
// transform, packed as [t_x, t_y, t_z, q_w, q_x, q_y, q_z, yaw].
void KeyFrame::updateLoopConnection(Vector3d relative_t, Quaterniond relative_q, double relative_yaw)
{
    Eigen::Matrix<double, 8, 1> info;
    info(0) = relative_t.x();
    info(1) = relative_t.y();
    info(2) = relative_t.z();
    info(3) = relative_q.w();
    info(4) = relative_q.x();
    info(5) = relative_q.y();
    info(6) = relative_q.z();
    info(7) = relative_yaw;
    loop_info = info;
}
Exemplo n.º 7
0
/*! Return the angular velocity at the specified time (TDB). The default
 *  implementation computes the angular velocity via differentiation.
 */
Vector3d
RotationModel::angularVelocityAtTime(double tdb) const
{
    // Finite-difference step chosen for this model.
    double dt = chooseDiffTimeDelta(*this);

    // Relative rotation over [tdb, tdb + dt].
    Quaterniond qNow  = orientationAtTime(tdb);
    Quaterniond qNext = orientationAtTime(tdb + dt);
    Quaterniond dq = qNext.conjugate() * qNow;

    // Nearly identical orientations: report zero to avoid numerical noise
    // in the acos/normalize below.
    if (std::abs(dq.w()) > 0.99999999)
        return Vector3d::Zero();

    // Axis (normalized vector part) scaled by angle/dt, angle = 2*acos(w).
    return dq.vec().normalized() * (2.0 * acos(dq.w()) / dt);
#ifdef CELVEC
    Vector3d v(dq.x, dq.y, dq.z);
	v.normalize();
	return v * (2.0 * acos(dq.w) / dt);
#endif
}
Exemplo n.º 8
0
// Convert a quaternion to its 3x3 rotation matrix using the standard
// element-wise expansion in terms of (w, x, y, z).
Matrix3d quaternion2mat(Quaterniond q)
{
  const double w = q.w(), x = q.x(), y = q.y(), z = q.z();
  Matrix3d m;
  m(0, 0) = w*w + x*x - y*y - z*z;
  m(0, 1) = 2*(x*y - w*z);
  m(0, 2) = 2*(x*z + w*y);
  m(1, 0) = 2*(x*y + w*z);
  m(1, 1) = w*w - x*x + y*y - z*z;
  m(1, 2) = 2*(y*z - w*x);
  m(2, 0) = 2*(x*z - w*y);
  m(2, 1) = 2*(y*z + w*x);
  m(2, 2) = w*w - x*x - y*y + z*z;
  return m;
}
Exemplo n.º 9
0
// Feed a folder of sequentially numbered color images through the visual
// odometry pipeline, starting at image index `start`, and append one
// association line per tracked frame (translation, quaternion, depth/color
// image paths) to an output file. The loop ends at the first missing image.
// NOTE(review): input and output paths are hardcoded to one dataset layout.
void BenchmarkNode::runFromFolder(int start)
{
  ofstream outfile;
  outfile.open ("/home/worxli/Datasets/data/associate_unscaled.txt");
  // outfile.open ("/home/worxli/data/test/associate_unscaled.txt");
  for(int img_id = start;;++img_id)
  {

    // load image
    std::stringstream ss;
    ss << "/home/worxli/Datasets/data/img/color" << img_id << ".png";
	  // ss << "/home/worxli/data/test/img/color" << img_id << ".png";
    std::cout << "reading image " << ss.str() << std::endl;
    cv::Mat img(cv::imread(ss.str().c_str(), 0)); // flag 0 -> load grayscale

    // end loop if no images left
    if(img.empty())
      break;

    assert(!img.empty());

    // process frame
    vo_->addImage(img, img_id);

    // display tracking quality
    if(vo_->lastFrame() != NULL)
    {
      int id = vo_->lastFrame()->id_;
    	std::cout << "Frame-Id: " << id << " \t"
                  << "#Features: " << vo_->lastNumObservations() << " \t"
                  << "Proc. Time: " << vo_->lastProcessingTime()*1000 << "ms \n";

    	// access the pose of the camera via vo_->lastFrame()->T_f_w_.
	//std::cout << vo_->lastFrame()->T_f_w_ << endl;

	//std::count << vo_->lastFrame()->pos() << endl;
  // last frame's pose split into rotation (quaternion) and translation
  Quaterniond quat = vo_->lastFrame()->T_f_w_.unit_quaternion();
  Vector3d trans = vo_->lastFrame()->T_f_w_.translation();

	// association line: tx ty tz qx qy qz qw depth-path color-path
	outfile << trans.x() << " "
          << trans.y() << " "
          << trans.z() << " " 

          << quat.x()  << " " 
          << quat.y()  << " " 
          << quat.z()  << " " 
          << quat.w()  << " " 

          << "depth/mapped" << img_id << ".png "
          << "img/color" << img_id << ".png\n";

    }
  }
  outfile.close();
}
Exemplo n.º 10
0
// Copy an Eigen quaternion into a geometry_msgs quaternion, component-wise.
geometry_msgs::Quaternion eigenQuaterniondToTfQuaternion( Quaterniond q ){
    geometry_msgs::Quaternion out;
    out.x = q.x();
    out.y = q.y();
    out.z = q.z();
    out.w = q.w();
    return out;
}
// Attach a thin box ("plane") of side `sx` to the gripper of `armName` in
// the planning scene, oriented by `q`, so that motion planning treats it as
// an obstacle carried by the arm. Returns false only when the
// planning-scene service call itself fails.
// (Removed the unused local `arm2Name` from the original.)
bool addPlaneToCollisionModel(const std::string &armName, double sx, const Quaterniond &q)
{
    ros::NodeHandle nh("~") ;

    // Block until the environment server is up, then connect to it.
    ros::service::waitForService("/environment_server/set_planning_scene_diff");
    ros::ServiceClient get_planning_scene_client =
      nh.serviceClient<arm_navigation_msgs::GetPlanningScene>("/environment_server/set_planning_scene_diff");

    arm_navigation_msgs::GetPlanningScene::Request planning_scene_req;
    arm_navigation_msgs::GetPlanningScene::Response planning_scene_res;

    // Collision object attached to the gripper link, expressed in the
    // arm's end-effector frame.
    arm_navigation_msgs::AttachedCollisionObject att_object;

    att_object.link_name = armName + "_gripper";

    att_object.object.id = "attached_plane";
    att_object.object.operation.operation = arm_navigation_msgs::CollisionObjectOperation::ADD;

    att_object.object.header.frame_id = armName + "_ee" ;
    att_object.object.header.stamp = ros::Time::now();

    // Model the plane as a thin box: sx x sx x 1 cm.
    arm_navigation_msgs::Shape object;

    object.type = arm_navigation_msgs::Shape::BOX;
    object.dimensions.resize(3);
    object.dimensions[0] = sx;
    object.dimensions[1] = sx;
    object.dimensions[2] = 0.01;

    // Box center offset half a side length along +z, rotated by q.
    geometry_msgs::Pose pose;
    pose.position.x = 0 ;
    pose.position.y = 0 ;
    pose.position.z = sx/2 ;

    pose.orientation.x = q.x();
    pose.orientation.y = q.y();
    pose.orientation.z = q.z();
    pose.orientation.w = q.w();

    att_object.object.shapes.push_back(object);
    att_object.object.poses.push_back(pose);

    planning_scene_req.planning_scene_diff.attached_collision_objects.push_back(att_object);

    // Push the diff to the environment server.
    if(!get_planning_scene_client.call(planning_scene_req, planning_scene_res)) return false;

    return true ;
}
Exemplo n.º 12
0
// Convert a rotation matrix to a quaternion via the trace-based formula.
// NOTE(review): this assumes 1 + trace(m) > 0. For rotations near 180
// degrees the `sqrt` argument approaches zero and the divisions become
// ill-conditioned -- confirm callers keep inputs away from that case.
Quaterniond mat2quaternion(Matrix3d m)
{
  Quaterniond q;
  const double w = sqrt(1 + m(0, 0) + m(1, 1) + m(2, 2))/2;
  q.w() = w;
  q.x() = (m(2, 1) - m(1, 2))/(4*w);
  q.y() = (m(0, 2) - m(2, 0))/(4*w);
  q.z() = (m(1, 0) - m(0, 1))/(4*w);
  return q;
}
Exemplo n.º 13
0
// Angular velocity at time `tjd` by finite differencing of the orientation.
Vector3d
CachingRotationModel::computeAngularVelocity(double tjd) const
{
    // Finite-difference step chosen for this model.
    double dt = chooseDiffTimeDelta(*this);
    Quaterniond q0 = orientationAtTime(tjd);

    // Call computeSpin/computeEquatorOrientation instead of orientationAtTime
    // in order to avoid affecting the cache.
    Quaterniond spin = computeSpin(tjd + dt);
    Quaterniond equator = computeEquatorOrientation(tjd + dt);
    Quaterniond dq = (spin * equator).conjugate() * q0;

    // Orientations nearly identical: report zero angular velocity rather
    // than amplifying numerical noise.
    if (std::abs(dq.w()) > 0.99999999)
        return Vector3d::Zero();

    // Axis (normalized vector part) times angle/dt, with angle = 2*acos(w).
    return dq.vec().normalized() * (2.0 * acos(dq.w()) / dt);
#ifdef CELVEC
	Vec3d v(dq.x, dq.y, dq.z);
	v.normalize();
    return v * (2.0 * acos(dq.w) / dt);
#endif
}
Exemplo n.º 14
0
void TransformerTF2::convertTransformToTf(const Transform &t,
		geometry_msgs::TransformStamped &tOut) {

	Quaterniond eigenQuat = t.getRotationQuat();
	tOut.header.frame_id = t.getFrameParent();
	tOut.header.stamp = ros::Time::fromBoost(t.getTime());
	tOut.child_frame_id = t.getFrameChild();
	tOut.transform.rotation.w = eigenQuat.w();
	tOut.transform.rotation.x = eigenQuat.x();
	tOut.transform.rotation.y = eigenQuat.y();
	tOut.transform.rotation.z = eigenQuat.z();
	tOut.transform.translation.x = t.getTranslation()(0);
	tOut.transform.translation.y = t.getTranslation()(1);
	tOut.transform.translation.z = t.getTranslation()(2);
}
Exemplo n.º 15
0
/**
Euler angle definition: zyx
Rotation matrix: C_body2ned
Converts ZYX (roll, pitch, yaw) Euler angles to a quaternion using the
half-angle product formulas.
**/
Quaterniond euler2quaternion(Vector3d euler)
{
  const double hr = euler(0)/2, hp = euler(1)/2, hy = euler(2)/2;
  const double cr = cos(hr), sr = sin(hr);
  const double cp = cos(hp), sp = sin(hp);
  const double cy = cos(hy), sy = sin(hy);

  Quaterniond q;
  q.w() = cr*cp*cy + sr*sp*sy;
  q.x() = sr*cp*cy - cr*sp*sy;
  q.y() = cr*sp*cy + sr*cp*sy;
  q.z() = cr*cp*sy - sr*sp*cy;
  return q;
}
Exemplo n.º 16
0
// Pack an Eigen position/orientation pair into a geometry_msgs::Pose.
geometry_msgs::Pose eigenPoseToROS(const Vector3d &pos, const Quaterniond &orient)
{
    geometry_msgs::Pose msg;

    msg.position.x = pos.x();
    msg.position.y = pos.y();
    msg.position.z = pos.z();

    msg.orientation.x = orient.x();
    msg.orientation.y = orient.y();
    msg.orientation.z = orient.z();
    msg.orientation.w = orient.w();

    return msg;
}
// Per-atom compute: for each atom in the group, derive the stretched
// contact-radius axes from the Green-Lagrange strain of the deformation
// gradient, plus the rotation (as a quaternion) from the polar
// decomposition F = R * U. Output layout per atom:
// strainVector[i] = [rx, ry, rz, qw, qx, qy, qz]; atoms outside the
// group are zero-filled.
void ComputeSmdTlsphShape::compute_peratom() {
	double *contact_radius = atom->contact_radius;
	invoked_peratom = update->ntimestep;

	// grow vector array if necessary

	if (atom->nlocal > nmax) {
		memory->destroy(strainVector);
		nmax = atom->nmax;
		memory->create(strainVector, nmax, size_peratom_cols, "strainVector");
		array_atom = strainVector;
	}

	int *mask = atom->mask;
	double **smd_data_9 = atom->smd_data_9;
	int nlocal = atom->nlocal;
	Matrix3d E, eye, R, U, F;
	eye.setIdentity();
	Quaterniond q;
	bool status;

	for (int i = 0; i < nlocal; i++) {
		if (mask[i] & groupbit) {

			// reinterpret the 9 stored doubles as the 3x3 deformation gradient
			F = Map<Matrix3d>(smd_data_9[i]);
			status = PolDec(F, R, U, false); // polar decomposition of the deformation gradient, F = R * U
			if (!status) {
				// non-fatal: warn and continue with whatever R/U were produced
				error->message(FLERR, "Polar decomposition of deformation gradient failed.\n");
			}

			E = 0.5 * (F.transpose() * F - eye); // Green-Lagrange strain
			// stretch the contact radius by the diagonal strain components
			strainVector[i][0] = contact_radius[i] * (1.0 + E(0, 0));
			strainVector[i][1] = contact_radius[i] * (1.0 + E(1, 1));
			strainVector[i][2] = contact_radius[i] * (1.0 + E(2, 2));

			q = R; // convert pure rotation matrix to quaternion
			strainVector[i][3] = q.w();
			strainVector[i][4] = q.x();
			strainVector[i][5] = q.y();
			strainVector[i][6] = q.z();
		} else {
			// atom not in this compute's group: zero the whole row
			for (int j = 0; j < size_peratom_cols; j++) {
				strainVector[i][j] = 0.0;
			}
		}
	}
}
Exemplo n.º 18
0
int main()
{   // Quaterniond specified in (w,x,y,z)
	Quaterniond qEst = Quaterniond(0,0,0,1); // unit z (normalized) // initial guess
	Quaterniond w = Quaterniond(0, M_PI*0.5, 0, 0); // pi/2 rad/s around x
	Quaterniond a = Quaterniond(0, 0, 0, 1); //(0, ax, ay, az) in m/s/s (normalized)
	AngleAxisd offset = AngleAxisd(M_PI/10,Vector3d::UnitX());
	a = a*offset;
	cout << "a: " <<endl << a.coeffs() <<endl;
	double deltaT = 0.01;
	double beta = 0.033;	
	UpdateAttitude(qEst, w, a, deltaT, beta);
	cout << "qEst:" << endl << qEst.coeffs() << endl;
	// manual quaternion multiplication
	Vector4d result;
	result << 
	qEst.w()*w.w() - qEst.x()*w.x() - qEst.y()*w.y() - qEst.z()*w.z() , 
	qEst.w()*w.x() + qEst.x()*w.w() + qEst.y()*w.z() - qEst.z()*w.y() , 
	qEst.w()*w.y() - qEst.x()*w.z() + qEst.y()*w.w() + qEst.z()*w.x() , 
	qEst.w()*w.z() + qEst.x()*w.y() - qEst.y()*w.x() + qEst.z()*w.w() ;

	cout << "manual qDot: " << endl << 0.5*result << endl;
}
// Hand-eye calibration driver. Parses command-line options (camera id,
// output/data folders, arm name, board geometry, station count, workspace
// bounding box, look-at orientation, fixed-vs-moving camera), then moves
// the robot through `nStations` randomized poses, grabbing an RGB-D frame
// and the corresponding robot pose at each, and finally solves the
// hand-eye problem (Tsai for a fixed camera, Horaud for a camera on the
// arm) and writes the inverse calibration matrix to `outFolder`.
int main(int argc, char **argv) {

    ros::init(argc, argv, "handeye_calibration");
    ros::NodeHandle nh ;

    srand(clock()) ;   // seed the random station generator

    int c = 1 ;

    // ---- command-line parsing (results go into file-scope globals) ----
    while ( c < argc )
    {
        if ( strncmp(argv[c], "--camera", 8) == 0 )
        {
            if ( c + 1 < argc ) {
                camera_id = argv[++c] ;
            }
        }
        else if ( strncmp(argv[c], "--out", 5) == 0 )
        {
            if ( c + 1 < argc ) {
                outFolder = argv[++c] ;
            }
        }
        else if ( strncmp(argv[c], "--data", 6) == 0 )
        {
            if ( c + 1 < argc ) {
                dataFolder = argv[++c] ;
            }
        }
        else if ( strncmp(argv[c], "--arm", 5) == 0 )
        {
            if ( c + 1 < argc ) {
                armName = argv[++c] ;
            }
        }
        else if ( strncmp(argv[c], "--board", 7) == 0 )
        {
            // board size in inner corners: bx by
            int bx = -1, by = -1 ;

            if ( c + 1 < argc ) {
                bx = atof(argv[++c]) ;
            }
            if ( c + 1 < argc ) {
                by = atof(argv[++c]) ;
            }

            if ( bx > 0 && by > 0 )
                boardSize = cv::Size(bx, by) ;
        }
        else if ( strncmp(argv[c], "--cell", 7) == 0 )
        {
            if ( c + 1 < argc ) {
                string cs = argv[++c] ;
                cellSize = atof(cs.c_str()) ;;
            }
        }
        else if ( strncmp(argv[c], "--stations", 10) == 0 )
        {
            if ( c + 1 < argc ) {
                nStations = atoi(argv[++c]) ;
            }
        }
        else if ( strncmp(argv[c], "--bbox", 6) == 0 )
        {
            // workspace bounding box: minX minY minZ maxX maxY maxZ
            if ( c + 1 < argc ) {
                minX = atof(argv[++c]) ;
            }
            if ( c + 1 < argc ) {
                minY = atof(argv[++c]) ;
            }
            if ( c + 1 < argc ) {
                minZ = atof(argv[++c]) ;
            }
            if ( c + 1 < argc ) {
                maxX = atof(argv[++c]) ;
            }
            if ( c + 1 < argc ) {
                maxY = atof(argv[++c]) ;
            }
            if ( c + 1 < argc ) {
                maxZ = atof(argv[++c]) ;
            }

        }
        else if ( strncmp(argv[c], "--orient", 8) == 0 )
        {
            // NOTE(review): ox/oy/oz stay uninitialized if fewer than three
            // values follow --orient -- confirm callers always pass all three.
            double ox, oy, oz ;

            if ( c + 1 < argc ) {
                ox = atof(argv[++c]) ;
            }
            if ( c + 1 < argc ) {
                oy = atof(argv[++c]) ;
            }
            if ( c + 1 < argc ) {
                oz = atof(argv[++c]) ;
            }

            orient = Vector3d(ox, oy, oz) ;
        }
        else if ( strncmp(argv[c], "--fixed", 7) == 0 )
        {
            fixedCam = true ;

        }

        ++c ;
    }


    if ( camera_id.empty() )
    {
        ROS_ERROR("No camera specified") ;
        return 0 ;
    }

    // default output path derived from the current user and camera id
    if ( outFolder.empty() )
    {
        outFolder = "/home/" ;
        outFolder += getenv("USER") ;
        outFolder += "/.ros/clopema_calibration/" ;
        outFolder += camera_id ;
        outFolder += "/handeye_rgb.calib" ;
    }

    ros::AsyncSpinner spinner(4) ;
    spinner.start() ;

    // open grabber and wait until connection

    camera_helpers::OpenNICaptureRGBD grabber(camera_id) ;

    if ( !grabber.connect() )
    {
        ROS_ERROR("Cannot connect to frame grabber: %s", camera_id.c_str()) ;
        return 0 ;
    }

    c = 0 ;   // reused as the station counter from here on

    // move robot and grab images

    robot_helpers::MoveRobot mv ;
    mv.setServoMode(false);

    tf::TransformListener listener(ros::Duration(1.0));

    double cx, cy, fx, fy ;

    while ( c < nStations )
    {
        // move the robot to the next position

        // uniform random target inside the bounding box
        double X = minX + (maxX - minX)*(rand()/(double)RAND_MAX) ;
        double Y = minY + (maxY - minY)*(rand()/(double)RAND_MAX) ;
        double Z = minZ + (maxZ - minZ)*(rand()/(double)RAND_MAX) ;

        // small random perturbation of the look-at orientation, then renormalize
        const double qscale = 0.3 ;
        double qx = qscale * (rand()/(double)RAND_MAX - 0.5) ;
        double qy = qscale * (rand()/(double)RAND_MAX - 0.5) ;
        double qz = qscale * (rand()/(double)RAND_MAX - 0.5) ;
        double qw = qscale * (rand()/(double)RAND_MAX - 0.5) ;

        Quaterniond q = robot_helpers::lookAt(orient, M_PI) ;

       q = Quaterniond(q.x() + qx, q.y() + qy, q.z() + qz, q.w() + qw) ;
       q.normalize();

        if ( fixedCam ) {

            // keep planning aware of the board carried by the gripper
            addPlaneToCollisionModel(armName, 0.3, q) ;
            if ( !robot_helpers::moveGripper(mv, armName, Eigen::Vector3d(X, Y, Z), q) ) continue ;
            robot_helpers::resetCollisionModel() ;
        }
        else
        {
            // for an Xtion on arm the bounding box indicates the target position and the orient the lookAt direction
            // so move back the sensor far enough so that the target is visible. This is currently hardcoded.

            Vector3d dir = q*Vector3d(0, 0, 1) ;
            Vector3d c = Vector3d(X, Y, Z) - 0.9*dir ;

            trajectory_msgs::JointTrajectory traj ;
            if ( !robot_helpers::planXtionToPose(armName, c, q, traj) ) continue ;

            mv.execTrajectory(traj) ;

        }

        image_geometry::PinholeCameraModel cm ;
        cv::Mat clr, depth ;

        ros::Time ts ;

        grabber.grab(clr, depth, ts, cm) ;

        // camera intrinsics. we assume a rectfied and calibrated frame

        cx = cm.cx() ;
        cy = cm.cy() ;
        fx = cm.fx() ;
        fy = cm.fy() ;

        string filenamePrefix = dataFolder + str(boost::format("/grab_%06d") % c) ;

        cv::imwrite(filenamePrefix + "_c.png", clr) ;
        cv::imwrite(filenamePrefix + "_d.png", depth) ;

        Eigen::Affine3d pose_ ;

        // robot pose: directly from the arm for a fixed camera, otherwise
        // via TF at the grab timestamp
        if ( fixedCam ) pose_ = robot_helpers::getPose(armName) ;
        else
        {
            tf::StampedTransform transform;

            try {
                listener.waitForTransform(armName + "_link_6", "base_link", ts, ros::Duration(1) );
                listener.lookupTransform(armName + "_link_6", "base_link", ts, transform);

                tf::TransformTFToEigen(transform, pose_);

                } catch (tf::TransformException ex) {
                    ROS_ERROR("%s",ex.what());
                    continue ;
            }
        }



        {
            ofstream strm((filenamePrefix + "_pose.txt").c_str()) ;

            strm << pose_.rotation() << endl << pose_.translation() ;
        }

         ++c ;

    }

    grabber.disconnect();


    robot_helpers::setServoPowerOff() ;

//    exit(1) ;

    // ---- solve the hand-eye problem from the recorded grabs ----
    vector<Affine3d> gripper_to_base, target_to_sensor ;
    Affine3d sensor_to_base, sensor_to_gripper ;

    cv::Mat cameraMatrix ;

    // intrinsics from the last grabbed frame
    cameraMatrix = cv::Mat::eye(3, 3, CV_64F);

    cameraMatrix.at<double>(0, 0) = fx ;
    cameraMatrix.at<double>(1, 1) = fy ;
    cameraMatrix.at<double>(0, 2) = cx ;
    cameraMatrix.at<double>(1, 2) = cy ;



 //   find_target_motions("grab_",  "/home/malasiot/images/clothes/calibration/calib_xtion2/", boardSize, cellSize, cameraMatrix, true, gripper_to_base, target_to_sensor) ;

       find_target_motions("grab_",  dataFolder, boardSize, cellSize, cameraMatrix, true, gripper_to_base, target_to_sensor) ;


    if ( fixedCam )
        solveHandEyeFixed(gripper_to_base, target_to_sensor, Tsai, true, sensor_to_base) ;
    else
        solveHandEyeMoving(gripper_to_base, target_to_sensor, Horaud, false, sensor_to_gripper) ;


    certh_libs::createDir(boost::filesystem::path(outFolder).parent_path().string(), true) ;

    ofstream ostrm(outFolder.c_str()) ;

    // write the inverse transform (base/gripper w.r.t. sensor)
    if ( fixedCam )
    {
        ostrm << sensor_to_base.inverse().matrix() ;
//        cout << sensor_to_base.inverse().matrix() ;
    }
    else
    {
        ostrm << sensor_to_gripper.inverse().matrix() ;
      // cout << sensor_to_gripper.inverse().matrix() << endl ;
/*
       for(int i=0 ; i<gripper_to_base.size() ; i++)
           cout << (gripper_to_base[i] * sensor_to_gripper * target_to_sensor[i]).matrix() << endl ;
           */
    }

    // NOTE(review): returns 1 on success -- confirm callers expect a
    // nonzero exit status here.
    return 1;
}
Exemplo n.º 20
0
// Rebuild every published path (per-sequence and base) from the current
// keyframe list, optionally rewriting the loop-path CSV on disk, and
// refresh the pose-graph visualization (sequential and loop edges).
// Holds m_keyframelist for the entire rebuild.
void PoseGraph::updatePath()
{
    m_keyframelist.lock();
    list<KeyFrame*>::iterator it;
    // clear previously published per-sequence paths before repopulating
    for (int i = 1; i <= sequence_cnt; i++)
    {
        path[i].poses.clear();
    }
    base_path.poses.clear();
    posegraph_visualization->reset();

    // truncate the result file; poses are re-appended below
    if (SAVE_LOOP_PATH)
    {
        ofstream loop_path_file_tmp(VINS_RESULT_PATH, ios::out);
        loop_path_file_tmp.close();
    }

    for (it = keyframelist.begin(); it != keyframelist.end(); it++)
    {
        Vector3d P;
        Matrix3d R;
        (*it)->getPose(P, R);
        Quaterniond Q;
        Q = R;
//        printf("path p: %f, %f, %f\n",  P.x(),  P.z(),  P.y() );

        // pose message; x/y are shifted for display only
        geometry_msgs::PoseStamped pose_stamped;
        pose_stamped.header.stamp = ros::Time((*it)->time_stamp);
        pose_stamped.header.frame_id = "world";
        pose_stamped.pose.position.x = P.x() + VISUALIZATION_SHIFT_X;
        pose_stamped.pose.position.y = P.y() + VISUALIZATION_SHIFT_Y;
        pose_stamped.pose.position.z = P.z();
        pose_stamped.pose.orientation.x = Q.x();
        pose_stamped.pose.orientation.y = Q.y();
        pose_stamped.pose.orientation.z = Q.z();
        pose_stamped.pose.orientation.w = Q.w();
        // sequence 0 is the (re)loaded base map; others are live sequences
        if((*it)->sequence == 0)
        {
            base_path.poses.push_back(pose_stamped);
            base_path.header = pose_stamped.header;
        }
        else
        {
            path[(*it)->sequence].poses.push_back(pose_stamped);
            path[(*it)->sequence].header = pose_stamped.header;
        }

        // append "timestamp_ns, tx, ty, tz, qw, qx, qy, qz" to the CSV
        if (SAVE_LOOP_PATH)
        {
            ofstream loop_path_file(VINS_RESULT_PATH, ios::app);
            loop_path_file.setf(ios::fixed, ios::floatfield);
            loop_path_file.precision(0);
            loop_path_file << (*it)->time_stamp * 1e9 << ",";
            loop_path_file.precision(5);
            loop_path_file  << P.x() << ","
                  << P.y() << ","
                  << P.z() << ","
                  << Q.w() << ","
                  << Q.x() << ","
                  << Q.y() << ","
                  << Q.z() << ","
                  << endl;
            loop_path_file.close();
        }
        //draw local connection
        if (SHOW_S_EDGE)
        {
            // walk backwards to this keyframe, then connect it to up to 4
            // predecessors of the same sequence
            list<KeyFrame*>::reverse_iterator rit = keyframelist.rbegin();
            list<KeyFrame*>::reverse_iterator lrit;
            for (; rit != keyframelist.rend(); rit++)  
            {  
                if ((*rit)->index == (*it)->index)
                {
                    lrit = rit;
                    lrit++;
                    for (int i = 0; i < 4; i++)
                    {
                        if (lrit == keyframelist.rend())
                            break;
                        if((*lrit)->sequence == (*it)->sequence)
                        {
                            Vector3d conncected_P;
                            Matrix3d connected_R;
                            (*lrit)->getPose(conncected_P, connected_R);
                            posegraph_visualization->add_edge(P, conncected_P);
                        }
                        lrit++;
                    }
                    break;
                }
            } 
        }
        if (SHOW_L_EDGE)
        {
            // draw a loop edge from this keyframe to its loop-closure partner
            if ((*it)->has_loop && (*it)->sequence == sequence_cnt)
            {
                
                KeyFrame* connected_KF = getKeyFrame((*it)->loop_index);
                Vector3d connected_P;
                Matrix3d connected_R;
                connected_KF->getPose(connected_P, connected_R);
                //(*it)->getVioPose(P, R);
                (*it)->getPose(P, R);
                if((*it)->sequence > 0)
                {
                    posegraph_visualization->add_loopedge(P, connected_P + Vector3d(VISUALIZATION_SHIFT_X, VISUALIZATION_SHIFT_Y, 0));
                }
            }
        }

    }
    publish();
    m_keyframelist.unlock();
}
Exemplo n.º 21
0
void SlamSystem::BA()
{
    R.resize(numOfState);
    T.resize(numOfState);
    vel.resize(numOfState);
    for (int i = 0; i < numOfState ; i++)
    {
        int k = head + i;
        if (k >= slidingWindowSize){
          k -= slidingWindowSize;
        }
        R[k] = slidingWindow[k]->R_bk_2_b0;
        T[k] = slidingWindow[k]->T_bk_2_b0;
        vel[k] = slidingWindow[k]->v_bk;
    }

    int sizeofH = 9 * numOfState;
    MatrixXd HTH(sizeofH, sizeofH);
    VectorXd HTb(sizeofH);
    MatrixXd tmpHTH;
    VectorXd tmpHTb;

    MatrixXd H_k1_2_k(9, 18);
    MatrixXd H_k1_2_k_T;
    VectorXd residualIMU(9);

    MatrixXd H_i_2_j(9, 18);
    MatrixXd H_i_2_j_T;
    VectorXd residualCamera(9);
    MatrixXd tmpP_inv(9, 9);

    for (int iterNum = 0; iterNum < maxIterationBA; iterNum++)
    {
      HTH.setZero();
      HTb.setZero();

      //1. prior constraints
      int m_sz = margin.size;
      VectorXd dx = VectorXd::Zero(STATE_SZ(numOfState - 1));

      if (m_sz != (numOfState - 1)){
        assert("prior matrix goes wrong!!!");
      }

      for (int i = numOfState - 2; i >= 0; i--)
      {
        int k = head + i;
        if (k >= slidingWindowSize){
          k -= slidingWindowSize;
        }
        //dp, dv, dq
        dx.segment(STATE_SZ(i), 3) = T[k] - slidingWindow[k]->T_bk_2_b0;
        dx.segment(STATE_SZ(i) + 3, 3) = vel[k] - slidingWindow[k]->v_bk;
        dx.segment(STATE_SZ(i) + 6, 3) = Quaterniond(slidingWindow[k]->R_bk_2_b0.transpose() * R[k]).vec() * 2.0;
      }
      HTH.block(0, 0, STATE_SZ(m_sz), STATE_SZ(m_sz)) += margin.Ap.block(0, 0, STATE_SZ(m_sz), STATE_SZ(m_sz));
      HTb.segment(0, STATE_SZ(m_sz)) -= margin.Ap.block(0, 0, STATE_SZ(m_sz), STATE_SZ(m_sz))*dx;
      HTb.segment(0, STATE_SZ(m_sz)) -= margin.bp.segment(0, STATE_SZ(m_sz));

      //2. imu constraints
      for (int i = numOfState-2; i >= 0; i-- )
      {
        int k = head + i;
        if (k >= slidingWindowSize){
          k -= slidingWindowSize;
        }
        if ( slidingWindow[k]->imuLinkFlag == false){
          continue;
        }
        int k1 = k + 1;
        if (k1 >= slidingWindowSize){
          k1 -= slidingWindowSize;
        }
        residualIMU = math.ResidualImu(T[k], vel[k], R[k],
                                   T[k1], vel[k1], R[k1],
                                   gravity_b0, slidingWindow[k]->timeIntegral,
                                   slidingWindow[k]->alpha_c_k, slidingWindow[k]->beta_c_k, slidingWindow[k]->R_k1_k);
        H_k1_2_k = math.JacobianImu(T[k], vel[k], R[k],
                                   T[k1], vel[k1], R[k1],
                                   gravity_b0, slidingWindow[k]->timeIntegral);
        H_k1_2_k_T = H_k1_2_k.transpose();
        H_k1_2_k_T *= slidingWindow[k]->P_k.inverse();
        HTH.block(i * 9, i * 9, 18, 18) += H_k1_2_k_T  *  H_k1_2_k;
        HTb.segment(i * 9, 18) -= H_k1_2_k_T * residualIMU;
      }

      //3. camera constraints
      for (int i = 0; i < numOfState; i++)
      {
        int currentStateID = head + i;
        if (currentStateID >= slidingWindowSize){
          currentStateID -= slidingWindowSize;
        }
        if (slidingWindow[currentStateID]->keyFrameFlag == false){
          continue;
        }

        list<int>::iterator iter = slidingWindow[currentStateID]->cameraLinkList.begin();
        for (; iter != slidingWindow[currentStateID]->cameraLinkList.end(); iter++ )
        {
          int linkID = *iter;

          H_i_2_j = math.JacobianDenseTracking(T[currentStateID], R[currentStateID], T[linkID], R[linkID] ) ;
          residualCamera = math.ResidualDenseTracking(T[currentStateID], R[currentStateID], T[linkID], R[linkID],
                                                      slidingWindow[currentStateID]->cameraLink[linkID].T_bi_2_bj,
                                                      slidingWindow[currentStateID]->cameraLink[linkID].R_bi_2_bj ) ;
//          residualCamera.segment(0, 3) = R[linkID].transpose()*(T[currentStateID] - T[linkID]) - slidingWindow[currentStateID]->cameraLink[linkID].T_bi_2_bj;
//          residualCamera.segment(3, 3).setZero();
//          residualCamera.segment(6, 3) = 2.0 * (Quaterniond(slidingWindow[currentStateID]->cameraLink[linkID].R_bi_2_bj.transpose()) * Quaterniond(R[linkID].transpose()*R[currentStateID])).vec();

          tmpP_inv.setZero();
          tmpP_inv.block(0, 0, 3, 3) = slidingWindow[currentStateID]->cameraLink[linkID].P_inv.block(0, 0, 3, 3) ;
          tmpP_inv.block(0, 6, 3, 3) = slidingWindow[currentStateID]->cameraLink[linkID].P_inv.block(0, 3, 3, 3) ;
          tmpP_inv.block(6, 0, 3, 3) = slidingWindow[currentStateID]->cameraLink[linkID].P_inv.block(3, 0, 3, 3) ;
          tmpP_inv.block(6, 6, 3, 3) = slidingWindow[currentStateID]->cameraLink[linkID].P_inv.block(3, 3, 3, 3) ;
          double r_v = residualCamera.segment(0, 3).norm() ;
          if ( r_v > huber_r_v ){
            tmpP_inv *= huber_r_v/r_v ;
          }
          double r_w = residualCamera.segment(6, 3).norm() ;
          if ( r_w > huber_r_w ){
            tmpP_inv *= huber_r_w/r_w ;
          }

          H_i_2_j_T = H_i_2_j.transpose();
          H_i_2_j_T *= tmpP_inv;

          tmpHTH = H_i_2_j_T  *  H_i_2_j;
          tmpHTb = H_i_2_j_T * residualCamera;

          int currentStateIDIndex = currentStateID - head;
          if ( currentStateIDIndex < 0){
            currentStateIDIndex += slidingWindowSize;
          }
          int linkIDIndex = linkID - head  ;
          if (linkIDIndex < 0){
            linkIDIndex += slidingWindowSize;
          }

          HTH.block(currentStateIDIndex * 9, currentStateIDIndex * 9, 9, 9) += tmpHTH.block(0, 0, 9, 9);
          HTH.block(currentStateIDIndex * 9, linkIDIndex * 9, 9, 9) += tmpHTH.block(0, 9, 9, 9);
          HTH.block(linkIDIndex * 9, currentStateIDIndex * 9, 9, 9) += tmpHTH.block(9, 0, 9, 9);
          HTH.block(linkIDIndex * 9, linkIDIndex * 9, 9, 9) += tmpHTH.block(9, 9, 9, 9);

          HTb.segment(currentStateIDIndex * 9, 9) -= tmpHTb.segment(0, 9);
          HTb.segment(linkIDIndex * 9, 9) -= tmpHTb.segment(9, 9);
        }
      }
//      printf("[numList in BA]=%d\n", numList ) ;

      //solve the BA
      //cout << "HTH\n" << HTH << endl;

      LLT<MatrixXd> lltOfHTH = HTH.llt();
      ComputationInfo info = lltOfHTH.info();
      if (info == Success)
      {
        VectorXd dx = lltOfHTH.solve(HTb);

 //       printf("[BA] %d %f\n",iterNum, dx.norm() ) ;

 //       cout << iterNum << endl ;
 //       cout << dx.transpose() << endl ;

        //cout << "iteration " << iterNum << "\n" << dx << endl;
#ifdef DEBUG_INFO
        geometry_msgs::Vector3 to_pub ;
        to_pub.x = dx.norm() ;
        printf("%d %f\n",iterNum, to_pub.x ) ;
        pub_BA.publish( to_pub ) ;
#endif

        VectorXd errorUpdate(9);
        for (int i = 0; i < numOfState; i++)
        {
          int k = head + i;
          if (k >= slidingWindowSize){
            k -= slidingWindowSize;
          }
          errorUpdate = dx.segment(i * 9, 9);
          T[k] += errorUpdate.segment(0, 3);
          vel[k] += errorUpdate.segment(3, 3);

          Quaterniond q(R[k]);
          Quaterniond dq;
          dq.x() = errorUpdate(6) * 0.5;
          dq.y() = errorUpdate(7) * 0.5;
          dq.z() = errorUpdate(8) * 0.5;
          dq.w() = sqrt(1 - SQ(dq.x()) * SQ(dq.y()) * SQ(dq.z()));
          R[k] = (q * dq).normalized().toRotationMatrix();
        }
        //cout << T[head].transpose() << endl;
      }
      else
      {
        ROS_WARN("LLT error!!!");
        iterNum = maxIterationBA;
        //cout << HTH << endl;
        //FullPivLU<MatrixXd> luHTH(HTH);
        //printf("rank = %d\n", luHTH.rank() ) ;
        //HTH.rank() ;
      }
    }

    // Include correction for information vector
    int m_sz = margin.size;
    VectorXd r0 = VectorXd::Zero(STATE_SZ(numOfState - 1));
    for (int i = numOfState - 2; i >= 0; i--)
    {
      int k = head + i;
      if (k >= slidingWindowSize){
        k -= slidingWindowSize;
      }
      //dp, dv, dq
      r0.segment(STATE_SZ(i), 3) = T[k] - slidingWindow[k]->T_bk_2_b0;
      r0.segment(STATE_SZ(i) + 3, 3) = vel[k] - slidingWindow[k]->v_bk;
      r0.segment(STATE_SZ(i) + 6, 3) = Quaterniond(slidingWindow[k]->R_bk_2_b0.transpose() * R[k]).vec() * 2.0;
    }
    margin.bp.segment(0, STATE_SZ(m_sz)) += margin.Ap.block(0, 0, STATE_SZ(m_sz), STATE_SZ(m_sz))*r0;

    for (int i = 0; i < numOfState ; i++)
    {
        int k = head + i;
        if (k >= slidingWindowSize){
          k -= slidingWindowSize;
        }
        slidingWindow[k]->R_bk_2_b0 = R[k];
        slidingWindow[k]->T_bk_2_b0 = T[k];
        slidingWindow[k]->v_bk = vel[k];
    }
}
/// Builds RViz markers for a PR2 gripper (palm, two finger tips, and a small
/// sphere at the template extraction point) posed according to @p pose.
///
/// @param ns            marker namespace for all emitted markers.
/// @param frame_id      TF frame the markers are expressed in.
/// @param pose          gripper (palm) pose in @p frame_id.
/// @param gripper_state opening of the gripper; mapped linearly to the lateral
///                      offset of each finger tip (0 = closed).
/// @return palm, left finger, right finger and extraction-point markers.
vector<Marker> Visualization::visualizeGripper(const string& ns,
    const string& frame_id, const geometry_msgs::Pose& pose, double gripper_state) const
{
  // Gripper pose decomposed into translation + rotation for Eigen math.
  Vector3d trans(pose.position.x, pose.position.y, pose.position.z);
  Quaterniond rot(pose.orientation.w, pose.orientation.x, pose.orientation.y, pose.orientation.z);

  Marker l_finger = visualizeGripperPart("package://pr2_description/meshes/gripper_v0"
    "/l_finger_tip.dae", ns, frame_id, 0);
  // NOTE: the right finger reuses the left finger mesh; it is mirrored below
  // with a pi rotation about the x-axis.
  Marker r_finger = visualizeGripperPart("package://pr2_description/meshes/gripper_v0"
    "/l_finger_tip.dae", ns, frame_id, 1);
  Marker palm = visualizeGripperPart("package://pr2_description/meshes/gripper_v0"
    "/gripper_palm.dae", ns, frame_id, 2);

  // NOTE(review): the bounding box marker is constructed but never added to
  // the returned vector — presumably intentional; confirm with callers.
  Marker bounding_box = visualizeGripperBox("bounding_box", frame_id, 4);
  bounding_box.pose = pose;

  // Composes the gripper pose with a part pose given in the gripper frame and
  // writes the result into the marker. (For identity local rotations this is
  // exactly the original per-part code; for the right finger it matches the
  // original rot * local pre-multiplication.)
  auto setPartPose = [&](Marker& part, const Vector3d& local_pos,
                         const Quaterniond& local_rot)
  {
    Vector3d position = rot * local_pos + trans;
    Quaterniond rotation = rot * local_rot;
    part.pose.position.x = position.x();
    part.pose.position.y = position.y();
    part.pose.position.z = position.z();
    part.pose.orientation.w = rotation.w();
    part.pose.orientation.x = rotation.x();
    part.pose.orientation.y = rotation.y();
    part.pose.orientation.z = rotation.z();
  };

  setPartPose(palm, Vector3d(-0.051, 0, -0.025), Quaterniond::Identity());

  // gripper_state maps linearly to the y-offset of each finger tip.
  double finger_y_delta = gripper_state * 0.8 / 2;
  double finger_x_delta = 0;

  setPartPose(l_finger,
      Vector3d(0.1175 - finger_x_delta, 0.015 + finger_y_delta, -0.025),
      Quaterniond::Identity());
  setPartPose(r_finger,
      Vector3d(0.1175 - finger_x_delta, -0.015 - finger_y_delta, -0.025),
      Quaterniond(AngleAxisd(M_PI, Vector3d(1, 0, 0))));

  vector<Marker> result;
  result.push_back(palm);
  result.push_back(l_finger);
  result.push_back(r_finger);

  /* palm marker: small sphere at the template extraction point */
  Vector3d position;
  getTemplateExtractionPoint(position);
  position = rot * position + trans;

  Marker palm_marker;
  palm_marker.header.frame_id = frame_id;
  palm_marker.header.stamp = ros::Time::now();
  palm_marker.ns = ns;
  palm_marker.id = 3;
  palm_marker.type = Marker::SPHERE;
  palm_marker.action = Marker::ADD;
  palm_marker.pose.position.x = position.x();
  palm_marker.pose.position.y = position.y();
  palm_marker.pose.position.z = position.z();
  // Sphere orientation is irrelevant; use identity.
  palm_marker.pose.orientation.x = 0.0;
  palm_marker.pose.orientation.y = 0.0;
  palm_marker.pose.orientation.z = 0.0;
  palm_marker.pose.orientation.w = 1.0;
  palm_marker.scale.x = 0.01;
  palm_marker.scale.y = 0.01;
  palm_marker.scale.z = 0.01;
  // Opaque green (b defaults to 0 in the message).
  palm_marker.color.a = 1.0;
  palm_marker.color.r = 0.0;
  palm_marker.color.g = 1.0;

  result.push_back(palm_marker);
  return result;
}
// EKF measurement update driven by a camera-odometry message.
//
// The message carries a camera pose; with the fixed camera-in-IMU extrinsics
// (position (0, -0.05, +0.02), orientation q_ic = (w=0, x=0, y=-1, z=0)) it is
// converted to the IMU pose in the world frame, expressed as position plus
// roll/pitch/yaw, and fused into the global state `x` / covariance `cov_x`
// with a standard Kalman update.
// NOTE(review): the exact frame directions implied by g_cw / g_ic are taken
// from the comments below — confirm against the publisher of this topic.
void odom_callback(const nav_msgs::Odometry::ConstPtr &msg)
{
    //your code for update
    //camera position in the IMU frame = (0, -0.05, +0.02)
    //camera orientaion in the IMU frame = Quaternion(0, 0, -1, 0); w x y z, respectively
    // z: 6-d measurement [position; roll/pitch/yaw]; R: its noise covariance.
    VectorXd z = VectorXd::Zero(_DIM_LEN << 1);
    MatrixXd R = MatrixXd::Zero(_DIM_LEN << 1, _DIM_LEN << 1);
    {   // get the measurement from the msg
        auto q = msg->pose.pose.orientation;
        auto p = msg->pose.pose.position;
        // Eigen's Quaterniond constructor takes (w, x, y, z).
        Quaterniond q_ic(0, 0, -1, 0);
        Quaterniond q_cw(q.w, q.x, q.y, q.z);

        // Build 4x4 homogeneous transforms from the quaternions/translations.
        MatrixXd g_cw = MatrixXd::Zero(4, 4), g_ic = MatrixXd::Zero(4, 4);

        g_cw.block(0, 0, 3, 3) << q_cw.toRotationMatrix();
        g_cw.block(0, 3, 3, 1) << p.x, p.y, p.z;
        g_cw(3, 3) = 1.0;

        g_ic.block(0, 0, 3, 3) << q_ic.toRotationMatrix();
        g_ic.block(0, 3, 3, 1) << 0, -0.05, +0.02;
        g_ic(3, 3) = 1.0;

        // IMU-in-world pose: invert the composed chain through the camera.
        MatrixXd g_wi = (g_ic * g_cw).inverse();
        //ROS_WARN_STREAM("g_ic = " << endl << g_ic);
        //ROS_WARN_STREAM("g_cw = " << endl << g_cw);
        //ROS_WARN_STREAM("g_wi = " << endl << g_wi);


        // Measurement vector: translation, then RPY extracted from the
        // rotation block.
        z <<
          g_wi.block(0, 3, 3, 1),
                     RotMtoRPY(g_wi.block(0, 0, 3, 3));

        // Re-publish the converted measurement (as an odometry message) for
        // debugging / visualization.
        nav_msgs::Odometry odom = *msg;
        Quaterniond qq ;
        qq = RPYtoR(z.segment(3, 3)).block<3, 3>(0, 0);
        odom.pose.pose.position.x = z(0);
        odom.pose.pose.position.y = z(1);
        odom.pose.pose.position.z = z(2);
        odom.pose.pose.orientation.w = qq.w();
        odom.pose.pose.orientation.x = qq.x();
        odom.pose.pose.orientation.y = qq.y();
        odom.pose.pose.orientation.z = qq.z();
        msm_pub.publish(odom);

#if 1
        // Fixed measurement noise: 2 cm std-dev on position, 2 deg std-dev on
        // attitude (acos(-1) == pi, so (2/180)*pi radians).
        R(0, 0) = 0.02 * 0.02;
        R(1, 1) = 0.02 * 0.02;
        R(2, 2) = 0.02 * 0.02;
        R(3, 3) = (2.0/180.0)*acos(-1) * (2.0/180.0)*acos(-1);
        R(4, 4) = (2.0/180.0)*acos(-1) * (2.0/180.0)*acos(-1);
        R(5, 5) = (2.0/180.0)*acos(-1) * (2.0/180.0)*acos(-1);
#endif
    }

    //ROS_WARN("[update] preparation done");

    // Linear measurement model: z directly observes the position and
    // orientation blocks of the state x.
    MatrixXd C_t = MatrixXd::Zero(_DIM_LEN << 1, _STT_LEN);
    MatrixXd K_t = MatrixXd::Zero(_STT_LEN, _DIM_LEN << 1);
    {   // compute the C_t and K_t
        C_t.block(_P_M_BEG, _POS_BEG, _P_M_LEN, _POS_LEN) <<
                Matrix3d::Identity();
        C_t.block(_O_M_BEG, _ORT_BEG, _O_M_LEN, _ORT_LEN) <<
                Matrix3d::Identity();
        // Kalman gain: K = P C^T (C P C^T + R)^-1.
        K_t = cov_x * C_t.transpose() * (C_t * cov_x * C_t.transpose() + R).inverse();
    }

    //ROS_WARN("[update] C_t K_t done");
#if 1
    {
        VectorXd error = z - C_t * x;
        // Wrap the angular components of the residual into (-pi, pi] so a
        // crossing of +/-pi does not look like a 2*pi jump. (A single wrap
        // suffices here; assumes the raw residual stays within +/-3*pi.)
        for (int i = _O_M_BEG; i < _O_M_END; ++i)
        {
            if (error(i) < -_PI)
            {
                //ROS_WARN_STREAM("error = " << error.transpose() << endl);
                error(i) += 2.0 * _PI;
                //ROS_WARN_STREAM("ERROR = " << error.transpose() << endl);
            }
            if (error(i) > _PI)
            {
                //ROS_WARN_STREAM("error = " << error.transpose() << endl);
                error(i) -= 2.0 * _PI;
                //ROS_WARN_STREAM("ERROR = " << error.transpose() << endl);
            }

        }
        // Posterior state and covariance: x += K e; P = (I - K C) P.
        x = x + K_t * error;
        cov_x = cov_x - K_t * C_t * cov_x;
    }
#endif
    //ROS_WARN("[update] x cov_x done");

    //pubOdometry();
}
Exemplo n.º 24
0
/// Publishes the minimal SVO output set: an info message, a (optionally
/// annotated) pyramid image every nth frame, and the current pose with
/// covariance.
///
/// @param img       raw camera image (used only when slam is paused).
/// @param frame     current frame; may be NULL when tracking is not running.
/// @param slam      mono frame handler supplying stage/quality/map info.
/// @param timestamp frame timestamp in seconds.
void Visualizer::publishMinimal(
    const cv::Mat& img,
    const FramePtr& frame,
    const FrameHandlerMono& slam,
    const double timestamp)
{
  ++trace_id_;
  std_msgs::Header header_msg;
  header_msg.frame_id = "/camera";
  header_msg.seq = trace_id_;
  header_msg.stamp = ros::Time(timestamp);

  // publish info msg.
  if(pub_info_.getNumSubscribers() > 0)
  {
    svo_msgs::Info msg_info;
    msg_info.header = header_msg;
    msg_info.processing_time = slam.lastProcessingTime();
    // BUGFIX: this was resize() followed by push_back() in the loop, which
    // produced a vector of twice the intended length whose first half was
    // default-constructed (zero) ids. reserve() keeps the vector empty.
    msg_info.keyframes.reserve(slam.map().keyframes_.size());
    for(list<FramePtr>::const_iterator it=slam.map().keyframes_.begin(); it!=slam.map().keyframes_.end(); ++it)
      msg_info.keyframes.push_back((*it)->id_);
    msg_info.stage = static_cast<int>(slam.stage());
    msg_info.tracking_quality = static_cast<int>(slam.trackingQuality());
    if(frame != NULL)
      msg_info.num_matches = slam.lastNumObservations();
    else
      msg_info.num_matches = 0;
    pub_info_.publish(msg_info);
  }

  if(frame == NULL)
  {
    if(pub_images_.getNumSubscribers() > 0 && slam.stage() == FrameHandlerBase::STAGE_PAUSED)
    {
      // Display image when slam is not running.
      cv_bridge::CvImage img_msg;
      img_msg.header.stamp = ros::Time::now();
      img_msg.header.frame_id = "/image";
      img_msg.image = img;
      img_msg.encoding = sensor_msgs::image_encodings::MONO8;
      pub_images_.publish(img_msg.toImageMsg());
    }
    return;
  }

  // Publish pyramid-image every nth-frame.
  if(img_pub_nth_ > 0 && trace_id_%img_pub_nth_ == 0 && pub_images_.getNumSubscribers() > 0)
  {
    const int scale = (1<<img_pub_level_);
    cv::Mat img_rgb(frame->img_pyr_[img_pub_level_].size(), CV_8UC3);
    cv::cvtColor(frame->img_pyr_[img_pub_level_], img_rgb, CV_GRAY2RGB);

    if(slam.stage() == FrameHandlerBase::STAGE_SECOND_FRAME)
    {
      // During initialization, draw the feature-track lines between the
      // reference and current frame.
      // NOTE(review): assumes px_ref and px_cur have equal length — only
      // it_ref is checked against end().
      const vector<cv::Point2f>& px_ref(slam.initFeatureTrackRefPx());
      const vector<cv::Point2f>& px_cur(slam.initFeatureTrackCurPx());
      for(vector<cv::Point2f>::const_iterator it_ref=px_ref.begin(), it_cur=px_cur.begin();
          it_ref != px_ref.end(); ++it_ref, ++it_cur)
        cv::line(img_rgb,
                 cv::Point2f(it_cur->x/scale, it_cur->y/scale),
                 cv::Point2f(it_ref->x/scale, it_ref->y/scale), cv::Scalar(0,255,0), 2);
    }

    if(img_pub_level_ == 0)
    {
      // Full resolution: draw edgelets as gradient-aligned line segments and
      // corners as filled squares.
      for(Features::iterator it=frame->fts_.begin(); it!=frame->fts_.end(); ++it)
      {
        if((*it)->type == Feature::EDGELET)
          cv::line(img_rgb,
                   cv::Point2f((*it)->px[0]+3*(*it)->grad[1], (*it)->px[1]-3*(*it)->grad[0]),
                   cv::Point2f((*it)->px[0]-3*(*it)->grad[1], (*it)->px[1]+3*(*it)->grad[0]),
                   cv::Scalar(255,0,255), 2);
        else
          cv::rectangle(img_rgb,
                        cv::Point2f((*it)->px[0]-2, (*it)->px[1]-2),
                        cv::Point2f((*it)->px[0]+2, (*it)->px[1]+2),
                        cv::Scalar(0,255,0), CV_FILLED);
      }
    }
    // BUGFIX: this chain used two independent if-statements, so at level 0 the
    // trailing else-branch also ran and over-drew degenerate rectangles on top
    // of the features drawn above. `else if` makes the three cases exclusive.
    else if(img_pub_level_ == 1)
    {
      for(Features::iterator it=frame->fts_.begin(); it!=frame->fts_.end(); ++it)
        cv::rectangle(img_rgb,
                      cv::Point2f((*it)->px[0]/scale-1, (*it)->px[1]/scale-1),
                      cv::Point2f((*it)->px[0]/scale+1, (*it)->px[1]/scale+1),
                      cv::Scalar(0,255,0), CV_FILLED);
    }
    else
    {
      // Coarser levels: a single dot per feature.
      for(Features::iterator it=frame->fts_.begin(); it!=frame->fts_.end(); ++it)
        cv::rectangle(img_rgb,
                      cv::Point2f((*it)->px[0]/scale, (*it)->px[1]/scale),
                      cv::Point2f((*it)->px[0]/scale, (*it)->px[1]/scale),
                      cv::Scalar(0,255,0), CV_FILLED);
    }

    cv_bridge::CvImage img_msg;
    img_msg.header = header_msg;
    img_msg.image = img_rgb;
    img_msg.encoding = sensor_msgs::image_encodings::BGR8;
    pub_images_.publish(img_msg.toImageMsg());
  }

  if(pub_pose_.getNumSubscribers() > 0 && slam.stage() == FrameHandlerBase::STAGE_DEFAULT_FRAME)
  {
    Quaterniond q;
    Vector3d p;
    Matrix<double,6,6> Cov;
    if(publish_world_in_cam_frame_)
    {
      // publish world in cam frame
      SE3 T_cam_from_world(frame->T_f_w_* T_world_from_vision_);
      q = Quaterniond(T_cam_from_world.rotation_matrix());
      p = T_cam_from_world.translation();
      Cov = frame->Cov_;
    }
    else
    {
      // publish cam in world frame
      SE3 T_world_from_cam(T_world_from_vision_*frame->T_f_w_.inverse());
      q = Quaterniond(T_world_from_cam.rotation_matrix()*T_world_from_vision_.rotation_matrix().transpose());
      p = T_world_from_cam.translation();
      // Transport the covariance into the world frame via the adjoint.
      Cov = T_world_from_cam.Adj()*frame->Cov_*T_world_from_cam.inverse().Adj();
    }
    geometry_msgs::PoseWithCovarianceStampedPtr msg_pose(new geometry_msgs::PoseWithCovarianceStamped);
    msg_pose->header = header_msg;
    msg_pose->pose.pose.position.x = p[0];
    msg_pose->pose.pose.position.y = p[1];
    msg_pose->pose.pose.position.z = p[2];
    msg_pose->pose.pose.orientation.x = q.x();
    msg_pose->pose.pose.orientation.y = q.y();
    msg_pose->pose.pose.orientation.z = q.z();
    msg_pose->pose.pose.orientation.w = q.w();
    // Copies column-major; equivalent to row-major iff Cov is symmetric,
    // which a pose covariance is.
    for(size_t i=0; i<36; ++i)
      msg_pose->pose.covariance[i] = Cov(i%6, i/6);
    pub_pose_.publish(msg_pose);
  }
}