Example #1
void SlamSystem::processIMU(double dt, const Vector3d &linear_acceleration, const Vector3d &angular_velocity)
{
    Quaterniond dq;

     dq.x() = angular_velocity(0)*dt*0.5;
     dq.y() = angular_velocity(1)*dt*0.5;
     dq.z() = angular_velocity(2)*dt*0.5;
     dq.w() = sqrt(1 - SQ(dq.x()) - SQ(dq.y()) - SQ(dq.z())); // complete w so that dq is (approximately) unit norm

     Matrix3d deltaR(dq);
     //R_c_0 = R_c_0 * deltaR;
     //T_c_0 = ;
     Frame *current = slidingWindow[tail].get();

     Matrix<double, 9, 9> F = Matrix<double, 9, 9>::Zero();
     F.block<3, 3>(0, 3) = Matrix3d::Identity();
     F.block<3, 3>(3, 6) = -current->R_k1_k* vectorToSkewMatrix(linear_acceleration);
     F.block<3, 3>(6, 6) = -vectorToSkewMatrix(angular_velocity);

     Matrix<double, 6, 6> Q = Matrix<double, 6, 6>::Zero();
     Q.block<3, 3>(0, 0) = acc_cov;
     Q.block<3, 3>(3, 3) = gyr_cov;

     Matrix<double, 9, 6> G = Matrix<double, 9, 6>::Zero();
     G.block<3, 3>(3, 0) = -current->R_k1_k;
     G.block<3, 3>(6, 3) = -Matrix3d::Identity();

     current->P_k = (Matrix<double, 9, 9>::Identity() + dt * F) * current->P_k * (Matrix<double, 9, 9>::Identity() + dt * F).transpose() + (dt * G) * Q * (dt * G).transpose();
     //current->R_k1_k = current->R_k1_k*deltaR;
     current->alpha_c_k += current->beta_c_k*dt + current->R_k1_k*linear_acceleration * dt * dt * 0.5 ;
     current->beta_c_k += current->R_k1_k*linear_acceleration*dt;
     current->R_k1_k = current->R_k1_k*deltaR;
     current->timeIntegral += dt;
}
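The delta quaternion above is the usual first-order (small-angle) approximation of the incremental rotation over dt. A standalone sketch (assuming only Eigen; the numbers are made up) comparing it with the exact rotation built from AngleAxisd:

#include <Eigen/Dense>
#include <cmath>
#include <iostream>
using namespace Eigen;

int main()
{
    Vector3d angular_velocity(0.02, -0.01, 0.03); // rad/s, assumed small
    double dt = 0.005;                            // s

    // First-order delta quaternion, as in processIMU above
    Quaterniond dq;
    dq.x() = angular_velocity(0) * dt * 0.5;
    dq.y() = angular_velocity(1) * dt * 0.5;
    dq.z() = angular_velocity(2) * dt * 0.5;
    dq.w() = std::sqrt(1 - dq.x()*dq.x() - dq.y()*dq.y() - dq.z()*dq.z());

    // Exact delta rotation from the rotation vector w*dt
    Vector3d rv = angular_velocity * dt;
    Quaterniond dq_exact(AngleAxisd(rv.norm(), rv.normalized()));

    std::cout << "approx: " << dq.coeffs().transpose() << std::endl;
    std::cout << "exact : " << dq_exact.coeffs().transpose() << std::endl;
    return 0;
}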
void Visualization::transformPose(geometry_msgs::Pose& pose,
    const Vector3d& trans, const Quaterniond& rot)
{
  Vector3d pos;
  pos.x() = pose.position.x;
  pos.y() = pose.position.y;
  pos.z() = pose.position.z;

  pos = rot * pos + trans;

  pose.position.x = pos.x();
  pose.position.y = pos.y();
  pose.position.z = pos.z();

  Quaterniond orientation;
  orientation.x() = pose.orientation.x;
  orientation.y() = pose.orientation.y;
  orientation.z() = pose.orientation.z;
  orientation.w() = pose.orientation.w;

  orientation = rot * orientation;

  pose.orientation.x = orientation.x();
  pose.orientation.y = orientation.y();
  pose.orientation.z = orientation.z();
  pose.orientation.w = orientation.w();
}
Example #3
// qEst = qEst + (qDot - beta*gradF.normalised)*deltaT
// inputs: qEst, w, a, deltaT, beta
// output: qEst 
void UpdateAttitude( 	Quaterniond& qEst, 		// Reference to the current estimate - will be updated to reflect the new estimate
						const Quaterniond& w,	// [0, wx, wy, wz] rad/s
						const Quaterniond& a,	// [0, ax, ay, az] m/s/s
						const double deltaT_sec,// sample period (seconds)
						const double beta ) 	// Gain factor to account for all zero-mean noise (sqrt(3/4)*[0, wx_noise, wy_noise, wz_noise])
{
	// rate of change of orientation
	Quaterniond qDot;
	qDot = (qEst*w).coeffs() * 0.5;
	
	// Jacobian of the objective function F
	MatrixXd J(3,4);
	J << 	-2*qEst.y(), 2*qEst.z(), -2*qEst.w(), 2*qEst.x(),
			 2*qEst.x(), 2*qEst.w(),  2*qEst.z(), 2*qEst.y(),
			 0,			-4*qEst.x(), -4*qEst.y(), 0;
	
	cout << J << endl;
	// The objective function F minimising a measured gravitational field with an assumed gravity vector of 0,0,1
	MatrixXd F(3,1);
	F << 	2*(qEst.x()*qEst.z() - qEst.w()*qEst.y()) - a.x(),
			2*(qEst.w()*qEst.x() + qEst.y()*qEst.z()) - a.y(),
			2*(0.5 - qEst.x()*qEst.x() - qEst.y()*qEst.y()) - a.z();		 
	
	cout << F << endl;
	// The gradient of the solution surface
	MatrixXd gradF(4,1);
	gradF = J.transpose() * F;

	//qEst = qEst + (qDot - beta*gradF.normalized)*deltaT
	// reorder gradF from the Jacobian's [w, x, y, z] column order to Eigen's [x, y, z, w] coeffs() order
	Vector4d gradF_xyzw;
	gradF_xyzw << gradF(1), gradF(2), gradF(3), gradF(0);
	qEst.coeffs() += (qDot.coeffs() - beta*gradF_xyzw.normalized())*deltaT_sec;
}
void pubOdometry()
{
    nav_msgs::Odometry odom;
    {
        odom.header.stamp = _last_imu_stamp;
        odom.header.frame_id = "map";

        odom.pose.pose.position.x = x(0);
        odom.pose.pose.position.y = x(1);
        odom.pose.pose.position.z = x(2);

        Quaterniond q;
        q = RPYtoR(x(3), x(4), x(5)).block<3,3>(0, 0);
        odom.pose.pose.orientation.x = q.x();
        odom.pose.pose.orientation.y = q.y();
        odom.pose.pose.orientation.z = q.z();
        odom.pose.pose.orientation.w = q.w();
    }

    //ROS_WARN("[update] publication done");
    ROS_WARN_STREAM("[final] b_g = " << x.segment(_B_G_BEG, _B_G_LEN).transpose());
    ROS_WARN_STREAM("[final] b_a = " << x.segment(_B_A_BEG, _B_A_LEN).transpose() << endl);
    ///ROS_WARN_STREAM("[final] cov_x = " << endl << cov_x << endl);
    odom_pub.publish(odom);
}
Example #5
void KeyFrame::updateLoopConnection(Vector3d relative_t, Quaterniond relative_q, double relative_yaw)
{
    Eigen::Matrix<double, 8, 1> connected_info;
    connected_info <<relative_t.x(), relative_t.y(), relative_t.z(),
    relative_q.w(), relative_q.x(), relative_q.y(), relative_q.z(),
    relative_yaw;
    loop_info = connected_info;
}
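For reference, the 8x1 vector packed above is laid out as [tx ty tz qw qx qy qz yaw]. A hypothetical unpacking helper (a sketch; the name is not part of the original class) would read it back as:

void unpackLoopInfo(const Eigen::Matrix<double, 8, 1> &info,
                    Vector3d &relative_t, Quaterniond &relative_q, double &relative_yaw)
{
    relative_t = info.head<3>();
    relative_q = Quaterniond(info(3), info(4), info(5), info(6)); // (w, x, y, z)
    relative_yaw = info(7);
}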
Example #6
void KeyFrame::addConnection(int index, KeyFrame* connected_kf, Vector3d relative_t, Quaterniond relative_q, double relative_yaw)
{
    Eigen::Matrix<double, 8, 1> connected_info;
    connected_info <<relative_t.x(), relative_t.y(), relative_t.z(),
    relative_q.w(), relative_q.x(), relative_q.y(), relative_q.z(),
    relative_yaw;
    connection_list.push_back(make_pair(index, connected_info));
}
Example #7
Matrix3d quaternion2mat(Quaterniond q)
{
  Matrix3d m;
  double a = q.w(), b = q.x(), c = q.y(), d = q.z();
  m << a*a + b*b - c*c - d*d, 2*(b*c - a*d), 2*(b*d+a*c),
       2*(b*c+a*d), a*a - b*b + c*c - d*d, 2*(c*d - a*b),
       2*(b*d - a*c), 2*(c*d+a*b), a*a-b*b - c*c + d*d;
  return m;
}
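As a quick sanity check (a sketch, assuming the Eigen includes and using-directives of the surrounding examples), the result should match Eigen's built-in conversion for a unit quaternion:

int main()
{
  Quaterniond q(AngleAxisd(0.3, Vector3d(1, 2, 3).normalized()));
  Matrix3d diff = quaternion2mat(q) - q.toRotationMatrix();
  std::cout << "max difference: " << diff.cwiseAbs().maxCoeff() << std::endl; // expected ~0
  return 0;
}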
Example #8
void BenchmarkNode::runFromFolder(int start)
{
  ofstream outfile;
  outfile.open ("/home/worxli/Datasets/data/associate_unscaled.txt");
  // outfile.open ("/home/worxli/data/test/associate_unscaled.txt");
  for(int img_id = start;;++img_id)
  {

    // load image
    std::stringstream ss;
    ss << "/home/worxli/Datasets/data/img/color" << img_id << ".png";
	  // ss << "/home/worxli/data/test/img/color" << img_id << ".png";
    std::cout << "reading image " << ss.str() << std::endl;
    cv::Mat img(cv::imread(ss.str().c_str(), 0));

    // end loop if no images left
    if(img.empty())
      break;

    assert(!img.empty());

    // process frame
    vo_->addImage(img, img_id);

    // display tracking quality
    if(vo_->lastFrame() != NULL)
    {
      int id = vo_->lastFrame()->id_;
    	std::cout << "Frame-Id: " << id << " \t"
                  << "#Features: " << vo_->lastNumObservations() << " \t"
                  << "Proc. Time: " << vo_->lastProcessingTime()*1000 << "ms \n";

    	// access the pose of the camera via vo_->lastFrame()->T_f_w_.
	//std::cout << vo_->lastFrame()->T_f_w_ << endl;

	//std::count << vo_->lastFrame()->pos() << endl;
  Quaterniond quat = vo_->lastFrame()->T_f_w_.unit_quaternion();
  Vector3d trans = vo_->lastFrame()->T_f_w_.translation();

	outfile << trans.x() << " "
          << trans.y() << " "
          << trans.z() << " " 

          << quat.x()  << " " 
          << quat.y()  << " " 
          << quat.z()  << " " 
          << quat.w()  << " " 

          << "depth/mapped" << img_id << ".png "
          << "img/color" << img_id << ".png\n";

    }
  }
  outfile.close();
}
Example #9
Quaterniond mat2quaternion(Matrix3d m)
{
  //return euler2quaternion(mat2euler(m));
  Quaterniond q;
  double a, b, c, d;
  a = sqrt(1 + m(0, 0) + m(1, 1) + m(2, 2))/2;
  b = (m(2, 1) - m(1, 2))/(4*a);
  c = (m(0, 2) - m(2, 0))/(4*a);
  d = (m(1, 0) - m(0, 1))/(4*a);
  q.w() = a; q.x() = b; q.y() = c; q.z() = d;
  return q;
}
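A minimal round-trip sketch using quaternion2mat from Example #7 (assuming the same Eigen includes); note that the trace-based formula above divides by 4*a, so it is numerically fragile when w is close to zero, i.e. for rotations near 180 degrees:

int main()
{
  Quaterniond q_in(AngleAxisd(0.5, Vector3d::UnitY()));
  Quaterniond q_out = mat2quaternion(quaternion2mat(q_in));
  // q and -q encode the same rotation, so compare coefficients up to sign
  std::cout << q_in.coeffs().transpose() << std::endl;
  std::cout << q_out.coeffs().transpose() << std::endl;
  return 0;
}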
bool addPlaneToCollisionModel(const std::string &armName, double sx, const Quaterniond &q)
{
    std::string arm2Name;
    ros::NodeHandle nh("~") ;

    ros::service::waitForService("/environment_server/set_planning_scene_diff");
    ros::ServiceClient get_planning_scene_client =
      nh.serviceClient<arm_navigation_msgs::GetPlanningScene>("/environment_server/set_planning_scene_diff");

    arm_navigation_msgs::GetPlanningScene::Request planning_scene_req;
    arm_navigation_msgs::GetPlanningScene::Response planning_scene_res;

    arm_navigation_msgs::AttachedCollisionObject att_object;

    att_object.link_name = armName + "_gripper";

    att_object.object.id = "attached_plane";
    att_object.object.operation.operation = arm_navigation_msgs::CollisionObjectOperation::ADD;

    att_object.object.header.frame_id = armName + "_ee" ;
    att_object.object.header.stamp = ros::Time::now();

    arm_navigation_msgs::Shape object;

    object.type = arm_navigation_msgs::Shape::BOX;
    object.dimensions.resize(3);
    object.dimensions[0] = sx;
    object.dimensions[1] = sx;
    object.dimensions[2] = 0.01;

    geometry_msgs::Pose pose;
    pose.position.x = 0 ;
    pose.position.y = 0 ;
    pose.position.z = sx/2 ;


    pose.orientation.x = q.x();
    pose.orientation.y = q.y();
    pose.orientation.z = q.z();
    pose.orientation.w = q.w();



    att_object.object.shapes.push_back(object);
    att_object.object.poses.push_back(pose);

    planning_scene_req.planning_scene_diff.attached_collision_objects.push_back(att_object);

    if(!get_planning_scene_client.call(planning_scene_req, planning_scene_res)) return false;


    return true ;
}
Example #11
geometry_msgs::Quaternion eigenQuaterniondToTfQuaternion( Quaterniond q ){

    geometry_msgs::Quaternion tfq;

    tfq.x = q.x();
    tfq.y = q.y();
    tfq.z = q.z();
    tfq.w = q.w();

    return tfq;

}
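The reverse conversion is often needed as well; a hypothetical counterpart (a sketch, the name is not from the original code) is a one-liner, since Eigen's constructor takes (w, x, y, z):

Quaterniond tfQuaternionToEigenQuaterniond( const geometry_msgs::Quaternion &tfq ){

    return Quaterniond(tfq.w, tfq.x, tfq.y, tfq.z);

}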
Example #12
/**
Euler angle definition: zyx
Rotation matrix: C_body2ned
**/
Quaterniond euler2quaternion(Vector3d euler)
{
  double cr = cos(euler(0)/2);
  double sr = sin(euler(0)/2);
  double cp = cos(euler(1)/2);
  double sp = sin(euler(1)/2);
  double cy = cos(euler(2)/2);
  double sy = sin(euler(2)/2);
  Quaterniond q;
  q.w() = cr*cp*cy + sr*sp*sy;
  q.x() = sr*cp*cy - cr*sp*sy;
  q.y() = cr*sp*cy + sr*cp*sy;
  q.z() = cr*cp*sy - sr*sp*cy;
  return q; 
}
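Given the ZYX convention stated above, the result should equal the composition yaw-about-Z, then pitch-about-Y, then roll-about-X. A small verification sketch (assuming the usual Eigen includes):

int main()
{
  Vector3d euler(0.1, -0.2, 0.3); // roll, pitch, yaw in radians
  Quaterniond q1 = euler2quaternion(euler);
  Quaterniond q2 = AngleAxisd(euler(2), Vector3d::UnitZ())
                 * AngleAxisd(euler(1), Vector3d::UnitY())
                 * AngleAxisd(euler(0), Vector3d::UnitX());
  std::cout << (q1.coeffs() - q2.coeffs()).norm() << std::endl; // expected ~0
  return 0;
}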
Example #13
void TransformerTF2::convertTransformToTf(const Transform &t,
		geometry_msgs::TransformStamped &tOut) {

	Quaterniond eigenQuat = t.getRotationQuat();
	tOut.header.frame_id = t.getFrameParent();
	tOut.header.stamp = ros::Time::fromBoost(t.getTime());
	tOut.child_frame_id = t.getFrameChild();
	tOut.transform.rotation.w = eigenQuat.w();
	tOut.transform.rotation.x = eigenQuat.x();
	tOut.transform.rotation.y = eigenQuat.y();
	tOut.transform.rotation.z = eigenQuat.z();
	tOut.transform.translation.x = t.getTranslation()(0);
	tOut.transform.translation.y = t.getTranslation()(1);
	tOut.transform.translation.z = t.getTranslation()(2);
}
Example #14
geometry_msgs::Pose eigenPoseToROS(const Vector3d &pos, const Quaterniond &orient)
{
    geometry_msgs::Pose pose ;

    pose.position.x = pos.x() ;
    pose.position.y = pos.y() ;
    pose.position.z = pos.z() ;

    pose.orientation.x = orient.x() ;
    pose.orientation.y = orient.y() ;
    pose.orientation.z = orient.z() ;
    pose.orientation.w = orient.w() ;

    return pose ;

}
void ComputeSmdTlsphShape::compute_peratom() {
	double *contact_radius = atom->contact_radius;
	invoked_peratom = update->ntimestep;

	// grow vector array if necessary

	if (atom->nlocal > nmax) {
		memory->destroy(strainVector);
		nmax = atom->nmax;
		memory->create(strainVector, nmax, size_peratom_cols, "strainVector");
		array_atom = strainVector;
	}

	int *mask = atom->mask;
	double **smd_data_9 = atom->smd_data_9;
	int nlocal = atom->nlocal;
	Matrix3d E, eye, R, U, F;
	eye.setIdentity();
	Quaterniond q;
	bool status;

	for (int i = 0; i < nlocal; i++) {
		if (mask[i] & groupbit) {

			F = Map<Matrix3d>(smd_data_9[i]);
			status = PolDec(F, R, U, false); // polar decomposition of the deformation gradient, F = R * U
			if (!status) {
				error->message(FLERR, "Polar decomposition of deformation gradient failed.\n");
			}

			E = 0.5 * (F.transpose() * F - eye); // Green-Lagrange strain
			strainVector[i][0] = contact_radius[i] * (1.0 + E(0, 0));
			strainVector[i][1] = contact_radius[i] * (1.0 + E(1, 1));
			strainVector[i][2] = contact_radius[i] * (1.0 + E(2, 2));

			q = R; // convert pure rotation matrix to quaternion
			strainVector[i][3] = q.w();
			strainVector[i][4] = q.x();
			strainVector[i][5] = q.y();
			strainVector[i][6] = q.z();
		} else {
			for (int j = 0; j < size_peratom_cols; j++) {
				strainVector[i][j] = 0.0;
			}
		}
	}
}
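The strain used above is the Green-Lagrange tensor E = 0.5*(F^T F - I). A tiny standalone sketch (assuming only Eigen) for a 10% uniaxial stretch:

#include <Eigen/Dense>
#include <iostream>
using namespace Eigen;

int main()
{
    Matrix3d F = Matrix3d::Identity();
    F(0, 0) = 1.1; // 10% stretch along x
    Matrix3d E = 0.5 * (F.transpose() * F - Matrix3d::Identity());
    std::cout << E << std::endl; // E(0,0) = 0.105, all other entries 0
    return 0;
}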
Example #16
int main()
{   // Quaterniond specified in (w,x,y,z)
	Quaterniond qEst = Quaterniond(0,0,0,1); // unit z (normalized) // initial guess
	Quaterniond w = Quaterniond(0, M_PI*0.5, 0, 0); // pi/2 rad/s around x
	Quaterniond a = Quaterniond(0, 0, 0, 1); //(0, ax, ay, az) in m/s/s (normalized)
	AngleAxisd offset = AngleAxisd(M_PI/10,Vector3d::UnitX());
	a = a*offset;
	cout << "a: " <<endl << a.coeffs() <<endl;
	double deltaT = 0.01;
	double beta = 0.033;	
	UpdateAttitude(qEst, w, a, deltaT, beta);
	cout << "qEst:" << endl << qEst.coeffs() << endl;
	// manual quaternion multiplication
	Vector4d result;
	result << 
	qEst.w()*w.w() - qEst.x()*w.x() - qEst.y()*w.y() - qEst.z()*w.z() , 
	qEst.w()*w.x() + qEst.x()*w.w() + qEst.y()*w.z() - qEst.z()*w.y() , 
	qEst.w()*w.y() - qEst.x()*w.z() + qEst.y()*w.w() + qEst.z()*w.x() , 
	qEst.w()*w.z() + qEst.x()*w.y() - qEst.y()*w.x() + qEst.z()*w.w() ;

	cout << "manual qDot: " << endl << 0.5*result << endl;
}
vector<Marker> Visualization::visualizeGripper(const string& ns,
    const string& frame_id, const geometry_msgs::Pose& pose, double gripper_state) const
{
  Vector3d trans(pose.position.x, pose.position.y, pose.position.z);
  Quaterniond rot(pose.orientation.w, pose.orientation.x, pose.orientation.y, pose.orientation.z);

  Marker l_finger = visualizeGripperPart("package://pr2_description/meshes/gripper_v0"
    "/l_finger_tip.dae", ns, frame_id, 0);
  Marker r_finger = visualizeGripperPart("package://pr2_description/meshes/gripper_v0"
    "/l_finger_tip.dae", ns, frame_id, 1);
  Marker palm;
  palm = visualizeGripperPart("package://pr2_description/meshes/gripper_v0"
    "/gripper_palm.dae", ns, frame_id, 2);

  Marker bounding_box = visualizeGripperBox("bounding_box", frame_id, 4);
  bounding_box.pose = pose;

  Vector3d position(-0.051, 0, -0.025);
  Quaterniond rotation = Quaterniond::Identity();
  position = rot * position + trans;
  rotation = rotation * rot;
  palm.pose.position.x = position.x();
  palm.pose.position.y = position.y();
  palm.pose.position.z = position.z();
  palm.pose.orientation.w = rotation.w();
  palm.pose.orientation.x = rotation.x();
  palm.pose.orientation.y = rotation.y();
  palm.pose.orientation.z = rotation.z();

  double finger_y_delta = gripper_state * 0.8 / 2;
  double finger_x_delta = 0;

  position = Vector3d(0.1175 - finger_x_delta, 0.015 + finger_y_delta, -0.025);
  rotation = Quaterniond::Identity();
  position = rot * position + trans;
  rotation = rotation * rot;
  l_finger.pose.position.x = position.x();
  l_finger.pose.position.y = position.y();
  l_finger.pose.position.z = position.z();
  l_finger.pose.orientation.w = rotation.w();
  l_finger.pose.orientation.x = rotation.x();
  l_finger.pose.orientation.y = rotation.y();
  l_finger.pose.orientation.z = rotation.z();

  position = Vector3d(0.1175 - finger_x_delta, -0.015 - finger_y_delta, -0.025);
  rotation = Quaterniond(AngleAxisd(M_PI, Vector3d(1, 0, 0)));
  position = rot * position + trans;
  rotation = rot * rotation;
  r_finger.pose.position.x = position.x();
  r_finger.pose.position.y = position.y();
  r_finger.pose.position.z = position.z();
  r_finger.pose.orientation.w = rotation.w();
  r_finger.pose.orientation.x = rotation.x();
  r_finger.pose.orientation.y = rotation.y();
  r_finger.pose.orientation.z = rotation.z();

  vector < Marker > result;
  result.push_back(palm);
  result.push_back(l_finger);
  result.push_back(r_finger);

  /* palm marker */
  getTemplateExtractionPoint(position);
  rotation = Quaterniond::Identity();
  position = rot * position + trans;
  rotation = rotation * rot;

  Marker palm_marker;
  palm_marker.header.frame_id = frame_id;
  palm_marker.header.stamp = ros::Time::now();
  palm_marker.ns = ns;
  palm_marker.id = 3;
  palm_marker.type = Marker::SPHERE;
  palm_marker.action = Marker::ADD;
  palm_marker.pose.position.x = position.x();
  palm_marker.pose.position.y = position.y();
  palm_marker.pose.position.z = position.z();
  palm_marker.pose.orientation.x = 0.0;
  palm_marker.pose.orientation.y = 0.0;
  palm_marker.pose.orientation.z = 0.0;
  palm_marker.pose.orientation.w = 1.0;
  palm_marker.scale.x = 0.01;
  palm_marker.scale.y = 0.01;
  palm_marker.scale.z = 0.01;
  palm_marker.color.a = 1.0;
  palm_marker.color.r = 0.0;
  palm_marker.color.g = 1.0;

  result.push_back(palm_marker);
  return result;
}
Example #18
void SlamSystem::BA()
{
    R.resize(numOfState);
    T.resize(numOfState);
    vel.resize(numOfState);
    for (int i = 0; i < numOfState ; i++)
    {
        int k = head + i;
        if (k >= slidingWindowSize){
          k -= slidingWindowSize;
        }
        R[k] = slidingWindow[k]->R_bk_2_b0;
        T[k] = slidingWindow[k]->T_bk_2_b0;
        vel[k] = slidingWindow[k]->v_bk;
    }

    int sizeofH = 9 * numOfState;
    MatrixXd HTH(sizeofH, sizeofH);
    VectorXd HTb(sizeofH);
    MatrixXd tmpHTH;
    VectorXd tmpHTb;

    MatrixXd H_k1_2_k(9, 18);
    MatrixXd H_k1_2_k_T;
    VectorXd residualIMU(9);

    MatrixXd H_i_2_j(9, 18);
    MatrixXd H_i_2_j_T;
    VectorXd residualCamera(9);
    MatrixXd tmpP_inv(9, 9);

    for (int iterNum = 0; iterNum < maxIterationBA; iterNum++)
    {
      HTH.setZero();
      HTb.setZero();

      //1. prior constraints
      int m_sz = margin.size;
      VectorXd dx = VectorXd::Zero(STATE_SZ(numOfState - 1));

      if (m_sz != (numOfState - 1)){
        assert("prior matrix goes wrong!!!");
      }

      for (int i = numOfState - 2; i >= 0; i--)
      {
        int k = head + i;
        if (k >= slidingWindowSize){
          k -= slidingWindowSize;
        }
        //dp, dv, dq
        dx.segment(STATE_SZ(i), 3) = T[k] - slidingWindow[k]->T_bk_2_b0;
        dx.segment(STATE_SZ(i) + 3, 3) = vel[k] - slidingWindow[k]->v_bk;
        dx.segment(STATE_SZ(i) + 6, 3) = Quaterniond(slidingWindow[k]->R_bk_2_b0.transpose() * R[k]).vec() * 2.0;
      }
      HTH.block(0, 0, STATE_SZ(m_sz), STATE_SZ(m_sz)) += margin.Ap.block(0, 0, STATE_SZ(m_sz), STATE_SZ(m_sz));
      HTb.segment(0, STATE_SZ(m_sz)) -= margin.Ap.block(0, 0, STATE_SZ(m_sz), STATE_SZ(m_sz))*dx;
      HTb.segment(0, STATE_SZ(m_sz)) -= margin.bp.segment(0, STATE_SZ(m_sz));

      //2. imu constraints
      for (int i = numOfState-2; i >= 0; i-- )
      {
        int k = head + i;
        if (k >= slidingWindowSize){
          k -= slidingWindowSize;
        }
        if ( slidingWindow[k]->imuLinkFlag == false){
          continue;
        }
        int k1 = k + 1;
        if (k1 >= slidingWindowSize){
          k1 -= slidingWindowSize;
        }
        residualIMU = math.ResidualImu(T[k], vel[k], R[k],
                                   T[k1], vel[k1], R[k1],
                                   gravity_b0, slidingWindow[k]->timeIntegral,
                                   slidingWindow[k]->alpha_c_k, slidingWindow[k]->beta_c_k, slidingWindow[k]->R_k1_k);
        H_k1_2_k = math.JacobianImu(T[k], vel[k], R[k],
                                   T[k1], vel[k1], R[k1],
                                   gravity_b0, slidingWindow[k]->timeIntegral);
        H_k1_2_k_T = H_k1_2_k.transpose();
        H_k1_2_k_T *= slidingWindow[k]->P_k.inverse();
        HTH.block(i * 9, i * 9, 18, 18) += H_k1_2_k_T  *  H_k1_2_k;
        HTb.segment(i * 9, 18) -= H_k1_2_k_T * residualIMU;
      }

      //3. camera constraints
      for (int i = 0; i < numOfState; i++)
      {
        int currentStateID = head + i;
        if (currentStateID >= slidingWindowSize){
          currentStateID -= slidingWindowSize;
        }
        if (slidingWindow[currentStateID]->keyFrameFlag == false){
          continue;
        }

        list<int>::iterator iter = slidingWindow[currentStateID]->cameraLinkList.begin();
        for (; iter != slidingWindow[currentStateID]->cameraLinkList.end(); iter++ )
        {
          int linkID = *iter;

          H_i_2_j = math.JacobianDenseTracking(T[currentStateID], R[currentStateID], T[linkID], R[linkID] ) ;
          residualCamera = math.ResidualDenseTracking(T[currentStateID], R[currentStateID], T[linkID], R[linkID],
                                                      slidingWindow[currentStateID]->cameraLink[linkID].T_bi_2_bj,
                                                      slidingWindow[currentStateID]->cameraLink[linkID].R_bi_2_bj ) ;
//          residualCamera.segment(0, 3) = R[linkID].transpose()*(T[currentStateID] - T[linkID]) - slidingWindow[currentStateID]->cameraLink[linkID].T_bi_2_bj;
//          residualCamera.segment(3, 3).setZero();
//          residualCamera.segment(6, 3) = 2.0 * (Quaterniond(slidingWindow[currentStateID]->cameraLink[linkID].R_bi_2_bj.transpose()) * Quaterniond(R[linkID].transpose()*R[currentStateID])).vec();

          tmpP_inv.setZero();
          tmpP_inv.block(0, 0, 3, 3) = slidingWindow[currentStateID]->cameraLink[linkID].P_inv.block(0, 0, 3, 3) ;
          tmpP_inv.block(0, 6, 3, 3) = slidingWindow[currentStateID]->cameraLink[linkID].P_inv.block(0, 3, 3, 3) ;
          tmpP_inv.block(6, 0, 3, 3) = slidingWindow[currentStateID]->cameraLink[linkID].P_inv.block(3, 0, 3, 3) ;
          tmpP_inv.block(6, 6, 3, 3) = slidingWindow[currentStateID]->cameraLink[linkID].P_inv.block(3, 3, 3, 3) ;
          double r_v = residualCamera.segment(0, 3).norm() ;
          if ( r_v > huber_r_v ){
            tmpP_inv *= huber_r_v/r_v ;
          }
          double r_w = residualCamera.segment(6, 3).norm() ;
          if ( r_w > huber_r_w ){
            tmpP_inv *= huber_r_w/r_w ;
          }

          H_i_2_j_T = H_i_2_j.transpose();
          H_i_2_j_T *= tmpP_inv;

          tmpHTH = H_i_2_j_T  *  H_i_2_j;
          tmpHTb = H_i_2_j_T * residualCamera;

          int currentStateIDIndex = currentStateID - head;
          if ( currentStateIDIndex < 0){
            currentStateIDIndex += slidingWindowSize;
          }
          int linkIDIndex = linkID - head  ;
          if (linkIDIndex < 0){
            linkIDIndex += slidingWindowSize;
          }

          HTH.block(currentStateIDIndex * 9, currentStateIDIndex * 9, 9, 9) += tmpHTH.block(0, 0, 9, 9);
          HTH.block(currentStateIDIndex * 9, linkIDIndex * 9, 9, 9) += tmpHTH.block(0, 9, 9, 9);
          HTH.block(linkIDIndex * 9, currentStateIDIndex * 9, 9, 9) += tmpHTH.block(9, 0, 9, 9);
          HTH.block(linkIDIndex * 9, linkIDIndex * 9, 9, 9) += tmpHTH.block(9, 9, 9, 9);

          HTb.segment(currentStateIDIndex * 9, 9) -= tmpHTb.segment(0, 9);
          HTb.segment(linkIDIndex * 9, 9) -= tmpHTb.segment(9, 9);
        }
      }
//      printf("[numList in BA]=%d\n", numList ) ;

      //solve the BA
      //cout << "HTH\n" << HTH << endl;

      LLT<MatrixXd> lltOfHTH = HTH.llt();
      ComputationInfo info = lltOfHTH.info();
      if (info == Success)
      {
        VectorXd dx = lltOfHTH.solve(HTb);

 //       printf("[BA] %d %f\n",iterNum, dx.norm() ) ;

 //       cout << iterNum << endl ;
 //       cout << dx.transpose() << endl ;

        //cout << "iteration " << iterNum << "\n" << dx << endl;
#ifdef DEBUG_INFO
        geometry_msgs::Vector3 to_pub ;
        to_pub.x = dx.norm() ;
        printf("%d %f\n",iterNum, to_pub.x ) ;
        pub_BA.publish( to_pub ) ;
#endif

        VectorXd errorUpdate(9);
        for (int i = 0; i < numOfState; i++)
        {
          int k = head + i;
          if (k >= slidingWindowSize){
            k -= slidingWindowSize;
          }
          errorUpdate = dx.segment(i * 9, 9);
          T[k] += errorUpdate.segment(0, 3);
          vel[k] += errorUpdate.segment(3, 3);

          Quaterniond q(R[k]);
          Quaterniond dq;
          dq.x() = errorUpdate(6) * 0.5;
          dq.y() = errorUpdate(7) * 0.5;
          dq.z() = errorUpdate(8) * 0.5;
          dq.w() = sqrt(1 - SQ(dq.x()) - SQ(dq.y()) - SQ(dq.z()));
          R[k] = (q * dq).normalized().toRotationMatrix();
        }
        //cout << T[head].transpose() << endl;
      }
      else
      {
        ROS_WARN("LLT error!!!");
        iterNum = maxIterationBA;
        //cout << HTH << endl;
        //FullPivLU<MatrixXd> luHTH(HTH);
        //printf("rank = %d\n", luHTH.rank() ) ;
        //HTH.rank() ;
      }
    }

    // Include correction for information vector
    int m_sz = margin.size;
    VectorXd r0 = VectorXd::Zero(STATE_SZ(numOfState - 1));
    for (int i = numOfState - 2; i >= 0; i--)
    {
      int k = head + i;
      if (k >= slidingWindowSize){
        k -= slidingWindowSize;
      }
      //dp, dv, dq
      r0.segment(STATE_SZ(i), 3) = T[k] - slidingWindow[k]->T_bk_2_b0;
      r0.segment(STATE_SZ(i) + 3, 3) = vel[k] - slidingWindow[k]->v_bk;
      r0.segment(STATE_SZ(i) + 6, 3) = Quaterniond(slidingWindow[k]->R_bk_2_b0.transpose() * R[k]).vec() * 2.0;
    }
    margin.bp.segment(0, STATE_SZ(m_sz)) += margin.Ap.block(0, 0, STATE_SZ(m_sz), STATE_SZ(m_sz))*r0;

    for (int i = 0; i < numOfState ; i++)
    {
        int k = head + i;
        if (k >= slidingWindowSize){
          k -= slidingWindowSize;
        }
        slidingWindow[k]->R_bk_2_b0 = R[k];
        slidingWindow[k]->T_bk_2_b0 = T[k];
        slidingWindow[k]->v_bk = vel[k];
    }
}
Example #19
void setupSBA(SysSBA &sys)
{
    // Create camera parameters.
    frame_common::CamParams cam_params;
    cam_params.fx = 430; // Focal length in x
    cam_params.fy = 430; // Focal length in y
    cam_params.cx = 320; // X position of principal point
    cam_params.cy = 240; // Y position of principal point
    cam_params.tx = -30; // Baseline (stereo projections are used in this example)

    // Define dimensions of the image.
    int maxx = 640;
    int maxy = 480;

    // Create a plane containing a wall of points.
    Plane middleplane;
    middleplane.resize(3, 2, 10, 5);
    middleplane.translate(0.0, 0.0, 7.0);
    
    Plane leftplane;
    leftplane.resize(1, 2, 6, 12);
    //    leftplane.rotate(-PI/4.0, 0, 1, 0);
    leftplane.translate(0, 0, 7.0);
    
    Plane rightplane;
    rightplane.resize(1, 2, 6, 12);
    //    rightplane.rotate(PI/4.0, 0, 1, 0);
    rightplane.translate(2, 0, 7.0);
    
    Plane topplane;
    topplane.resize(1, 1.5, 6, 12);
    //    topplane.rotate(PI/4.0, 1, 0, 0);
    topplane.translate(2, 0, 7.0);

    // Vector containing the true point positions.
    rightplane.normal = rightplane.normal; 
  
    vector<Point, Eigen::aligned_allocator<Point> > points;
    vector<Eigen::Vector3d, Eigen::aligned_allocator<Eigen::Vector3d> > normals;
    
    points.insert(points.end(), middleplane.points.begin(), middleplane.points.end());
    normals.insert(normals.end(), middleplane.points.size(), middleplane.normal);
    
    points.insert(points.end(), leftplane.points.begin(), leftplane.points.end());
    normals.insert(normals.end(), leftplane.points.size(), leftplane.normal);
    
    points.insert(points.end(), rightplane.points.begin(), rightplane.points.end());
    normals.insert(normals.end(), rightplane.points.size(), rightplane.normal);
    
    points.insert(points.end(), topplane.points.begin(), topplane.points.end());
    normals.insert(normals.end(), topplane.points.size(), topplane.normal);
    
    // Create nodes and add them to the system.
    unsigned int nnodes = 2; // Number of nodes.
    double path_length = 0.5; // Length of the path the nodes traverse.

    // Set the random seed.
    unsigned short seed = (unsigned short)time(NULL);
    seed48(&seed);
    
    unsigned int i = 0, j = 0;
    
    for (i = 0; i < nnodes; i++)
    { 
      // Translate in the x direction over the node path.
      Vector4d trans(i/(nnodes-1.0)*path_length, 0, 0, 1);
            
#if 1
      if (i >= 0)
	{
	  // perturb a little
	  double tnoise = 0.5;	// meters
	  trans.x() += tnoise*(drand48()-0.5);
	  trans.y() += tnoise*(drand48()-0.5);
	  trans.z() += tnoise*(drand48()-0.5);
	}
#endif

      // Don't rotate.
      Quaterniond rot(1, 0, 0, 0);
#if 1
      if (i >= 0)
	{
	  // perturb a little
	  double qnoise = 0.1;	// meters
	  rot.x() += qnoise*(drand48()-0.5);
	  rot.y() += qnoise*(drand48()-0.5);
	  rot.z() += qnoise*(drand48()-0.5);
	}
#endif
      rot.normalize();
      
      // Add a new node to the system.
      sys.addNode(trans, rot, cam_params, false);
    }
        
    double pointnoise = 1.0;
    
    // Add points into the system, and add noise.
    for (i = 0; i < points.size(); i++)
    {
      // Add up to .5 points of noise.
      Vector4d temppoint = points[i];
      temppoint.x() += pointnoise*(drand48() - 0.5);
      temppoint.y() += pointnoise*(drand48() - 0.5);
      temppoint.z() += pointnoise*(drand48() - 0.5);
      sys.addPoint(temppoint);
    }
    
    Vector2d proj2d;
    Vector3d proj, pc, baseline;
    
    Vector3d imagenormal(0, 0, 1);
    
    Matrix3d covar0;
    covar0 << sqrt(imagenormal(0)), 0, 0, 0, sqrt(imagenormal(1)), 0, 0, 0, sqrt(imagenormal(2));
    Matrix3d covar;
    
    Quaterniond rotation;
    Matrix3d rotmat;
    
    unsigned int midindex = middleplane.points.size();
    unsigned int leftindex = midindex + leftplane.points.size();
    unsigned int rightindex = leftindex + rightplane.points.size();
    printf("Normal for Middle Plane: [%f %f %f], index %d -> %d\n", middleplane.normal.x(), middleplane.normal.y(), middleplane.normal.z(), 0, midindex-1);
    printf("Normal for Left Plane:   [%f %f %f], index %d -> %d\n", leftplane.normal.x(), leftplane.normal.y(), leftplane.normal.z(), midindex, leftindex-1);
    printf("Normal for Right Plane:  [%f %f %f], index %d -> %d\n", rightplane.normal.x(), rightplane.normal.y(), rightplane.normal.z(), leftindex, rightindex-1);
    
    // Project points into nodes.
    for (i = 0; i < points.size(); i++)
    {
      for (j = 0; j < sys.nodes.size(); j++)
      {
        // Project the point into the node's image coordinate system.
        sys.nodes[j].setProjection();
        sys.nodes[j].project2im(proj2d, points[i]);
        
        
        // Camera coords for right camera
        baseline << sys.nodes[j].baseline, 0, 0;
        pc = sys.nodes[j].Kcam * (sys.nodes[j].w2n*points[i] - baseline); 
        proj.head<2>() = proj2d;
        proj(2) = pc(0)/pc(2);
        
        // If valid (within the range of the image size), add the stereo 
        // projection to SBA.
        if (proj.x() > 0 && proj.x() < maxx && proj.y() > 0 && proj.y() < maxy)
        {
          sys.addStereoProj(j, i, proj);
          
          // Create the covariance matrix: 
          // image plane normal = [0 0 1]
          // wall normal = [0 0 -1]
          // covar = (R)T*[0 0 0;0 0 0;0 0 1]*R
          
          rotation.setFromTwoVectors(imagenormal, normals[i]);
          rotmat = rotation.toRotationMatrix();
          covar = rotmat.transpose()*covar0*rotmat;
          
	  //          if (!(i % sys.nodes.size() == j))
	  //            sys.setProjCovariance(j, i, covar);
        }
      }
    }
    
    // Add noise to node position.
    
    double transscale = 2.0;
    double rotscale = 0.2;
    
    // Don't actually add noise to the first node, since it's fixed.
    for (i = 1; i < sys.nodes.size(); i++)
    {
      Vector4d temptrans = sys.nodes[i].trans;
      Quaterniond tempqrot = sys.nodes[i].qrot;
      
      // Add error to both translation and rotation.
      temptrans.x() += transscale*(drand48() - 0.5);
      temptrans.y() += transscale*(drand48() - 0.5);
      temptrans.z() += transscale*(drand48() - 0.5);
      tempqrot.x() += rotscale*(drand48() - 0.5);
      tempqrot.y() += rotscale*(drand48() - 0.5);
      tempqrot.z() += rotscale*(drand48() - 0.5);
      tempqrot.normalize();
      
      sys.nodes[i].trans = temptrans;
      sys.nodes[i].qrot = tempqrot;
      
      // These methods should be called to update the node.
      sys.nodes[i].normRot();
      sys.nodes[i].setTransform();
      sys.nodes[i].setProjection();
      sys.nodes[i].setDr(true);
    }
        
}
Example #20
void Visualizer::publishMinimal(
    const cv::Mat& img,
    const FramePtr& frame,
    const FrameHandlerMono& slam,
    const double timestamp)
{
  ++trace_id_;
  std_msgs::Header header_msg;
  header_msg.frame_id = "/camera";
  header_msg.seq = trace_id_;
  header_msg.stamp = ros::Time(timestamp);

  // publish info msg.
  if(pub_info_.getNumSubscribers() > 0)
  {
    svo_msgs::Info msg_info;
    msg_info.header = header_msg;
    msg_info.processing_time = slam.lastProcessingTime();
    msg_info.keyframes.reserve(slam.map().keyframes_.size());
    for(list<FramePtr>::const_iterator it=slam.map().keyframes_.begin(); it!=slam.map().keyframes_.end(); ++it)
      msg_info.keyframes.push_back((*it)->id_);
    msg_info.stage = static_cast<int>(slam.stage());
    msg_info.tracking_quality = static_cast<int>(slam.trackingQuality());
    if(frame != NULL)
      msg_info.num_matches = slam.lastNumObservations();
    else
      msg_info.num_matches = 0;
    pub_info_.publish(msg_info);
  }

  if(frame == NULL)
  {
    if(pub_images_.getNumSubscribers() > 0 && slam.stage() == FrameHandlerBase::STAGE_PAUSED)
    {
      // Display image when slam is not running.
      cv_bridge::CvImage img_msg;
      img_msg.header.stamp = ros::Time::now();
      img_msg.header.frame_id = "/image";
      img_msg.image = img;
      img_msg.encoding = sensor_msgs::image_encodings::MONO8;
      pub_images_.publish(img_msg.toImageMsg());
    }
    return;
  }

  // Publish pyramid-image every nth-frame.
  if(img_pub_nth_ > 0 && trace_id_%img_pub_nth_ == 0 && pub_images_.getNumSubscribers() > 0)
  {
    const int scale = (1<<img_pub_level_);
    cv::Mat img_rgb(frame->img_pyr_[img_pub_level_].size(), CV_8UC3);
    cv::cvtColor(frame->img_pyr_[img_pub_level_], img_rgb, CV_GRAY2RGB);

    if(slam.stage() == FrameHandlerBase::STAGE_SECOND_FRAME)
    {
      // During initialization, draw lines.
      const vector<cv::Point2f>& px_ref(slam.initFeatureTrackRefPx());
      const vector<cv::Point2f>& px_cur(slam.initFeatureTrackCurPx());
      for(vector<cv::Point2f>::const_iterator it_ref=px_ref.begin(), it_cur=px_cur.begin();
          it_ref != px_ref.end(); ++it_ref, ++it_cur)
        cv::line(img_rgb,
                 cv::Point2f(it_cur->x/scale, it_cur->y/scale),
                 cv::Point2f(it_ref->x/scale, it_ref->y/scale), cv::Scalar(0,255,0), 2);
    }

    if(img_pub_level_ == 0)
    {
      for(Features::iterator it=frame->fts_.begin(); it!=frame->fts_.end(); ++it)
      {
        if((*it)->type == Feature::EDGELET)
          cv::line(img_rgb,
                   cv::Point2f((*it)->px[0]+3*(*it)->grad[1], (*it)->px[1]-3*(*it)->grad[0]),
                   cv::Point2f((*it)->px[0]-3*(*it)->grad[1], (*it)->px[1]+3*(*it)->grad[0]),
                   cv::Scalar(255,0,255), 2);
        else
          cv::rectangle(img_rgb,
                        cv::Point2f((*it)->px[0]-2, (*it)->px[1]-2),
                        cv::Point2f((*it)->px[0]+2, (*it)->px[1]+2),
                        cv::Scalar(0,255,0), CV_FILLED);
      }
    }
    if(img_pub_level_ == 1)
      for(Features::iterator it=frame->fts_.begin(); it!=frame->fts_.end(); ++it)
        cv::rectangle(img_rgb,
                      cv::Point2f((*it)->px[0]/scale-1, (*it)->px[1]/scale-1),
                      cv::Point2f((*it)->px[0]/scale+1, (*it)->px[1]/scale+1),
                      cv::Scalar(0,255,0), CV_FILLED);
    else
      for(Features::iterator it=frame->fts_.begin(); it!=frame->fts_.end(); ++it)
        cv::rectangle(img_rgb,
                      cv::Point2f((*it)->px[0]/scale, (*it)->px[1]/scale),
                      cv::Point2f((*it)->px[0]/scale, (*it)->px[1]/scale),
                      cv::Scalar(0,255,0), CV_FILLED);

    cv_bridge::CvImage img_msg;
    img_msg.header = header_msg;
    img_msg.image = img_rgb;
    img_msg.encoding = sensor_msgs::image_encodings::BGR8;
    pub_images_.publish(img_msg.toImageMsg());
  }

  if(pub_pose_.getNumSubscribers() > 0 && slam.stage() == FrameHandlerBase::STAGE_DEFAULT_FRAME)
  {
    Quaterniond q;
    Vector3d p;
    Matrix<double,6,6> Cov;
    if(publish_world_in_cam_frame_)
    {
      // publish world in cam frame
      SE3 T_cam_from_world(frame->T_f_w_* T_world_from_vision_);
      q = Quaterniond(T_cam_from_world.rotation_matrix());
      p = T_cam_from_world.translation();
      Cov = frame->Cov_;
    }
    else
    {
      // publish cam in world frame
      SE3 T_world_from_cam(T_world_from_vision_*frame->T_f_w_.inverse());
      q = Quaterniond(T_world_from_cam.rotation_matrix()*T_world_from_vision_.rotation_matrix().transpose());
      p = T_world_from_cam.translation();
      Cov = T_world_from_cam.Adj()*frame->Cov_*T_world_from_cam.inverse().Adj();
    }
    geometry_msgs::PoseWithCovarianceStampedPtr msg_pose(new geometry_msgs::PoseWithCovarianceStamped);
    msg_pose->header = header_msg;
    msg_pose->pose.pose.position.x = p[0];
    msg_pose->pose.pose.position.y = p[1];
    msg_pose->pose.pose.position.z = p[2];
    msg_pose->pose.pose.orientation.x = q.x();
    msg_pose->pose.pose.orientation.y = q.y();
    msg_pose->pose.pose.orientation.z = q.z();
    msg_pose->pose.pose.orientation.w = q.w();
    for(size_t i=0; i<36; ++i)
      msg_pose->pose.covariance[i] = Cov(i%6, i/6);
    pub_pose_.publish(msg_pose);
  }
}
Example #21
void setupSBA(SysSBA &sys)
{
    // Create camera parameters.
    frame_common::CamParams cam_params;
    cam_params.fx = 430; // Focal length in x
    cam_params.fy = 430; // Focal length in y
    cam_params.cx = 320; // X position of principal point
    cam_params.cy = 240; // Y position of principal point
    cam_params.tx = 0;   // Baseline (no baseline since this is monocular)

    // Define dimensions of the image.
    int maxx = 640;
    int maxy = 480;

    // Create a plane containing a wall of points.
    int npts_x = 10; // Number of points on the plane in x
    int npts_y = 5;  // Number of points on the plane in y
    
    double plane_width = 5;     // Width of the plane on which points are positioned (x)
    double plane_height = 2.5;    // Height of the plane on which points are positioned (y)
    double plane_distance = 5; // Distance of the plane from the cameras (z)

    // Vector containing the true point positions.
    vector<Point, Eigen::aligned_allocator<Point> > points;

    for (int ix = 0; ix < npts_x ; ix++)
    {
      for (int iy = 0; iy < npts_y ; iy++)
      {
        // Create a point on the plane in a grid.
        points.push_back(Point(plane_width/npts_x*(ix+.5), -plane_height/npts_y*(iy+.5), plane_distance, 1.0));
      }
    }
    
    // Create nodes and add them to the system.
    unsigned int nnodes = 5; // Number of nodes.
    double path_length = 3; // Length of the path the nodes traverse.
    
    unsigned int i = 0, j = 0;
    
    for (i = 0; i < nnodes; i++)
    { 
      // Translate in the x direction over the node path.
      Vector4d trans(i/(nnodes-1.0)*path_length, 0, 0, 1);
            
      // Don't rotate.
      Quaterniond rot(1, 0, 0, 0);
      rot.normalize();
      
      // Add a new node to the system.
      sys.addNode(trans, rot, cam_params, false);
    }
        
    // Set the random seed.
    unsigned short seed = (unsigned short)time(NULL);
    seed48(&seed);
    double ptscale = 1.0;
        
    // Add points into the system, and add noise.
    for (i = 0; i < points.size(); i++)
    {
      // Add up to .5 points of noise.
      Vector4d temppoint = points[i];
      temppoint.x() += ptscale*(drand48() - 0.5);
      temppoint.y() += ptscale*(drand48() - 0.5);
      temppoint.z() += ptscale*(drand48() - 0.5);
      sys.addPoint(temppoint);
    }
    
    Vector2d proj;
    
    // Project points into nodes.
    for (i = 0; i < points.size(); i++)
    {
      for (j = 0; j < sys.nodes.size(); j++)
      {
        // Project the point into the node's image coordinate system.
        sys.nodes[j].setProjection();
        sys.nodes[j].project2im(proj, points[i]);
        
        // If valid (within the range of the image size), add the monocular 
        // projection to SBA.
        if (proj.x() > 0 && proj.x() < maxx-1 && proj.y() > 0 && proj.y() < maxy-1)
        {
          sys.addMonoProj(j, i, proj);
          //printf("Adding projection: Node: %d Point: %d Proj: %f %f\n", j, i, proj.x(), proj.y());
        }
      }
    }
    
    // Add noise to node position.
    
    double transscale = 1.0;
    double rotscale = 0.2;
    
    // Don't actually add noise to the first node, since it's fixed.
    for (i = 1; i < sys.nodes.size(); i++)
    {
      Vector4d temptrans = sys.nodes[i].trans;
      Quaterniond tempqrot = sys.nodes[i].qrot;
      
      // Add error to both translation and rotation.
      temptrans.x() += transscale*(drand48() - 0.5);
      temptrans.y() += transscale*(drand48() - 0.5);
      temptrans.z() += transscale*(drand48() - 0.5);
      tempqrot.x() += rotscale*(drand48() - 0.5);
      tempqrot.y() += rotscale*(drand48() - 0.5);
      tempqrot.z() += rotscale*(drand48() - 0.5);
      tempqrot.normalize();
      
      sys.nodes[i].trans = temptrans;
      sys.nodes[i].qrot = tempqrot;
      
      // These methods should be called to update the node.
      sys.nodes[i].normRot();
      sys.nodes[i].setTransform();
      sys.nodes[i].setProjection();
      sys.nodes[i].setDr(true);
    }
        
}
void odom_callback(const nav_msgs::Odometry::ConstPtr &msg)
{
    //your code for update
    //camera position in the IMU frame = (0, -0.05, +0.02)
    //camera orientation in the IMU frame = Quaternion(0, 0, -1, 0); w x y z, respectively
    VectorXd z = VectorXd::Zero(_DIM_LEN << 1);
    MatrixXd R = MatrixXd::Zero(_DIM_LEN << 1, _DIM_LEN << 1);
    {   // get the measurement from the msg
        auto q = msg->pose.pose.orientation;
        auto p = msg->pose.pose.position;
        Quaterniond q_ic(0, 0, -1, 0);
        Quaterniond q_cw(q.w, q.x, q.y, q.z);

        MatrixXd g_cw = MatrixXd::Zero(4, 4), g_ic = MatrixXd::Zero(4, 4);

        g_cw.block(0, 0, 3, 3) << q_cw.toRotationMatrix();
        g_cw.block(0, 3, 3, 1) << p.x, p.y, p.z;
        g_cw(3, 3) = 1.0;

        g_ic.block(0, 0, 3, 3) << q_ic.toRotationMatrix();
        g_ic.block(0, 3, 3, 1) << 0, -0.05, +0.02;
        g_ic(3, 3) = 1.0;

        MatrixXd g_wi = (g_ic * g_cw).inverse();
        //ROS_WARN_STREAM("g_ic = " << endl << g_ic);
        //ROS_WARN_STREAM("g_cw = " << endl << g_cw);
        //ROS_WARN_STREAM("g_wi = " << endl << g_wi);


        z <<
          g_wi.block(0, 3, 3, 1),
                     RotMtoRPY(g_wi.block(0, 0, 3, 3));

        nav_msgs::Odometry odom = *msg;
        Quaterniond qq ;
        qq = RPYtoR(z.segment(3, 3)).block<3, 3>(0, 0);
        odom.pose.pose.position.x = z(0);
        odom.pose.pose.position.y = z(1);
        odom.pose.pose.position.z = z(2);
        odom.pose.pose.orientation.w = qq.w();
        odom.pose.pose.orientation.x = qq.x();
        odom.pose.pose.orientation.y = qq.y();
        odom.pose.pose.orientation.z = qq.z();
        msm_pub.publish(odom);

#if 1
        R(0, 0) = 0.02 * 0.02;
        R(1, 1) = 0.02 * 0.02;
        R(2, 2) = 0.02 * 0.02;
        R(3, 3) = (2.0/180.0)*acos(-1) * (2.0/180.0)*acos(-1);
        R(4, 4) = (2.0/180.0)*acos(-1) * (2.0/180.0)*acos(-1);
        R(5, 5) = (2.0/180.0)*acos(-1) * (2.0/180.0)*acos(-1);
#endif
    }

    //ROS_WARN("[update] preparation done");

    MatrixXd C_t = MatrixXd::Zero(_DIM_LEN << 1, _STT_LEN);
    MatrixXd K_t = MatrixXd::Zero(_STT_LEN, _DIM_LEN << 1);
    {   // compute the C_t and K_t
        C_t.block(_P_M_BEG, _POS_BEG, _P_M_LEN, _POS_LEN) <<
                Matrix3d::Identity();
        C_t.block(_O_M_BEG, _ORT_BEG, _O_M_LEN, _ORT_LEN) <<
                Matrix3d::Identity();
        K_t = cov_x * C_t.transpose() * (C_t * cov_x * C_t.transpose() + R).inverse();
    }

    //ROS_WARN("[update] C_t K_t done");
#if 1
    {
        VectorXd error = z - C_t * x;
        for (int i = _O_M_BEG; i < _O_M_END; ++i)
        {
            if (error(i) < -_PI)
            {
                //ROS_WARN_STREAM("error = " << error.transpose() << endl);
                error(i) += 2.0 * _PI;
                //ROS_WARN_STREAM("ERROR = " << error.transpose() << endl);
            }
            if (error(i) > _PI)
            {
                //ROS_WARN_STREAM("error = " << error.transpose() << endl);
                error(i) -= 2.0 * _PI;
                //ROS_WARN_STREAM("ERROR = " << error.transpose() << endl);
            }

        }
        x = x + K_t * error;
        cov_x = cov_x - K_t * C_t * cov_x;
    }
#endif
    //ROS_WARN("[update] x cov_x done");

    //pubOdometry();
}
Example #23
void PoseGraph::updatePath()
{
    m_keyframelist.lock();
    list<KeyFrame*>::iterator it;
    for (int i = 1; i <= sequence_cnt; i++)
    {
        path[i].poses.clear();
    }
    base_path.poses.clear();
    posegraph_visualization->reset();

    if (SAVE_LOOP_PATH)
    {
        ofstream loop_path_file_tmp(VINS_RESULT_PATH, ios::out);
        loop_path_file_tmp.close();
    }

    for (it = keyframelist.begin(); it != keyframelist.end(); it++)
    {
        Vector3d P;
        Matrix3d R;
        (*it)->getPose(P, R);
        Quaterniond Q;
        Q = R;
//        printf("path p: %f, %f, %f\n",  P.x(),  P.z(),  P.y() );

        geometry_msgs::PoseStamped pose_stamped;
        pose_stamped.header.stamp = ros::Time((*it)->time_stamp);
        pose_stamped.header.frame_id = "world";
        pose_stamped.pose.position.x = P.x() + VISUALIZATION_SHIFT_X;
        pose_stamped.pose.position.y = P.y() + VISUALIZATION_SHIFT_Y;
        pose_stamped.pose.position.z = P.z();
        pose_stamped.pose.orientation.x = Q.x();
        pose_stamped.pose.orientation.y = Q.y();
        pose_stamped.pose.orientation.z = Q.z();
        pose_stamped.pose.orientation.w = Q.w();
        if((*it)->sequence == 0)
        {
            base_path.poses.push_back(pose_stamped);
            base_path.header = pose_stamped.header;
        }
        else
        {
            path[(*it)->sequence].poses.push_back(pose_stamped);
            path[(*it)->sequence].header = pose_stamped.header;
        }

        if (SAVE_LOOP_PATH)
        {
            ofstream loop_path_file(VINS_RESULT_PATH, ios::app);
            loop_path_file.setf(ios::fixed, ios::floatfield);
            loop_path_file.precision(0);
            loop_path_file << (*it)->time_stamp * 1e9 << ",";
            loop_path_file.precision(5);
            loop_path_file  << P.x() << ","
                  << P.y() << ","
                  << P.z() << ","
                  << Q.w() << ","
                  << Q.x() << ","
                  << Q.y() << ","
                  << Q.z() << ","
                  << endl;
            loop_path_file.close();
        }
        //draw local connection
        if (SHOW_S_EDGE)
        {
            list<KeyFrame*>::reverse_iterator rit = keyframelist.rbegin();
            list<KeyFrame*>::reverse_iterator lrit;
            for (; rit != keyframelist.rend(); rit++)  
            {  
                if ((*rit)->index == (*it)->index)
                {
                    lrit = rit;
                    lrit++;
                    for (int i = 0; i < 4; i++)
                    {
                        if (lrit == keyframelist.rend())
                            break;
                        if((*lrit)->sequence == (*it)->sequence)
                        {
                            Vector3d connected_P;
                            Matrix3d connected_R;
                            (*lrit)->getPose(connected_P, connected_R);
                            posegraph_visualization->add_edge(P, connected_P);
                        }
                        lrit++;
                    }
                    break;
                }
            } 
        }
        if (SHOW_L_EDGE)
        {
            if ((*it)->has_loop && (*it)->sequence == sequence_cnt)
            {
                
                KeyFrame* connected_KF = getKeyFrame((*it)->loop_index);
                Vector3d connected_P;
                Matrix3d connected_R;
                connected_KF->getPose(connected_P, connected_R);
                //(*it)->getVioPose(P, R);
                (*it)->getPose(P, R);
                if((*it)->sequence > 0)
                {
                    posegraph_visualization->add_loopedge(P, connected_P + Vector3d(VISUALIZATION_SHIFT_X, VISUALIZATION_SHIFT_Y, 0));
                }
            }
        }

    }
    publish();
    m_keyframelist.unlock();
}
Example #24
void setupSBA(SysSBA &sba)
{
    // Create camera parameters.
    frame_common::CamParams cam_params;
    cam_params.fx = 430; // Focal length in x
    cam_params.fy = 430; // Focal length in y
    cam_params.cx = 320; // X position of principal point
    cam_params.cy = 240; // Y position of principal point
    cam_params.tx = -30; // Baseline

    // Create a plane containing a wall of points.
    Plane plane0, plane1;
    plane0.resize(3, 2, 10, 5);
    
    plane1 = plane0;
    plane1.translate(0.1, 0.05, 0.0);
    
    plane1.rotate(PI/4.0, 1, 0, 0);
    plane1.translate(0.0, 0.0, 7.0);
    
    
    plane0.rotate(PI/4.0, 1, 0, 0);
    plane0.translate(0.0, 0.0, 7.0);
    
    //plane1.translate(0.05, 0.0, 0.05);
    
    // Create nodes and add them to the system.
    unsigned int nnodes = 2; // Number of nodes.
    double path_length = 2; // Length of the path the nodes traverse.

    // Set the random seed.
    unsigned short seed = (unsigned short)time(NULL);
    seed48(&seed);
    
    for (int i = 0; i < nnodes; i++)
    { 
      // Translate in the x direction over the node path.
      Vector4d trans(i/(nnodes-1.0)*path_length, 0, 0, 1);
            
#if 0
      if (i >= 0)
	    {
	      // perturb a little
	      double tnoise = 0.5;	// meters
	      trans.x() += tnoise*(drand48()-0.5);
	      trans.y() += tnoise*(drand48()-0.5);
	      trans.z() += tnoise*(drand48()-0.5);
	    }
#endif

      // Don't rotate.
      Quaterniond rot(1, 0, 0, 0);
#if 0
      if (i > 0)
	    {
	      // perturb a little
	      double qnoise = 0.1;	// meters
	      rot.x() += qnoise*(drand48()-0.5);
	      rot.y() += qnoise*(drand48()-0.5);
	      rot.z() += qnoise*(drand48()-0.5);
	    }
#endif
      rot.normalize();
      
      // Add a new node to the system.
      sba.addNode(trans, rot, cam_params, false);
    }
    
    Vector3d imagenormal(0, 0, 1);
    
    Matrix3d covar0;
    covar0 << sqrt(imagenormal(0)), 0, 0, 0, sqrt(imagenormal(1)), 0, 0, 0, sqrt(imagenormal(2));
    Matrix3d covar;
    
    Quaterniond rotation;
    Matrix3d rotmat;
    
    // Project points into nodes.
    addPointAndProjection(sba, plane0.points, 0);
    addPointAndProjection(sba, plane1.points, 1);
    
    int offset = plane0.points.size();
    
    Vector3d normal0 = sba.nodes[0].qrot.toRotationMatrix().transpose()*plane0.normal; 
    Vector3d normal1 = sba.nodes[1].qrot.toRotationMatrix().transpose()*plane1.normal; 
    
    printf("Normal: %f %f %f -> %f %f %f\n", plane0.normal.x(), plane0.normal.y(), plane0.normal.z(), normal0.x(), normal0.y(), normal0.z());
    printf("Normal: %f %f %f -> %f %f %f\n", plane1.normal.x(), plane1.normal.y(), plane1.normal.z(), normal1.x(), normal1.y(), normal1.z());
    
    for (int i = 0; i < plane0.points.size(); i++)
    {
      sba.addPointPlaneMatch(0, i, normal0, 1, i+offset, normal1);

      Matrix3d covar;
      covar << 0.1, 0, 0,
                0, 0.1, 0, 
          	0, 0, 0.1;
      sba.setProjCovariance(0, i+offset, covar);
      sba.setProjCovariance(1, i, covar);
    }
    
    // Add noise to node position.
    
    double transscale = 1.0;
    double rotscale = 0.1;
    
    // Don't actually add noise to the first node, since it's fixed.
    for (int i = 1; i < sba.nodes.size(); i++)
    {
      Vector4d temptrans = sba.nodes[i].trans;
      Quaterniond tempqrot = sba.nodes[i].qrot;
      
      // Add error to both translation and rotation.
      temptrans.x() += transscale*(drand48() - 0.5);
      temptrans.y() += transscale*(drand48() - 0.5);
      temptrans.z() += transscale*(drand48() - 0.5);
      tempqrot.x() += rotscale*(drand48() - 0.5);
      tempqrot.y() += rotscale*(drand48() - 0.5);
      tempqrot.z() += rotscale*(drand48() - 0.5);
      tempqrot.normalize();
      
      sba.nodes[i].trans = temptrans;
      sba.nodes[i].qrot = tempqrot;
      
      // These methods should be called to update the node.
      sba.nodes[i].normRot();
      sba.nodes[i].setTransform();
      sba.nodes[i].setProjection();
      sba.nodes[i].setDr(true);
    }
}
int main(int argc, char **argv) {

    ros::init(argc, argv, "handeye_calibration");
    ros::NodeHandle nh ;

    srand(clock()) ;

    int c = 1 ;

    while ( c < argc )
    {
        if ( strncmp(argv[c], "--camera", 8) == 0 )
        {
            if ( c + 1 < argc ) {
                camera_id = argv[++c] ;
            }
        }
        else if ( strncmp(argv[c], "--out", 5) == 0 )
        {
            if ( c + 1 < argc ) {
                outFolder = argv[++c] ;
            }
        }
        else if ( strncmp(argv[c], "--data", 6) == 0 )
        {
            if ( c + 1 < argc ) {
                dataFolder = argv[++c] ;
            }
        }
        else if ( strncmp(argv[c], "--arm", 5) == 0 )
        {
            if ( c + 1 < argc ) {
                armName = argv[++c] ;
            }
        }
        else if ( strncmp(argv[c], "--board", 7) == 0 )
        {
            int bx = -1, by = -1 ;

            if ( c + 1 < argc ) {
                bx = atof(argv[++c]) ;
            }
            if ( c + 1 < argc ) {
                by = atof(argv[++c]) ;
            }

            if ( bx > 0 && by > 0 )
                boardSize = cv::Size(bx, by) ;
        }
        else if ( strncmp(argv[c], "--cell", 7) == 0 )
        {
            if ( c + 1 < argc ) {
                string cs = argv[++c] ;
                cellSize = atof(cs.c_str()) ;
            }
        }
        else if ( strncmp(argv[c], "--stations", 10) == 0 )
        {
            if ( c + 1 < argc ) {
                nStations = atoi(argv[++c]) ;
            }
        }
        else if ( strncmp(argv[c], "--bbox", 6) == 0 )
        {
            if ( c + 1 < argc ) {
                minX = atof(argv[++c]) ;
            }
            if ( c + 1 < argc ) {
                minY = atof(argv[++c]) ;
            }
            if ( c + 1 < argc ) {
                minZ = atof(argv[++c]) ;
            }
            if ( c + 1 < argc ) {
                maxX = atof(argv[++c]) ;
            }
            if ( c + 1 < argc ) {
                maxY = atof(argv[++c]) ;
            }
            if ( c + 1 < argc ) {
                maxZ = atof(argv[++c]) ;
            }

        }
        else if ( strncmp(argv[c], "--orient", 8) == 0 )
        {
            double ox, oy, oz ;

            if ( c + 1 < argc ) {
                ox = atof(argv[++c]) ;
            }
            if ( c + 1 < argc ) {
                oy = atof(argv[++c]) ;
            }
            if ( c + 1 < argc ) {
                oz = atof(argv[++c]) ;
            }

            orient = Vector3d(ox, oy, oz) ;
        }
        else if ( strncmp(argv[c], "--fixed", 7) == 0 )
        {
            fixedCam = true ;

        }

        ++c ;
    }


    if ( camera_id.empty() )
    {
        ROS_ERROR("No camera specified") ;
        return 0 ;
    }

    if ( outFolder.empty() )
    {
        outFolder = "/home/" ;
        outFolder += getenv("USER") ;
        outFolder += "/.ros/clopema_calibration/" ;
        outFolder += camera_id ;
        outFolder += "/handeye_rgb.calib" ;
    }

    ros::AsyncSpinner spinner(4) ;
    spinner.start() ;

    // open grabber and wait until connection

    camera_helpers::OpenNICaptureRGBD grabber(camera_id) ;

    if ( !grabber.connect() )
    {
        ROS_ERROR("Cannot connect to frame grabber: %s", camera_id.c_str()) ;
        return 0 ;
    }

    c = 0 ;

    // move robot and grab images

    robot_helpers::MoveRobot mv ;
    mv.setServoMode(false);

    tf::TransformListener listener(ros::Duration(1.0));

    double cx, cy, fx, fy ;

    while ( c < nStations )
    {
        // move the robot to the next position

        double X = minX + (maxX - minX)*(rand()/(double)RAND_MAX) ;
        double Y = minY + (maxY - minY)*(rand()/(double)RAND_MAX) ;
        double Z = minZ + (maxZ - minZ)*(rand()/(double)RAND_MAX) ;

        const double qscale = 0.3 ;
        double qx = qscale * (rand()/(double)RAND_MAX - 0.5) ;
        double qy = qscale * (rand()/(double)RAND_MAX - 0.5) ;
        double qz = qscale * (rand()/(double)RAND_MAX - 0.5) ;
        double qw = qscale * (rand()/(double)RAND_MAX - 0.5) ;

        Quaterniond q = robot_helpers::lookAt(orient, M_PI) ;

        q = Quaterniond(q.x() + qx, q.y() + qy, q.z() + qz, q.w() + qw) ;
        q.normalize();
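        // The perturbed, renormalized quaternion is a small random rotation about the
        // nominal look-at orientation, so each station views the target from a slightly
        // different angle.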

        if ( fixedCam ) {

            addPlaneToCollisionModel(armName, 0.3, q) ;
            if ( !robot_helpers::moveGripper(mv, armName, Eigen::Vector3d(X, Y, Z), q) ) continue ;
            robot_helpers::resetCollisionModel() ;
        }
        else
        {
            // For an Xtion mounted on the arm, the bounding box gives the target position and
            // orient gives the look-at direction, so move the sensor back along the viewing
            // axis far enough that the target stays visible. The offset is currently hardcoded.

            Vector3d dir = q*Vector3d(0, 0, 1) ;
            Vector3d camPos = Vector3d(X, Y, Z) - 0.9*dir ;

            trajectory_msgs::JointTrajectory traj ;
            if ( !robot_helpers::planXtionToPose(armName, camPos, q, traj) ) continue ;

            mv.execTrajectory(traj) ;

        }

        image_geometry::PinholeCameraModel cm ;
        cv::Mat clr, depth ;

        ros::Time ts ;

        grabber.grab(clr, depth, ts, cm) ;

        // camera intrinsics: we assume a rectified and calibrated frame

        cx = cm.cx() ;
        cy = cm.cy() ;
        fx = cm.fx() ;
        fy = cm.fy() ;

        string filenamePrefix = dataFolder + str(boost::format("/grab_%06d") % c) ;

        cv::imwrite(filenamePrefix + "_c.png", clr) ;
        cv::imwrite(filenamePrefix + "_d.png", depth) ;

        Eigen::Affine3d pose_ ;

        if ( fixedCam ) pose_ = robot_helpers::getPose(armName) ;
        else
        {
            tf::StampedTransform transform;

            try {
                listener.waitForTransform(armName + "_link_6", "base_link", ts, ros::Duration(1) );
                listener.lookupTransform(armName + "_link_6", "base_link", ts, transform);

                tf::TransformTFToEigen(transform, pose_);
            } catch (const tf::TransformException &ex) {
                ROS_ERROR("%s", ex.what());
                continue ;
            }
        }



        {
            ofstream strm((filenamePrefix + "_pose.txt").c_str()) ;

            strm << pose_.rotation() << endl << pose_.translation() ;
        }

         ++c ;

    }

    grabber.disconnect();


    robot_helpers::setServoPowerOff() ;


    vector<Affine3d> gripper_to_base, target_to_sensor ;
    Affine3d sensor_to_base, sensor_to_gripper ;

    cv::Mat cameraMatrix ;

    cameraMatrix = cv::Mat::eye(3, 3, CV_64F);

    cameraMatrix.at<double>(0, 0) = fx ;
    cameraMatrix.at<double>(1, 1) = fy ;
    cameraMatrix.at<double>(0, 2) = cx ;
    cameraMatrix.at<double>(1, 2) = cy ;
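    // K now has the standard pinhole form
    //   [ fx  0  cx ]
    //   [  0  fy  cy ]
    //   [  0   0   1 ]
    // so a camera-frame point (X, Y, Z) projects to pixel (fx*X/Z + cx, fy*Y/Z + cy).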



    find_target_motions("grab_", dataFolder, boardSize, cellSize, cameraMatrix, true, gripper_to_base, target_to_sensor) ;


    if ( fixedCam )
        solveHandEyeFixed(gripper_to_base, target_to_sensor, Tsai, true, sensor_to_base) ;
    else
        solveHandEyeMoving(gripper_to_base, target_to_sensor, Horaud, false, sensor_to_gripper) ;


    certh_libs::createDir(boost::filesystem::path(outFolder).parent_path().string(), true) ;

    ofstream ostrm(outFolder.c_str()) ;

    if ( fixedCam )
    {
        ostrm << sensor_to_base.inverse().matrix() ;
//        cout << sensor_to_base.inverse().matrix() ;
    }
    else
    {
        ostrm << sensor_to_gripper.inverse().matrix() ;
      // cout << sensor_to_gripper.inverse().matrix() << endl ;
/*
       for(int i=0 ; i<gripper_to_base.size() ; i++)
           cout << (gripper_to_base[i] * sensor_to_gripper * target_to_sensor[i]).matrix() << endl ;
           */
    }

    return 1;
}
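As a side note, the commented-out loop above hints at a useful sanity check: in the moving-camera case the product gripper_to_base[i] * sensor_to_gripper * target_to_sensor[i] maps the calibration target into the base frame and should come out (nearly) identical for every station. A minimal sketch of such a check follows; the helper name is hypothetical and only the Eigen types declared above are assumed.

// Hypothetical helper sketching the consistency check: if the hand-eye estimate is
// good, every printed target-to-base matrix should agree up to measurement noise.
// Note: before C++17, std::vector of Eigen::Affine3d formally requires
// Eigen::aligned_allocator (the example above uses the plain vector as well).
#include <Eigen/Geometry>
#include <iostream>
#include <vector>

void checkHandEyeConsistency(const std::vector<Eigen::Affine3d> &gripper_to_base,
                             const std::vector<Eigen::Affine3d> &target_to_sensor,
                             const Eigen::Affine3d &sensor_to_gripper)
{
    for (size_t i = 0; i < gripper_to_base.size(); ++i)
    {
        Eigen::Affine3d target_to_base =
            gripper_to_base[i] * sensor_to_gripper * target_to_sensor[i];
        std::cout << "station " << i << ":\n" << target_to_base.matrix() << "\n\n";
    }
}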
Example #26
void setupSBA(SysSBA &sys)
{
    // Create camera parameters.
    frame_common::CamParams cam_params;
    cam_params.fx = 430; // Focal length in x
    cam_params.fy = 430; // Focal length in y
    cam_params.cx = 320; // X position of principal point
    cam_params.cy = 240; // Y position of principal point
    cam_params.tx = 0.3; // Baseline 
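    // tx is the stereo baseline: in the right-camera projections built further below,
    // the point's camera-frame x coordinate is shifted by -tx before Kcam is applied.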

    // Define dimensions of the image.
    int maxx = 640;
    int maxy = 480;

    // Create a plane containing a wall of points.
    Plane middleplane;
    middleplane.resize(3, 2, 10, 5);
    //middleplane.rotate(PI/4.0, PI/6.0, 1, 0);
    middleplane.rotate(PI/4.0, 1, 0, 1);
    middleplane.translate(0.0, 0.0, 5.0);
    
    // Create another plane containing a wall of points.
    Plane mp2;
    mp2.resize(3, 2, 10, 5);
    mp2.rotate(0, 0, 0, 1);
    mp2.translate(0.0, 0.0, 4.0);


    // Create another plane containing a wall of points.
    Plane mp3;
    mp3.resize(3, 2, 10, 5);
    mp3.rotate(-PI/4.0, 1, 0, 1);
    mp3.translate(0.0, 0.0, 4.5);



    // Per-point plane normals; the corresponding true point positions are accumulated
    // into the 'points' vector below.
    vector<Eigen::Vector3d, Eigen::aligned_allocator<Eigen::Vector3d> > normals;
    
    points.insert(points.end(), middleplane.points.begin(), middleplane.points.end());
    normals.insert(normals.end(), middleplane.points.size(), middleplane.normal);
    points.insert(points.end(), mp2.points.begin(), mp2.points.end());
    normals.insert(normals.end(), mp2.points.size(), mp2.normal);
    points.insert(points.end(), mp3.points.begin(), mp3.points.end());
    normals.insert(normals.end(), mp3.points.size(), mp3.normal);
    
    // Create nodes and add them to the system.
    unsigned int nnodes = 2; // Number of nodes.
    double path_length = 1.0; // Length of the path the nodes traverse.

    // Set the random seed (seed48 expects an array of three unsigned shorts).
    unsigned short seed[3];
    seed[0] = seed[1] = seed[2] = (unsigned short)time(NULL);
    seed48(seed);
    
    unsigned int i = 0;
    
    Vector3d inormal0 = middleplane.normal;
    Vector3d inormal1 = middleplane.normal;
    Vector3d inormal20 = mp2.normal;
    Vector3d inormal21 = mp2.normal;
    Vector3d inormal30 = mp3.normal;
    Vector3d inormal31 = mp3.normal;
    
    for (i = 0; i < nnodes; i++)
    { 
      // Translate in the x direction over the node path.
      Vector4d trans(i/(nnodes-1.0)*path_length, 0, 0, 1);
            
#if 1
      if (i >= 2)
	    {
	      // perturb a little
	      double tnoise = 0.5;	// meters
	      trans.x() += tnoise*(drand48()-0.5);
	      trans.y() += tnoise*(drand48()-0.5);
	      trans.z() += tnoise*(drand48()-0.5);
	    }
#endif

      // Don't rotate.
      Quaterniond rot(1, 0, 0, 0);
#if 1
      if (i >= 2)
	    {
	      // perturb a little
	      double qnoise = 0.1;	// quaternion coefficient noise (dimensionless)
	      rot.x() += qnoise*(drand48()-0.5);
	      rot.y() += qnoise*(drand48()-0.5);
	      rot.z() += qnoise*(drand48()-0.5);
	    }
#endif
      rot.normalize();
      
      // Add a new node to the system.
      sys.addNode(trans, rot, cam_params, false);
      
      // set normal
      if (i == 0)
	{
	  inormal0 = rot.toRotationMatrix().transpose() * inormal0;
	  inormal20 = rot.toRotationMatrix().transpose() * inormal20;
	  inormal30 = rot.toRotationMatrix().transpose() * inormal30;
	}
      else
	{
	  inormal1 = rot.toRotationMatrix().transpose() * inormal1;
	  inormal21 = rot.toRotationMatrix().transpose() * inormal21;
	  inormal31 = rot.toRotationMatrix().transpose() * inormal31;
	}
    }
        
    double pointnoise = 1.0;
    
    // Add points into the system, and add noise.
    for (i = 0; i < points.size(); i++)
    {
      // Add up to ±0.5 units of noise to each coordinate.
      Vector4d temppoint = points[i];
      temppoint.x() += pointnoise*(drand48() - 0.5);
      temppoint.y() += pointnoise*(drand48() - 0.5);
      temppoint.z() += pointnoise*(drand48() - 0.5);
      sys.addPoint(temppoint);
    }
    
    // Each point is added a second time: node 0 observes the first copy and node 1 the
    // second (indices offset by nn below); the copies are only tied together by the
    // optional exact / point-plane matches.
    for (i = 0; i < points.size(); i++)
    {
      // Add up to ±0.5 units of noise to each coordinate.
      Vector4d temppoint = points[i];
      temppoint.x() += pointnoise*(drand48() - 0.5);
      temppoint.y() += pointnoise*(drand48() - 0.5);
      temppoint.z() += pointnoise*(drand48() - 0.5);
      sys.addPoint(temppoint);
    }

    Vector2d proj2d, proj2dp;
    Vector3d proj, projp, pc, pcp, baseline, baselinep;
    
    Vector3d imagenormal(0, 0, 1);
    
    Matrix3d covar0;
    covar0 << sqrt(imagenormal(0)), 0, 0,
              0, sqrt(imagenormal(1)), 0,
              0, 0, sqrt(imagenormal(2));
    Matrix3d covar;
    
    Quaterniond rotation;
    Matrix3d rotmat;
    
    unsigned int midindex = middleplane.points.size();
    printf("Normal for Middle Plane: [%f %f %f], index %d -> %d\n", middleplane.normal.x(), middleplane.normal.y(), middleplane.normal.z(), 0, midindex-1);
    
    int nn = points.size();

    // Project points into nodes.
    for (i = 0; i < points.size(); i++)
    {
      int k=i;
      // Project the point into the node's image coordinate system.
      sys.nodes[0].setProjection();
      sys.nodes[0].project2im(proj2d, points[k]);
      sys.nodes[1].setProjection();
      sys.nodes[1].project2im(proj2dp, points[k]);
        
      // Camera coords for right camera
      baseline << sys.nodes[0].baseline, 0, 0;
      pc = sys.nodes[0].Kcam * (sys.nodes[0].w2n*points[k] - baseline); 
      proj.head<2>() = proj2d;
      proj(2) = pc(0)/pc(2);
        
      baseline << sys.nodes[1].baseline, 0, 0;
      pcp = sys.nodes[1].Kcam * (sys.nodes[1].w2n*points[k] - baseline); 
      projp.head<2>() = proj2dp;
      projp(2) = pcp(0)/pcp(2);
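      // proj / projp are stereo measurements of the form (u_left, v_left, u_right):
      // the pixel in the reference camera plus the x pixel in the baseline-offset camera,
      // which implicitly encodes the disparity.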

      // add noise to projections
      double prnoise = 0.5;	// 0.5 pixels
      proj.head<2>() += Vector2d(prnoise*(drand48() - 0.5),prnoise*(drand48() - 0.5));
      projp.head<2>() += Vector2d(prnoise*(drand48() - 0.5),prnoise*(drand48() - 0.5));


      // If valid (within the range of the image size), add the stereo 
      // projection to SBA.
      if (proj.x() > 0 && proj.x() < maxx && proj.y() > 0 && proj.y() < maxy)
        {
          // add point cloud shape-holding projections to each node
          sys.addStereoProj(0, k, proj);
          sys.addStereoProj(1, k+nn, projp);

#ifdef USE_PP
          // add exact matches
          if (i == 20 || i == 65 || i == 142)
            sys.addStereoProj(1, k, projp);
#endif

#ifdef USE_PPL
          // add point-plane matches
          if (i < midindex)
            sys.addPointPlaneMatch(0, k, inormal0, 1, k+nn, inormal1);
          else if (i < 2*midindex)
            sys.addPointPlaneMatch(0, k, inormal20, 1, k+nn, inormal21);
          else
            sys.addPointPlaneMatch(0, k, inormal30, 1, k+nn, inormal31);
          //          sys.addStereoProj(0, k+nn, projp);
          //          sys.addStereoProj(1, k, proj);

          Matrix3d covar;
          double cv = 0.01;
          covar << cv, 0, 0,
            0, cv, 0, 
            0, 0, cv;
          sys.setProjCovariance(0, k+nn, covar);
          sys.setProjCovariance(1, k, covar);
#endif

        }
      else
        {
          cout << "ERROR! point not in view of nodes" << endl;
          //return;
        }
    }

    // Add noise to node position.
    
    double transscale = 2.0;
    double rotscale = 0.5;
    
    // Don't actually add noise to the first node, since it's fixed.
    for (i = 1; i < sys.nodes.size(); i++)
    {
      Vector4d temptrans = sys.nodes[i].trans;
      Quaterniond tempqrot = sys.nodes[i].qrot;
      
      // Add error to both translation and rotation.
      temptrans.x() += transscale*(drand48() - 0.5);
      temptrans.y() += transscale*(drand48() - 0.5);
      temptrans.z() += transscale*(drand48() - 0.5);
      tempqrot.x() += rotscale*(drand48() - 0.5);
      tempqrot.y() += rotscale*(drand48() - 0.5);
      tempqrot.z() += rotscale*(drand48() - 0.5);
      tempqrot.normalize();
      
      sys.nodes[i].trans = temptrans;
      sys.nodes[i].qrot = tempqrot;
      
      // These methods should be called to update the node.
      sys.nodes[i].normRot();
      sys.nodes[i].setTransform();
      sys.nodes[i].setProjection();
      sys.nodes[i].setDr(true);
    }
        
}
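A minimal driver sketch for the example above, assuming the SysSBA::doSBA entry point and the SBA_SPARSE_CHOLESKY solver flag of the sba package (verify the exact signature and flag names against your sba.h); setupSBA and the containers it fills are the ones defined in this example, and the argument values are illustrative only.

#include <sba/sba.h>        // assumed header of the sba package
using namespace sba;

int main()
{
    SysSBA sys;
    setupSBA(sys);          // build the synthetic two-node, three-plane system above

    // Run a few Levenberg-Marquardt iterations of sparse bundle adjustment.
    sys.doSBA(10, 1.0e-3, SBA_SPARSE_CHOLESKY);

    return 0;
}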