Code example #1
void GJointRevolute::update_short()
{
	if ( bReversed ) {
		T = SE3(Exp(-axis*coordinate.q), Vec3(0,0,0));
		inv_T = SE3(~T.GetRotation());
		S[0] = -axis[0]; S[1] = -axis[1]; S[2] = -axis[2];
	} else {
		T = SE3(Exp(axis*coordinate.q), Vec3(0,0,0));
		inv_T = SE3(~T.GetRotation());
		S[0] = axis[0]; S[1] = axis[1]; S[2] = axis[2];
	}
}
Code example #2
void GJointPlanar::update_short()
{
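	// S is the 6x2 joint Jacobian (angular rows first; column-major storage assumed):
	// S[3] is the x-translation entry of its first column, S[10] the y-translation entry of its second.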
	if ( bReversed ) {
		T = SE3(1,0,0,0,1,0,0,0,1, -coordinates[0].q, -coordinates[1].q, 0);
		inv_T = SE3(1,0,0,0,1,0,0,0,1, coordinates[0].q, coordinates[1].q, 0);
		S[3] = S[10] = -1.0;
	} else {
		T = SE3(1,0,0,0,1,0,0,0,1, coordinates[0].q, coordinates[1].q, 0);
		inv_T = SE3(1,0,0,0,1,0,0,0,1, -coordinates[0].q, -coordinates[1].q, 0);
		S[3] = S[10] = 1.0;
	}
}
Code example #3
File: gsystem.cpp  Project: mrdeveloperdude/OctoMY
void GSystem::calcDerivative_MomentumCOM_Dq_Ddq(RMatrix &DHcDq_, RMatrix &DHcDdq_)
{
	int i;
	std::list<GBody *>::iterator iter_pbody;
	std::list<GCoordinate *>::iterator iter_pcoord;
	std::vector<SE3> M(pBodies.size());
	std::vector<GConstraintJointLoop::JOINTLOOP_CONSTRAINT_TYPE> jlc_type(pBodies.size());

	for (i=0, iter_pbody = pBodies.begin(); iter_pbody != pBodies.end(); i++, iter_pbody++) {

		// save previous settings
		M[i] = (*iter_pbody)->fwdJointChain.M1;
		jlc_type[i] = (*iter_pbody)->fwdJointChain.jointLoopConstraintType;

		// prerequisites for (*iter_pbody)->getDerivative_MomentumGlobal_Dq(...) and (*iter_pbody)->getDerivative_MomentumGlobal_Ddq(...)
		(*iter_pbody)->fwdJointChain.setM(SE3());
		(*iter_pbody)->fwdJointChain.jointLoopConstraintType = GConstraintJointLoop::JOINTLOOP_ORIENTATION_POSITION;
		(*iter_pbody)->fwdJointChain.update_J();
	}

	// calculate the derivatives
	Vec3 p = getPositionCOMGlobal();
	dse3 Hg = getMomentumGlobal();
	RMatrix DHgDq(6, getNumCoordinates()), DHgDdq(6, getNumCoordinates());
	dse3 DHgDqi, DHgDdqi;
	for (i=0, iter_pcoord = pCoordinates.begin(); iter_pcoord != pCoordinates.end(); i++, iter_pcoord++) {
		DHgDqi = getDerivative_MomentumGlobal_Dq(*iter_pcoord);
		DHgDdqi = getDerivative_MomentumGlobal_Ddq(*iter_pcoord);
		//DHgDq.Push(0, i, convert_to_RMatrix(DHgDqi));
		//DHgDdq.Push(0, i, convert_to_RMatrix(DHgDdqi));
		matSet(&DHgDq[6*i], DHgDqi.GetArray(), 6);
		matSet(&DHgDdq[6*i], DHgDdqi.GetArray(), 6);
	}
	RMatrix DdAdDq_Hg(6, getNumCoordinates());
	Vec3 DpDqi;
	for (i=0, iter_pcoord = pCoordinates.begin(); iter_pcoord != pCoordinates.end(); i++, iter_pcoord++) {
		DpDqi = getDerivative_PositionCOMGlobal_Dq(*iter_pcoord);
		//DdAdDq_Hg.Push(0, i, convert_to_RMatrix(dAd(SE3(p), dad(se3(Vec3(0,0,0),DpDqi), Hg))));
		matSet(&DdAdDq_Hg[6*i], dAd(SE3(p), dad(se3(Vec3(0,0,0),DpDqi), Hg)).GetArray(), 6);
	}
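	// product rule: Hc = dAd(SE3(p)) * Hg, so dHc/dq = dAd(SE3(p)) * dHg/dq + (d dAd(SE3(p))/dq) * Hg,
	// where the second term was assembled above as DdAdDq_Hg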
	DHcDq_ = dAd(SE3(p), DHgDq) + DdAdDq_Hg;
	DHcDdq_ = dAd(SE3(p), DHgDdq);

	// restore the previous settings
	for (i=0, iter_pbody = pBodies.begin(); iter_pbody != pBodies.end(); i++, iter_pbody++) {
		(*iter_pbody)->fwdJointChain.setM(M[i]);
		(*iter_pbody)->fwdJointChain.setJointLoopConstraintType(jlc_type[i]);
	}
}
Code example #4
void FrameHandlerMono::addImage(const cv::Mat& img, const double timestamp)
{
  if(!startFrameProcessingCommon(timestamp))
    return;

  // some cleanup from last iteration, can't do before because of visualization
  core_kfs_.clear();
  overlap_kfs_.clear();

  // create new frame
  SVO_START_TIMER("pyramid_creation");
  new_frame_.reset(new Frame(cam_, img.clone(), timestamp));
  SVO_STOP_TIMER("pyramid_creation");

  // process frame
  UpdateResult res = RESULT_FAILURE;
  if(stage_ == STAGE_DEFAULT_FRAME)
    res = processFrame();
  else if(stage_ == STAGE_SECOND_FRAME)
    res = processSecondFrame();
  else if(stage_ == STAGE_FIRST_FRAME)
    res = processFirstFrame();
  else if(stage_ == STAGE_RELOCALIZING)
    res = relocalizeFrame(SE3(Matrix3d::Identity(), Vector3d::Zero()),
                          map_.getClosestKeyframe(last_frame_));

  // set last frame
  last_frame_ = new_frame_;
  new_frame_.reset();
  // finish processing
  finishFrameProcessingCommon(last_frame_->id_, res, last_frame_->nObs());
}
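Side note (not part of the original source): the SE3 in the SVO / LSD-SLAM examples of this listing is the non-templated Sophus SE3, built from an Eigen rotation matrix and translation vector. The sketch below only exercises operations that actually appear in these examples (construction, composition, inversion, translation(), rotation_matrix()); the header path and the using-declarations are assumptions.

#include <Eigen/Core>
#include <sophus/se3.h>   // assumed header for the non-templated Sophus::SE3 used by SVO/LSD-SLAM

using Eigen::Matrix3d;
using Eigen::Vector3d;
using Sophus::SE3;        // assumed: the SE3 in these examples refers to this type

int main()
{
  SE3 T_identity(Matrix3d::Identity(), Vector3d::Zero());        // identity pose, as in relocalizeFrame()
  SE3 T_cam_world(Matrix3d::Identity(), Vector3d(0.0, 0.0, 1.0));

  SE3 T_world_cam = T_cam_world.inverse();                        // inversion, as in T_f_w_.inverse()
  SE3 T_round_trip = T_identity * T_cam_world * T_world_cam;      // composition; identity up to numerics

  Vector3d t = T_round_trip.translation();                        // ~(0, 0, 0)
  Matrix3d R = T_round_trip.rotation_matrix();                    // ~identity
  return (t.norm() < 1e-9 && R.isApprox(Matrix3d::Identity())) ? 0 : 1;
}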
Code example #5
File: gsystem.cpp  Project: mrdeveloperdude/OctoMY
void GSystem::calcDerivative_PositionCOMGlobal_Dq(RMatrix &DpDq_)
{
	int i;
	std::list<GBody *>::iterator iter_pbody;
	std::list<GCoordinate *>::iterator iter_pcoord;
	std::vector<SE3> M(pBodies.size());
	std::vector<GConstraintJointLoop::JOINTLOOP_CONSTRAINT_TYPE> jlc_type(pBodies.size());
	Vec3 DpDqi;

	for (i=0, iter_pbody = pBodies.begin(); iter_pbody != pBodies.end(); i++, iter_pbody++) {

		// save previous settings
		M[i] = (*iter_pbody)->fwdJointChain.M1;
		jlc_type[i] = (*iter_pbody)->fwdJointChain.jointLoopConstraintType;

		// prerequisites for (*iter_pbody)->getDerivative_PositionCOMGlobal_Dq(...)
		(*iter_pbody)->fwdJointChain.setM(SE3());
		(*iter_pbody)->fwdJointChain.jointLoopConstraintType = GConstraintJointLoop::JOINTLOOP_ORIENTATION_POSITION;
		(*iter_pbody)->fwdJointChain.update_J();
	}

	// calculate the derivative
	DpDq_.SetZero(3, getNumCoordinates());
	for (i=0, iter_pcoord = pCoordinates.begin(); iter_pcoord != pCoordinates.end(); i++, iter_pcoord++) {
		DpDqi = getDerivative_PositionCOMGlobal_Dq(*iter_pcoord);
		//DpDq_.Push(0, i, convert_to_RMatrix(DpDqi));
		matSet(&DpDq_[3*i], DpDqi.GetArray(), 3);
	}

	// restore the previous settings
	for (i=0, iter_pbody = pBodies.begin(); iter_pbody != pBodies.end(); i++, iter_pbody++) {
		(*iter_pbody)->fwdJointChain.setM(M[i]);
		(*iter_pbody)->fwdJointChain.setJointLoopConstraintType(jlc_type[i]);
	}
}
Code example #6
void GJointPlanar::update()
{
	//dS, dSdq are still zeros.

	if ( bReversed ) {
		T = SE3(1,0,0,0,1,0,0,0,1, -coordinates[0].q, -coordinates[1].q, 0);
		inv_T = SE3(1,0,0,0,1,0,0,0,1, coordinates[0].q, coordinates[1].q, 0);
		Sdq = se3(0, 0, 0, -coordinates[0].dq, -coordinates[1].dq, 0);
		Sddq = se3(0, 0, 0, -coordinates[0].ddq, -coordinates[1].ddq, 0);
		DSdqDt = Sddq;
		S[3] = S[10] = -1.0;
	} else {
		T = SE3(1,0,0,0,1,0,0,0,1, coordinates[0].q, coordinates[1].q, 0);
		inv_T = SE3(1,0,0,0,1,0,0,0,1, -coordinates[0].q, -coordinates[1].q, 0);
		Sdq = se3(0, 0, 0, coordinates[0].dq, coordinates[1].dq, 0);
		Sddq = se3(0, 0, 0, coordinates[0].ddq, coordinates[1].ddq, 0);
		DSdqDt = Sddq;
		S[3] = S[10] = 1.0;
	}
}
Code example #7
void GJointRevolute::update()
{
	//dS, dSdq are still zeros.

	if ( bReversed ) {
		T = SE3(Exp(-axis*coordinate.q), Vec3(0,0,0));
		inv_T = SE3(~T.GetRotation());
		Sdq = se3(-axis*coordinate.dq, Vec3(0,0,0));
		Sddq = se3(-axis*coordinate.ddq, Vec3(0,0,0));
		DSdqDt = Sddq;
		S[0] = -axis[0]; S[1] = -axis[1]; S[2] = -axis[2];
	} else {
		T = SE3(Exp(axis*coordinate.q), Vec3(0,0,0));
		inv_T = SE3(~T.GetRotation());
		Sdq = se3(axis*coordinate.dq, Vec3(0,0,0));
		Sddq = se3(axis*coordinate.ddq, Vec3(0,0,0));
		DSdqDt = Sddq;
		S[0] = axis[0]; S[1] = axis[1]; S[2] = axis[2];
	}
}
Code example #8
FrameHandlerMono::UpdateResult FrameHandlerMono::processFirstFrame()
{
  new_frame_->T_f_w_ = SE3(Matrix3d::Identity(), Vector3d::Zero());
  if(klt_homography_init_.addFirstFrame(new_frame_) == initialization::FAILURE)
    return RESULT_NO_KEYFRAME;
  new_frame_->setKeyframe();
  map_.addKeyframe(new_frame_);
  stage_ = STAGE_SECOND_FRAME;
  SVO_INFO_STREAM("Init: Selected first frame.");
  return RESULT_IS_KEYFRAME;
}
Code example #9
File: DoorTraj.cpp  Project: personalrobotics/owd
SE3 pose_to_SE3(geometry_msgs::Pose &p) {
  btQuaternion ep_qrot(p.orientation.x,
		       p.orientation.y,
		       p.orientation.z,
		       p.orientation.w);
  so3 R(R3(ep_qrot.getAxis().x(),
	   ep_qrot.getAxis().y(),
	   ep_qrot.getAxis().z()),
	ep_qrot.getAngle());
  R3 t(p.position.x,
       p.position.y,
       p.position.z);
  return SE3(R,t);
}
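A minimal usage sketch for the helper above (a hypothetical caller, not part of DoorTraj.cpp), filling a geometry_msgs::Pose with an identity orientation:

#include <geometry_msgs/Pose.h>

// hypothetical caller, for illustration only
SE3 example_pose_to_SE3()
{
  geometry_msgs::Pose p;
  p.position.x = 0.5;  p.position.y = 0.0;  p.position.z = 1.2;
  p.orientation.x = 0.0; p.orientation.y = 0.0; p.orientation.z = 0.0;
  p.orientation.w = 1.0;                  // identity rotation
  return pose_to_SE3(p);                  // R from the quaternion's axis/angle, t from position
}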
Code example #10
InitResult KltHomographyInit::addFirstFrame(FramePtr frame_ref, Matrix3d orient, Vector3d pos)
{
  reset();
  detectFeatures(frame_ref, px_ref_, f_ref_);
  if(px_ref_.size() < 100)
  {
    SVO_WARN_STREAM_THROTTLE(2.0, "First image has less than 100 features. Retry in more textured environment.");
    return FAILURE;
  }
  frame_ref_ = frame_ref;
  px_cur_.insert(px_cur_.begin(), px_ref_.begin(), px_ref_.end());
  T_first_frame_real_scale = SE3(orient, pos);
  frame_ref_->T_f_w_ =  T_first_frame_real_scale;
  return SUCCESS;
}
Code example #11
File: Relocalizer.cpp  Project: Bingxiong/lsd_slam
void Relocalizer::getResult(Frame* &out_keyframe, std::shared_ptr<Frame> &frame, int &out_successfulFrameID, SE3 &out_frameToKeyframe)
{
	boost::unique_lock<boost::mutex> lock(exMutex);
	if(hasResult)
	{
		out_keyframe = resultKF;
		out_successfulFrameID = resultFrameID;
		out_frameToKeyframe = resultFrameToKeyframe;
		frame = resultRelocFrame;
	}
	else
	{
		out_keyframe = 0;
		out_successfulFrameID = -1;
		out_frameToKeyframe = SE3();
		frame.reset();
	}
}
Code example #12
void initialize(void)
{
	//VP_BASIC_RENDERER_WORLD(world);
	VP_FRAMEWORK_WORLD(world)
	VP_FRAMEWORK_CAMERA(-49.9825, 15.968, 15.5653, -109.286, 78.4562, 0.767533)

	vpMaterial::GetDefaultMaterial()->SetRestitution(0.2);
	vpMaterial::GetDefaultMaterial()->SetDynamicFriction(1.0);
	ground.SetGround();
	ground.AddGeometry(new vpBox(Vec3(20, 20, 1)));
	ground.AddGeometry(new vpCapsule(0.5, 11.0), Vec3(0, 0, 5));
	ground.AddGeometry(new vpCapsule(0.5, 6.0), SE3(Vec3(0, -2.5, 10))*RotX(0.5*M_PI));

	for ( int i = 0; i < NUM_CHAIN-1; i++ )
	{
		chain[i].SetJoint(&J[i], Vec3(10.0 / NUM_CHAIN, 0, 0));
		chain[i+1].SetJoint(&J[i], Vec3(-10.0 / NUM_CHAIN, 0, 0));
		//chain[i].AddGeometry(new vpBox(Vec3(20.0 / NUM_CHAIN, 0.2, 0.2)));
		chain[i].AddGeometry(new vpCapsule(0.1, 20.0 / NUM_CHAIN + 0.2), RotY(0.5*M_PI));
		for ( int j = -3; j < 3; j++ )
			if ( i+j >= 0 && i+j <= NUM_CHAIN-1 ) world.IgnoreCollision(&chain[i], &chain[i+j]);
		J[i].SetDamping(SpatialDamper(20));
		J[i].SetElasticity(SpatialSpring(1500));
		J[i].SetOrientation(Exp(Axis(0.01, 0.01, 0.01)));
		chain[i].SetInertia(Inertia(.1));
	}
	chain[NUM_CHAIN-1].SetInertia(Inertia(1));
	chain[NUM_CHAIN-1].AddGeometry(new vpCapsule(0.1, 20.0 / NUM_CHAIN + 0.2), RotY(0.5*M_PI));
	chain[NUM_CHAIN/2].SetFrame(Vec3(0, -2, 15));
	
	world.AddBody(&chain[NUM_CHAIN/2]);
	world.AddBody(&ground);

	world.SetGravity(Vec3(0.0, 0.0, -10.0));
	world.Initialize();
	world.SetIntegrator(VP::IMPLICIT_EULER_FAST);
	world.SetTimeStep(0.005);
	world.BackupState();

	vpTimer timer;
	timer.Tic();
	for ( int i = 0; i < 1000; i++ ) world.StepAhead();
	cout << timer.Toc() << chain[10].GetGenVelocity() << endl;
}
Code example #13
File: Relocalizer.cpp  Project: Bingxiong/lsd_slam
Relocalizer::Relocalizer(int w, int h, Eigen::Matrix3f K)
{
	for(int i=0;i<RELOCALIZE_THREADS;i++)
		running[i] = false;


	this->w = w;
	this->h = h;
	this->K = K;

	KFForReloc.clear();
	nextRelocIDX = maxRelocIDX = 0;
	continueRunning = isRunning = false;

	hasResult = false;
	resultKF = 0;
	resultFrameID = 0;
	resultFrameToKeyframe = SE3();
}
Code example #14
File: vo_node.cpp  Project: ruffsl/rpg_svo
bool VoNode::initCb(){
    srv.request.timeA = time_first;
    srv.request.timeB = time_second;

    std::cout<<"\ntime_first:\n"<<(time_first)<<std::endl;
    std::cout<<"\ntime_second:\n"<<(time_second)<<std::endl;

    ROS_INFO("Calling service get_between_pose");

    if (client.call(srv)){
        ROS_INFO("Got init pose from get_between_pose srv!");
        geometry_msgs::Pose m = srv.response.A2B;
        Eigen::Affine3d e = Eigen::Translation3d(m.position.x,
                                               m.position.y,
                                               m.position.z) *
              Eigen::Quaterniond(m.orientation.w,
                                 m.orientation.x,
                                 m.orientation.y,
                                 m.orientation.z);

        Eigen::Matrix3d rot = e.rotation();
        Eigen::Vector3d trans = e.translation();

        std::cout<<"Time diff in first and second:\n"<<(time_second-time_first)<<std::endl;
        std::cout<<time_first<<std::endl<<time_second<<std::endl;

        SE3 current_trans = SE3(rot,trans);


        std::cout<<"trans:\n"<<trans<<std::endl;
        std::cout<<"rot:\n"<<rot<<std::endl;

        vo_->InitPose(current_trans);
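        // block<3,1>(0,3) of the 4x4 homogeneous matrix is the translation, so this is the norm of the initial baseline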
        our_scale_ = current_trans.matrix().block<3,1>(0,3).norm();
        return true;
    }
    else{
        ROS_ERROR("Failed to call service get_between_pose");
        return false;
    }
}
Code example #15
File: gsystem.cpp  Project: mrdeveloperdude/OctoMY
void GSystem::calcDerivative_MomentumGlobal_Dq_Ddq(std::vector<GCoordinate*> pCoordinates_, RMatrix &DHgDq_, RMatrix &DHgDdq_)
{
	int i;
	std::list<GBody *>::iterator iter_pbody;
	std::vector<GCoordinate *>::iterator iter_pcoord;
	std::vector<SE3> M(pBodies.size());
	std::vector<GConstraintJointLoop::JOINTLOOP_CONSTRAINT_TYPE> jlc_type(pBodies.size());
	dse3 DHDqi, DHDdqi;

	for (i=0, iter_pbody = pBodies.begin(); iter_pbody != pBodies.end(); i++, iter_pbody++) {

		// save previous settings
		M[i] = (*iter_pbody)->fwdJointChain.M1;
		jlc_type[i] = (*iter_pbody)->fwdJointChain.jointLoopConstraintType;

		// prerequisites for (*iter_pbody)->getDerivative_MomentumGlobal_Dq(...) and (*iter_pbody)->getDerivative_MomentumGlobal_Ddq(...)
		(*iter_pbody)->fwdJointChain.setM(SE3());
		(*iter_pbody)->fwdJointChain.jointLoopConstraintType = GConstraintJointLoop::JOINTLOOP_ORIENTATION_POSITION;
		(*iter_pbody)->fwdJointChain.update_J();
	}

	// calculate the derivatives
	DHgDq_.SetZero(6, int(pCoordinates_.size()));
	DHgDdq_.SetZero(6, int(pCoordinates_.size()));
	for (i=0, iter_pcoord = pCoordinates_.begin(); iter_pcoord != pCoordinates_.end(); i++, iter_pcoord++) {
		DHDqi = getDerivative_MomentumGlobal_Dq(*iter_pcoord);
		DHDdqi = getDerivative_MomentumGlobal_Ddq(*iter_pcoord);
		//DHgDq_.Push(0, i, convert_to_RMatrix(DHDqi));
		//DHgDdq_.Push(0, i, convert_to_RMatrix(DHDdqi));
		matSet(&DHgDq_[6*i], DHDqi.GetArray(), 6);
		matSet(&DHgDdq_[6*i], DHDdqi.GetArray(), 6);
	}

	// restore the previous settings
	for (i=0, iter_pbody = pBodies.begin(); iter_pbody != pBodies.end(); i++, iter_pbody++) {
		(*iter_pbody)->fwdJointChain.setM(M[i]);
		(*iter_pbody)->fwdJointChain.setJointLoopConstraintType(jlc_type[i]);
	}
}
Code example #16
void localBA(
    Frame* center_kf,
    set<FramePtr>* core_kfs,
    Map* map,
    size_t& n_incorrect_edges_1,
    size_t& n_incorrect_edges_2,
    double& init_error,
    double& final_error)
{

  // init g2o
  g2o::SparseOptimizer optimizer;
  setupG2o(&optimizer);

  list<EdgeContainerSE3> edges;
  set<Point*> mps;
  list<Frame*> neib_kfs;
  size_t v_id = 0;
  size_t n_mps = 0;
  size_t n_fix_kfs = 0;
  size_t n_var_kfs = 1;
  size_t n_edges = 0;
  n_incorrect_edges_1 = 0;
  n_incorrect_edges_2 = 0;

  // Add all core keyframes
  for(set<FramePtr>::iterator it_kf = core_kfs->begin(); it_kf != core_kfs->end(); ++it_kf)
  {
    g2oFrameSE3* v_kf = createG2oFrameSE3(it_kf->get(), v_id++, false);
    (*it_kf)->v_kf_ = v_kf;
    ++n_var_kfs;
    assert(optimizer.addVertex(v_kf));

    // all points that the core keyframes observe are also optimized:
    for(list<PointFeat*>::iterator it_pt=(*it_kf)->pt_fts_.begin(); it_pt!=(*it_kf)->pt_fts_.end(); ++it_pt)
      if((*it_pt)->feat3D != NULL)
        mps.insert((*it_pt)->feat3D);
  }

  // Now go through all the points and add a measurement. Add a fixed neighbour
  // Keyframe if it is not in the set of core kfs
  double reproj_thresh_2 = Config::lobaThresh() / center_kf->cam_->errorMultiplier2();
  double reproj_thresh_1 = Config::poseOptimThresh() / center_kf->cam_->errorMultiplier2();
  double reproj_thresh_1_squared = reproj_thresh_1*reproj_thresh_1;
  for(set<Point*>::iterator it_pt = mps.begin(); it_pt!=mps.end(); ++it_pt)
  {
    // Create point vertex
    g2oPoint* v_pt = createG2oPoint((*it_pt)->pos_, v_id++, false);
    (*it_pt)->v_g2o_ = v_pt;
    assert(optimizer.addVertex(v_pt));
    ++n_mps;

    // Add edges
    list<PointFeat*>::iterator it_obs=(*it_pt)->obs_.begin();
    while(it_obs!=(*it_pt)->obs_.end())
    {
      Vector2d error = vk::project2d((*it_obs)->f) - vk::project2d((*it_obs)->frame->w2f((*it_pt)->pos_));

      if((*it_obs)->frame->v_kf_ == NULL)
      {
        // frame does not have a vertex yet -> it belongs to the neib kfs and
        // is fixed. create one:
        g2oFrameSE3* v_kf = createG2oFrameSE3((*it_obs)->frame, v_id++, true);
        (*it_obs)->frame->v_kf_ = v_kf;
        ++n_fix_kfs;
        assert(optimizer.addVertex(v_kf));
        neib_kfs.push_back((*it_obs)->frame);
      }

      // create edge
      g2oEdgeSE3* e = createG2oEdgeSE3((*it_obs)->frame->v_kf_, v_pt,
                                       vk::project2d((*it_obs)->f),
                                       true,
                                       reproj_thresh_2*Config::lobaRobustHuberWidth(),
                                       1.0 / (1<<(*it_obs)->level));
      assert(optimizer.addEdge(e));
      edges.push_back(EdgeContainerSE3(e, (*it_obs)->frame, *it_obs));
      ++n_edges;
      ++it_obs;
    }
  }

  // structure only
  g2o::StructureOnlySolver<3> structure_only_ba;
  g2o::OptimizableGraph::VertexContainer points;
  for (g2o::OptimizableGraph::VertexIDMap::const_iterator it = optimizer.vertices().begin(); it != optimizer.vertices().end(); ++it)
  {
    g2o::OptimizableGraph::Vertex* v = static_cast<g2o::OptimizableGraph::Vertex*>(it->second);
    if (v->dimension() == 3 && v->edges().size() >= 2)
      points.push_back(v);
  }
  structure_only_ba.calc(points, 10);

  // Optimization
  if(Config::lobaNumIter() > 0)
    runSparseBAOptimizer(&optimizer, Config::lobaNumIter(), init_error, final_error);

  // Update Keyframes
  for(set<FramePtr>::iterator it = core_kfs->begin(); it != core_kfs->end(); ++it)
  {
    (*it)->T_f_w_ = SE3( (*it)->v_kf_->estimate().rotation(),
                         (*it)->v_kf_->estimate().translation());
    (*it)->v_kf_ = NULL;
  }

  for(list<Frame*>::iterator it = neib_kfs.begin(); it != neib_kfs.end(); ++it)
    (*it)->v_kf_ = NULL;

  // Update Mappoints
  for(set<Point*>::iterator it = mps.begin(); it != mps.end(); ++it)
  {
    (*it)->pos_ = (*it)->v_g2o_->estimate();
    (*it)->v_g2o_ = NULL;
  }

  // Remove Measurements with too large reprojection error
  double reproj_thresh_2_squared = reproj_thresh_2*reproj_thresh_2;
  for(list<EdgeContainerSE3>::iterator it = edges.begin(); it != edges.end(); ++it)
  {
    //PointFeat* it_aux = static_cast<PointFeat*>(it_e->feature);
    if(it->edge->chi2() > reproj_thresh_2_squared) //*(1<<it->feature_->level))
    {
      PointFeat* feat_aux = static_cast<PointFeat*>(it->feature);
      map->removePtFrameRef(it->frame, feat_aux);
      ++n_incorrect_edges_2;
    }
  }

  // TODO: delete points and edges!
  init_error = sqrt(init_error)*center_kf->cam_->errorMultiplier2();
  final_error = sqrt(final_error)*center_kf->cam_->errorMultiplier2();
}
Code example #17
File: gsystem.cpp  Project: mrdeveloperdude/OctoMY
dse3 GSystem::getMomentumCOM()
{
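	// re-express the global spatial momentum in a frame translated to the system COM via the dual adjoint map dAd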
	return dAd(SE3(getPositionCOMGlobal()), getMomentumGlobal());
}
Code example #18
InitResult KltHomographyInit::addSecondFrame(FramePtr frame_cur, Matrix3d orient, Vector3d pos)
{
  trackKlt(frame_ref_, frame_cur, px_ref_, px_cur_, f_ref_, f_cur_, disparities_);
  SVO_INFO_STREAM("Init: KLT tracked "<< disparities_.size() <<" features");

  if(disparities_.size() < Config::initMinTracked())
    return FAILURE;

  double disparity = vk::getMedian(disparities_);
  SVO_INFO_STREAM("Init: KLT "<<disparity<<"px average disparity.");
  if(disparity < Config::initMinDisparity())
    return NO_KEYFRAME;

  computeHomography(
      f_ref_, f_cur_,
      frame_ref_->cam_->errorMultiplier2(), Config::poseOptimThresh(),
      inliers_, xyz_in_cur_, T_cur_from_ref_);
  SVO_INFO_STREAM("Init: Homography RANSAC "<<inliers_.size()<<" inliers.");

  if(inliers_.size() < Config::initMinInliers())
  {
    SVO_WARN_STREAM("Init WARNING: "<<Config::initMinInliers()<<" inliers minimum required.");
    return FAILURE;
  }

  // Transformation in real world
  T_cur_frame_real_scale = SE3(orient, pos);
  Vector3d trans = T_cur_frame_real_scale.translation() - T_first_frame_real_scale.translation();
  double length_real = sqrt(pow(trans[0],2) + pow(trans[1],2) + pow(trans[2],2));

  SVO_INFO_STREAM("Real world transform x: " << trans[0] << " y:" << trans[1] << " z:" << trans[2] << " length:" << length_real);

  double x = T_cur_from_ref_.translation()[0];
  double y = T_cur_from_ref_.translation()[1];
  double z = T_cur_from_ref_.translation()[2];
  double length_svo = sqrt(pow(x,2) + pow(y,2) + pow(z,2));

  SVO_INFO_STREAM("SVO transform x: " << x << " y:" << y << " z:" << z << " length:" << length_svo);

#ifdef USE_ASE_IMU
  // Rescale the map such that the real length of the movement matches with the svo movement length
  double scale =length_real / length_svo;
#else
  // Rescale the map such that the mean scene depth is equal to the specified scale
  vector<double> depth_vec;
  for(size_t i=0; i<xyz_in_cur_.size(); ++i)
    depth_vec.push_back((xyz_in_cur_[i]).z());
  double scene_depth_median = vk::getMedian(depth_vec);
  double scale = Config::mapScale()/scene_depth_median;
#endif
  frame_cur->T_f_w_ = T_cur_from_ref_ * frame_ref_->T_f_w_;
  frame_cur->T_f_w_.translation() =
     -frame_cur->T_f_w_.rotation_matrix()*(frame_ref_->pos() + scale*(frame_cur->pos() - frame_ref_->pos()));

  //frame_cur->T_f_w_ = T_cur_frame_real_scale;
  //frame_ref_->T_f_w_ = T_first_frame_real_scale;

//  // Rescale the map such that the mean scene depth is equal to the specified scale
//  vector<double> depth_vec;
//  for(size_t i=0; i<xyz_in_cur_.size(); ++i)
//    depth_vec.push_back((xyz_in_cur_[i]).z());
//  double scene_depth_median = vk::getMedian(depth_vec);
//  double scale = Config::mapScale()/scene_depth_median;
//  frame_cur->T_f_w_ = T_cur_from_ref_ * frame_ref_->T_f_w_;
//  frame_cur->T_f_w_.translation() =
//      -frame_cur->T_f_w_.rotation_matrix()*(frame_ref_->pos() + scale*(frame_cur->pos() - frame_ref_->pos()));

  // For each inlier create 3D point and add feature in both frames
  SE3 T_world_cur = frame_cur->T_f_w_.inverse();
  for(vector<int>::iterator it=inliers_.begin(); it!=inliers_.end(); ++it)
  {
    Vector2d px_cur(px_cur_[*it].x, px_cur_[*it].y);
    Vector2d px_ref(px_ref_[*it].x, px_ref_[*it].y);
    if(frame_ref_->cam_->isInFrame(px_cur.cast<int>(), 10) && frame_ref_->cam_->isInFrame(px_ref.cast<int>(), 10) && xyz_in_cur_[*it].z() > 0)
    {
      Vector3d pos = T_world_cur * (xyz_in_cur_[*it]*scale);
      Point* new_point = new Point(pos);

      Feature* ftr_cur(new Feature(frame_cur.get(), new_point, px_cur, f_cur_[*it], 0));
      frame_cur->addFeature(ftr_cur);
      new_point->addFrameRef(ftr_cur);

      Feature* ftr_ref(new Feature(frame_ref_.get(), new_point, px_ref, f_ref_[*it], 0));
      frame_ref_->addFeature(ftr_ref);
      new_point->addFrameRef(ftr_ref);
    }
  }
  return SUCCESS;
}
Code example #19
File: Relocalizer.cpp  Project: Bingxiong/lsd_slam
void Relocalizer::threadLoop(int idx)
{
	if(!multiThreading && idx != 0) return;

	SE3Tracker* tracker = new SE3Tracker(w,h,K);

	boost::unique_lock<boost::mutex> lock(exMutex);
	while(continueRunning)
	{
		// if got something: do it (unlock in the meantime)
		if(nextRelocIDX < maxRelocIDX && CurrentRelocFrame)
		{
			Frame* todo = KFForReloc[nextRelocIDX%KFForReloc.size()];
			nextRelocIDX++;
			if(todo->neighbors.size() <= 2) continue;

			std::shared_ptr<Frame> myRelocFrame = CurrentRelocFrame;

			lock.unlock();

			// initial Alignment
			SE3 todoToFrame = tracker->trackFrameOnPermaref(todo, myRelocFrame.get(), SE3());

			// try neighbours
			float todoGoodVal = tracker->pointUsage * tracker->lastGoodCount / (tracker->lastGoodCount+tracker->lastBadCount);
			if(todoGoodVal > relocalizationTH)
			{
				int numGoodNeighbours = 0;
				int numBadNeighbours = 0;

				float bestNeightbourGoodVal = todoGoodVal;
				float bestNeighbourUsage = tracker->pointUsage;
				Frame* bestKF = todo;
				SE3 bestKFToFrame = todoToFrame;
				for(Frame* nkf : todo->neighbors)
				{
					SE3 nkfToFrame_init = se3FromSim3((nkf->getScaledCamToWorld().inverse() * todo->getScaledCamToWorld() * sim3FromSE3(todoToFrame.inverse(), 1))).inverse();
					SE3 nkfToFrame = tracker->trackFrameOnPermaref(nkf, myRelocFrame.get(), nkfToFrame_init);

					float goodVal = tracker->pointUsage * tracker->lastGoodCount / (tracker->lastGoodCount+tracker->lastBadCount);
					if(goodVal > relocalizationTH*0.8 && (nkfToFrame * nkfToFrame_init.inverse()).log().norm() < 0.1)
						numGoodNeighbours++;
					else
						numBadNeighbours++;

					if(goodVal > bestNeightbourGoodVal)
					{
						bestNeightbourGoodVal = goodVal;
						bestKF = nkf;
						bestKFToFrame = nkfToFrame;
						bestNeighbourUsage = tracker->pointUsage;
					}
				}

				if(numGoodNeighbours > numBadNeighbours || numGoodNeighbours >= 5)
				{
					if(enablePrintDebugInfo && printRelocalizationInfo)
						printf("RELOCALIZED! frame %d on %d (bestNeighbour %d): good %2.1f%%, usage %2.1f%%, GoodNeighbours %d / %d\n",
								myRelocFrame->id(), todo->id(), bestKF->id(),
								100*bestNeightbourGoodVal, 100*bestNeighbourUsage,
								numGoodNeighbours, numGoodNeighbours+numBadNeighbours);

					// set everything to stop!
					continueRunning = false;
					lock.lock();
					resultRelocFrame = myRelocFrame;
					resultFrameID = myRelocFrame->id();
					resultKF = bestKF;
					resultFrameToKeyframe = bestKFToFrame.inverse();
					resultReadySignal.notify_all();
					hasResult = true;
					lock.unlock();
				}
				else
				{
					if(enablePrintDebugInfo && printRelocalizationInfo)
						printf("FAILED RELOCALIZE! frame %d on %d (bestNeighbour %d): good %2.1f%%, usage %2.1f%%, GoodNeighbours %d / %d\n",
								myRelocFrame->id(), todo->id(), bestKF->id(),
								100*bestNeightbourGoodVal, 100*bestNeighbourUsage,
								numGoodNeighbours, numGoodNeighbours+numBadNeighbours);
				}
			}

			lock.lock();
		}
		else
		{
			newCurrentFrameSignal.wait(lock);
		}
	}

	delete tracker;
}
Code example #20
void globalBA(Map* map)
{
  // init g2o
  g2o::SparseOptimizer optimizer;
  setupG2o(&optimizer);

  list<EdgeContainerSE3> edges;
  list< pair<FramePtr,Feature*> > incorrect_edges;

  // Go through all Keyframes
  size_t v_id = 0;
  double reproj_thresh_2 = Config::lobaThresh() / map->lastKeyframe()->cam_->errorMultiplier2();
  double reproj_thresh_1_squared = Config::poseOptimThresh() / map->lastKeyframe()->cam_->errorMultiplier2();
  reproj_thresh_1_squared *= reproj_thresh_1_squared;
  for(list<FramePtr>::iterator it_kf = map->keyframes_.begin();
      it_kf != map->keyframes_.end(); ++it_kf)
  {
    // New Keyframe Vertex
    g2oFrameSE3* v_kf = createG2oFrameSE3(it_kf->get(), v_id++, false);
    (*it_kf)->v_kf_ = v_kf;
    optimizer.addVertex(v_kf);
    for(list<PointFeat*>::iterator it_ftr=(*it_kf)->pt_fts_.begin(); it_ftr!=(*it_kf)->pt_fts_.end(); ++it_ftr)
    {
      // for each keyframe add edges to all observed mapoints
      Point* mp = (*it_ftr)->feat3D;
      if(mp == NULL)
        continue;
      g2oPoint* v_mp = mp->v_g2o_;
      if(v_mp == NULL)
      {
        // mappoint-vertex doesn't exist yet. create a new one:
        v_mp = createG2oPoint(mp->pos_, v_id++, false);
        mp->v_g2o_ = v_mp;
        optimizer.addVertex(v_mp);
      }

      // Due to merging of mappoints it is possible that references in kfs suddenly
      // have a very large reprojection error which may result in distorted results.
      Vector2d error = vk::project2d((*it_ftr)->f) - vk::project2d((*it_kf)->w2f(mp->pos_));
      if(error.squaredNorm() > reproj_thresh_1_squared)
        incorrect_edges.push_back(pair<FramePtr,Feature*>(*it_kf, *it_ftr));
      else
      {
        g2oEdgeSE3* e = createG2oEdgeSE3(v_kf, v_mp, vk::project2d((*it_ftr)->f),
                                         true,
                                         reproj_thresh_2*Config::lobaRobustHuberWidth());

        edges.push_back(EdgeContainerSE3(e, it_kf->get(), *it_ftr));
        optimizer.addEdge(e);
      }
    }
  }

  // Optimization
  double init_error=0.0, final_error=0.0;
  if(Config::lobaNumIter() > 0)
    runSparseBAOptimizer(&optimizer, Config::lobaNumIter(), init_error, final_error);

  // Update Keyframe and MapPoint Positions
  for(list<FramePtr>::iterator it_kf = map->keyframes_.begin();
        it_kf != map->keyframes_.end(); ++it_kf)
  {
    (*it_kf)->T_f_w_ = SE3( (*it_kf)->v_kf_->estimate().rotation(),
                            (*it_kf)->v_kf_->estimate().translation());
    (*it_kf)->v_kf_ = NULL;
    for(list<PointFeat*>::iterator it_ftr=(*it_kf)->pt_fts_.begin(); it_ftr!=(*it_kf)->pt_fts_.end(); ++it_ftr)
    {
      Point* mp = (*it_ftr)->feat3D;
      if(mp == NULL)
        continue;
      if(mp->v_g2o_ == NULL)
        continue;       // mp was updated before
      mp->pos_ = mp->v_g2o_->estimate();
      mp->v_g2o_ = NULL;
    }
  }

  // Remove Measurements with too large reprojection error
  for(list< pair<FramePtr,Feature*> >::iterator it=incorrect_edges.begin(); it!=incorrect_edges.end(); ++it)
  {
    PointFeat* feat_aux = static_cast<PointFeat*>(it->second);
    map->removePtFrameRef(it->first.get(), feat_aux);
  }

  double reproj_thresh_2_squared = reproj_thresh_2*reproj_thresh_2;
  for(list<EdgeContainerSE3>::iterator it = edges.begin(); it != edges.end(); ++it)
  {
    if(it->edge->chi2() > reproj_thresh_2_squared)
    {
      PointFeat* feat_aux = static_cast<PointFeat*>(it->feature);
      map->removePtFrameRef(it->frame, feat_aux);
    }
  }
}
Code example #21
File: gsystem_ik.cpp  Project: mrdeveloperdude/OctoMY
bool GSystemIK::buildConstrIK_dq(RMatrix &J, RMatrix &V, std::vector<GBody*> pbodies, std::vector<Vec3> pos, std::vector<se3> V_target, std::vector< std::vector<int> > idxC)
{
	int i, j, k;
	int cnt;			// a counter
	int nb;				// number of bodies
	int ncik;			// number of IK constraints
	std::list<GCoordinate*>::iterator iter_pcoord, iter_pcoord2;

	nb = int(pbodies.size());

	if ( pos.size() != (size_t)nb || V_target.size() != (size_t)nb || idxC.size() != (size_t)nb ) return false;

	// counts the number of IK constraints
	ncik = 0;
	for (i=0; i<nb; i++) {
		for ( j=0; j<int(idxC[i].size()); j++) {
			if ( idxC[i][j] < 0 || idxC[i][j] > 5 ) return false;
		}
		ncik += int(idxC[i].size());
	}
	J.SetZero(ncik, getNumCoordinates());
	V.SetZero(ncik, 1);

	// build J, V
	cnt = 0;
	for (i=0; i<nb; i++) {
		// update Jacobian
		pbodies[i]->fwdJointChain.setM(SE3(pos[i]));
		pbodies[i]->fwdJointChain.setJointLoopConstraintType(GConstraintJointLoop::JOINTLOOP_ORIENTATION_POSITION);
		pbodies[i]->fwdJointChain.update_J();

		// transformed Jacobian, Jg = [R 0; 0 R] * J
		RMatrix Jg(pbodies[i]->fwdJointChain.J.RowSize(), pbodies[i]->fwdJointChain.J.ColSize());
		//RMatrix R = convert_to_RMatrix(pbodies[i]->T_global.GetRotation());
		RMatrix R(3,3); matSet(R.GetPtr(), pbodies[i]->T_global.GetRotation().GetArray(), 9);
		Jg.Push(0, 0, R * pbodies[i]->fwdJointChain.J.Sub(0, 2, 0, pbodies[i]->fwdJointChain.J.ColSize()-1));
		Jg.Push(3, 0, R * pbodies[i]->fwdJointChain.J.Sub(3, 5, 0, pbodies[i]->fwdJointChain.J.ColSize()-1));

		// build J
		for (j=0, iter_pcoord = pbodies[i]->fwdJointChain.pCoordinates.begin(); iter_pcoord != pbodies[i]->fwdJointChain.pCoordinates.end(); j++, iter_pcoord++) {

			// find index of pbodies[i]->fwdJointChain.pCoordinates[j] in pCoordinates
			int idx = -1;
			for (k=0, iter_pcoord2 = pCoordinates.begin(); iter_pcoord2 != pCoordinates.end(); k++, iter_pcoord2++) {
				if ( *iter_pcoord2 == *iter_pcoord ) { idx = k; break; }
			}
			if ( idx < 0 ) return false;

			// insert j-th column of the body Jacobian to the right place
			for (k=0; k<int(idxC[i].size()); k++) {
				J(cnt+k, idx) = Jg(idxC[i][k], j);
			}
		}

		// build V
		for (j=0; j<int(idxC[i].size()); j++) {
			V(cnt+j, 0) = V_target[i][idxC[i][j]];
		}

		cnt += int(idxC[i].size());
	}

	return true;
}
Code example #22
Frame* TrackableKeyFrameSearch::findRePositionCandidate(Frame* frame, float maxScore)
{
    std::vector<TrackableKFStruct, Eigen::aligned_allocator<TrackableKFStruct> > potentialReferenceFrames =
            findEuclideanOverlapFrames(frame, maxScore / (KFDistWeight*KFDistWeight), 0.75);

	float bestScore = maxScore;
	float bestDist, bestUsage;
	float bestPoseDiscrepancy = 0;
	Frame* bestFrame = 0;
	SE3 bestRefToFrame = SE3();
	SE3 bestRefToFrame_tracked = SE3();

	int checkedSecondary = 0;
	for(unsigned int i=0;i<potentialReferenceFrames.size();i++)
	{
		if(frame->getTrackingParent() == potentialReferenceFrames[i].ref)
			continue;

		if(potentialReferenceFrames[i].ref->idxInKeyframes < INITIALIZATION_PHASE_COUNT)
			continue;

		struct timeval tv_start, tv_end;
		gettimeofday(&tv_start, NULL);
		tracker->checkPermaRefOverlap(potentialReferenceFrames[i].ref, potentialReferenceFrames[i].refToFrame);
		gettimeofday(&tv_end, NULL);
		msTrackPermaRef = 0.9*msTrackPermaRef + 0.1*((tv_end.tv_sec-tv_start.tv_sec)*1000.0f + (tv_end.tv_usec-tv_start.tv_usec)/1000.0f);
		nTrackPermaRef++;

		float score = getRefFrameScore(potentialReferenceFrames[i].dist, tracker->pointUsage);

		if(score < maxScore)
		{
			SE3 RefToFrame_tracked = tracker->trackFrameOnPermaref(potentialReferenceFrames[i].ref, frame, potentialReferenceFrames[i].refToFrame);
			Sophus::Vector3d dist = RefToFrame_tracked.translation() * potentialReferenceFrames[i].ref->meanIdepth;

			float newScore = getRefFrameScore(dist.dot(dist), tracker->pointUsage);
			float poseDiscrepancy = (potentialReferenceFrames[i].refToFrame * RefToFrame_tracked.inverse()).log().norm();
			float goodVal = tracker->pointUsage * tracker->lastGoodCount / (tracker->lastGoodCount+tracker->lastBadCount);
			checkedSecondary++;

			if(tracker->trackingWasGood && goodVal > relocalizationTH && newScore < bestScore && poseDiscrepancy < 0.2)
			{
				bestPoseDiscrepancy = poseDiscrepancy;
				bestScore = score;
				bestFrame = potentialReferenceFrames[i].ref;
				bestRefToFrame = potentialReferenceFrames[i].refToFrame;
				bestRefToFrame_tracked = RefToFrame_tracked;
				bestDist = dist.dot(dist);
				bestUsage = tracker->pointUsage;
			}
		}
	}

	if(bestFrame != 0)
	{
		if(enablePrintDebugInfo && printRelocalizationInfo)
			printf("FindReferences for %d: Checked %d (%d). dist %.3f + usage %.3f = %.3f. pose discrepancy %.2f. TAKE %d!\n",
					(int)frame->id(), (int)potentialReferenceFrames.size(), checkedSecondary,
					bestDist, bestUsage, bestScore,
					bestPoseDiscrepancy, bestFrame->id());
		return bestFrame;
	}
	else
	{
		if(enablePrintDebugInfo && printRelocalizationInfo)
			printf("FindReferences for %d: Checked %d (%d), bestScore %.2f. MAKE NEW\n",
					(int)frame->id(), (int)potentialReferenceFrames.size(), checkedSecondary, bestScore);
		return 0;
	}
}
Code example #23
void vpSiloWorld::Create(void)
{
	vpMaterial::GetDefaultMaterial()->SetRestitution(0.3);
	vpMaterial::GetDefaultMaterial()->SetDynamicFriction(0.3);

	for ( int i = 0; i < NUM_SILO_LEVEL; i++ )
	{
		for ( int j = 0; j < NUM_SILO_BLOCKS; j++ )
		{
			block[i][j].AddGeometry(new vpBox(Vec3(1, 2, 1)));
			block[i][j].SetFrame(Exp(se3(0, 0, (scalar)(j + (scalar)0.5 * (i % 2)) / (scalar)NUM_SILO_BLOCKS * M_2PI, 0, 0, 0)) * SE3(Vec3(4.4, 0, 1.01 * i)));
			AddBody(&block[i][j]);
			block[i][j].SetInertia(Inertia(1));
		}
	}

	for ( int i = 0; i < NUM_BALL; i++ )
	{
		ball[i].AddGeometry(new vpSphere(0.25));
		ball[i].SetFrame(Vec3(drand(2.0), drand(2.0), NUM_SILO_LEVEL + (25.0 / NUM_BALL) * i));
		ball[i].SetGenVelocity(se3(0,0,0,drand(0.1), drand(0.1), drand(0.1)));
		ball[i].SetInertia(Inertia(0.5));
		AddBody(&ball[i]);
	}

	floor.AddGeometry(new vpBox(Vec3(100, 100, 1)));
	floor.SetFrame(Vec3(0, 0, -1.0));
	floor.SetGround();
	AddBody(&floor);

	roof.AddGeometry(new vpBox(Vec3(10, 10, 1)), Vec3(0.0, 0.0, -1));
	roof.SetFrame(Vec3(0, 0, -10));
	AddBody(&roof);

	//SetIntegrator(VP::IMPLICIT_EULER);	
	SetIntegrator(VP::EULER);
	SetTimeStep(0.005);
	SetGravity(Vec3(0.0, 0.0, -10.0));

	Initialize();
	BackupState();

	renderer.SetTarget(this);
	
	if ( materialArray.size() )
	{
		renderer.SetMaterial(&floor, materialArray[0]);
		renderer.SetMaterial(&roof, materialArray[1]);

		for ( int i = 0; i < NUM_SILO_LEVEL; i++ )
		{
			for ( int j = 0; j < NUM_SILO_BLOCKS; j++ )
			{
				renderer.SetMaterial(&block[i][j], materialArray[2]);
			}
		}

		for ( int i = 0; i < NUM_BALL; i++ )
		{
			renderer.SetMaterial(&ball[i], materialArray[1]);
		}
	}
}
Code example #24
void VisualOdometry::poseEstimationPnP()
{
    // construct the 3d 2d observations
    vector<cv::Point3f> pts3d;
    vector<cv::Point2f> pts2d;
    
    for ( cv::DMatch m:feature_matches_ )
    {
        pts3d.push_back( pts_3d_ref_[m.queryIdx] );
        pts2d.push_back( keypoints_curr_[m.trainIdx].pt );
    }
    
    Mat K = ( cv::Mat_<double>(3,3)<<
        ref_->camera_->fx_, 0, ref_->camera_->cx_,
        0, ref_->camera_->fy_, ref_->camera_->cy_,
        0,0,1
    );
    Mat rvec, tvec, inliers;
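    // RANSAC PnP; with the OpenCV 3 signature the trailing arguments are 100 iterations, a 4.0 px reprojection threshold and 0.99 confidence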
    cv::solvePnPRansac( pts3d, pts2d, K, Mat(), rvec, tvec, false, 100, 4.0, 0.99, inliers );
    num_inliers_ = inliers.rows;
    cout<<"pnp inliers: "<<num_inliers_<<endl;
    T_c_r_estimated_ = SE3(
        SO3(rvec.at<double>(0,0), rvec.at<double>(1,0), rvec.at<double>(2,0)), 
        Vector3d( tvec.at<double>(0,0), tvec.at<double>(1,0), tvec.at<double>(2,0))
    );
    
    // using bundle adjustment to optimize the pose 
    typedef g2o::BlockSolver<g2o::BlockSolverTraits<6,2>> Block;
    Block::LinearSolverType* linearSolver = new g2o::LinearSolverDense<Block::PoseMatrixType>();
    Block* solver_ptr = new Block( linearSolver );
    g2o::OptimizationAlgorithmLevenberg* solver = new g2o::OptimizationAlgorithmLevenberg ( solver_ptr );
    g2o::SparseOptimizer optimizer;
    optimizer.setAlgorithm ( solver );
    
    g2o::VertexSE3Expmap* pose = new g2o::VertexSE3Expmap();
    pose->setId ( 0 );
    pose->setEstimate ( g2o::SE3Quat (
        T_c_r_estimated_.rotation_matrix(), 
        T_c_r_estimated_.translation()
    ) );
    optimizer.addVertex ( pose );

    // edges
    for ( int i=0; i<inliers.rows; i++ )
    {
        int index = inliers.at<int>(i,0);
        // 3D -> 2D projection
        EdgeProjectXYZ2UVPoseOnly* edge = new EdgeProjectXYZ2UVPoseOnly();
        edge->setId(i);
        edge->setVertex(0, pose);
        edge->camera_ = curr_->camera_.get();
        edge->point_ = Vector3d( pts3d[index].x, pts3d[index].y, pts3d[index].z );
        edge->setMeasurement( Vector2d(pts2d[index].x, pts2d[index].y) );
        edge->setInformation( Eigen::Matrix2d::Identity() );
        optimizer.addEdge( edge );
    }
    
    optimizer.initializeOptimization();
    optimizer.optimize(10);
    
    T_c_r_estimated_ = SE3 (
        pose->estimate().rotation(),
        pose->estimate().translation()
    );
}