Example #1
  // assumes cv::Mats are of type <float>
  int PoseEstimator::estimate(const matches_t &matches, 
                              const cv::Mat &train_kpts, const cv::Mat &query_kpts,
                              const cv::Mat &train_pts, const cv::Mat &query_pts,
			      const cv::Mat &K, const double baseline)
  {
    // make sure we're using floats
    if (train_kpts.depth() != CV_32F ||
	query_kpts.depth() != CV_32F ||
	train_pts.depth() != CV_32F ||
	query_pts.depth() != CV_32F)
      throw std::runtime_error("Expected input to be of floating point (CV_32F)");

    int nmatch = matches.size();

    cout << endl << "[pe3d] Matches: " << nmatch << endl;

    // set up data structures for fast processing
    // indices to good matches

    std::vector<Eigen::Vector3f> t3d, q3d;
    std::vector<Eigen::Vector2f> t2d, q2d;
    std::vector<int> m;		// keep track of matches 

    for (int i=0; i<nmatch; i++)
      {
	const float* ti = train_pts.ptr<float>(matches[i].trainIdx);
	const float* qi = query_pts.ptr<float>(matches[i].queryIdx);
	const float* ti2 = train_kpts.ptr<float>(matches[i].trainIdx);
	const float* qi2 = query_kpts.ptr<float>(matches[i].queryIdx);

	// make sure we have points within range; eliminates NaNs as well
        if (matches[i].distance < 60 && // TODO: get this out of the estimator
	    ti[2] < maxMatchRange && 
	    qi[2] < maxMatchRange)
	  {
	    m.push_back(i);
	    t2d.push_back(Eigen::Vector2f(ti2[0],ti2[1]));
	    q2d.push_back(Eigen::Vector2f(qi2[0],qi2[1]));
	    t3d.push_back(Eigen::Vector3f(ti[0],ti[1],ti[2]));
	    q3d.push_back(Eigen::Vector3f(qi[0],qi[1],qi[2]));
          }
      }


    nmatch = m.size();
    cout << "[pe3d] Filtered matches: " << nmatch << endl;

    if (nmatch < 3) return 0;   // can't do it...

    int bestinl = 0;
    Matrix3f Kf;
    cv::cv2eigen(K,Kf);		    // camera matrix
    float fb = Kf(0,0)*baseline; // focal length times baseline
    float maxInlierXDist2 = maxInlierXDist*maxInlierXDist;
    float maxInlierDDist2 = maxInlierDDist*maxInlierDDist;

    // set up minimum hyp pixel distance
    float minpdist = minPDist * Kf(0,2) * 2.0;
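    // Kf(0,2) is the principal-point x (cx), so 2*cx approximates the image width;
    // minPDist is therefore interpreted as a fraction of the image width.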

    // RANSAC loop
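    // Hypothesize-and-verify: each iteration samples three filtered matches that are
    // well separated in both images, estimates a rigid transform from the three
    // 3D-3D correspondences via SVD, and scores it by counting matches whose
    // reprojection (x, y) and disparity errors fall within the inlier thresholds.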
    int numchoices = 1 + numRansac / 10;
    for (int ii=0; ii<numRansac; ii++) 
      {
        // find a candidate
	int k;
        int a=rand()%nmatch;
	int b;
	for (k=0; k<numchoices; k++)
	  {
	    b=rand()%nmatch;
	    if (a!=b && (t2d[a]-t2d[b]).norm() > minpdist
		     && (q2d[a]-q2d[b]).norm() > minpdist)
	      // TODO: add distance check
	      break;
	  }
	if (k >= numchoices) continue;
        int c;
	for (k=0; k<numchoices+numchoices; k++)
	  {
	    c=rand()%nmatch;
	    if (c!=b && c!=a && (t2d[a]-t2d[c]).norm() > minpdist
		&& (t2d[b]-t2d[c]).norm() > minpdist
		&& (q2d[a]-q2d[c]).norm() > minpdist
		&& (q2d[b]-q2d[c]).norm() > minpdist)
	      // TODO: add distance check
	      break;
	  }
	if (k >= numchoices+numchoices) continue;

        // get centroids
        Vector3f p0a = t3d[a];
        Vector3f p0b = t3d[b];
        Vector3f p0c = t3d[c];

        Vector3f p1a = q3d[a];
        Vector3f p1b = q3d[b];
        Vector3f p1c = q3d[c];

        Vector3f c0 = (p0a+p0b+p0c)*(1.0/3.0);
        Vector3f c1 = (p1a+p1b+p1c)*(1.0/3.0);

        // subtract out
        p0a -= c0;
        p0b -= c0;
        p0c -= c0;
        p1a -= c1;
        p1b -= c1;
        p1c -= c1;

        Matrix3f Hf = p1a*p0a.transpose() + p1b*p0b.transpose() +
	              p1c*p0c.transpose();
	Matrix3d H = Hf.cast<double>();

#if 0
        cout << p0a.transpose() << endl;
        cout << p0b.transpose() << endl;
        cout << p0c.transpose() << endl;
#endif

        // do the SVD thang
        JacobiSVD<Matrix3d> svd(H, ComputeFullU | ComputeFullV);
        Matrix3d V = svd.matrixV();
        Matrix3d R = V * svd.matrixU().transpose();          
        double det = R.determinant();
        //ntot++;
        if (det < 0.0)
          {
            //nneg++;
            V.col(2) = V.col(2)*-1.0;
            R = V * svd.matrixU().transpose();
          }
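        // det(R) < 0 means the SVD produced a reflection rather than a rotation;
        // flipping the sign of V's last column yields the closest proper rotation.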

	Vector3d cd0 = c0.cast<double>();
	Vector3d cd1 = c1.cast<double>();
        Vector3d tr = cd0-R*cd1;    // translation 

	//      cout << "[PE test] R: " << endl << R << endl;
	//	cout << "[PE test] t: " << tr.transpose() << endl;

        Vector3f trf = tr.cast<float>();      // convert to floats
        Matrix3f Rf = R.cast<float>();
	Rf = Kf*Rf;
	trf = Kf*trf;
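        // Pre-multiplying by the camera matrix folds the projection into R and t,
        // so the inlier loop below only needs the perspective divide by ipt.z().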

        // find inliers, based on image reprojection
        int inl = 0;
        for (int i=0; i<nmatch; i++)
          {
            const Vector3f &pt1 = q3d[i];
            Vector3f ipt = Rf*pt1+trf; // transform query point
            float iz = 1.0/ipt.z();
	    Vector2f &kp = t2d[i];
	    //	    cout << kp.transpose() << " " << pt1.transpose() << " " << ipt.transpose() << endl;
            float dx = kp.x() - ipt.x()*iz;
            float dy = kp.y() - ipt.y()*iz;
            float dd = fb/t3d[i].z() - fb/ipt.z(); // diff of disparities, could pre-compute t3d
            if (dx*dx < maxInlierXDist2 && dy*dy < maxInlierXDist2
                && dd*dd < maxInlierDDist2)
               //              inl+=(int)fsqrt(ipt.z()); // clever way to weight closer points
              inl++;
          }
        
        if (inl > bestinl) 
          {
            bestinl = inl;
            trans = tr.cast<float>();      // convert to floats
            rot = R.cast<float>();
          }

      }

    cout << "[pe3d] Best inliers: " << bestinl << endl;
//    printf("Total ransac: %d  Neg det: %d\n", ntot, nneg);

    // reduce matches to inliers
    matches_t inls;    // temporary for current inliers
    std::vector<int> inlIdx;    // indices (into the filtered arrays) of those inliers, for the polish step
    inliers.clear();
    Matrix3f Rf = Kf*rot;
    Vector3f trf = Kf*trans;

    //    cout << "[pe3e] R: " << endl << rot << endl;
    cout << "[pe3d] t: " << trans.transpose() << endl;

    AngleAxisf aa;
    aa.fromRotationMatrix(rot);
    cout << "[pe3d] AA: " << aa.angle()*180.0/M_PI << "   " << aa.axis().transpose() << endl;

    for (int i=0; i<nmatch; i++)
      {
	Vector3f &pt1 = q3d[i];
        Vector3f ipt = Rf*pt1+trf; // transform query point
        float iz = 1.0/ipt.z();
	Vector2f &kp = t2d[i];
	//	cout << kp.transpose() << " " << pt1.transpose() << " " << ipt.transpose() << endl;
        float dx = kp.x() - ipt.x()*iz;
        float dy = kp.y() - ipt.y()*iz;
        float dd = fb/t3d[i].z() - fb/ipt.z(); // diff of disparities, could pre-compute t3d

        if (dx*dx < maxInlierXDist2 && dy*dy < maxInlierXDist2 && 
            dd*dd < maxInlierDDist2)
	  {
	    inls.push_back(matches[m[i]]);
	    inlIdx.push_back(i);	// remember which filtered match each inlier came from
	  }
      }

    cout << "[pe3d] Final inliers: " << inls.size() << endl;

    // polish with SVD
    if (polish)
      {
	Matrix3d Rd = rot.cast<double>();
	Vector3d trd = trans.cast<double>();
	StereoPolish pol(5,false);
	pol.polish(inls,train_kpts,query_kpts,train_pts,query_pts,K,baseline,
	   Rd,trd);

	AngleAxisd aa;
	aa.fromRotationMatrix(Rd);
	cout << "[pe3d] Polished t: " << trd.transpose() << endl;
	cout << "[pe3d] Polished AA: " << aa.angle()*180.0/M_PI << "   " << aa.axis().transpose() << endl;

	int num = inls.size();
	// get centroids
	Vector3f c0(0,0,0);
	Vector3f c1(0,0,0);
	for (int i=0; i<num; i++)
	  {
	    c0 += t3d[inlIdx[i]];	// accumulate over the inlier subset, not the first num filtered matches
	    c1 += q3d[inlIdx[i]];
	  }

	c0 = c0 / (float)num;
	c1 = c1 / (float)num;

        // subtract out and create H matrix
	Matrix3f Hf;
	Hf.setZero();

	for (int i=0; i<num; i++)
	  {
	    Vector3f p0 = t3d[inlIdx[i]]-c0;
	    Vector3f p1 = q3d[inlIdx[i]]-c1;
	    Hf += p1*p0.transpose();
	  }

	Matrix3d H = Hf.cast<double>();

        // do the SVD thang
        JacobiSVD<Matrix3d> svd(H, ComputeFullU | ComputeFullV);
        Matrix3d V = svd.matrixV();
        Matrix3d R = V * svd.matrixU().transpose();          
        double det = R.determinant();
        //ntot++;
        if (det < 0.0)
          {
            //nneg++;
            V.col(2) = V.col(2)*-1.0;
            R = V * svd.matrixU().transpose();
          }

	Vector3d cd0 = c0.cast<double>();
	Vector3d cd1 = c1.cast<double>();
        Vector3d tr = cd0-R*cd1;    // translation 


	aa.fromRotationMatrix(R);
	cout << "[pe3d] t: " << tr.transpose() << endl;
	cout << "[pe3d] AA: " << aa.angle()*180.0/M_PI << "   " << aa.axis().transpose() << endl;

#if 0
        // system
        SysSBA sba;
        sba.verbose = 0;

        // set up nodes
        // should have a frame => node function        
        Vector4d v0 = Vector4d(0,0,0,1);
        Quaterniond q0 = Quaternion<double>(Vector4d(0,0,0,1));
        sba.addNode(v0, q0, f0.cam, true);
        
        Quaterniond qr1(rot);   // from rotation matrix
        Vector4d temptrans = Vector4d(trans(0), trans(1), trans(2), 1.0);

        //        sba.addNode(temptrans, qr1.normalized(), f1.cam, false);
        qr1.normalize();
        sba.addNode(temptrans, qr1, f1.cam, false);

        int in = 3;
        if (in > (int)inls.size())
          in = inls.size();

        // set up projections
        for (int i=0; i<(int)inls.size(); i++)
          {
            // add point
            int i0 = inls[i].queryIdx;
            int i1 = inls[i].trainIdx;
            Vector4d pt = query_pts[i0];
            sba.addPoint(pt);
            
            // projected point, ul,vl,ur
            Vector3d ipt;
            ipt(0) = f0.kpts[i0].pt.x;
            ipt(1) = f0.kpts[i0].pt.y;
            ipt(2) = ipt(0)-f0.disps[i0];
            sba.addStereoProj(0, i, ipt);

            // projected point, ul,vl,ur
            ipt(0) = f1.kpts[i1].pt.x;
            ipt(1) = f1.kpts[i1].pt.y;
            ipt(2) = ipt(0)-f1.disps[i1];
            sba.addStereoProj(1, i, ipt);
          }

        sba.huber = 2.0;
        sba.doSBA(5,10e-4,SBA_DENSE_CHOLESKY);
        int nbad = sba.removeBad(2.0);
//        cout << endl << "Removed " << nbad << " projections > 2 pixels error" << endl;
        sba.doSBA(5,10e-5,SBA_DENSE_CHOLESKY);

//        cout << endl << sba.nodes[1].trans.transpose().head(3) << endl;

        // get the updated transform
	      trans = sba.nodes[1].trans.head(3);
	      Quaterniond q1;
	      q1 = sba.nodes[1].qrot;
	      rot = q1.toRotationMatrix();

        // set up inliers
        inliers.clear();
        for (int i=0; i<(int)inls.size(); i++)
          {
            ProjMap &prjs = sba.tracks[i].projections;
            if (prjs[0].isValid && prjs[1].isValid) // valid track
              inliers.push_back(inls[i]);
          }
#if 0
        printf("Inliers: %d   After polish: %d\n", (int)inls.size(), (int)inliers.size());
#endif

#endif

      }

    inliers = inls;
    return (int)inls.size();
  }
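
The three-point centroid/SVD step above is the classical Kabsch/Horn closed-form alignment. As an illustrative aside (not something the estimator itself uses), Eigen exposes the same closed form as Eigen::umeyama; a minimal sketch of applying it to matched point sets such as the q3d/t3d inlier subsets, with scaling disabled:

#include <Eigen/Dense>
#include <vector>

// Illustrative sketch only: closed-form rigid alignment of matched 3D point sets
// with Eigen::umeyama (scaling disabled). The inputs stand in for the inlier
// subsets of q3d (query) and t3d (train) gathered by the estimator above; both
// vectors are assumed to have the same length.
Eigen::Matrix4f alignQueryToTrain(const std::vector<Eigen::Vector3f> &query,
                                  const std::vector<Eigen::Vector3f> &train)
{
  // Pack the correspondences into 3xN matrices, one column per match.
  Eigen::Matrix3Xf src(3, query.size()), dst(3, train.size());
  for (size_t i = 0; i < query.size(); ++i)
    {
      src.col(i) = query[i];
      dst.col(i) = train[i];
    }

  // Returns a 4x4 homogeneous transform T with dst ~ T * src, i.e. the same
  // R and t (t = c0 - R*c1) that the centroid/SVD code computes from 3 points.
  return Eigen::umeyama(src, dst, false);
}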
Example #2
void setupSBA(SysSBA &sys)
{
    // Create camera parameters.
    frame_common::CamParams cam_params;
    cam_params.fx = 430; // Focal length in x
    cam_params.fy = 430; // Focal length in y
    cam_params.cx = 320; // X position of principal point
    cam_params.cy = 240; // Y position of principal point
    cam_params.tx = -30; // Baseline (stereo projections are added below)

    // Define dimensions of the image.
    int maxx = 640;
    int maxy = 480;

    // Create a plane containing a wall of points.
    Plane middleplane;
    middleplane.resize(3, 2, 10, 5);
    middleplane.translate(0.0, 0.0, 7.0);
    
    Plane leftplane;
    leftplane.resize(1, 2, 6, 12);
    //    leftplane.rotate(-PI/4.0, 0, 1, 0);
    leftplane.translate(0, 0, 7.0);
    
    Plane rightplane;
    rightplane.resize(1, 2, 6, 12);
    //    rightplane.rotate(PI/4.0, 0, 1, 0);
    rightplane.translate(2, 0, 7.0);
    
    Plane topplane;
    topplane.resize(1, 1.5, 6, 12);
    //    topplane.rotate(PI/4.0, 1, 0, 0);
    topplane.translate(2, 0, 7.0);

    // Vector containing the true point positions.
  
    vector<Point, Eigen::aligned_allocator<Point> > points;
    vector<Eigen::Vector3d, Eigen::aligned_allocator<Eigen::Vector3d> > normals;
    
    points.insert(points.end(), middleplane.points.begin(), middleplane.points.end());
    normals.insert(normals.end(), middleplane.points.size(), middleplane.normal);
    
    points.insert(points.end(), leftplane.points.begin(), leftplane.points.end());
    normals.insert(normals.end(), leftplane.points.size(), leftplane.normal);
    
    points.insert(points.end(), rightplane.points.begin(), rightplane.points.end());
    normals.insert(normals.end(), rightplane.points.size(), rightplane.normal);
    
    points.insert(points.end(), topplane.points.begin(), topplane.points.end());
    normals.insert(normals.end(), topplane.points.size(), topplane.normal);
    
    // Create nodes and add them to the system.
    unsigned int nnodes = 2; // Number of nodes.
    double path_length = 0.5; // Length of the path the nodes traverse.

    // Set the random seed.
    unsigned short seed[3];
    seed[0] = seed[1] = seed[2] = (unsigned short)time(NULL);
    seed48(seed);               // seed48 expects an array of three unsigned shorts
    
    unsigned int i = 0, j = 0;
    
    for (i = 0; i < nnodes; i++)
    { 
      // Translate in the x direction over the node path.
      Vector4d trans(i/(nnodes-1.0)*path_length, 0, 0, 1);
            
#if 1
      if (i >= 0)
	{
	  // perturb a little
	  double tnoise = 0.5;	// meters
	  trans.x() += tnoise*(drand48()-0.5);
	  trans.y() += tnoise*(drand48()-0.5);
	  trans.z() += tnoise*(drand48()-0.5);
	}
#endif

      // Don't rotate.
      Quaterniond rot(1, 0, 0, 0);
#if 1
      if (i >= 0)
	{
	  // perturb a little
	  double qnoise = 0.1;	// quaternion component noise (unitless)
	  rot.x() += qnoise*(drand48()-0.5);
	  rot.y() += qnoise*(drand48()-0.5);
	  rot.z() += qnoise*(drand48()-0.5);
	}
#endif
      rot.normalize();
      
      // Add a new node to the system.
      sys.addNode(trans, rot, cam_params, false);
    }
        
    double pointnoise = 1.0;
    
    // Add points into the system, and add noise.
    for (i = 0; i < points.size(); i++)
    {
      // Add up to ±0.5 units of noise to each coordinate.
      Vector4d temppoint = points[i];
      temppoint.x() += pointnoise*(drand48() - 0.5);
      temppoint.y() += pointnoise*(drand48() - 0.5);
      temppoint.z() += pointnoise*(drand48() - 0.5);
      sys.addPoint(temppoint);
    }
    
    Vector2d proj2d;
    Vector3d proj, pc, baseline;
    
    Vector3d imagenormal(0, 0, 1);
    
    Matrix3d covar0;
    covar0 << sqrt(imagenormal(0)), 0, 0,
              0, sqrt(imagenormal(1)), 0,
              0, 0, sqrt(imagenormal(2));
    Matrix3d covar;
    
    Quaterniond rotation;
    Matrix3d rotmat;
    
    unsigned int midindex = middleplane.points.size();
    unsigned int leftindex = midindex + leftplane.points.size();
    unsigned int rightindex = leftindex + rightplane.points.size();
    printf("Normal for Middle Plane: [%f %f %f], index %d -> %d\n", middleplane.normal.x(), middleplane.normal.y(), middleplane.normal.z(), 0, midindex-1);
    printf("Normal for Left Plane:   [%f %f %f], index %d -> %d\n", leftplane.normal.x(), leftplane.normal.y(), leftplane.normal.z(), midindex, leftindex-1);
    printf("Normal for Right Plane:  [%f %f %f], index %d -> %d\n", rightplane.normal.x(), rightplane.normal.y(), rightplane.normal.z(), leftindex, rightindex-1);
    
    // Project points into nodes.
    for (i = 0; i < points.size(); i++)
    {
      for (j = 0; j < sys.nodes.size(); j++)
      {
        // Project the point into the node's image coordinate system.
        sys.nodes[j].setProjection();
        sys.nodes[j].project2im(proj2d, points[i]);
        
        
        // Camera coords for right camera
        baseline << sys.nodes[j].baseline, 0, 0;
        pc = sys.nodes[j].Kcam * (sys.nodes[j].w2n*points[i] - baseline); 
        proj.head<2>() = proj2d;
        proj(2) = pc(0)/pc(2);
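        // proj is the stereo measurement (ul, vl, ur): the left-image pixel from
        // project2im plus the column of the same point in the right camera
        // (equivalently ul minus the disparity), obtained by shifting the camera
        // by the baseline before projecting.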
        
        // If valid (within the range of the image size), add the stereo 
        // projection to SBA.
        if (proj.x() > 0 && proj.x() < maxx && proj.y() > 0 && proj.y() < maxy)
        {
          sys.addStereoProj(j, i, proj);
          
          // Create the covariance matrix: 
          // image plane normal = [0 0 1]
          // wall normal = [0 0 -1]
          // covar = (R)T*[0 0 0;0 0 0;0 0 1]*R
          
          rotation.setFromTwoVectors(imagenormal, normals[i]);
          rotmat = rotation.toRotationMatrix();
          covar = rotmat.transpose()*covar0*rotmat;
          
	  //          if (!(i % sys.nodes.size() == j))
	  //            sys.setProjCovariance(j, i, covar);
        }
      }
    }
    
    // Add noise to node position.
    
    double transscale = 2.0;
    double rotscale = 0.2;
    
    // Don't actually add noise to the first node, since it's fixed.
    for (i = 1; i < sys.nodes.size(); i++)
    {
      Vector4d temptrans = sys.nodes[i].trans;
      Quaterniond tempqrot = sys.nodes[i].qrot;
      
      // Add error to both translation and rotation.
      temptrans.x() += transscale*(drand48() - 0.5);
      temptrans.y() += transscale*(drand48() - 0.5);
      temptrans.z() += transscale*(drand48() - 0.5);
      tempqrot.x() += rotscale*(drand48() - 0.5);
      tempqrot.y() += rotscale*(drand48() - 0.5);
      tempqrot.z() += rotscale*(drand48() - 0.5);
      tempqrot.normalize();
      
      sys.nodes[i].trans = temptrans;
      sys.nodes[i].qrot = tempqrot;
      
      // These methods should be called to update the node.
      sys.nodes[i].normRot();
      sys.nodes[i].setTransform();
      sys.nodes[i].setProjection();
      sys.nodes[i].setDr(true);
    }
        
}
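
setupSBA only constructs the system; the page does not show a driver. A minimal sketch of how such a system is typically run, reusing the doSBA call style seen in the polish step above (the sba/sba.h include path and the iteration count are assumptions):

#include <sba/sba.h>   // assumed header location for SysSBA / SBA_DENSE_CHOLESKY

int main()
{
  SysSBA sys;
  sys.verbose = 1;               // print optimizer progress
  setupSBA(sys);                 // nodes, points, stereo projections, noise (defined above)

  // Run a handful of iterations with the dense Cholesky solver used above.
  sys.doSBA(10, 1.0e-4, SBA_DENSE_CHOLESKY);
  return 0;
}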
Example #3
  int PoseEstimator3d::estimate(const Frame& f0, const Frame& f1, 
                                const std::vector<cv::DMatch> &matches)
  {
    // convert keypoints in match to 3d points
    std::vector<Vector4d, aligned_allocator<Vector4d> > p0; // homogeneous coordinates
    std::vector<Vector4d, aligned_allocator<Vector4d> > p1;

    int nmatch = matches.size();

    // set up data structures for fast processing
    // indices to good matches
    vector<int> m0, m1;
    for (int i=0; i<nmatch; i++)
      {
        if (f0.disps[matches[i].queryIdx] > minMatchDisp && 
            f1.disps[matches[i].trainIdx] > minMatchDisp)
          {
            m0.push_back(matches[i].queryIdx);
            m1.push_back(matches[i].trainIdx);
          }
      }

    nmatch = m0.size();
    if (nmatch < 3) return 0;   // can't do it...

    int bestinl = 0;

    // RANSAC loop
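    // Candidate sampling and scoring are independent across iterations, so the loop
    // runs under OpenMP; the best-hypothesis update is serialized in the omp
    // critical section further down.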
    #pragma omp parallel for shared( bestinl )
    for (int i=0; i<numRansac; i++) 
      {
        // find a candidate
        int a=rand()%nmatch;
        int b = a;
        while (a==b)
          b=rand()%nmatch;
        int c = a;
        while (a==c || b==c)
          c=rand()%nmatch;

        int i0a = m0[a];
        int i0b = m0[b];
        int i0c = m0[c];
        int i1a = m1[a];
        int i1b = m1[b];
        int i1c = m1[c];

        if (i0a == i0b || i0a == i0c || i0b == i0c ||
            i1a == i1b || i1a == i1c || i1b == i1c)
          continue;

        // get centroids
        Vector3d p0a = f0.pts[i0a].head<3>();
        Vector3d p0b = f0.pts[i0b].head<3>();
        Vector3d p0c = f0.pts[i0c].head<3>();
        Vector3d p1a = f1.pts[i1a].head<3>();
        Vector3d p1b = f1.pts[i1b].head<3>();
        Vector3d p1c = f1.pts[i1c].head<3>();

        Vector3d c0 = (p0a+p0b+p0c)*(1.0/3.0);
        Vector3d c1 = (p1a+p1b+p1c)*(1.0/3.0);

        // subtract out
        p0a -= c0;
        p0b -= c0;
        p0c -= c0;
        p1a -= c1;
        p1b -= c1;
        p1c -= c1;

        Matrix3d H = p1a*p0a.transpose() + p1b*p0b.transpose() +
                     p1c*p0c.transpose();

#if 0
        cout << p0a.transpose() << endl;
        cout << p0b.transpose() << endl;
        cout << p0c.transpose() << endl;
#endif

        // do the SVD thang
        JacobiSVD<Matrix3d> svd(H, ComputeFullU | ComputeFullV);
        Matrix3d V = svd.matrixV();
        Matrix3d R = V * svd.matrixU().transpose();          
        double det = R.determinant();
        //ntot++;
        if (det < 0.0)
          {
            //nneg++;
            V.col(2) = V.col(2)*-1.0;
            R = V * svd.matrixU().transpose();
          }
        Vector3d tr = c0-R*c1;    // translation 

        //        cout << "[PE test] R: " << endl << R << endl;
        //        cout << "[PE test] t: " << tr.transpose() << endl;

#if 0
        R << 0.9997, 0.002515, 0.02418,
          -0.002474, .9999, -0.001698,
          -0.02418, 0.001638, 0.9997;
        tr << -0.005697, -0.01753, 0.05666;
        R = R.transpose();
        tr = -R*tr;
#endif

        // transformation matrix, 3x4
        Matrix<double,3,4> tfm;
        //        tfm.block<3,3>(0,0) = R.transpose();
        //        tfm.col(3) = -R.transpose()*tr;
        tfm.block<3,3>(0,0) = R;
        tfm.col(3) = tr;
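        // tfm maps homogeneous points from f1's frame into f0's frame (p0 ~= R*p1 + tr),
        // so hypotheses are scored against f0's keypoints and disparities below.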
        
        //        cout << "[PE test] T: " << endl << tfm << endl;

        // find inliers, based on image reprojection
        int inl = 0;
        for (int i=0; i<nmatch; i++)
          {
            Vector3d pt = tfm*f1.pts[m1[i]];
            Vector3d ipt = f0.cam2pix(pt);
            const cv::KeyPoint &kp = f0.kpts[m0[i]];
            double dx = kp.pt.x - ipt.x();
            double dy = kp.pt.y - ipt.y();
            double dd = f0.disps[m0[i]] - ipt.z();
            if (dx*dx < maxInlierXDist2 && dy*dy < maxInlierXDist2 && 
                dd*dd < maxInlierDDist2)
              inl+=(int)sqrt(ipt.z()); // clever way to weight closer points
              //              inl++;
          }
        
        #pragma omp critical
        if (inl > bestinl) 
          {
            bestinl = inl;
            rot = R;
            trans = tr;
          }
      }

    //    printf("Best inliers: %d\n", bestinl);
    //    printf("Total ransac: %d  Neg det: %d\n", ntot, nneg);

    // reduce matches to inliers
    vector<cv::DMatch> inls;    // temporary for current inliers
    inliers.clear();
    Matrix<double,3,4> tfm;
    tfm.block<3,3>(0,0) = rot;
    tfm.col(3) = trans;

    nmatch = matches.size();
    for (int i=0; i<nmatch; i++)
      {
        if (!f0.goodPts[matches[i].queryIdx] || !f1.goodPts[matches[i].trainIdx])
          continue;
        Vector3d pt = tfm*f1.pts[matches[i].trainIdx];
        Vector3d ipt = f0.cam2pix(pt);
        const cv::KeyPoint &kp = f0.kpts[matches[i].queryIdx];
        double dx = kp.pt.x - ipt.x();
        double dy = kp.pt.y - ipt.y();
        double dd = f0.disps[matches[i].queryIdx] - ipt.z();
        if (dx*dx < maxInlierXDist2 && dy*dy < maxInlierXDist2 && 
            dd*dd < maxInlierDDist2)
          inls.push_back(matches[i]);
      }

#if 0
    cout << endl << trans.transpose().head(3) << endl << endl;
    cout << rot << endl;
#endif

    // polish with stereo sba
    if (polish)
      {
        // system
        SysSBA sba;
        sba.verbose = 0;

        // set up nodes
        // should have a frame => node function        
        Vector4d v0 = Vector4d(0,0,0,1);
        Quaterniond q0 = Quaternion<double>(Vector4d(0,0,0,1));
        sba.addNode(v0, q0, f0.cam, true);
        
        Quaterniond qr1(rot);   // from rotation matrix
        Vector4d temptrans = Vector4d(trans(0), trans(1), trans(2), 1.0);

        //        sba.addNode(temptrans, qr1.normalized(), f1.cam, false);
        qr1.normalize();
        sba.addNode(temptrans, qr1, f1.cam, false);

        int in = 3;
        if (in > (int)inls.size())
          in = inls.size();

        // set up projections
        for (int i=0; i<(int)inls.size(); i++)
          {
            // add point
            int i0 = inls[i].queryIdx;
            int i1 = inls[i].trainIdx;
            Vector4d pt = f0.pts[i0];
            sba.addPoint(pt);
            
            // projected point, ul,vl,ur
            Vector3d ipt;
            ipt(0) = f0.kpts[i0].pt.x;
            ipt(1) = f0.kpts[i0].pt.y;
            ipt(2) = ipt(0)-f0.disps[i0];
            sba.addStereoProj(0, i, ipt);

            // projected point, ul,vl,ur
            ipt(0) = f1.kpts[i1].pt.x;
            ipt(1) = f1.kpts[i1].pt.y;
            ipt(2) = ipt(0)-f1.disps[i1];
            sba.addStereoProj(1, i, ipt);
          }

        sba.huber = 2.0;
        sba.doSBA(5,10e-4,SBA_DENSE_CHOLESKY);
        int nbad = sba.removeBad(2.0);
//        cout << endl << "Removed " << nbad << " projections > 2 pixels error" << endl;
        sba.doSBA(5,10e-5,SBA_DENSE_CHOLESKY);

//        cout << endl << sba.nodes[1].trans.transpose().head(3) << endl;

        // get the updated transform
        trans = sba.nodes[1].trans.head(3);
        Quaterniond q1;
        q1 = sba.nodes[1].qrot;
        rot = q1.toRotationMatrix();

        // set up inliers
        inliers.clear();
        for (int i=0; i<(int)inls.size(); i++)
          {
            ProjMap &prjs = sba.tracks[i].projections;
            if (prjs[0].isValid && prjs[1].isValid) // valid track
              inliers.push_back(inls[i]);
          }
#if 0
        printf("Inliers: %d   After polish: %d\n", (int)inls.size(), (int)inliers.size());
#endif
      }

    return (int)inliers.size();
  }
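
The recovered motion is left in the estimator's rot and trans members, which map points from f1's frame into f0's frame (p0 ≈ R·p1 + t). A small sketch, assuming those members are accessible after estimate() returns, of packing them into an Eigen transform for downstream use:

#include <Eigen/Geometry>

// Sketch: wrap the estimate in a rigid transform so it can be composed or inverted.
Eigen::Isometry3d poseFromEstimate(const Eigen::Matrix3d &rot, const Eigen::Vector3d &trans)
{
  Eigen::Isometry3d T = Eigen::Isometry3d::Identity();
  T.linear() = rot;          // rotation block
  T.translation() = trans;   // translation block
  return T;                  // p0 = T * p1;  T.inverse() maps f0 points into f1
}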
Example #4
void setupSBA(SysSBA &sys)
{
    // Create camera parameters.
    frame_common::CamParams cam_params;
    cam_params.fx = 430; // Focal length in x
    cam_params.fy = 430; // Focal length in y
    cam_params.cx = 320; // X position of principal point
    cam_params.cy = 240; // Y position of principal point
    cam_params.tx = 0.3; // Baseline 

    // Define dimensions of the image.
    int maxx = 640;
    int maxy = 480;

    // Create a plane containing a wall of points.
    Plane middleplane;
    middleplane.resize(3, 2, 10, 5);
    //middleplane.rotate(PI/4.0, PI/6.0, 1, 0);
    middleplane.rotate(PI/4.0, 1, 0, 1);
    middleplane.translate(0.0, 0.0, 5.0);
    
    // Create another plane containing a wall of points.
    Plane mp2;
    mp2.resize(3, 2, 10, 5);
    mp2.rotate(0, 0, 0, 1);
    mp2.translate(0.0, 0.0, 4.0);


    // Create another plane containing a wall of points.
    Plane mp3;
    mp3.resize(3, 2, 10, 5);
    mp3.rotate(-PI/4.0, 1, 0, 1);
    mp3.translate(0.0, 0.0, 4.5);



    // Vectors containing the true point positions and their plane normals.
    vector<Point, Eigen::aligned_allocator<Point> > points;
    vector<Eigen::Vector3d, Eigen::aligned_allocator<Eigen::Vector3d> > normals;
    
    points.insert(points.end(), middleplane.points.begin(), middleplane.points.end());
    normals.insert(normals.end(), middleplane.points.size(), middleplane.normal);
    points.insert(points.end(), mp2.points.begin(), mp2.points.end());
    normals.insert(normals.end(), mp2.points.size(), mp2.normal);
    points.insert(points.end(), mp3.points.begin(), mp3.points.end());
    normals.insert(normals.end(), mp3.points.size(), mp3.normal);
    
    // Create nodes and add them to the system.
    unsigned int nnodes = 2; // Number of nodes.
    double path_length = 1.0; // Length of the path the nodes traverse.

    // Set the random seed.
    unsigned short seed[3];
    seed[0] = seed[1] = seed[2] = (unsigned short)time(NULL);
    seed48(seed);               // seed48 expects an array of three unsigned shorts
    
    unsigned int i = 0;
    
    Vector3d inormal0 = middleplane.normal;
    Vector3d inormal1 = middleplane.normal;
    Vector3d inormal20 = mp2.normal;
    Vector3d inormal21 = mp2.normal;
    Vector3d inormal30 = mp3.normal;
    Vector3d inormal31 = mp3.normal;
    
    for (i = 0; i < nnodes; i++)
    { 
      // Translate in the x direction over the node path.
      Vector4d trans(i/(nnodes-1.0)*path_length, 0, 0, 1);
            
#if 1
      if (i >= 2)
	    {
	      // perturb a little
	      double tnoise = 0.5;	// meters
	      trans.x() += tnoise*(drand48()-0.5);
	      trans.y() += tnoise*(drand48()-0.5);
	      trans.z() += tnoise*(drand48()-0.5);
	    }
#endif

      // Don't rotate.
      Quaterniond rot(1, 0, 0, 0);
#if 1
      if (i >= 2)
	    {
	      // perturb a little
	      double qnoise = 0.1;	// quaternion component noise (unitless)
	      rot.x() += qnoise*(drand48()-0.5);
	      rot.y() += qnoise*(drand48()-0.5);
	      rot.z() += qnoise*(drand48()-0.5);
	    }
#endif
      rot.normalize();
      
      // Add a new node to the system.
      sys.addNode(trans, rot, cam_params, false);
      
      // set normal
      if (i == 0)
	{
	  inormal0 = rot.toRotationMatrix().transpose() * inormal0;
	  inormal20 = rot.toRotationMatrix().transpose() * inormal20;
	  inormal30 = rot.toRotationMatrix().transpose() * inormal30;
	}
      else
	{
	  inormal1 = rot.toRotationMatrix().transpose() * inormal1;
	  inormal21 = rot.toRotationMatrix().transpose() * inormal21;
	  inormal31 = rot.toRotationMatrix().transpose() * inormal31;
	}
    }
        
    double pointnoise = 1.0;
    
    // Add points into the system, and add noise.
    for (i = 0; i < points.size(); i++)
    {
      // Add up to ±0.5 units of noise to each coordinate.
      Vector4d temppoint = points[i];
      temppoint.x() += pointnoise*(drand48() - 0.5);
      temppoint.y() += pointnoise*(drand48() - 0.5);
      temppoint.z() += pointnoise*(drand48() - 0.5);
      sys.addPoint(temppoint);
    }
    
    // Each point gets added twice
    for (i = 0; i < points.size(); i++)
    {
      // Add up to ±0.5 units of noise to each coordinate.
      Vector4d temppoint = points[i];
      temppoint.x() += pointnoise*(drand48() - 0.5);
      temppoint.y() += pointnoise*(drand48() - 0.5);
      temppoint.z() += pointnoise*(drand48() - 0.5);
      sys.addPoint(temppoint);
    }
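    // The second, independently-noised copy gives node 1 its own set of points
    // (indices k+nn in the projection loop below); when USE_PPL is defined the two
    // copies are tied together by point-plane matches rather than shared points.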

    Vector2d proj2d, proj2dp;
    Vector3d proj, projp, pc, pcp, baseline, baselinep;
    
    Vector3d imagenormal(0, 0, 1);
    
    Matrix3d covar0;
    covar0 << sqrt(imagenormal(0)), 0, 0,
              0, sqrt(imagenormal(1)), 0,
              0, 0, sqrt(imagenormal(2));
    Matrix3d covar;
    
    Quaterniond rotation;
    Matrix3d rotmat;
    
    unsigned int midindex = middleplane.points.size();
    printf("Normal for Middle Plane: [%f %f %f], index %d -> %d\n", middleplane.normal.x(), middleplane.normal.y(), middleplane.normal.z(), 0, midindex-1);
    
    int nn = points.size();

    // Project points into nodes.
    for (i = 0; i < points.size(); i++)
    {
      int k=i;
      // Project the point into the node's image coordinate system.
      sys.nodes[0].setProjection();
      sys.nodes[0].project2im(proj2d, points[k]);
      sys.nodes[1].setProjection();
      sys.nodes[1].project2im(proj2dp, points[k]);
        
      // Camera coords for right camera
      baseline << sys.nodes[0].baseline, 0, 0;
      pc = sys.nodes[0].Kcam * (sys.nodes[0].w2n*points[k] - baseline); 
      proj.head<2>() = proj2d;
      proj(2) = pc(0)/pc(2);
        
      baseline << sys.nodes[1].baseline, 0, 0;
      pcp = sys.nodes[1].Kcam * (sys.nodes[1].w2n*points[k] - baseline); 
      projp.head<2>() = proj2dp;
      projp(2) = pcp(0)/pcp(2);

      // add noise to projections
      double prnoise = 0.5;	// 0.5 pixels
      proj.head<2>() += Vector2d(prnoise*(drand48() - 0.5),prnoise*(drand48() - 0.5));
      projp.head<2>() += Vector2d(prnoise*(drand48() - 0.5),prnoise*(drand48() - 0.5));


      // If valid (within the range of the image size), add the stereo 
      // projection to SBA.
      if (proj.x() > 0 && proj.x() < maxx && proj.y() > 0 && proj.y() < maxy)
        {
          // add point cloud shape-holding projections to each node
          sys.addStereoProj(0, k, proj);
          sys.addStereoProj(1, k+nn, projp);

#ifdef USE_PP
          // add exact matches
          if (i == 20 || i == 65 || i == 142)
            sys.addStereoProj(1, k, projp);
#endif

#ifdef USE_PPL
          // add point-plane matches
	  if (i < midindex)
            sys.addPointPlaneMatch(0, k, inormal0, 1, k+nn, inormal1);
	  else if (i < 2*midindex)
	    sys.addPointPlaneMatch(0, k, inormal20, 1, k+nn, inormal21);
	  else
	    sys.addPointPlaneMatch(0, k, inormal30, 1, k+nn, inormal31);
          //          sys.addStereoProj(0, k+nn, projp);
          //          sys.addStereoProj(1, k, proj);

          Matrix3d covar;
          double cv = 0.01;
          covar << cv, 0, 0,
            0, cv, 0, 
            0, 0, cv;
          sys.setProjCovariance(0, k+nn, covar);
          sys.setProjCovariance(1, k, covar);
#endif

        }
      else
        {
          cout << "ERROR! point not in view of nodes" << endl;
          //return;
        }
    }

    // Add noise to node position.
    
    double transscale = 2.0;
    double rotscale = 0.5;
    
    // Don't actually add noise to the first node, since it's fixed.
    for (i = 1; i < sys.nodes.size(); i++)
    {
      Vector4d temptrans = sys.nodes[i].trans;
      Quaterniond tempqrot = sys.nodes[i].qrot;
      
      // Add error to both translation and rotation.
      temptrans.x() += transscale*(drand48() - 0.5);
      temptrans.y() += transscale*(drand48() - 0.5);
      temptrans.z() += transscale*(drand48() - 0.5);
      tempqrot.x() += rotscale*(drand48() - 0.5);
      tempqrot.y() += rotscale*(drand48() - 0.5);
      tempqrot.z() += rotscale*(drand48() - 0.5);
      tempqrot.normalize();
      
      sys.nodes[i].trans = temptrans;
      sys.nodes[i].qrot = tempqrot;
      
      // These methods should be called to update the node.
      sys.nodes[i].normRot();
      sys.nodes[i].setTransform();
      sys.nodes[i].setProjection();
      sys.nodes[i].setDr(true);
    }
        
}
Example #5
size_t PoseEstimator<PointSource,PointTarget>::estimate(const NDTFrame<PointSource>& f0, const NDTFrame<PointTarget>& f1,
        const std::vector<cv::DMatch> &matches)
{
    // convert keypoints in match to 3d points
    std::vector<Eigen::Vector4d, Eigen::aligned_allocator<Eigen::Vector4d> > p0; // homogeneous coordinates
    std::vector<Eigen::Vector4d, Eigen::aligned_allocator<Eigen::Vector4d> > p1;

    int nmatch = matches.size();
    //srand(getDoubleTime());

    // set up data structures for fast processing
    // indices to good matches
    std::vector<int> m0, m1;
    for (int i=0; i<nmatch; i++)
    {
        m0.push_back(matches[i].queryIdx);
        m1.push_back(matches[i].trainIdx);
        //std::cout<<m0[i]<<" "<<m1[i]<<std::endl;
    }

    nmatch = m0.size();

    if (nmatch < 3) return 0;   // can't do it...

    int bestinl = 0;

    // RANSAC loop
//#pragma omp parallel for shared( bestinl )
    for (int i=0; i<numRansac; i++)
    {
        //std::cout << "ransac loop : " << i << std::endl;
        // find a candidate
        int a=rand()%nmatch;
        int b = a;
        while (a==b)
            b=rand()%nmatch;
        int c = a;
        while (a==c || b==c)
            c=rand()%nmatch;

        int i0a = m0[a];
        int i0b = m0[b];
        int i0c = m0[c];
        int i1a = m1[a];
        int i1b = m1[b];
        int i1c = m1[c];

        if (i0a == i0b || i0a == i0c || i0b == i0c ||
                i1a == i1b || i1a == i1c || i1b == i1c)
            continue;

        //std::cout<<a<<" "<<b<<" "<<c<<std::endl;
        //std::cout<<i0a<<" "<<i0b<<" "<<i0c<<std::endl;
        //std::cout<<i1a<<" "<<i1b<<" "<<i1c<<std::endl;

        // get centroids
        Eigen::Vector3d p0a = f0.pts[i0a].head(3);
        Eigen::Vector3d p0b = f0.pts[i0b].head(3);
        Eigen::Vector3d p0c = f0.pts[i0c].head(3);
        Eigen::Vector3d p1a = f1.pts[i1a].head(3);
        Eigen::Vector3d p1b = f1.pts[i1b].head(3);
        Eigen::Vector3d p1c = f1.pts[i1c].head(3);

        Eigen::Vector3d c0 = (p0a+p0b+p0c)*(1.0/3.0);
        Eigen::Vector3d c1 = (p1a+p1b+p1c)*(1.0/3.0);

        //std::cout<<c0.transpose()<<std::endl;
        //std::cout<<c1.transpose()<<std::endl;
        // subtract out
        p0a -= c0;
        p0b -= c0;
        p0c -= c0;
        p1a -= c1;
        p1b -= c1;
        p1c -= c1;

        Eigen::Matrix3d H = p1a*p0a.transpose() + p1b*p0b.transpose() +
                            p1c*p0c.transpose();

        // do the SVD thang
        Eigen::JacobiSVD<Eigen::Matrix3d> svd(H, Eigen::ComputeFullU | Eigen::ComputeFullV);
        Eigen::Matrix3d V = svd.matrixV();
        Eigen::Matrix3d R = V * svd.matrixU().transpose();
        double det = R.determinant();
        //ntot++;
        if (det < 0.0)
        {
            //nneg++;
            V.col(2) = V.col(2)*-1.0;
            R = V * svd.matrixU().transpose();
        }
        Eigen::Vector3d tr = c0-R*c1;    // translation

        // transformation matrix, 3x4
        Eigen::Matrix<double,3,4> tfm;
        //        tfm.block<3,3>(0,0) = R.transpose();
        //        tfm.col(3) = -R.transpose()*tr;
        tfm.block<3,3>(0,0) = R;
        tfm.col(3) = tr;

#if 0
        // find inliers, based on image reprojection
        int inl = 0;
        for (int i=0; i<nmatch; i++)
        {
            Vector3d pt = tfm*f1.pts[m1[i]];
            Vector3d ipt = f0.cam2pix(pt);
            const cv::KeyPoint &kp = f0.kpts[m0[i]];
            double dx = kp.pt.x - ipt.x();
            double dy = kp.pt.y - ipt.y();
            double dd = f0.disps[m0[i]] - ipt.z();
            if (dx*dx < maxInlierXDist2 && dy*dy < maxInlierXDist2 &&
                    dd*dd < maxInlierDDist2)
            {
                inl+=(int)sqrt(ipt.z()); // clever way to weight closer points
//		 inl+=(int)sqrt(ipt.z()/matches[i].distance);
//		 cout << "matches[i].distance : " << matches[i].distance << endl;
//		 inl++;
            }
        }
#endif
        int inl = 0;
        for (int i=0; i<nmatch; i++)
        {
            Eigen::Vector3d pt1 = tfm*f1.pts[m1[i]];
            Eigen::Vector3d pt0 = f0.pts[m0[i]].head(3);

//	       double z = fabs(pt1.z() - pt0.z())*0.5;
            double z = pt1.z();
            double dx = pt1.x() - pt0.x();
            double dy = pt1.y() - pt0.y();
            double dd = pt1.z() - pt0.z();

            if (projectMatches)
            {
                // The central idea here is to divide by the distance (this is essentially what cam2pix does).
                dx = dx / z;
                dy = dy / z;
            }
            if (dx*dx < maxInlierXDist2 && dy*dy < maxInlierXDist2 &&
                    dd*dd < maxInlierDDist2)
            {
//----		    inl+=(int)sqrt(pt0.z()); // clever way to weight closer points
//		 inl+=(int)sqrt(ipt.z()/matches[i].distance);
//		 cout << "matches[i].distance : " << matches[i].distance << endl;
                inl++;
            }
        }

//#pragma omp critical
        if (inl > bestinl)
        {
            bestinl = inl;
            rot = R;
            trans = tr;
//	       std::cout << "bestinl : " << bestinl << std::endl;
        }
    }

    //printf("Best inliers: %d\n", bestinl);
    //printf("Total ransac: %d  Neg det: %d\n", ntot, nneg);

    // reduce matches to inliers
    std::vector<cv::DMatch> inls;    // temporary for current inliers
    inliers.clear();
    Eigen::Matrix<double,3,4> tfm;
    tfm.block<3,3>(0,0) = rot;
    tfm.col(3) = trans;

    //std::cout<<"f0: "<<f0.pts.size()<<" "<<f0.kpts.size()<<" "<<f0.pc_kpts.size()<<std::endl;
    //std::cout<<"f1: "<<f1.pts.size()<<" "<<f1.kpts.size()<<" "<<f1.pc_kpts.size()<<std::endl;

    nmatch = matches.size();
    for (int i=0; i<nmatch; i++)
    {
        Eigen::Vector3d pt1 = tfm*f1.pts[matches[i].trainIdx];
        //Eigen::Vector3d pt1_unchanged = f1.pts[matches[i].trainIdx].head(3);
        //Vector3d pt1 = pt1_unchanged;
#if 0
        Vector3d ipt = f0.cam2pix(pt);
        const cv::KeyPoint &kp = f0.kpts[matches[i].queryIdx];
        double dx = kp.pt.x - ipt.x();
        double dy = kp.pt.y - ipt.y();
        double dd = f0.disps[matches[i].queryIdx] - ipt.z();
#endif
        Eigen::Vector3d pt0 = f0.pts[matches[i].queryIdx].head(3);

        //double z = fabs(pt1.z() - pt0.z())*0.5;
        double z = pt1.z();
        double dx = pt1.x() - pt0.x();
        double dy = pt1.y() - pt0.y();
        double dd = pt1.z() - pt0.z();

        if (projectMatches)
        {
            // The central idea here is to divide by the distance (this is essentially what cam2pix does).
            dx = dx / z;
            dy = dy / z;
        }

        if (dx*dx < maxInlierXDist2 && dy*dy < maxInlierXDist2 &&
                dd*dd < maxInlierDDist2)
        {
            if (z < maxDist && z > minDist)

//	       if (fabs(f0.kpts[matches[i].queryIdx].pt.y - f1.kpts[matches[i].trainIdx].pt.y) > 300)
            {
//		   std::cout << " ---------- " << dx << "," << dy << "," << dd << ",\npt0 " << pt0.transpose() << "\npt1 " << pt1.transpose() << f0.kpts[matches[i].queryIdx].pt << "," <<
//			 f1.kpts[matches[i].trainIdx].pt << "\n unchanged pt1 " << pt1_unchanged.transpose() << std::endl;
                inliers.push_back(matches[i]);
            }
        }
    }

#if 0
    // Test with the SBA...
    {
        // system
        SysSBA sba;
        sba.verbose = 0;

#if 0
        // set up nodes
        // should have a frame => node function
        Vector4d v0 = Vector4d(0,0,0,1);
        Quaterniond q0 = Quaternion<double>(Vector4d(0,0,0,1));
        sba.addNode(v0, q0, f0.cam, true);

        Quaterniond qr1(rot);   // from rotation matrix
        Vector4d temptrans = Vector4d(trans(0), trans(1), trans(2), 1.0);

        //        sba.addNode(temptrans, qr1.normalized(), f1.cam, false);
        qr1.normalize();
        sba.addNode(temptrans, qr1, f1.cam, false);

        int in = 3;
        if (in > (int)inls.size())
            in = inls.size();

        // set up projections
        for (int i=0; i<(int)inls.size(); i++)
        {
            // add point
            int i0 = inls[i].queryIdx;
            int i1 = inls[i].trainIdx;
            Vector4d pt = f0.pts[i0];
            sba.addPoint(pt);

            // projected point, ul,vl,ur
            Vector3d ipt;
            ipt(0) = f0.kpts[i0].pt.x;
            ipt(1) = f0.kpts[i0].pt.y;
            ipt(2) = ipt(0)-f0.disps[i0];
            sba.addStereoProj(0, i, ipt);

            // projected point, ul,vl,ur
            ipt(0) = f1.kpts[i1].pt.x;
            ipt(1) = f1.kpts[i1].pt.y;
            ipt(2) = ipt(0)-f1.disps[i1];
            sba.addStereoProj(1, i, ipt);
        }

        sba.huber = 2.0;
        sba.doSBA(5,10e-4,SBA_DENSE_CHOLESKY);
        int nbad = sba.removeBad(2.0); // 2.0
        cout << endl << "Removed " << nbad << " projections > 2 pixels error" << endl;
        sba.doSBA(5,10e-5,SBA_DENSE_CHOLESKY);

//        cout << endl << sba.nodes[1].trans.transpose().head(3) << endl;

        // get the updated transform
        trans = sba.nodes[1].trans.head(3);
        Quaterniond q1;
        q1 = sba.nodes[1].qrot;
        quat = q1;
        rot = q1.toRotationMatrix();

        // set up inliers
        inliers.clear();
        for (int i=0; i<(int)inls.size(); i++)
        {
            ProjMap &prjs = sba.tracks[i].projections;
            if (prjs[0].isValid && prjs[1].isValid) // valid track
                inliers.push_back(inls[i]);
        }

        printf("Inliers: %d   After polish: %d\n", (int)inls.size(), (int)inliers.size());
#endif
    }
#endif

//     std::cout << std::endl << trans.transpose().head(3) << std::endl << std::endl;
//     std::cout << rot << std::endl;

    return inliers.size();
}
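
All three estimators run a fixed numRansac number of iterations. For reference (standard RANSAC theory, not code from the examples), the iteration count needed for a 3-point minimal sample can be estimated from an assumed inlier fraction w and a desired confidence p; a small sketch:

#include <cmath>
#include <climits>

// Sketch: classic RANSAC iteration count for a 3-point minimal sample.
// w = assumed inlier fraction, p = desired probability of drawing at least one
// uncontaminated sample. Both values are assumptions the caller must supply.
int ransacIterations(double w, double p = 0.99)
{
  double good = w * w * w;                    // P(all 3 draws are inliers)
  if (good <= 0.0) return INT_MAX;            // no inliers assumed -> unbounded
  if (good >= 1.0) return 1;
  return (int)std::ceil(std::log(1.0 - p) / std::log(1.0 - good));
}

// e.g. w = 0.5 gives ~35 iterations and w = 0.2 about 574 -- far fewer than a
// large fixed numRansac when the data is mostly inliers, at the cost of trusting
// the estimate of w.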