Code example #1
File: Mapping.cpp  Project: PETGreen/MFfCUAVSLAM
  void LocalMapping::CreateNewMapPoints() {
    // Retrieve neighbor keyframes in covisibility graph
    //    int nn = 10;
    //    if(mbMonocular)
    int nn = 20;
    const vector<kfptr> vpNeighKFs = mpCurrentKeyFrame->GetBestCovisibilityKeyFrames(nn);

    ORBmatcher matcher(0.6, false);

    cv::Mat Rcw1 = mpCurrentKeyFrame->GetRotation();
    cv::Mat Rwc1 = Rcw1.t();
    cv::Mat tcw1 = mpCurrentKeyFrame->GetTranslation();
    cv::Mat Tcw1(3, 4, CV_32F);
    Rcw1.copyTo(Tcw1.colRange(0, 3));
    tcw1.copyTo(Tcw1.col(3));
    cv::Mat Ow1 = mpCurrentKeyFrame->GetCameraCenter();

    const float &fx1 = mpCurrentKeyFrame->fx;
    const float &fy1 = mpCurrentKeyFrame->fy;
    const float &cx1 = mpCurrentKeyFrame->cx;
    const float &cy1 = mpCurrentKeyFrame->cy;
    const float &invfx1 = mpCurrentKeyFrame->invfx;
    const float &invfy1 = mpCurrentKeyFrame->invfy;

    const float ratioFactor = 1.5f * mpCurrentKeyFrame->mfScaleFactor;

    int nnew = 0;

    // Search matches with epipolar restriction and triangulate
    for(size_t i = 0; i < vpNeighKFs.size(); i++) {
      if(i > 0 && CheckNewKeyFrames()) {
        return;
      }

      kfptr pKF2 = vpNeighKFs[i];

      // Check first that baseline is not too short
      cv::Mat Ow2 = pKF2->GetCameraCenter();
      cv::Mat vBaseline = Ow2 - Ow1;
      const float baseline = cv::norm(vBaseline);

      //        if(!mbMonocular)
      //        {
      //            if(baseline<pKF2->mb)
      //            continue;
      //        }
      //        else
      //        {
      const float medianDepthKF2 = pKF2->ComputeSceneMedianDepth(2);
      const float ratioBaselineDepth = baseline / medianDepthKF2;
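      // Pairs whose baseline is below 1% of the median scene depth are skipped below; such a short baseline leaves the triangulated depth poorly constrained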

      if(ratioBaselineDepth < 0.01) {
        continue;
      }

      //        }

      // Compute Fundamental Matrix
      cv::Mat F12 = ComputeF12(mpCurrentKeyFrame, pKF2);

      // Search matches that fulfil epipolar constraint
      vector<pair<size_t, size_t> > vMatchedIndices;
      matcher.SearchForTriangulation(mpCurrentKeyFrame, pKF2, F12, vMatchedIndices);

      cv::Mat Rcw2 = pKF2->GetRotation();
      cv::Mat Rwc2 = Rcw2.t();
      cv::Mat tcw2 = pKF2->GetTranslation();
      cv::Mat Tcw2(3, 4, CV_32F);
      Rcw2.copyTo(Tcw2.colRange(0, 3));
      tcw2.copyTo(Tcw2.col(3));

      const float &fx2 = pKF2->fx;
      const float &fy2 = pKF2->fy;
      const float &cx2 = pKF2->cx;
      const float &cy2 = pKF2->cy;
      const float &invfx2 = pKF2->invfx;
      const float &invfy2 = pKF2->invfy;

      // Triangulate each match
      const int nmatches = vMatchedIndices.size();

      for(int ikp = 0; ikp < nmatches; ikp++) {
        const int &idx1 = vMatchedIndices[ikp].first;
        const int &idx2 = vMatchedIndices[ikp].second;

        const cv::KeyPoint &kp1 = mpCurrentKeyFrame->mvKeysUn[idx1];
        //            const float kp1_ur=mpCurrentKeyFrame->mvuRight[idx1];
        //            bool bStereo1 = kp1_ur>=0;

        const cv::KeyPoint &kp2 = pKF2->mvKeysUn[idx2];
        //            const float kp2_ur = pKF2->mvuRight[idx2];
        //            bool bStereo2 = kp2_ur>=0;

        // Check parallax between rays
        cv::Mat xn1 = (cv::Mat_<float>(3, 1) << (kp1.pt.x - cx1) * invfx1, (kp1.pt.y - cy1) * invfy1, 1.0);
        cv::Mat xn2 = (cv::Mat_<float>(3, 1) << (kp2.pt.x - cx2) * invfx2, (kp2.pt.y - cy2) * invfy2, 1.0);

        cv::Mat ray1 = Rwc1 * xn1;
        cv::Mat ray2 = Rwc2 * xn2;
        const float cosParallaxRays = ray1.dot(ray2) / (cv::norm(ray1) * cv::norm(ray2));
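        // cosParallaxRays is the cosine of the angle between the two back-projected viewing rays in the world frame; values close to 1 mean nearly zero parallax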

        float cosParallaxStereo = cosParallaxRays + 1;
        //            float cosParallaxStereo1 = cosParallaxStereo;
        //            float cosParallaxStereo2 = cosParallaxStereo;

        //            if(bStereo1)
        //                cosParallaxStereo1 = cos(2*atan2(mpCurrentKeyFrame->mb/2,mpCurrentKeyFrame->mvDepth[idx1]));
        //            else if(bStereo2)
        //                cosParallaxStereo2 = cos(2*atan2(pKF2->mb/2,pKF2->mvDepth[idx2]));

        //            cosParallaxStereo = min(cosParallaxStereo1,cosParallaxStereo2);

        cv::Mat x3D;

        if(cosParallaxRays < cosParallaxStereo && cosParallaxRays > 0 && (cosParallaxRays < 0.9998)) { //(bStereo1 || bStereo2 || cosParallaxRays<0.9998))
          // Linear Triangulation Method
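          // Each view contributes two linear constraints on the homogeneous point X:
          // x*P.row(2) - P.row(0) and y*P.row(2) - P.row(1); the solution is the
          // right singular vector of A associated with the smallest singular value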
          cv::Mat A(4, 4, CV_32F);
          A.row(0) = xn1.at<float>(0) * Tcw1.row(2) - Tcw1.row(0);
          A.row(1) = xn1.at<float>(1) * Tcw1.row(2) - Tcw1.row(1);
          A.row(2) = xn2.at<float>(0) * Tcw2.row(2) - Tcw2.row(0);
          A.row(3) = xn2.at<float>(1) * Tcw2.row(2) - Tcw2.row(1);

          cv::Mat w, u, vt;
          cv::SVD::compute(A, w, u, vt, cv::SVD::MODIFY_A | cv::SVD::FULL_UV);

          x3D = vt.row(3).t();

          if(x3D.at<float>(3) == 0) {
            continue;
          }

          // Euclidean coordinates
          x3D = x3D.rowRange(0, 3) / x3D.at<float>(3);

        }

        //            else if(bStereo1 && cosParallaxStereo1<cosParallaxStereo2)
        //            {
        //                x3D = mpCurrentKeyFrame->UnprojectStereo(idx1);
        //            }
        //            else if(bStereo2 && cosParallaxStereo2<cosParallaxStereo1)
        //            {
        //                x3D = pKF2->UnprojectStereo(idx2);
        //            }
        else {
          continue;  //No stereo and very low parallax
        }

        cv::Mat x3Dt = x3D.t();

        //Check triangulation in front of cameras
        float z1 = Rcw1.row(2).dot(x3Dt) + tcw1.at<float>(2);

        if(z1 <= 0) {
          continue;
        }

        float z2 = Rcw2.row(2).dot(x3Dt) + tcw2.at<float>(2);

        if(z2 <= 0) {
          continue;
        }

        //Check reprojection error in first keyframe
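        // 5.991 is the 95% chi-squared threshold for 2 degrees of freedom, scaled by the keypoint's octave-dependent variance sigma^2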
        const float &sigmaSquare1 = mpCurrentKeyFrame->mvLevelSigma2[kp1.octave];
        const float x1 = Rcw1.row(0).dot(x3Dt) + tcw1.at<float>(0);
        const float y1 = Rcw1.row(1).dot(x3Dt) + tcw1.at<float>(1);
        const float invz1 = 1.0 / z1;

        //            if(!bStereo1)
        //            {
        float u1 = fx1 * x1 * invz1 + cx1;
        float v1 = fy1 * y1 * invz1 + cy1;
        float errX1 = u1 - kp1.pt.x;
        float errY1 = v1 - kp1.pt.y;

        if((errX1 * errX1 + errY1 * errY1) > 5.991 * sigmaSquare1) {
          continue;
        }

        //            }
        //            else
        //            {
        //                float u1 = fx1*x1*invz1+cx1;
        //                float u1_r = u1 - mpCurrentKeyFrame->mbf*invz1;
        //                float v1 = fy1*y1*invz1+cy1;
        //                float errX1 = u1 - kp1.pt.x;
        //                float errY1 = v1 - kp1.pt.y;
        //                float errX1_r = u1_r - kp1_ur;
        //                if((errX1*errX1+errY1*errY1+errX1_r*errX1_r)>7.8*sigmaSquare1)
        //                    continue;
        //            }

        //Check reprojection error in second keyframe
        const float sigmaSquare2 = pKF2->mvLevelSigma2[kp2.octave];
        const float x2 = Rcw2.row(0).dot(x3Dt) + tcw2.at<float>(0);
        const float y2 = Rcw2.row(1).dot(x3Dt) + tcw2.at<float>(1);
        const float invz2 = 1.0 / z2;
        //            if(!bStereo2)
        //            {
        float u2 = fx2 * x2 * invz2 + cx2;
        float v2 = fy2 * y2 * invz2 + cy2;
        float errX2 = u2 - kp2.pt.x;
        float errY2 = v2 - kp2.pt.y;

        if((errX2 * errX2 + errY2 * errY2) > 5.991 * sigmaSquare2) {
          continue;
        }

        //            }
        //            else
        //            {
        //                float u2 = fx2*x2*invz2+cx2;
        //                float u2_r = u2 - mpCurrentKeyFrame->mbf*invz2;
        //                float v2 = fy2*y2*invz2+cy2;
        //                float errX2 = u2 - kp2.pt.x;
        //                float errY2 = v2 - kp2.pt.y;
        //                float errX2_r = u2_r - kp2_ur;
        //                if((errX2*errX2+errY2*errY2+errX2_r*errX2_r)>7.8*sigmaSquare2)
        //                    continue;
        //            }

        //Check scale consistency
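        // The ratio of the distances to the two camera centers must agree with the ratio of the ORB octave scale factors, up to ratioFactor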
        cv::Mat normal1 = x3D - Ow1;
        float dist1 = cv::norm(normal1);

        cv::Mat normal2 = x3D - Ow2;
        float dist2 = cv::norm(normal2);

        if(dist1 == 0 || dist2 == 0) {
          continue;
        }

        const float ratioDist = dist2 / dist1;
        const float ratioOctave = mpCurrentKeyFrame->mvScaleFactors[kp1.octave] / pKF2->mvScaleFactors[kp2.octave];

        /*if(fabs(ratioDist-ratioOctave)>ratioFactor)
            continue;*/
        if(ratioDist * ratioFactor < ratioOctave || ratioDist > ratioOctave * ratioFactor) {
          continue;
        }

        // Triangulation is successful
        mpptr pMP{new MapPoint(x3D, mpCurrentKeyFrame, mpMap, mClientId, mpComm, mpCC->mSysState, -1)};

        pMP->AddObservation(mpCurrentKeyFrame, idx1);
        pMP->AddObservation(pKF2, idx2);

        mpCurrentKeyFrame->AddMapPoint(pMP, idx1);
        pKF2->AddMapPoint(pMP, idx2);

        pMP->ComputeDistinctiveDescriptors();

        pMP->UpdateNormalAndDepth();

        mpMap->AddMapPoint(pMP);
        mlpRecentAddedMapPoints.push_back(pMP);

        nnew++;
      }
    }
  }
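
The heart of this function is the linear (DLT) triangulation of a feature match seen from two keyframes. As a standalone illustration, the following minimal sketch (not part of either project; all names are hypothetical) builds two synthetic camera poses, triangulates one normalized correspondence with the same DLT construction used above, and prints the recovered point. It assumes OpenCV, normalized image coordinates (intrinsics already removed) and 3x4 [R|t] camera matrices, as in the code above.

  #include <opencv2/core.hpp>
  #include <cstdio>

  int main() {
    // Camera 1 at the world origin, camera 2 shifted 0.5 along x (world = camera-1 frame).
    cv::Mat Tcw1 = cv::Mat::eye(3, 4, CV_32F);
    cv::Mat Tcw2 = cv::Mat::eye(3, 4, CV_32F);
    Tcw2.at<float>(0, 3) = -0.5f;   // tcw2 = -R*C with camera center C = (0.5, 0, 0)

    // Ground-truth world point and its normalized projections (x/z, y/z, 1) in each camera.
    cv::Mat xn1 = (cv::Mat_<float>(3, 1) << 0.2f / 4.0f, -0.1f / 4.0f, 1.0f);
    cv::Mat xn2 = (cv::Mat_<float>(3, 1) << (0.2f - 0.5f) / 4.0f, -0.1f / 4.0f, 1.0f);

    // Same DLT construction as in the example above: two linear constraints per view.
    cv::Mat A(4, 4, CV_32F);
    A.row(0) = xn1.at<float>(0) * Tcw1.row(2) - Tcw1.row(0);
    A.row(1) = xn1.at<float>(1) * Tcw1.row(2) - Tcw1.row(1);
    A.row(2) = xn2.at<float>(0) * Tcw2.row(2) - Tcw2.row(0);
    A.row(3) = xn2.at<float>(1) * Tcw2.row(2) - Tcw2.row(1);

    // The homogeneous solution is the right singular vector of the smallest singular value.
    cv::Mat w, u, vt;
    cv::SVD::compute(A, w, u, vt, cv::SVD::MODIFY_A | cv::SVD::FULL_UV);
    cv::Mat X = vt.row(3).t();
    X = X.rowRange(0, 3) / X.at<float>(3);

    std::printf("triangulated: %.3f %.3f %.3f (expected 0.200 -0.100 4.000)\n",
                X.at<float>(0), X.at<float>(1), X.at<float>(2));
    return 0;
  }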
Code example #2
 void LocalMapping::CreateNewMapPoints()
 {
     // Take neighbor keyframes in covisibility graph
     vector<KeyFrame*> vpNeighKFs = mpCurrentKeyFrame->GetBestCovisibilityKeyFrames(20);
     
     ORBmatcher matcher(0.6,false);
     
     cv::Mat Rcw1 = mpCurrentKeyFrame->GetRotation();
     cv::Mat Rwc1 = Rcw1.t();
     cv::Mat tcw1 = mpCurrentKeyFrame->GetTranslation();
     cv::Mat Tcw1(3,4,CV_32F);
     Rcw1.copyTo(Tcw1.colRange(0,3));
     tcw1.copyTo(Tcw1.col(3));
     cv::Mat Ow1 = mpCurrentKeyFrame->GetCameraCenter();
     
     const float fx1 = mpCurrentKeyFrame->fx;
     const float fy1 = mpCurrentKeyFrame->fy;
     const float cx1 = mpCurrentKeyFrame->cx;
     const float cy1 = mpCurrentKeyFrame->cy;
     const float invfx1 = 1.0f/fx1;
     const float invfy1 = 1.0f/fy1;
     
     const float ratioFactor = 1.5f*mpCurrentKeyFrame->GetScaleFactor();
     
     // Search matches with epipolar restriction and triangulate
     for(size_t i=0; i<vpNeighKFs.size(); i++)
     {
         KeyFrame* pKF2 = vpNeighKFs[i];
         
         // Check first that baseline is not too short
     // Small translation errors for short-baseline keyframes make the scale diverge
         cv::Mat Ow2 = pKF2->GetCameraCenter();
         cv::Mat vBaseline = Ow2-Ow1;
         const float baseline = cv::norm(vBaseline);
         const float medianDepthKF2 = pKF2->ComputeSceneMedianDepth(2);
         const float ratioBaselineDepth = baseline/medianDepthKF2;
         
         if(ratioBaselineDepth<0.01)
             continue;
         
         // Compute Fundamental Matrix
         cv::Mat F12 = ComputeF12(mpCurrentKeyFrame,pKF2);
         
         // Search matches that fulfil epipolar constraint
         vector<cv::KeyPoint> vMatchedKeysUn1;
         vector<cv::KeyPoint> vMatchedKeysUn2;
         vector<pair<size_t,size_t> > vMatchedIndices;
         matcher.SearchForTriangulation(mpCurrentKeyFrame,pKF2,F12,vMatchedKeysUn1,vMatchedKeysUn2,vMatchedIndices);
         
         cv::Mat Rcw2 = pKF2->GetRotation();
         cv::Mat Rwc2 = Rcw2.t();
         cv::Mat tcw2 = pKF2->GetTranslation();
         cv::Mat Tcw2(3,4,CV_32F);
         Rcw2.copyTo(Tcw2.colRange(0,3));
         tcw2.copyTo(Tcw2.col(3));
         
         const float fx2 = pKF2->fx;
         const float fy2 = pKF2->fy;
         const float cx2 = pKF2->cx;
         const float cy2 = pKF2->cy;
         const float invfx2 = 1.0f/fx2;
         const float invfy2 = 1.0f/fy2;
         
         // Triangulate each match
         for(size_t ikp=0, iendkp=vMatchedKeysUn1.size(); ikp<iendkp; ikp++)
         {
             const int idx1 = vMatchedIndices[ikp].first;
             const int idx2 = vMatchedIndices[ikp].second;
             
             const cv::KeyPoint &kp1 = vMatchedKeysUn1[ikp];
             const cv::KeyPoint &kp2 = vMatchedKeysUn2[ikp];
             
             // Check parallax between rays
             cv::Mat xn1 = (cv::Mat_<float>(3,1) << (kp1.pt.x-cx1)*invfx1, (kp1.pt.y-cy1)*invfy1, 1.0 );
             cv::Mat ray1 = Rwc1*xn1;
             cv::Mat xn2 = (cv::Mat_<float>(3,1) << (kp2.pt.x-cx2)*invfx2, (kp2.pt.y-cy2)*invfy2, 1.0 );
             cv::Mat ray2 = Rwc2*xn2;
             const float cosParallaxRays = ray1.dot(ray2)/(cv::norm(ray1)*cv::norm(ray2));
             
             if(cosParallaxRays<0 || cosParallaxRays>0.9998)
                 continue;
             
             // Linear Triangulation Method
             cv::Mat A(4,4,CV_32F);
             A.row(0) = xn1.at<float>(0)*Tcw1.row(2)-Tcw1.row(0);
             A.row(1) = xn1.at<float>(1)*Tcw1.row(2)-Tcw1.row(1);
             A.row(2) = xn2.at<float>(0)*Tcw2.row(2)-Tcw2.row(0);
             A.row(3) = xn2.at<float>(1)*Tcw2.row(2)-Tcw2.row(1);
             
             cv::Mat w,u,vt;
             cv::SVD::compute(A,w,u,vt,cv::SVD::MODIFY_A| cv::SVD::FULL_UV);
             
             cv::Mat x3D = vt.row(3).t();
             
             if(x3D.at<float>(3)==0)
                 continue;
             
             // Euclidean coordinates
             x3D = x3D.rowRange(0,3)/x3D.at<float>(3);
             cv::Mat x3Dt = x3D.t();
             
             //Check triangulation in front of cameras
             float z1 = Rcw1.row(2).dot(x3Dt)+tcw1.at<float>(2);
             if(z1<=0)
                 continue;
             
             float z2 = Rcw2.row(2).dot(x3Dt)+tcw2.at<float>(2);
             if(z2<=0)
                 continue;
             
             //Check reprojection error in first keyframe
             float sigmaSquare1 = mpCurrentKeyFrame->GetSigma2(kp1.octave);
             float x1 = Rcw1.row(0).dot(x3Dt)+tcw1.at<float>(0);
             float y1 = Rcw1.row(1).dot(x3Dt)+tcw1.at<float>(1);
             float invz1 = 1.0/z1;
             float u1 = fx1*x1*invz1+cx1;
             float v1 = fy1*y1*invz1+cy1;
             float errX1 = u1 - kp1.pt.x;
             float errY1 = v1 - kp1.pt.y;
             if((errX1*errX1+errY1*errY1)>5.991*sigmaSquare1)
                 continue;
             
             //Check reprojection error in second keyframe
             float sigmaSquare2 = pKF2->GetSigma2(kp2.octave);
             float x2 = Rcw2.row(0).dot(x3Dt)+tcw2.at<float>(0);
             float y2 = Rcw2.row(1).dot(x3Dt)+tcw2.at<float>(1);
             float invz2 = 1.0/z2;
             float u2 = fx2*x2*invz2+cx2;
             float v2 = fy2*y2*invz2+cy2;
             float errX2 = u2 - kp2.pt.x;
             float errY2 = v2 - kp2.pt.y;
             if((errX2*errX2+errY2*errY2)>5.991*sigmaSquare2)
                 continue;
             
             //Check scale consistency
             cv::Mat normal1 = x3D-Ow1;
             float dist1 = cv::norm(normal1);
             
             cv::Mat normal2 = x3D-Ow2;
             float dist2 = cv::norm(normal2);
             
             if(dist1==0 || dist2==0)
                 continue;
             
             float ratioDist = dist1/dist2;
             float ratioOctave = mpCurrentKeyFrame->GetScaleFactor(kp1.octave)/pKF2->GetScaleFactor(kp2.octave);
             if(ratioDist*ratioFactor<ratioOctave || ratioDist>ratioOctave*ratioFactor)
                 continue;
             
             // Triangulation is successful
             MapPoint* pMP = new MapPoint(x3D,mpCurrentKeyFrame,mpMap);
             
             pMP->AddObservation(pKF2,idx2);
             pMP->AddObservation(mpCurrentKeyFrame,idx1);
             
             mpCurrentKeyFrame->AddMapPoint(pMP,idx1);
             pKF2->AddMapPoint(pMP,idx2);
             
             pMP->ComputeDistinctiveDescriptors();
             
             pMP->UpdateNormalAndDepth();
             
             mpMap->AddMapPoint(pMP);
             mlpRecentAddedMapPoints.push_back(pMP);
         }
     }
 }
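
Both examples call ComputeF12 to obtain the fundamental matrix used for the epipolar search, but that function is not shown here. The sketch below illustrates how F12 is commonly composed in ORB-SLAM-style pipelines from the two world-to-camera poses and pinhole intrinsics; it is a hedged reconstruction under those assumptions, not the code of either project, and ComputeF12Sketch is a hypothetical name.

  #include <opencv2/core.hpp>

  // Fundamental matrix from keyframe 1 to keyframe 2, assuming world-to-camera
  // poses X_c1 = R1w*X_w + t1w and X_c2 = R2w*X_w + t2w (all CV_32F) and pinhole
  // intrinsics K1, K2, so that x1^T * F12 * x2 = 0 for pixel correspondences.
  cv::Mat ComputeF12Sketch(const cv::Mat &R1w, const cv::Mat &t1w,
                           const cv::Mat &R2w, const cv::Mat &t2w,
                           const cv::Mat &K1,  const cv::Mat &K2) {
    // Relative pose of camera 2 expressed in camera 1: X_c1 = R12*X_c2 + t12
    cv::Mat R12 = R1w * R2w.t();
    cv::Mat t12 = -R12 * t2w + t1w;

    // Skew-symmetric matrix so that t12x * v equals the cross product t12 x v
    cv::Mat t12x = (cv::Mat_<float>(3, 3) <<
                     0.f,               -t12.at<float>(2),  t12.at<float>(1),
                     t12.at<float>(2),   0.f,              -t12.at<float>(0),
                    -t12.at<float>(1),   t12.at<float>(0),  0.f);

    // Essential matrix E = [t12]_x * R12, then F12 = K1^{-T} * E * K2^{-1}
    return K1.t().inv() * t12x * R12 * K2.inv();
  }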