Example #1
void MatchesPointsToMat
(
  const matching::IndMatches & putativeMatches,
  const cameras::IntrinsicBase * cam_I,
  const features::PointFeatures & feature_I,
  const cameras::IntrinsicBase * cam_J,
  const features::PointFeatures & feature_J,
  Mat2X & x_I,
  Mat2X & x_J
)
{
  const size_t n = putativeMatches.size();
  x_I.resize(2, n);
  x_J.resize(2, n);
  using Scalar = Mat2X::Scalar; // Scalar type of the output matrices

  for (size_t i = 0; i < n; ++i)  {
    const features::PointFeature & pt_I = feature_I[putativeMatches[i].i_];
    const features::PointFeature & pt_J = feature_J[putativeMatches[i].j_];
    if (cam_I)
      x_I.col(i) = cam_I->get_ud_pixel(pt_I.coords().cast<Scalar>());
    else
      x_I.col(i) = pt_I.coords().cast<Scalar>();

    if (cam_J)
      x_J.col(i) = cam_J->get_ud_pixel(pt_J.coords().cast<Scalar>());
    else
      x_J.col(i) = pt_J.coords().cast<Scalar>();
  }
}
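
This helper packs the coordinates of the matched features into two column-aligned 2xN matrices, undistorting each point through its camera intrinsic when one is available. The sketch below shows the core index-to-column mapping in plain Eigen; the IndexMatch struct and the point containers are simplified stand-ins rather than openMVG types, and the undistortion step is omitted.

#include <Eigen/Core>
#include <cstddef>
#include <vector>

// Simplified stand-in for matching::IndMatch: one feature index per image.
struct IndexMatch { std::size_t i_, j_; };

void PackMatchedPoints(
  const std::vector<IndexMatch> & matches,
  const std::vector<Eigen::Vector2d> & points_I,
  const std::vector<Eigen::Vector2d> & points_J,
  Eigen::Matrix2Xd & x_I,
  Eigen::Matrix2Xd & x_J)
{
  x_I.resize(2, matches.size());
  x_J.resize(2, matches.size());
  for (std::size_t c = 0; c < matches.size(); ++c) {
    // Match c selects one feature per image; both land in column c.
    x_I.col(c) = points_I[matches[c].i_];
    x_J.col(c) = points_J[matches[c].j_];
  }
}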
Example #2
  /// Robust fitting of the HOMOGRAPHY matrix
  bool Robust_estimation(
    const sfm::SfM_Data * sfm_data,
    const std::shared_ptr<sfm::Regions_Provider> & regions_provider,
    const Pair pairIndex,
    const matching::IndMatches & vec_PutativeMatches,
    matching::IndMatches & geometric_inliers)
  {
    using namespace openMVG;
    using namespace openMVG::robust;
    geometric_inliers.clear();

    // Get back corresponding view index
    const IndexT iIndex = pairIndex.first;
    const IndexT jIndex = pairIndex.second;

    //--
    // Get corresponding point regions arrays
    //--

    Mat xI,xJ;
    MatchesPairToMat(pairIndex, vec_PutativeMatches, sfm_data, regions_provider, xI, xJ);

    //--
    // Robust estimation
    //--

    // Define the AContrario adapted Homography matrix solver
    typedef ACKernelAdaptor<
      openMVG::homography::kernel::FourPointSolver,
      openMVG::homography::kernel::AsymmetricError,
      UnnormalizerI,
      Mat3>
      KernelType;

    KernelType kernel(
      xI, sfm_data->GetViews().at(iIndex)->ui_width, sfm_data->GetViews().at(iIndex)->ui_height,
      xJ, sfm_data->GetViews().at(jIndex)->ui_width, sfm_data->GetViews().at(jIndex)->ui_height,
      false); // configure as point to point error model.

    // Robustly estimate the Homography matrix with A Contrario ransac
    const double upper_bound_precision = Square(m_dPrecision);
    std::vector<size_t> vec_inliers;
    const std::pair<double,double> ACRansacOut =
      ACRANSAC(kernel, vec_inliers, m_stIteration, &m_H, upper_bound_precision);

    if (vec_inliers.size() > KernelType::MINIMUM_SAMPLES * 2.5)  {
      m_dPrecision_robust = ACRansacOut.first;
      // update geometric_inliers
      geometric_inliers.reserve(vec_inliers.size());
      for (const size_t & index : vec_inliers)  {
        geometric_inliers.push_back(vec_PutativeMatches[index]);
      }
      return true;
    }
    else  {
      vec_inliers.clear();
      return false;
    }
  }
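
The method estimates a homography between the two views with ACRANSAC and keeps the model only when the inlier set is well above the minimal sample size (2.5x four points). A hypothetical call site might look as follows; h_filter, sfm_data, regions_provider, vec_putative_matches and the view ids are assumed to exist and to have been configured by earlier pipeline stages.

// Hypothetical call site (illustration only): h_filter is assumed to be an
// instance of the class that owns Robust_estimation above, already holding
// m_dPrecision and m_stIteration; the other variables are assumed to come
// from earlier matching stages.
const Pair current_pair(view_id_I, view_id_J);
matching::IndMatches homography_inliers;
if (h_filter.Robust_estimation(
      &sfm_data, regions_provider, current_pair,
      vec_putative_matches, homography_inliers))
{
  // homography_inliers now indexes the putative matches consistent with m_H,
  // and m_dPrecision_robust holds the precision found by ACRANSAC.
}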
Example #3
  bool Geometry_guided_matching
  (
    const sfm::SfM_Data * sfm_data,
    const std::shared_ptr<sfm::Regions_Provider> & regions_provider,
    const Pair pairIndex,
    const double dDistanceRatio,
    matching::IndMatches & matches
  )
  {
    if (m_dPrecision_robust != std::numeric_limits<double>::infinity())
    {
      // Get back corresponding view index
      const IndexT iIndex = pairIndex.first;
      const IndexT jIndex = pairIndex.second;

      const sfm::View * view_I = sfm_data->views.at(iIndex).get();
      const sfm::View * view_J = sfm_data->views.at(jIndex).get();

      // Retrieve corresponding pair camera intrinsic if any
      const cameras::IntrinsicBase * cam_I =
        sfm_data->GetIntrinsics().count(view_I->id_intrinsic) ?
          sfm_data->GetIntrinsics().at(view_I->id_intrinsic).get() : nullptr;
      const cameras::IntrinsicBase * cam_J =
        sfm_data->GetIntrinsics().count(view_J->id_intrinsic) ?
          sfm_data->GetIntrinsics().at(view_J->id_intrinsic).get() : nullptr;

      if (dDistanceRatio < 0)
      {
        // Filtering based only on region positions
        const features::PointFeatures pointsFeaturesI =
          regions_provider->regions_per_view.at(iIndex)->GetRegionsPositions();
        const features::PointFeatures pointsFeaturesJ =
          regions_provider->regions_per_view.at(jIndex)->GetRegionsPositions();
        Mat xI, xJ;
        PointsToMat(cam_I, pointsFeaturesI, xI);
        PointsToMat(cam_J, pointsFeaturesJ, xJ);

        geometry_aware::GuidedMatching
          <Mat3, openMVG::homography::kernel::AsymmetricError>(
          m_H, xI, xJ, Square(m_dPrecision_robust), matches);

        // Remove duplicates
        matching::IndMatch::getDeduplicated(matches);

        // Remove matches that have the same (X,Y) coordinates
        matching::IndMatchDecorator<float> matchDeduplicator(matches, pointsFeaturesI, pointsFeaturesJ);
        matchDeduplicator.getDeduplicated(matches);
      }
      else
      {
        // Filtering based on region positions and regions descriptors
        geometry_aware::GuidedMatching
          <Mat3, openMVG::homography::kernel::AsymmetricError>(
          m_H,
          cam_I, *regions_provider->regions_per_view.at(iIndex),
          cam_J, *regions_provider->regions_per_view.at(jIndex),
          Square(m_dPrecision_robust), Square(dDistanceRatio),
          matches);
      }
    }
    return matches.size() != 0;
  }
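
When only feature positions are available (dDistanceRatio < 0), a correspondence is accepted if mapping the point from image I through the estimated homography lands close enough to the candidate point in image J. The standalone sketch below (plain Eigen, an illustration of the transfer test rather than openMVG's AsymmetricError implementation) spells that out.

#include <Eigen/Dense>

// Squared transfer error of the correspondence (x_I, x_J) under H:
// x_I is mapped into image J and compared to x_J in pixels.
double HomographyTransferSquaredError(
  const Eigen::Matrix3d & H,
  const Eigen::Vector2d & x_I,
  const Eigen::Vector2d & x_J)
{
  const Eigen::Vector3d Hx = H * x_I.homogeneous(); // homogeneous mapping
  return (x_J - Hx.hnormalized()).squaredNorm();    // de-homogenize, compare
}

// A candidate pair is kept when its error stays below the squared precision.
bool AcceptCorrespondence(
  const Eigen::Matrix3d & H,
  const Eigen::Vector2d & x_I,
  const Eigen::Vector2d & x_J,
  const double squared_precision)
{
  return HomographyTransferSquaredError(H, x_I, x_J) <= squared_precision;
}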
Example #4
  bool Geometry_guided_matching
  (
    const sfm::SfM_Data * sfm_data,
    const std::shared_ptr<sfm::Regions_Provider> & regions_provider,
    const Pair pairIndex,
    const double dDistanceRatio,
    matching::IndMatches & matches
  )
  {
    if (m_dPrecision_robust != std::numeric_limits<double>::infinity())
    {
      // Get back corresponding view index
      const IndexT iIndex = pairIndex.first;
      const IndexT jIndex = pairIndex.second;

      const sfm::View * view_I = sfm_data->views.at(iIndex).get();
      const sfm::View * view_J = sfm_data->views.at(jIndex).get();

      // Check that valid cameras can be retrieved for the pair of views
      const cameras::IntrinsicBase * cam_I =
        sfm_data->GetIntrinsics().count(view_I->id_intrinsic) ?
          sfm_data->GetIntrinsics().at(view_I->id_intrinsic).get() : nullptr;
      const cameras::IntrinsicBase * cam_J =
        sfm_data->GetIntrinsics().count(view_J->id_intrinsic) ?
          sfm_data->GetIntrinsics().at(view_J->id_intrinsic).get() : nullptr;

      if (!cam_I || !cam_J)
        return false;

      if ( !isPinhole(cam_I->getType()) || !isPinhole(cam_J->getType()))
        return false;

      const cameras::Pinhole_Intrinsic * ptrPinhole_I = (const cameras::Pinhole_Intrinsic*)(cam_I);
      const cameras::Pinhole_Intrinsic * ptrPinhole_J = (const cameras::Pinhole_Intrinsic*)(cam_J);

      Mat3 F;
      FundamentalFromEssential(m_E, ptrPinhole_I->K(), ptrPinhole_J->K(), &F);

      geometry_aware::GuidedMatching
        <Mat3,
        openMVG::fundamental::kernel::EpipolarDistanceError>(
        //openMVG::fundamental::kernel::SymmetricEpipolarDistanceError>(
        F,
        cam_I, *regions_provider->regions_per_view.at(iIndex),
        cam_J, *regions_provider->regions_per_view.at(jIndex),
        Square(m_dPrecision_robust), Square(dDistanceRatio),
        matches);
    }
    return matches.size() != 0;
  }
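
Because the epipolar residuals are evaluated in pixel coordinates, the robustly estimated essential matrix m_E is first upgraded to a fundamental matrix using the two pinhole calibrations. The relation is the standard one below; the sketch is a plain-Eigen illustration, not openMVG's FundamentalFromEssential.

#include <Eigen/Dense>

// E constrains normalized camera coordinates, F constrains pixel coordinates:
// F = K_J^{-T} * E * K_I^{-1}.
Eigen::Matrix3d FundamentalFromEssentialSketch(
  const Eigen::Matrix3d & E,
  const Eigen::Matrix3d & K_I,  // calibration of the first view
  const Eigen::Matrix3d & K_J)  // calibration of the second view
{
  return K_J.transpose().inverse() * E * K_I.inverse();
}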
Example #5
  bool Geometry_guided_matching
  (
    const sfm::SfM_Data * sfm_data,
    const std::shared_ptr<sfm::Regions_Provider> & regions_provider,
    const Pair pairIndex,
    const double dDistanceRatio,
    matching::IndMatches & matches
  )
  {
    if (m_dPrecision_robust != std::numeric_limits<double>::infinity())
    {
      // Get back corresponding view index
      const IndexT iIndex = pairIndex.first;
      const IndexT jIndex = pairIndex.second;

      const sfm::View * view_I = sfm_data->views.at(iIndex).get();
      const sfm::View * view_J = sfm_data->views.at(jIndex).get();

      // Retrieve corresponding pair camera intrinsic if any
      const cameras::IntrinsicBase * cam_I =
        sfm_data->GetIntrinsics().count(view_I->id_intrinsic) ?
          sfm_data->GetIntrinsics().at(view_I->id_intrinsic).get() : nullptr;
      const cameras::IntrinsicBase * cam_J =
        sfm_data->GetIntrinsics().count(view_J->id_intrinsic) ?
          sfm_data->GetIntrinsics().at(view_J->id_intrinsic).get() : nullptr;

      // Check the features correspondences that agree in the geometric and photometric domain
      geometry_aware::GuidedMatching
        <Mat3,
        openMVG::fundamental::kernel::EpipolarDistanceError>(
        //openMVG::fundamental::kernel::SymmetricEpipolarDistanceError>(
        m_F,
        cam_I, *regions_provider->regions_per_view.at(iIndex),
        cam_J, *regions_provider->regions_per_view.at(jIndex),
        Square(m_dPrecision_robust), Square(dDistanceRatio),
        matches);
    }
    return matches.size() != 0;
  }
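
Here the fundamental matrix m_F is already expressed in pixel coordinates, and each candidate correspondence is scored by how far the point in image J lies from the epipolar line induced by its partner in image I. The sketch below (plain Eigen, an illustration of that point-to-line residual rather than openMVG's EpipolarDistanceError code) makes the computation explicit.

#include <Eigen/Dense>

// Squared distance from x_J to the epipolar line l = F * x_I in image J.
double EpipolarSquaredDistance(
  const Eigen::Matrix3d & F,
  const Eigen::Vector2d & x_I,
  const Eigen::Vector2d & x_J)
{
  const Eigen::Vector3d line = F * x_I.homogeneous();   // line a*x + b*y + c = 0
  const double algebraic = line.dot(x_J.homogeneous()); // a*x_J + b*y_J + c
  return (algebraic * algebraic) /
         (line(0) * line(0) + line(1) * line(1));       // squared point-to-line distance
}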
Example #6
  bool Robust_estimation(
    const sfm::SfM_Data * sfm_data,
    const std::shared_ptr<Regions_or_Features_ProviderT> & regions_provider,
    const Pair pairIndex,
    const matching::IndMatches & vec_PutativeMatches,
    matching::IndMatches & geometric_inliers)
  {
    using namespace openMVG;
    using namespace openMVG::robust;
    geometric_inliers.clear();

    // Get back corresponding view index
    const IndexT iIndex = pairIndex.first;
    const IndexT jIndex = pairIndex.second;

    //--
    // Reject pair with missing Intrinsic information
    //--

    const sfm::View * view_I = sfm_data->views.at(iIndex).get();
    const sfm::View * view_J = sfm_data->views.at(jIndex).get();

    // Check that valid cameras can be retrieved for the pair of views
    const cameras::IntrinsicBase * cam_I =
      sfm_data->GetIntrinsics().count(view_I->id_intrinsic) ?
        sfm_data->GetIntrinsics().at(view_I->id_intrinsic).get() : nullptr;
    const cameras::IntrinsicBase * cam_J =
      sfm_data->GetIntrinsics().count(view_J->id_intrinsic) ?
        sfm_data->GetIntrinsics().at(view_J->id_intrinsic).get() : nullptr;

    if (!cam_I || !cam_J)
      return false;
    if ( !isPinhole(cam_I->getType()) || !isPinhole(cam_J->getType()))
      return false;

    //--
    // Get corresponding point regions arrays
    //--

    Mat xI,xJ;
    MatchesPairToMat(pairIndex, vec_PutativeMatches, sfm_data, regions_provider, xI, xJ);

    //--
    // Robust estimation
    //--

    // Define the AContrario adapted Essential matrix solver
    typedef ACKernelAdaptorEssential<
        openMVG::essential::kernel::FivePointKernel,
        openMVG::fundamental::kernel::EpipolarDistanceError,
        UnnormalizerT,
        Mat3>
        KernelType;

    const cameras::Pinhole_Intrinsic * ptrPinhole_I = (const cameras::Pinhole_Intrinsic*)(cam_I);
    const cameras::Pinhole_Intrinsic * ptrPinhole_J = (const cameras::Pinhole_Intrinsic*)(cam_J);

    KernelType kernel(
      xI, sfm_data->GetViews().at(iIndex)->ui_width, sfm_data->GetViews().at(iIndex)->ui_height,
      xJ, sfm_data->GetViews().at(jIndex)->ui_width, sfm_data->GetViews().at(jIndex)->ui_height,
      ptrPinhole_I->K(), ptrPinhole_J->K());

    // Robustly estimate the Essential matrix with A Contrario ransac
    const double upper_bound_precision = Square(m_dPrecision);
    std::vector<size_t> vec_inliers;
    const std::pair<double,double> ACRansacOut =
      ACRANSAC(kernel, vec_inliers, m_stIteration, &m_E, upper_bound_precision);

    if (vec_inliers.size() > KernelType::MINIMUM_SAMPLES * 2.5)  {
      m_dPrecision_robust = ACRansacOut.first;
      // update geometric_inliers
      geometric_inliers.reserve(vec_inliers.size());
      for (const size_t & index : vec_inliers)  {
        geometric_inliers.push_back(vec_PutativeMatches[index]);
      }
      return true;
    }
    else  {
      vec_inliers.clear();
      return false;
    }
  }
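
Unlike the homography and fundamental-matrix variants, this filter insists on pinhole intrinsics for both views: the five-point solver works on normalized camera coordinates, i.e. pixels mapped through the inverse calibration. The standalone sketch below (plain Eigen, not openMVG code) shows that normalization and the algebraic epipolar constraint the essential matrix enforces.

#include <Eigen/Dense>

// x' = K^{-1} * x converts pixels into normalized camera coordinates; an
// ideal calibrated correspondence satisfies x_J'^T * E * x_I' = 0.
double EssentialAlgebraicResidual(
  const Eigen::Matrix3d & E,
  const Eigen::Matrix3d & K_I, const Eigen::Vector2d & x_I,
  const Eigen::Matrix3d & K_J, const Eigen::Vector2d & x_J)
{
  const Eigen::Vector3d xn_I = K_I.inverse() * x_I.homogeneous();
  const Eigen::Vector3d xn_J = K_J.inverse() * x_J.homogeneous();
  return xn_J.dot(E * xn_I);  // zero for a perfect, noise-free match
}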