int mcmc_pose_estimation(int argc, char ** argv)
{
  try {
    
    using namespace nuklei;
    using namespace TCLAP;
    
    CmdLine cmd("");
    
    UnlabeledValueArg<std::string> objectFileArg
    ("object_evidence",
     "Object file.",
     true, "", "filename", cmd);
    
    UnlabeledValueArg<std::string> sceneFileArg
    ("scene_evidence",
     "Scene file.",
     true, "", "filename", cmd);
    
    ValueArg<std::string> alignedObjectEvidenceFileArg
    ("", "aligned",
     "Transformed object evidence, matching object pose.",
     false, "", "filename", cmd);
    
    ValueArg<int> nArg
    ("n", "n_model_points",
     "Number of particle supporting the object model.",
     false, 0, "int", cmd);
    
    ValueArg<double> locHArg
    ("l", "loc_h",
     "Location kernel width.",
     false, 0, "float", cmd);
    
    ValueArg<double> oriHArg
    ("o", "ori_h",
     "Orientation kernel width (in radians).",
     false, 0.2, "float", cmd);
    
    ValueArg<int> nChainsArg
    ("c", "n_chains",
     "Number of MCMC chains.",
     false, 0, "int", cmd);
    
    ValueArg<std::string> bestTransfoArg
    ("", "best_transfo",
     "File to write the most likely transformation to.",
     false, "", "filename", cmd);
    
    SwitchArg computeNormalsArg
    ("", "normals",
     "Compute a normal vector for all input points. Makes pose estimation more robust.", cmd);
    
    SwitchArg lightArg
    ("", "light",
     "Limit the scene model to 10000 points, for speed.", cmd);

    SwitchArg accurateScoreArg
    ("s", "accurate_score",
     "Recompute the matching score using all input points (instead of using N points as given by -n N).", cmd);
    
    cmd.parse( argc, argv );
    
    // ------------- //
    // Read-in data: //
    // ------------- //
    
    KernelCollection objectEvidence, sceneEvidence;
    readObservations(objectFileArg.getValue(), objectEvidence);
    readObservations(sceneFileArg.getValue(), sceneEvidence);
    
    if (objectEvidence.size() == 0 || sceneEvidence.size() == 0)
      NUKLEI_THROW("Empty input cloud.");
    
      if (computeNormalsArg.getValue())
      {
        std::cout << "Computing normals for object model..." << std::endl;
        objectEvidence.buildNeighborSearchTree();
        objectEvidence.computeSurfaceNormals();
        std::cout << "Computing normals for object model... done." << std::endl;
      }
      else
        std::cout << "Warning: object model is an R3 cloud. " <<
        "Pose estimation will be suboptimal. Use --normals to fix this." <<
        std::endl;

    if (computeNormalsArg.getValue())
      {
        std::cout << "Computing normals for scene model..." << std::endl;
        sceneEvidence.buildNeighborSearchTree();
        sceneEvidence.computeSurfaceNormals();
        std::cout << "Computing normals for scene model... done." << std::endl;
      }
      else
        std::cout << "Warning: scene model is an R3 cloud. " <<
        "Pose estimation will be suboptimal. Use --normals to fix this." <<
        std::endl;
    
    if (objectEvidence.front().polyType() != sceneEvidence.front().polyType())
      NUKLEI_THROW("Input point clouds must be defined on the same domain.");
    
    
    if (lightArg.getValue() && sceneEvidence.size() > 10000)
    {
      KernelCollection tmp;
      for (KernelCollection::sample_iterator i = sceneEvidence.sampleBegin(10000);
           i != i.end(); i++)
      {
        tmp.add(*i);
      }
      sceneEvidence = tmp;
    }

    if (sceneEvidence.size() > 10000)
      std::cout << "Warning: Scene model has more than 10000 points. "
      "To keep computation time low, keep the model under 10000 points. "
      "Use --light to fix this." << std::endl;
    
      
    // Kernel widths, for position and orientation:
    const double locH = (locHArg.getValue()<=0
                         ?
                         objectEvidence.moments()->getLocH()/10
                         :
                         locHArg.getValue()
                         );
    const double oriH = oriHArg.getValue(); // in radians
    
    // For best performances, choose a multiple of
    // the number of logical cores.
    const int nChains = (nChainsArg.getValue()<=0
                         ?
                         8
                         :
                         nChainsArg.getValue()
                         );
    
    int n = -1;
    
    if (nArg.getValue() <= 0)
    {
      n = objectEvidence.size();
      if (n > 1000)
      {
        std::cout << "Warning: Object model has more than 1000 points. "
        "To keep computational cost low, only 1000 points will be used at each "
        "inference loop. "
        "Use -n to force a large number of model points." << std::endl;
        n = 1000;
      }
    }
    else
      n = nArg.getValue();
    
    // ------------------------------- //
    // Prepare density for evaluation: //
    // ------------------------------- //
    
    sceneEvidence.setKernelLocH(locH);
    sceneEvidence.setKernelOriH(oriH);
    objectEvidence.setKernelLocH(locH);
    objectEvidence.setKernelOriH(oriH);
    
    objectEvidence.computeKernelStatistics();
    sceneEvidence.computeKernelStatistics();
    sceneEvidence.buildKdTree();
    
    kernel::se3 t = estimatePose(objectEvidence, sceneEvidence, nChains, n);
    
    if (accurateScoreArg.getValue())
    {
      t.setWeight(0);
      for (KernelCollection::const_iterator i = objectEvidence.begin();
           i != objectEvidence.end(); ++i)
      {
        weight_t w = 0;
        if (WEIGHTED_SUM_EVIDENCE_EVAL)
        {
          w = sceneEvidence.evaluationAt(*i->polyTransformedWith(t),
                                         KernelCollection::WEIGHTED_SUM_EVAL);
        }
        else
        {
          w = sceneEvidence.evaluationAt(*i->polyTransformedWith(t), KernelCollection::MAX_EVAL);
        }
        t.setWeight(t.getWeight() + w);
      }
    }
    
    if (!bestTransfoArg.getValue().empty())
    {
      writeSingleObservation(bestTransfoArg.getValue(), t);
    }
    
    if (!alignedObjectEvidenceFileArg.getValue().empty())
    {
      objectEvidence.transformWith(t);
      writeObservations(alignedObjectEvidenceFileArg.getValue(), objectEvidence);
    }
    
    return 0;
  }
  catch (std::exception &e) {
    std::cerr << "Exception caught: ";
    std::cerr << e.what() << std::endl;
    return EXIT_FAILURE;
  }
  catch (...) {
    std::cerr << "Caught unknown exception." << std::endl;
    return EXIT_FAILURE;
  }
  
}
/**
 * Matches keypoints between two images using the requested feature type
 * (sift / surf / orb / brisk), filters candidate matches with the
 * nearest-to-second-nearest distance ratio test, writes the surviving
 * matches to a file, and optionally displays them side by side.
 *
 * @return 0 on success or user abort, -1 when the output path is invalid.
 */
int main(int argc, const char * argv[])
{
    // parse input
    CmdLine cmd ("match a pair of images using specified features");
    
    vector<string> featureTypes;
    featureTypes.push_back("sift");
    featureTypes.push_back("surf");
    featureTypes.push_back("orb");
    featureTypes.push_back("brisk");
    ValuesConstraint<string> cmdFeatureTypes( featureTypes );
    ValueArg<string> cmdFeature("f", "feature", "feature type", true, "", &cmdFeatureTypes, cmd);
    
    ValueArg<string> cmd1st ("1", "1st", "1st image file path", true, "", "string", cmd);
    ValueArg<string> cmd2nd ("2", "2nd", "2nd image file path", true, "", "string", cmd);
    ValueArg<float> cmdThresh ("t", "threshold", "threshold for matching, 0-1, higher gives more matches", true, 3, "float", cmd);
    ValueArg<string> cmdOutM  ("o", "outmat", "file path for matches", false, "/dev/null", "string", cmd);
    SwitchArg cmdDisableImshow ("", "disable_image", "don't show image", cmd);
    MultiSwitchArg cmdVerbose ("v", "", "level of verbosity of output", cmd);
    
    cmd.parse(argc, argv);
    string           featureType    = cmdFeature.getValue();
    float            threshold      = cmdThresh.getValue();
    string           imageName1     = cmd1st.getValue();
    string           imageName2     = cmd2nd.getValue();
    string           outMName       = cmdOutM.getValue();
    bool             disableImshow  = cmdDisableImshow.getValue();
    int              verbose        = cmdVerbose.getValue();
    
    // validate the output path before doing any heavy work
    path outMPath = absolute(path(outMName));
    if (! exists(outMPath.parent_path()))
    {
        cerr << "parent path " << outMPath.parent_path() << " doesn't exist." << endl;
        return -1;
    }
    if (is_directory(outMPath))
    {
        cerr << "writeSimpleMatches: Need a filename, not a directory: " << outMPath << endl;
        return -1;
    }
    
    // load images
    Mat im1, im2;
    if (!evg::loadImage(imageName1, im1)) return 0;
    if (!evg::loadImage(imageName2, im2)) return 0;
    
    // setup detectors
    Ptr<FeatureDetector> detector = newFeatureDetector (featureType);
    Ptr<DescriptorExtractor> extractor = newDescriptorExtractor (featureType);
    Ptr<DescriptorMatcher> matcher = newMatcher (featureType, verbose);
    
    // detect, describe, and find the two nearest candidates per descriptor
    vector<KeyPoint> keypoints1, keypoints2;
    Mat descriptors1, descriptors2;
    vector< vector<DMatch> > matchesPairs;
    
    detector->detect (im1, keypoints1);
    detector->detect (im2, keypoints2);
    extractor->compute (im1, keypoints1, descriptors1);
    extractor->compute (im2, keypoints2, descriptors2);
    matcher->knnMatch (descriptors1, descriptors2, matchesPairs, 2);

    // filter based on relative distance to the two closest (ratio test)
    vector<DMatch> matches;
    matches.reserve (matchesPairs.size());
    for (size_t i = 0; i != matchesPairs.size(); ++i)
    {
        // knnMatch may return fewer than two candidates for a query
        // descriptor (e.g. when the second image yields a single
        // descriptor); skip those to avoid out-of-bounds access.
        if (matchesPairs[i].size() < 2) continue;
        float ratio = matchesPairs[i][0].distance / matchesPairs[i][1].distance;
        if (ratio < threshold)
        {
            if (verbose >= 2) cout << ratio << " ";
            // store the ratio in place of the raw distance for downstream use
            matchesPairs[i][0].distance = ratio;
            matches.push_back (matchesPairs[i][0]);
        }
    }
    if (verbose >= 2) cout << endl;

    
    // write results
    evg::writeSimpleMatches (outMPath.string(), imageName1, imageName2, keypoints1, keypoints2, matches);
    
    if (!disableImshow)
    {
        Mat im1gray, im2gray;
        // NOTE(review): OpenCV's imread loads images as BGR; if
        // evg::loadImage does the same, CV_BGR2GRAY may be intended
        // here — confirm against evg::loadImage.
        cvtColor(im1, im1gray, CV_RGB2GRAY);
        cvtColor(im2, im2gray, CV_RGB2GRAY);
        // scale so the first image fits half of a 1440-px-wide screen
        float factor = float(1440) / im1gray.cols / 2;
        vector<KeyPoint> keypoints1im = keypoints1, keypoints2im = keypoints2;
        for (size_t i = 0; i != keypoints1im.size(); ++i)
        {
            keypoints1im[i].pt.x = keypoints1im[i].pt.x * factor;
            keypoints1im[i].pt.y = keypoints1im[i].pt.y * factor;
        }
        for (size_t i = 0; i != keypoints2im.size(); ++i)
        {
            keypoints2im[i].pt.x = keypoints2im[i].pt.x * factor;
            keypoints2im[i].pt.y = keypoints2im[i].pt.y * factor;
        }
        
        resize(im1gray, im1gray, Size(), factor, factor);
        resize(im2gray, im2gray, Size(), factor, factor);
        Mat imgMatches;
        drawMatches (im1gray, keypoints1im, im2gray, keypoints2im, matches, imgMatches,
                     Scalar::all(-1), Scalar::all(-1),
                     vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );
        imshow( "matches", imgMatches );
        if (waitKey(0) == 27) return 0;
    }
    
    return 0;
}
// Esempio n. 3
// 0
/**
 * Entry point: applies a 2-D quasi-affine transform to an input image.
 *
 * @param argc number of command-line arguments
 * @param argv command-line arguments (parsed with TCLAP)
 *
 * @return 0 on success
 */
int main ( int argc, char** argv )
{

  try
    {
      // Command-line parser (TCLAP)
      CmdLine cmd ( "Quasi Affine Transform in dimension 2", ' ', "0.3" );
      vector<Arg *> xorlist;

      // Mutually exclusive options: exactly one mapping strategy is accepted.
      SwitchArg backwardSwitch ( "l","linearbackward","Bilinear Backward Mapping", false );
      xorlist.push_back(&backwardSwitch);
      SwitchArg backwardNNSwitch ( "n","NNbackward","Nearest Neighbor Backward Mapping", false );
      xorlist.push_back(&backwardNNSwitch);
      SwitchArg naiveSwitch ( "","naive","Naive BoundingRect method",false );
      xorlist.push_back(&naiveSwitch);
      SwitchArg periodicitySwitch ( "p","periodicity","Use paving periodicity", false );
      xorlist.push_back(&periodicitySwitch);
      cmd.xorAdd ( xorlist);
      
      SwitchArg nomultiplySwitch ( "m","no_multiplication","No multiplications in the paving computation", false );
      cmd.add ( nomultiplySwitch );
      SwitchArg fakeColorSwitch ( "f","fake_color","Output fake colors to illustrate pavings (non contracting AQA only)", false );
      cmd.add ( fakeColorSwitch );
      SwitchArg compositionSwitch ( "","composition","Composition test: f.f^{-1}", false );
      cmd.add ( compositionSwitch );
      ValueArg<string> transformFile ( "t","transform","The transform file name (this file should contain in this order : omega, a, b, c, d, e, f, separated with spaces, where the quasi-affine transform is : (ax + by + e, cx + dy + f) / omega)",true,"","file name" );
      cmd.add ( transformFile );
      ValueArg<string> outFile ( "o","output","The output image file name",true,"","file name" );
      cmd.add ( outFile );
      ValueArg<string> inFile ( "i","input","The input image file name",true,"","file name" );
      cmd.add ( inFile );

      // Parse the argv array.
      cmd.parse ( argc, argv );

      // Interpolation type: bilinear, nearest-neighbor, or no backward mapping.
      InterpolationType interp = NO_BM;
      if ( backwardSwitch.getValue() )
        interp = LINEAR;
      else
        if ( backwardNNSwitch.getValue() )
          interp = NN;

      bool useBoundingRect = naiveSwitch.getValue();
      bool noMultiply = nomultiplySwitch.getValue();
      bool usePeriodicity = periodicitySwitch.getValue();
      bool fakeColor = fakeColorSwitch.getValue();
      bool composition = compositionSwitch.getValue();

      // Echo the effective configuration.
      cout <<"Cmd Line Test: [ ";

      cout << "Interpolation type : " << interp <<", ";
      cout << "BoundingRect : ";
      if ( useBoundingRect )
        cout << "Y, ";
      else
        cout << "N, ";

      cout << "noMultiply : ";
      if ( noMultiply )
        cout << "Y, ";
      else
        cout << "N, ";

      cout << "Periodicity : ";
      if ( usePeriodicity )
        cout << "Y";
      else
        cout << "N";
      cout << " ]"<<endl;

      cout << "Fake Colors: ";
      if ( fakeColor )
        cout << "Y";
      else
        cout << "N";
      cout << " ]"<<endl; // NOTE(review): stray "]" — no matching "[" is printed

      // Read the transform parameters: omega, the 2x2 matrix (a b; c d) and
      // the translation (e, f).  Zero-initialize and fail loudly when the
      // file is missing or malformed: previously the values were used
      // uninitialized on read failure, which is undefined behavior.
      int o = 0, a = 0, b = 0, c = 0, d = 0, e = 0, f = 0;
      fstream transform ( transformFile.getValue().c_str(), fstream::in );
      if ( ! ( transform >> o >> a >> b >> c >> d >> e >> f ) )
        {
          std::cerr << "error: cannot read transform parameters from '"
                    << transformFile.getValue() << "'" << std::endl;
          return EXIT_FAILURE;
        }
      transform.close();

      Image initialImage ( inFile.getValue() );

      // qat applies the forward transform; qat2 is an identical copy used
      // to compute the inverse in the composition test.
      QAT qat ( Matrix2x2 ( a, b, c, d ), o, Vector2D ( e, f ) );

      QAT qat2 ( Matrix2x2 ( a, b, c, d ), o, Vector2D ( e, f ) );

      if ( composition )
        {
          Image finalImage = qat.applyToImage ( initialImage, interp, useBoundingRect, usePeriodicity, noMultiply, fakeColor, false );
          cout << "Inverse computation..."<<endl;
          Image inverse = qat2.applyToImage ( finalImage, interp, useBoundingRect, usePeriodicity, noMultiply, fakeColor, true );
          inverse.write ( outFile.getValue() );

          // Round-trip fidelity of f^{-1}(f(image)).
          double db = psnr ( initialImage, inverse );
          cout << "PSNR = "<<db<<" db"<<endl;
        }
      else
        {
          Image finalImage = qat.applyToImage ( initialImage, interp , useBoundingRect, usePeriodicity, noMultiply, fakeColor, false );
          finalImage.write ( outFile.getValue() );
        }

    }
  catch ( ArgException &e )  // catch any exceptions
    {
      std::cerr << "error: " << e.error() << " for arg " << e.argId() << std::endl;
      // Previously fell through and returned 0 — success — on a parse error.
      return EXIT_FAILURE;
    }

  return 0;
}