int main(int argc, const char* argv[])
{
    // Print OpenCV build info:
    // std::cout << cv::getBuildInformation() << std::endl;
    
    std::vector<FeatureAlgorithm>              algorithms;
    std::vector<cv::Ptr<ImageTransformation> > transformations;
    
    // Initialize list of algorithm tuples:
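    // Each FeatureAlgorithm bundles a keypoint detector, a descriptor extractor
    // and a descriptor matcher under a human-readable name; every combination is
    // later evaluated against every image transformation.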
    algorithms.push_back(FeatureAlgorithm("SURF-FREAK",
                                          new cv::SurfFeatureDetector(),
                                          new cv::FREAK(),
                                          new cv::BFMatcher(cv::NORM_HAMMING)));
                                          
    algorithms.push_back(FeatureAlgorithm("ORB-FREAK",
                                          new cv::OrbFeatureDetector(),
                                          new cv::FREAK(),
                                          new cv::BFMatcher(cv::NORM_HAMMING)));
    
    algorithms.push_back(FeatureAlgorithm("ORB - 2",
                                          new cv::ORB(),
                                          new cv::ORB(),
                                          new cv::BFMatcher(cv::NORM_HAMMING, false)));
    
    algorithms.push_back(FeatureAlgorithm("ORB - 3",
                                          new cv::ORB(500, 1.2f, 8, 31, 0, 3),
                                          new cv::ORB(500, 1.2f, 8, 31, 0, 3),
                                          new cv::BFMatcher(cv::NORM_HAMMING2, false)));

    algorithms.push_back(FeatureAlgorithm("ORB - 4",
                                          new cv::ORB(500, 1.2f, 8, 31, 0, 4),
                                          new cv::ORB(500, 1.2f, 8, 31, 0, 4),
                                          new cv::BFMatcher(cv::NORM_HAMMING2, false)));
    
    algorithms.push_back(FeatureAlgorithm("FAST+BRIEF",
                                          new cv::FastFeatureDetector(50),
                                          new cv::BriefDescriptorExtractor(),
                                          new cv::BFMatcher(cv::NORM_HAMMING, false)));

    algorithms.push_back(FeatureAlgorithm("SURF-BruteForce",
                                          new cv::SurfFeatureDetector(),
                                          new cv::SurfDescriptorExtractor(),
                                          new cv::BFMatcher(cv::NORM_L2, false)));

    algorithms.push_back(FeatureAlgorithm("SURF-Flann",
                                          new cv::SurfFeatureDetector(),
                                          new cv::SurfDescriptorExtractor(),
                                          new cv::FlannBasedMatcher()));
    // Initialize list of used transformations:
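    // Transformations applied to each test image: Gaussian blur, brightness shift,
    // in-plane rotation about the image center and uniform scaling.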
    transformations.push_back(new GaussianBlurTransform(9));
    transformations.push_back(new BrightnessImageTransform(-127, +127, 1));
    transformations.push_back(new ImageRotationTransformation(0, 360, 1, cv::Point2f(0.5f, 0.5f)));
    transformations.push_back(new ImageScalingTransformation(0.25f, 2.0f, 0.01f));
    
    if (argc < 2)
    {
        std::cout << "At least one input image should be passed" << std::endl;
        return 1;
    }
    
    for (int imageIndex = 1; imageIndex < argc; imageIndex++)
    {
        std::string testImagePath(argv[imageIndex]);
        cv::Mat testImage = cv::imread(testImagePath);
        
        CollectedStatistics fullStat;
        
        if (testImage.empty())
        {
            std::cout << "Cannot read image from " << testImagePath << std::endl;
            continue;
        }
        
        //std::cout << "[" << testImagePath << "]" << std::endl;
        
        for (size_t algIndex = 0; algIndex < algorithms.size(); algIndex++)
        {
            const FeatureAlgorithm& alg   = algorithms[algIndex];

            for (size_t transformIndex = 0; transformIndex < transformations.size(); transformIndex++)
            {
                const ImageTransformation& trans = *transformations[transformIndex].obj;

                performEstimation(alg, trans, testImage.clone(), fullStat.getStatistics(alg.name, trans.name));
            }
        }
        
        fullStat.printPerformanceStatistics(std::cout);
        fullStat.printStatistics(std::cout, StatisticsElementPercentOfCorrectMatches);
        fullStat.printStatistics(std::cout, StatisticsElementMatchingRatio);
        fullStat.printStatistics(std::cout, StatisticsElementMeanDistance);
    }
    
    return 0;
}
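
// Extended variant of the same benchmark: adds BRISK-based combinations, an
// optional cross-check flag for the brute-force matchers, a verbose/coarse
// transformation switch, and writes the collected statistics to CSV files
// instead of printing them to stdout.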
int main(int argc, const char* argv[])
{
    // Print OpenCV build info:
    std::cout << cv::getBuildInformation() << std::endl;

    std::vector<FeatureAlgorithm>              algorithms;
    std::vector<cv::Ptr<ImageTransformation> > transformations;

    bool useCrossCheck = true;
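    // Cross-checking makes cv::BFMatcher keep only mutual nearest-neighbor matches
    // (a is accepted only if it is also the best match for its counterpart).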

    // Initialize list of algorithm tuples:
    algorithms.push_back(FeatureAlgorithm("BRISK/BRISK/BF",
        new cv::BriskFeatureDetector(60, 4),
        new cv::BriskDescriptorExtractor(),
        new cv::BFMatcher(cv::NORM_HAMMING, useCrossCheck)));

    algorithms.push_back(FeatureAlgorithm("ORB/ORB/BF",
        new cv::ORB(),
        new cv::ORB(),
        new cv::BFMatcher(cv::NORM_HAMMING, useCrossCheck)));

    algorithms.push_back(FeatureAlgorithm("SURF/BRISK/BF",
        new cv::SurfFeatureDetector(),
        new cv::BriskDescriptorExtractor(),
        new cv::BFMatcher(cv::NORM_HAMMING, useCrossCheck)));

    algorithms.push_back(FeatureAlgorithm("SURF/FREAK/BF",
        new cv::SurfFeatureDetector(),
        new cv::FREAK(),
        new cv::BFMatcher(cv::NORM_HAMMING, useCrossCheck)));

    algorithms.push_back(FeatureAlgorithm("ORB3/ORB3/BF",
        new cv::ORB(500, 1.2f, 8, 31, 0, 3),
        new cv::ORB(500, 1.2f, 8, 31, 0, 3),
        new cv::BFMatcher(cv::NORM_HAMMING2, useCrossCheck)));

    algorithms.push_back(FeatureAlgorithm("ORB4/ORB4/BF",
        new cv::ORB(500, 1.2f, 8, 31, 0, 4),
        new cv::ORB(500, 1.2f, 8, 31, 0, 4),
        new cv::BFMatcher(cv::NORM_HAMMING2, useCrossCheck)));

    algorithms.push_back(FeatureAlgorithm("FAST/BRIEF/BF",
        new cv::FastFeatureDetector(50),
        new cv::BriefDescriptorExtractor(),
        new cv::BFMatcher(cv::NORM_HAMMING, useCrossCheck)));

    algorithms.push_back(FeatureAlgorithm("ORB/FREAK/BF",
        new cv::OrbFeatureDetector(),
        new cv::FREAK(),
        new cv::BFMatcher(cv::NORM_HAMMING, useCrossCheck)));

    algorithms.push_back(FeatureAlgorithm("SURF/SURF/BF",
        new cv::SurfFeatureDetector(),
        new cv::SurfDescriptorExtractor(),
        new cv::BFMatcher(cv::NORM_L2, useCrossCheck)));

    algorithms.push_back(FeatureAlgorithm("SURF/SURF/FLANN",
        new cv::SurfFeatureDetector(),
        new cv::SurfDescriptorExtractor(),
        new cv::FlannBasedMatcher()));


    // Initialize list of used transformations:
    if (USE_VERBOSE_TRANSFORMATIONS)
    {
        // Verbose (fine-grained) transformation set:
        transformations.push_back(new GaussianBlurTransform(9));
        transformations.push_back(new BrightnessImageTransform(-127, +127, 1));
        transformations.push_back(new ImageRotationTransformation(0, 360, 1, cv::Point2f(0.5f, 0.5f)));
        transformations.push_back(new ImageScalingTransformation(0.25f, 2.0f, 0.01f));
    }
    else
    {
        // Default (coarser) transformation set for quicker runs:
        transformations.push_back(new GaussianBlurTransform(9));
        transformations.push_back(new BrightnessImageTransform(-127, +127, 10));
        transformations.push_back(new ImageRotationTransformation(0, 360, 10, cv::Point2f(0.5f, 0.5f)));
        transformations.push_back(new ImageScalingTransformation(0.25f, 2.0f, 0.1f));
    }

    if (argc < 2)
    {
        std::cout << "At least one input image should be passed" << std::endl;
        return 1;
    }

    for (int imageIndex = 1; imageIndex < argc; imageIndex++)
    {
        std::string testImagePath(argv[imageIndex]);
        cv::Mat testImage = cv::imread(testImagePath);

        CollectedStatistics fullStat;

        if (testImage.empty())
        {
            std::cout << "Cannot read image from " << testImagePath << std::endl;
            continue;
        }

        for (size_t algIndex = 0; algIndex < algorithms.size(); algIndex++)
        {
            const FeatureAlgorithm& alg   = algorithms[algIndex];
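            // Benchmark this detector/descriptor/matcher combination against every
            // transformation of the current image; results accumulate in fullStat.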

            std::cout << "Testing " << alg.name << "...";

            for (size_t transformIndex = 0; transformIndex < transformations.size(); transformIndex++)
            {
                const ImageTransformation& trans = *transformations[transformIndex].obj;

                performEstimation(alg, trans, testImage.clone(), fullStat.getStatistics(alg.name, trans.name));
            }

            std::cout << "done." << std::endl;
        }

        // Write each collected metric to its own CSV file. Note that the streams
        // are reopened for every input image, so the files end up holding the
        // statistics of the last processed image only.
        std::ofstream performance("Performance.csv");
        fullStat.printPerformanceStatistics(performance);

        std::ofstream matchingRatio("MatchingRatio.csv");
        fullStat.printStatistics(matchingRatio, StatisticsElementMatchingRatio);

        std::ofstream percentOfMatches("PercentOfMatches.csv");
        fullStat.printStatistics(percentOfMatches, StatisticsElementPercentOfMatches);

        std::ofstream percentOfCorrectMatches("PercentOfCorrectMatches.csv");
        fullStat.printStatistics(percentOfCorrectMatches, StatisticsElementPercentOfCorrectMatches);

        std::ofstream meanDistance("MeanDistance.csv");
        fullStat.printStatistics(meanDistance, StatisticsElementMeanDistance);
    }

    return 0;
}