bool ImageTransformation::findHomography( const Keypoints& source, const Keypoints& result, const Matches& input, Matches& inliers, cv::Mat& homography)
{
    if (input.size() < 8)
        return false;

    std::vector<cv::Point2f> srcPoints, dstPoints;
    const int pointsCount = input.size();

    for (int i = 0; i < pointsCount; i++)
    {
        srcPoints.push_back(source[input[i].trainIdx].pt);
        dstPoints.push_back(result[input[i].queryIdx].pt);
    }

    // Estimate the homography with RANSAC and keep the result in the output parameter
    std::vector<unsigned char> status;
    homography = cv::findHomography(srcPoints, dstPoints, cv::RANSAC, 3, status);

    // Keep only the matches RANSAC marked as inliers
    inliers.clear();
    for (int i = 0; i < pointsCount; i++)
    {
        if (status[i])
        {
            inliers.push_back(input[i]);
        }
    }

    return true;
}
bool computeMatchesDistanceStatistics(const Matches& matches, float& meanDistance, float& stdDev)
{
    if (matches.empty())
        return false;

    std::vector<float> distances(matches.size());
    for (size_t i = 0; i < matches.size(); i++)
        distances[i] = matches[i].distance;

    cv::Scalar mean, dev;
    cv::meanStdDev(distances, mean, dev);

    meanDistance = static_cast<float>(mean.val[0]);
    stdDev = static_cast<float>(dev.val[0]);

    return true;
}
/*
 * Find exact matches for the semantic descriptor in the database
 */
bool SemanticDescriptor::findExactMatches(Database& db, Matches& matches)
{
    SemanticDescriptor compareID;
    std::map<SemanticDescriptor, std::string>::iterator iter;

    for (iter = db.dDB.begin(); iter != db.dDB.end(); ++iter)
    {
        compareID = iter->first;
        if (equals(compareID, db))
        {
            matches.insert(iter->second);
        }
    }

    return !matches.empty();
}
bool ImageTransformation::findHomography( const Keypoints& source, const Keypoints& result, const Matches& input, Matches& inliers, cv::Mat& homography)
{
    if (input.size() < 4)
        return false;

    const int pointsCount = input.size();
    const float reprojectionThreshold = 2;

    // Prepare src and dst points
    std::vector<cv::Point2f> srcPoints, dstPoints;
    for (int i = 0; i < pointsCount; i++)
    {
        srcPoints.push_back(source[input[i].trainIdx].pt);
        dstPoints.push_back(result[input[i].queryIdx].pt);
    }

    // Find homography using RANSAC algorithm
    std::vector<unsigned char> status;
    homography = cv::findHomography(srcPoints, dstPoints, cv::RANSAC, reprojectionThreshold, status);

    // Warp dstPoints to srcPoints domain using inverted homography transformation
    std::vector<cv::Point2f> srcReprojected;
    cv::perspectiveTransform(dstPoints, srcReprojected, homography.inv());

    // Pass only matches with low reprojection error (less than reprojectionThreshold value in pixels)
    inliers.clear();
    for (int i = 0; i < pointsCount; i++)
    {
        cv::Point2f actual = srcPoints[i];
        cv::Point2f expect = srcReprojected[i];
        cv::Point2f v = actual - expect;
        float distanceSquared = v.dot(v);

        if (/*status[i] && */ distanceSquared <= reprojectionThreshold * reprojectionThreshold)
        {
            inliers.push_back(input[i]);
        }
    }

    // Test for bad case
    if (inliers.size() < 4)
        return false;

    // Now use only good points to find refined homography:
    std::vector<cv::Point2f> refinedSrc, refinedDst;
    for (size_t i = 0; i < inliers.size(); i++)
    {
        refinedSrc.push_back(source[inliers[i].trainIdx].pt);
        refinedDst.push_back(result[inliers[i].queryIdx].pt);
    }

    // Use least squares method to find precise homography
    cv::Mat homography2 = cv::findHomography(refinedSrc, refinedDst, 0, reprojectionThreshold);

    // Reproject again:
    cv::perspectiveTransform(dstPoints, srcReprojected, homography2.inv());
    inliers.clear();
    for (int i = 0; i < pointsCount; i++)
    {
        cv::Point2f actual = srcPoints[i];
        cv::Point2f expect = srcReprojected[i];
        cv::Point2f v = actual - expect;
        float distanceSquared = v.dot(v);

        if (distanceSquared <= reprojectionThreshold * reprojectionThreshold)
        {
            inliers.push_back(input[i]);
        }
    }

    homography = homography2;
    return inliers.size() >= 4;
}
bool performEstimation
(
    const FeatureAlgorithm& alg,
    const ImageTransformation& transformation,
    const cv::Mat& sourceImage,
    std::vector<FrameMatchingStatistics>& stat
)
{
    Keypoints   sourceKp;
    Descriptors sourceDesc;

    cv::Mat gray;
    if (sourceImage.channels() == 3)
        cv::cvtColor(sourceImage, gray, cv::COLOR_BGR2GRAY);
    else if (sourceImage.channels() == 4)
        cv::cvtColor(sourceImage, gray, cv::COLOR_BGRA2GRAY);
    else if (sourceImage.channels() == 1)
        gray = sourceImage;

    if (!alg.extractFeatures(gray, sourceKp, sourceDesc))
        return false;

    std::vector<float> x = transformation.getX();
    stat.resize(x.size());

    const int count = x.size();

    cv::Mat     transformedImage;
    Keypoints   resKpReal;
    Descriptors resDesc;
    Matches     matches;

    // To convert ticks to milliseconds
    const double toMsMul = 1000. / cv::getTickFrequency();

    #pragma omp parallel for private(transformedImage, resKpReal, resDesc, matches)
    for (int i = 0; i < count; i++)
    {
        float arg = x[i];
        FrameMatchingStatistics& s = stat[i];

        transformation.transform(arg, gray, transformedImage);

        int64 start = cv::getTickCount();

        alg.extractFeatures(transformedImage, resKpReal, resDesc);

        // Initialize required fields
        s.isValid       = resKpReal.size() > 0;
        s.argumentValue = arg;

        if (!s.isValid)
            continue;

        if (alg.knMatchSupported)
        {
            std::vector<Matches> knMatches;
            alg.matchFeatures(sourceDesc, resDesc, 2, knMatches);
            ratioTest(knMatches, 0.75, matches);

            // Compute percent of false matches that were rejected by ratio test
            s.ratioTestFalseLevel = (float)(knMatches.size() - matches.size()) / (float)knMatches.size();
        }
        else
        {
            alg.matchFeatures(sourceDesc, resDesc, matches);
        }

        int64 end = cv::getTickCount();

        Matches correctMatches;
        cv::Mat homography;
        bool homographyFound = ImageTransformation::findHomography(sourceKp, resKpReal, matches, correctMatches, homography);

        // Some simple stat:
        s.isValid        = homographyFound;
        s.totalKeypoints = resKpReal.size();
        s.consumedTimeMs = (end - start) * toMsMul;

        // Compute overall percent of matched keypoints (guard against an empty match set)
        s.percentOfMatches      = (float)matches.size() / (float)(std::min(sourceKp.size(), resKpReal.size()));
        s.correctMatchesPercent = matches.empty() ? 0 : (float)correctMatches.size() / (float)matches.size();

        // Compute matching statistics
        computeMatchesDistanceStatistics(correctMatches, s.meanDistance, s.stdDevDistance);
    }

    return true;
}
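The ratioTest helper called in performEstimation above is not part of this listing. Below is a minimal sketch of Lowe's ratio test, assuming Matches is a std::vector<cv::DMatch> and each entry of knMatches holds the two best candidate matches for one query descriptor; the original implementation may differ.

// Hypothetical sketch of a ratio-test helper (not the original implementation).
// Keeps a match only when its best candidate is clearly better than the second best.
void ratioTest(const std::vector<Matches>& knMatches, float maxRatio, Matches& goodMatches)
{
    goodMatches.clear();
    for (size_t i = 0; i < knMatches.size(); i++)
    {
        const Matches& candidates = knMatches[i];
        if (candidates.size() < 2)
            continue;

        if (candidates[0].distance < maxRatio * candidates[1].distance)
            goodMatches.push_back(candidates[0]);
    }
}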
int main()
{
    cv::VideoCapture cap("../data/guitar_scene.mp4");
    if (!cap.isOpened())
    {
        cerr << "Camera/Video was not opened\n";
        return 1;
    }

    //SystemAlgorithms algorithms(new SIFTDetector(500), new SIFTExtractor, new BruteForceMatcher(cv::NORM_L2), new LucasKanadeAlgorithm);
    SystemAlgorithms algorithms = SystemAlgorithms::Create(false, true);
    HybridTracker tracker(algorithms);

    Marker target = tracker.Registry(PreMarker("../data/guitar_object.jpg", nullptr));
    Size2i targetSize = target.GetSize();

    // Target corners in clockwise order, starting at the top-left
    vector<cv::Point2f> targetCorner(4);
    targetCorner[0] = cv::Point2f(0.0, 0.0);
    targetCorner[1] = cv::Point2f(targetSize.width, 0.0);
    targetCorner[2] = cv::Point2f(targetSize.width, targetSize.height);
    targetCorner[3] = cv::Point2f(0.0, targetSize.height);

    uint numFrames = 0;
    double time = (double)cv::getTickCount();

    Frame frame;
    Matches matches;
    while (true)
    {
        cap >> frame.image;
        if (frame.image.empty() and numFrames == 0)
            continue;
        if (frame.image.empty())
            break;
        numFrames++;

        tracker.Update(frame);
        matches = tracker.Find(target, frame);

        if (matches.size() > 20)
            target.SetLost(false);
        else
            target.SetLost(true);

        if (matches.size() >= 4)
        {
            vector<Point2f> corners;
            Mat homography = cv::findHomography(matches.targetPts(), matches.scenePts(), cv::RANSAC, 4);
            cv::perspectiveTransform(targetCorner, corners, homography);

            if (!corners.empty())
            {
                cv::line(frame.image, corners[0], corners[1], cv::Scalar(0, 255, 0), 4);
                cv::line(frame.image, corners[1], corners[2], cv::Scalar(0, 255, 0), 4);
                cv::line(frame.image, corners[2], corners[3], cv::Scalar(0, 255, 0), 4);
                cv::line(frame.image, corners[3], corners[0], cv::Scalar(0, 255, 0), 4);
            }
        }

        for (auto p : matches.scenePts())
        {
            cv::circle(frame.image, p, 3, cv::Scalar(0, 255, 0), 1);
        }

        cv::imshow("Tracker Test", frame.image);

        char key = cv::waitKey(1);
        if (key == 0x1B) // ESC
            break;
    }

    time = (double)(cv::getTickCount() - time) / cv::getTickFrequency();
    cout << (numFrames / time) << " fps\n";

    return 0;
}
int main( int argc, char **argv )
{
    float userTime;
    struct rusage startTime, stopTime;

    getrusage( RUSAGE_SELF, &startTime );

    if ( argc != 4 )
    {
        std::cout << "Usage: " << argv[0] << " <k> <data file> <query file>" << std::endl << std::endl;
        return 1;
    }

    unsigned k = atoi( argv[1] );
    std::string line;

    std::cout << "Reading the dataset ........ ";
    fflush( stdout );

    NumTable db;
    std::ifstream dict;
    dict.open( argv[2], std::ifstream::in );
    while ( getline( dict, line ) )
    {
        Number h;
        std::istringstream reader( line );
        reader >> h;
        db.push_back( h );
    }

    NumTable q;
    std::ifstream qdict;
    qdict.open( argv[3], std::ifstream::in );
    while ( getline( qdict, line ) )
    {
        Number h;
        std::istringstream reader( line );
        reader >> h;
        q.push_back( h );
    }

    std::cout << "done. " << db.size() << " db hashes and " << q.size() << " query hashes." << std::endl;
    std::cout << "Building with " << k << " hamming distance bound ....... ";
    fflush( stdout );

    HEngine_sn e( k );
    e.build( db );
    std::cout << "done." << std::endl;

    getrusage( RUSAGE_SELF, &stopTime );
    userTime = ( (float) ( stopTime.ru_utime.tv_sec - startTime.ru_utime.tv_sec ) )
             + ( (float) ( stopTime.ru_utime.tv_usec - startTime.ru_utime.tv_usec ) ) * 1e-6;
    std::cout << std::endl;
    std::cout << "Building time: " << userTime << " seconds" << std::endl;
    std::cout << std::endl;

    std::cout << "Searching HEngine matches ......." << std::endl;
    getrusage( RUSAGE_SELF, &startTime );
    int c = 0;
    for ( auto &h: q )
    {
        Matches res = e.query( h );
        c += res.size();
    }
    getrusage( RUSAGE_SELF, &stopTime );
    userTime = ( (float) ( stopTime.ru_utime.tv_sec - startTime.ru_utime.tv_sec ) )
             + ( (float) ( stopTime.ru_utime.tv_usec - startTime.ru_utime.tv_usec ) ) * 1e-6;
    std::cout << "found " << c << " total matches. HEngine query time: " << userTime << " seconds" << std::endl << std::endl;

    std::cout << "Searching linear matches ......." << std::endl;
    getrusage( RUSAGE_SELF, &startTime );
    c = 0;
    for ( auto &item: db )
    {
        for ( auto &h: q )
        {
            unsigned d = HEngine::getHammingDistance( h, item );
            if ( d <= k )
            {
                c++;
            }
        }
    }
    getrusage( RUSAGE_SELF, &stopTime );
    userTime = ( (float) ( stopTime.ru_utime.tv_sec - startTime.ru_utime.tv_sec ) )
             + ( (float) ( stopTime.ru_utime.tv_usec - startTime.ru_utime.tv_usec ) ) * 1e-6;
    std::cout << "found " << c << " total matches. Linear query time: " << userTime << " seconds" << std::endl << std::endl;

    std::cout << std::endl;

    return 0;
}
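HEngine::getHammingDistance, used for the linear baseline above, is not shown in this listing. The following is a minimal sketch, assuming Number is a 64-bit unsigned integer type; the actual HEngine implementation may differ.

#include <cstdint>

// Hypothetical sketch of a Hamming-distance helper (not the original HEngine code).
// Counts the number of differing bits between two 64-bit hashes.
static unsigned getHammingDistance( std::uint64_t a, std::uint64_t b )
{
    std::uint64_t x = a ^ b;   // bits that differ
    unsigned count = 0;
    while ( x )
    {
        x &= x - 1;            // clear the lowest set bit
        count++;
    }
    return count;
}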
void Solution::initMyMatches(const Matches& matches)
{
    // Index each match by the route id of its driver
    for (size_t ii = 0; ii < matches.size(); ii++)
    {
        myMatches[matches[ii].myDriver.routeId] = matches[ii];
    }
}
int main()
{
    std::string port = ":9000";
    int listenQueueBacklog = 400;

    if ( FCGX_Init() )
    {
        exit( 1 );
    }

    int listen_socket = FCGX_OpenSocket( port.c_str(), listenQueueBacklog );
    if ( listen_socket < 0 )
    {
        exit( 1 );
    }

    FCGX_Request request;
    if ( FCGX_InitRequest( &request, listen_socket, 0 ) )
    {
        exit( 1 );
    }

    std::string header = "Content-type: text/html\r\n\r\n";

    NumTable db;
    std::map<Number, Number> posts;

    // Load all hashes and their track ids from MySQL
    try
    {
        sql::Driver *driver;
        sql::Connection *con;
        sql::Statement *stmt;
        sql::ResultSet *res;

        driver = get_driver_instance();
        con = driver->connect( "tcp://127.0.0.1:3306", "wiki_bot", "31415" );
        con->setSchema( "orpheus" );

        stmt = con->createStatement();
        res = stmt->executeQuery( "SELECT hash, track_id FROM hashes_all" );
        while ( res->next() )
        {
            Number h = res->getUInt64( "hash" );
            db.push_back( h );
            posts[h] = res->getUInt64( "track_id" );
        }

        delete res;
        delete stmt;
        delete con;
    }
    catch ( sql::SQLException &e )
    {
        std::cout << "# ERR: SQLException in " << __FILE__;
        std::cout << "(" << __FUNCTION__ << ") on line " << __LINE__ << std::endl;
        std::cout << "# ERR: " << e.what();
        std::cout << " (MySQL error code: " << e.getErrorCode();
        std::cout << ", SQLState: " << e.getSQLState() << " )" << std::endl;
    }

    std::cout << "Start building." << std::endl;
    HEngine_sn e( 7 );
    e.build( db );
    std::cout << "Building done." << std::endl;

    while ( FCGX_Accept_r( &request ) == 0 )
    {
        std::cout << "Have request. " << std::endl;

        NumTable q;
        Number hash;

        // QUERY_STRING may be absent; avoid constructing std::string from a null pointer
        const char *qs = FCGX_GetParam( "QUERY_STRING", request.envp );
        std::string query = qs ? qs : "";

        // Parse the comma-separated list of query hashes
        std::stringstream ss( query );
        while ( ss >> hash )
        {
            q.push_back( hash );
            if ( ss.peek() == ',' )
            {
                ss.ignore();
            }
        }

        std::string body = header;
        int c = 0;
        for ( auto &h: q )
        {
            Matches res = e.query( h );
            c += res.size();
            for ( auto &r: res )
            {
                body += std::to_string( posts[r.first] ) + ":" + std::to_string( r.first ) + ":" + std::to_string( r.second ) + "<br/>";
            }
        }

        FCGX_PutS( body.c_str(), request.out );
        FCGX_Finish_r( &request );
    }

    return 0;
}