Example #1
// Project-specific helpers used below (computeCenters, bag, bottomUp2,
// normalise, mul, irComparer2, paintingSearch) come from the project's own headers.
#include <opencv2/opencv.hpp>
#include <algorithm>
#include <ctime>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

using namespace cv;
using namespace std;

int main(int, char**)
{
	string imagesDirectory = "../../data/Louvre/samples/";
	string descriptorsDirectory = "../../descriptors/Louvre/FULL/";
	string vocFileName = "../../descriptors/words/ORB+ORB+100000.yml";
	string histsFileName = "../../descriptors/words/samples-hists100000-bis.yml";
	string upFileName = "../../descriptors/words/ORB1000+ORB+1000.yml";
	string indexFileName = "../../descriptors/words/indexes.yml";
	string matsFileName = "../../descriptors/words/mats.yml";

	// Pre-compute step: run once, then reuse the files written to disk.
	// (Presumably: computeCenters clusters ORB descriptors from the sample images
	// into 100000 visual words, bag builds one histogram per sample image, and
	// bottomUp2 builds the two-level index that is loaded just below.)

	//computeCenters(imagesDirectory, descriptorsDirectory, vocFileName, 100000);

	//bag(imagesDirectory, descriptorsDirectory, vocFileName, histsFileName);

	//upToBottom = bottomUp2(500, 2, vocFileName, upFileName, matsFileName, indexFileName);

	vector<Mat> mats;
	Mat upToBottom;
	
	FileStorage ifs(indexFileName, FileStorage::READ);
	ifs["index"] >> upToBottom;
	ifs.release();
	FileStorage mfs(matsFileName, FileStorage::READ);
	read(mfs["mats"], mats);
	mfs.release();


	//The histograms
	SparseMat hists;
	
	
	FileStorage fs(histsFileName, FileStorage::READ);
	fs["hists"] >> hists;
	fs.release();

	//BOW
	//IndexParams* indexParams = new LshIndexParams(6, 12, 1);
	//Ptr<DescriptorMatcher> matcher(new FlannBasedMatcher(indexParams));
	//Ptr<DescriptorExtractor> descex(new ORB(1000));
	//Ptr<DescriptorMatcher> matcher(new BFMatcher(NORM_HAMMING));

	//Mat vocabulary;
	//FileStorage voc(vocFileName, FileStorage::READ);
	//voc["centers"] >> vocabulary;
	//voc.release();
	//vector<Mat> vocs;
	//vocs.push_back(vocabulary);
	/*matcher->add(vocs);
	matcher->train();*/
	//BOWImgDescriptorExtractor bow(descex, matcher);
	//bow.setVocabulary(vocabulary);
	
	// Only used by the commented-out paintingSearch call at the end of the loop
	vector<KeyPoint> kp;
	vector<vector<int> > hist;
	const int numberOfInput = 10;
	string inputs[numberOfInput] = {
		"../input/cc.jpg",
		"../input/ex2.jpg",
		"../input/woman1.jpg",
		"../input/woman2.jpg",
		"../input/liberte-glass.jpg",
		"../input/chartres-input.jpg",
		"../input/corot-pearl-input.jpg",
		"../input/meduse.jpg",
		"../input/lebrun-input.jpg",
		"../input/lebrun2-input.jpg"};

	Mat up;
	FileStorage u(upFileName, FileStorage::READ);
	u["centers"] >> up;
	u.release();
	cout << up.cols << " " << up.rows << " " << up.type() << endl;
	for(int k=0; k<numberOfInput; k++)
	{
		clock_t start = clock();

		cout << inputs[k] << endl;
		Mat input = imread(inputs[k], CV_LOAD_IMAGE_GRAYSCALE);
		if(input.empty())
			exit(-1);
		
		Mat descriptors;
		vector<KeyPoint> keypoints;
		ORB orb;
		orb.detect(input, keypoints);
		orb.compute(input, keypoints, descriptors);

		// First-level match: assign each descriptor to its nearest top-level centre
		vector<DMatch> upMatches;
		BFMatcher(NORM_HAMMING).match(descriptors, up, upMatches);
		cout << ( clock() - start ) / (double) CLOCKS_PER_SEC << endl;
		
		// One bin per visual word (the 100000-word vocabulary)
		vector<vector<int> > hist;
		for(int i=0; i<100000; i++)
			hist.push_back(vector<int>());
		
		for(int i=0; i<descriptors.rows; i++)
		{
			// Second-level match: search only the words under the matched top-level centre
			vector<DMatch> bottomMatch;
			BFMatcher(NORM_HAMMING).match(descriptors.row(i), mats[upMatches[i].trainIdx], bottomMatch);
			hist[upToBottom.at<int>(upMatches[i].trainIdx, bottomMatch[0].trainIdx)].push_back(i);
		}

		cout << ( clock() - start ) / (double) CLOCKS_PER_SEC << " ... second matching" << endl;

		Mat nhist = normalise(hist);
		vector<pair<int, float> > imageResponse;

		Mat answer = mul(hists, nhist);

		for(int i=0; i<answer.rows; i++)
		{
			imageResponse.push_back(pair<int, float>(i, answer.at<float>(i)));
		}

		std::sort(imageResponse.begin(), imageResponse.end(), irComparer2);
		cout << ( clock() - start ) / (double) CLOCKS_PER_SEC << endl;
		cout << imageResponse[0].first << " " << 
			imageResponse[1].first << " " <<
			imageResponse[2].first << " " <<
			imageResponse[3].first << " " <<
			imageResponse[4].first << endl;
		
		//vector<pair<int, float> > pss = paintingSearch(input, hists, bow, kp, hist);

		//for(unsigned i=0; i<5; i++)
		//	cout << (pss[i]).first << endl;

		cin.ignore();	// wait for Enter before processing the next query image
	}
	return 0;
}
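The helpers normalise, mul and irComparer2 are part of the project and are not shown in this listing. Below is a minimal sketch of what they could look like, inferred only from how the example uses them (the histogram is built as per-word lists of descriptor indices, scores are read back as answer.at<float>(i), and the best-scoring images are printed first); the real project code may differ.

#include <opencv2/opencv.hpp>
#include <utility>
#include <vector>
using namespace cv;
using namespace std;

// L1-normalised term-frequency row: one column per visual word
Mat normalise(const vector<vector<int> >& hist)
{
	Mat row = Mat::zeros(1, (int)hist.size(), CV_32F);
	float total = 0.f;
	for(size_t w = 0; w < hist.size(); ++w)
	{
		row.at<float>(0, (int)w) = (float)hist[w].size();
		total += (float)hist[w].size();
	}
	if(total > 0.f)
		row *= 1.0 / total;
	return row;
}

// Dot product of the query row against every stored histogram
// (one row of the SparseMat per database image)
Mat mul(const SparseMat& hists, const Mat& nhist)
{
	Mat answer = Mat::zeros(hists.size(0), 1, CV_32F);
	SparseMatConstIterator_<float> it = hists.begin<float>(), end = hists.end<float>();
	for( ; it != end; ++it)
	{
		const SparseMat::Node* n = it.node();
		answer.at<float>(n->idx[0]) += (*it) * nhist.at<float>(0, n->idx[1]);
	}
	return answer;
}

// Order image responses from best to worst
bool irComparer2(const pair<int, float>& a, const pair<int, float>& b)
{
	return a.second > b.second;
}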
Example #2
File: detect.cpp Project: rdspring1/RPi
// Assumes file-scope state defined elsewhere in detect.cpp: match_list (sliding
// window of per-frame hit/miss results), match_count, and the tuning constants
// MAX_MATCH_COUNT, MIN_MATCH_COUNT and MATCH_THRESHOLD.
void processImage(ORB& detector, std::vector<KeyPoint> keypoints_object, Mat& descriptors_object, Mat& img_object, Mat& img_scene)
{
	//-- Step 1: Detect the keypoints using ORB Detector
	std::vector<KeyPoint> keypoints_scene;
	detector.detect( img_scene, keypoints_scene );

	//-- Step 2: Calculate descriptors (feature vectors)
	Mat descriptors_scene;
	detector.compute( img_scene, keypoints_scene, descriptors_scene );
	// ORB descriptors are binary (CV_8U); convert to CV_32F so the default
	// FLANN matcher (KD-tree over L2 distance) will accept them
	descriptors_scene.convertTo(descriptors_scene, CV_32F);
	if(descriptors_scene.empty())
	{
		//throw std::runtime_error("Missing Scene Descriptors");
		imshow( "Camera", img_scene );
		return;
	}

	//-- Step 3: Matching descriptor vectors using FLANN matcher
	FlannBasedMatcher matcher;
	std::vector< DMatch > matches;
	matcher.match( descriptors_object, descriptors_scene, matches );

	// m1 - main match / m2 - closest neighbor
	//std::vector< std::vector< DMatch > > matches;
	//matcher.knnMatch( descriptors_object, descriptors_scene, matches, k );

	std::sort(matches.begin(), matches.end(), 
			[](const DMatch& l, const DMatch& r) -> bool
			{
			return l.distance < r.distance;
			});

	//-- Quick calculation of max and min distances between keypoints
	//double max_dist = matches[matches.size()-1].distance; 
	//printf("-- Max dist : %f \n", max_dist );

	// Cap min_dist so the good-match threshold below does not scale up when
	// even the best match is a poor one
	double min_dist = std::min(200.0f, matches[0].distance);
	//printf("-- Min dist : %f \n", min_dist );

	/*
	   double average = 0;
	   for( int i = 0; i < descriptors_object.rows; i++ )
	   { 
	   average += matches[i].distance;
	   }
	   average /= descriptors_object.rows;
	   printf("-- Avg dist : %f \n", average);

	   double sd = 0;
	   for( int i = 0; i < descriptors_object.rows; i++ )
	   { 
	   sd += pow((matches[i].distance - average), 2.0f);
	   }
	   sd /= descriptors_object.rows;
	   printf("-- Avg dist : %f \n", sd );
	 */

	//-- Draw only "good" matches - top N matches
	std::vector< DMatch > good_matches;
	for( unsigned i = 0; i < matches.size() && i < MAX_MATCH_COUNT; ++i )
	{
		if(matches[i].distance < 1.15 * min_dist)
		{
			good_matches.push_back(matches[i]); 
		}
	}

	Mat img_matches;
	drawMatches( img_object, keypoints_object, img_scene, keypoints_scene,
			good_matches, img_matches, Scalar::all(-1), Scalar::all(-1),
			vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );

	// Slide the match window: retire the oldest frame's result
	if(match_list.front())
	{
		--match_count;
	}
	match_list.pop_front();

	if(good_matches.size() > MIN_MATCH_COUNT)
	{
		match_list.push_back(true);
		++match_count;

		//std::cout << "-- matches : " << good_matches.size() << std::endl;
		std::vector<Point2f> scene;
		for( unsigned i = 0; i < good_matches.size(); i++ )
		{
			//-- Get the keypoints from the good matches
			scene.push_back( keypoints_scene[ good_matches[i].trainIdx ].pt );
		}

		std::vector<Point2f> hull;
		convexHull(scene, hull);

		// Outline the detected object in the scene half of the composite image
		for(unsigned i = 0; i < hull.size()-1; ++i)
		{
			line( img_matches, hull[i] + Point2f( img_object.cols, 0), hull[i+1] + Point2f( img_object.cols, 0), Scalar(0, 255, 0), 4 );
		}
		line( img_matches, hull[hull.size()-1] + Point2f( img_object.cols, 0), hull[0] + Point2f( img_object.cols, 0), Scalar(0, 255, 0), 4 );
	}
	else
	{
		match_list.push_back(false);
	}

	if(match_count >= MATCH_THRESHOLD)
	{	
		std::cout << "MATCH DETECTED: " << match_count << std::endl;
	}
	else
	{
		std::cout << "NO MATCH: " << match_count << std::endl;
	}

	//-- Show detected matches
	imshow( "Camera", img_matches );
}
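processImage() relies on file-scope state (match_list, match_count) and tuning constants (MAX_MATCH_COUNT, MIN_MATCH_COUNT, MATCH_THRESHOLD) that this excerpt does not show. Below is a minimal sketch of a capture loop that could drive it; the constant values, window length, reference image name and camera index are illustrative assumptions, not taken from the rdspring1/RPi project.

#include <opencv2/opencv.hpp>
#include <deque>
#include <vector>
using namespace cv;

// Assumed values; the real detect.cpp may define these differently.
const unsigned MAX_MATCH_COUNT = 50;   // keep at most the top-N matches
const unsigned MIN_MATCH_COUNT = 8;    // matches needed to call a frame a hit
const int MATCH_THRESHOLD = 5;         // hits needed inside the sliding window
std::deque<bool> match_list;           // per-frame hit/miss history
int match_count = 0;                   // hits currently inside the window

// Defined above in detect.cpp
void processImage(ORB& detector, std::vector<KeyPoint> keypoints_object, Mat& descriptors_object, Mat& img_object, Mat& img_scene);

int main()
{
	Mat img_object = imread("object.jpg", CV_LOAD_IMAGE_GRAYSCALE);  // assumed reference image
	if(img_object.empty())
		return -1;

	ORB detector(500);
	std::vector<KeyPoint> keypoints_object;
	Mat descriptors_object;
	detector.detect(img_object, keypoints_object);
	detector.compute(img_object, keypoints_object, descriptors_object);
	// processImage() feeds float descriptors to FlannBasedMatcher, so the
	// object descriptors must be converted the same way as the scene ones.
	descriptors_object.convertTo(descriptors_object, CV_32F);

	// Seed the sliding window so match_list.front()/pop_front() are always valid.
	const size_t WINDOW_SIZE = 10;     // assumed window length
	match_list.assign(WINDOW_SIZE, false);

	VideoCapture cap(0);
	Mat frame, gray;
	while(cap.read(frame))
	{
		cvtColor(frame, gray, CV_BGR2GRAY);
		processImage(detector, keypoints_object, descriptors_object, img_object, gray);
		if(waitKey(1) == 27)           // Esc quits
			break;
	}
	return 0;
}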