static void align_2nd_to_1st_img(Mat& img1, Mat& img2) {
    // Detect keypoints and compute their descriptors (feature vectors)
    std::vector<KeyPoint> keyPoints1, keyPoints2;
    Mat descriptor1, descriptor2;

    OrbFeatureDetector detector(5000);
    detector.detect(img1, keyPoints1);
    detector.detect(img2, keyPoints2);

    OrbDescriptorExtractor extractor;
    extractor.compute(img1, keyPoints1, descriptor1);
    extractor.compute(img2, keyPoints2, descriptor2);

    // Match descriptor vectors: ORB descriptors are binary, so use Hamming
    // distance, and take the 2 nearest neighbours for the ratio test
    BFMatcher matcher(NORM_HAMMING);
    std::vector<std::vector<DMatch> > matches;
    matcher.knnMatch(descriptor2, descriptor1, matches, 2);

    // Ratio test: keep a match only if it is clearly better than the runner-up
    const float rejectRatio = 0.8f;
    std::vector<DMatch> good_matches;
    for (size_t i = 0; i < matches.size(); i++) {
        if (matches[i].size() < 2)
            continue;
        if (matches[i][0].distance / matches[i][1].distance > rejectRatio)
            continue;
        good_matches.push_back(matches[i][0]);
    }

    // img2 was the query set, img1 the train set
    std::vector<Point2f> good_keyPoints1, good_keyPoints2;
    for (size_t i = 0; i < good_matches.size(); i++) {
        good_keyPoints1.push_back(keyPoints1[good_matches[i].trainIdx].pt);
        good_keyPoints2.push_back(keyPoints2[good_matches[i].queryIdx].pt);
    }

    // findHomography needs at least 4 point pairs
    if (good_matches.size() < 4)
        return;

    Mat H = findHomography( good_keyPoints2, good_keyPoints1, CV_RANSAC );
    // warpPerspective does not support in-place operation: warp into a
    // temporary, then replace img2
    Mat warped;
    warpPerspective(img2, warped, H, img1.size(), INTER_NEAREST);
    img2 = warped;
}
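// A minimal usage sketch for the helper above (hypothetical file names and
// demo function; assumes the OpenCV 2.4-style API used throughout).
void align_demo() {
    Mat ref = imread("frame_ref.png", CV_LOAD_IMAGE_GRAYSCALE); // reference image
    Mat mov = imread("frame_mov.png", CV_LOAD_IMAGE_GRAYSCALE); // image to align
    if (ref.empty() || mov.empty())
        return;
    align_2nd_to_1st_img(ref, mov);              // mov is warped onto ref in place
    Mat blend;
    addWeighted(ref, 0.5, mov, 0.5, 0.0, blend); // quick visual check of the alignment
    imshow("alignment check", blend);
    waitKey(0);
}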
int main( int argc, char** argv )
{
  if( argc != 3 )
  { readme(); return -1; }

  Mat img_1 = imread( argv[1], CV_LOAD_IMAGE_GRAYSCALE );
  Mat img_2 = imread( argv[2], CV_LOAD_IMAGE_GRAYSCALE );

  if( !img_1.data || !img_2.data )
  { std::cout<< " --(!) Error reading images " << std::endl; return -1; }

  //-- Step 1: Detect the keypoints using ORB Detector

  ORB detector(1000,1.1f,8,31,0,2,ORB::HARRIS_SCORE,31);

  std::vector<KeyPoint> keypoints_1, keypoints_2;

  detector.detect( img_1, keypoints_1 );
  detector.detect( img_2, keypoints_2 );

  //-- Step 2: Calculate descriptors 
  OrbDescriptorExtractor extractor;

  Mat descriptors_1, descriptors_2;

  extractor.compute( img_1, keypoints_1, descriptors_1 );
  extractor.compute( img_2, keypoints_2, descriptors_2 );

  //-- Step 3: Matching descriptor vectors using FLANN matcher or BruteForce
  //FlannBasedMatcher matcher;
  // BFMatcher matcher(NORM_L2, true);
  Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce-Hamming");


  

  vector< vector<DMatch> > matches, matches2;
  // k = 2 nearest neighbours per query descriptor (was 500, which returns
  // far more candidates than the ratio/symmetry tests need)
  matcher->knnMatch( descriptors_1, descriptors_2, matches, 2 );
  matcher->knnMatch( descriptors_2, descriptors_1, matches2, 2 );
  if( matches.empty() || matches2.empty() )
  { std::cout << " --(!) No matches found " << std::endl; return -1; }
  cout << "==" << matches.size() << ", " << matches[0].size() << endl;
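 
  // (Sketch) the two knnMatch calls above are computed in both directions
  // but never combined. A common follow-up is a symmetry (cross-check)
  // test: keep a pair only if the best match 1->2 and the best match 2->1
  // point at each other. Purely illustrative; `symmetric` is not used below.
  vector<DMatch> symmetric;
  for (size_t i = 0; i < matches.size(); i++)
  {
    if (matches[i].empty()) continue;
    const DMatch& ab = matches[i][0];                    // img_1 -> img_2
    if (ab.trainIdx < 0 || ab.trainIdx >= (int)matches2.size()) continue;
    if (matches2[ab.trainIdx].empty()) continue;
    const DMatch& ba = matches2[ab.trainIdx][0];         // img_2 -> img_1
    if (ba.trainIdx == ab.queryIdx)                      // mutual agreement
      symmetric.push_back(ab);
  }
  cout << "symmetric matches: " << symmetric.size() << endl;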
 
  //-- Draw matches
  // Mat img_matches, img_matches2;
  // drawMatches( img_1, keypoints_1, img_2, keypoints_2,matches, img_matches, Scalar::all(-1), Scalar::all(-1),
  //              vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );
  // drawMatches( img_1, keypoints_1, img_2, keypoints_2,matches2, img_matches2, Scalar::all(-1), Scalar::all(-1),
  //              vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );
  // cout << "Matches1-2:" << matches.size() << endl;
  // cout << "Matches2-1:" << matches2.size() <<endl;

  // imshow("Matches result 1-2",img_matches);
  // imshow("Matches result 2-1", img_matches2);
  waitKey(0);

  return 0;
}
void FeatureMatchThread::run()
{
	Mat resultImg;
	Mat grayImg;

	cvtColor(cropImg,grayImg,CV_BGR2GRAY);
	featureDetector.detect(grayImg,keyPoints);
	featureExtractor.compute(grayImg,keyPoints,descriptors);
	flannIndex.build(descriptors,flann::LshIndexParams(12,20,2),cvflann::FLANN_DIST_HAMMING);

	while(true)
	{
		Mat captureImage_gray;

		vector<KeyPoint> captureKeyPoints;
		Mat captureDescription;
		vector<DMatch> goodMatches;

		cap >> captureImage;
		
		if(captureImage.empty())
			continue;

		cvtColor(captureImage,captureImage_gray,CV_BGR2GRAY);
		featureDetector.detect(captureImage_gray,captureKeyPoints);
		featureExtractor.compute(captureImage_gray,captureKeyPoints,captureDescription);

		// guard: a featureless frame (e.g. lens covered) yields too few
		// descriptors for the 2-NN ratio test below
		if(captureDescription.rows < 2)
		{
			waitKey(30);
			continue;
		}

		Mat matchIndex(captureDescription.rows,2,CV_32SC1);
		Mat matchDistance(captureDescription.rows,2,CV_32FC1);

		flannIndex.knnSearch(captureDescription,matchIndex,matchDistance,2,flann::SearchParams());
		
		for(int i=0;i<matchDistance.rows;i++)
		{
			if(matchDistance.at<float>(i,0) < 0.6 * matchDistance.at<float>(i,1))
			{
				DMatch dmatches(i,matchIndex.at<int>(i,0),matchDistance.at<float>(i,0));
				goodMatches.push_back(dmatches);
			}
		}

		drawMatches(captureImage,captureKeyPoints,cropImg,keyPoints,goodMatches,resultImg);
		emit NewFeatureMatch(&resultImg);

		imshow(WindowName,captureImage);
		cv::setMouseCallback(WindowName,mouse_callback);

		waitKey(30); // the loop-local vectors are destroyed each iteration, so no explicit clear() is needed
	}

	return;
}
void mouse_callback(int event,int x,int y,int flags, void* param)
{
	switch(event)
	{
	case EVENT_MOUSEMOVE:
		{
			if(drawing_box)
			{
				box.width = x-box.x;
				box.height = y-box.y;
				Mat tmp = captureImage.clone();
				cv::rectangle(tmp,Point(box.x,box.y),Point(box.x + box.width , box.y + box.height) , Scalar(0,255,0) , 3);
				imshow(WindowName,tmp);
			}
		}
		break;

	case EVENT_LBUTTONDOWN:
		{
			drawing_box = true;
			box = Rect(x,y,0,0);
		}
		break;

	case EVENT_LBUTTONUP:
		{
			drawing_box = false;
			if(box.width < 0)
			{
				box.x += box.width;
				box.width *= -1;
			}

			if(box.height < 0)
			{
				box.y += box.height;
				box.height *= -1;
			}

			//Crop the selected region from the current frame
			Mat copyCapture = captureImage.clone();
			Mat tmp(copyCapture,box);
			Mat grayImg;

			cropImg = tmp;
			cvtColor(cropImg,grayImg,CV_BGR2GRAY);
			featureDetector.detect(grayImg,keyPoints);
			featureExtractor.compute(grayImg,keyPoints,descriptors);
			flannIndex.build(descriptors,flann::LshIndexParams(12,20,2),cvflann::FLANN_DIST_HAMMING);
		}
		break;
	}

	return;
}
int main(int argc, char** argv)
{
    if(argc != 3)
    {
        help();
        return -1;
    }

    Mat img1 = imread(argv[1], CV_LOAD_IMAGE_GRAYSCALE);
    Mat img2 = imread(argv[2], CV_LOAD_IMAGE_GRAYSCALE);
    if(img1.empty() || img2.empty())
    {
        printf("Can't read one of the images\n");
        return -1;
    }

    // detecting keypoints
    OrbFeatureDetector detector(400);
    vector<KeyPoint> keypoints1, keypoints2;
    detector.detect(img1, keypoints1);
    detector.detect(img2, keypoints2);

    // computing descriptors
    OrbDescriptorExtractor extractor;
    Mat descriptors1, descriptors2;
    extractor.compute(img1, keypoints1, descriptors1);
    extractor.compute(img2, keypoints2, descriptors2);

    // matching descriptors
    BFMatcher matcher(NORM_HAMMING); // ORB descriptors are binary, so match with Hamming distance
    vector<DMatch> matches;
    matcher.match(descriptors1, descriptors2, matches);
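
    // (Sketch) drawing every brute-force match is usually noisy. A common
    // follow-up, used elsewhere in this collection, keeps only matches close
    // to the best observed Hamming distance; good_matches could then be
    // passed to drawMatches below instead of matches.
    double min_dist = 1e9;
    for (size_t i = 0; i < matches.size(); i++)
        if (matches[i].distance < min_dist) min_dist = matches[i].distance;
    double dist_cap = (3 * min_dist < 30.0) ? 30.0 : 3 * min_dist;
    vector<DMatch> good_matches;
    for (size_t i = 0; i < matches.size(); i++)
        if (matches[i].distance <= dist_cap)
            good_matches.push_back(matches[i]);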

    // drawing the results
    namedWindow("matches", 1);
    Mat img_matches;
    drawMatches(img1, keypoints1, img2, keypoints2, matches, img_matches);
    imshow("matches", img_matches);
    waitKey(0);

    return 0;
}
int main(int argc, char** argv)
{
    if( argc < 2 )
    {
        printPrompt( argv[0] );
        return -1;
    }

    initModule_nonfree();

    // Get Input Data
    ifstream file(argv[1]);
    if ( !file.is_open() )
        return -1;
    
    string str;
    
        // Image Name
    getline( file, str ); getline( file, str );
    string image_name = str;
        // Cloud Name
    getline( file, str ); getline( file, str );
    string cloud_name = str;
        // width of images to be created.
    getline( file, str ); getline( file, str );
    int w = atoi(str.c_str());
        // height of images to be created
    getline( file, str ); getline( file, str );
    int h = atoi(str.c_str());
        // resolution of voxel grids
    getline( file, str ); getline( file, str );
    float r = atof(str.c_str());
        // f (distance from pinhole)
    getline( file, str ); getline( file, str );
    float f = atof(str.c_str());
        // thetax (initial rotation about X Axis of map)
    getline( file, str ); getline( file, str );
    float thetaX = atof(str.c_str());
        // thetay (initial rotation about Y Axis of map)
    getline( file, str ); getline( file, str );
    float thetaY = atof(str.c_str());
        // number of points to go to
    getline( file, str ); getline( file, str );
    float nop = atoi(str.c_str());
        // Number of divisions
    getline( file, str ); getline( file, str );
    float divs = atoi(str.c_str());
        // Number of images to return
    getline( file, str ); getline( file, str );
    int numtoreturn = atoi(str.c_str());    
        // Should we load or create photos?
    getline( file, str ); getline( file, str );
    string lorc = str;
        // Directory to look for photos
    getline( file, str ); getline( file, str );
    string dir = str;
        // Directory to look for kp and descriptors
    getline( file, str ); getline( file, str );
    string kdir = str;
        // save photos?
    getline( file, str ); getline( file, str );
    string savePhotos = str;
    
    file.close();
    // Done Getting Input Data

    map<vector<float>, Mat> imagemap;
    map<vector<float>, Mat> surfmap;
    map<vector<float>, Mat> siftmap;
    map<vector<float>, Mat> orbmap;
    map<vector<float>, Mat> fastmap;
    imagemap.clear();

    vector<KeyPoint> SurfKeypoints;
    vector<KeyPoint> SiftKeypoints;
    vector<KeyPoint> OrbKeypoints;
    vector<KeyPoint> FastKeypoints;
    Mat SurfDescriptors;
    Mat SiftDescriptors;
    Mat OrbDescriptors;
    Mat FastDescriptors;

    int minHessian = 300;

    // NB: minHessian is a SURF notion; the same value is reused here as the
    // SIFT/ORB feature count and as the FAST threshold
    SurfFeatureDetector SurfDetector (minHessian);
    SiftFeatureDetector SiftDetector (minHessian);
    OrbFeatureDetector OrbDetector (minHessian);
    FastFeatureDetector FastDetector (minHessian);


    SurfDescriptorExtractor SurfExtractor;
    SiftDescriptorExtractor SiftExtractor;
    OrbDescriptorExtractor OrbExtractor;

    if ( !fs::exists( dir ) || lorc == "c" )
    { // Load Point Cloud and render images
        PointCloud<PT>::Ptr cloud (new pcl::PointCloud<PT>);
        io::loadPCDFile<PT>(cloud_name, *cloud);

        Eigen::Affine3f tf = Eigen::Affine3f::Identity();
        tf.rotate (Eigen::AngleAxisf (thetaX, Eigen::Vector3f::UnitX()));
        pcl::transformPointCloud (*cloud, *cloud, tf);
        tf = Eigen::Affine3f::Identity();
        tf.rotate (Eigen::AngleAxisf (thetaY, Eigen::Vector3f::UnitY()));
        pcl::transformPointCloud (*cloud, *cloud, tf);

        // Create images from point cloud
        imagemap = render::createImages(cloud, nop, w, h, r, f);

        if (savePhotos == "y")
        {
            for (map<vector<float>, Mat>::iterator i = imagemap.begin(); i != imagemap.end(); ++i)
            {
                // Create image name and storagename
                string imfn = dir + "/";
                string kpfn = kdir + "/";
                for (int j = 0; j < i->first.size(); j++)
                {
                    imfn += boost::to_string(i->first[j]) + " ";
                    kpfn += boost::to_string(i->first[j]) + " ";
                }
                imfn += ".jpg";
                imwrite(imfn, i->second);

                // Detect keypoints, add to keypoint map. Same with descriptors

                SurfDetector.detect(i->second, SurfKeypoints);
                SiftDetector.detect(i->second, SiftKeypoints);
                OrbDetector.detect(i->second, OrbKeypoints);
                FastDetector.detect(i->second, FastKeypoints);

                SurfExtractor.compute(i->second, SurfKeypoints, SurfDescriptors);
                SiftExtractor.compute(i->second, SiftKeypoints, SiftDescriptors);
                OrbExtractor.compute(i->second, OrbKeypoints, OrbDescriptors);
                SiftExtractor.compute(i->second, FastKeypoints, FastDescriptors); // FAST has no extractor of its own; SIFT describes its keypoints

                // Store KP and Descriptors in yaml file.

                kpfn += ".yml";
                FileStorage store(kpfn, cv::FileStorage::WRITE);
                write(store,"SurfKeypoints",SurfKeypoints);
                write(store,"SiftKeypoints",SiftKeypoints);
                write(store,"OrbKeypoints", OrbKeypoints);
                write(store,"FastKeypoints",FastKeypoints);
                write(store,"SurfDescriptors",SurfDescriptors);
                write(store,"SiftDescriptors",SiftDescriptors);
                write(store,"OrbDescriptors", OrbDescriptors);
                write(store,"FastDescriptors",FastDescriptors);
                store.release();

                surfmap[i->first] = SurfDescriptors;
                siftmap[i->first] = SiftDescriptors;
                orbmap[i->first]  = OrbDescriptors;
                fastmap[i->first] = FastDescriptors;
            }
        }
    } 
    else 
    { // load images from the folder dir
        // First look into the folder to get a list of filenames
        vector<fs::path> ret;
        const char * pstr = dir.c_str();
        fs::path p(pstr);
        get_all(pstr, ret);

        for (int i = 0; i < ret.size(); i++)
        {
            // Load Image via filename
            string fn = ret[i].string();
            istringstream iss(fn);
            vector<string> tokens;
            copy(istream_iterator<string>(iss), istream_iterator<string>(), back_inserter<vector<string> >(tokens));

            // Construct ID from filename
            vector<float> ID;
            for (int j = 0; j < 6; j++) // 6 because there are three location floats and three direction floats
                ID.push_back(::atof(tokens[j].c_str()));
            string imfn = dir + "/" + fn;

            // Read image and add to imagemap.
            Mat m = imread(imfn);
            imagemap[ID] = m;

            // Create Filename for loading Keypoints and descriptors
            string kpfn = kdir + "/";
            for (int j = 0; j < ID.size(); j++)
            {
                kpfn += boost::to_string(ID[j]) + " ";
            }

            kpfn = kpfn+ ".yml";
            
            // Create filestorage item to read from and add to map.
            FileStorage store(kpfn, cv::FileStorage::READ);

            FileNode n1 = store["SurfKeypoints"];
            read(n1,SurfKeypoints);
            FileNode n2 = store["SiftKeypoints"];
            read(n2,SiftKeypoints);
            FileNode n3 = store["OrbKeypoints"];
            read(n3,OrbKeypoints);
            FileNode n4 = store["FastKeypoints"];
            read(n4,FastKeypoints);
            FileNode n5 = store["SurfDescriptors"];
            read(n5,SurfDescriptors);
            FileNode n6 = store["SiftDescriptors"];
            read(n6,SiftDescriptors);
            FileNode n7 = store["OrbDescriptors"];
            read(n7,OrbDescriptors);
            FileNode n8 = store["FastDescriptors"];
            read(n8,FastDescriptors);

            store.release();

            surfmap[ID] = SurfDescriptors;
            siftmap[ID] = SiftDescriptors;
            orbmap[ID]  = OrbDescriptors;
            fastmap[ID] = FastDescriptors;
        }
    }

    TickMeter tm;
    tm.reset();
    cout << "<\n  Analyzing Images ..." << endl;

    // We have a bunch of images, now we compute their grayscale and black and white.
    map<vector<float>, Mat> gsmap;
    map<vector<float>, Mat> bwmap;
    for (map<vector<float>, Mat>::iterator i = imagemap.begin(); i != imagemap.end(); ++i)
    {
        vector<float> ID = i->first;
        Mat Image = i->second; // NB: shares data with imagemap, so the in-place blur below also modifies the stored image
        GaussianBlur( Image, Image, Size(5,5), 0, 0, BORDER_DEFAULT );


        gsmap[ID] = averageImage::getPixSumFromImage(Image, divs);
        bwmap[ID] = averageImage::aboveBelow(gsmap[ID]);
    }
    Mat image = imread(image_name);
    Mat gsimage = averageImage::getPixSumFromImage(image, divs);
    Mat bwimage = averageImage::aboveBelow(gsimage);

    // cout << gsimage <<endl;
    imwrite("GS.png", gsimage);
    namedWindow("GSIMAGE (Line 319)");
    imshow("GSIMAGE (Line 319)", gsimage);
    waitKey(0);

    vector<KeyPoint> imgSurfKeypoints;
    vector<KeyPoint> imgSiftKeypoints;
    vector<KeyPoint> imgOrbKeypoints;
    vector<KeyPoint> imgFastKeypoints;
    Mat imgSurfDescriptors;
    Mat imgSiftDescriptors;
    Mat imgOrbDescriptors;
    Mat imgFastDescriptors;

    SurfDetector.detect(image, imgSurfKeypoints);
    SiftDetector.detect(image, imgSiftKeypoints);
    OrbDetector.detect(image, imgOrbKeypoints);
    FastDetector.detect(image, imgFastKeypoints);

    SurfExtractor.compute(image, imgSurfKeypoints, imgSurfDescriptors);
    SiftExtractor.compute(image, imgSiftKeypoints, imgSiftDescriptors);
    OrbExtractor.compute(image, imgOrbKeypoints, imgOrbDescriptors);
    SiftExtractor.compute(image, imgFastKeypoints, imgFastDescriptors); // again, SIFT describes the FAST keypoints


    tm.start();

    cout << ">\n<\n  Comparing Images ..." << endl;

    // We have their features, now compare them!
    map<vector<float>, float> gssim; // Gray Scale Similarity
    map<vector<float>, float> bwsim; // Above Below Similarity
    map<vector<float>, float> surfsim;
    map<vector<float>, float> siftsim;
    map<vector<float>, float> orbsim;
    map<vector<float>, float> fastsim;

    for (map<vector<float>, Mat>::iterator i = gsmap.begin(); i != gsmap.end(); ++i)
    {
        vector<float> ID = i->first;
        gssim[ID] = similarities::getSimilarity(i->second, gsimage);
        bwsim[ID] = similarities::getSimilarity(bwmap[ID], bwimage); 
        surfsim[ID] = similarities::compareDescriptors(surfmap[ID], imgSurfDescriptors);
        siftsim[ID] = similarities::compareDescriptors(siftmap[ID], imgSiftDescriptors);
        orbsim[ID] = 0;//similarities::compareDescriptors(orbmap[ID], imgOrbDescriptors);
        fastsim[ID] = 0;//similarities::compareDescriptors(fastmap[ID], imgFastDescriptors);
    }

    map<vector<float>, float> top;

    bool gotone = false;
    typedef map<vector<float>, float>::iterator iter;

    // Choose the best ones! (keeps the numtoreturn lowest scores, i.e. the
    // similarity measures are treated as distance-like: lower = more similar)
    for (map<vector<float>, Mat>::iterator i = imagemap.begin(); i != imagemap.end(); ++i)
    {
        vector<float> ID = i->first;

        float sim = /* gssim[ID] + 0.5*bwsim[ID] + */ 5*surfsim[ID] + 0.3*siftsim[ID] + orbsim[ID] + fastsim[ID];

        // cout << surfsim[ID] << "\t";
        // cout << siftsim[ID] << "\t";
        // cout << orbsim[ID] << "\t";
        // cout << fastsim[ID] << endl;

        if (!gotone)
        {
            top[ID] = sim;
            gotone = true;
        }

        iter it = top.begin();
        iter end = top.end();
        float max_value = it->second;
        vector<float> max_ID = it->first;
        for( ; it != end; ++it)
        {
            float current = it->second;
            if(current > max_value)
            {
                max_value = it->second;
                max_ID = it->first;
            }
        }
        // cout << "Sim: " << sim << "\tmax_value: " << max_value << endl;
        if (top.size() < numtoreturn)
            top[ID] = sim;
        else
        {
            if (sim < max_value)
            {
                top[ID] = sim;
                top.erase(max_ID);
            }
        }
    }
    tm.stop();
    double s = tm.getTimeSec();


    cout << ">\n<\n  Writing top " << numtoreturn << " images ..." << endl;

    int count = 1;
    namedWindow("Image");
    namedWindow("Match");
    namedWindow("ImageBW");
    namedWindow("MatchBW");
    namedWindow("ImageGS");
    namedWindow("MatchGS");

    imshow("Image", image);
    imshow("ImageBW", bwimage);
    imshow("ImageGS", gsimage);


    vector<KeyPoint> currentPoints;

    for (iter i = top.begin(); i != top.end(); ++i)
    {
        vector<float> ID = i->first;

        cout << "  Score: "<< i->second << "\tGrayScale: " << gssim[ID] << "\tBW: " << bwsim[ID] << "  \tSURF: " << surfsim[ID] << "\tSIFT: " << siftsim[ID] << endl;
        string fn = "Sim_" + boost::to_string(count) + "_" + boost::to_string(i->second) + ".png";
        imwrite(fn, imagemap[ID]);
        count++;

        normalize(bwmap[ID], bwmap[ID], 0, 255, NORM_MINMAX, CV_8U); // 8-bit so imshow displays the 0..255 range correctly
        normalize(gsmap[ID], gsmap[ID], 0, 255, NORM_MINMAX, CV_8U);

        imshow("Match", imagemap[ID]);
        imshow("MatchBW", bwmap[ID]);
        imshow("MatchGS", gsmap[ID]);


        waitKey(0);

    }

    cout << ">\nComparisons took " << s << " seconds for " << imagemap.size() << " images ("
        << (int)(imagemap.size()/s) << " images per second)." << endl;

return 0;
}
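// similarities::compareDescriptors above is a project-specific helper. A
// plausible, purely illustrative implementation (an assumption, not the
// project's actual code): count ratio-test survivors between two float
// descriptor sets, so a higher score means a more similar image.
static float compareDescriptorsSketch(const Mat& d1, const Mat& d2)
{
    if (d1.empty() || d2.empty())
        return 0.f;
    BFMatcher matcher(NORM_L2);         // SURF/SIFT descriptors are float vectors
    vector< vector<DMatch> > knn;
    matcher.knnMatch(d1, d2, knn, 2);
    int good = 0;
    for (size_t i = 0; i < knn.size(); i++)
        if (knn[i].size() == 2 && knn[i][0].distance < 0.8f * knn[i][1].distance)
            good++;
    return (float)good;
}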
JNIEXPORT jintArray JNICALL Java_org_opencv_samples_tutorial2_Tutorial2Activity_MatchFeatures(JNIEnv* env, jobject obj , jlong addrGray, jlong addrRgba, jlong addrObject)
{
	char stringtolog[100];
	const char* TAG="JNI";

	int detected=0;

	//load frame data
    Mat& mGr  = *(Mat*)addrGray;
    Mat& mRgb = *(Mat*)addrRgba;

    //load image data
    Mat& mObject = *(Mat*)addrObject;
    sprintf(stringtolog,"image load size (%d,%d)\n", mObject.rows,mObject.cols);
           __android_log_write(ANDROID_LOG_INFO, TAG, stringtolog);


    //keypoints
    vector<KeyPoint> keypoints_scene;
    vector<KeyPoint> keypoints_object;

    //FastFeatureDetector detector(50);
    //detector.detect(mGr, v);

    //ORB detection
    OrbFeatureDetector detector(200,1.2,8,31,0,2,0,31);
    //SiftFeatureDetector detector(100,3,0.04,10,1.6);
    detector.detect(mGr, keypoints_scene);
    detector.detect(mObject, keypoints_object);
    sprintf(stringtolog,"ORB Feature-- Keypoints %d in object, %d in scene\n", (int)keypoints_object.size(), (int)keypoints_scene.size());
       __android_log_write(ANDROID_LOG_INFO, TAG, stringtolog);// JNI log

    //extraction
    //SiftDescriptorExtractor extractor;
    OrbDescriptorExtractor extractor;
    Mat descriptor_scene;
    Mat descriptor_object;
    extractor.compute(mGr,keypoints_scene,descriptor_scene);
    extractor.compute(mObject,keypoints_object,descriptor_object); // was keypoints_scene: the object image must use its own keypoints

    //draw keypoints
    bool todrawkeypoints=false;
    if (todrawkeypoints) {
    	for( unsigned int i = 0; i < keypoints_scene.size(); i++ )
    	    {
    	        const KeyPoint& kp = keypoints_scene[i];
    	        circle(mRgb, Point(kp.pt.x, kp.pt.y), 10, Scalar(255,0,0,255));
    	    }
    }

    //match
    std::vector<DMatch> matches;

	// L2 distance based matching. Brute Force Matching
	cv::BFMatcher matcher( NORM_HAMMING,false);

	//cv::FlannBasedMatcher matcher;

    // display of corresponding points
    matcher.match( descriptor_object, descriptor_scene, matches );

    //quick calculation of max and min distance
    double max_dist=-1;
    double min_dist=10000;
    double sum_dist=0;
    double sumsqr_dist=0;
    int dist_count[100]={0};
    //vector<double> dist_vector;

    //quick sort and process distances
    for (int i=0;i<descriptor_object.rows;i++)
    {
    	const DMatch& dmatch=matches[i];
		double dist = dmatch.distance;
		min_dist=(min_dist<dist)?min_dist:dist;
		max_dist=(max_dist>dist)?max_dist:dist;
		dist_count[int(dist/10)]=dist_count[int(dist/10)]+1;
		sum_dist+=dist;
		sumsqr_dist+=(110-dist)*(110-dist);
	//	dist_vector.push_back(dist);
    }

    //log
    	sprintf(stringtolog,"ORB Feature-- Max dist : %.2f \n", max_dist );
    	__android_log_write(ANDROID_LOG_INFO, TAG, stringtolog);// JNI log
    	sprintf(stringtolog,"ORB Feature-- Min dist : %.2f \n", min_dist );
    	__android_log_write(ANDROID_LOG_INFO, TAG, stringtolog);// JNI log
    	sprintf(stringtolog,"ORB Feature-- Mean dist : %.2f \n",  sum_dist/(descriptor_object.rows+0.0f));
		__android_log_write(ANDROID_LOG_INFO, TAG, stringtolog);// JNI log
		sprintf(stringtolog,"ORB Feature-- RMS dist : %.2f \n",  sqrt((sumsqr_dist+0.0f)/(descriptor_object.rows+0.0f)));
		__android_log_write(ANDROID_LOG_INFO, TAG, stringtolog);// JNI log




//threshold based on empirical
    double num_std=3.0f;
    double dist=70.0f/num_std;
    min_dist=(min_dist<dist)?min_dist:dist;


    //draw matches
    std::vector< DMatch > good_matches;
    bool todrawmatch=true;
	for (int i=0; i<descriptor_object.rows;i++) {

			const DMatch& dmatch=matches[i];
			const KeyPoint& kp = keypoints_scene[dmatch.trainIdx];
			double dist = dmatch.distance;
			if (dist<min_dist*num_std) {
				if (todrawmatch)
					circle(mRgb, Point(kp.pt.x, kp.pt.y), 10, Scalar(0,255,0,255));
				good_matches.push_back(dmatch);
			}
	}

	sprintf(stringtolog,"ORB Feature-- Good Matches : %d \n",  (int)good_matches.size());
	__android_log_write(ANDROID_LOG_INFO, TAG, stringtolog);// JNI log



	//compute position
	std::vector<cv::Point2f> points_obj;
	std::vector<cv::Point2f> points_scene;

	for (int i=0; i<good_matches.size();i++)
	{
		//Get the keypoints from the good matches
		const DMatch& dmatch=good_matches[i];
		const KeyPoint& oKeyPoint=keypoints_object[dmatch.queryIdx];
		points_obj.push_back(oKeyPoint.pt);
		const KeyPoint& sKeyPoint=keypoints_scene[dmatch.trainIdx];
		points_scene.push_back(sKeyPoint.pt);
	}

	//clear memory (vector::clear is enough; popping every element first was redundant)
	int n_goodmatches=good_matches.size();
	if (n_goodmatches<=4)
		return NULL;
	good_matches.clear();
	matches.clear();


	//if enough points do homography
	Mat myHomo = findHomography(points_obj,points_scene,CV_RANSAC);

	//check error: project the object points with the homography and compare
	//against the matched scene points (transforming points_scene in place
	//would overwrite the reference points and make the error meaningless)
	std::vector<cv::Point2f> points_proj;
	perspectiveTransform( points_obj, points_proj, myHomo);
	float HomoError=0.0f;
	for (int i=0; i<n_goodmatches;i++)
	{
		const Point2f p1=points_proj[i];
		const Point2f p2=points_scene[i];
		float dis=(cv::norm(p1-p2));
		HomoError+=dis*dis;
	}
	sprintf(stringtolog,"Homography Mean Error %.2f \n",  HomoError/(n_goodmatches+0.0f));
		__android_log_write(ANDROID_LOG_INFO, TAG, stringtolog);// JNI log


	//compute position
	std::vector<cv::Point2f> obj_corners(5);
	std::vector<Point2f> scene_corners(5);
	obj_corners[0] = cvPoint(0,0);
	obj_corners[1] = cvPoint( mObject.cols, 0 );
	obj_corners[2] = cvPoint( mObject.cols, mObject.rows );
	obj_corners[3] = cvPoint( 0, mObject.rows );
	obj_corners[4] = cvPoint( mObject.cols/2, mObject.rows*4/11);
	perspectiveTransform( obj_corners, scene_corners, myHomo);
	Point2f kp=scene_corners[4];
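
	// (Sketch) an additional sanity check one could apply at this point:
	// a valid homography should map the object rectangle to a convex
	// quadrilateral, so a non-convex projection usually means a degenerate fit.
	std::vector<cv::Point2f> corner_quad(scene_corners.begin(), scene_corners.begin()+4);
	if (!isContourConvex(corner_quad)) {
		sprintf(stringtolog,"Homography check: projected corners not convex\n");
		__android_log_write(ANDROID_LOG_INFO, TAG, stringtolog);// JNI log
	}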

    //draw position only when the fitting error is small
	if (HomoError<20000) {
		circle(mRgb, Point2f(kp.x, kp.y), 50, Scalar(0,0,255,255),10);
	}


	//output
	jintArray graphic;
	jint size = 5;
	graphic = (env)->NewIntArray(size);
	if(graphic == NULL) {
		sprintf(stringtolog,"JNI array not created");
		__android_log_write(ANDROID_LOG_INFO, TAG, stringtolog);// JNI log
		return NULL;
	}
	jint fill[size];
	fill[0]=(jint)1;//image id
	fill[1]=(jint) kp.x;//x position
	fill[2]=(jint) kp.y;//y position
	fill[3]=(jint)(HomoError/(n_goodmatches+0.0f)); //fitting error (mean squared)
	fill[4]=(jint) n_goodmatches; // matched feature
	env->SetIntArrayRegion(graphic,0,size,fill);

	//clear memory
		obj_corners.clear();
		scene_corners.clear();
		points_obj.clear();
		points_scene.clear();

	//return
		return graphic;



}
JNIEXPORT void JNICALL Java_org_recg_writehomog_NativeCodeInterface_nativeLoop
(JNIEnv * jenv, jclass, jlong hataddr, jlong gray1, jlong gray2)
{
	clock_t t1, t2;
	t1 = clock();
	homogandtimer *hatinloop = (homogandtimer *) hataddr;
    LOGD("passed just entered nativeloop b4 trying");
    try
    {
    	LOGD("passed just entered the try in nativeloop");
    	LOGD("passed char jenv getutfchars");
    	string homogstring; // leftover from an earlier revision; unused below
    	LOGD("passed making homogstring");

    	//output the matrices to the Log
    	Mat frame1 = *((Mat *)gray1);
    	Mat frame2 = *((Mat *)gray2);
    	LOGD("passed making mats");

    	int minHessian = 400; // SURF-style name, but used here as the ORB feature count

    	//initial variable declaration
    	OrbFeatureDetector detector(minHessian);
    	LOGD("passed making detector");
    	std::vector<KeyPoint> keypoints1, keypoints2;
    	LOGD("passed making keypoints");
    	OrbDescriptorExtractor extractor;
    	LOGD("passed making extractor");
    	Mat descriptors1, descriptors2;
    	LOGD("passed making descriptors");

    	//process first frame
    	detector.detect(frame1, keypoints1);
    	LOGD("passed detecting1");
    	extractor.compute(frame1, keypoints1, descriptors1);
    	LOGD("passed computing1");

    	//process second frame
    	detector.detect(frame2, keypoints2);
    	LOGD("passed detecting2");
    	extractor.compute(frame2, keypoints2, descriptors2);
    	LOGD("passed computing2");

    	//in case frame has no features (eg if all-black from finger blocking lens)
    	if (keypoints1.size() == 0){
    		LOGD("passed keypointssize was zero!!");
			//go back to the javacode and continue with the next frame
			//(frame1/keypoints1/descriptors1 are local copies, so there is
			//nothing to carry over here)
			return;
    	}

    	LOGD("passed keypointssize not zero!");
    	//Now match the points on the successive images
    	//FlannBasedMatcher matcher;
    	BFMatcher matcher(NORM_HAMMING); // ORB descriptors are binary; the default L2 norm would be wrong
    	LOGD("passed creating matcher");
    	std::vector<DMatch> matches;
    	LOGD("passed creating matches");
    	if(descriptors1.empty()){
    		LOGD("passed descriptors1 is empty!");
    		return; // matching against an empty descriptor set would fail
    	}
    	if(descriptors2.empty()){
    		LOGD("passed descriptors2 is empty!");
    		return;
    	}
    	LOGD("passed key1 size %d", (int)keypoints1.size());
    	LOGD("passed key2 size %d", (int)keypoints2.size());

    	matcher.match(descriptors1, descriptors2, matches);
    	LOGD("passed doing the matching");

    	//eliminate weaker matches
    	double maxdist = 0;
		double mindist = 100;
		for (int j = 0; j < descriptors1.rows; j++){
			DMatch match = matches[j];
			double dist = match.distance;
			if( dist < mindist ) mindist = dist;
			if( dist > maxdist ) maxdist = dist;
		}

		//build the list of "good" matches
		std::vector<DMatch> goodmatches;
		for( int k = 0; k < descriptors1.rows; k++ ){
			DMatch amatch = matches[k];
			if( amatch.distance <= 3*mindist ){
				goodmatches.push_back(amatch);
			}
		}

	//Now compute homography matrix between the stronger matches
		//-- Localize the object
		std::vector<Point2f> obj;
		std::vector<Point2f> scene;
		if (goodmatches.size() < 4){
			//findHomography needs at least 4 point pairs; skip this frame
			return;
		}

		for(int l = 0; l < goodmatches.size(); l++){
		//-- Get the keypoints from the good matches
			DMatch gm1 = goodmatches[l];
			KeyPoint kp1 = keypoints1[gm1.queryIdx];
			obj.push_back(kp1.pt);

			KeyPoint kp2 = keypoints2[gm1.trainIdx]; // was keypoints1: trainIdx indexes the second frame's keypoints
			scene.push_back(kp2.pt);
		}

		Mat hmatrix = findHomography(obj,scene,CV_RANSAC);

		hatinloop->writehomogm << hmatrix.at<double>(0,0) << " ";
		LOGD("passed el00  %f",hmatrix.at<double>(0,0));
		LOGD("  ");
		hatinloop->writehomogm << hmatrix.at<double>(0,1) << " ";
		LOGD("passed el01  %f",hmatrix.at<double>(0,1));
		LOGD("  ");
		hatinloop->writehomogm << hmatrix.at<double>(0,2) << " ";

		hatinloop->writehomogm << hmatrix.at<double>(1,0) << " ";
		hatinloop->writehomogm << hmatrix.at<double>(1,1) << " ";
		hatinloop->writehomogm << hmatrix.at<double>(1,2) << " ";

		hatinloop->writehomogm << hmatrix.at<double>(2,0) << " ";
		hatinloop->writehomogm << hmatrix.at<double>(2,1) << " ";
		hatinloop->writehomogm << hmatrix.at<double>(2,2) << " endmatrix\n";

		t2 = clock();
		hatinloop->speedrecord << (float) (t2 - t1)/CLOCKS_PER_SEC << "\n";
		LOGD("passed timingstuff %f",(float) (t2 - t1)/CLOCKS_PER_SEC);

		hatinloop->writehomogm.flush();
		hatinloop->speedrecord.flush();

    }
    catch(cv::Exception& e)
    {
        LOGD("nativeCreateObject caught cv::Exception: %s", e.what());
        jclass je = jenv->FindClass("org/opencv/core/CvException");
        if(!je)
            je = jenv->FindClass("java/lang/Exception");
        jenv->ThrowNew(je, e.what());
    }
    catch (...)
    {
        LOGD("nativeDetect caught unknown exception");
        jclass je = jenv->FindClass("java/lang/Exception");
        jenv->ThrowNew(je, "Unknown exception in JNI nativeloop's code");
    }
    LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDetect exit");

}
StitchedMap::StitchedMap(Mat &img1, Mat &img2, int max_trans, int max_rotation, float max_pairwise_distance, cv::Mat oldTransform)
{
  // load images, TODO: check that they're grayscale
  image1 = img1.clone();
  image2 = img2.clone();
  if(image1.size() != image2.size())
      cv::resize(image2,image2,image1.size());
  works = true;
  // create feature detector set.
  OrbFeatureDetector detector;
  OrbDescriptorExtractor dexc;
  BFMatcher dematc(NORM_HAMMING, false);

  // 1. extract keypoints
  detector.detect(image1, kpv1);
  detector.detect(image2, kpv2);

  // 2. extract descriptors
  dexc.compute(image1, kpv1, dscv1);
  dexc.compute(image2, kpv2, dscv2);

  // 3. match keypoints
  if(kpv1.size() == 0|| kpv2.size() == 0)
  {
      ROS_WARN("No KPV");
      works = false;
      return;
  }
//  ROS_INFO("Kpv1:%i entries\t Kpv2:%i entries",kpv1.size(),kpv2.size());
  dematc.match(dscv1, dscv2, matches);

  // 4. find matching point pairs with same distance in both images
  for (size_t i=0; i<matches.size(); i++) {
    KeyPoint a1 = kpv1[matches[i].queryIdx],
             b1 = kpv2[matches[i].trainIdx];

    if (matches[i].distance > 30)
      continue;

    for (size_t j=0; j<matches.size(); j++) {
      KeyPoint a2 = kpv1[matches[j].queryIdx],
               b2 = kpv2[matches[j].trainIdx];

      if (matches[j].distance > 30)
        continue;

      if ( fabs(norm(a1.pt-a2.pt) - norm(b1.pt-b2.pt)) > max_pairwise_distance ||
           fabs(norm(a1.pt-a2.pt) - norm(b1.pt-b2.pt)) == 0)
        continue;


      coord1.push_back(a1.pt);
      coord1.push_back(a2.pt);
      coord2.push_back(b1.pt);
      coord2.push_back(b2.pt);


      fil1.push_back(a1);
      fil1.push_back(a2);
      fil2.push_back(b1);
      fil2.push_back(b2);
    }
  }

   // cv::imwrite("img1.pgm",image1);
   // cv::imwrite("img2.pgm",image2);
  // 5. find homography
 // ROS_INFO("Found %i matches",matches.size());
  if(coord1.size() < 1)
  {
      ROS_WARN("Problem while transforming the map; this might just be a start-up problem \n Coord1:%lu",coord1.size());
      works = false;
      return;
  }

  ROS_DEBUG("Compute estimateRigid");
  H = estimateRigidTransform(coord2, coord1,false);
  if(H.empty())
  {
      ROS_WARN("H contains no data, cannot find a valid transformation");
      works = false;
      return;
  }
  //ROS_DEBUG("H: size:%lu|empty:%i",H.size,H.empty());

  rotation = 180./M_PI*atan2(H.at<double>(0,1),H.at<double>(1,1));
  transx   = H.at<double>(0,2);
  transy   = H.at<double>(1,2);
  scalex   = sqrt(pow(H.at<double>(0,0),2)+pow(H.at<double>(0,1),2));
  scaley   = sqrt(pow(H.at<double>(1,0),2)+pow(H.at<double>(1,1),2));
  ROS_DEBUG("H: transx:%f|transy%f|scalex:%f,scaley:%f|rotation:%f",transx,transy,scalex,scaley,rotation);
  //first_x_trans = transx;
  //first_y_trans = transy;
  float scale_change = 0.05;

  if(scalex > 1 + scale_change || scaley > 1 + scale_change)
  {
      ROS_WARN("Map should not be scaled; scale change is too large");
      works = false;
      return;
  }
  if(scalex < 1 - scale_change|| scaley < 1 - scale_change)
  {
      ROS_WARN("Map should not be scaled; scale change is too small");
      works = false;
      return;
  }
  if(max_trans != -1)
  {
      if(transx > max_trans || transy > max_trans)
      {
          ROS_WARN("Map should not translate this much");
          works = false;
          return;
      }
  }
  if(max_rotation != -1)
  {
      if(rotation > max_rotation || rotation < -1 * max_rotation)
      {
          ROS_WARN("Map should not rotate this much");
          works = false;
          return;
      }
  }
  cur_trans = H;
  ROS_DEBUG("Finished estimateRigid");
  //evaluade transformation
  //evaluate
  if(works)
  {
      Mat tmp (image2.size(),image2.type());
      Mat thr;
      Mat image(image2.size(), image2.type());

      // overlay the transformed map on the reference map for evaluation
      // (the warp was previously done twice with identical results)
      warpAffine(image2,tmp,H,image2.size());
      addWeighted(tmp,.5,image1,.5,0.0,image);
      //cvtColor(image1,tmp,CV_BGR2GRAY);
      threshold(tmp,thr,0,255,THRESH_BINARY);
      Mat K=(Mat_<uchar>(5,5)<<   0,  0,  1,  0,  0,\
                                     0,  0,  1,  0,  0,\
                                     1,  1,  1,  1,  1,\
                                     0,  0,  1,  0,  0,\
                                     0,  0,  1,  0,  0);

      erode(thr,thr,K,Point(-1,-1),1,BORDER_CONSTANT);
         vector< vector <Point> > contours; // Vector for storing contour
         vector< Vec4i > hierarchy;
         findContours( thr, contours, hierarchy,CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );
         for( int i = 0; i< contours.size(); i++ )
         {
            Rect r= boundingRect(contours[i]);
            rects2.push_back(r);
            rectangle(tmp,Point(r.x,r.y), Point(r.x+r.width,r.y+r.height), Scalar(0,0,255),2,8,0);
          }//Opened contour

         Mat thr1;
         //cvtColor(image1,tmp,CV_BGR2GRAY);
         threshold(image1,thr1,0,255,THRESH_BINARY);
         erode(thr1,thr1,K,Point(-1,-1),1,BORDER_CONSTANT);
            vector< vector <Point> > contours1; // Vector for storing contour
            vector< Vec4i > hierarchy1;
            findContours( thr1, contours1, hierarchy1,CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );

            for( int i = 0; i< contours1.size(); i++ ){
             Rect r= boundingRect(contours1[i]);
             rects1.push_back(r);
             //rectangle(image1,Point(r.x,r.y), Point(r.x+r.width,r.y+r.height), Scalar(0,0,255),2,8,0);
             }//Opened contour
         vector<Rect> near1,near2;
         int offset = 5;
         for(int i = 0; i < rects1.size(); i++)
         {
             Rect ri = rects1.at(i);
             if(ri.x == 1 && ri.y == 1)
                 continue;
             for(int j = 0; j < rects2.size();j++)
             {
                 Rect rj = rects2.at(j);
                 if(ri.x < rj.x + offset && ri.x > rj.x -offset)
                 {
                     if(ri.y < rj.y + offset && ri.y > rj.y -offset)
                     {
                         near1.push_back(ri);
                         near2.push_back(rj);
                     }
                 }
             }
         }
         double eudis = 0;
         double disX,disY;
         for(int i = 0; i < near1.size(); i++)
         {
             Rect ri = near1.at(i);
             Rect rj = near2.at(i);
             disX = ri.x - rj.x;
             disY = ri.y - rj.y;
             if(disX < 0)
                 disX = disX * (-1);
             if(disY < 0)
                 disY = disY * (-1);
             eudis += sqrt((disX*disX)+(disY*disY));
         }
         if(near1.size() < 2)
             eudis = -1;
         else
             eudis = eudis / near1.size();
         ROS_DEBUG("EudisNew:%f\t near1Size:%lu:\toldTran:%i",eudis,near1.size(),oldTransform.empty());
         //calc EudisOld

         Mat thr3,tmp1;
         //cvtColor(image1,tmp,CV_BGR2GRAY);
         if(oldTransform.empty())
             return;
         warpAffine(image2,tmp1,oldTransform,image2.size());
         threshold(tmp1,thr3,0,255,THRESH_BINARY);

         erode(thr3,thr3,K,Point(-1,-1),1,BORDER_CONSTANT);
            vector< vector <Point> > contours3; // Vector for storing contour
            vector< Vec4i > hierarchy3;
            findContours( thr3, contours3, hierarchy3,CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );

            for( int i = 0; i< contours3.size(); i++ ){
             Rect r= boundingRect(contours3[i]);
             rects3.push_back(r);
            }//Opened contour
            near1.clear();
            near2.clear();
            for(int i = 0; i < rects1.size(); i++)
            {
                Rect ri = rects1.at(i);
                if(ri.x == 1 && ri.y == 1)
                    continue;
                for(int j = 0; j < rects3.size();j++)
                {
                    Rect rj = rects3.at(j);
                    if(ri.x < rj.x + offset && ri.x > rj.x -offset)
                    {
                        if(ri.y < rj.y + offset && ri.y > rj.y -offset)
                        {
                            near1.push_back(ri);
                            near2.push_back(rj);
                        }
                    }
                }
            }
            double eudisOLD = 0;
            for(int i = 0; i < near1.size(); i++)
            {
                Rect ri = near1.at(i);
                Rect rj = near2.at(i);
                disX = ri.x - rj.x;
                disY = ri.y - rj.y;
                if(disX < 0)
                    disX = disX * (-1);
                if(disY < 0)
                    disY = disY * (-1);
                eudisOLD += sqrt((disX*disX)+(disY*disY));
            }
            if(near1.size() < 2)
                eudisOLD = -1; // was eudis: this branch belongs to the OLD-transform score
            else
                eudisOLD = eudisOLD / near1.size();
            //if(eudisOLD < eudis)
               // works = false;
            ROS_WARN("EudisOLD:%f\t near1Size:%lu:|works:%i",eudisOLD,near1.size(),works);
            //calc EudisOld
         /*  for(int i = 0; i < rects1.size(); i++)
           {
               Rect r = rects1.at(i);
               rectangle(image1,Point(r.x,r.y), Point(r.x+r.width,r.y+r.height), Scalar(0,0,255),2,8,0);
            }*/

  }
  return;
}
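// A minimal usage sketch for StitchedMap (hypothetical caller and file
// names; `works` and `cur_trans` are the members the constructor fills in).
void stitch_demo()
{
  Mat map1 = imread("map1.pgm", CV_LOAD_IMAGE_GRAYSCALE);
  Mat map2 = imread("map2.pgm", CV_LOAD_IMAGE_GRAYSCALE);
  if (map1.empty() || map2.empty())
    return;
  StitchedMap sm(map1, map2, /*max_trans=*/-1, /*max_rotation=*/-1,
                 /*max_pairwise_distance=*/5.f, /*oldTransform=*/Mat());
  if (!sm.works)
    return;                                   // transform was rejected
  Mat merged;
  warpAffine(map2, merged, sm.cur_trans, map1.size());
  addWeighted(merged, 0.5, map1, 0.5, 0.0, merged);
  imwrite("merged.pgm", merged);
}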