Example #1
void FeatureMatchThread::run()
{
	Mat resultImg;
	Mat grayImg;

	cvtColor(cropImg,grayImg,CV_BGR2GRAY);
	featureDetector.detect(grayImg,keyPoints);
	featureExtractor.compute(grayImg,keyPoints,descriptors);
	flannIndex.build(descriptors,flann::LshIndexParams(12,20,2),cvflann::FLANN_DIST_HAMMING);

	while(true)
	{
		Mat captureImage_gray;

		vector<KeyPoint> captureKeyPoints;
		Mat captureDescription;
		vector<DMatch> goodMatches;

		cap >> captureImage;
		
		if(captureImage.empty())
			continue;

		cvtColor(captureImage,captureImage_gray,CV_BGR2GRAY);
		featureDetector.detect(captureImage_gray,captureKeyPoints);
		featureExtractor.compute(captureImage_gray,captureKeyPoints,captureDescription);

		Mat matchIndex(captureDescription.rows,2,CV_32SC1);
		Mat matchDistance(captureDescription.rows,2,CV_32FC1);

		flannIndex.knnSearch(captureDescription,matchIndex,matchDistance,2,flann::SearchParams());
		
		for(int i=0;i<matchDistance.rows;i++)
		{
			if(matchDistance.at<float>(i,0) < 0.6 * matchDistance.at<float>(i,1))
			{
				DMatch dmatches(i,matchIndex.at<int>(i,0),matchDistance.at<float>(i,0));
				goodMatches.push_back(dmatches);
			}
		}

		drawMatches(captureImage,captureKeyPoints,cropImg,keyPoints,goodMatches,resultImg);
		emit NewFeatureMatch(&resultImg);

		imshow(WindowName,captureImage);
		cv::setMouseCallback(WindowName,mouse_callback);

		captureKeyPoints.clear();
		goodMatches.clear();
		waitKey(30);
	}

	return;
}
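
The example above builds a FLANN LSH index over the ORB descriptors of a cropped template and ratio-tests the two nearest neighbours of every frame descriptor. Below is a minimal, self-contained sketch of that matching pattern, assuming OpenCV 2.4.x and two hypothetical image files query.jpg and scene.jpg; note that with FLANN_DIST_HAMMING the wrapper returns the distances as 32-bit integers, so they are read with at<int> here.

#include <opencv2/opencv.hpp>
#include <vector>
using namespace cv;

int main()
{
    Mat query = imread("query.jpg", 0);   // hypothetical file names, loaded as grayscale
    Mat scene = imread("scene.jpg", 0);
    if (query.empty() || scene.empty()) return -1;

    OrbFeatureDetector detector;
    OrbDescriptorExtractor extractor;

    std::vector<KeyPoint> kpQuery, kpScene;
    Mat descQuery, descScene;
    detector.detect(query, kpQuery);
    extractor.compute(query, kpQuery, descQuery);
    detector.detect(scene, kpScene);
    extractor.compute(scene, kpScene, descScene);

    // LSH index over the template descriptors; Hamming distance suits binary ORB descriptors.
    flann::Index index(descQuery, flann::LshIndexParams(12, 20, 2), cvflann::FLANN_DIST_HAMMING);

    Mat indices(descScene.rows, 2, CV_32SC1);
    Mat dists(descScene.rows, 2, CV_32SC1);   // Hamming distances are returned as CV_32S
    index.knnSearch(descScene, indices, dists, 2, flann::SearchParams());

    // Lowe's ratio test: keep a match only if it is clearly better than the runner-up.
    std::vector<DMatch> good;
    for (int i = 0; i < dists.rows; ++i)
        if (dists.at<int>(i, 0) < 0.6f * dists.at<int>(i, 1))
            good.push_back(DMatch(i, indices.at<int>(i, 0), (float)dists.at<int>(i, 0)));

    Mat vis;
    drawMatches(scene, kpScene, query, kpQuery, good, vis);
    imshow("matches", vis);
    waitKey(0);
    return 0;
}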
Example #2
void mouse_callback(int event,int x,int y,int flags, void* param)
{
	switch(event)
	{
	case EVENT_MOUSEMOVE:
		{
			if(drawing_box)
			{
				box.width = x-box.x;
				box.height = y-box.y;
				Mat tmp = captureImage.clone();
				cv::rectangle(tmp,Point(box.x,box.y),Point(box.x + box.width , box.y + box.height) , Scalar(0,255,0) , 3);
				imshow(WindowName,tmp);
			}
		}
		break;

	case EVENT_LBUTTONDOWN:
		{
			drawing_box = true;
			box = Rect(x,y,0,0);
		}
		break;

	case EVENT_LBUTTONUP:
		{
			drawing_box = false;
			if(box.width < 0)
			{
				box.x += box.width;
				box.width *= -1;
			}

			if(box.height < 0)
			{
				box.y += box.height;
				box.height *= -1;
			}

			// Crop the selected region from the current frame
			Mat copyCapture = captureImage.clone();
			Mat tmp(copyCapture,box);
			Mat grayImg;

			cropImg = tmp;
			cvtColor(cropImg,grayImg,CV_BGR2GRAY);
			featureDetector.detect(grayImg,keyPoints);
			featureExtractor.compute(grayImg,keyPoints,descriptors);
			flannIndex.build(descriptors,flann::LshIndexParams(12,20,2),cvflann::FLANN_DIST_HAMMING);
		}
		break;
	}

	return;
}
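
The callback above drags out a rectangle, normalizes a negative width/height on button-up, and crops that region for feature extraction. A stripped-down sketch of the same selection logic, assuming OpenCV 2.4.x, a hypothetical scene.jpg, and a placeholder window name "select":

#include <opencv2/opencv.hpp>
using namespace cv;

static Rect box;
static bool drawing_box = false;
static Mat frame;   // image shown in the window

static void onMouse(int event, int x, int y, int, void*)
{
    switch (event)
    {
    case EVENT_LBUTTONDOWN:
        drawing_box = true;
        box = Rect(x, y, 0, 0);
        break;
    case EVENT_MOUSEMOVE:
        if (drawing_box)
        {
            box.width  = x - box.x;
            box.height = y - box.y;
            Mat tmp = frame.clone();
            rectangle(tmp, box.tl(), box.br(), Scalar(0, 255, 0), 2);
            imshow("select", tmp);
        }
        break;
    case EVENT_LBUTTONUP:
        drawing_box = false;
        // Normalize negative width/height so the ROI constructor gets a valid rectangle.
        if (box.width  < 0) { box.x += box.width;  box.width  = -box.width;  }
        if (box.height < 0) { box.y += box.height; box.height = -box.height; }
        box &= Rect(0, 0, frame.cols, frame.rows);   // clip to the image
        if (box.area() > 0)
            imshow("roi", Mat(frame, box));
        break;
    }
}

int main()
{
    frame = imread("scene.jpg");   // hypothetical file name
    if (frame.empty()) return -1;
    namedWindow("select");
    setMouseCallback("select", onMouse);
    imshow("select", frame);
    waitKey(0);
    return 0;
}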
Example #3
int main(int argc, char** argv)
{
    if( argc < 2 )
    {
        printPrompt( argv[0] );
        return -1;
    }

    initModule_nonfree();

    // Get Input Data
    ifstream file(argv[1]);
    if ( !file.is_open() )
        return -1;
    
    string str;
    
        // Image Name
    getline( file, str ); getline( file, str );
    string image_name = str;
        // Cloud Name
    getline( file, str ); getline( file, str );
    string cloud_name = str;
        // width of images to be created.
    getline( file, str ); getline( file, str );
    int w = atoi(str.c_str());
        // height of images to be created
    getline( file, str ); getline( file, str );
    int h = atoi(str.c_str());
        // resolution of voxel grids
    getline( file, str ); getline( file, str );
    float r = atof(str.c_str());
        // f (distance from pinhole)
    getline( file, str ); getline( file, str );
    float f = atof(str.c_str());
        // thetax (initial rotation about X Axis of map)
    getline( file, str ); getline( file, str );
    float thetaX = atof(str.c_str());
        // thetay (initial rotation about Y Axis of map)
    getline( file, str ); getline( file, str );
    float thetaY = atof(str.c_str());
        // number of points to go to
    getline( file, str ); getline( file, str );
    float nop = atoi(str.c_str());
        // Number of divisions
    getline( file, str ); getline( file, str );
    float divs = atoi(str.c_str());
        // Number of images to return
    getline( file, str ); getline( file, str );
    int numtoreturn = atoi(str.c_str());    
        // Should we load or create photos?
    getline( file, str ); getline( file, str );
    string lorc =str.c_str();
        // Directory to look for photos
    getline( file, str ); getline( file, str );
    string dir =str.c_str();
        // Directory to look for kp and descriptors
    getline( file, str ); getline( file, str );
    string kdir =str.c_str();
        // save photos?
    getline( file, str ); getline( file, str );
    string savePhotos =str.c_str();
    
    file.close();
    // Done Getting Input Data

    map<vector<float>, Mat> imagemap;
    map<vector<float>, Mat> surfmap;
    map<vector<float>, Mat> siftmap;
    map<vector<float>, Mat> orbmap;
    map<vector<float>, Mat> fastmap;
    imagemap.clear();

    vector<KeyPoint> SurfKeypoints;
    vector<KeyPoint> SiftKeypoints;
    vector<KeyPoint> OrbKeypoints;
    vector<KeyPoint> FastKeypoints;
    Mat SurfDescriptors;
    Mat SiftDescriptors;
    Mat OrbDescriptors;
    Mat FastDescriptors;

    int minHessian = 300;

    SurfFeatureDetector SurfDetector (minHessian);
    SiftFeatureDetector SiftDetector (minHessian);
    OrbFeatureDetector OrbDetector (minHessian);
    FastFeatureDetector FastDetector (minHessian);


    SurfDescriptorExtractor SurfExtractor;
    SiftDescriptorExtractor SiftExtractor;
    OrbDescriptorExtractor OrbExtractor;

    if ( !fs::exists( dir ) || lorc == "c" )
    { // Load Point Cloud and render images
        PointCloud<PT>::Ptr cloud (new pcl::PointCloud<PT>);
        io::loadPCDFile<PT>(cloud_name, *cloud);

        Eigen::Affine3f tf = Eigen::Affine3f::Identity();
        tf.rotate (Eigen::AngleAxisf (thetaX, Eigen::Vector3f::UnitX()));
        pcl::transformPointCloud (*cloud, *cloud, tf);
        tf = Eigen::Affine3f::Identity();
        tf.rotate (Eigen::AngleAxisf (thetaY, Eigen::Vector3f::UnitY()));
        pcl::transformPointCloud (*cloud, *cloud, tf);

        // Create images from point cloud
        imagemap = render::createImages(cloud, nop, w, h, r, f);

        if (savePhotos == "y")
        {
            for (map<vector<float>, Mat>::iterator i = imagemap.begin(); i != imagemap.end(); ++i)
            {
                // Create image name and storagename
                string imfn = dir + "/";
                string kpfn = kdir + "/";
                for (int j = 0; j < i->first.size(); j++)
                {
                    imfn += boost::to_string(i->first[j]) + " ";
                    kpfn += boost::to_string(i->first[j]) + " ";
                }
                imfn += ".jpg";
                imwrite(imfn, i->second);

                // Detect keypoints, add to keypoint map. Same with descriptors

                SurfDetector.detect(i->second, SurfKeypoints);
                SiftDetector.detect(i->second, SiftKeypoints);
                OrbDetector.detect(i->second, OrbKeypoints);
                FastDetector.detect(i->second, FastKeypoints);

                SurfExtractor.compute(i->second, SurfKeypoints, SurfDescriptors);
                SiftExtractor.compute(i->second, SiftKeypoints, SiftDescriptors);
                OrbExtractor.compute(i->second, OrbKeypoints, OrbDescriptors);
                SiftExtractor.compute(i->second, FastKeypoints, FastDescriptors); // FAST is detector-only, so SIFT descriptors are computed at the FAST keypoints

                // Store KP and Descriptors in yaml file.

                kpfn += ".yml";
                FileStorage store(kpfn, cv::FileStorage::WRITE);
                write(store,"SurfKeypoints",SurfKeypoints);
                write(store,"SiftKeypoints",SiftKeypoints);
                write(store,"OrbKeypoints", OrbKeypoints);
                write(store,"FastKeypoints",FastKeypoints);
                write(store,"SurfDescriptors",SurfDescriptors);
                write(store,"SiftDescriptors",SiftDescriptors);
                write(store,"OrbDescriptors", OrbDescriptors);
                write(store,"FastDescriptors",FastDescriptors);
                store.release();

                surfmap[i->first] = SurfDescriptors;
                siftmap[i->first] = SiftDescriptors;
                orbmap[i->first]  = OrbDescriptors;
                fastmap[i->first] = FastDescriptors;
            }
        }
    } 
    else 
    { // load images from the folder dir
        // First look into the folder to get a list of filenames
        vector<fs::path> ret;
        const char * pstr = dir.c_str();
        fs::path p(pstr);
        get_all(pstr, ret);

        for (int i = 0; i < ret.size(); i++)
        {
            // Load Image via filename
            string fn = ret[i].string();
            istringstream iss(fn);
            vector<string> tokens;
            copy(istream_iterator<string>(iss), istream_iterator<string>(), back_inserter<vector<string> >(tokens));

            // Construct ID from filename
            vector<float> ID;
            for (int i = 0; i < 6; i++) // 6 because there are three location floats and three direction floats
                ID.push_back(::atof(tokens[i].c_str()));
            string imfn = dir + "/" + fn;

            // Read image and add to imagemap.
            Mat m = imread(imfn);
            imagemap[ID] = m;

            // Create Filename for loading Keypoints and descriptors
            string kpfn = kdir + "/";
            for (int j = 0; j < ID.size(); j++)
            {
                kpfn += boost::to_string(ID[j]) + " ";
            }

            kpfn = kpfn+ ".yml";
            
            // Create filestorage item to read from and add to map.
            FileStorage store(kpfn, cv::FileStorage::READ);

            FileNode n1 = store["SurfKeypoints"];
            read(n1,SurfKeypoints);
            FileNode n2 = store["SiftKeypoints"];
            read(n2,SiftKeypoints);
            FileNode n3 = store["OrbKeypoints"];
            read(n3,OrbKeypoints);
            FileNode n4 = store["FastKeypoints"];
            read(n4,FastKeypoints);
            FileNode n5 = store["SurfDescriptors"];
            read(n5,SurfDescriptors);
            FileNode n6 = store["SiftDescriptors"];
            read(n6,SiftDescriptors);
            FileNode n7 = store["OrbDescriptors"];
            read(n7,OrbDescriptors);
            FileNode n8 = store["FastDescriptors"];
            read(n8,FastDescriptors);

            store.release();

            surfmap[ID] = SurfDescriptors;
            siftmap[ID] = SiftDescriptors;
            orbmap[ID]  = OrbDescriptors;
            fastmap[ID] = FastDescriptors;
        }
    }

    TickMeter tm;
    tm.reset();
    cout << "<\n  Analyzing Images ..." << endl;

    // We have a bunch of images, now we compute their grayscale and black and white.
    map<vector<float>, Mat> gsmap;
    map<vector<float>, Mat> bwmap;
    for (map<vector<float>, Mat>::iterator i = imagemap.begin(); i != imagemap.end(); ++i)
    {
        vector<float> ID = i->first;
        Mat Image = i-> second;
        GaussianBlur( Image, Image, Size(5,5), 0, 0, BORDER_DEFAULT );


        gsmap[ID] = averageImage::getPixSumFromImage(Image, divs);
        bwmap[ID] = averageImage::aboveBelow(gsmap[ID]);
    }
    Mat image = imread(image_name);
    Mat gsimage = averageImage::getPixSumFromImage(image, divs);
    Mat bwimage = averageImage::aboveBelow(gsimage);

    // cout << gsimage <<endl;
    imwrite("GS.png", gsimage);
    namedWindow("GSIMAGE (Line 319)");
    imshow("GSIMAGE (Line 319)", gsimage);
    waitKey(0);

    vector<KeyPoint> imgSurfKeypoints;
    vector<KeyPoint> imgSiftKeypoints;
    vector<KeyPoint> imgOrbKeypoints;
    vector<KeyPoint> imgFastKeypoints;
    Mat imgSurfDescriptors;
    Mat imgSiftDescriptors;
    Mat imgOrbDescriptors;
    Mat imgFastDescriptors;

    SurfDetector.detect(image, imgSurfKeypoints);
    SiftDetector.detect(image, imgSiftKeypoints);
    OrbDetector.detect(image, imgOrbKeypoints);
    FastDetector.detect(image, imgFastKeypoints);

    SurfExtractor.compute(image, imgSurfKeypoints, imgSurfDescriptors);
    SiftExtractor.compute(image, imgSiftKeypoints, imgSiftDescriptors);
    OrbExtractor.compute(image, imgOrbKeypoints, imgOrbDescriptors);
    SiftExtractor.compute(image, imgFastKeypoints, imgFastDescriptors); // FAST is detector-only; SIFT descriptors at FAST keypoints


    tm.start();

    cout << ">\n<\n  Comparing Images ..." << endl;

    // We have their features, now compare them!
    map<vector<float>, float> gssim; // Gray Scale Similarity
    map<vector<float>, float> bwsim; // Above Below Similarity
    map<vector<float>, float> surfsim;
    map<vector<float>, float> siftsim;
    map<vector<float>, float> orbsim;
    map<vector<float>, float> fastsim;

    for (map<vector<float>, Mat>::iterator i = gsmap.begin(); i != gsmap.end(); ++i)
    {
        vector<float> ID = i->first;
        gssim[ID] = similarities::getSimilarity(i->second, gsimage);
        bwsim[ID] = similarities::getSimilarity(bwmap[ID], bwimage); 
        surfsim[ID] = similarities::compareDescriptors(surfmap[ID], imgSurfDescriptors);
        siftsim[ID] = similarities::compareDescriptors(siftmap[ID], imgSiftDescriptors);
        orbsim[ID] = 0;//similarities::compareDescriptors(orbmap[ID], imgOrbDescriptors);
        fastsim[ID] = 0;//similarities::compareDescriptors(fastmap[ID], imgFastDescriptors);
    }

    map<vector<float>, int> top;

    bool gotone = false;
    typedef map<vector<float>, int>::iterator iter;

    // Choose the best ones!
    for (map<vector<float>, Mat>::iterator i = imagemap.begin(); i != imagemap.end(); ++i)
    {
        vector<float> ID = i->first;

        int sim = /* gssim[ID] + 0.5*bwsim[ID] + */ 5*surfsim[ID] + 0.3*siftsim[ID] + orbsim[ID] + fastsim[ID];

        // cout << surfsim[ID] << "\t";
        // cout << siftsim[ID] << "\t";
        // cout << orbsim[ID] << "\t";
        // cout << fastsim[ID] << endl;

        if (!gotone)
        {
            top[ID] = sim;
            gotone = true;
        }

        iter it = top.begin();
        iter end = top.end();
        int max_value = it->second;
        vector<float> max_ID = it->first;
        for( ; it != end; ++it) 
        {
            int current = it->second;
            if(current > max_value) 
            {
                max_value = it->second;
                max_ID = it->first;
            }
        }
        // cout << "Sim: " << sim << "\tmax_value: " << max_value << endl;
        if (top.size() < numtoreturn)
            top[ID] = sim;
        else
        {
            if (sim < max_value)
            {
                top[ID] = sim;
                top.erase(max_ID);
            }
        }
    }
    tm.stop();
        double s = tm.getTimeSec();


    cout << ">\n<\n  Writing top " << numtoreturn << " images ..." << endl;

    int count = 1;
    namedWindow("Image");
    namedWindow("Match");
    namedWindow("ImageBW");
    namedWindow("MatchBW");
    namedWindow("ImageGS");
    namedWindow("MatchGS");

    imshow("Image", image);
    imshow("ImageBW", bwimage);
    imshow("ImageGS", gsimage);


    vector<KeyPoint> currentPoints;

    for (iter i = top.begin(); i != top.end(); ++i)
    {
        vector<float> ID = i->first;

        cout << "  Score: "<< i->second << "\tGrayScale: " << gssim[ID] << "\tBW: " << bwsim[ID] << "  \tSURF: " << surfsim[ID] << "\tSIFT: " << siftsim[ID] << endl;
        string fn = "Sim_" + boost::to_string(count) + "_" + boost::to_string(i->second) + ".png";
        imwrite(fn, imagemap[ID]);
        count++;

        normalize(bwmap[ID], bwmap[ID], 0, 255, NORM_MINMAX, CV_64F);
        normalize(gsmap[ID], gsmap[ID], 0, 255, NORM_MINMAX, CV_64F);

        imshow("Match", imagemap[ID]);
        imshow("MatchBW", bwmap[ID]);
        imshow("MatchGS", gsmap[ID]);


        waitKey(0);

    }

    cout << ">\nComparisons took " << s << " seconds for " << imagemap.size() << " images (" 
        << (int) imagemap.size()/s << " images per second)." << endl;

return 0;
}
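
Example #3 caches per-view keypoints and descriptors in YAML files so they can be reloaded instead of recomputed. A minimal round-trip sketch of that caching step, assuming OpenCV 2.4.x and a hypothetical view.jpg:

#include <opencv2/opencv.hpp>
#include <vector>
using namespace cv;

int main()
{
    Mat img = imread("view.jpg", 0);   // hypothetical file name, grayscale
    if (img.empty()) return -1;

    OrbFeatureDetector detector;
    OrbDescriptorExtractor extractor;
    std::vector<KeyPoint> kps;
    Mat desc;
    detector.detect(img, kps);
    extractor.compute(img, kps, desc);

    // Write keypoints and descriptors to YAML, as the per-view cache above does.
    FileStorage out("features.yml", FileStorage::WRITE);
    write(out, "keypoints", kps);
    out << "descriptors" << desc;
    out.release();

    // Read them back into fresh containers.
    std::vector<KeyPoint> kps2;
    Mat desc2;
    FileStorage in("features.yml", FileStorage::READ);
    read(in["keypoints"], kps2);
    in["descriptors"] >> desc2;
    in.release();

    CV_Assert(desc2.rows == (int)kps2.size());   // one descriptor row per keypoint
    return 0;
}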
Example #4
int main() {
	initModule_features2d();
	bool die(false);
	int iter(0);
	int i_snap(0);
	string filename("snapshot");
	string suffix(".png");
	Freenect::Freenect freenect;
	vector<KeyPoint> Keypoints;
	MyFreenectDevice& device = freenect.createDevice<MyFreenectDevice>(0);
	OrbFeatureDetector detector;
	Mat depthMat(Size(640, 480), CV_16UC1);
	Mat bgrMat(Size(640, 480), CV_8UC3, Scalar(0));
	Mat depthf(Size(640, 480), CV_8UC1);
	Mat ownMat(Size(640, 480), CV_8UC3, Scalar(0));
	Mat inFrame(Size(640, 480), CV_8UC3, Scalar(0));
	Mat outFrame(Size(640, 480), CV_8UC3, Scalar(0));
	Mat postProc(Size(640, 480), CV_8UC3, Scalar(0));
	Mat hsvMat(Size(640, 480), CV_8UC3, Scalar(0));
	Mat rgbMat(Size(640, 480), CV_8UC3, Scalar(0));
	Mat threshMat(Size(640, 480), CV_8U, Scalar(0));
	Mat bgrThresh(Size(640, 480), CV_8UC1, Scalar(0));
	Mat greyMat(Size(640, 480), CV_8UC1, Scalar(0));
	
	//HSV
	//Scalar min(0, 32, 0);
	//Scalar max(30, 255, 255);
	
	
	//RGB
	//Scalar min(101, 63, 21);
	//Scalar max(153, 75, 33);
	
	
	//BGR
	//Scalar min(0, 0, 0);
	//Scalar max(255, 0, 0);
	

	namedWindow("Output", CV_WINDOW_AUTOSIZE);
	namedWindow("depth", CV_WINDOW_AUTOSIZE);
	namedWindow("Post", CV_WINDOW_AUTOSIZE);

	device.startVideo();
	device.startDepth();
	
	while (die == false) {
		device.getVideo(bgrMat);

		device.getDepth(depthMat);
		cv::imshow("Output", bgrMat);

		cvtColor(bgrMat, hsvMat, CV_BGR2HSV);
		inRange(hsvMat, Scalar(100, 75, 35), Scalar(140, 255, 255), threshMat);
		morphOps(threshMat);

		adaptiveThreshold( threshMat, bgrThresh, 255, CV_ADAPTIVE_THRESH_MEAN_C, CV_THRESH_BINARY, 3, 1);
		
		detector.detect(bgrThresh, Keypoints);
		drawKeypoints(bgrMat, Keypoints, postProc, Scalar(0, 0, 255)/*, DrawMatchesFlags::DRAW_RICH_KEYPOINTS*/);
		
		cv::imshow("Post", postProc);
		
		//FOR DEBUGGING
		//cv::imshow("Post", bgrThresh);
		//cv::imshow("Post", threshMat);
		
		
		//depthMat.convertTo(depthf, CV_8UC1, 255.0 / 2048.0);
		//cv::imshow("depth", depthf);

		char k = cvWaitKey(5);
		if (k == 27) {
			cvDestroyWindow("rgb");
			cvDestroyWindow("depth");
			cvDestroyWindow("Post");
			break;
		}
		if (k == 8) {
			std::ostringstream file;
			file << filename << i_snap << suffix;
			cv::imwrite(file.str(), bgrMat);
			i_snap++;
		}

		if (iter >= 10000)
			break;
		iter++;

	}
	device.stopVideo();
	device.stopDepth();
	return 0;

}
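
Example #4 isolates a colour range in HSV, cleans the mask, and runs ORB on the binary image. A webcam-based sketch of that pipeline, assuming OpenCV 2.4.x; the external morphOps() helper is replaced here by a plain erode/dilate pair, and the Kinect device by the default camera:

#include <opencv2/opencv.hpp>
#include <vector>
using namespace cv;

int main()
{
    VideoCapture cap(0);            // default webcam instead of libfreenect
    if (!cap.isOpened()) return -1;

    OrbFeatureDetector detector;
    std::vector<KeyPoint> keypoints;
    Mat bgr, hsv, mask, vis;

    while (true)
    {
        cap >> bgr;
        if (bgr.empty()) break;

        cvtColor(bgr, hsv, CV_BGR2HSV);
        // Same blue-ish HSV range as the example above.
        inRange(hsv, Scalar(100, 75, 35), Scalar(140, 255, 255), mask);

        // Stand-in for the external morphOps(): a small erode/dilate to clean the mask.
        Mat k = getStructuringElement(MORPH_RECT, Size(3, 3));
        erode(mask, mask, k);
        dilate(mask, mask, k);

        detector.detect(mask, keypoints);
        drawKeypoints(bgr, keypoints, vis, Scalar(0, 0, 255));
        imshow("Post", vis);

        if ((char)waitKey(5) == 27)   // Esc quits
            break;
    }
    return 0;
}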
Example #5
int main(int argc, char** argv)
{
    int flag_use_image = 0;
    if( argc != 2 )
      {
        std::cout<< "Usage: ./init num" << std::endl;
        std::cout<< "num: 0 - image" << std::endl
                 << "     1 - video" << std::endl
                 << "     2 - dataset" << std::endl;
        return -1;
    }
    else
    {
        std::string val = argv[1];
        if(val == "0")
        {

        }
        else if(val == "1")
        {
            flag_use_image = 1;
        }
        else if(val == "2")
        {
            flag_use_image = 2;
        }
        else
        {
            std::cout<< "num error" << std::endl;
        }
    }

    std::string winName = "Image";
    namedWindow(winName, WINDOW_NORMAL);
    mat_canvas = imread( "data/book.jpg");

    if(mat_canvas.data == NULL)
    {
        std::cout<< "Image is not opened." << std::endl;
        return -1;
    }


    if(flag_use_image == 0)
    {
        setMouseCallback(winName, mouseEvent);

//        // write mat to file
//        std::string fileName = "mat_descriptors.yml";
//        FileStorage fs(fileName, FileStorage::WRITE);
//        fs << "descriptors" << mat_descriptors;
//        fs.release();
//        std::cout<< fileName << " is generated." << std::endl;

//        Mat copy;
//        FileStorage fs2("mat_descriptors.yml", FileStorage::READ);
//        fs2["descriptors"] >> copy;
//        fs2.release();

//        FileStorage fs3("test.yml", FileStorage::WRITE);
//        fs3 << "descriptors" << copy;
//        fs3.release();


        //////////////////////////////////////////////////////////
//        std::vector<cv::Point3f> vec_pois;
//        vec_pois.push_back(Point3f(0, 0, 0));
//        vec_pois.push_back(Point3f(1.1, 0.1, 0));
//        vec_pois.push_back(Point3f(0.3, 2.1, 0));
//        vec_pois.push_back(Point3f(7.3, 2, 0));
//        vec_pois.push_back(Point3f(1.3, 4.1, 0));

//        FileStorage fs3("POIs.yml", FileStorage::WRITE);
//        fs3 << "POIs" << vec_pois;
//        fs3.release();

        //////////////////////////////////////////////////////////




        while(1)
        {
            imshow(winName, mat_canvas );

            waitKey(30);
        }

    }
    //-- use dataset
    else if(flag_use_image == 2)
    {
        useDataset();

        while(1)
        {

            imshow(winName, mat_canvas );

            waitKey(30);
        }

    }
    else // video input: tracking features
    {
        VideoCapture cap;

        cap.open(1);
        if(!cap.isOpened())  // check if we succeeded
            return -1;
        cap.set(CV_CAP_PROP_FRAME_WIDTH, 800);
        cap.set(CV_CAP_PROP_FRAME_HEIGHT, 600);


        namedWindow("Keypoints", WINDOW_NORMAL);
        Mat mat_image;
        int num_vecKeypoints;
        int num_trackingPoints = 50;
        Mat mat_descriptors;

        char keyInput;

        //-- Step 1: Detect the keypoints using Detector
        // int minHessian = 400;

        OrbFeatureDetector detector;
        FREAK extractor;

        while(1)
        {
            cap >> mat_image;

            std::vector<KeyPoint> vec_keypoints, vec_goodKeypoints;

            detector.detect( mat_image, vec_keypoints );
            num_vecKeypoints = vec_keypoints.size();

            std::sort(vec_keypoints.begin(), vec_keypoints.end(),
                      jlUtilities::sort_feature_response);

            if(num_vecKeypoints > num_trackingPoints)
            {
                num_vecKeypoints = num_trackingPoints;
                vec_keypoints.erase(vec_keypoints.begin() + num_vecKeypoints,
                                   vec_keypoints.end());
            }


            extractor.compute( mat_image, vec_keypoints, mat_descriptors );


            // write mat to file
            std::string fileName = "mat_descriptors.yml";
            FileStorage fs(fileName, FileStorage::WRITE);
            fs << "descriptors" << mat_descriptors;
            fs.release();
            std::cout<< fileName << " is generated." << std::endl;

    //        Mat copy;
    //        FileStorage fs2("mat_descriptors.yml", FileStorage::READ);
    //        fs2["descriptors"] >> copy;
    //        fs2.release();

    //        FileStorage fs3("test.yml", FileStorage::WRITE);
    //        fs3 << "descriptors" << copy;
    //        fs3.release();


            //////////////////////////////////////////////////////////
    //        std::vector<cv::Point3f> vec_pois;
    //        vec_pois.push_back(Point3f(0, 0, 0));
    //        vec_pois.push_back(Point3f(1.1, 0.1, 0));
    //        vec_pois.push_back(Point3f(0.3, 2.1, 0));
    //        vec_pois.push_back(Point3f(7.3, 2, 0));
    //        vec_pois.push_back(Point3f(1.3, 4.1, 0));

    //        FileStorage fs3("POIs.yml", FileStorage::WRITE);
    //        fs3 << "POIs" << vec_pois;
    //        fs3.release();

            //////////////////////////////////////////////////////////

            //-- Draw keypoints
            Mat mat_kpImage;

            drawKeypoints( mat_image, vec_keypoints, mat_kpImage,
                           Scalar::all(-1), DrawMatchesFlags::DEFAULT );

            for (int i = 0; i < (int)vec_keypoints.size(); i++) { // FREAK may drop keypoints, so don't assume num_trackingPoints remain
                cv::circle(mat_kpImage,
                    vec_keypoints[i].pt,	// center
                    3,							// radius
                    cv::Scalar(0,0,255),		// color
                    -1);						// negative thickness=filled

                char szLabel[50];
                sprintf(szLabel, "%d", i);
                putText (mat_kpImage, szLabel, vec_keypoints[i].pt,
                    cv::FONT_HERSHEY_PLAIN, // font face
                    1.0,					// font scale
                    cv::Scalar(255,0,0),	// font color
                    1);						// thickness
            }


            //-- Show detected (drawn) keypoints
            imshow("Keypoints", mat_kpImage );

            waitKey(30);
        }


    }


    return 0;
}
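
The video branch above keeps only the strongest ORB responses before computing FREAK descriptors. Since jlUtilities::sort_feature_response is an external helper, this sketch (assuming OpenCV 2.4.x) uses an inline comparator and a single still image instead of the capture loop:

#include <opencv2/opencv.hpp>
#include <algorithm>
#include <vector>
using namespace cv;

static bool byResponse(const KeyPoint& a, const KeyPoint& b)
{
    return a.response > b.response;   // strongest keypoints first
}

int main()
{
    Mat img = imread("data/book.jpg", 0);   // grayscale; path reused from the example
    if (img.empty()) return -1;

    OrbFeatureDetector detector;
    FREAK extractor;

    std::vector<KeyPoint> kps;
    detector.detect(img, kps);
    std::sort(kps.begin(), kps.end(), byResponse);

    const size_t N = 50;                    // keep the 50 strongest responses
    if (kps.size() > N)
        kps.erase(kps.begin() + N, kps.end());

    Mat desc;
    extractor.compute(img, kps, desc);      // FREAK may drop keypoints near the border

    Mat vis;
    drawKeypoints(img, kps, vis, Scalar::all(-1));
    imshow("Keypoints", vis);
    waitKey(0);
    return 0;
}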
Example #6
StitchedMap::StitchedMap(Mat &img1, Mat &img2, int max_trans, int max_rotation, float max_pairwise_distance, cv::Mat oldTransform)
{
  // load images, TODO: check that they're grayscale
  image1 = img1.clone();
  image2 = img2.clone();
  if(image1.size != image2.size)
      cv::resize(image2,image2,image1.size());
  works = true;
  // create feature detector set.
  OrbFeatureDetector detector;
  OrbDescriptorExtractor dexc;
  BFMatcher dematc(NORM_HAMMING, false);

  // 1. extract keypoints
  detector.detect(image1, kpv1);
  detector.detect(image2, kpv2);

  // 2. extract descriptors
  dexc.compute(image1, kpv1, dscv1);
  dexc.compute(image2, kpv2, dscv2);

  // 3. match keypoints
  if(kpv1.size() == 0|| kpv2.size() == 0)
  {
      ROS_WARN("No KPV");
      works = false;
      return;
  }
//  ROS_INFO("Kpv1:%i entries\t Kpv2:%i entries",kpv1.size(),kpv2.size());
  dematc.match(dscv1, dscv2, matches);

  // 4. find matching point pairs with same distance in both images
  for (size_t i=0; i<matches.size(); i++) {
    KeyPoint a1 = kpv1[matches[i].queryIdx],
             b1 = kpv2[matches[i].trainIdx];

    if (matches[i].distance > 30)
      continue;

    for (size_t j=0; j<matches.size(); j++) {
      KeyPoint a2 = kpv1[matches[j].queryIdx],
               b2 = kpv2[matches[j].trainIdx];

      if (matches[j].distance > 30)
        continue;

      if ( fabs(norm(a1.pt-a2.pt) - norm(b1.pt-b2.pt)) > max_pairwise_distance ||
           fabs(norm(a1.pt-a2.pt) - norm(b1.pt-b2.pt)) == 0)
        continue;


      coord1.push_back(a1.pt);
      coord1.push_back(a2.pt);
      coord2.push_back(b1.pt);
      coord2.push_back(b2.pt);


      fil1.push_back(a1);
      fil1.push_back(a2);
      fil2.push_back(b1);
      fil2.push_back(b2);
    }
  }

   // cv::imwrite("img1.pgm",image1);
   // cv::imwrite("img2.pgm",image2);
  // 5. find homography
 // ROS_INFO("Found %i matches",matches.size());
  if(coord1.size() < 1)
  {
      ROS_WARN("Problem by transforming map,this migth just an start up problem \n Coord1:%lu",coord1.size());
      works = false;
      return;
  }

  ROS_DEBUG("Compute estimateRigid");
  H = estimateRigidTransform(coord2, coord1,false);
  if(H.empty())
  {
      ROS_WARN("H contain no data, cannot find valid transformation");
      works = false;
      return;
  }
  //ROS_DEBUG("H: size:%lu|empty:%i",H.size,H.empty());

  rotation = 180./M_PI*atan2(H.at<double>(0,1),H.at<double>(1,1));
  transx   = H.at<double>(0,2);
  transy   = H.at<double>(1,2);
  scalex   = sqrt(pow(H.at<double>(0,0),2)+pow(H.at<double>(0,1),2));
  scaley   = sqrt(pow(H.at<double>(1,0),2)+pow(H.at<double>(1,1),2));
  ROS_DEBUG("H: transx:%f|transy%f|scalex:%f,scaley:%f|rotation:%f",transx,transy,scalex,scaley,rotation);
  //first_x_trans = transx;
  //first_y_trans = transy;
  float scale_change = 0.05;

  if(scalex > 1 + scale_change || scaley > 1 + scale_change)
  {
      ROS_WARN("Map should not scale change is to lagre");
      works = false;
      return;
  }
  if(scalex < 1 - scale_change|| scaley < 1 - scale_change)
  {
      ROS_WARN("Map should not scale change is to small");
      works = false;
      return;
  }
  if(max_trans != -1)
  {
      if(transx > max_trans || transy > max_trans)
      {
          ROS_WARN("Map should not trans so strong");
          works = false;
          return;
      }
  }
  if(max_rotation != -1)
  {
      if(rotation > max_rotation || rotation < -1 * max_rotation)
      {
          ROS_WARN("Map should not rotate so strong");
          works = false;
          return;
      }
  }
  cur_trans = H;
  ROS_DEBUG("Finished estimateRigid");
  // evaluate the transformation
  if(works)
  {
      Mat tmp (image2.size(),image2.type());
      Mat thr;

      Mat image(image2.size(), image2.type());
      warpAffine(image2,image,H,image.size());
      addWeighted(image,.5,image1,.5,0.0,image);

      warpAffine(image2,tmp,H,image2.size());
      addWeighted(tmp,.5,image1,.5,0.0,image);
      //cvtColor(image1,tmp,CV_BGR2GRAY);
      threshold(tmp,thr,0,255,THRESH_BINARY);
      Mat K=(Mat_<uchar>(5,5)<<   0,  0,  1,  0,  0,\
                                     0,  0,  1,  0,  0,\
                                     1,  1,  1,  1,  1,\
                                     0,  0,  1,  0,  0,\
                                     0,  0,  1,  0,  0);

      erode(thr,thr,K,Point(-1,-1),1,BORDER_CONSTANT);
         vector< vector <Point> > contours; // Vector for storing contour
         vector< Vec4i > hierarchy;
         findContours( thr, contours, hierarchy,CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );
         for( int i = 0; i< contours.size(); i++ )
         {
            Rect r= boundingRect(contours[i]);
            rects2.push_back(r);
            rectangle(tmp,Point(r.x,r.y), Point(r.x+r.width,r.y+r.height), Scalar(0,0,255),2,8,0);
          }//Opened contour

         Mat thr1;
         //cvtColor(image1,tmp,CV_BGR2GRAY);
         threshold(image1,thr1,0,255,THRESH_BINARY);
         erode(thr1,thr1,K,Point(-1,-1),1,BORDER_CONSTANT);
            vector< vector <Point> > contours1; // Vector for storing contour
            vector< Vec4i > hierarchy1;
            findContours( thr1, contours1, hierarchy1,CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );

            for( int i = 0; i< contours1.size(); i++ ){
             Rect r= boundingRect(contours1[i]);
             rects1.push_back(r);
             //rectangle(image1,Point(r.x,r.y), Point(r.x+r.width,r.y+r.height), Scalar(0,0,255),2,8,0);
             }//Opened contour
         vector<Rect> near1,near2;
         int offset = 5;
         for(int i = 0; i < rects1.size(); i++)
         {
             Rect ri = rects1.at(i);
             if(ri.x == 1 && ri.y == 1)
                 continue;
             for(int j = 0; j < rects2.size();j++)
             {
                 Rect rj = rects2.at(j);
                 if(ri.x < rj.x + offset && ri.x > rj.x -offset)
                 {
                     if(ri.y < rj.y + offset && ri.y > rj.y -offset)
                     {
                         near1.push_back(ri);
                         near2.push_back(rj);
                     }
                 }
             }
         }
         double eudis = 0;
         double disX,disY;
         for(int i = 0; i < near1.size(); i++)
         {
             Rect ri = near1.at(i);
             Rect rj = near2.at(i);
             disX = ri.x - rj.x;
             disY = ri.y - rj.y;
             if(disX < 0)
                 disX = disX * (-1);
             if(disY < 0)
                 disY = disY * (-1);
             eudis += sqrt((disX*disX)+(disY*disY));
         }
         if(near1.size() < 2)
             eudis = -1;
         else
             eudis = eudis / near1.size();
         ROS_DEBUG("EudisNew:%f\t near1Size:%lu:\toldTran:%i",eudis,near1.size(),oldTransform.empty());
         //calc EudisOld

         Mat thr3,tmp1;
         //cvtColor(image1,tmp,CV_BGR2GRAY);
         if(oldTransform.empty())
             return;
         warpAffine(image2,tmp1,oldTransform,image2.size());
         threshold(tmp1,thr3,0,255,THRESH_BINARY);

         erode(thr3,thr3,K,Point(-1,-1),1,BORDER_CONSTANT);
            vector< vector <Point> > contours3; // Vector for storing contour
            vector< Vec4i > hierarchy3;
            findContours( thr3, contours3, hierarchy3,CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );

            for( int i = 0; i< contours3.size(); i++ ){
             Rect r= boundingRect(contours3[i]);
             rects3.push_back(r);
            }//Opened contour
            near1.clear();
            near2.clear();
            for(int i = 0; i < rects1.size(); i++)
            {
                Rect ri = rects1.at(i);
                if(ri.x == 1 && ri.y == 1)
                    continue;
                for(int j = 0; j < rects3.size();j++)
                {
                    Rect rj = rects3.at(j);
                    if(ri.x < rj.x + offset && ri.x > rj.x -offset)
                    {
                        if(ri.y < rj.y + offset && ri.y > rj.y -offset)
                        {
                            near1.push_back(ri);
                            near2.push_back(rj);
                        }
                    }
                }
            }
            double eudisOLD = 0;
            for(int i = 0; i < near1.size(); i++)
            {
                Rect ri = near1.at(i);
                Rect rj = near2.at(i);
                disX = ri.x - rj.x;
                disY = ri.y - rj.y;
                if(disX < 0)
                    disX = disX * (-1);
                if(disY < 0)
                    disY = disY * (-1);
                eudisOLD += sqrt((disX*disX)+(disY*disY));
            }
            if(near1.size() < 2)
                eudisOLD = -1;
            else
                eudisOLD = eudisOLD / near1.size();
            //if(eudisOLD < eudis)
               // works = false;
            ROS_WARN("EudisOLD:%f\t near1Size:%lu:|works:%i",eudis,near1.size(),works);
            //calc EudisOld
         /*  for(int i = 0; i < rects1.size(); i++)
           {
               Rect r = rects1.at(i);
               rectangle(image1,Point(r.x,r.y), Point(r.x+r.width,r.y+r.height), Scalar(0,0,255),2,8,0);
            }*/

  }
  return;
}
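
StitchedMap derives rotation, translation and scale from the 2x3 matrix returned by estimateRigidTransform. A minimal sketch of that estimation and decomposition on a hypothetical set of point correspondences, assuming OpenCV 2.4.x:

#include <opencv2/opencv.hpp>
#include <cstdio>
#include <cmath>
#include <vector>
using namespace cv;

int main()
{
    // Hypothetical correspondences: points in image2 (source) and image1 (destination).
    std::vector<Point2f> src, dst;
    src.push_back(Point2f(10, 10));   dst.push_back(Point2f(14, 12));
    src.push_back(Point2f(100, 20));  dst.push_back(Point2f(104, 22));
    src.push_back(Point2f(50, 120));  dst.push_back(Point2f(54, 122));
    src.push_back(Point2f(200, 150)); dst.push_back(Point2f(204, 152));

    // false = restricted affine (rotation, uniform scale, translation), as in StitchedMap.
    Mat H = estimateRigidTransform(src, dst, false);
    if (H.empty()) return -1;   // estimation can fail on degenerate correspondences

    double rotation = 180.0 / CV_PI * atan2(H.at<double>(0,1), H.at<double>(1,1));
    double transx   = H.at<double>(0,2);
    double transy   = H.at<double>(1,2);
    double scalex   = sqrt(pow(H.at<double>(0,0), 2) + pow(H.at<double>(0,1), 2));
    double scaley   = sqrt(pow(H.at<double>(1,0), 2) + pow(H.at<double>(1,1), 2));

    printf("rotation=%.2f deg  trans=(%.1f, %.1f)  scale=(%.3f, %.3f)\n",
           rotation, transx, transy, scalex, scaley);
    return 0;
}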