int main(int argc, char** argv){ std::vector<std::pair<std::string,double> > myFileWeights; std::vector<bool> IsTtBar; std::string anaName; std::pair<std::string,double> inputNameWeight; inputNameWeight.first = ""; inputNameWeight.second = 0.; double Luminosity = 0; double intialBeforePresel = 0; double XSec = 0; bool isTtBar = false; double doJES = 1.; MassAndResolution myMassResolution; myMassResolution.topMass = -1; myMassResolution.topRes = -1; myMassResolution.wMass = -1; myMassResolution.wRes = -1; double b1 = 1000.; double b2 = 100.; double b3 = 10.; double extFdata = -1.; for (int f = 1; f < argc; f++) { std::string arg_fth(*(argv + f)); // cout<<"-------- "<< arg_fth<<endl; if (arg_fth == "out") { f++; std::string out(*(argv + f)); anaName = string(out); }else if (arg_fth == "JES") { f++; std::string out(*(argv + f)); doJES = atof(out.c_str()); }else if (arg_fth == "WMass") { f++; std::string out(*(argv + f)); myMassResolution.wMass = atof(out.c_str()); }else if (arg_fth == "WRes") { f++; std::string out(*(argv + f)); myMassResolution.wRes = atof(out.c_str()); }else if (arg_fth == "TopMass") { f++; std::string out(*(argv + f)); myMassResolution.topMass = atof(out.c_str()); }else if (arg_fth == "TopRes") { f++; std::string out(*(argv + f)); myMassResolution.topRes = atof(out.c_str()); }else if (arg_fth == "b1") { f++; std::string out(*(argv + f)); b1 = atof(out.c_str()); }else if (arg_fth == "b2") { f++; std::string out(*(argv + f)); b2 = atof(out.c_str()); }else if (arg_fth == "b3") { f++; std::string out(*(argv + f)); b3 = atof(out.c_str()); }else if (arg_fth == "Fdata") { f++; std::string out(*(argv + f)); extFdata = atof(out.c_str()); }else if (arg_fth == "input") { f++; std::string in(*(argv + f)); if(in == "Signal.root" || in == "OtherTtBar.root" || in == "0.9/FS_Signal_0.9.root" || in == "1.1/FS_Signal_1.1.root" ||in == "0.9/FS_OtherTt_0.9.root" ||in == "1.1/FS_OtherTt_1.1.root") isTtBar = true; IsTtBar.push_back(isTtBar); // in ="/home/ajafari/rootfiles/TopTrees/7TeV/July10/SystematicSamples/FullSelection/JES"+in; in ="/home/ajafari/rootfiles/TopTrees/7TeV/July10/Skimmed/FullSelection/FS_ECalDriven_"+in; inputNameWeight.first = in; // cout<<inputNameWeight.first<<endl; }else if(arg_fth == "XSec"){ f++; std::string Xsec(*(argv + f)); XSec = atof(Xsec.c_str()); }else if(arg_fth == "Lumi"){ f++; std::string Lumi(*(argv + f)); Luminosity = atof(Lumi.c_str()); }else if(arg_fth == "Initial"){ f++; std::string intialBeforePresel_(*(argv + f)); intialBeforePresel = atof(intialBeforePresel_.c_str()); }else if(arg_fth == "nextinput"){ f++; inputNameWeight.second = (double)(XSec*Luminosity)/(double)intialBeforePresel; myFileWeights.push_back(inputNameWeight); intialBeforePresel = 0; XSec = 0; isTtBar = false; std::string in(*(argv + f)); if(in == "Signal.root" || in == "OtherTtBar.root" || in == "0.9/FS_Signal_0.9.root" || in == "1.1/FS_Signal_1.1.root" ||in == "0.9/FS_OtherTt_0.9.root" ||in == "1.1/FS_OtherTt_1.1.root") isTtBar = true; IsTtBar.push_back(isTtBar); in ="/home/ajafari/rootfiles/TopTrees/7TeV/July10/Skimmed/FullSelection/FS_ECalDriven_"+in; // in ="/home/ajafari/rootfiles/TopTrees/7TeV/July10/SystematicSamples/FullSelection/JES"+in; inputNameWeight.first = in; // cout<<inputNameWeight.first<<endl; inputNameWeight.second = 0.; }else if(arg_fth == "lastinput"){ f++; inputNameWeight.second = (double)(XSec*Luminosity)/(double)intialBeforePresel; myFileWeights.push_back(inputNameWeight); break; } } ElectronCuts myElecCuts; myElecCuts.Pt = 30; myElecCuts.eta = 2.4; 
myElecCuts.ExLow = 1.442; myElecCuts.ExHigh = 1.56; myElecCuts.Id = "VBTF70"; myElecCuts.IdSecond = "VBTF95"; myElecCuts.IsoType = ""; myElecCuts.D0 = 0.02; myElecCuts.IsoCut = 0.1; JetCuts myJetCuts; myJetCuts.bTagAlgo = "TCHE"; myJetCuts.Pt = 30.; myJetCuts.eta = 2.4; myJetCuts.nCaloTower = 5; myJetCuts.EmfUp = 0.9; myJetCuts.EmfLow = 0.05; myJetCuts.fhpd = 1000.; myJetCuts.N90 = -1; myJetCuts.bTagCut = 4.; // // MassAndResolution myMassResolution; // myMassResolution.topMass = 180.6; // myMassResolution.topRes = 3.462; // myMassResolution.wMass = 88.76; // myMassResolution.wRes = 10.73; if(myMassResolution.topMass == -1 ||myMassResolution.wRes == -1 ||myMassResolution.wMass == -1 ||myMassResolution.topRes == -1 ) return -1; cout<<myMassResolution.topMass <<"\t"<<myMassResolution.topRes<<"\t"<<myMassResolution.wMass<<"\t"<<myMassResolution.wRes<<endl; bool ExtJets = true; // // PtWeightEffHandler * WeightHandler_1 = new PtWeightEffHandler("expo_1",1); WeightHandler_1->Condition = FunctionBtag_MC; WeightHandler_1->ResetConditionForAllProps(); WeightHandler_1->set2Ds(); ////// PtWeightEffHandler * WeightHandler_2 = new PtWeightEffHandler("expo_2",2); WeightHandler_2->Condition = FunctionBtag_MC; WeightHandler_2->ResetConditionForAllProps(); WeightHandler_2->set2Ds(); //// PtWeightEffHandler * WeightHandler_5 = new PtWeightEffHandler("landau_5",5); WeightHandler_5->Condition = FunctionBtag_MC; WeightHandler_5->ResetConditionForAllProps(); WeightHandler_5->set2Ds(); //// PtWeightEffHandler * WeightHandler_4 = new PtWeightEffHandler("landau_4",4); WeightHandler_4->Condition = FunctionBtag_MC; WeightHandler_4->ResetConditionForAllProps(); WeightHandler_4->set2Ds(); // MCAnalysis * myMCAnalysis = new MCAnalysis("FmcMC_Chi2Cut10000",50., 160.,160.,280.,/*chi2Cut*/10000.,/*topPtcut*/0., // /*ExtJets*/ExtJets); DataAnalysis * myDataAnalysis = new DataAnalysis(("FdataData_"+anaName), b1, b2,b2,b3,/*chi2cut*/100000.,/*topPtcut*/0., /*doAntiTag*/false,/*AntiTagCut*/1000.); MCAnalysis * myMCAnalysis = new MCAnalysis(string("FmcMC_"+anaName), b1, b2,b2,b3,/*chi2cut*/100000.,/*topPtcut*/0.); AnalysisHandler Fullanalysis("",/*dobTag*/false,/*doBtagDataLike*/false,/*ExtJets*/true,/*Lumi*/100., myMassResolution,myElecCuts,myJetCuts,"minChi2"); myDataAnalysis->AddWeighedHandlersToRightRegion(WeightHandler_1); myDataAnalysis->AddWeighedHandlersToRightRegion(WeightHandler_2); myDataAnalysis->AddWeighedHandlersToRightRegion(WeightHandler_4); myDataAnalysis->AddWeighedHandlersToRightRegion(WeightHandler_5); // myMCAnalysis->AddWeighedHandlersToRightRegion(WeightHandler_30); // myMCAnalysis->AddWeighedHandlersToRightRegion(WeightHandler_31); // myMCAnalysis->AddWeighedHandlersToRightRegion(WeightHandler_4); // myMCAnalysis->AddWeighedHandlersToRightRegion(WeightHandler_5); Fullanalysis.AddAnalysis(myDataAnalysis); Fullanalysis.AddAnalysis(myMCAnalysis); Fullanalysis.setVerbosity(0); for(uint nf = 0; nf < myFileWeights.size(); nf++){ std::string fname = myFileWeights.at(nf).first; cout<<fname<<endl; double weight = myFileWeights.at(nf).second; cout<<weight<<endl; TFile * F = new TFile(myFileWeights.at(nf).first.c_str(),"read"); if(F == NULL) cout<<"Bad file pointer"<<endl; TTree* runTree = (TTree*) F->Get("runTree"); TTree* eventTree = (TTree*) F->Get("eventTree"); PracticalEvent * pracEvt = NULL; if(IsTtBar.at(nf)) pracEvt = new PracticalEvent(eventTree,runTree,true); else pracEvt = new PracticalEvent(eventTree,runTree); int i = 0; while (pracEvt->Next()){ i++; //// cout<<"event number : "<<i<<endl; 
Fullanalysis.Analyze(pracEvt,weight,doJES); // if(i == 100) // break; } } double Fmc = myMCAnalysis->getF(); double Fdata = myDataAnalysis->getF(); if(extFdata != -1.){ Fullanalysis.setAnalysisNonTrivialF(1,extFdata); Fdata = extFdata; } Fullanalysis.End(); cout<<"\t\t--------------------------------"<<endl; cout<<"\t\tFmc = "<<Fmc<<endl; cout<<"\t\tFdata = "<<Fdata<<endl; // cout<<"\t\tFdata_RW = "<<Fullanalysis.f.at(0)<<endl; cout<<"\t\t--------------------------------"<<endl; return 0; };
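// Hypothetical invocation of the option parser in the main() above (the executable name and
// all numeric values are placeholders rather than settings from the original analysis; the
// file names are two of the ttbar inputs the parser explicitly recognises):
//
//   ./FdataAnalysis out nominal JES 1.0 TopMass 172.5 TopRes 11.5 WMass 84.0 WRes 10.0 \
//                   Lumi 36.1 input Signal.root XSec 157.5 Initial 100000 \
//                   nextinput OtherTtBar.root XSec 157.5 Initial 100000 lastinput
//
// Each input file must be followed by its own XSec and Initial tokens: the per-file weight
// XSec*Lumi/Initial is only evaluated when the following "nextinput" or "lastinput" keyword
// is parsed, so the ordering of the tokens matters.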
int main(int argc, char *argv[])
{
    //Determine whether or not you should show the matches and print the ROC curve
    //(note: visible already defaults to true, so the -visual flag is effectively a no-op)
    bool visible = true;
    if (argc == 2){
        string arg(argv[1]);
        if (arg == "-visual") visible = true;
    }

    //Create a vector of the test images
    vector<TestImage> test_images;
    string filename;
    int left = 120;
    int right = 120;
    int direction = 0;
    vector<TestData> classification;

    if(visible){
        cvNamedWindow("1D SURF", CV_WINDOW_AUTOSIZE );
        cvMoveWindow("1D SURF", 0, 0);
    }

    //At this point, all of the images have been loaded. They now need to be broken into
    //the various datasets

    //*****************MATCHING*********************************
    //The number of images in each dataset
    int jpegCounter = 0;
    int jpegCounter1 = 0;
    //Use the terminal or not
    bool terminal = true;
    //Set the date and time
    string myDate = "10082012";
    string myTime = "1028";
    //Matching parameters
    int k_start = 0;
    int k_end = 0;
    int s_start = 0;
    int s_end = 0;
    int index = 0;

    //NB *************************************
    //CHOOSE THE DATASET TO USE
    //Counters used to ensure standard output for processing in Matlab
    int tempDirCounterkk = 0;
    int tempDirCounterss = 0;
    datasetType Type = MAIN_ROBOCUP_TESTING_DATASETS;
    //Set if you are matching or not matching
    bool isMatching = true;
    // MAIN_ROBOCUP_TESTING_DATASETS
    // OFFICE_ENVIRONMENT_DATASETS
    // LARGE_HALL_ENVIRONMENT_DATASETS
    if(isMatching)
    {
        if (Type==MAIN_ROBOCUP_TESTING_DATASETS)//The original dataset
        {
            //Counters used to ensure standard output for processing in Matlab
            tempDirCounterkk = 1; tempDirCounterss = 1;
            k_start = 1; k_end = 4;
        }
        else if (Type==OFFICE_ENVIRONMENT_DATASETS)//Additional datasets
        {
            //Counters used to ensure standard output for processing in Matlab
            tempDirCounterkk = 5; tempDirCounterss = 5;
            k_start = 17; k_end = 18;
        }
        else if(Type==LARGE_HALL_ENVIRONMENT_DATASETS)
        {
            //Counters used to ensure standard output for processing in Matlab
            tempDirCounterkk = 5; tempDirCounterss = 5;
            k_start = 19; k_end = 20;
        }
    }
    else
    {
        if (Type==MAIN_ROBOCUP_TESTING_DATASETS)
        {
            //Counters used to ensure standard output for processing in Matlab
            tempDirCounterkk = 1; tempDirCounterss = 3;
            k_start = 1; k_end = 2;
            s_start = 3; s_end = 4;
        }
        else if (Type==OFFICE_ENVIRONMENT_DATASETS)
        {
            //Counters used to ensure standard output for processing in Matlab
            tempDirCounterkk = 5; tempDirCounterss = 6;
            k_start = 17; k_end = 17;
            s_start = 18; s_end = 18;
        }
        else if (Type == LARGE_HALL_ENVIRONMENT_DATASETS){
            //Counters used to ensure standard output for processing in Matlab
            tempDirCounterkk = 5; tempDirCounterss = 6;
            k_start = 19; k_end = 19;
            s_start = 20; s_end = 20;
        }
    }

    //Create object for dataAnalysis
    DataAnalysis dataAnalysis;

    //Set the various directories for matching and non-matching respectively
    for(int kk=k_start;kk<=k_end;kk++)
    {
        if(isMatching){
            //The counter must be reset for formatting purposes in Matlab
            //(note: because of the outer isMatching check, the else branch below is unreachable)
            if(isMatching){
                s_start = s_end = kk;
                tempDirCounterss = kk;//kk for matching
            }
            else{
                tempDirCounterss = 3;// 3 for non-matching
            }
        }
        for (int ss = s_start;ss<=s_end;ss++)
        {
            //The directory where the files are stored
            string dir, dir1;
            dir = to_string<int>(kk);
            dir1 = to_string<int>(ss);
            string fname1;
            string fname2;
            string name1;
            string name2;
            std::string tempDir = to_string<int>(tempDirCounterkk);
            std::string tempDir1 = to_string<int>(tempDirCounterss);

            //Set the directory names and determine the number of images in each directory
            jpegCounter = dataAnalysis.getNumImagesInDirectory(&dir, terminal);
            jpegCounter1 = dataAnalysis.getNumImagesInDirectory(&dir1, terminal);
            cout<<"The directory is: "<<dir<<endl;
            cout<<"The second directory is: "<<dir1<<endl;
            cout<<"The number of images in the directory 1,2 is: "<<jpegCounter<<", "<<jpegCounter1<<endl;

            // MAIN_ROBOCUP_TESTING_DATASETS
            // OFFICE_ENVIRONMENT_DATASETS
            // LARGE_HALL_ENVIRONMENT_DATASETS
            //The file to store the matching data
            string filename = "data/Matches/";
            if (Type==MAIN_ROBOCUP_TESTING_DATASETS)
                filename.append("nonmatching_matching_Data__");
            else if (Type==OFFICE_ENVIRONMENT_DATASETS)
                filename.append("dataset2_nonmatching_matching_Data__");
            else if (Type==LARGE_HALL_ENVIRONMENT_DATASETS)
                filename.append("dataset3_nonmatching_matching_Data__");
            filename.append("SURF1D");
            filename.append("_Euclidean_");
            filename.append(myDate);
            filename.append("_");
            filename.append(myTime);
            filename.append("_");
            filename.append(to_string<double>(THRES));
            filename.append("_");
            filename.append("_given");
            filename.append(".txt");
            cout<<filename<<endl;

            //Remember that for non-matches, we can compare 1,1;2,2;3,3...etc
            //Determine matches without repetition
            for (int ii = 1;ii<=jpegCounter;ii++)
            {
                if(isMatching)
                    index = ii;
                else
                    index = jpegCounter1;
                for (int jj = 1; jj<index;jj++)
                {
                    cout<<"Image "<<ii<<", image "<<jj<<endl;

                    //Process the first image
                    name1 = to_string<int>(ii);
                    fname1 = dir + "/"+ name1+".jpg";
                    //cout<<"Directory: "<<fname1<<endl;
                    TestImage image1 = TestImage(cvLoadImage(fname1.c_str() ),left,right,direction,dir);
                    //Compute the descriptors for image 1
                    surfDetDes(image1);

                    timespec times, timee;
                    clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &times);

                    //Start processing the second image. This image will be timed.
                    name2 = to_string<int>(jj);
                    //Load the images from the dataset
                    fname2 = dir1 + "/"+ name2+".jpg";
                    //Assign the test images
                    TestImage image2 = TestImage(cvLoadImage(fname2.c_str() ),left,right,direction,dir1);
                    //Run the 1D SURF feature extraction algorithm
                    //Compute the descriptors for image 2
                    surfDetDes(image2);

                    float result = 0;
                    IplImage* display;
                    //Calculate the matches
                    result = visualStaticMatch(image1, image2, visible, display);
                    //cout<<result<<endl;
                    clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &timee);
                    Ipoint::overallTime = Ipoint::diffSurf(times, timee).tv_nsec/1000000.0f;

                    if(visible){
                        cvShowImage("1D SURF", display);
                        cvWaitKey(0);
                        cv::imwrite("SURFtest/myImages/nonmatching.jpg",display);
                        cvReleaseImage(&display);
                    }

                    //Write the data to a file
                    ofstream writeFile;
                    writeFile.open(filename.c_str(), ios::app);
#if (DEBUG_TIMES)
                    cout<<"Total Matches: "<<Ipoint::totalNumMatches<<endl;
                    cout<<"Num valid matches: "<<Ipoint::numValidMatches<<endl;
                    cout<<"Detection Time: "<<Ipoint::detectionTime<<endl;
                    cout<<"Extraction Time: "<<Ipoint::extractionTime<<endl;
                    cout<<"Matching Time: "<<Ipoint::matchingTime<<endl;
                    cout<<"Verification Time: "<<Ipoint::verificationTime<<endl;
                    cout<<"Overall Time: "<<Ipoint::overallTime<<endl;
#endif
                    writeFile <<tempDir<<", "<<tempDir1<<", "<<325<<", "<<name1<<", "<<name2<<", "
                              <<image1.ipts.size()<<", "<<image2.ipts.size()<<", "<<result<<", "<<0<<", "
                              <<Ipoint::totalNumMatches<<", "<<Ipoint::numValidMatches<<","
                              <<Ipoint::totalNumMatches -Ipoint::numValidMatches<<", "<<0<<", "
                              <<Ipoint::detectionTime<<", "<<Ipoint::extractionTime<<", "
                              <<Ipoint::matchingTime<<", "<<Ipoint::verificationTime<<", "
                              <<Ipoint::overallTime<<"\n";
                    //close the file
                    writeFile.close();
                    writeFile.clear();

                    cvReleaseImage(&(image1.img));
                    cvReleaseImage(&(image2.img));
                }//End of jj
            }//end of ii
            tempDirCounterss++;
        }//End of ss
        tempDirCounterkk++;
    }//End of kk

    if(visible)
        cvDestroyWindow("1D SURF");
    return 0;
}
int main(){ TFile * F = new TFile("/home/ajafari/rootfiles/TopTrees/7TeV/July10/Skimmed/FullSelection/FS_ECalDriven_Signal.root","read"); // TFile * F = new TFile("/tmp/ajafari/FullSelection/FS_ECalDriven_Signal.root","read"); // TFile * F = new TFile("/tmp/ajafari/FS_ECalDriven_Signal.root","read"); TTree* runTree = (TTree*) F->Get("runTree"); TTree* eventTree = (TTree*) F->Get("eventTree"); PracticalEvent pracEvt(eventTree,runTree,true); ElectronCuts myElecCuts; myElecCuts.Pt = 30; myElecCuts.eta = 2.4; myElecCuts.ExLow = 1.442; myElecCuts.ExHigh = 1.56; myElecCuts.Id = "VBTF70"; myElecCuts.IdSecond = "VBTF95"; myElecCuts.IsoType = ""; myElecCuts.D0 = 0.02; myElecCuts.IsoCut = 0.1; JetCuts myJetCuts; myJetCuts.bTagAlgo = "TCHE"; myJetCuts.Pt = 30.; myJetCuts.eta = 2.4; myJetCuts.nCaloTower = 5; myJetCuts.EmfUp = 0.9; myJetCuts.EmfLow = 0.05; myJetCuts.fhpd = 1000.; myJetCuts.N90 = -1; myJetCuts.bTagCut = 4.; MassAndResolution myMassResolution; myMassResolution.topMass = 180.6; myMassResolution.topRes = 3.462; myMassResolution.wMass = 88.76; myMassResolution.wRes = 10.73; // MCAnalysis * myMCAnalysis = new MCAnalysis("FmcMC",50., 160.,160.,280.); MCAnalysis * myMCAnalysis = new MCAnalysis("FmcMC",60., 160.,160.,280.); // MCAnalysis * myMCAnalysis_Fdata = new MCAnalysis("FdataMC",50., 160.,160.,280.); MCAnalysis * myMCAnalysis_Fdata = new MCAnalysis("FdataMC",60., 160.,160.,280.); // DataAnalysis * myDataAnalysis = new DataAnalysis("FdataDATA",50., 160.,160.,280.); DataAnalysis * myDataAnalysis = new DataAnalysis("FdataDATA",60., 160.,160.,280.); AnalysisHandler Signalanalysis("SignalStudy_60-160-280",/*dobtag*/false,/*doBtagDataLike*/false,/*extJetOk_*/true,/*Lumi*/100., myMassResolution,myElecCuts,myJetCuts,/*whichMethod*/"minChi2"); Signalanalysis.AddAnalysis(myMCAnalysis); Signalanalysis.AddAnalysis(myMCAnalysis_Fdata); Signalanalysis.AddAnalysis(myDataAnalysis); Signalanalysis.setOnly4MatchedJets(); Signalanalysis.setVerbosity(0); int i = 1; while (pracEvt.Next()){ // cout<<"-----------------------------------------------------------------------------------event number : "<<i<<endl; Signalanalysis.Analyze(&pracEvt,1.); i++; // if(i == 200) // break; } double Fmc = myMCAnalysis->getF(); double Fdata = myDataAnalysis->getF(); Signalanalysis.setAnalysisNonTrivialF(2,2.83644);// no reweighing // Signalanalysis.setAnalysisNonTrivialF(2,Fdata); Signalanalysis.End(); cout<<"\t\t--------------------------------"<<endl; cout<<"\t\tFmc = "<<Fmc<<endl; cout<<"\t\tFdata = "<<Fdata<<endl; cout<<"\t\t--------------------------------"<<endl; return 0; };
int main(int argc, char ** argv) { //The angle used for matching validation double angle = 10; //The distance threshold double distance = 200; //Determine if the KNN validation criterion is necessary bool usingKnnCriterion = true; //The horizon line int horizonLine = 300; //Using the terminal bool terminal = false; //For BRISK SURF Using radius = 0.20, threshold = 70 bool hamming=false; std::string feat_detector = "BRISK"; std::string feat_descriptor = "SURF"; double hammingDistance = 0.28; //double threshold = 30;//45 double threshold = 43.75; //For SBRISK SBRISK, hammingDistance = 85, Threshold = 100 // bool hamming=true; // std::string feat_detector = "BRISK"; // std::string feat_descriptor = "BRISK"; // double threshold = 46.25;//46.25;//46.25 KNN // // double threshold = 78.75;//Hamming // double hammingDistance = 121.25;//Hamming //For BRISK4 (4 octaves) // bool hamming=true; // std::string feat_detector = "BRISK4"; // std::string feat_descriptor = "BRISK4"; // //double threshold = 30; //KNN 51.25 // double threshold = 51.25;//Hamming // double hammingDistance = 120;//Hamming //Set the date and time string myDate = "06082012"; string myTime = "1402"; //Set if you are matching or not matching bool isMatching = false; //Determine whether the MPS or CPS threshold is being used bool isMax = true; //Are we comparing camera pics to the Nao pics bool usingCamera = true; //Lighting parameter lighting light = LIGHT_BOTH; //NB ************************************* //CHOOSE THE DATASET TO USE int dataset=1; //dataset =1 for original robocup pics //dataset = 2 for office pics and large hall pics //dataset = 3 for varying illumination and for comparing the robocup dataset with the camera robocup dataset //dataset = 4 Nao Street View 27-28 (directory indices) with Google Street view // AND second dataset 19-20 with camera pic indices 25-26 //Create the non-matching upper and lower bounds int upperBound = 0; int lowerBound = 0; //Matching parameters int k_start =0; int k_end = 0; int s_start = 0; int s_end = 0; int index = 0; //Counters used to ensure standard output for processing in Matlab int tempDirCounterkk = 0; int tempDirCounterss = 0; //USED FOR LIGHTING AND FOR COMPARING NAO CAMERA PICS int step = 4; //If we are performing the matching routine if(isMatching) { if (dataset==1)//The original dataset { //Counters used to ensure standard output for processing in Matlab tempDirCounterkk = 1; tempDirCounterss = 1; k_start = 1;//Left dataset: 5; Right Dataset: 9; Both Dataset: 13 k_end = 4;//Left dataset: 8; Right Dataset 12; Both Dataset: 16 } else if (dataset==2)//Additional datasets { //Counters used to ensure standard output for processing in Matlab tempDirCounterkk = 5; tempDirCounterss = 5; k_start = 19; //Dataset 2: 17; Dataset 3: 19 k_end = 20; //Dataset 2: 18; Dataset 3: 20 } else if (dataset==3) //Varying lighting { //Counters used to ensure standard output for processing in Matlab tempDirCounterkk = 1; tempDirCounterss = 1; k_start = 1;//Left dataset: 5; Right Dataset: 9; Both Dataset: 13 k_end = 4;//Left dataset: 6; Right Dataset: 10; Both Dataset: 14 //Determines which dataset to use. 
Either lighting datasets or camera datasets if(!usingCamera) { if(light == LIGHT_LEFT) step = 4; else if(light == LIGHT_RIGHT) step = 8; else if (light ==LIGHT_BOTH) step=12; } else //Camera dataset step=20; //Camera: 20 for camera pics } else if (dataset==4) //Google Street View { //Street view 27,28,29,30 //In matching, we want to match 27-29 and 28-30 //Used for matching the correct images step = 2; //Counters used to ensure standard output for processing in Matlab tempDirCounterkk = 1; tempDirCounterss = 1; if(!usingCamera) { k_start = 27; k_end = 28; } else { k_start = 25; k_end = 26; } } } else { if (dataset==1) { //Counters used to ensure standard output for processing in Matlab tempDirCounterkk = 1; tempDirCounterss = 3; k_start = 1;//Left dataset: 5; Right Dataset: 9; Both Dataset: 13 k_end = 2;//Left dataset: 6; Right Dataset: 10; Both Dataset: 14 s_start = 3;//Left dataset: 7; Right Dataset: 11; Both Dataset: 15 s_end = 4;//Left dataset: 8; Right Dataset: 12; Both Dataset: 16 } else if (dataset==2) { //Counters used to ensure standard output for processing in Matlab tempDirCounterkk = 5; tempDirCounterss = 6; k_start = 19; //Dataset 2: 17; Dataset 3: 19 k_end = 19; //Dataset 2: 17; Dataset 3: 19 s_start = 20; //Dataset 2: 18; Dataset 3: 20 s_end = 20; //Dataset 2: 18; Dataset 3: 20 } else if(dataset==3) //Varying lighting or comparing the Nao robocup pics with the Nikon Camera pics { //s_start; s_end //Left Light off dataset: 5; Left Light off Dataste 8; //Right Light off Dataset: 9; Right Light off Dataset: 12; //Both Light off Dataset: 13; Both Light off Dataset: 16 //Camera Pics Dataset: 21; Camera Pics Dataset: 24 //Counters used to ensure standard output for processing in Matlab //--------------------------------------------------------------- tempDirCounterkk = 1; tempDirCounterss = 3; //Set the datasets that you want to match k_start = 1; k_end = 4; if(!usingCamera){ if(light == LIGHT_LEFT){ lowerBound = 5; upperBound = 8; } else if(light == LIGHT_RIGHT){ lowerBound = 9; upperBound = 12; } else if (light ==LIGHT_BOTH){ lowerBound = 13; upperBound = 16; } } else{ lowerBound = 21; upperBound = 24; } s_start = lowerBound; s_end = upperBound; //REMEMBER: CHANGE s_start, s_end IN THE KK LOOP AS WELL //--------------------------------------------------------------- } else if(dataset==4) { //Run this twice //1. First do 27 and 30 //2. 
Second do 28 and 29 //Counters used to ensure standard output for processing in Matlab tempDirCounterkk = 1; tempDirCounterss = 3; if(!usingCamera) { lowerBound = 29; upperBound = 30; s_start = 31; s_end = 32; } else { lowerBound = 25; upperBound = 26; s_start = 27; s_end = 28; } k_start = lowerBound; k_end = upperBound; } } //Begin the matching procedure for(int kk=k_start;kk<=k_end;kk++) { if(dataset==1){ //The counter must be reset for formatting purposes in Matlab if(isMatching){ s_start = s_end = kk; tempDirCounterss = kk;//kk for matching } else{ tempDirCounterss = 3;// 3 for non-matching } } else if(dataset==2) { if(isMatching) s_start = s_end = kk; } else if(dataset==3){ //This is for the varying illumination datasets if(isMatching) { //Comparing datasets s_start = s_end = kk+step; //The counter must be reset for formatting purposes in Matlab tempDirCounterss = kk; } else { //This ensures that no datasets are repeated if (kk>=3){ tempDirCounterss = 5; s_start = lowerBound; s_end = lowerBound+1; } else{ tempDirCounterss = 3; s_start = upperBound - 1; s_end = upperBound; } } } else if (dataset==4)//This is for google street view { if(isMatching) s_start = s_end = kk+ step; else{ if (kk==lowerBound) s_start = s_end = lowerBound + 3; else if (kk==upperBound) s_start = s_end = upperBound+1; } } for (int ss = s_start;ss<=s_end;ss++) { //Create object for dataAnalysis DataAnalysis dataAnalysis; //Create the Feature extraction object FeatureExtraction feature(angle, distance, usingKnnCriterion); //The directory where the files are stored std::string dir, dir1; dir = to_string<int>(kk); dir1 = to_string<int>(ss); std::string tempDir = to_string<int>(tempDirCounterkk); std::string tempDir1 =to_string<int>(tempDirCounterss); //Names of the two image files std::string name1; std::string name2; //For changing the threshold double testThreshold = 0; //Set the directory names and determine the number of images in each directory int jpegCounter = dataAnalysis.getNumImagesInDirectory(&dir, terminal); int jpegCounter1 = dataAnalysis.getNumImagesInDirectory(&dir1, terminal); std::cout<<"The number of images in the directory is: "<<jpegCounter<<endl; //Set the flag for1 setting up the BRISK lookup table for the extractor cv::Ptr<cv::DescriptorExtractor> descriptorExtractor; descriptorExtractor = feature.getExtractor(argc, feat_descriptor, hamming, descriptorExtractor); //************************************* //Get the current time for saving purposes //************************************* time_t rawtime; struct tm * timeinfo; //char filename [80]; time ( &rawtime ); timeinfo = localtime ( &rawtime ); //strftime (filename,80,"../../data/Matches/matchingData_%b_%d_%H%M%S.txt",timeinfo); //strftime (filename,80,"../data/Matches/nonmatching_matching_Data__BRISK__BRISK_Hamming_070421012_1222.txt",timeinfo); //puts (filename); string filename = "../"; if (dataset==1) { filename.append("dataRobocup/Matches/"); filename.append("nonmatching_matching_Data__"); }else if (dataset==2) { filename.append("data2/Matches/");//Or data3 filename.append("dataset2_nonmatching_matching_Data__");//Or dataset3 } else if (dataset==3 && !usingCamera) { filename.append("dataLighting/Matches/"); if(light==LIGHT_LEFT) filename.append("dataLighting_left_light_off_nonmatching_matching_Data__"); else if(light==LIGHT_RIGHT) filename.append("dataLighting_right_light_off_nonmatching_matching_Data__"); else if (light==LIGHT_BOTH) filename.append("dataLighting_both_lights_off_nonmatching_matching_Data__"); } else if (dataset==3 && 
        usingCamera){
            filename.append("dataCamera/Matches/");
            filename.append("camera_nonmatching_matching_Data__");
        }
        else if (dataset==4){
            if(usingCamera){
                filename.append("dataCamera/Matches/");
                filename.append("camera_large_hall_nonmatching_matching_Data__");
            }
            else{
                filename.append("dataStreetView/Matches/");
                filename.append("streetview_nonmatching_matching_Data__");
            }
        }
        filename.append(feat_detector);
        filename.append("_");
        filename.append(feat_descriptor);
        filename.append("_");
        if(usingKnnCriterion)
            filename.append("KNN_");
        else
            filename.append("Hamming_");
        filename.append(myDate);
        filename.append("_");
        filename.append(myTime);
        filename.append("_");
        filename.append(to_string<double>(threshold));
        filename.append("_");
        if (!usingKnnCriterion)
            filename.append(to_string<int>(hammingDistance));
        if (isMax)
            filename.append("_max");
        else
            filename.append("_consistent");
        filename.append(".txt");
        cout<<filename<<endl;
        //*************************************

        //Make sure that there are the same number of images in each frame (Not for non matches)
        if (isMatching && !(dataset==3))
        {
            if(jpegCounter>jpegCounter1)
                jpegCounter = jpegCounter1;
            else
                jpegCounter1 = jpegCounter;
        }

        //Remember that for non-matches, we can compare 1,1;2,2;3,3...etc
        //Determine matches without repetition
        for (int ii = 1;ii<=jpegCounter;ii++)
        {
            if(isMatching && !(dataset ==3))
                index = ii;
            else
                index = jpegCounter1;
            for (int jj = 1; jj<index;jj++)//jpegCounter1
            {
                //Choose the images to compare
                name1 = to_string<int>(ii);
                name2 = to_string<int>(jj);
                cout<<"Image "<<ii<<", Image "<<jj<<endl;

                // names of the two images
                std::string fname1;
                std::string fname2;
                cv::Mat imgGray1Full;
                cv::Mat imgGray2Full;
                cv::Mat imgGray1;
                cv::Mat imgGray2;

                //Process the first image which would normally be stored on the robot
                //****************************************************
                fname1 = dir+"/"+name1+".jpg";
                imgGray1Full = cv::imread(fname1.c_str(), CV_LOAD_IMAGE_GRAYSCALE);
                if (imgGray1Full.empty())
                {
                    std::cout<<"image(s) "<<fname1<<" not found." << std::endl;
                    return 2;
                }
                //Only take the image from the horizon line
                imgGray1 = imgGray1Full(cv::Rect(0, 0, imgGray1Full.cols, horizonLine));
                //****************************************************

                timespec ts, te, matchings, matchinge, detectors, detectore, extractors, extractore, verifys, verifye;
                clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &ts);

                //Read in images
                //*****************************************************************
                fname2 = dir1+"/"+name2+".jpg";
                imgGray2Full = cv::imread(fname2.c_str(), CV_LOAD_IMAGE_GRAYSCALE);
                if (imgGray2Full.empty())
                {
                    std::cout<<"image(s) "<<fname2<<" not found." << std::endl;
                    return 2;
                }
                //We only need the keypoints above the horizon
                imgGray2 = imgGray2Full(cv::Rect(0, 0, imgGray2Full.cols, horizonLine));
                //*****************************************************************
                clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &te);
                float imageProcessingTime = diff(ts,te).tv_nsec/1000000.0f;

                clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &detectors);
                //MC: Generate a vector of keypoints
                std::vector<cv::KeyPoint> keypoints, keypoints2;
                // create the detector:
                //*****************************************************************
                cv::Ptr<cv::FeatureDetector> detector;
                //Note: Testflag==1 which means that the threshold will equal testThreshold
                detector = feature.getDetector(argc, feat_detector, detector, threshold, testThreshold,1);
                //*****************************************************************
                detector->detect(imgGray2,keypoints2);
                clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &detectore);
                float detectionTime = diff(detectors,detectore).tv_nsec/1000000.0f;

                //The descriptor stored on the robot
                //*****************************************************************
                detector->detect(imgGray1,keypoints);
                //*****************************************************************

                clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &extractors);
                // now the extractor:
                //*****************************************************************
                // get the descriptors
                cv::Mat descriptors, descriptors2;
                std::vector<cv::DMatch> indices;
                // second image. Computes the descriptor for each of the keypoints.
                //Outputs a 64 bit vector describing the keypoints.
                descriptorExtractor->compute(imgGray2,keypoints2,descriptors2);
                clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &extractore);
                float extractionTime = diff(extractors,extractore).tv_nsec/1000000.0f;

                //The descriptors stored on the robot
                //*****************************************************************
                descriptorExtractor->compute(imgGray1,keypoints,descriptors);
                //*****************************************************************

                clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &matchings);
                // matching
                //*****************************************************************
                std::vector<std::vector<cv::DMatch> > matches;
                cv::Ptr<cv::DescriptorMatcher> descriptorMatcher;
                if(hamming)
                    descriptorMatcher = new cv::BruteForceMatcher<cv::HammingSse>();
                else
                    descriptorMatcher = new cv::BruteForceMatcher<cv::L2<float> >();
                if(hamming){
                    if(usingKnnCriterion)
                        descriptorMatcher->knnMatch(descriptors,descriptors2,matches,2);
                    else
                        descriptorMatcher->radiusMatch(descriptors,descriptors2,matches,hammingDistance);
                }
                else{
                    if(usingKnnCriterion)
                    {
                        if(descriptors2.rows>0)
                            descriptorMatcher->knnMatch(descriptors,descriptors2,matches,2);
                        else
                            matches.clear();
                    }
                    else{
                        //Decreasing the maxdistance value will drastically reduce the number of matches
                        if(descriptors2.rows>0)
                            descriptorMatcher->radiusMatch(descriptors,descriptors2,matches,hammingDistance);
                        else
                            matches.clear();
                    }
                }

                cv::Mat outimg;
                //Write the data to a file
                ofstream writeFile;
                //Create the filename with the current time
                writeFile.open(filename.c_str(), ios::app);//ios::app

                clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &verifys);
                //Perform the matching verification
                //*************************************************************************
                // cout<<"The total number of keypoints in image 1 is: "<<keypoints.size()<<endl;
                // cout<<"The total number of keypoints in image 2 is: "<<keypoints2.size()<<endl;
                // cout<<"The total number of matches is: "<<matches.size()<<endl;
                feature.performMatchingValidation(imgGray1,keypoints, keypoints2, matches, hamming);
                //*************************************************************************
                clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &verifye);
                float verifyTime = diff(verifys,verifye).tv_nsec/1000000.0f;
                clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &matchinge);
                float matchingTime = diff(matchings,matchinge).tv_nsec/1000000.0f;
                float overallTime = imageProcessingTime + detectionTime + extractionTime + matchingTime + verifyTime;

#if (DEBUG_MODE)
                cout<<"****************************************"<<endl;
                cout<<"The matching score for the image (considering all matches) is "<<feature.imageMatchingScore<<endl;
                cout<<"The matching score for the image (considering best match only) is "<<feature.imageMatchingScoreBest<<endl;
                cout<<"The total number of matches is "<<feature.totalNumMatches<<endl;
                cout<<"The total number of best matches is "<<feature.totalNumBestMatches<<endl;
                cout<<"****************************************"<<endl;
#endif
#if (DEBUG_TIMES)
                std::cout<<"The times:"<<endl;
                std::cout<<"Detection Time: "<<detectionTime<<" ms"<<endl;
                std::cout<<"Extraction Time: "<<extractionTime<<" ms"<<endl;
                std::cout<<"Matching Time: "<<matchingTime<<" ms"<<endl;
                std::cout<<"Verify Matches Time: "<<verifyTime<<" ms"<<endl;
                std::cout<<"Overall Time: "<<overallTime<<" ms"<<endl;
                cv::waitKey(1000);
#endif
                //threshold = atoi(argv[3]+5);
                //Write all the information to a file
                writeFile <<tempDirCounterkk<<", "<<tempDirCounterss<<", "<<threshold<<", "<<name1<<", "<<name2<<", "
                          <<keypoints.size()<<", "<<keypoints2.size()<<", "
                          <<feature.imageMatchingScoreBest<<", "<<feature.imageMatchingScore<<","
                          <<feature.totalNumMatches<<", "<<feature.totalNumValidMatches<<", "
                          <<feature.totalNumInvalidMatches<<", "<<feature.totalNumBestMatches<<", "
                          <<detectionTime<<", "<<extractionTime<<", "<<matchingTime<<", "
                          <<verifyTime<<", "<<overallTime<<"\n";
                //close the file
                writeFile.close();

#if (DISPLAY)
                drawMatches(imgGray1, keypoints, imgGray2, keypoints2,matches,outimg,
                            cv::Scalar(0,255,0), cv::Scalar(0,0,255),
                            std::vector<std::vector<char> >(), cv::DrawMatchesFlags::DRAW_RICH_KEYPOINTS );
                cv::namedWindow("Matches");
                cv::imshow("Matches", outimg);
                //cv::imshow("keypoints", imgRGB1);
                //cv::imshow("keypoints2", imgRGB2);
                cv::waitKey();
#endif
            }//End of inner for loop (jj)
        }//end of outer for loop (ii)
        tempDirCounterss++;
    }//End of ss loop
    tempDirCounterkk++;
}//end of kk loop
return 0;
}
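// The timing code above uses a diff(timespec, timespec) helper that is not part of this
// listing; the sketch below assumes the conventional implementation that returns the elapsed
// time as a timespec. Note that the callers only read tv_nsec, which implicitly assumes each
// measured interval stays below one second.

#include <ctime>

timespec diff(timespec start, timespec end)
{
    timespec temp;
    if (end.tv_nsec < start.tv_nsec) {
        // borrow a second when the nanosecond field wraps around
        temp.tv_sec  = end.tv_sec - start.tv_sec - 1;
        temp.tv_nsec = 1000000000L + end.tv_nsec - start.tv_nsec;
    } else {
        temp.tv_sec  = end.tv_sec - start.tv_sec;
        temp.tv_nsec = end.tv_nsec - start.tv_nsec;
    }
    return temp;
}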
//Main function
int main(int argc, char ** argv)
{
    //The angle used for matching validation
    double angle = 10;
    //The distance threshold
    double distance = 200;
    //The horizon line
    int horizonLine = 300;
    //Determine if the KNN validation criterion is necessary
    bool usingKnnCriterion = true;
    //To determine whether we are finding keypoint properties or KNN Ratio
    bool keypointProperties = true;
    //Set if you are matching or not matching
    bool isMatching = true;
    //Set the date and time
    string myDate = "11072012";
    string myTime = "2300";
    //Using the terminal
    bool terminal = false;

    //BRISK SURF2D
    bool hamming=false;
    std::string feat_detector = "BRISK";
    double threshold = 43.75;
    int hammingDistance = 40;//BRISK BRISK
    //double radius = 0.15;//BRISK SURF
    std::string feat_descriptor = "SURF";

    //Set the arguments
    // bool hamming=true;
    // std::string feat_detector = "BRISK";
    // int threshold = 55;
    // int hammingDistance = 40;//BRISK BRISK
    // //double radius = 0.15;//BRISK SURF
    // std::string feat_descriptor = "U-BRISK";

    //Create the Feature extraction object
    FeatureExtraction feature(angle, distance, usingKnnCriterion);
    //Create data analysis object
    DataAnalysis dataAnalysis;

    //*************************************
    //Get the current time for saving purposes and set the file to save to
    //*************************************
    time_t rawtime;
    struct tm * timeinfo;
    char filename [80];
    string testThresholdString = to_string<int>(threshold);
    time ( &rawtime );
    timeinfo = localtime ( &rawtime );

    //Declare the files
    string file,file1, file2, file3;
    if(terminal)
    {
        file.append("../");
        file1.append("../");
        file2.append("../");
        file3.append("../");
    }
    file = "../data/singleImage/singleImageData_keypoints_";
    file.append(feat_detector);
    file.append("_");
    file.append(feat_descriptor);
    file.append("_");
    if(usingKnnCriterion)
        file.append("KNN_");
    else
        file.append("Hamming_");
    file.append(myDate);
    file.append("_");
    file.append(myTime);

    file1 = "../data/singleImage/singleImageData_matches_";
    file1.append(feat_detector);
    file1.append("_");
    file1.append(feat_descriptor);
    file1.append("_");
    if(usingKnnCriterion)
        file1.append("KNN_");
    else
        file1.append("Hamming_");
    file1.append(myDate);
    file1.append("_");
    file1.append(myTime);

    file2 = "../data/singleImage/singleImageData_invalid_matches_KNN_";
    file2.append(feat_detector);
    file2.append("_");
    file2.append(feat_descriptor);
    file2.append("_");
    if(usingKnnCriterion)
        file2.append("KNN_");
    else
        file2.append("Hamming_");
    file2.append(myDate);
    file2.append("_");
    file2.append(myTime);

    file3 = "../data/singleImage/singleImageData_invalid_matches_AngleDistance_";
    file3.append(feat_detector);
    file3.append("_");
    file3.append(feat_descriptor);
    file3.append("_");
    if(usingKnnCriterion)
        file3.append("KNN_");
    else
        file3.append("Hamming_");
    file3.append(myDate);
    file3.append("_");
    file3.append(myTime);

    if(keypointProperties)
    {
        file.append("_keypointStats");
        file1.append("_keypointStats");
        file2.append("_keypointStats");
        file3.append("_keypointStats");
    }
    else{
        if(!isMatching)
        {
            file.append("_false");
            file1.append("_false");
            file2.append("_false");
            file3.append("_false");
        }
        else
        {
            file.append("_identical");
            file1.append("_identical");
            file2.append("_identical");
            file3.append("_identical");
        }
    }
    file.append(".txt");
    cout<<file<<endl;
    file1.append(".txt");
    cout<<file1<<endl;
    file2.append(".txt");
    cout<<file2<<endl;
    file3.append(".txt");
    cout<<file3<<endl;
    //*************************************

    //Matching parameters
    int k_start = 0;
    int k_end = 0;
    int s_start = 0;
    int s_end = 0;
    int index = 0;
if(isMatching) { k_start = 1; k_end = 4; } else { k_start = 1; k_end = 2; s_start = 3; s_end = 4; } for (int kk=k_start;kk<=k_end; kk++) { if(isMatching) s_start = s_end = kk; for (int ss=s_start;ss<=s_end;ss++) { std::string dir, dir1; dir = to_string<int>(kk); dir1 = to_string<int>(ss); std::string tempDir = to_string<int>(kk); std::string tempDir1 = to_string<int>(ss); //Names of the two image files std::string name1; std::string name2; //For changing the threshold int testThreshold = 10; //Set the directory names and determine the number of images in each directory int jpegCounter = dataAnalysis.getNumImagesInDirectory(&dir, terminal); int jpegCounter1 = dataAnalysis.getNumImagesInDirectory(&dir1, terminal); std::cout<<"The number of images in the directory is: "<<jpegCounter<<endl; // Declare the extractor. Only needs to be performed once since it computes lookup //tables for each of the various patterns on initialisation //***************************************************************** cv::Ptr<cv::DescriptorExtractor> descriptorExtractor; descriptorExtractor = feature.getExtractor(argc, feat_descriptor, hamming, descriptorExtractor); //***************************************************************** //Make sure that there are the same number of images in each frame if (isMatching) { jpegCounter=5; // if(jpegCounter>jpegCounter1) // jpegCounter = jpegCounter1; // else // jpegCounter1 = jpegCounter; } //Remember that for non-matches, we can compare 1,1;2,2;3,3...etc //Determine matches without repetition for (int ii = 1;ii<=jpegCounter;ii++) { if(isMatching) index = ii; else index = jpegCounter1; for (int jj = 1; jj<=index;jj++)//if int jj = index; jj<=index;jj++ then matching images to themselves { //Choose the images to compare name1 = to_string<int>(ii); name2 = to_string<int>(jj); cout<<"Image "<<ii<<", Image "<<jj<<endl; //For changing the threshold int testThreshold = 10; //cout<<"Image in directory 1: "<<name1<<", Image in directory 2: "<<name2<<endl; // names of the two images std::string fname1; std::string fname2; cv::Mat imgGray1Full; cv::Mat imgGray2Full; cv::Mat imgGray1; cv::Mat imgGray2; //Process the first image //**************************************************************** while(imgGray1Full.empty()){ fname1 = dir+"/"+name1+".jpg"; imgGray1Full = cv::imread(fname1.c_str(), CV_LOAD_IMAGE_GRAYSCALE); } if (imgGray1Full.empty()) { std::cout<<"image(s)"<<fname1<<" not found." << std::endl; return 2; } //Process only the image above the horizon line imgGray1 = imgGray1Full(cv::Rect(0, 0, imgGray1Full.cols, horizonLine)); timespec ts, te, matchings, matchinge, detectors, detectore, extractors, extractore, verifys, verifye; clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &ts); // if no arguments are passed: //Read in images //***************************************************************** while(imgGray2Full.empty()){ fname2 = dir1+"/"+name2+".jpg"; imgGray2Full = cv::imread(fname2.c_str(), CV_LOAD_IMAGE_GRAYSCALE); } if (imgGray2Full.empty()) { std::cout<<"image(s)"<<fname2<<" not found." 
<< std::endl; return 2; } //***************************************************************** //We only need the keypoints above the horizon imgGray2 = imgGray2Full(cv::Rect(0, 0, imgGray2Full.cols, horizonLine)); clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &te); float imageProcessingTime = diff(ts,te).tv_nsec/1000000.0f; clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &detectors); //MC: Generate a vector of keypoints std::vector<cv::KeyPoint> keypoints, keypoints2; // create the detector: //***************************************************************** cv::Ptr<cv::FeatureDetector> detector; detector = feature.getDetector(argc, feat_detector, detector, threshold, testThreshold,1); //***************************************************************** detector->detect(imgGray2,keypoints2); clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &detectore); float detectionTime = diff(detectors,detectore).tv_nsec/1000000.0f; // run the detector: //***************************************************************** detector->detect(imgGray1,keypoints); //***************************************************************** //***************************************************************** clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &extractors); // get the descriptors cv::Mat descriptors, descriptors2; std::vector<cv::DMatch> indices; // second image. Computes the descriptor for each of the keypoints. //Outputs a 64 bit vector describing the keypoints. //***************************************************************** descriptorExtractor->compute(imgGray2,keypoints2,descriptors2); //***************************************************************** clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &extractore); float extractionTime = diff(extractors,extractore).tv_nsec/1000000.0f; //Compute the descriptors for the stored image //***************************************************************** descriptorExtractor->compute(imgGray1,keypoints,descriptors); //***************************************************************** clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &matchings); // matching //***************************************************************** std::vector<std::vector<cv::DMatch> > matches; cv::Ptr<cv::DescriptorMatcher> descriptorMatcher; if(hamming){ //descriptorMatcher = new cv::BruteForceMatcher<cv::HammingSse>(); } else descriptorMatcher = new cv::BruteForceMatcher<cv::L2<float> >(); if(hamming) //descriptorMatcher->radiusMatch(descriptors,descriptors2,matches,hammingDistance); //The first parameter is the query descriptor. The second parameter is the train descriptor descriptorMatcher->knnMatch(descriptors,descriptors2,matches,2); else{ //Messing with the maxdistance value will drastically reduce the number of matches descriptorMatcher->knnMatch(descriptors,descriptors2,matches,2);//radiusMatch radius } //For the above method, we could use KnnMatch. 
All values less than 0.21 max distance are selected //***************************************************************** //Image created for drawing cv::Mat outimg; clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &verifys); //Perform the matching verification //************************************************************************* feature.performMatchingValidation(imgGray1,keypoints, keypoints2, matches, hamming); //************************************************************************* clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &verifye); float verifyTime = diff(verifys,verifye).tv_nsec/1000000.0f; clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &matchinge); float matchingTime = diff(matchings,matchinge).tv_nsec/1000000.0f; float overallTime = imageProcessingTime + detectionTime + extractionTime + matchingTime + verifyTime; //Write the data to a file ofstream writeFile; writeFile.open(file.c_str(), ios::app); //1. Write all the keypoints to the first image file //Swap hamming distance with radius for(int im1 = 0;im1<keypoints.size();im1++){ //[image num, hammingdist, threshold, kp1x, kp1y, angle, size, octave] writeFile <<kk<<", "<<ss<<", "<<name1<<", "<<hammingDistance<<", "<<threshold<<", "<<im1<<", "<<keypoints[im1].pt.x<<", "<<keypoints[im1].pt.y<<", "<<keypoints[im1].angle<<", "<<keypoints[im1].size<<", "<<keypoints[im1].octave<<", "<<keypoints[im1].response<<"\n"; } //2. Write all the keypoints to the second image file //Swap hamming distance with radius for(int im2 = 0;im2<keypoints2.size();im2++){ //[image num, hammingdist, threshold, kp1x, kp1y, angle, size, octave] writeFile <<kk<<", "<<ss<<", "<<name2<<", "<<hammingDistance<<", "<<threshold<<", "<<im2<<", "<<keypoints2[im2].pt.x<<", "<<keypoints2[im2].pt.y<<", "<<keypoints2[im2].angle<<", "<<keypoints2[im2].size<<", "<<keypoints2[im2].octave<<", "<<keypoints2[im2].response<<"\n"; } //Close the file writeFile.close(); writeFile.clear(); //Write the data to a file writeFile.open(file1.c_str(), ios::app); //Write all the matches data to a file //image num left, image num right, queryIdx, trainIdx, keypoint1 x, kp1y, angle size, octave, response, kp2x, kp2y, angle, size, octave, neighbor num, distance //Swap hamming distance with radius for(size_t mIdx = 0;mIdx<matches.size();mIdx++){ for(size_t nIdx = 0;nIdx<matches[mIdx].size();nIdx++) { int q = matches[mIdx][nIdx].queryIdx; int t = matches[mIdx][nIdx].trainIdx; #if (DEBUG_MODE) cout<<"neighbor index: "<<nIdx<<endl; cout<<"Matches size: "<<matches[mIdx].size()<<endl; cout<<"Keypoint Left to be erased row,col : "<<keypoints[q].pt.y<<", "<<keypoints[q].pt.x<<endl; cout<<"Keypoint Right to be erased row,col : "<<keypoints2[t].pt.y<<", "<<keypoints2[t].pt.x<<endl; cout<<"The match distance is: "<<matches[mIdx][nIdx].distance<<endl; #endif writeFile <<kk<<", "<<ss<<", "<<name1<<", "<<name2<<", "<<q<<", "<<t<<", "<<keypoints[q].pt.x<<", "<<keypoints[q].pt.y<<", "<<keypoints[q].angle<<", "<<keypoints[q].size<<", "<<keypoints[q].octave<<", "<<keypoints[q].response<<", "<<keypoints2[t].pt.x<<", "<<keypoints2[t].pt.y<<", "<<keypoints2[t].angle<<", "<<keypoints2[t].size<<", "<<keypoints2[t].octave<<", "<<keypoints2[t].response<<", "<<nIdx<<", "<<matches[mIdx][nIdx].distance<<"\n"; } } //Close the file writeFile.close(); writeFile.clear(); //Write the invalid KNN matches data to a file writeFile.open(file2.c_str(), ios::app); for (size_t it = 0; it<feature.leftPointsKNN.size(); it++) { // cout<<"it index: "<<it<<endl; // cout<<"Matches size: "<<feature.leftPointsKNN.size()<<endl; // cout<<"Keypoint Left to 
be erased row,col : "<<feature.leftPointsKNN[it].pt.y<<", "<<feature.leftPointsKNN[it].pt.x<<endl; // cout<<"Keypoint Right to be erased row,col : "<<feature.rightPointsKNN[it].pt.y<<", "<<feature.rightPointsKNN[it].pt.x<<endl; // cout<<"The match distance is: "<<feature.keypointDistanceKNN[it]<<endl; writeFile <<kk<<", "<<ss<<", "<<name1<<", "<<name2<<", "<<feature.leftPointsKNN[it].pt.x<<", "<<feature.leftPointsKNN[it].pt.y<<", "<<feature.leftPointsKNN[it].angle<<", "<<feature.leftPointsKNN[it].size<<", "<<feature.leftPointsKNN[it].octave<<", "<<feature.leftPointsKNN[it].response<<", "<<feature.rightPointsKNN[it].pt.x<<", "<<feature.rightPointsKNN[it].pt.y<<", "<<feature.rightPointsKNN[it].angle<<", "<<feature.rightPointsKNN[it].size<<", "<<feature.rightPointsKNN[it].octave<<", "<<feature.rightPointsKNN[it].response<<", "<<feature.neighborIndexKNN[it]<<", "<<feature.keypointDistanceKNN[it]<<"\n"; } //Close the file writeFile.close(); writeFile.clear(); //Write the invalid angle and distance matches data to a file writeFile.open(file3.c_str(), ios::app); for (int it = 0; it<feature.leftPointsAngle.size(); it++) { writeFile <<kk<<", "<<ss<<", "<<name1<<", "<<name2<<", "<<feature.leftPointsAngle[it].pt.x<<", "<<feature.leftPointsAngle[it].pt.y<<", "<<feature.leftPointsAngle[it].angle<<", "<<feature.leftPointsAngle[it].size<<", "<<feature.leftPointsAngle[it].octave<<", "<<feature.leftPointsAngle[it].response<<", "<<feature.rightPointsAngle[it].pt.x<<", "<<feature.rightPointsAngle[it].pt.y<<", "<<feature.rightPointsAngle[it].angle<<", "<<feature.rightPointsAngle[it].size<<", "<<feature.rightPointsAngle[it].octave<<", "<<feature.rightPointsAngle[it].response<<", "<<feature.keypointDistanceAngle[it]<<"\n"; } //Close the file writeFile.close(); writeFile.clear(); #if (DEBUG_MODE) cout<<"****************************************"<<endl; cout<<"The matching score for the image (condsidering all matches) is "<<feature.imageMatchingScore<<endl; cout<<"The matching score for the image (condsidering best match only) is "<<feature.imageMatchingScoreBest<<endl; cout<<"The total number of matches is "<<feature.totalNumMatches<<endl; cout<<"The total number of valid matches is "<<feature.totalNumValidMatches<<endl; cout<<"The total number of invalid matches is "<<feature.totalNumInvalidMatches<<endl; cout<<"****************************************"<<endl; #endif #if (DEBUG_TIMES) std::cout<<"The times:"<<endl; std::cout<<"Detection Time: "<<detectionTime<<" ms"<<endl; std::cout<<"Extraction Time: "<<extractionTime<<" ms"<<endl; std::cout<<"Matching Time: "<<matchingTime<<" us"<<endl; std::cout<<"Verify Matches Time: "<<verifyTime<<" us"<<endl; std::cout<<"Overall Time: "<<overallTime<<" ms"<<endl; #endif #if (DEBUG_MODE) cout<<"The total number of keypoints in image 1 is: "<<keypoints.size()<<endl; cout<<"The total number of keypoints in image 2 is: "<<keypoints2.size()<<endl; #endif #if (DISPLAY) drawMatches(imgGray1, keypoints, imgGray2, keypoints2,matches,outimg, cv::Scalar(0,255,0), cv::Scalar(0,0,255), std::vector<std::vector<char> >(), cv::DrawMatchesFlags::DRAW_RICH_KEYPOINTS ); //NOT_DRAW_SINGLE_POINTS //Note: leftpoints correspond to keypoints - Image 1. rightpoints correspond to keypoints2 - Image 2. 
int colourChanger = 0; // for (int k = 0; k<feature.rightPoints.size(); k++) // { // // circle(imgGray2,cv::Point(feature.rightPoints[k].x, feature.rightPoints[k].y), 5, cv::Scalar(colourChanger, 100, 255), 4, 8, 0); // //#if(DEBUG_MATCHES) // cout<<"Incorrect coord Left row,col : "<<feature.rightPoints[k].y<<", "<<feature.rightPoints[k].x<<endl; //#endif // colourChanger = colourChanger+30; // } // colourChanger = 0; // for (int k = 0; k<feature.leftPoints.size(); k++) // { // circle(imgGray1,cv::Point(feature.leftPoints[k].x, feature.leftPoints[k].y), 5, cv::Scalar(colourChanger, 100, 255), 4, 8, 0); //#if(DEBUG_MATCHES) // cout<<"Incorrect coord Right row,col : "<<feature.leftPoints[k].y<<", "<<feature.leftPoints[k].x<<endl; //#endif // colourChanger = colourChanger+30; // } for (int k = 0; k<matches.size(); k++) { for (int j=0;j<matches[k].size();j++) { int qi = matches[k][j].queryIdx;//Reference Point. Assumed to be for image left int ti = matches[k][j].trainIdx;//Assumed to be image right //The points in the left image int kp1x = (*(keypoints.begin()+qi)).pt.x; int kp1y = (*(keypoints.begin()+qi)).pt.y; //The points in the right image int kp2x = (*(keypoints2.begin()+ti)).pt.x; int kp2y = (*(keypoints2.begin()+ti)).pt.y; circle(outimg,cv::Point(kp1x, kp1y), 5, cv::Scalar(colourChanger, 100, 255), 4, 8, 0); circle(outimg,cv::Point(640+ kp2x, kp2y), 5, cv::Scalar(colourChanger, 100, 255), 4, 8, 0); #if(DEBUG_MODE) cout<<"Correct coord Left row,col : "<<kp1y<<", "<<kp1x<<endl; cout<<"Correct coord Right row,col : "<<kp2y<<", "<<kp2x<<endl; #endif } colourChanger = colourChanger+30; } cv::namedWindow("Matches"); cv::imshow("Matches", outimg); //imgRGB1 is right. imgRGB2 is left #if(DEBUG_MODE) cv::imshow("keypoints", imgGray1); cv::imshow("keypoints2", imgGray2); #endif cv::waitKey(); #endif }//end of jj loop }//end of ii loop }//End of ss loop }//End of the kk loop return 0; }
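// For reference, the column layout of the rows written by the main() above: the keypoint file
// ("file") stores [kk, ss, image name, hammingDistance, threshold, keypoint index, x, y, angle,
// size, octave, response] per keypoint, while the match file ("file1") stores [kk, ss, name1,
// name2, queryIdx, trainIdx, the same six keypoint fields for each of the two matched keypoints,
// neighbour index, match distance] per candidate match.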
int main(int argc, char *argv[])
{
    //Determine whether or not you should show the matches and print the ROC curve
    //(note: visible already defaults to true, so the -visual flag is effectively a no-op)
    bool visible = true;
    bool printROC = false;
    if (argc == 2){
        string arg(argv[1]);
        if (arg == "-visual") visible = true;
        if (arg == "-ROC") printROC = true;
    }

    //Read in the images
    string line;
    ifstream myfile ("SURFtest/surf.data");

    //Create a vector of the test images
    vector<TestImage> test_images;
    string filename;
    int left = 120;
    int right = 120;
    int direction = 0;
    vector<TestData> classification;

    if(visible){
        cvNamedWindow("1D SURF", CV_WINDOW_AUTOSIZE );
        cvMoveWindow("1D SURF", 0, 0);
    }

    //At this point, all of the images have been loaded. They now need to be broken into
    //the various datasets

    //*****************MATCHING*********************************
    //Choose the dataset
    int dataset = 0;
    //The number of images in each dataset
    int jpegCounter = 0;
    int jpegCounter1 = 0;
    //Use the terminal or not
    bool terminal = false;
    //Create object for dataAnalysis
    DataAnalysis dataAnalysis;
    int testThreshold = 20;

    //Set the various directories for matching and non-matching respectively
    for (int ss = 5;ss<=6;ss++)
    {
        //The directory where the files are stored
        string dir, dir1;
        dir = to_string<int>(ss);
        dir1 = to_string<int>(ss);
        string fname1;
        string fname2;
        string name1;
        string name2;
        string tempDir = to_string<int>(ss);
        string tempDir1 = to_string<int>(ss);

        //Set the directory names and determine the number of images in each directory
        jpegCounter = dataAnalysis.getNumImagesInDirectory(&dir, terminal);
        jpegCounter1 = dataAnalysis.getNumImagesInDirectory(&dir1, terminal);
        cout<<"The number of images in the directory 1,2 is: "<<jpegCounter<<", "<<jpegCounter1<<endl;

        //The file to store the matching data
        string file = "data/Thresholds/thresholds_SURF1D_19072012_1151";
        //*****************************************
        file.append(".txt");
        cout<<file<<endl;

        //set the threshold
        THRES = 225;
        //This will compute the statistics for every combination in a directory for each threshold
        for (int tt = 1; tt<=20;tt++)//20
        {
            cout<<"Threshold is: "<<THRES<<endl;
            //Determine matches without repetition
            for (int ii = 1;ii<=jpegCounter;ii++)
            {
                for (int jj = 1; jj<ii;jj++)//jpegCounter1
                {
                    //if(isMatching && (ii==jj)) continue;
                    //cout<<"Image "<<ii<<", image "<<jj<<"\t";

                    //Process the first image
                    name1 = to_string<int>(ii);
                    fname1 = dir + "/"+ name1+".jpg";
                    //cout<<"Directory: "<<fname1<<endl;
                    TestImage image1 = TestImage(cvLoadImage(fname1.c_str() ),left,right,direction,dir);
                    //Compute the descriptors for image 1
                    surfDetDes(image1);

                    timespec times, timee;
                    clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &times);

                    //Start processing the second image. This image will be timed.
                    name2 = to_string<int>(jj);
                    //Load the images from the dataset
                    fname2 = dir1 + "/"+ name2+".jpg";
                    //Assign the test images
                    TestImage image2 = TestImage(cvLoadImage(fname2.c_str() ),left,right,direction,dir1);
                    //Run the 1D SURF feature extraction algorithm
                    //Compute the descriptors for image 2
                    surfDetDes(image2);

                    float result = 0;
                    IplImage* display;
                    if (ss>=6){
                        int test = 0; //leftover debugging hook; has no effect
                    }
                    //Calculate the matches
                    result = visualStaticMatch(image1, image2, visible, display);
                    //cout<<result<<endl;
                    clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &timee);
                    Ipoint::overallTime = Ipoint::diffSurf(times, timee).tv_nsec/1000000.0f;

                    if(visible){
                        cvShowImage("1D SURF", display);
                        cvWaitKey(0);
                        cvReleaseImage(&display);
                    }

                    //Write the data to a file
                    ofstream writeFile;
                    writeFile.open(file.c_str(), ios::app);
#if (DEBUG_TIMES)
                    cout<<"Total Matches: "<<Ipoint::totalNumMatches<<endl;
                    cout<<"Num valid matches: "<<Ipoint::numValidMatches<<endl;
                    cout<<"Detection Time: "<<Ipoint::detectionTime<<endl;
                    cout<<"Extraction Time: "<<Ipoint::extractionTime<<endl;
                    cout<<"Matching Time: "<<Ipoint::matchingTime<<endl;
                    cout<<"Verification Time: "<<Ipoint::verificationTime<<endl;
                    cout<<"Overall Time: "<<Ipoint::overallTime<<endl;
#endif
                    writeFile <<tempDir<<", "<<THRES<<", "<<name1<<", "<<name2<<", "
                              <<image1.ipts.size()<<", "<<image2.ipts.size()<<", "<<result<<", "<<0<<", "
                              <<Ipoint::totalNumMatches<<", "<<Ipoint::numValidMatches<<","
                              <<Ipoint::totalNumMatches -Ipoint::numValidMatches<<", "<<0<<", "
                              <<Ipoint::detectionTime<<", "<<Ipoint::extractionTime<<", "
                              <<Ipoint::matchingTime<<", "<<Ipoint::verificationTime<<", "
                              <<Ipoint::overallTime<<"\n";
                    //close the file
                    writeFile.close();
                    writeFile.clear();

                    cvReleaseImage(&(image1.img));
                    cvReleaseImage(&(image2.img));
                }//End of jj
            }//end of ii
            THRES = THRES + 10;
        }//End tt
    }//End of ss

    if(visible)
        cvDestroyWindow("1D SURF");

    //(the commented-out block below accumulates the ROC area with the trapezoidal rule)
    // double tt_matching = ((double)cvGetTickCount() - time)/(cvGetTickFrequency()*1000.);
    // int neg = (int)classification.size()-pos;
    // printf("Tested %d matching images and %d unmatched images\n",pos, neg);
    // printf("Average extraction time %.3fms\n", total_extraction/test_images.size());
    // printf("\tconstructing integral time %.3fms\n", total_integral/test_images.size());
    // printf("\tfinding interest points time %.3fms\n", total_interestpoints/test_images.size());
    // printf("\tcalculating descriptors time %.3fms\n", total_descriptors/test_images.size());
    // printf("Average matching time %.3fms\n",tt_matching/classification.size());
    // printf("Average features extracted: %.1f\n",num_features/test_images.size());
    //
    // //Sort the matches in reverse order since we want to create the ROC curve
    // sort(classification.begin(), classification.end());
    // reverse(classification.begin(), classification.end());
    //
    // double prev_tp = 0;
    // double tp = 0;
    // double prev_fp = 0;
    // double fp = 0;
    // double AUC = 0;
    // if(printROC) printf("FPR\tTPR\tThreshold\n");
    // for(int i=0; i<(int)classification.size(); i++){
    //     TestData data = classification.at(i);
    //     if(data.match) tp++;
    //     else fp++;
    //     if(i%20==0){
    //         if(printROC) printf("%.2f\t%.2f\t%.2f\n",fp/neg,tp/pos,data.score);
    //     }
    //     if(true){
    //         AUC += (fp/neg-prev_fp/neg)*(tp/pos+prev_tp/pos);
    //         prev_tp = tp;
    //         prev_fp = fp;
    //     }
    // }
    // AUC = AUC/2;
    // printf("AUC = %f\n\n",AUC);
    return 0;
}
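// DataAnalysis::getNumImagesInDirectory is called throughout these drivers but its body is not
// part of this listing. A hypothetical POSIX sketch that matches the call sites is shown below
// (it simply counts the ".jpg" files in the numbered directory; the handling of the terminal
// flag is an assumption, mirroring the "../" prefixing used for some of the output files above):

#include <dirent.h>
#include <cstring>
#include <string>

int getNumImagesInDirectoryExample(std::string* dir, bool terminal)
{
    std::string path = terminal ? ("../" + *dir) : *dir;  // assumed prefix convention
    int count = 0;
    if (DIR* d = opendir(path.c_str())) {
        while (struct dirent* entry = readdir(d)) {
            if (std::strstr(entry->d_name, ".jpg") != NULL)  // count JPEG images only
                ++count;
        }
        closedir(d);
    }
    return count;
}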