Example #1
void processImage(Detector& detector, Mat &image) {
    if (!image.empty()) {
        cout << "Detecting objects on image          ... ";
        Scene scene = detector.describe(image);
        vector<Detection> detections = detector.detect(scene);
        cout << "[DONE]" << endl;

        BOOST_FOREACH(Detection d, detections) {
            drawDetection(image, d);
        }
    }
}
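Example #1 is cut off by the snippet exporter and shows no call site. A minimal, hypothetical driver for it might look as follows; the default-constructed Detector, the window name, and the argument handling are assumptions for illustration, not part of the original source.

// Hypothetical driver for processImage() above; Detector construction and the
// window name are assumptions made for illustration only.
int main(int argc, char** argv) {
    if (argc < 2) {
        cout << "Usage: detect_image <image_path>" << endl;
        return 1;
    }
    Detector detector;              // assumed: Detector is default-constructible
    Mat image = imread(argv[1]);    // load the input image (BGR)

    processImage(detector, image);  // describe, detect, and draw detections in place

    imshow("detections", image);
    waitKey(0);
    return 0;
}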
Example #2
TEST(SoftCascadeDetector, detectSeparate)
{
    std::string xml =  cvtest::TS::ptr()->get_data_path() + "cascadeandhog/cascades/inria_caltech-17.01.2013.xml";
    Detector cascade;
    cv::FileStorage fs(xml, cv::FileStorage::READ);
    ASSERT_TRUE(cascade.load(fs.getFirstTopLevelNode()));

    cv::Mat colored = cv::imread(cvtest::TS::ptr()->get_data_path() + "cascadeandhog/images/image_00000000_0.png");
    ASSERT_FALSE(colored.empty());

    cv::Mat rects, confs;

    cascade.detect(colored, cv::noArray(), rects, confs);
    ASSERT_EQ(719, confs.cols);
}
Example #3
TEST(SoftCascadeDetector, detectEmptyRoi)
{
    std::string xml =  cvtest::TS::ptr()->get_data_path() + "cascadeandhog/cascades/inria_caltech-17.01.2013.xml";
    Detector cascade;
    cv::FileStorage fs(xml, cv::FileStorage::READ);
    ASSERT_TRUE(cascade.load(fs.getFirstTopLevelNode()));

    cv::Mat colored = cv::imread(cvtest::TS::ptr()->get_data_path() + "cascadeandhog/images/image_00000000_0.png");
    ASSERT_FALSE(colored.empty());

    std::vector<Detection> objects;
    cascade.detect(colored, cv::Mat::zeros(colored.size(), CV_8UC1), objects);

    ASSERT_EQ(0, (int)objects.size());
}
Example #4
TEST(SoftCascadeDetector, detectRoi)
{
    std::string xml =  cvtest::TS::ptr()->get_data_path() + "cascadeandhog/cascades/inria_caltech-17.01.2013.xml";
    Detector cascade;
    cv::FileStorage fs(xml, cv::FileStorage::READ);
    ASSERT_TRUE(cascade.load(fs.getFirstTopLevelNode()));

    cv::Mat colored = cv::imread(cvtest::TS::ptr()->get_data_path() + "cascadeandhog/images/image_00000000_0.png");
    ASSERT_FALSE(colored.empty());

    std::vector<Detection> objects;
    std::vector<cv::Rect> rois;
    rois.push_back(cv::Rect(0, 0, 640, 480));

    cascade.detect(colored, rois, objects);
    ASSERT_EQ(719, (int)objects.size());
}
Example #5
// ./bin/test_detector /home/gmanfred/devel/ros/Vision_pipeline_new/icaro/hand_detect/cascades/palm.xml
int main(int argc, char** argv) {
    // Guard against a missing cascade path before dereferencing argv[1]
    if (argc < 2) {
        cout << "Usage: test_detector <path_to_cascade.xml>" << endl;
        return 1;
    }
    Detector det(argv[1]);

    VideoCapture cap(0); // open the default camera
    if(!cap.isOpened())  // check if we succeeded
        return 1;

    Mat edges;
    namedWindow("hand",1);
    for(;;) {
        Mat frame;
        cap >> frame; // get a new frame from camera
        vector<Rect> hands;
        hands = det.detect (frame);
        draw (frame, hands);
        imshow("hand", frame);
        if(waitKey(30) >= 0) break;
    }
    
    return 0;
}
Example #6
int main( int argc, const char** argv )
{
  string country;
  string benchmarkName;
  string inDir;
  string outDir;
  Mat frame;

  // Check that the user supplied all required arguments
  if(argc == 5)
  {
    country = argv[1];
    benchmarkName = argv[2];
    inDir = argv[3];
    outDir = argv[4];
  }
  else
  {
    printf("Use:\n\t%s [country] [benchmark name] [img input dir] [results output dir]\n",argv[0]);
    printf("\tex: %s us speed ./speed/usimages ./speed\n",argv[0]);
    printf("\n");
    printf("\ttest names are: speed, segocr, detection\n\n" );
    return 0;
  }

  if (DirectoryExists(inDir.c_str()) == false)
  {
    printf("Input dir does not exist\n");
    return 0;
  }
  if (DirectoryExists(outDir.c_str()) == false)
  {
    printf("Output dir does not exist\n");
    return 0;
  }

  vector<string> files = getFilesInDir(inDir.c_str());
  sort( files.begin(), files.end(), stringCompare );

  if (benchmarkName.compare("segocr") == 0)
  {
    Config* config = new Config(country);
    config->setDebug(false);
    config->skipDetection = true;

    AlprImpl alpr(country);
    alpr.config = config;
    
    for (int i = 0; i< files.size(); i++)
    {
      if (hasEnding(files[i], ".png") || hasEnding(files[i], ".jpg"))
      {
        string fullpath = inDir + "/" + files[i];

        frame = cv::imread(fullpath.c_str());
        
        cv::Rect roi;
        roi.x = 0;
        roi.y = 0;
        roi.width = frame.cols;
        roi.height = frame.rows;
        vector<Rect> rois;
        rois.push_back(roi);
        AlprResults results = alpr.recognize(frame, rois);
        
        char statecode[3];
        statecode[0] = files[i][0];
        statecode[1] = files[i][1];
        statecode[2] = '\0';
        string statecodestr(statecode);
                
        if (results.plates.size() == 1)
          cout << files[i] << "," << statecode << "," << results.plates[0].bestPlate.characters << endl;
        else if (results.plates.size() == 0)
          cout << files[i] << "," << statecode << "," << endl;
        else if (results.plates.size() > 1)
          cout << files[i] << "," << statecode << ",???+" << endl;

        imshow("Current LP", frame);
        waitKey(5);
      }
    }

    delete config;
  }
  else if (benchmarkName.compare("detection") == 0)
  {
    Config config(country);
    Detector* plateDetector = createDetector(&config);

    for (int i = 0; i< files.size(); i++)
    {
      if (hasEnding(files[i], ".png") || hasEnding(files[i], ".jpg"))
      {
        string fullpath = inDir + "/" + files[i];
        frame = imread( fullpath.c_str() );

        vector<PlateRegion> regions = plateDetector->detect(frame);

        imshow("Current LP", frame);
        waitKey(5);
      }
    }
    
    delete plateDetector;
  }
  else if (benchmarkName.compare("speed") == 0)
  {
    // Benchmarks speed of region detection, plate analysis, and OCR

    timespec startTime;
    timespec endTime;

    Config config(country);
    config.setDebug(false);

    AlprImpl alpr(country);
    alpr.config->setDebug(false);
    alpr.setDetectRegion(true);

    Detector* plateDetector = createDetector(&config);
    OCR ocr(&config);

    vector<double> endToEndTimes;
    vector<double> regionDetectionTimes;
    vector<double> stateIdTimes;
    vector<double> lpAnalysisPositiveTimes;
    vector<double> lpAnalysisNegativeTimes;
    vector<double> ocrTimes;
    vector<double> postProcessTimes;

    for (int i = 0; i< files.size(); i++)
    {
      if (hasEnding(files[i], ".png") || hasEnding(files[i], ".jpg"))
      {
        cout << "Image: " << files[i] << endl;

        string fullpath = inDir + "/" + files[i];
        frame = imread( fullpath.c_str() );

        getTimeMonotonic(&startTime);
        vector<Rect> regionsOfInterest;
        regionsOfInterest.push_back(Rect(0, 0, frame.cols, frame.rows));
        alpr.recognize(frame, regionsOfInterest);
        getTimeMonotonic(&endTime);
        double endToEndTime = diffclock(startTime, endTime);
        cout << " -- End to End recognition time: " << endToEndTime << "ms." << endl;
        endToEndTimes.push_back(endToEndTime);

        getTimeMonotonic(&startTime);
        vector<PlateRegion> regions = plateDetector->detect(frame);
        getTimeMonotonic(&endTime);

        double regionDetectionTime = diffclock(startTime, endTime);
        cout << " -- Region detection time: " << regionDetectionTime << "ms." << endl;
        regionDetectionTimes.push_back(regionDetectionTime);

        for (int z = 0; z < regions.size(); z++)
        {
	  
	  PipelineData pipeline_data(frame, regions[z].rect, &config);
	  
          getTimeMonotonic(&startTime);

          //stateDetector.detect(&pipeline_data);
          getTimeMonotonic(&endTime);
          double stateidTime = diffclock(startTime, endTime);
          cout << "\tRegion " << z << ": State ID time: " << stateidTime << "ms." << endl;
          stateIdTimes.push_back(stateidTime);

          getTimeMonotonic(&startTime);
          LicensePlateCandidate lp(&pipeline_data);
          lp.recognize();
          getTimeMonotonic(&endTime);
          double analysisTime = diffclock(startTime, endTime);
          cout << "\tRegion " << z << ": Analysis time: " << analysisTime << "ms." << endl;

          if (!pipeline_data.disqualified)
          {
            lpAnalysisPositiveTimes.push_back(analysisTime);

            getTimeMonotonic(&startTime);
            ocr.performOCR(&pipeline_data);
            getTimeMonotonic(&endTime);
            double ocrTime = diffclock(startTime, endTime);
            cout << "\tRegion " << z << ": OCR time: " << ocrTime << "ms." << endl;
            ocrTimes.push_back(ocrTime);

            getTimeMonotonic(&startTime);
            ocr.postProcessor.analyze("", 25);
            getTimeMonotonic(&endTime);
            double postProcessTime = diffclock(startTime, endTime);
            cout << "\tRegion " << z << ": PostProcess time: " << postProcessTime << "ms." << endl;
            postProcessTimes.push_back(postProcessTime);
          }
          else
          {
            lpAnalysisNegativeTimes.push_back(analysisTime);
          }
        }

        waitKey(5);
      }
    }

    cout << endl << "---------------------" << endl;

    cout << "End to End Time Statistics:" << endl;
    outputStats(endToEndTimes);
    cout << endl;

    cout << "Region Detection Time Statistics:" << endl;
    outputStats(regionDetectionTimes);
    cout << endl;

    cout << "State ID Time Statistics:" << endl;
    outputStats(stateIdTimes);
    cout << endl;

    cout << "Positive Region Analysis Time Statistics:" << endl;
    outputStats(lpAnalysisPositiveTimes);
    cout << endl;

    cout << "Negative Region Analysis Time Statistics:" << endl;
    outputStats(lpAnalysisNegativeTimes);
    cout << endl;

    cout << "OCR Time Statistics:" << endl;
    outputStats(ocrTimes);
    cout << endl;

    cout << "Post Processing Time Statistics:" << endl;
    outputStats(postProcessTimes);
    cout << endl;
  }
  else if (benchmarkName.compare("endtoend") == 0)
  {
    EndToEndTest e2eTest(inDir, outDir);
    e2eTest.runTest(country, files);
    
  }
}
Example #7
int main(int argc, const char * argv[]) {
    
//    VideoCapture cap(1); // open the default camera
//    if(!cap.isOpened())  // check if we succeeded
//        return -1;

#ifdef ENABLE_BLUEFOX
    BlueFoxCam* cam = new BlueFoxCam();
#else	//default opencv camera
    CvCapture* capture = cvCreateCameraCapture(-1);
    if (!capture)
        return -1;
    cvSetCaptureProperty( capture, CV_CAP_PROP_FRAME_WIDTH, WIDTH);
    cvSetCaptureProperty( capture, CV_CAP_PROP_FRAME_HEIGHT, HEIGHT);
    cvSetCaptureProperty( capture, CV_CAP_PROP_FPS, 60);
#endif

    LowPassFilter* lpf = new LowPassFilter(100, 0);
    Detector* detector = nullptr;

    try {
        detector = new Detector("../haarcascades/haarcascade_frontalface_alt.xml",
            "../haarcascades/haarcascade_profileface.xml", "../haarcascades/haarcascade_eye.xml", SCALEFACTOR);
    } catch (const runtime_error& e) {
        cout << e.what() << endl;
        return -1;   // cannot continue without the cascades
    }

    SleepDetector sd(SCALEFACTOR);
    Mat frame(HEIGHT, WIDTH, CV_8UC3);
    Mat scaled;
//    namedWindow("Acquisition");
//    namedWindow("Elaboration", WINDOW_NORMAL);
//    resizeWindow("Elaboration", WIDTH, HEIGHT);
//    namedWindow("Debug", WINDOW_NORMAL);
    //resizeWindow("Debug", 300, 300);
    
    Face prevface;
    //prevface.eyes.push_back(Rect(0,0,0,0));
    
    VideoStreamServer vss(SRV_ADDR, SRV_PORT);
    
    
    for(;;)
    {
    #ifdef ENABLE_BLUEFOX   // must match the guard used when the camera was created above
        try {
            cam->getImage(frame.data);
        } catch (const runtime_error& e) {
            cout << e.what() << endl;
        }
    #else   //default opencv camera
        frame = cvarrToMat(cvQueryFrame(capture), true);
    #endif //ENABLE_BLUEFOX
        
        detector->prepareImage(frame, scaled, prevface.face);
        
        
        if (prevface.eye.x) {   //eye already detected, so perform track only
            //detector.display(prevface, frame);
            if (prevface.eyeOpen == 2) {
                this_thread::sleep_for(std::chrono::milliseconds(100));
                if (lpf->Perform_digital(sd.isOpen(prevface.eyetpl, SleepDetector::SD_ADAPTIVE_THRESHOLDING) ? 2 : 0) == 0) {
                    cout << "Beep!----------------------------------------------------" << endl;
                }
            } else {
                prevface.eyeOpen = lpf->Perform_digital(sd.isOpen(prevface.eyetpl, SleepDetector::SD_ADAPTIVE_THRESHOLDING) ? 2 : 0);
            }

            sd.display(frame, prevface.eye.tl());
            //imshow("Elaboration", prevface.eyetpl);
            detector->trackEye(scaled, prevface);
        }
        else    //eye not yet detected
        {
            lpf->Perform_analog(1);
            detector->detect(scaled, prevface);
        }


        detector->display(prevface, frame);
//        imshow("Aquisition", frame);
        if (vss.isRunning()) {
            vss.queueFrame(frame);
        }

        if(waitKey(10) >= 0) break;
    }
    vss.stop();

    // deinitialize camera
#ifdef ENABLE_BLUEFOX
    delete cam;
#endif
    
    delete detector;
    delete lpf;
    return 0;
}
Example #8
int main(int argc, char** argv)
{
    CommandLineParser parser(argc, argv, keys);
    parser.about("This sample demonstrates the use of the HoG descriptor.");
    if (parser.has("help"))
    {
        parser.printMessage();
        return 0;
    }
    int camera = parser.get<int>("camera");
    string file = parser.get<string>("video");
    if (!parser.check())
    {
        parser.printErrors();
        return 1;
    }

    VideoCapture cap;
    if (file.empty())
        cap.open(camera);
    else
    {
        file = samples::findFileOrKeep(file);
        cap.open(file);
    }
    if (!cap.isOpened())
    {
        cout << "Can not open video stream: '" << (file.empty() ? "<camera>" : file) << "'" << endl;
        return 2;
    }

    cout << "Press 'q' or <ESC> to quit." << endl;
    cout << "Press <space> to toggle between Default and Daimler detector" << endl;
    Detector detector;
    Mat frame;
    for (;;)
    {
        cap >> frame;
        if (frame.empty())
        {
            cout << "Finished reading: empty frame" << endl;
            break;
        }
        int64 t = getTickCount();
        vector<Rect> found = detector.detect(frame);
        t = getTickCount() - t;

        // show the window
        {
            ostringstream buf;
            buf << "Mode: " << detector.modeName() << " ||| "
                << "FPS: " << fixed << setprecision(1) << (getTickFrequency() / (double)t);
            putText(frame, buf.str(), Point(10, 30), FONT_HERSHEY_PLAIN, 2.0, Scalar(0, 0, 255), 2, LINE_AA);
        }
        for (vector<Rect>::iterator i = found.begin(); i != found.end(); ++i)
        {
            Rect &r = *i;
            detector.adjustRect(r);
            rectangle(frame, r.tl(), r.br(), cv::Scalar(0, 255, 0), 2);
        }
        imshow("People detector", frame);

        // interact with user
        const char key = (char)waitKey(30);
        if (key == 27 || key == 'q') // ESC or 'q'
        {
            cout << "Exit requested" << endl;
            break;
        }
        else if (key == ' ')
        {
            detector.toggleMode();
        }
    }
    return 0;
}
Example #9
int main()
{
//    Aquila::WaveFile wav("../../odonnell_you_go_girl.wav");
//    Aquila::WaveFile wav("../../harvey_super_cool.wav");
    //Aquila::WaveFile wav("../../miller_larry.wav");
//    Aquila::WaveFile wav("../../carlin_pc.wav");
//    Aquila::WaveFile wav("../../ireland_ouch.wav");
//    Aquila::WaveFile wav("../../mbi04w1.wav");
//    Aquila::WaveFile wav("../kabanos.wav");
//    Aquila::WaveFile wav("../../dbi03kaban.wav");
//    Aquila::WaveFile wav("../../dbi09w1short.wav");
//    Aquila::WaveFile wav("../../mbi04zapam.wav");
//    Aquila::WaveFile wav("../../mbi04poprzedni.wav");
//    Aquila::WaveFile wav("../../eastwood_lawyers.wav");
//    Aquila::WaveFile wav("../../mbi02w1.wav");
//    Aquila::WaveFile wav("../../mbi04w1.wav");
//    Aquila::WaveFile wav("../../dbi03w1s14.wav");
    Aquila::WaveFile wav("../../mwr35w1s14.wav");

    std::cout << "Filename: "           << wav.getFilename();
    std::cout << "\nLength: "           << wav.getAudioLength()     << " ms";
    std::cout << "\nSample frequency: " << wav.getSampleFrequency() << " Hz";
    std::cout << "\nChannels: "         << wav.getChannelsNum();
    std::cout << "\nByte rate: "        << wav.getBytesPerSec()/1024 << " kB/s";
    std::cout << "\nBits per sample: "  << wav.getBitsPerSample() << "b";
    std::cout << "\nSamples: "          << wav.getSampleFrequency()*wav.getAudioLength()/1000  << " samples\n";
//    for (int i = 0; i <wav.getSamplesCount() ; ++i) {
//        std::cout<<wav.sample(i)<<" ";
//    }

//    VAD *vad = new VADImp();
//    vad->detect(wav,1);
//
//    ResultPlotter *result = new ResultPlotter();
//    result->plot(wav);

    EnergyBasedDetector* detector = new EnergyBasedDetector();
    detector->detect(wav);
    delete detector;

    Detector detectorSFF;
    detectorSFF.detect(wav);

    NewDetector detectorSFFNew;
    detectorSFFNew.detect(wav);

//    AquilaFft spectrum(wav.getSamplesCount());
//    const SampleType* x = wav.toArray();
//    SpectrumType mySpectrum;
//    mySpectrum = spectrum.fft(x);
//
//    //writing to file to plot results
//    system("touch spectrum");
//    // open the file for writing
//    ofstream file2("spectrum");
//    if(file2){
//        for (size_t i = 0; i< mySpectrum.size() ; i++) {
//            file2 << mySpectrum.at(i) << endl;
//        }
//        file2.close();
//    } else{
//        cout<<"ERROR: cannot open the file"<<endl;
//    }

    return 0;
}
Example #10
int main( int argc, char** argv ) 
{
	ofstream fout;
	ofstream dbout;
	fout.open("people.csv", ofstream::out);
	dbout.open("db.dat", ofstream::out); //format: <classname> \n <filename> \n <id> <code>
	double t;
	int ms;
    
	if (argc < 3)
	{
		fprintf(stderr, "Usage: exportCode <path_to_input_image_dir> <isDetect>\n");
		exit(1);
	}

	Detector detector;
	Classifier classifier;

	mkdir(argv[2], S_IRWXU);
	DIR *pDIR;
	struct dirent *entry;
	struct stat buf;
	if ((pDIR = opendir(argv[1])) == NULL) {
		std::cout << "cannot open input dir" << std::endl;
		exit(1);
	}
	entry = readdir(pDIR);
	int id = 1;
	while(entry != NULL)
	{
		if(0 != strcmp( ".", entry->d_name) && //Skip these dir
		   0 != strcmp( "..", entry->d_name) )
		{
			char * name = entry->d_name;
			stat(name, &buf);
			std::cout << name<<std::endl;
			std::string s2 = name;
			fout<<id<<','<<name<<',';
			int goodCount = 0;
			
			DIR* pDIR2;
			std::string s3 = argv[1];
			std::string origin_path = s3 + '/' + s2;
			pDIR2 = opendir(origin_path.data());
			struct dirent *entry2;
			entry2 = readdir(pDIR2);
			while(entry2 != NULL){
				if(0 != strcmp( ".", entry2->d_name) && 0 != strcmp( "..", entry2->d_name) )
				{
					//std::cout<<"\t"<<entry2->d_name<<std::endl;
					std::string img_origin_path = origin_path + '/' + entry2->d_name;
					Mat src;
					if (atoi(argv[2])==1)
						src  = detector.detect(img_origin_path.data());
					else
						src = imread(img_origin_path.data());
					Mat img;
					if (src.empty()){
						entry2 = readdir(pDIR2);
						continue;
					}
					goodCount++;
					if (src.channels() == 3){
						cvtColor(src, img, CV_BGR2GRAY);   // OpenCV imread loads BGR
					}
					else if (src.channels() == 4){
						cvtColor(src, img, CV_BGRA2GRAY);
					}
					else if (src.channels() == 1){
						img = src;
					}
					else{
						cout<<"channel error: "<<src.channels()<<endl;
						entry2 = readdir(pDIR2);
						continue;
					}
					float* code = classifier.encodeImg(img);
					dbout<<entry->d_name<<endl;
					dbout<<entry2->d_name<<endl;
					dbout<<id<<" ";
					for (int i = 0; i < classifier.getCodeDimension(); i++){
						dbout<<code[i]<<" ";
					}
					dbout<<endl;
					delete [] code;
					code = NULL;
				}
				entry2 = readdir(pDIR2);
			}
			fout<<goodCount<<endl;
			id++;
			closedir(pDIR2);
			
		}
		entry = readdir(pDIR);             //Next file in directory        
	}
	closedir(pDIR);
	fout.close();
	dbout.close();
	return 0;
}
Пример #11
0
TEST_CYCLE()
{
    cascade.detect(colored, cv::noArray(), objectBoxes);
}
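Example #11 shows only the timed body of an OpenCV performance test; the surrounding fixture is not included. A minimal sketch of such a fixture, assuming OpenCV's perf framework (PERF_TEST, getDataPath, TEST_CYCLE, SANITY_CHECK_NOTHING) and the same cascade and image paths as Examples #2-#4, could look like this; the precompiled-header name and the Detector type being in scope are assumptions.

#include "perf_precomp.hpp"  // assumed perf-framework umbrella header for the module

// Hypothetical fixture around the TEST_CYCLE fragment above.
PERF_TEST(SoftCascadeDetector, detect)
{
    // Load the soft cascade from XML (path follows Examples #2-#4).
    cv::FileStorage fs(getDataPath("cascadeandhog/cascades/inria_caltech-17.01.2013.xml"),
                       cv::FileStorage::READ);
    Detector cascade;  // assumed to be the same Detector type as in Examples #2-#4
    ASSERT_TRUE(cascade.load(fs.getFirstTopLevelNode()));

    // Read the test image (path follows Examples #2-#4).
    cv::Mat colored = cv::imread(getDataPath("cascadeandhog/images/image_00000000_0.png"));
    ASSERT_FALSE(colored.empty());

    cv::Mat objectBoxes;

    // Timed section: exactly the fragment shown in Example #11.
    TEST_CYCLE()
    {
        cascade.detect(colored, cv::noArray(), objectBoxes);
    }

    SANITY_CHECK_NOTHING();  // or SANITY_CHECK(objectBoxes), depending on the framework version
}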