Example #1
int main(int argc, char* argv[])
{
    // Initialising input video
    int xRes = 640;
    int yRes = 480;
    int cameraIndex = 0;
    if (argc > 2) {
        xRes = std::atoi(argv[1]);
        yRes = std::atoi(argv[2]);
    }
    if (argc > 3) {
        cameraIndex = std::atoi(argv[3]);
    }

    // The source of input images
    cv::VideoCapture capture(cameraIndex);
    if (!capture.isOpened())
    {
        std::cerr << "Unable to initialise video capture." << std::endl;
        return 1;
    }
#ifdef OPENCV3
    capture.set(cv::CAP_PROP_FRAME_WIDTH, xRes);
    capture.set(cv::CAP_PROP_FRAME_HEIGHT, yRes);
#else
    capture.set(CV_CAP_PROP_FRAME_WIDTH, xRes);
    capture.set(CV_CAP_PROP_FRAME_HEIGHT, yRes);
#endif
    cv::Mat inputImage;

    // We need separate Chilitags instances to compare find() with different
    // detection/tracking parameters on the same image

    // This one is the reference Chilitags
    chilitags::Chilitags detectedChilitags;
    detectedChilitags.setFilter(0, 0.0f);

    // This one will be called with TRACK_ONLY when it has previously detected
    // something
    chilitags::Chilitags trackedChilitags;
    trackedChilitags.setFilter(0, 0.0f);

    cv::namedWindow("DisplayChilitags");

    // Do we want to run and show the reference detection?
    bool showReference = true;
    // Do we want to run and show the tracking-based detection?
    bool showTracking = true;

    // In the tracking-based detection, we need to know whether there is
    // something to track
    bool tracking = false;

    char keyPressed;
    while ('q' != (keyPressed = (char) cv::waitKey(1))) {

        // toggle the processing, according to user input
        if (keyPressed == 't') showTracking = !showTracking;
        if (keyPressed == 'd') showReference = !showReference;

        capture.read(inputImage);

        cv::Mat outputImage = inputImage.clone();

        // nothing new here
        if (showReference) {
            int64 startTime = cv::getTickCount();
            auto tags = detectedChilitags.find(inputImage);
            int64 endTime = cv::getTickCount();
            drawTags(outputImage, tags, startTime, endTime, true);
        }

        if (showTracking) {
            int64 startTime = cv::getTickCount();
            // Tracking needs something to track, so it is initialised with a
            // regular detection (TRACK_AND_DETECT). Once something has been
            // detected, tracking takes over: subsequent calls use TRACK_ONLY,
            // which only updates the tags returned by the previous call.
            // As soon as nothing is returned, we fall back to regular detection.
            auto tags = trackedChilitags.find(
                inputImage,
                tracking ? chilitags::Chilitags::TRACK_ONLY
                         : chilitags::Chilitags::TRACK_AND_DETECT);
            int64 endTime = cv::getTickCount();
            drawTags(outputImage, tags, startTime, endTime, false);
            tracking = !tags.empty();
        }

        cv::imshow("DisplayChilitags", outputImage);
    }

    cv::destroyWindow("DisplayChilitags");
    capture.release();

    return 0;
}
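
All the calls above go through a drawTags() helper that the listing does not include. Below is a minimal, hypothetical sketch of the five-argument version used in this example, assuming find() returns the usual chilitags map from tag id to a 4x2 matrix of corner coordinates (chilitags::Quad); the OpenCV and chilitags headers are assumed to be included, as in the listing.

// Hypothetical sketch of the drawTags() helper assumed by Example #1: it
// outlines each tag, prints its id, and overlays the processing time derived
// from the two tick counts.
void drawTags(cv::Mat &outputImage,
              const std::map<int, chilitags::Quad> &tags,
              int64 startTime, int64 endTime, bool detection)
{
    // One colour per processing mode, chosen arbitrarily for this sketch
    const cv::Scalar color = detection ? cv::Scalar(0, 0, 255) : cv::Scalar(255, 0, 0);

    for (const auto &tag : tags) {
        // Wrap the 4x2 corner matrix for easy point access
        const cv::Mat_<cv::Point2f> corners(tag.second);
        for (size_t i = 0; i < 4; ++i) {
            cv::line(outputImage, corners(i), corners((i + 1) % 4), color, 2);
        }
        cv::putText(outputImage, cv::format("%d", tag.first), corners(0),
                    cv::FONT_HERSHEY_SIMPLEX, 0.5, color);
    }

    // Convert the tick difference into milliseconds
    double processingMs = 1000.0 * (endTime - startTime) / cv::getTickFrequency();
    cv::putText(outputImage,
                cv::format("%s: %4.1f ms, %d tag(s)",
                           detection ? "detection" : "tracking",
                           processingMs, (int) tags.size()),
                cv::Point(8, detection ? 16 : 32),
                cv::FONT_HERSHEY_SIMPLEX, 0.5, color);
}

Example #2 below calls a two-argument drawTags(outputImage, tags), presumably an overload of the same helper that simply skips the timing overlay.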
Example #2
int main(int argc, char* argv[])
{
    // Initialising input video
    int xRes = 640;
    int yRes = 480;
    int cameraIndex = 0;
    if (argc > 2) {
        xRes = std::atoi(argv[1]);
        yRes = std::atoi(argv[2]);
    }
    if (argc > 3) {
        cameraIndex = std::atoi(argv[3]);
    }

    // The source of input images
    cv::VideoCapture capture(cameraIndex);
    if (!capture.isOpened())
    {
        std::cerr << "Unable to initialise video capture." << std::endl;
        return 1;
    }
#ifdef OPENCV3
    capture.set(cv::CAP_PROP_FRAME_WIDTH, xRes);
    capture.set(cv::CAP_PROP_FRAME_HEIGHT, yRes);
#else
    capture.set(CV_CAP_PROP_FRAME_WIDTH, xRes);
    capture.set(CV_CAP_PROP_FRAME_HEIGHT, yRes);
#endif
    cv::Mat inputImage;

    chilitags::Chilitags chilitags;
    chilitags.setFilter(0, 0.);

    cv::namedWindow("DisplayChilitags");

    char keyPressed;
    const char* trigName = "DETECT_PERIODICALLY";
    chilitags::Chilitags::DetectionTrigger trig = chilitags::Chilitags::DETECT_PERIODICALLY;
    while ('q' != (keyPressed = (char) cv::waitKey(1))) {

        // toggle the processing mode, according to user input
        if(keyPressed == 't'){
            if(trig == chilitags::Chilitags::DETECT_PERIODICALLY){
                trig = chilitags::Chilitags::ASYNC_DETECT_PERIODICALLY;
                trigName = "ASYNC_DETECT_PERIODICALLY";
            }
            else if(trig == chilitags::Chilitags::ASYNC_DETECT_PERIODICALLY){
                trig = chilitags::Chilitags::ASYNC_DETECT_ALWAYS;
                trigName = "ASYNC_DETECT_ALWAYS";
            }
            else{
                trig = chilitags::Chilitags::DETECT_PERIODICALLY;
                trigName = "DETECT_PERIODICALLY";
            }
        }

        capture.read(inputImage);

        cv::Mat outputImage = inputImage.clone();

        auto tags = chilitags.find(inputImage, trig);
        drawTags(outputImage, tags);

        //Print detection trigger
        cv::putText(outputImage,
                cv::format("Detection trigger: %s (press 't' to toggle)", trigName),
                cv::Point(8,yRes - 24),
                cv::FONT_HERSHEY_SIMPLEX, 0.5, COLOR);

        cv::putText(outputImage,
                "Run 'top -H -p `pgrep async-detection`' to see running threads",
                cv::Point(8,yRes - 8),
                cv::FONT_HERSHEY_SIMPLEX, 0.5, COLOR);

        cv::imshow("DisplayChilitags", outputImage);
    }

    cv::destroyWindow("DisplayChilitags");
    capture.release();

    return 0;
}
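
The on-screen hint about top -H points at the main difference between the triggers: with the two ASYNC variants, full detection runs on a background thread and find() itself only performs the cheap tracking update, so its per-call latency should drop sharply. A quick way to observe this is to reuse the tick-count timing from Example #1 inside the loop above (all variable names come from that example):

// Sketch: time a single find() call under the currently selected trigger.
// Drop this in place of the plain find() call in the loop above.
int64 startTime = cv::getTickCount();
auto tags = chilitags.find(inputImage, trig);
int64 endTime = cv::getTickCount();
double elapsedMs = 1000.0 * (endTime - startTime) / cv::getTickFrequency();
drawTags(outputImage, tags);
cv::putText(outputImage,
        cv::format("find() took %4.1f ms", elapsedMs),
        cv::Point(8, yRes - 40),
        cv::FONT_HERSHEY_SIMPLEX, 0.5, COLOR);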
Example #3
int main( )
{
    // Initialise the input video source: the default camera here, but a
    // video file path could be passed to cv::VideoCapture instead
    cv::VideoCapture captureInput(0);
    
    if (!captureInput.isOpened()) {
        std::cout << "Could not open input source" << std::endl;
        return -1;
    }
    
    double fps = captureInput.get(CV_CAP_PROP_FPS); // get the frames per second of the video
    
    std::cout << "Frames per second: " << fps << std::endl;
    
    cv::namedWindow("window1", CV_WINDOW_AUTOSIZE);
    
    
    int frameCount = 0;

    // loading classifiers
    cv::CascadeClassifier face_classifier(FACE_DETECT_CLASSIFIER_PATH);
    cv::CascadeClassifier profile_face_classifier(PROFILE_FACE_DETECT_CLASSIFIER_PATH);
    cv::CascadeClassifier elephant_classifier(ELEPHANT_DETECT_CLASSIFIER_PATH);
    cv::CascadeClassifier banana_classifier(BANANA_DETECT_CLASSIFIER_PATH);


    // creating detectors
    Detector faceDetector(face_classifier, "face");
    faceDetector.setScaleFactor(2);
    
//    Detector faceProfileDetector(profile_face_classifier, "face_profile");
    
    Detector elephantDetector(elephant_classifier, "elephant");
    elephantDetector.setScaleFactor(3);
    elephantDetector.setMinNeighbours(4);
    
    
    Detector bananaDetector(banana_classifier, "banana");
    bananaDetector.setScaleFactor(2);
    bananaDetector.setMinNeighbours(6);
    
    // init cascade
    DetectCascade detectCascade;
    detectCascade.addDetector(faceDetector);
//    detectCascade.addDetector(faceProfileDetector);
    detectCascade.addDetector(elephantDetector);
    detectCascade.addDetector(bananaDetector);
    
    VideoClassifier videoClassifier;
    
    DetectedResults detectedObjects;
    cv::Mat frame;
    
    long totalTime = 0;
    int detectedFrames = 0;
    
    while(true)
    {
        captureInput >> frame;
        if (frame.empty()) break;    // stop on end of stream or camera error
        
        if (frameCount < SKIP_COUNT) {
            frameCount++;
        } else {
            frameCount = 0;
            
            std::chrono::high_resolution_clock::time_point t1 = std::chrono::high_resolution_clock::now();
            detectedObjects = detectCascade.detect(frame);
            std::chrono::high_resolution_clock::time_point t2 = std::chrono::high_resolution_clock::now();
            auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(t2 - t1).count();
            
            totalTime += duration;
            detectedFrames++;
            
            videoClassifier.addFrame(detectedObjects);
        }
        
        drawDetectedFrames(frame, detectedObjects);
        drawTags(frame, detectedObjects);
        
        std::string videoClass = videoClassifier.getVideoClass();
        drawClass(frame, videoClass);
        
        cv::imshow("Video classifier", frame);
        
        if (detectedFrames > 100) {
            std::cout << "Average detection time per frame (ms): " << 1.0 * totalTime / detectedFrames << "\n";
            detectedFrames = 0;
            totalTime = 0;
        }
        
        // Press 'c' to quit; waitKey() also lets imshow() refresh the window
        if (cv::waitKey(1) == 'c') break;
    }
    
    captureInput.release();
    cv::destroyAllWindows();
    return 0;
}
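
Detector, DetectCascade, DetectedResults and VideoClassifier are project-specific types that the listing does not show. Judging from how they are used, Detector is a thin wrapper around cv::CascadeClassifier; a hypothetical minimal version might look like the sketch below (the DetectedObject struct, field names and defaults are assumptions, not the project's actual code).

// Hypothetical sketch of the Detector wrapper assumed by Example #3: it simply
// forwards to cv::CascadeClassifier::detectMultiScale with the configured
// scale factor and minimum neighbour count, and labels the resulting boxes.
#include <opencv2/objdetect/objdetect.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <string>
#include <vector>

struct DetectedObject {
    cv::Rect box;
    std::string label;
};

class Detector {
public:
    Detector(cv::CascadeClassifier classifier, std::string label)
        : classifier_(classifier), label_(label) {}

    void setScaleFactor(double scaleFactor) { scaleFactor_ = scaleFactor; }
    void setMinNeighbours(int minNeighbours) { minNeighbours_ = minNeighbours; }

    std::vector<DetectedObject> detect(const cv::Mat &frame) {
        // Cascade classifiers work on grayscale images
        cv::Mat gray;
        cv::cvtColor(frame, gray, CV_BGR2GRAY);   // OpenCV 2-style constant, as in the listing

        std::vector<cv::Rect> boxes;
        classifier_.detectMultiScale(gray, boxes, scaleFactor_, minNeighbours_);

        std::vector<DetectedObject> result;
        for (const cv::Rect &box : boxes) {
            result.push_back({box, label_});
        }
        return result;
    }

private:
    cv::CascadeClassifier classifier_;
    std::string label_;
    double scaleFactor_ = 1.1;   // detectMultiScale default
    int minNeighbours_ = 3;      // detectMultiScale default
};

DetectCascade would then presumably run every registered Detector on the frame and concatenate the results into DetectedResults.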