Example #1
GraphSize OperationDrawer::drawOrSizeDiagram(DiagramDrawer &drawer,
        OperationGraph &graph, const OperationDrawOptions &options, bool draw)
    {
    mCharHeight = static_cast<int>(drawer.getTextExtentHeight("W"));
    int pad = mCharHeight / 3;
    if(pad < 1)
        pad = 1;
    mPad = pad;

    GraphPoint startpos(mCharHeight, mCharHeight);
    GraphPoint pos = startpos;
    GraphSize size;
    GraphSize diagramSize;
    int maxy = 0;
    std::vector<int> classEndY;
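    // Lay out each class box left to right along the top of the diagram;
    // each box's width is padded according to its nesting depth in the graph.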
    for(size_t i=0; i<graph.mOpClasses.size(); i++)
        {
        OperationClass &opClass = graph.mOpClasses[i];
        opClass.setPosition(pos);
        size = drawClass(drawer, opClass, options, draw);
        size_t condDepth = graph.getNestDepth(i);
        size.x += static_cast<int>(condDepth) * mPad;
        opClass.setSize(size);
        pos.x += size.x + mCharHeight;
        classEndY.push_back(startpos.y + size.y);
        if(size.y > maxy)
            maxy = size.y;
        }
    diagramSize.x = pos.x;

    pos = startpos;
    pos.y = startpos.y + maxy;  // space between class rect and operations
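    // Draw each operation below the class boxes; drawnOperations tracks
    // operations already drawn as part of another operation's call chain
    // so each one is drawn only once.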
    std::set<const OperationDefinition*> drawnOperations;
    for(const auto &oper : graph.mOperations)
        {
        if(drawnOperations.find(oper) == drawnOperations.end())
            {
            size = drawOperation(drawer, pos, *oper, graph, options,
                    drawnOperations, draw);
            pos.y += size.y;
            }
        }
    if(draw)
        {
        drawLifeLines(drawer, graph.mOpClasses, classEndY, pos.y);
        }

    diagramSize.y = pos.y + mCharHeight;
    return diagramSize;
    }
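The draw flag lets the same routine be used both to measure and to render. A minimal sketch of a hypothetical caller (drawDiagram below is not part of the code above; it only reuses the types already shown): run a sizing pass first, resize the drawing surface to the returned GraphSize, then draw.

GraphSize OperationDrawer::drawDiagram(DiagramDrawer &drawer,
        OperationGraph &graph, const OperationDrawOptions &options)
    {
    // First pass: compute positions and the total diagram size without drawing.
    GraphSize size = drawOrSizeDiagram(drawer, graph, options, false);
    // ...the caller could resize its drawing surface to 'size' here...
    // Second pass: draw using the positions set during the sizing pass.
    drawOrSizeDiagram(drawer, graph, options, true);
    return size;
    }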
Example #2
int main()
{
    // Init the input video source: a video file, or (as here) the default camera.
//    cv::VideoCapture captureInput("/Users/andriybas/Downloads/elephant_wild_life.m4v");
//    cv::VideoCapture captureInput("/Users/andriybas/Documents/test.mov");
    cv::VideoCapture captureInput(0);
    
    if (!captureInput.isOpened()) {
        std::cout << "Could not open input source" << std::endl;
        return -1;
    }
    
    double fps = captureInput.get(CV_CAP_PROP_FPS); // frames per second reported by the source (may be 0 for some cameras)
    
    std::cout << "Frames per second: " << fps << std::endl;
    
    cv::namedWindow("Video classifier", CV_WINDOW_AUTOSIZE);
    
    
    int frameCount = 0;

    // loading classifiers
    cv::CascadeClassifier face_classifier(FACE_DETECT_CLASSIFIER_PATH);
    cv::CascadeClassifier profile_face_classifier(PROFILE_FACE_DETECT_CLASSIFIER_PATH);
    cv::CascadeClassifier elephant_classifier(ELEPHANT_DETECT_CLASSIFIER_PATH);
    cv::CascadeClassifier banana_classifier(BANANA_DETECT_CLASSIFIER_PATH);
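    // Note: cv::CascadeClassifier::empty() can be used here to verify that
    // each cascade file actually loaded before running detection.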


    // creating detectors
    Detector faceDetector(face_classifier, "face");
    faceDetector.setScaleFactor(2);
    
//    Detector faceProfileDetector(profile_face_classifier, "face_profile");
    
    Detector elephantDetector(elephant_classifier, "elephant");
    elephantDetector.setScaleFactor(3);
    elephantDetector.setMinNeighbours(4);
    
    
    Detector bananaDetector(banana_classifier, "banana");
    bananaDetector.setScaleFactor(2);
    bananaDetector.setMinNeighbours(6);
    
    // init cascade
    DetectCascade detectCascade;
    detectCascade.addDetector(faceDetector);
//    detectCascade.addDetector(faceProfileDetector);
    detectCascade.addDetector(elephantDetector);
    detectCascade.addDetector(bananaDetector);
    
    VideoClassifier videoClassifier;
    
    DetectedResults detectedObjects;
    cv::Mat frame;
    
    long totalTime = 0;
    int detectedFrames = 0;
    
    while(true)
    {
        captureInput >> frame;
        if (frame.empty()) {
            // No more frames (end of video or camera read failure).
            break;
        }
        
        if (frameCount < SKIP_COUNT) {
            frameCount++;
        } else {
            frameCount = 0;
            
            // Time the detection pass for this frame.
            std::chrono::high_resolution_clock::time_point t1 = std::chrono::high_resolution_clock::now();
            
            detectedObjects = detectCascade.detect(frame);
            
            std::chrono::high_resolution_clock::time_point t2 = std::chrono::high_resolution_clock::now();
            auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(t2 - t1).count();
//            std::cout << duration << std::endl;
            
            totalTime += duration;
            detectedFrames++;
            
            videoClassifier.addFrame(detectedObjects);
        }
        
        drawDetectedFrames(frame, detectedObjects);
        
        drawTags(frame, detectedObjects);
        
        std::string videoClass = videoClassifier.getVideoClass();
        
        drawClass(frame, videoClass);
        
        imshow("Video classifier", frame );
    
        if (detectedFrames > 100) {
            std::cout << "Average frame detect: " << 1.0 * totalTime / detectedFrames << "\n";
            
            detectedFrames = 0;
            totalTime = 0;
        }
        
        // Press 'c' to quit; waitKey also lets HighGUI render the displayed frame.
        if (cv::waitKey(1) == 'c') break;
    }
    
    cv::waitKey(0);
    return 0;
}
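Detector, DetectCascade, and VideoClassifier are project classes that are not shown here. As a rough sketch only (the real classes may differ), a Detector wrapper consistent with the setScaleFactor/setMinNeighbours calls above would forward those settings to cv::CascadeClassifier::detectMultiScale:

#include <opencv2/opencv.hpp>
#include <string>
#include <vector>

// Sketch of an assumed Detector wrapper; the real class in the project may differ.
class Detector {
public:
    Detector(cv::CascadeClassifier &classifier, const std::string &tag)
        : mClassifier(classifier), mTag(tag) {}

    void setScaleFactor(double scaleFactor) { mScaleFactor = scaleFactor; }
    void setMinNeighbours(int minNeighbours) { mMinNeighbours = minNeighbours; }

    // Runs the cascade on an equalized grayscale copy of the frame and
    // returns the bounding boxes of the detected objects.
    std::vector<cv::Rect> detect(const cv::Mat &frame) const {
        cv::Mat gray;
        cv::cvtColor(frame, gray, cv::COLOR_BGR2GRAY);
        cv::equalizeHist(gray, gray);
        std::vector<cv::Rect> objects;
        mClassifier.detectMultiScale(gray, objects, mScaleFactor, mMinNeighbours);
        return objects;
    }

    const std::string &tag() const { return mTag; }

private:
    cv::CascadeClassifier &mClassifier;
    std::string mTag;
    double mScaleFactor = 1.1;   // detectMultiScale default
    int mMinNeighbours = 3;      // detectMultiScale default
};

DetectCascade would then presumably run each Detector over the frame and collect the tagged rectangles, while VideoClassifier aggregates the per-frame detections into a label for the whole stream.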