void ofxCvOpticalFlowLK::allocate(int _w, int _h){
	width		= _w;
	height		= _h; 
	invWidth	= 1.0f/width;
	invHeight	= 1.0f/height;
	
	// Release any previously allocated velocity images so allocate() can be called again without leaking
	if(vel_x) cvReleaseImage(&vel_x);
	if(vel_y) cvReleaseImage(&vel_y);
	
	vel_x = cvCreateImage( cvSize( width, height ), IPL_DEPTH_32F, 1  );
	vel_y = cvCreateImage( cvSize( width, height ), IPL_DEPTH_32F, 1  );
	
	cvSetZero(vel_x);
	cvSetZero(vel_y);
	
	resetROI();
}
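
A minimal usage sketch (not from the source; the instance name, dimensions, and header path are assumed):

#include "ofxCvOpticalFlowLK.h"

void exampleAllocate(){
	ofxCvOpticalFlowLK flow;
	flow.allocate(320, 240);	// creates and zeroes vel_x / vel_y, then resets the ROI
	flow.allocate(640, 480);	// calling allocate() again is safe: the old images are released first
}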
Example #2
void ProcessingThread::run()
{
    while(1)
    {
        /////////////////////////////////
        // Stop thread if stopped=TRUE //
        /////////////////////////////////
        stoppedMutex.lock();
        if (stopped)
        {
            stopped=false;
            stoppedMutex.unlock();
            break;
        }
        stoppedMutex.unlock();
        /////////////////////////////////
        /////////////////////////////////

        // Save processing time
        processingTime=t.elapsed();
        // Start timer (used to calculate processing rate)
        t.start();

        // Get frame from queue, store in currentFrame, set ROI
        currentFrame=Mat(imageBuffer->getFrame(),currentROI);
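        // Note: Mat(frame,rect) builds an ROI header that shares the dequeued frame's pixel data (no copy)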

        updateMembersMutex.lock();
        ///////////////////
        // PERFORM TASKS //
        ///////////////////
        // Note: ROI changes will take effect on next frame
        if(resetROIFlag)
            resetROI();
        else if(setROIFlag)
            setROI();
        ////////////////////////////////////
        // PERFORM IMAGE PROCESSING BELOW //
        ////////////////////////////////////
        // Grayscale conversion
        if(grayscaleOn)
            cvtColor(currentFrame,currentFrameGrayscale,CV_BGR2GRAY);
        // Smooth (in-place operations)
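        // (GAUSSIAN and MEDIAN kernel sizes must be odd; this is assumed to be validated by the caller)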
        if(smoothOn)
        {
            if(grayscaleOn)
            {
                switch(smoothType)
                {
                    // BLUR
                    case 0:
                        blur(currentFrameGrayscale,currentFrameGrayscale,Size(smoothParam1,smoothParam2));
                        break;
                    // GAUSSIAN
                    case 1:
                        GaussianBlur(currentFrameGrayscale,currentFrameGrayscale,Size(smoothParam1,smoothParam2),smoothParam3,smoothParam4);
                        break;
                    // MEDIAN
                    case 2:
                        medianBlur(currentFrameGrayscale,currentFrameGrayscale,smoothParam1);
                        break;
                }
            }
            else
            {
                switch(smoothType)
                {
                    // BLUR
                    case 0:
                        blur(currentFrame,currentFrame,Size(smoothParam1,smoothParam2));
                        break;
                    // GAUSSIAN
                    case 1:
                        GaussianBlur(currentFrame,currentFrame,Size(smoothParam1,smoothParam2),smoothParam3,smoothParam4);
                        break;
                    // MEDIAN
                    case 2:
                        medianBlur(currentFrame,currentFrame,smoothParam1);
                        break;
                }
            }
        }
        // Dilate
        if(dilateOn)
        {
            if(grayscaleOn)
                dilate(currentFrameGrayscale,currentFrameGrayscale,Mat(),Point(-1,-1),dilateNumberOfIterations);
            else
                dilate(currentFrame,currentFrame,Mat(),Point(-1,-1),dilateNumberOfIterations);
        }
        // Erode
        if(erodeOn)
        {
            if(grayscaleOn)
                erode(currentFrameGrayscale,currentFrameGrayscale,Mat(),Point(-1,-1),erodeNumberOfIterations);
            else
                erode(currentFrame,currentFrame,Mat(),Point(-1,-1),erodeNumberOfIterations);
        }
        // Flip
        if(flipOn)
        {
            if(grayscaleOn)
                flip(currentFrameGrayscale,currentFrameGrayscale,flipCode);
            else
                flip(currentFrame,currentFrame,flipCode);
        }
        // Canny edge detection
        if(cannyOn)
        {
            // Frame must be converted to grayscale first if grayscale conversion is OFF
            if(!grayscaleOn)
                cvtColor(currentFrame,currentFrameGrayscale,CV_BGR2GRAY);

            Canny(currentFrameGrayscale,currentFrameGrayscale,
                  cannyThreshold1,cannyThreshold2,
                  cannyApertureSize,cannyL2gradient);
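            // (cannyApertureSize is the Sobel aperture; OpenCV requires an odd value between 3 and 7)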
        }
        ////////////////////////////////////
        // PERFORM IMAGE PROCESSING ABOVE //
        ////////////////////////////////////

        // Convert Mat to QImage: Show grayscale frame [if either Grayscale or Canny processing modes are ON]
        if(grayscaleOn||cannyOn)
            frame=MatToQImage(currentFrameGrayscale);
        // Convert Mat to QImage: Show BGR frame
        else
            frame=MatToQImage(currentFrame);
        updateMembersMutex.unlock();

        // Update statistics
        updateFPS(processingTime);
        currentSizeOfBuffer=imageBuffer->getSizeOfImageBuffer();
        // Inform GUI thread of new frame (QImage)
        emit newFrame(frame);
    }
    qDebug() << "Stopping processing thread...";
}
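
The stopped-flag check at the top of run() implies a companion method that requests termination from another thread. A minimal sketch under that assumption (the method name is illustrative; stoppedMutex and stopped are the members used above):

void ProcessingThread::stop()
{
    // Set the flag under the mutex; run() sees it at the top of its next
    // iteration, clears it, and breaks out of the while loop.
    stoppedMutex.lock();
    stopped=true;
    stoppedMutex.unlock();
}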