Example #1
void CameraView::connectStream()
{
	
	// Create processing thread
	processingThread = new ProcessingThread(sharedImageBuffer, tabId,id);
	
	// Setup signal/slot connections
	connect(processingThread, SIGNAL(newFrame(QImage)), this, SLOT(updateFrame(QImage)));
	connect(processingThread, SIGNAL(updateStatisticsInGUI(struct ThreadStatisticsData)), this, SLOT(updateProcessingThreadStats(struct ThreadStatisticsData)));
	connect(captureThread, SIGNAL(updateStatisticsInGUI(struct ThreadStatisticsData)), this, SLOT(updateCaptureThreadStats(struct ThreadStatisticsData)));
	connect(this, SIGNAL(setROI(QRect)), processingThread, SLOT(setROI(QRect)));
	// Only enable ROI setting/resetting if frame processing is enabled
	connect(ui->frameLabel, SIGNAL(newMouseData(struct MouseData)), this, SLOT(newMouseData(struct MouseData)));
	
	// Set initial data in processing thread
	// emit setROI(QRect(0, 0, captureThread->getInputSourceWidth(), captureThread->getInputSourceHeight()));
	
	// Start capturing frames from camera
	captureThread->start((QThread::Priority)QThread::NormalPriority);
	// Start processing captured frames (if enabled)
	
	processingThread->start((QThread::Priority)QThread::TimeCriticalPriority);
	
	// Setup imageBufferBar with minimum and maximum values
	ui->imageBufferBar->setMinimum(0);
	ui->imageBufferBar->setMaximum(sharedImageBuffer->maxSize());
	// Enable/disable appropriate GUI items
	//ui->imageProcessingMenu->setEnabled(enableFrameProcessing);
	//ui->imageProcessingSettingsAction->setEnabled(enableFrameProcessing);
	// Enable "Clear Image Buffer" push button
	ui->clearImageBufferButton->setEnabled(true);
	// Set text in labels
	// ui->deviceNumberLabel->setNum(tabId);
	ui->cameraResolutionLabel->setText(
	        QString::number(captureThread->getInputSourceWidth())+
	        QString("x")+
	        QString::number(captureThread->getInputSourceHeight())+
	        QString(" (")+
	        QString::number(ui->frameLabel->width())+
	        QString("x")+
	        QString::number(ui->frameLabel->height())+
	        QString(")")
	        );
	// Set internal flag and return
	isCameraConnected=true;
	// Set frame label text
	
	
	loadLinesSettings();
	
	if(captureThread->getInputSourceWidth() > ui->frameLabel->width())
	{
	        processingThread->resizeToFitWindow = true;
	        INFOMSG(("resizing to fit window"));
	}
}
Example #2
//--------------------------------------------------------------------------------
unsigned char*  ofxCvFloatImage::getPixels(){
    if(bPixelsDirty) {

        if( cvGrayscaleImage == NULL ) {
            cvGrayscaleImage = cvCreateImage( cvSize(width,height), IPL_DEPTH_8U, 1 );
        }

        ofRectangle lastROI = getROI();
        resetImageROI(cvGrayscaleImage);
        convertFloatToGray(cvImage, cvGrayscaleImage);
        setROI(lastROI);

        if(pixels == NULL) {
            // we need pixels, allocate it
            pixels = new unsigned char[width*height];
            pixelsWidth = width;
            pixelsHeight = height;
        } else if(pixelsWidth != width || pixelsHeight != height) {
            // ROI changed, reallocate pixels for new size
            delete[] pixels;
            pixels = new unsigned char[width*height];
            pixelsWidth = width;
            pixelsHeight = height;
        }

        // copy from ROI to pixels
        for( int i = 0; i < height; i++ ) {
            memcpy( pixels + (i*width),
                    cvGrayscaleImage->imageData + (i*cvGrayscaleImage->widthStep),
                    width );
        }
        bPixelsDirty = false;
    }
	return pixels;
}
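
getPixels() above is a dirty-flag cache: the 8-bit copy is rebuilt only when bPixelsDirty indicates the float data changed. A minimal standalone sketch of the same pattern, with illustrative names (FloatBuffer, getBytes) that are not part of ofxOpenCv:

#include <cstddef>
#include <vector>

class FloatBuffer {
public:
    // Any write marks the cached 8-bit copy as stale.
    void setPixel(std::size_t i, float v) { data[i] = v; bCacheDirty = true; }

    // The float-to-byte conversion runs only when the cache is stale.
    const unsigned char* getBytes() {
        if (bCacheDirty) {
            for (std::size_t i = 0; i < data.size(); i++)
                cache[i] = static_cast<unsigned char>(data[i] * 255.0f); // no clamping, for brevity
            bCacheDirty = false;
        }
        return cache.data();
    }

private:
    std::vector<float> data = std::vector<float>(64, 0.0f);
    std::vector<unsigned char> cache = std::vector<unsigned char>(64, 0);
    bool bCacheDirty = true;
};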
//--------------------------------------------------------------------------------
void ofxCvFloatImage::setFromPixels( const unsigned char* _pixels, int w, int h ) {
    // This sets the internal image ignoring any ROI
	if( w == 0 || h == 0 ){
		ofLog(OF_LOG_ERROR, "in setFromPixels, w and h cannot = 0");
		return;
	}
    if( !bAllocated || w != width || h != height ) {
		ofLog(OF_LOG_NOTICE, "in setFromPixels, reallocating to match dimensions");	
		allocate(w,h);
	}

    if( w == width && h == height ) {
        ofRectangle lastROI = getROI();
        if( cvGrayscaleImage == NULL ) {
            cvGrayscaleImage = cvCreateImage( cvSize(width,height), IPL_DEPTH_8U, 1 );
        }
        resetImageROI(cvGrayscaleImage);
        // copy _pixels into cvGrayscaleImage
        for( int i=0; i < height; i++ ) {
            memcpy( cvGrayscaleImage->imageData + (i*cvGrayscaleImage->widthStep),
                    _pixels + (i*w),
                    width );
        }
        convertGrayToFloat(cvGrayscaleImage, cvImage);
        setROI(lastROI);
        flagImageChanged();
    } else {
        ofLog(OF_LOG_ERROR, "in setFromPixels, size mismatch");
    }
}
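
A hypothetical call site for setFromPixels(), assuming a caller-provided 8-bit grayscale buffer; the function name, buffer contents, and the 320x240 size are illustrative only:

#include <vector>
#include "ofxOpenCv.h"

void exampleSetFromPixels() {
    int w = 320, h = 240;
    // Fill a dummy grayscale buffer (a simple gradient).
    std::vector<unsigned char> gray(w * h);
    for (int i = 0; i < w * h; i++) gray[i] = (unsigned char)(i % 256);

    ofxCvFloatImage floatImg;
    floatImg.allocate(w, h);                   // matching the buffer size avoids the reallocation branch
    floatImg.setFromPixels(gray.data(), w, h);
}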
//--------------------------------------------------------------------------------
int ofxCvImage::countNonZeroInRegion( int x, int y, int w, int h ) {
    //TODO: test this method

	if (w == 0 || h == 0) return 0;
    int count = 0;

    // intersect the global ROI with the region to check
    ofRectangle iRoi = getIntersectionROI( getROI(), ofRectangle(x,y,w,h) );

    ofRectangle lastROI = getROI();
    setROI(iRoi);
	count = cvCountNonZero( cvImage );
    setROI(lastROI);

	return count;
}
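
A hypothetical call site for countNonZeroInRegion(), assuming mask is an allocated ofxCvGrayscaleImage that already holds a thresholded frame (names and sizes are illustrative):

ofxCvGrayscaleImage mask;
mask.allocate(320, 240);
// ... fill `mask`, e.g. by thresholding a camera frame ...

// Count the non-zero pixels inside a 100x80 window at (10, 20);
// the method intersects the window with the image's current ROI first.
int lit = mask.countNonZeroInRegion(10, 20, 100, 80);
ofLog(OF_LOG_NOTICE, "non-zero pixels in region: %i", lit);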
Example #5
void CameraView::handleContextMenuAction(QAction *action)
{
	if(action->text()=="Reset ROI")
	{
		emit setROI(QRect(0, 0, captureThread->getInputSourceWidth(), captureThread->getInputSourceHeight()));
	}
	
	
	printf("action: %s\n",qPrintable(action->text()));
}
Example #6
void ofxImageROI::draw(int x, int y, float w, float h)
{
    ofPushStyle();

    ofSetColor(ofColor::white);
    ofImage::draw(x, y, w, h);
    position.set(x, y);

    if (this->bAllocated()) {
        scale_w = w / this->width;
        scale_h = h / this->height;
    }

    // If no ROI has been set yet, set the ROI to the full image size
    if (bSetROI == false) {
        setROI();
    }

    if (bShow) {
        ofSetColor(ofColor::yellow);
        ofPushMatrix();
        ofTranslate(x, y);

        // Scaling with ofScale would also shrink the manipulators, so scale the corner points manually
        for (int i = 0; i < 4; i++) {
            tmpCornerPoints[i].set(cornerPoints[i].x * scale_w, cornerPoints[i].y * scale_h);
        }

        for (int i = 0; i < 4; i++) {
            int j = (i + 1) % 4;
            ofLine(tmpCornerPoints[i], tmpCornerPoints[j]);
        }

        for (int i = 0; i < 4; i++) {
            if (i == selectedCornerIndex) {
                ofSetColor(ofColor::magenta);
            } else if (i == highlightCornerIndex) {
                ofSetColor(ofColor::orange);
            } else {
                ofSetColor(ofColor::yellow);
            }

            if (mode == OFX_IMAGE_ROI_HANDLE_TWOPOINTS) {
                if (i % 2 == 0) {
                    ofRect(tmpCornerPoints[i].x - anchorSize / 2, tmpCornerPoints[i].y - anchorSize / 2, anchorSize, anchorSize);
                }
            } else {
                ofRect(tmpCornerPoints[i].x - anchorSize / 2, tmpCornerPoints[i].y - anchorSize / 2, anchorSize, anchorSize);
            }
        }
        ofPopMatrix();
    }

    ofPopStyle();
}
//--------------------------------------------------------------------------------
int ofxCvImage::countNonZeroInRegion( int x, int y, int w, int h ) {
	if( !bAllocated ){
		ofLog(OF_LOG_ERROR, "in countNonZeroInRegion, need to allocate image first");
		return 0;		
	}
	
    //TODO: test this method

	if (w == 0 || h == 0) return 0;
    int count = 0;

    // intersect the global ROI with the region to check
    ofRectangle iRoi = getIntersectionROI( getROI(), ofRectangle(x,y,w,h) );

    ofRectangle lastROI = getROI();
    setROI(iRoi);
	count = cvCountNonZero( cvImage );
    setROI(lastROI);

	return count;
}
Example #8
//--------------------------------------------------------------------------------
int ofxCvImage::countNonZeroInRegion( int x, int y, int w, int h ) {
	if( !bAllocated ){
		ofLogError("ofxCvImage") << "countNonZeroInRegion(): image not allocated";
		return 0;		
	}
	
    //TODO: test this method

	if (w == 0 || h == 0) return 0;
    int count = 0;

    // intersect the global ROI with the region to check
    ofRectangle iRoi = getIntersectionROI( getROI(), ofRectangle(x,y,w,h) );

    ofRectangle lastROI = getROI();
    setROI(iRoi);
	count = cvCountNonZero( cvImage );
    setROI(lastROI);

	return count;
}
Example #9
CameraView::CameraView(QWidget *mainwindow, QWidget *parent,int tabIdx, SharedImageBuffer *sharedImageBuffer) :
QWidget(parent),
ui(new Ui::CameraView),
sharedImageBuffer(sharedImageBuffer)
{
	// Setup UI
	ui->setupUi(this);
	
	tabId = tabIdx;
	// Initialize internal flag
	isCameraConnected=false;
	// Set initial GUI state
	ui->frameLabel->setText("No camera connected.");
	ui->imageBufferBar->setValue(0);
	ui->imageBufferLabel->setText("[000/000]");
	ui->captureRateLabel->setText("");
	ui->processingRateLabel->setText("");
	ui->deviceNumberLabel->setText("");
	ui->cameraResolutionLabel->setText("");
	ui->roiLabel->setText("");
	ui->mouseCursorPosLabel->setText("");
	ui->clearImageBufferButton->setDisabled(true);
	// Initialize ImageProcessingFlags structure
	imageProcessingFlags.grayscaleOn=false;
	imageProcessingFlags.smoothOn=false;
	imageProcessingFlags.dilateOn=false;
	imageProcessingFlags.erodeOn=false;
	imageProcessingFlags.flipOn=false;
	imageProcessingFlags.cannyOn=false;
	// Connect signals/slots
	connect(ui->frameLabel, SIGNAL(onMouseMoveEvent()), this, SLOT(updateMouseCursorPosLabel()));
	connect(ui->clearImageBufferButton, SIGNAL(released()), this, SLOT(clearImageBuffer()));
	connect(ui->frameLabel->menu, SIGNAL(triggered(QAction*)), this, SLOT(handleContextMenuAction(QAction*)));
	connect(ui->startStopDatalogger, SIGNAL(released()), this, SLOT(startStopDatalogger()));
	
	connect(this, SIGNAL(setROI(QRect)), mainwindow, SLOT(newROI(QRect)));
	
	// Register type
	qRegisterMetaType<struct ThreadStatisticsData>("ThreadStatisticsData");
	
	id++;
	
	pYarpDataloggerCtrl.initPortSend(YARPPORT_SEND_DATALOGGERCTRL,YARPPORT_RCV_DATALOGGERCTRL,"tcp");
	recordingDlg = false;
	
	isVideo = false;
}
void CameraView::handleContextMenuAction(QAction *action)
{
    if(action->text() == "Reset ROI")
    {
        emit setROI(QRect(0, 0, m_captureThread->getInputSourceWidth(), m_captureThread->getInputSourceHeight()));
    }
    else if(action->text() == "Scale to Fit Frame")
    {
        ui->frameLabel->setScaledContents(action->isChecked());
    }
    else if(action->text() == "Grayscale")
    {
        m_imageProcessingFlags.grayscaleOn = action->isChecked();
        emit newImageProcessingFlags(m_imageProcessingFlags);
    }
    else if(action->text() == "Smooth")
    {
        m_imageProcessingFlags.smoothOn = action->isChecked();
        emit newImageProcessingFlags(m_imageProcessingFlags);
    }
    else if(action->text() == "Dilate")
    {
        m_imageProcessingFlags.dilateOn = action->isChecked();
        emit newImageProcessingFlags(m_imageProcessingFlags);
    }
    else if(action->text() == "Erode")
    {
        m_imageProcessingFlags.erodeOn = action->isChecked();
        emit newImageProcessingFlags(m_imageProcessingFlags);
    }
    else if(action->text() == "Flip")
    {
        m_imageProcessingFlags.flipOn = action->isChecked();
        emit newImageProcessingFlags(m_imageProcessingFlags);
    }
    else if(action->text() == "Canny")
    {
        m_imageProcessingFlags.cannyOn = action->isChecked();
        emit newImageProcessingFlags(m_imageProcessingFlags);
    }
    else if(action->text() == "Settings...")
    {
        setImageProcessingSettings();
    }
}
//--------------------------------------------------------------------------------
IplImage*  ofxCvFloatImage::getCv8BitsImage() {
	if( !bAllocated ){
		ofLog(OF_LOG_WARNING, "in getCv8BitsImage, image is not allocated");		
	}
		
	if(bPixelsDirty) {
		if( cvGrayscaleImage == NULL ) {
			cvGrayscaleImage = cvCreateImage( cvSize(width,height), IPL_DEPTH_8U, 1 );
		}

		ofRectangle lastROI = getROI();

		resetImageROI(cvGrayscaleImage);
		convertFloatToGray(cvImage, cvGrayscaleImage);
		setROI(lastROI);
	}
	return cvGrayscaleImage;
}
Example #12
void ofxOpenNIDepthThreshold::set(ofxOpenNIROI & _roi, 
                                  bool _bUsePointCloud,
                                  bool _bUseMaskPixels,
                                  bool _bUseMaskTexture,
                                  bool _bUseDepthPixels,
                                  bool _bUseDepthTexture,
                                  int _pointCloudDrawSize,
                                  int _pointCloudResolution){
    setROI(_roi);
    pointCloudDrawSize = _pointCloudDrawSize;
    pointCloudResolution = _pointCloudResolution;
    bUsePointCloud = _bUsePointCloud;
    bUseMaskPixels = _bUseMaskPixels;
    bUseMaskTexture = _bUseMaskTexture;
    bUseDepthPixels = _bUseDepthPixels;
    bUseDepthTexture = _bUseDepthTexture;
    maskPixelFormat = OF_PIXELS_RGBA;
    bNewPixels = false;
    bNewPointCloud = false;
}
Example #13
//--------------------------------------------------------------------------------
void ofxCvFloatImage::setFromPixels( unsigned char* _pixels, int w, int h ) {
    // This sets the internal image ignoring any ROI

    if( w == width && h == height ) {
        ofRectangle lastROI = getROI();
        if( cvGrayscaleImage == NULL ) {
            cvGrayscaleImage = cvCreateImage( cvSize(width,height), IPL_DEPTH_8U, 1 );
        }
        resetImageROI(cvGrayscaleImage);
        // copy _pixels into cvGrayscaleImage
        for( int i=0; i < height; i++ ) {
            memcpy( cvGrayscaleImage->imageData + (i*cvGrayscaleImage->widthStep),
                    _pixels + (i*w),
                    width );
        }
        convertGrayToFloat(cvGrayscaleImage, cvImage);
        setROI(lastROI);
        flagImageChanged();
    } else {
        ofLog(OF_LOG_ERROR, "in setFromPixels, size mismatch");
    }
}
Example #14
void CameraView::newMouseData(struct MouseData mouseData)
{
        
        QRect selectionBox;
        
        // Set ROI
        if(mouseData.leftButtonRelease)
        {       
                int roiX = mouseData.selectionBox.x();
                int roiY = mouseData.selectionBox.y();
                int roiW = mouseData.selectionBox.width();
                int roiH = mouseData.selectionBox.height();
                
                int frameW = ui->frameLabel->width();
                int frameH = ui->frameLabel->height();
                // Copy box dimensions from mouseData to taskData
                
                if(roiW < 0)
                {
                        roiX = roiX + roiW;
                        roiW = roiW*-1;
                }
                if(roiH < 0)
                {
                        roiY = roiY + roiH;
                        roiH = roiH*-1;
                }
                
                
                selectionBox.setX(roiX-(frameW-capW)/2);
                selectionBox.setY(roiY-(frameH-capH)/2);
                selectionBox.setWidth(roiW);
                selectionBox.setHeight(roiH);
                INFOMSG((">>> froi(%d,%d[%d,%d]) roi(%d,%d[%d,%d]) cap(%d,%d) frame(%d,%d)",selectionBox.x(),selectionBox.y(),selectionBox.width(),selectionBox.height(),roiX,roiY,roiW,roiH,capW,capH,frameW,frameH));
                if(processingThread->resizeToFitWindow)
                {
                        double ratioW = (double)capW/(double)frameW;
                        double ratioH = (double)capH/(double)frameH;
                        
                        selectionBox.setX((double)roiX*ratioW);
                        selectionBox.setY((double)roiY*ratioH);
                        selectionBox.setWidth((double)roiW*ratioW);
                        selectionBox.setHeight((double)roiH*ratioH);
                        INFOMSG((">>> froi(%d,%d[%d,%d]) roi(%d,%d[%d,%d]) cap(%d,%d) frame(%d,%d)",selectionBox.x(),selectionBox.y(),selectionBox.width(),selectionBox.height(),roiX,roiY,roiW,roiH,capW,capH,frameW,frameH));
                }
                
                
                if((selectionBox.x()<0)||(selectionBox.y()<0)||
                        ((selectionBox.x()+selectionBox.width())>capW)||
                ((selectionBox.y()+selectionBox.height())>capH) || roiW <=0 || roiH <= 0)
                {
                        // Display error message
                        // QMessageBox::warning(this,"ERROR:","Selection box outside range. Please try again.");
                        WARNMSG(("ROI out of range"));
                }else{
                        
                        INFOMSG(("NEW ROI x,y(%d,%d) w,h(%d,%d)",selectionBox.x(),selectionBox.y(),selectionBox.width(),selectionBox.height()));
                        emit setROI(selectionBox);
                        
                }
        }
        
}
bool CameraView::connectToCamera(bool dropFrameIfBufferFull, int capThreadPrio, int procThreadPrio, bool enableFrameProcessing, int width, int height)
{
    // Set frame label text
    if (m_sharedImageBuffer->isSyncEnabledForDeviceNumber(m_deviceNumber))
    {
        ui->frameLabel->setText(tr("Camera connected. Waiting..."));
    }
    else
    {
        ui->frameLabel->setText(tr("Connecting to camera..."));
    }

    // Create capture thread
    m_captureThread = new CaptureThread(m_sharedImageBuffer, m_deviceNumber, dropFrameIfBufferFull, width, height);
    // Attempt to connect to camera
    if (m_captureThread->connectToCamera())
    {
        // Create processing thread
        m_processingThread = new ProcessingThread(m_sharedImageBuffer, m_deviceNumber);
        // Create image processing settings dialog
        m_imageProcessingSettingsDialog = new ImageProcessingSettingsDialog(this);
        // Setup signal/slot connections
        connect(m_processingThread, &ProcessingThread::newFrame, this, &CameraView::updateFrame);
        connect(m_processingThread, &ProcessingThread::updateStatisticsInGUI, this, &CameraView::updateProcessingThreadStats);
        connect(m_captureThread, &CaptureThread::updateStatisticsInGUI, this, &CameraView::updateCaptureThreadStats);
        connect(m_imageProcessingSettingsDialog, &ImageProcessingSettingsDialog::newImageProcessingSettings, m_processingThread, &ProcessingThread::updateImageProcessingSettings);
        connect(this, &CameraView::newImageProcessingFlags, m_processingThread, &ProcessingThread::updateImageProcessingFlags);
        connect(this, &CameraView::setROI, m_processingThread, &ProcessingThread::setROI);
        // Only enable ROI setting/resetting if frame processing is enabled
        if(enableFrameProcessing)
        {
            connect(ui->frameLabel, &FrameLabel::newMouseData, this, &CameraView::newMouseData);
        }
        // Set initial data in processing thread
        emit setROI(QRect(0, 0, m_captureThread->getInputSourceWidth(), m_captureThread->getInputSourceHeight()));
        emit newImageProcessingFlags(m_imageProcessingFlags);
        m_imageProcessingSettingsDialog->updateStoredSettingsFromDialog();

        // Start capturing frames from camera
        m_captureThread->start((QThread::Priority)capThreadPrio);
        // Start processing captured frames (if enabled)
        if(enableFrameProcessing)
        {
            m_processingThread->start((QThread::Priority)procThreadPrio);
        }

        // Setup imageBufferBar with minimum and maximum values
        ui->imageBufferBar->setMinimum(0);
        ui->imageBufferBar->setMaximum(m_sharedImageBuffer->getByDeviceNumber(m_deviceNumber)->maxSize());
        // Enable "Clear Image Buffer" push button
        ui->clearImageBufferButton->setEnabled(true);
        // Set text in labels
        ui->deviceNumberLabel->setNum(m_deviceNumber);
        ui->cameraResolutionLabel->setText(QString::number(m_captureThread->getInputSourceWidth()) + QString("x") + QString::number(m_captureThread->getInputSourceHeight()));
        // Set internal flag and return
        m_isCameraConnected = true;
        // Set frame label text
        if(!enableFrameProcessing)
        {
            ui->frameLabel->setText(tr("Frame processing disabled."));
        }
        return true;
    }
    // Failed to connect to camera
    else
        return false;
}
Example #16
//--------------------------------------------------------------------------------
void ofxCvImage::setROI( const ofRectangle& rect ) {
    setROI( (int)rect.x, (int)rect.y, (int)rect.width, (int)rect.height );
}
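
Several examples above wrap their per-region work in the same save/modify/restore discipline around setROI(). A minimal sketch of that pattern on an ofxCvGrayscaleImage (the image name and rectangle values are illustrative):

ofxCvGrayscaleImage img;
img.allocate(640, 480);

ofRectangle lastROI = img.getROI();           // remember the caller's ROI
img.setROI(ofRectangle(100, 100, 200, 150));  // restrict work to a sub-window
int lit = cvCountNonZero(img.getCvImage());   // any ROI-aware OpenCV call
img.setROI(lastROI);                          // restore before returning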
void CameraView::newMouseData(MouseData mouseData)
{
    // Local variable(s)
    int x_temp, y_temp, width_temp, height_temp;
    QRect selectionBox;

    // Set ROI
    if(mouseData.leftButtonRelease)
    {
        double xScalingFactor;
        double yScalingFactor;
        double wScalingFactor;
        double hScalingFactor;

        // Selection box calculation depends on whether frame is scaled to fit label or not
        if(!ui->frameLabel->hasScaledContents())
        {
            xScalingFactor=((double) mouseData.selectionBox.x() - ((ui->frameLabel->width() - ui->frameLabel->pixmap()->width()) / 2)) / (double) ui->frameLabel->pixmap()->width();
            yScalingFactor=((double) mouseData.selectionBox.y() - ((ui->frameLabel->height() - ui->frameLabel->pixmap()->height()) / 2)) / (double) ui->frameLabel->pixmap()->height();
            wScalingFactor = (double)m_processingThread->getCurrentROI().width() / (double)ui->frameLabel->pixmap()->width();
            hScalingFactor = (double)m_processingThread->getCurrentROI().height() / (double)ui->frameLabel->pixmap()->height();
        }
        else
        {
            xScalingFactor=(double) mouseData.selectionBox.x() / (double) ui->frameLabel->width();
            yScalingFactor=(double) mouseData.selectionBox.y() / (double) ui->frameLabel->height();
            wScalingFactor = (double)m_processingThread->getCurrentROI().width() / (double)ui->frameLabel->width();
            hScalingFactor = (double)m_processingThread->getCurrentROI().height() / (double)ui->frameLabel->height();
        }

        // Set selection box properties (new ROI)
        selectionBox.setX(xScalingFactor * m_processingThread->getCurrentROI().width() + m_processingThread->getCurrentROI().x());
        selectionBox.setY(yScalingFactor * m_processingThread->getCurrentROI().height() + m_processingThread->getCurrentROI().y());
        selectionBox.setWidth(wScalingFactor * mouseData.selectionBox.width());
        selectionBox.setHeight(hScalingFactor * mouseData.selectionBox.height());

        // Check if selection box has NON-ZERO dimensions
        if((selectionBox.width() != 0) && ((selectionBox.height()) != 0))
        {
            // Selection box can also be drawn from bottom-right to top-left corner
            if(selectionBox.width() < 0)
            {
                x_temp = selectionBox.x();
                width_temp = selectionBox.width();
                selectionBox.setX(x_temp + selectionBox.width());
                selectionBox.setWidth(width_temp * -1);
            }
            if(selectionBox.height() < 0)
            {
                y_temp = selectionBox.y();
                height_temp = selectionBox.height();
                selectionBox.setY(y_temp + selectionBox.height());
                selectionBox.setHeight(height_temp * -1);
            }

            // Check if selection box is not outside window
            if((selectionBox.x() < 0) || (selectionBox.y() < 0)||
                ((selectionBox.x() + selectionBox.width()) > (m_processingThread->getCurrentROI().x() + m_processingThread->getCurrentROI().width())) ||
                ((selectionBox.y() + selectionBox.height()) > (m_processingThread->getCurrentROI().y() + m_processingThread->getCurrentROI().height())) ||
                (selectionBox.x() < m_processingThread->getCurrentROI().x()) ||
                (selectionBox.y() < m_processingThread->getCurrentROI().y()))
            {
                // Display error message
                QMessageBox::critical(this, "Invalid Selection", tr("Selection box outside allowable range. Please try again."));
            }
            // Set ROI
            else
            {
                emit setROI(selectionBox);
            }
        }
    }
}
void ProcessingThread::run()
{
    while(1)
    {
        /////////////////////////////////
        // Stop thread if stopped=TRUE //
        /////////////////////////////////
        stoppedMutex.lock();
        if (stopped)
        {
            stopped=false;
            stoppedMutex.unlock();
            break;
        }
        stoppedMutex.unlock();
        /////////////////////////////////
        /////////////////////////////////

        // Save processing time
        processingTime=t.elapsed();
        // Start timer (used to calculate processing rate)
        t.start();

        // Get frame from queue, store in currentFrame, set ROI
        currentFrame=Mat(imageBuffer->getFrame(),currentROI);

        updateMembersMutex.lock();
        ///////////////////
        // PERFORM TASKS //
        ///////////////////
        // Note: ROI changes will take effect on next frame
        if(resetROIFlag)
            resetROI();
        else if(setROIFlag)
            setROI();
        ////////////////////////////////////
        // PERFORM IMAGE PROCESSING BELOW //
        ////////////////////////////////////
        // Grayscale conversion
        if(grayscaleOn)
            cvtColor(currentFrame,currentFrameGrayscale,CV_BGR2GRAY);
        // Smooth (in-place operations)
        if(smoothOn)
        {
            if(grayscaleOn)
            {
                switch(smoothType)
                {
                    // BLUR
                    case 0:
                        blur(currentFrameGrayscale,currentFrameGrayscale,Size(smoothParam1,smoothParam2));
                        break;
                    // GAUSSIAN
                    case 1:
                        GaussianBlur(currentFrameGrayscale,currentFrameGrayscale,Size(smoothParam1,smoothParam2),smoothParam3,smoothParam4);
                        break;
                    // MEDIAN
                    case 2:
                        medianBlur(currentFrameGrayscale,currentFrameGrayscale,smoothParam1);
                        break;
                }
            }
            else
            {
                switch(smoothType)
                {
                    // BLUR
                    case 0:
                        blur(currentFrame,currentFrame,Size(smoothParam1,smoothParam2));
                        break;
                    // GAUSSIAN
                    case 1:
                        GaussianBlur(currentFrame,currentFrame,Size(smoothParam1,smoothParam2),smoothParam3,smoothParam4);
                        break;
                    // MEDIAN
                    case 2:
                        medianBlur(currentFrame,currentFrame,smoothParam1);
                        break;
                }
            }
        }
        // Dilate
        if(dilateOn)
        {
            if(grayscaleOn)
                dilate(currentFrameGrayscale,currentFrameGrayscale,Mat(),Point(-1,-1),dilateNumberOfIterations);
            else
                dilate(currentFrame,currentFrame,Mat(),Point(-1,-1),dilateNumberOfIterations);
        }
        // Erode
        if(erodeOn)
        {
            if(grayscaleOn)
                erode(currentFrameGrayscale,currentFrameGrayscale,Mat(),Point(-1,-1),erodeNumberOfIterations);
            else
                erode(currentFrame,currentFrame,Mat(),Point(-1,-1),erodeNumberOfIterations);
        }
        // Flip
        if(flipOn)
        {
            if(grayscaleOn)
                flip(currentFrameGrayscale,currentFrameGrayscale,flipCode);
            else
                flip(currentFrame,currentFrame,flipCode);
        }
        // Canny edge detection
        if(cannyOn)
        {
            // Frame must be converted to grayscale first if grayscale conversion is OFF
            if(!grayscaleOn)
                cvtColor(currentFrame,currentFrameGrayscale,CV_BGR2GRAY);

            Canny(currentFrameGrayscale,currentFrameGrayscale,
                  cannyThreshold1,cannyThreshold2,
                  cannyApertureSize,cannyL2gradient);
        }
        ////////////////////////////////////
        // PERFORM IMAGE PROCESSING ABOVE //
        ////////////////////////////////////

        // Convert Mat to QImage: Show grayscale frame [if either Grayscale or Canny processing modes are ON]
        if(grayscaleOn||cannyOn)
            frame=MatToQImage(currentFrameGrayscale);
        // Convert Mat to QImage: Show BGR frame
        else
            frame=MatToQImage(currentFrame);
        updateMembersMutex.unlock();

        // Update statistics
        updateFPS(processingTime);
        currentSizeOfBuffer=imageBuffer->getSizeOfImageBuffer();
        // Inform GUI thread of new frame (QImage)
        emit newFrame(frame);
    }
    qDebug() << "Stopping processing thread...";
}
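
The loop above exits when a mutex-guarded stopped flag is raised from another thread. A hypothetical stop() counterpart, assuming the stoppedMutex and stopped members used in run() above (a sketch, not necessarily the project's actual implementation):

void ProcessingThread::stop()
{
    // Raise the flag under the same mutex run() checks at the top of each iteration;
    // the thread finishes its current frame and then breaks out of the while loop.
    stoppedMutex.lock();
    stopped = true;
    stoppedMutex.unlock();
}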