void CaptureThread::run()
{
    LOGFUNC("CaptureThread::run");

    while(1)
    {
        ////////////////////////////////
        // Stop thread if doStop=TRUE //
        ////////////////////////////////
        doStopMutex.lock();
        if(doStop)
        {
            doStop=false;
            doStopMutex.unlock();
            break;
        }
        doStopMutex.unlock();
        //////////////////////////////////////
        // Stop capturing if doPause = TRUE //
        //////////////////////////////////////
        doPauseMutex.lock();
        if(doPause)
        {
            pauseCondition.wait(&doPauseMutex);
        }
        doPauseMutex.unlock();

        // Save capture time
        captureTime=t.elapsed();
        // Start timer (used to calculate capture rate)
        t.start();

        // Synchronize with other streams (if enabled for this stream)
        sharedImageBuffer->sync(CameraDevice->deviceNumber);

        mCaptureFrameMutex.lock();
        // Capture frame (if available)
        if(!CameraDevice->grabFrame(&mGrabbedFrame))
        {
            // Release the mutex before retrying, otherwise the next iteration would deadlock
            mCaptureFrameMutex.unlock();
            continue;
        }

        if(imageNeeded){
            if(imageToCapture!=NULL)
                *imageToCapture = mGrabbedFrame.copy();
            imageNeeded = false;
        }
        // Add frame to buffer
        if(mGrabbedFrame.width()!=0 && mGrabbedFrame.height()!=0){
            sharedImageBuffer->getByDeviceNumber(CameraDevice->deviceNumber)->add(mGrabbedFrame, dropFrameIfBufferFull);
        }
        else{
            LOG_D("Invalid frame");
        }
        mCaptureFrameMutex.unlock();

        // Update statistics
        updateFPS(captureTime);
        statsData.nFramesProcessed++;
        // Inform GUI of updated statistics
        emit updateStatisticsInGUI(statsData);
    }
}
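
/*
 * Hedged sketch, not part of the original listing: the loop above only reads
 * doStop/doPause and waits on pauseCondition, so some other thread must drive
 * those flags. A minimal counterpart could look like the methods below; the
 * names stop()/pause()/resume() and this exact wiring are assumptions for
 * illustration only (requires <QMutex>/<QWaitCondition> as used by run()).
 */
void CaptureThread::stop()
{
    QMutexLocker stopLocker(&doStopMutex);
    doStop = true;

    // If run() is parked in pauseCondition.wait(), wake it so it can observe
    // doStop on its next loop iteration instead of sleeping forever.
    QMutexLocker pauseLocker(&doPauseMutex);
    doPause = false;
    pauseCondition.wakeAll();
}

void CaptureThread::pause()
{
    QMutexLocker pauseLocker(&doPauseMutex);
    doPause = true;                  // run() will block on pauseCondition
}

void CaptureThread::resume()
{
    QMutexLocker pauseLocker(&doPauseMutex);
    doPause = false;
    pauseCondition.wakeAll();        // release run() from pauseCondition.wait()
}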
void MyProcessing::run()
{
    long long prevFrameNum = -1;
    long long curFrameNum = 0;

    while(1)
    {
        ////////////////////////////////
        // Stop thread if doStop=TRUE //
        ////////////////////////////////
        doStopMutex.lock();
        if(doStop)
        {
            doStop=false;
            doStopMutex.unlock();
            break;
        }
        doStopMutex.unlock();
        /////////////////////////////////
        /////////////////////////////////

        int idx = rb->curRead();
        FrameData *d = rb->getPointer(idx);
        curFrameNum = d->frameNumber;
        if (prevFrameNum < curFrameNum) {
            // Save processing time
            processingTime=t.elapsed();
            // Start timer (used to calculate processing rate)
            t.start();

            processingMutex.lock();
            //rb->lockRead();
            pluginStack->process(d);

            Mat currentFrame = d->frame.clone();
            //cout << "frame number : " << d->frameNumber << endl;

            //rb->unlockRead();
            // Convert Mat to QImage
            frame=MatToQImage(currentFrame);
            processingMutex.unlock();

            // Inform GUI thread of new frame (QImage)
            emit newFrame(frame);

            // Update statistics
            updateFPS(processingTime);
            statsData.nFramesProcessed++;
            // Inform GUI of updated statistics
            emit updateStatisticsInGUI(statsData);
        }//if

        // With "true" it jumps directly ahead to the most recent data available. This means that we can possibly skip bins.
        rb->nextRead(true);
        prevFrameNum=curFrameNum;
    }
    //qDebug() << "Stopping processing thread...";

}
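
/*
 * Hedged sketch, not the project's actual ring buffer: only curRead(),
 * getPointer() and nextRead() are visible in MyProcessing::run(), so this
 * reconstructs the minimal reader/writer protocol they imply. The FrameData
 * layout follows its use above; the write() side, the member names and the
 * locking are assumptions for illustration.
 */
#include <vector>
#include <QMutex>
#include <QMutexLocker>
#include <opencv2/core.hpp>

struct FrameData
{
    long long frameNumber = -1;
    cv::Mat frame;
};

class RingBuffer
{
public:
    explicit RingBuffer(int size) : slots(size) {}

    // Writer side: place new data in the next slot and remember it as the newest.
    void write(const FrameData &d)
    {
        QMutexLocker locker(&mutex);
        writeIdx = (writeIdx + 1) % (int)slots.size();
        slots[writeIdx] = d;
    }

    // Reader side: slot index the reader is currently positioned on.
    int curRead()
    {
        QMutexLocker locker(&mutex);
        return readIdx;
    }

    FrameData *getPointer(int idx) { return &slots[idx]; }

    // Advance the read position. With skipToLatest == true the reader jumps
    // straight to the most recently written slot, so intermediate slots may
    // be skipped -- the behaviour relied on by MyProcessing::run().
    void nextRead(bool skipToLatest)
    {
        QMutexLocker locker(&mutex);
        readIdx = skipToLatest ? writeIdx
                               : (readIdx + 1) % (int)slots.size();
    }

private:
    std::vector<FrameData> slots;
    int readIdx = 0;
    int writeIdx = 0;
    QMutex mutex;
};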
void ProcessingThread::run()
{
    qDebug() << "Starting processing thread...";
    while(1)
    {
        ////////////////////////////////
        // Stop thread if doStop=TRUE //
        ////////////////////////////////
        doStopMutex.lock();
        if(doStop)
        {
            doStop=false;
            doStopMutex.unlock();
            break;
        }
        doStopMutex.unlock();
        /////////////////////////////////
        /////////////////////////////////

        // Save processing time
        processingTime=t.elapsed();
        // Start timer (used to calculate processing rate)
        t.start();

        processingMutex.lock();
        // Get frame from queue, store in currentFrame, set ROI
        currentFrame=Mat(sharedImageBuffer->getByDeviceNumber(deviceNumber)->get().clone(), currentROI);

        ////////////////////////////////////
        // PERFORM IMAGE PROCESSING BELOW //
        ////////////////////////////////////

        // Grayscale conversion (in-place operation)
        if(imgProcFlags.grayscaleOn && (currentFrame.channels() == 3 || currentFrame.channels() == 4)) {
            cvtColor(currentFrame, currentFrame, CV_BGR2GRAY, 1);
        }

        // Save the original frame after grayscale conversion, so the VideoWriter works correctly
        if(emitOriginal || captureOriginal)
            originalFrame = currentFrame.clone();

        // Fill the buffer that is processed by the Magnificator
        fillProcessingBuffer();

        if (processingBufferFilled()) {
            if(imgProcFlags.colorMagnifyOn)
            {
                magnificator.colorMagnify();
                currentFrame = magnificator.getFrameLast();
            }
            else if(imgProcFlags.laplaceMagnifyOn)
            {
                magnificator.laplaceMagnify();
                currentFrame = magnificator.getFrameLast();
            }
            else if(imgProcFlags.waveletMagnifyOn)
            {
                magnificator.waveletMagnify();
                currentFrame = magnificator.getFrameLast();
            }
            else
            {
                // No magnification selected: drop the oldest frame so the buffer does not grow unbounded
                processingBuffer.erase(processingBuffer.begin());
            }
        }

        ////////////////////////////////////
        // PERFORM IMAGE PROCESSING ABOVE //
        ////////////////////////////////////

        // Convert Mat to QImage
        frame=MatToQImage(currentFrame);

        processingMutex.unlock();

        // Save the Stream
        if(doRecord) { 
            if(output.isOpened()) { 
                if(captureOriginal) {

                    processingMutex.lock();
                    // Combine original and processed frame
                    combinedFrame = combineFrames(currentFrame,originalFrame);
                    processingMutex.unlock();

                    output.write(combinedFrame);
                }
                else {
                    output.write(currentFrame);
                }

                framesWritten++;
                emit frameWritten(framesWritten);
            }
        }

        // Emit the original (unmagnified) frame that was saved after grayscale conversion
        if(emitOriginal)
            emit origFrame(MatToQImage(originalFrame));
        // Inform GUI thread of new frame (QImage)
        emit newFrame(frame);

        // Update statistics
        updateFPS(processingTime);
        statsData.nFramesProcessed++;
        // Inform GUI of updated statistics
        emit updateStatisticsInGUI(statsData);
    }
    qDebug() << "Stopping processing thread...";
}
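
/*
 * Hedged sketch, not shown in the listing above: fillProcessingBuffer() and
 * processingBufferFilled() are called by ProcessingThread::run() but not
 * defined here. Given that the else-branch in run() erases the oldest element
 * of processingBuffer, a minimal version could look like this; the member
 * processingBufferLength (target window size) is an assumption.
 */
void ProcessingThread::fillProcessingBuffer()
{
    // Append the current frame; the Magnificator (or the else-branch in run())
    // is responsible for consuming or discarding the oldest entries.
    processingBuffer.push_back(currentFrame);
}

bool ProcessingThread::processingBufferFilled()
{
    // The buffer counts as filled once the sliding window has reached its
    // configured length.
    return !processingBuffer.empty()
            && (int)processingBuffer.size() >= processingBufferLength;
}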