const cv::Mat descriptor::SIFTDescriptor::computeDescriptorForPoints(const cv::Mat& image, const PointArrayPtr& pointArray)
{
    cv::Mat processedImage(image);

    // Convert to grayscale and equalize the histogram before extracting descriptors.
    cv::cvtColor(processedImage, processedImage, cv::COLOR_BGR2GRAY);
    cv::equalizeHist(processedImage, processedImage);

    // One keypoint per input point, all with the configured patch diameter.
    std::vector<cv::KeyPoint> keyPoints;
    keyPoints.reserve(pointArray->size());
    for (const PointType& point : *pointArray) {
        keyPoints.push_back(cv::KeyPoint(point, static_cast<float>(m_diameter)));
    }

    cv::Mat floatDescriptors;
    m_descriptorExtractor->compute(processedImage, keyPoints, floatDescriptors);

    // SIFT produces CV_32F descriptors; convert to double precision for the caller.
    cv::Mat_<double> resultMat;
    floatDescriptors.convertTo(resultMat, CV_64FC1);
    return resultMat;
}
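// A minimal sketch of how the members used above could be initialized; this is an
// assumption for illustration, not the project's actual constructor. It presumes
// OpenCV 4.4+ (where SIFT ships with features2d), an illustrative 16-pixel patch
// diameter, and that m_descriptorExtractor is a cv::Ptr<cv::Feature2D>.
descriptor::SIFTDescriptor::SIFTDescriptor()
    : m_diameter(16),
      m_descriptorExtractor(cv::SIFT::create())
{
}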
Job Job::execute()
{
    int width = this->originalImage.columns();
    int height = this->originalImage.rows();

    // Collect the luminosity of every pixel so we can pick a percentile threshold.
    std::vector<double> luminosities;
    luminosities.reserve(static_cast<size_t>(width) * height);
    for (int i = 0; i < width; i++) {
        for (int j = 0; j < height; j++) {
            Magick::ColorHSL pixelColor = originalImage.pixelColor(i, j);
            luminosities.push_back(pixelColor.luminosity());
        }
    }
    std::sort(luminosities.begin(), luminosities.end());

    // The threshold is the luminosity at the requested percentile; clamp the index
    // so percentage == 100 does not read past the end of the vector.
    int cutoff = (int) (width * height * percentage) / 100;
    cutoff = std::min(cutoff, (int) luminosities.size() - 1);
    double threshold = luminosities[cutoff];

    // Binarize: pixels brighter than the threshold become white, the rest black.
    Magick::Image processedImage(Magick::Geometry(width, height), "white");
    this->processedImage = processedImage;
    for (int i = 0; i < width; i++) {
        for (int j = 0; j < height; j++) {
            Magick::ColorHSL pixelColor = originalImage.pixelColor(i, j);
            double pixelLuminosity = pixelColor.luminosity();
            this->processedImage.pixelColor(i, j, Magick::ColorMono(pixelLuminosity > threshold));
        }
    }
    return *this;
}
QPixmap ImageFilter::temperature(QImage origin, int delta)
{
    QImage newImage(origin.width(), origin.height(), QImage::Format_ARGB32);
    QColor oldColor;
    int r, g, b;

    for (int x = 0; x < newImage.width(); x++) {
        for (int y = 0; y < newImage.height(); y++) {
            oldColor = QColor(origin.pixel(x, y));
            r = oldColor.red() + delta;
            g = oldColor.green() + delta;
            b = oldColor.blue();

            // We check that the new values are between 0 and 255.
            r = qBound(0, r, 255);
            g = qBound(0, g, 255);

            newImage.setPixel(x, y, qRgb(r, g, b));
        }
    }

    QPixmap processedImage(QPixmap::fromImage(newImage));
    return processedImage;
}
QPixmap ImageFilter::saturate(QImage origin, int delta)
{
    QImage newImage(origin.width(), origin.height(), QImage::Format_ARGB32);
    QColor oldColor;
    QColor newColor;
    int h, s, l;

    for (int x = 0; x < newImage.width(); x++) {
        for (int y = 0; y < newImage.height(); y++) {
            oldColor = QColor(origin.pixel(x, y));
            newColor = oldColor.toHsl();
            h = newColor.hue();
            s = newColor.saturation() + delta;
            l = newColor.lightness();

            // We check that the new saturation is between 0 and 255.
            s = qBound(0, s, 255);

            newColor.setHsl(h, s, l);
            newImage.setPixel(x, y, qRgb(newColor.red(), newColor.green(), newColor.blue()));
        }
    }

    QPixmap processedImage(QPixmap::fromImage(newImage));
    return processedImage;
}
QPixmap ImageFilter::grayScale(QImage origin)
{
    QImage newImage(origin.width(), origin.height(), QImage::Format_ARGB32);
    QColor oldColor;

    for (int x = 0; x < newImage.width(); x++) {
        for (int y = 0; y < newImage.height(); y++) {
            oldColor = QColor(origin.pixel(x, y));
            // Unweighted average of the three channels.
            int average = (oldColor.red() + oldColor.green() + oldColor.blue()) / 3;
            newImage.setPixel(x, y, qRgb(average, average, average));
        }
    }

    QPixmap processedImage(QPixmap::fromImage(newImage));
    return processedImage;
}
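// Design note: the average above weights R, G and B equally. A perceptual variant is
// sketched below under the assumption of the same ImageFilter interface (the method
// name is illustrative); it uses Qt's qGray(), which applies the
// (11*R + 16*G + 5*B) / 32 luminance weighting.
QPixmap ImageFilter::luminanceGrayScale(QImage origin)
{
    QImage newImage(origin.width(), origin.height(), QImage::Format_ARGB32);
    for (int x = 0; x < newImage.width(); x++) {
        for (int y = 0; y < newImage.height(); y++) {
            int gray = qGray(origin.pixel(x, y));
            newImage.setPixel(x, y, qRgb(gray, gray, gray));
        }
    }
    return QPixmap::fromImage(newImage);
}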
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    ui->setupUi(this);
    this->showNormal();
    myPlayer = new player();

    /********* Choose and display the video *********/
    QObject::connect(ui->actionOuvrirVideo, SIGNAL(triggered()), this, SLOT(chooseVideo()));
    QObject::connect(myPlayer, SIGNAL(processedImage(QImage, QString)), this, SLOT(updatePlayerUI(QImage, QString)));

    /********* Object selection with the mouse on the video label *********/
    ui->VideoLbl->setMouseTracking(true);
    QObject::connect(ui->VideoLbl, SIGNAL(Mouse_Move(int, int)), this, SLOT(myMouseMove(int, int)));
    QObject::connect(ui->VideoLbl, SIGNAL(Mouse_Pressed(int, int)), this, SLOT(myMousePressed(int, int)));
    QObject::connect(ui->VideoLbl, SIGNAL(Mouse_Left(int, int)), this, SLOT(myMouseLeft(int, int)));
    QObject::connect(ui->VideoLbl, SIGNAL(Mouse_Move_Pressed(int, int)), this, SLOT(myMouseMovePressed(int, int)));

    /********* Disable the buttons and the slider until a video is loaded *********/
    ui->playBtn->setEnabled(false);
    ui->backwardButton->setEnabled(false);
    ui->forwardButton->setEnabled(false);
    ui->videoSlider->setEnabled(false);
    ui->quickbackwardButton->setEnabled(false);
    ui->quickforwardButton->setEnabled(false);
    ui->trajectoirecheckBox->setEnabled(false);
    ui->debutButton->setEnabled(false);
    ui->finButton->setEnabled(false);
    ui->ouvrirButton->setVisible(false);
    ui->savefinButton->setEnabled(false);
    ui->VideoLbl->setEnabled(false);
    ui->actionInformationObjet->setEnabled(false);

    /********* Open the dialogs *********/
    QObject::connect(ui->actionInformationObjet, SIGNAL(triggered()), this, SLOT(openInformationDialog()));
    QObject::connect(ui->actionDeplacement, SIGNAL(triggered()), this, SLOT(openDeplacementDialog()));

    /********* Initialization *********/
    start = 0;
    fin = 0;
}
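// The Mouse_* connections above assume VideoLbl is a QLabel subclass that re-emits
// mouse events as signals. A minimal sketch under that assumption (class and member
// names are illustrative, not the project's real header; the remaining Mouse_*
// signals would follow the same pattern):
class VideoLabel : public QLabel
{
    Q_OBJECT
public:
    explicit VideoLabel(QWidget *parent = nullptr) : QLabel(parent) {}
signals:
    void Mouse_Move(int x, int y);
    void Mouse_Pressed(int x, int y);
protected:
    void mouseMoveEvent(QMouseEvent *ev) override { emit Mouse_Move(ev->x(), ev->y()); }
    void mousePressEvent(QMouseEvent *ev) override { emit Mouse_Pressed(ev->x(), ev->y()); }
};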
QPixmap ImageFilter::blur(QImage origin)
{
    // Start from a copy so the border pixels (which the kernel cannot reach)
    // keep their original values.
    QImage newImage(origin);

    // 5x5 Gaussian-like kernel; its coefficients sum to 27.
    int kernel[5][5] = {{0, 0, 1, 0, 0},
                        {0, 1, 3, 1, 0},
                        {1, 3, 7, 3, 1},
                        {0, 1, 3, 1, 0},
                        {0, 0, 1, 0, 0}};
    int kernelSize = 5;
    int sumKernel = 27;
    int r, g, b;
    QColor color;

    for (int x = kernelSize / 2; x < newImage.width() - (kernelSize / 2); x++) {
        for (int y = kernelSize / 2; y < newImage.height() - (kernelSize / 2); y++) {
            r = 0;
            g = 0;
            b = 0;
            // Convolve the neighbourhood around (x, y).
            for (int i = -kernelSize / 2; i <= kernelSize / 2; i++) {
                for (int j = -kernelSize / 2; j <= kernelSize / 2; j++) {
                    color = QColor(origin.pixel(x + i, y + j));
                    r += color.red() * kernel[kernelSize / 2 + i][kernelSize / 2 + j];
                    g += color.green() * kernel[kernelSize / 2 + i][kernelSize / 2 + j];
                    b += color.blue() * kernel[kernelSize / 2 + i][kernelSize / 2 + j];
                }
            }
            // Normalize by the kernel sum and clamp to the valid range.
            r = qBound(0, r / sumKernel, 255);
            g = qBound(0, g / sumKernel, 255);
            b = qBound(0, b / sumKernel, 255);
            newImage.setPixel(x, y, qRgb(r, g, b));
        }
    }

    QPixmap processedImage(QPixmap::fromImage(newImage));
    return processedImage;
}
QPixmap ImageFilter::sepia(QImage origin)
{
    QImage newImage(origin.width(), origin.height(), QImage::Format_ARGB32);
    QColor oldColor;
    int r, g, b;

    for (int x = 0; x < newImage.width(); x++) {
        for (int y = 0; y < newImage.height(); y++) {
            oldColor = QColor(origin.pixel(x, y));
            // Sepia tone: offset the channel sums so that red > green > blue.
            r = (oldColor.red() + oldColor.green() + oldColor.blue() + 112) / 4;
            g = (oldColor.red() + oldColor.green() + oldColor.blue() + 66) / 4;
            b = (oldColor.red() + oldColor.green() + oldColor.blue() + 20) / 4;
            newImage.setPixel(x, y, qRgb(r, g, b));
        }
    }

    QPixmap processedImage(QPixmap::fromImage(newImage));
    return processedImage;
}
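// A minimal usage sketch for the ImageFilter helpers above, assuming they are
// declared static on ImageFilter (call them through an instance otherwise); the
// helper function and file path here are illustrative only.
static QPixmap applySepiaFromFile(const QString &path)
{
    QImage source(path);
    return ImageFilter::sepia(source);
}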
QImage & BackgroundDodging::dodgeBackground(QImage &image, QImage &background)
{
    const int imageWidth = image.size().width();
    const int imageHeight = image.size().height();
    assert((imageWidth == background.size().width()) && (imageHeight == background.size().height()));

    QImage processedImage(imageWidth, imageHeight, QImage::Format_RGB666);

    // Per-channel mean of the background and the image/background difference.
    // (std::vector instead of variable-length stack arrays; indexed row-major so the
    // row/column indexing stays in bounds for non-square images.)
    double means[] = {0.0, 0.0, 0.0};
    std::vector<std::array<double, 3> > r1(static_cast<size_t>(imageWidth) * imageHeight);

    for (int row = 0; row < imageHeight; row++) {
        for (int col = 0; col < imageWidth; col++) {
            QRgb rgbIm = image.pixel(col, row);
            QRgb rgbBg = background.pixel(col, row);
            means[0] += (double)qRed(rgbBg);
            means[1] += (double)qGreen(rgbBg);
            means[2] += (double)qBlue(rgbBg);
            std::array<double, 3> &diff = r1[row * imageWidth + col];
            diff[0] = (double)qRed(rgbIm) - (double)qRed(rgbBg);
            diff[1] = (double)qGreen(rgbIm) - (double)qGreen(rgbBg);
            diff[2] = (double)qBlue(rgbIm) - (double)qBlue(rgbBg);
        }
    }
    means[0] = means[0] / imageWidth / imageHeight;
    means[1] = means[1] / imageWidth / imageHeight;
    means[2] = means[2] / imageWidth / imageHeight;

    // Rescale the difference by the local gain (mean / background), clamped to
    // [minr, maxr], and add the mean back in; eps avoids division by zero.
    for (int row = 0; row < imageHeight; row++) {
        for (int col = 0; col < imageWidth; col++) {
            QRgb rgbBg = background.pixel(col, row);
            const std::array<double, 3> &diff = r1[row * imageWidth + col];
            double gain[3];
            gain[0] = fmin(fmax(means[0] / ((double)qRed(rgbBg) + eps), minr), maxr);
            gain[1] = fmin(fmax(means[1] / ((double)qGreen(rgbBg) + eps), minr), maxr);
            gain[2] = fmin(fmax(means[2] / ((double)qBlue(rgbBg) + eps), minr), maxr);
            int r = qBound(0, (int)(diff[0] * gain[0] + means[0]), 255);
            int g = qBound(0, (int)(diff[1] * gain[1] + means[1]), 255);
            int b = qBound(0, (int)(diff[2] * gain[2] + means[2]), 255);
            processedImage.setPixel(col, row, qRgb(r, g, b));
        }
    }

    // Returning a reference to the local QImage would dangle; copy the result into
    // the caller's image and return that instead.
    image = processedImage;
    return image;
}
// The constructor
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    // Create the Player object
    myPlayer = new Player();

    // Process the images by connecting the player's signal to the UI slot
    QObject::connect(myPlayer, SIGNAL(processedImage(QImage)), this, SLOT(updatePlayerUI(QImage)));

    // Set up the UI
    ui->setupUi(this);

    // Configure the UI's web view
    ui->webView->settings()->setAttribute(QWebSettings::JavascriptEnabled, true);
    ui->webView->settings()->setAttribute(QWebSettings::OfflineStorageDatabaseEnabled, true);
    ui->webView->settings()->setAttribute(QWebSettings::OfflineWebApplicationCacheEnabled, true);
    ui->webView->settings()->setAttribute(QWebSettings::LocalContentCanAccessRemoteUrls, true);
    ui->webView->settings()->setAttribute(QWebSettings::LocalStorageEnabled, true);
    ui->webView->settings()->setAttribute(QWebSettings::JavascriptCanAccessClipboard, true);
    ui->webView->settings()->setAttribute(QWebSettings::DeveloperExtrasEnabled, true);
    ui->webView->settings()->setAttribute(QWebSettings::CanvasPathEnabled, true);
    ui->webView->load(QUrl("file:///home/brian/Documents/C%20and%20C++%20Program/GuiWebcam/map.html"));
}
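// A minimal sketch of the slot assumed by the processedImage(QImage) connection
// above; "videoLabel" is an illustrative widget name, and the real updatePlayerUI()
// in this project may scale or annotate the frame differently.
void MainWindow::updatePlayerUI(QImage img)
{
    if (!img.isNull()) {
        ui->videoLabel->setAlignment(Qt::AlignCenter);
        ui->videoLabel->setPixmap(QPixmap::fromImage(img).scaled(ui->videoLabel->size(),
                                                                 Qt::KeepAspectRatio,
                                                                 Qt::FastTransformation));
    }
}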
void RaspiVoice::processImage(cv::Mat rawImage)
{
    cv::Mat processedImage = rawImage;

    if (verbose) {
        printtime("ProcessImage start");
    }

    if ((image_source > 0) || (opt.input_filename != "")) {
        if (opt.foveal_mapping) {
            // Barrel-distort towards the centre to give the fovea more pixels.
            cv::Matx33f cameraMatrix(100, 0, processedImage.cols / 2, 0, 100, processedImage.rows / 2, 0, 0, 1);
            cv::Matx41f distCoeffs(5.0, 5.0, 0, 0);
            cv::Mat processedImage2;
            cv::undistort(processedImage, processedImage2, cameraMatrix, distCoeffs);
            float clipzoom = 1.8;  // horizontal zoom to remove blinders, decreases resolution if > 1.0
            cv::Rect roi(processedImage.cols / 2 - columns / 2 / clipzoom, processedImage.rows / 2 - rows / 2, columns / clipzoom, rows);
            processedImage = processedImage2(roi);
        }

        if (opt.zoom > 1.0) {
            // Crop a centred window to zoom in.
            int w = processedImage.cols;
            int h = processedImage.rows;
            float z = opt.zoom;
            cv::Rect roi((w / 2.0) - w / (2.0 * z), (h / 2.0) - h / (2.0 * z), w / z, h / z);
            processedImage = processedImage(roi);
        }

        // Bring to the size needed by ImageToSoundscape:
        if (processedImage.rows != rows || processedImage.cols != columns) {
            cv::resize(processedImage, processedImage, cv::Size(columns, rows));
        }

        if ((opt.blinders > 0) && (opt.blinders < columns / 2)) {
            // Black out vertical strips on the left and right edges.
            processedImage(cv::Rect(0, 0, opt.blinders, rows - 1)).setTo(0);
            processedImage(cv::Rect(columns - 1 - opt.blinders, 0, opt.blinders, rows - 1)).setTo(0);
        }

        if (opt.contrast != 0.0) {
            // Stretch pixel values away from the mean by the contrast factor.
            float avg = 0.0;
            for (int y = 0; y < processedImage.rows; y++) {
                for (int x = 0; x < processedImage.cols; x++) {
                    avg += processedImage.at<uchar>(y, x);
                }
            }
            avg = avg / (processedImage.rows * processedImage.cols);
            for (int y = 0; y < processedImage.rows; y++) {
                for (int x = 0; x < processedImage.cols; x++) {
                    int mVal = processedImage.at<uchar>(y, x);
                    processedImage.at<uchar>(y, x) = cv::saturate_cast<uchar>(mVal + opt.contrast * (mVal - avg));
                }
            }
        }

        if (opt.threshold > 0) {
            if (opt.threshold < 255) {
                cv::threshold(processedImage, processedImage, opt.threshold, 255, cv::THRESH_BINARY);
            } else {
                // Auto threshold:
                cv::threshold(processedImage, processedImage, 127, 255, cv::THRESH_BINARY | cv::THRESH_OTSU);
            }
        }

        if (opt.negative_image) {
            cv::Mat sub_mat = cv::Mat::ones(processedImage.size(), processedImage.type()) * 255;
            cv::subtract(sub_mat, processedImage, processedImage);
        }

        if (opt.edge_detection_opacity > 0.0) {
            // Blend Canny edges over the image.
            cv::Mat blurImage;
            cv::Mat edgesImage;
            int ratio = 3;
            int kernel_size = 3;
            int lowThreshold = opt.edge_detection_threshold;
            if (lowThreshold <= 0) {
                lowThreshold = 127;
            }
            cv::blur(processedImage, blurImage, cv::Size(3, 3));
            cv::Canny(blurImage, edgesImage, lowThreshold, lowThreshold * ratio, kernel_size);
            double alpha = opt.edge_detection_opacity;
            if (alpha > 1.0) {
                alpha = 1.0;
            }
            double beta = (1.0 - alpha);
            cv::addWeighted(edgesImage, alpha, processedImage, beta, 0.0, processedImage);
        }

        if ((opt.flip >= 1) && (opt.flip <= 3)) {
            int flipCode = 0;
            if (opt.flip == 1) {        // horizontal
                flipCode = 1;
            } else if (opt.flip == 2) { // vertical
                flipCode = 0;
            } else if (opt.flip == 3) { // horizontal + vertical
                flipCode = -1;
            }
            cv::flip(processedImage, processedImage, flipCode);
        }

        if (preview) {
            // Screen preview
            //imwrite("raspivoice_capture_raw.jpg", rawImage);
            //imwrite("raspivoice_capture_scaled_gray.jpg", processedImage);
            cv::imshow("RaspiVoice Preview", processedImage);
            cv::waitKey(200);
        }

        // Set live camera image: quantize to 16 levels and map to sound intensity.
        for (int j = 0; j < columns; j++) {
            for (int i = 0; i < rows; i++) {
                int mVal = processedImage.at<uchar>(rows - 1 - i, j) / 16;
                if (mVal == 0) {
                    (*image)[IDX2D(i, j)] = 0;
                } else {
                    (*image)[IDX2D(i, j)] = pow(10.0, (mVal - 15) / 10.0);  // 2dB steps
                }
            }
        }
    }
}
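// IDX2D and the "image" buffer are used above but defined elsewhere. The usage is
// consistent with a row-major index into a flat rows*columns vector of doubles;
// a sketch of that assumption (the project's real definition may differ):
//
//   #define IDX2D(i, j) ((i) * columns + (j))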
void BTB::StaticDetector::pruneTrainDescriptors(const std::string &path)
{
  matcher.clear();

  // First pass: load each full webcam image, locate the known moto crop inside it,
  // and remember a slightly enlarged rectangle where matches are expected to fall.
  std::vector<ProcessedImage> pruningImages;
  std::map<std::string, cv::Rect> pruningImagesExpected;
  for (std::vector<ProcessedImage>::iterator it = processedTrainImages.begin(); it != processedTrainImages.end(); ++it)
  {
    std::string fullImagePath = path + "webcam_" + it->imageNumber + ".png";
    cv::Mat fullImage = cv::imread(fullImagePath);
    if (!fullImage.data)
    {
      throw std::logic_error("error loading image: " + fullImagePath);
    }
    ProcessedImage processedImage(algo, fullImage, it->imageNumber);
    pruningImages.push_back(processedImage);

    std::string motoImagePath = path + it->imageNumber + ".png";
    cv::Mat motoImage = cv::imread(motoImagePath);
    if (!motoImage.data)
    {
      throw std::logic_error("error loading image: " + motoImagePath);
    }

    cv::Point2i p = BTB::findExactMatch(fullImage, motoImage);
    const double factor = 0.3;  // enlarge the expected region by 30% on each side
    cv::Point2i tl(std::max(p.x - int(motoImage.cols * factor), 0), std::max(p.y - int(motoImage.rows * factor), 0));
    cv::Point2i br(std::min(p.x + int(motoImage.cols * (1 + factor)), fullImage.cols), std::min(p.y + int(motoImage.rows * (1 + factor)), fullImage.rows));
    cv::Rect expected(tl, br);
    pruningImagesExpected.insert(std::pair<std::string, cv::Rect>(it->imageNumber, expected));
  }

  // Second pass: for every training image, drop the keypoints whose matches land
  // outside the expected rectangle, then re-add the surviving descriptors to the
  // detector's matcher.
  for (std::vector<ProcessedImage>::iterator itTraining = processedTrainImages.begin(); itTraining != processedTrainImages.end(); ++itTraining)
  {
    std::vector<int> prunedKeypoints;

    // Local matcher holding only this training image's descriptors
    // (this->matcher is rebuilt with the pruned descriptors below).
    cv::BFMatcher matcher;
    std::vector<cv::Mat> descriptors;
    descriptors.push_back(itTraining->descriptors);
    matcher.add(descriptors);

    for (std::vector<ProcessedImage>::iterator itPruning = pruningImages.begin(); itPruning != pruningImages.end(); ++itPruning)
    {
      std::vector<cv::DMatch> matches = computeMatches(matcher, *itPruning);
      for (std::vector<cv::DMatch>::iterator itMatches = matches.begin(); itMatches != matches.end(); ++itMatches)
      {
        cv::KeyPoint keypoint = itPruning->keypoints[itMatches->queryIdx];
        cv::Point2i point = keypoint.pt;
        if (!pruningImagesExpected[itPruning->imageNumber].contains(point))
        {
          prunedKeypoints.push_back(itMatches->trainIdx);
        }
      }
    }

    std::vector<cv::KeyPoint> goodKeypoints;
    for (size_t i = 0; i < itTraining->keypoints.size(); i++)
    {
      if (std::find(prunedKeypoints.begin(), prunedKeypoints.end(), (int)i) == prunedKeypoints.end())
      {
        goodKeypoints.push_back(itTraining->keypoints[i]);
      }
    }

    if (itTraining->pruneKeypoints(goodKeypoints))
    {
      std::vector<cv::Mat> newDescriptors;
      newDescriptors.push_back(itTraining->descriptors);
      this->matcher.add(newDescriptors);
    }
  }
}
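// computeMatches() is called above but not shown. A minimal sketch of what it is
// assumed to do, namely match the pruning image's descriptors (query) against the
// descriptors already added to the matcher (train); the project's real helper and
// its exact signature may differ.
std::vector<cv::DMatch> BTB::StaticDetector::computeMatches(cv::BFMatcher &matcher, const ProcessedImage &pruningImage)
{
  std::vector<cv::DMatch> matches;
  matcher.match(pruningImage.descriptors, matches);
  return matches;
}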
// ============================================================================
// For drawing examples see
// http://opencvexamples.blogspot.com/2013/10/basic-drawing-examples.html
void VideoReader::nextFrame()
{
    // if (!playingOptions.playing) { return; }
    Mat inputFrame;
    if (!capture.read(inputFrame)) {
        emit finished();
        return;
    }

    // Scale the frame down so it fits inside the GUI frame, preserving aspect ratio.
    int gfw = inputFrame.cols;
    int gfh = inputFrame.rows;
    float scaleFactor = 1;
    if ((inputFrame.cols > playingOptions.guiFrameWidth) || (inputFrame.rows > playingOptions.guiFrameHeight)) {
        if (playingOptions.guiFrameWidth > playingOptions.guiFrameHeight) {
            gfw = playingOptions.guiFrameWidth;
            scaleFactor = (playingOptions.guiFrameWidth * 1.0) / inputFrame.cols;
            gfh = (int)round(inputFrame.rows * scaleFactor);
        } else {
            gfh = playingOptions.guiFrameHeight;
            scaleFactor = (playingOptions.guiFrameHeight * 1.0) / inputFrame.rows;
            gfw = (int)round(inputFrame.cols * scaleFactor);
        }
    }
    // c5d(c5, QString("Calculated scale from %1x%2 to %3x%4 (%5)")
    //        .arg(inputFrame.cols).arg(inputFrame.rows)
    //        .arg(playingOptions.guiFrameWidth).arg(playingOptions.guiFrameHeight)
    //        .arg(scaleFactor));
    resize(inputFrame, frame, Size(gfw, gfh));

    // Overlays
    drawCornerCircles();
    switch (playingOptions.crosshairType) {
    case 0:
        drawCrosshairType1(scaleFactor);
        break;
    case 1:
        drawCrosshairType2(scaleFactor);
        break;
    case 2:
        drawCrosshairType3(scaleFactor);
        break;
    }
    // drawSelectorRectangle(scaleFactor);
    drawFilename(scaleFactor);

    // Letterbox the scaled frame vertically to the full GUI height.
    // (copyMakeBorder allocates the destination, so fullFrame needs no preset size.)
    Mat fullFrame;
    const int topShift = (playingOptions.guiFrameHeight - frame.rows) / 2;
    const int lowShift = playingOptions.guiFrameHeight - topShift - frame.rows;
    // copyMakeBorder(frame, fullFrame, topShift, lowShift, 0, 0, BORDER_REPLICATE);
    copyMakeBorder(frame, fullFrame, topShift, lowShift, 0, 0, BORDER_CONSTANT, Scalar(128, 128, 0));
    frame = fullFrame;

    // Convert to a QImage for the GUI and notify listeners.
    if (frame.channels() == 3) {
        cv::cvtColor(frame, RGBframe, CV_BGR2RGB);
        img = QImage((const unsigned char*)(RGBframe.data), RGBframe.cols, RGBframe.rows, RGBframe.step, QImage::Format_RGB888);
    } else {
        img = QImage((const unsigned char*)(frame.data), frame.cols, frame.rows, QImage::Format_Indexed8);
    }
    emit processedImage(img);
}
int main()
{
    // Objects
    CameraInput *camera = new CameraInput();
    Control *controller = new Control();
    Process *processer = new Process();
    Tracking *tracker = new Tracking();
    Serial_Communication *serial = new Serial_Communication("/dev/ttyUSB0", "/dev/ttyUSB1");
    // Serial_Communication *serial = new Serial_Communication("/dev/ttyUSB0"); // #### For testing with only one Arduino
    File_Handler *file_Handler = new File_Handler();
    Window_Handler *window_Handler = new Window_Handler();
    Menu *menu = new Menu();

    // Threads
    QThread *t1 = new QThread;
    QThread *t2 = new QThread;
    QThread *t3 = new QThread;
    QThread *t4 = new QThread;
    QThread *t5 = new QThread;
    camera->moveToThread(t1);
    processer->moveToThread(t2);
    tracker->moveToThread(t3);
    serial->moveToThread(t3);
    controller->moveToThread(t4);
    file_Handler->moveToThread(t5);

    // Connect signals to slots: whenever a signal is emitted, its connected slot runs.
    qRegisterMetaType<cv::Mat>("cv::Mat");

    // Signals emitted from:
    // Main thread
    QObject::connect(menu, SIGNAL(startRecording(bool)), controller, SLOT(startRecording(bool)));
    QObject::connect(menu, SIGNAL(stopRecording()), controller, SLOT(stopRecording()));
    QObject::connect(menu, SIGNAL(displayMenu(cv::Mat)), window_Handler, SLOT(drawImage(cv::Mat)));
    QObject::connect(menu, SIGNAL(requestDataFromFootController()), serial, SLOT(receiveDataFromFootControllerLoop()));
    QObject::connect(menu, SIGNAL(startHighRep()), controller, SLOT(startDelayMode()));
    QObject::connect(menu, SIGNAL(decreaseDelay()), controller, SLOT(decreaseDelay()));
    QObject::connect(menu, SIGNAL(increaseDelay()), controller, SLOT(increaseDelay()));
    QObject::connect(menu, SIGNAL(modeSwitch()), controller, SLOT(endMode()));
    QObject::connect(menu, SIGNAL(startPlayback()), file_Handler, SLOT(readFromFile()));
    QObject::connect(menu, SIGNAL(stopPlayback()), file_Handler, SLOT(stopVideo()));
    QObject::connect(menu, SIGNAL(toggleSlowMotion()), file_Handler, SLOT(toggleSlowMotion()));
    QObject::connect(menu, SIGNAL(toggleTracking()), controller, SLOT(toggleTracking()));

    // Thread 1
    QObject::connect(t1, SIGNAL(started()), camera, SLOT(captureImage()));
    QObject::connect(camera, SIGNAL(capturedImage(cv::Mat)), controller, SLOT(inputImage(cv::Mat)));

    // Thread 2
    QObject::connect(t2, SIGNAL(started()), controller, SLOT(processerReady()));
    QObject::connect(processer, SIGNAL(posXposY(int,int)), tracker, SLOT(position(int,int)));
    QObject::connect(processer, SIGNAL(readyForWork()), controller, SLOT(processerReady()));

    // Thread 3
    QObject::connect(tracker, SIGNAL(directionAndSpeed(int,int)), serial, SLOT(sendDataToControlUnit(int,int)));
    QObject::connect(serial, SIGNAL(fromFootController(char)), menu, SLOT(giveInput(char)));

    // Thread 4
    QObject::connect(t4, SIGNAL(started()), controller, SLOT(fileHandlerReadyToWrite()));
    QObject::connect(controller, SIGNAL(imageToProcess(cv::Mat)), processer, SLOT(processImage(cv::Mat)));
    QObject::connect(controller, SIGNAL(requestImage()), camera, SLOT(captureImage()));
    QObject::connect(controller, SIGNAL(imageToRecord(cv::Mat)), file_Handler, SLOT(writeImage(cv::Mat)));
    // QObject::connect(controller, SIGNAL(imageToShow(cv::Mat)), window_Handler, SLOT(drawImage(cv::Mat)));
    QObject::connect(processer, SIGNAL(processedImage(cv::Mat)), window_Handler, SLOT(drawImage(cv::Mat)));
    QObject::connect(controller, SIGNAL(stopMotor()), serial, SLOT(stopMotor()));

    // Thread 5
    QObject::connect(file_Handler, SIGNAL(showFrame(cv::Mat)), window_Handler, SLOT(drawImage(cv::Mat)));
    QObject::connect(file_Handler, SIGNAL(readyToWrite()), controller, SLOT(fileHandlerReadyToWrite()));
    QObject::connect(file_Handler, SIGNAL(timeout()), file_Handler, SLOT(playVideo()));
    QObject::connect(file_Handler, SIGNAL(playbackEnded()), menu, SLOT(returnToLowRep()));

    // Start the threads
    t1->start();
    t2->start();
    t3->start();
    t4->start();
    t5->start();

    // menu->menu();
    menu->inputHandler();

    return 0;
}