Example No. 1
void VideoEngine::run()
{
    float fps = _videoFormat.framesPerSecond();
    if (fps == 0){
        fps = 60;
    }
    int milliSeconds = 1000/fps;
    int frameNumber = 0;
    while(!stopped)
    {
        cv::Mat cvFrame;
        if (false == videoCapture.grab()){
            qDebug() << "grab() failed";
            break;
        }
        if (false == videoCapture.retrieve(cvFrame, cameraChannel)){
            qDebug() << "retreive failed";
            break;
        }


        // retrieve Mat::type()
        frameNumber++;
        if (frameNumber == 1){
            _videoFormat.setType(cvFrame.type());
            if (processor != 0){
                processor->startProcessing(_videoFormat);
            }
        }

        // queue the image to the gui
        emit sendInputImage(cvMatToQImage(cvFrame));

        // Process Video Frame
        if (processor != 0){
            cvFrame = processor->process(cvFrame);
        }

        emit sendProcessedImage(cvMatToQImage(cvFrame));

        // check if stopped
        QMutexLocker locker(&mutex);
        if (stopped) {
            break;
        }

        if (usingCamera == false){
            msleep(milliSeconds);
        }
    }
    videoCapture.release();
}
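Example No. 2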
	void SelectCameraWindow::onTestEyeCamClicked()
	{
		#ifndef _WIN32
			ui.lEyeCam->setText("Loading...");

			VideoCapture videoCapture(ui.cbEyeCamIndex->currentIndex());
			videoCapture.set(CV_CAP_PROP_FRAME_WIDTH, 320);
			videoCapture.set(CV_CAP_PROP_FRAME_HEIGHT, 240);

			Mat frame;
			
			/* If camera could not be opened */
			if(!videoCapture.isOpened())
			{
				ui.lEyeCam->setText("Cannot open camera");
				qDebug() << "Capture not opened";
				return;
			}

			for( ; ; )
			{
				videoCapture.read(frame);
				ui.lEyeCam->setPixmap(QPixmap::fromImage(cvMatToQImage(frame)));
				waitKey();
			}
		#else

			// If thread is running
			if(eyeCamFrameReaderThread->isRunning())
			{
				qDebug() << "Eye Camera Thread is already running";

				// If we are already streaming from this cam, bail out
				if(eyeCamFrameReaderThread->getCameraIndex() == ui.cbEyeCamIndex->currentIndex())
				{
					qDebug() << "Same camera selected!";
					return;
				}

				// If a different cam is selected for testing while streaming from another one
				else
				{
					qDebug() << "Starting stream with different camera";
					eyeCamFrameReaderThread->terminate();
					eyeCamFrameReaderThread->setCameraIndex(ui.cbEyeCamIndex->currentIndex());
					eyeCamFrameReaderThread->start();
				}
			}

			// If thread is not started yet
			else
			{
				qDebug() << "Eye Camera Thread starting first time";
				eyeCamFrameReaderThread->setCameraIndex(ui.cbEyeCamIndex->currentIndex());
				eyeCamFrameReaderThread->start();
			}

		#endif
		return;
	}
Example No. 3
void MainWindow::update()
{
    if(videoCapture.isOpened()) {
       //Grab image
       videoCapture.read(image);
       markerDetector.detect(image,markers,cameraParameters,markerSize);
       kmarkers.clear();
       for(int i=0;i < markers.size();i++) {
           markers[i].draw(image,cv::Scalar(0,0,255),2);
           //Update coordinate markers if applicable, otherwise calculate marker pose
           if(markers[i].id == 0) {
               origin[0] = markers[i].Tvec.ptr<float>(0)[0];
               origin[1] = markers[i].Tvec.ptr<float>(0)[1];
               origin[2] = markers[i].Tvec.ptr<float>(0)[2];
               originPx[0] = markers[i][0].x;
               originPx[1] = markers[i][0].y;
           }
           else if(markers[i].id == 1) {
               xAxis[0] = markers[i].Tvec.ptr<float>(0)[0];
               xAxis[1] = markers[i].Tvec.ptr<float>(0)[1];
               xAxis[2] = markers[i].Tvec.ptr<float>(0)[2];
               xAxisPx[0] = markers[i][0].x;
               xAxisPx[1] = markers[i][0].y;
           }
           else if(markers[i].id == 2) {
               yAxis[0] = markers[i].Tvec.ptr<float>(0)[0];
               yAxis[1] = markers[i].Tvec.ptr<float>(0)[1];
               yAxis[2] = markers[i].Tvec.ptr<float>(0)[2];
               yAxisPx[0] = markers[i][0].x;
               yAxisPx[1] = markers[i][0].y;
           }
           else {
               //Convert marker to keytrack marker by calculating X, Y, and Theta
               Marker kmarker = arucoMarkerToMarker(markers[i]);
               kmarkers.append(kmarker);
           }
       }
       //Fulfill any marker requests
       splw->handleNewMarkers(kmarkers);

       //Display augmented image
       qimage = cvMatToQImage(image);
       ui->imageLabel->setPixmap(QPixmap::fromImage(qimage.scaled(ui->imageLabel->size(),Qt::KeepAspectRatio)));
    }
}
Example No. 4
void EyeCameraFrameReaderThread::run(void)
{
    emit onCameraTextChanged("Loading...");
    int frontCamIndex = cameraIndex == 1 ? 2 : 1;
    EyeTracker *eyeTracker = new EyeTracker(cameraIndex, frontCamIndex);

    // If camera could not be opened
    if(!eyeTracker->isOpened())
    {
        qDebug() << "Error: Cannot connect to camera.";
        emit onCameraTextChanged("Cannot connect to camera");
        emit finished();
        return; // bail out; otherwise we would enter the capture loop with no camera
    }

    for( ; ; )
    {
        // Capture images
        eyeTracker->captureEyeFrame();
        eyeTracker->captureSceneFrame();

        Point2f p = eyeTracker->doTrack();

        // Set up x and y coordinates
        char str[50];
        sprintf(str, "%.1f", p.x);
        emit onEyeXChanged(str);

        sprintf(str, "%.1f", p.y);
        emit onEyeYChanged(str);

        // Draw squares to eye image
        eyeTracker->drawSquares(eyeTracker->findSquares());

        // Set eye pixmap
        emit onCameraPixmapChanged(QPixmap::fromImage(cvMatToQImage(cvarrToMat(eyeTracker->getEyeImage()))));

        // Release images
        eyeTracker->releaseGrayEyeImage();
        eyeTracker->releaseGraySceneImage();

        cvWaitKey(20);
    }
}
Example No. 5
void FrontCameraFrameReaderThread::run(void)
{
    emit onCameraTextChanged("Loading...");

    VideoCapture videoCapture(cameraIndex);
    Mat frame;

    /* If camera could not be opened */
    if(!videoCapture.isOpened())
    {
        emit onCameraTextChanged("Cannot open camera");
        qDebug() << "Capture not opened";
        return;
    }

    for( ; ; )
    {
        videoCapture.read(frame);
        emit onCameraPixmapChanged(QPixmap::fromImage(cvMatToQImage(frame)));
        waitKey();
    }
}
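Example No. 6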
	QPixmap cvMatToQPixmap(const cv::Mat &inMat)
	{
		return QPixmap::fromImage(cvMatToQImage(inMat));
	}
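Example No. 7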
QPixmap QOpenCVScene::cvMatToQPixmap( const cv::Mat &inMat )
{
    return QPixmap::fromImage( cvMatToQImage( inMat ) );
}
Example No. 8
void MainWindow::showImage(cv::Mat* image)
{
    QImage qimage = cvMatToQImage(*image);
    ui->image->setPixmap(QPixmap::fromImage(qimage.scaled(ui->image->size(),Qt::KeepAspectRatio)));
}
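All of the examples above hand their frames to Qt through the same cvMatToQImage() helper, but none of the snippets show its body. A minimal sketch of such a conversion (not taken from any of the projects above, written against the same OpenCV 2.x-style constants the examples use, and assuming 8-bit BGRA, BGR, or single-channel grayscale input, which is what cv::VideoCapture normally delivers) could look like this:

#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <QImage>
#include <QDebug>

QImage cvMatToQImage(const cv::Mat &inMat)
{
    switch (inMat.type())
    {
    // 8-bit, 4 channel (BGRA) - memory layout matches Format_ARGB32 on little-endian hosts
    case CV_8UC4:
    {
        QImage image(inMat.data, inMat.cols, inMat.rows,
                     static_cast<int>(inMat.step), QImage::Format_ARGB32);
        return image.copy();
    }
    // 8-bit, 3 channel (BGR) - wrap as RGB888 and swap R/B; rgbSwapped() already deep-copies
    case CV_8UC3:
    {
        QImage image(inMat.data, inMat.cols, inMat.rows,
                     static_cast<int>(inMat.step), QImage::Format_RGB888);
        return image.rgbSwapped();
    }
    // 8-bit, 1 channel (grayscale) - expand to RGB first to stay Qt-version agnostic
    case CV_8UC1:
    {
        cv::Mat rgb;
        cv::cvtColor(inMat, rgb, CV_GRAY2RGB);
        QImage image(rgb.data, rgb.cols, rgb.rows,
                     static_cast<int>(rgb.step), QImage::Format_RGB888);
        return image.copy();
    }
    default:
        qWarning() << "cvMatToQImage(): unsupported cv::Mat type" << inMat.type();
        return QImage();
    }
}

Returning a deep copy (copy() or rgbSwapped()) matters in this context: every capture loop above reuses its cv::Mat buffer for the next frame, so a QImage that merely wrapped the Mat's data would be invalidated on the next read()/retrieve().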