Example no. 1
void RasterLabel::renderImage()
{
    if(isImageFromWebcam()){
        cam.read(imgMat);
        cv::flip(imgMat, imgMat, 1);
        if(isFilterDefined()){
            filter->setImage(imgMat);
            // TODO
            //((MorphOperations*)filter)->applyFilter();
            filter->applyFilter();
            imgMat = filter->getImage();
        }
        cv::Mat displayMat;
        cvtColor(imgMat, displayMat, CV_BGR2RGB);
        QImage qimg((uchar*)displayMat.data, displayMat.cols, displayMat.rows, displayMat.step, QImage::Format_RGB888);
        setPixmap(QPixmap::fromImage(qimg));
    }else if(isImgDefined()){
        if(isFilterDefined()){
            filter->applyFilter();
            imgMat = filter->getImage().clone();
        }
        // Convert the image from BGR to RGB for display
        cv::Mat displayMat;
        cvtColor(imgMat, displayMat, CV_BGR2RGB);
        QImage qimg((uchar*)displayMat.data, displayMat.cols, displayMat.rows, displayMat.step, QImage::Format_RGB888);
        setPixmap(QPixmap::fromImage(qimg));
    }else{
        tmr->stop();
        emit nothingToDisplay();
    }
}
void HistogramDialog::showImage(Mat img) {
    QImage qimg((uchar*) img.data, img.cols, img.rows, img.step, QImage::Format_RGB888);
    QPixmap qpix= QPixmap::fromImage(qimg);
    QGraphicsScene* scene = new QGraphicsScene(this);
    scene->addPixmap(qpix);
    ui->graphicsView->setScene(scene);
}
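A side note on HistogramDialog::showImage above: it allocates a new QGraphicsScene(this) on every call, so repeated calls accumulate scenes that are only released when the dialog is destroyed. A minimal sketch of a variant that reuses one scene, assuming a hypothetical scene member created once in the constructor:

void HistogramDialog::showImage(Mat img) {
    // img is assumed to already hold 3-channel RGB data, as in the original
    QImage qimg((uchar*) img.data, img.cols, img.rows, img.step, QImage::Format_RGB888);
    scene->clear();                             // remove the previously added pixmap item
    scene->addPixmap(QPixmap::fromImage(qimg)); // fromImage() copies the pixel data
    ui->graphicsView->setScene(scene);          // returns early if this scene is already set
}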
Example no. 3
/** Get the image.
*/
static int filter_get_image( mlt_frame frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable )
{
	int error = 0;
	mlt_filter filter = (mlt_filter)mlt_frame_pop_service( frame );
	private_data* pdata = (private_data*)filter->child;
	mlt_properties frame_properties = MLT_FRAME_PROPERTIES( frame );

	if( mlt_properties_get_data( frame_properties, pdata->fft_prop_name, NULL ) )
	{
		// Get the current image
		*format = mlt_image_rgb24a;
		error = mlt_frame_get_image( frame, image, format, width, height, 1 );

		// Draw the spectrum
		if( !error ) {
			QImage qimg( *width, *height, QImage::Format_ARGB32 );
			copy_mlt_to_qimage_rgba( *image, &qimg );
			draw_spectrum( filter, frame, &qimg );
			copy_qimage_to_mlt_rgba( &qimg, *image );
		}
	} else {
		if ( pdata->preprocess_warned++ == 2 )
		{
			// This filter depends on the consumer processing the audio before
			// the video.
			mlt_log_warning( MLT_FILTER_SERVICE(filter), "Audio not preprocessed.\n" );
		}
		mlt_frame_get_image( frame, image, format, width, height, writable );
	}

	return error;
}
Example no. 4
//"Run" part of the process thread.
void ProcessThread::run()
{
	VideoCapture cap(0);
	if(!cap.isOpened())
	{
		qDebug () << "Video capture (cap) was unable to start... ";
		return;
	}
	frame_cnt=0;
	Mat matImage;
	while(!(this->isStopped))
	{
		frame_cnt++;
		cap >> matImage;
		cv::resize (matImage, matImage, Size(800, 600));
		//resize(matImage, image, Size(800,600));

		mutex.lock();
		Mat matimg = mode->process(matImage);
		QImage qimg((uchar *)matimg.data, matimg.cols, matimg.rows, matimg.step, QImage::Format_RGB888);
		QLabel *label = mode->getProperLabel();
		mutex.unlock();

		QWaitCondition cond;
		QMutex drawMutex;
		drawMutex.lock();
		emit drawImage (&qimg, &cond, &drawMutex, label);
		cond.wait (&drawMutex);
		drawMutex.unlock();
	}
}
Example no. 5
void TakePicView::displayVideoFrame()
{
    Mat displayImage;
    capture.read(image);
    cvtColor(image, displayImage, CV_BGR2RGB);
    QImage qimg((uchar*)displayImage.data, displayImage.cols, displayImage.rows, displayImage.step, QImage::Format_RGB888);
    videoLabel->setPixmap(QPixmap::fromImage(qimg));
}
Example no. 6
static int filter_get_image( mlt_frame frame, uint8_t **image, mlt_image_format *image_format, int *width, int *height, int writable )
{
	int error = 0;
	mlt_properties frame_properties = MLT_FRAME_PROPERTIES( frame );
	mlt_filter filter = (mlt_filter)mlt_frame_pop_service( frame );
	int samples = 0;
	int channels = 0;
	int frequency = 0;
	mlt_audio_format audio_format = mlt_audio_s16;
	int16_t* audio = (int16_t*)mlt_properties_get_data( frame_properties, "audio", NULL );

	if ( !audio && !preprocess_warned ) {
		// This filter depends on the consumer processing the audio before the
		// video. If the audio is not preprocessed, this filter will process it.
		// If this filter processes the audio, it could cause confusion for the
		// consumer if it needs different audio properties.
		mlt_log_warning( MLT_FILTER_SERVICE(filter), "Audio not preprocessed. Potential audio distortion.\n" );
		preprocess_warned = true;
	}

	*image_format = mlt_image_rgb24a;

	// Get the current image
	error = mlt_frame_get_image( frame, image, image_format, width, height, writable );

	// Get the audio
	if( !error ) {
		frequency = mlt_properties_get_int( frame_properties, "audio_frequency" );
		if (!frequency) {
			frequency = 48000;
		}
		channels = mlt_properties_get_int( frame_properties, "audio_channels" );
		if (!channels) {
			channels = 2;
		}
		samples = mlt_properties_get_int( frame_properties, "audio_samples" );
		if (!samples) {
			mlt_producer producer = mlt_frame_get_original_producer( frame );
			double fps = mlt_producer_get_fps( mlt_producer_cut_parent( producer ) );
			samples = mlt_sample_calculator( fps, frequency, mlt_frame_get_position( frame ) );
		}

		error = mlt_frame_get_audio( frame, (void**)&audio, &audio_format, &frequency, &channels, &samples );
	}

	// Draw the waveforms
	if( !error ) {
		QImage qimg( *width, *height, QImage::Format_ARGB32 );
		convert_mlt_to_qimage_rgba( *image, &qimg, *width, *height );
		draw_waveforms( filter, frame, &qimg, audio, channels, samples );
		convert_qimage_to_mlt_rgba( &qimg, *image, *width, *height );
	}

	return error;
}
Example no. 7
void SquareDialog::drawSquares(cv::Mat &img, const std::vector<std::vector<cv::Point> > &squares) {
    for (size_t i = 0; i < squares.size(); i++) {
        const cv::Point *p = &squares[i][0];
        int n = (int)squares[i].size();
        cv::polylines(img, &p, &n, 1, true, cv::Scalar(0, 255, 0), 3, CV_AA);
    }

    //cv::cvtColor(img, img, CV_BGR2RGB);

    QImage qimg((uchar*)img.data, img.cols, img.rows, img.step, QImage::Format_RGB888);

    ui->squareLbl->setPixmap(QPixmap::fromImage(qimg));
}
Example no. 8
void CameraWorker::onTimeout()
{
    if(ros::ok())
        ros::spinOnce();

    QImage qimg((uchar*)imageptr_cv.data, imageptr_cv.cols, imageptr_cv.rows, imageptr_cv.step, QImage::Format_RGB888);
    myMutex->lock();
    if(myPixmap)
        delete myPixmap;
    myPixmap = new QPixmap(QPixmap::fromImage(qimg));
    myMutex->unlock();
    emit imageReady(myPixmap);
    timer->setSingleShot(true);
    timer->start(10);
}
void CameraWorker::onTimeout()
{
    if(isCamera)
    {
        displayFrame = displayCamFrame;
        frame = cvQueryFrame(capture);
    }
    else
    {
        displayFrame = displayArenaFrame;
        frame = arenaFrame;
    }
    if(!frame)
        return;
    frame->roi = roi;
    cvResize(frame, calibFrame, CV_INTER_NN);
    cvCopy(calibFrame, displayFrame);
    if(isThreshold)
        colorImage(calibFrame, displayFrame);    
    if(isBlob)
    {
        makeBlobImage(frame, blobImage);
        b->detectBlobs(blobImage, a.getZoneImage());
        blobDataArr = b->getBlobDataArr();
        drawBlobs(displayFrame, blobDataArr);
        myMutex->lock();
        bs->populateFromBlobData(blobDataArr);
        bs->bombDepositPoint = a.getBombDrop();
        bs->resourceDepositPoint = a.getMineDrop();
        bs->startCorner = a.getStartCorner();
        myMutex->unlock();
        emit beliefStateReady(bs);
    }
    if(isArenaCalib)
    {
        a.drawArenaDisplay(displayFrame);
    }
    cvCvtColor(displayFrame, displayFrame,CV_BGR2RGB);
    QImage qimg((uchar*)displayFrame->imageData, displayFrame->width, displayFrame->height, displayFrame->widthStep, QImage::Format_RGB888);
    myMutex->lock();
    if(myPixmap)
        delete myPixmap;
    myPixmap = new QPixmap(QPixmap::fromImage(qimg));
    myMutex->unlock();
    emit imageReady(myPixmap);
    timer->setSingleShot(true);
    timer->start(10);
}
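Both onTimeout() variants above delete myPixmap and then emit a raw pointer to its replacement; with a queued connection to another thread, the receiver may still hold the old pointer, or the next tick may delete the new one underneath it. A hedged alternative, assuming the imageReady signal is changed to take a QPixmap by value (QPixmap is implicitly shared, so the copy is cheap):

    // Hypothetical signal: void imageReady(QPixmap pix);
    QPixmap pix = QPixmap::fromImage(qimg);
    emit imageReady(pix);      // the receiver gets its own shared handle, so nothing
                               // can be deleted out from under it on the next tick
    timer->setSingleShot(true);
    timer->start(10);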
Example no. 10
QImage mat2Qimg(Mat a)
{
   QImage qimg(a.cols, a.rows, QImage::Format_RGB888);

   // Note: the single memcpy below assumes QImage adds no scanline padding,
   // i.e. that a.cols*3 is a multiple of 4 (see the stride-safe sketch after this example).
   Mat a_rgb;
   if(a.channels() == 3)
   {
       cvtColor(a, a_rgb, CV_BGR2RGB);    // reorder BGR -> RGB into a separate buffer
   }
   else
   {
       cvtColor(a, a_rgb, CV_GRAY2RGB);   // expand grayscale to 3 channels for Format_RGB888
   }
   memcpy(qimg.bits(), a_rgb.data, a_rgb.cols*a_rgb.rows*a_rgb.channels());
   return qimg;
}
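mat2Qimg above copies the whole frame with a single memcpy, which only works when QImage adds no scanline padding, i.e. when cols*3 is a multiple of 4 (QImage aligns every scanline to 4 bytes). A minimal sketch of a conversion that hands the Mat's stride to QImage and deep-copies the result; matToQImage is a name introduced here, not one of the examples' functions, and the same OpenCV/Qt headers and namespaces as above are assumed:

QImage matToQImage(const cv::Mat &bgr)
{
    cv::Mat rgb;
    cvtColor(bgr, rgb, CV_BGR2RGB);              // OpenCV colour images are BGR by default
    return QImage(rgb.data, rgb.cols, rgb.rows,
                  static_cast<int>(rgb.step),    // bytes per source row, padding included
                  QImage::Format_RGB888).copy(); // deep copy: rgb is destroyed on return
}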
Example no. 11
QImage io_util::qImageFromGray(const cv::Mat & image)
{
    if (image.type()!=CV_8UC1)
    {   //unsupported type
        return QImage();
    }

    QImage qimg(image.cols, image.rows, QImage::Format_RGB32);
    for (int h=0; h<image.rows; h++)
    {
        const unsigned char * row = image.ptr<unsigned char>(h);
        unsigned * qrow = reinterpret_cast<unsigned *>(qimg.scanLine(h));
        for (int w=0; w<image.cols; w++)
        {
            qrow[w] = qRgb(row[w], row[w], row[w]);
        }
    }
    return qimg;
}
QImage Calibration::iplToQimage(IplImage* image)
{
	int h = image->height;
	int w = image->width;
	int channels = image->nChannels;
	QImage qimg(w, h, QImage::Format_ARGB32);
	char *data = image->imageData;
	
	for (int y = 0; y < h; y++, data += image->widthStep)
	{
		for (int x = 0; x < w; x++)
		{
			// use unsigned bytes so values above 127 are not sign-extended
			// to negative ints when passed to qRgb()/qRgba()
			unsigned char r = 0;
			unsigned char g = 0;
			unsigned char b = 0;
			unsigned char a = 0;
			if (channels == 1)
			{
				r = data[x * channels];
				g = data[x * channels];
				b = data[x * channels];
			}
			else if (channels == 3 || channels == 4)
			{
				r = data[x * channels + 2];
				g = data[x * channels + 1];
				b = data[x * channels];
			}

			if (channels == 4)
			{
				a = data[x * channels + 3];
				qimg.setPixel(x, y, qRgba(r, g, b, a));
			}
			else
			{
				qimg.setPixel(x, y, qRgb(r, g, b));
			}
		}
	}
	return qimg;
}
Example no. 13
QImage io_util::qImageFromRGB(const cv::Mat & image)
{
    if (image.type()!=CV_8UC3)
    {   //unsupported type
        return QImage();
    }

    QImage qimg(image.cols, image.rows, QImage::Format_RGB32);
    for (int h=0; h<image.rows; h++)
    {
        const cv::Vec3b * row = image.ptr<cv::Vec3b>(h);
        unsigned * qrow = reinterpret_cast<unsigned *>(qimg.scanLine(h));
        for (int w=0; w<image.cols; w++)
        {
            const cv::Vec3b & vec = row[w];
            // the Mat is in OpenCV's BGR order, so reorder to RGB for QImage
            qrow[w] = qRgb(vec[2], vec[1], vec[0]);
        }
    }
    return qimg;
}
Example no. 14
QImage VirtualGlasses::ipl2Qimg(IplImage* iplImg){
    int h = iplImg->height;
    int w = iplImg->width;
    int channels = iplImg->nChannels;
    QImage qimg(w, h, QImage::Format_ARGB32);
    char *data = iplImg->imageData;

    for (int y = 0; y < h; y++, data += iplImg->widthStep)
    {
        for (int x = 0; x < w; x++)
        {
            // unsigned, and all initialised: avoids sign-extension above 127
            // and uninitialised r/g/b for unexpected channel counts
            unsigned char r = 0, g = 0, b = 0, a = 0;
            if (channels == 1)
            {
                r = data[x * channels];
                g = data[x * channels];
                b = data[x * channels];
            }
            else if (channels == 3 || channels == 4)
            {
                r = data[x * channels + 2];
                g = data[x * channels + 1];
                b = data[x * channels];
            }

            if (channels == 4)
            {
                a = data[x * channels + 3];
                qimg.setPixel(x, y, qRgba(r, g, b, a));
            }
            else
            {
                qimg.setPixel(x, y, qRgb(r, g, b));
            }
        }
    }
    return qimg;
}
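The IplImage-based converters above walk the pixels by hand. A hedged alternative, assuming a 3-channel BGR IplImage: wrap the header in a cv::Mat with cv::cvarrToMat (no pixel copy) and reuse a Mat-based path such as the matToQImage sketch after Example no. 10; iplToQImage is a name introduced here for illustration:

QImage iplToQImage(IplImage *ipl)
{
    // cvarrToMat only wraps the existing buffer, so ipl must stay alive
    // for as long as the wrapping Mat is in use
    cv::Mat wrapped = cv::cvarrToMat(ipl);
    return matToQImage(wrapped);   // matToQImage: the sketch after Example no. 10
}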