Esempio n. 1
0
// Computes the mean pixel brightness of a video frame and emits it via
// dataReady() together with the frame timestamp in milliseconds.
void FrameProcessor::processFrame(QVideoFrame frame )
{
    double tot = 0;

    do {

        if (!frame.map(QAbstractVideoBuffer::ReadOnly)){
            qDebug() << "Unable to map frame!";
            break;
        }

        if (frame.pixelFormat() == QVideoFrame::Format_YUV420P ||
            frame.pixelFormat() == QVideoFrame::Format_NV12) {
            // Process YUV data: the leading plane is 8-bit luma, one byte per
            // pixel; rows are bytesPerLine() apart (stride may include padding).
            uchar *b = frame.bits();
            for (int y = 0; y < frame.height(); y++) {
                uchar *lastPixel = b + frame.width();
                for (uchar *curPixel = b; curPixel < lastPixel; curPixel++){
                    // Skip the YUV black level (16) so black pixels do not
                    // drag the mean down.
                    if(*curPixel != 16 ) tot += *curPixel;
                    //histogram[(*curPixel * levels) >> 8] += 1.0;
                }
                b += frame.bytesPerLine();
            }
        } else {
            QImage::Format imageFormat = QVideoFrame::imageFormatFromPixelFormat(frame.pixelFormat());
            if (imageFormat != QImage::Format_Invalid) {
                // Process RGB data via a QImage wrapping the mapped bytes;
                // convertToFormat() makes a deep copy in RGB32.
                QImage image(frame.bits(), frame.width(), frame.height(), imageFormat);
                image = image.convertToFormat(QImage::Format_RGB32);

                const QRgb* b = (const QRgb*)image.bits();
                for (int y = 0; y < image.height(); y++) {
                    const QRgb *lastPixel = b + frame.width();
                    for (const QRgb *curPixel = b; curPixel < lastPixel; curPixel++){
                        //histogram[(qGray(*curPixel) * levels) >> 8] += 1.0;
                        // BUG FIX: the original compared the packed 0xAARRGGBB
                        // value against 16 (a YUV black-level leftover, almost
                        // always true). Compare the gray level instead so this
                        // branch mirrors the YUV branch's black skip.
                        const int gray = qGray(*curPixel);
                        if(gray != 16 ) tot += gray;
                    }
                    b = (const QRgb*)((uchar*)b + image.bytesPerLine());
                }
            }
        }

        frame.unmap();
    } while (false);

    // Compute mean. BUG FIX: guard against a zero-sized frame (e.g. when
    // map() failed above) to avoid division by zero.
    const int pixelCount = frame.width() * frame.height();
    int mean = pixelCount > 0 ? int(tot / pixelCount) : 0;
    int timestamp = frame.startTime()/1000;   // microseconds -> milliseconds
    emit dataReady(timestamp,mean);
}
Esempio n. 2
0
// Receives each camera frame, runs Chilitags marker detection on it, and
// (when a downstream surface is attached) forwards an RGBA copy for display.
bool ChilitagsSurface::present(const QVideoFrame &frame) {

    //qDebug("time: %d", m_timer.elapsed());
    //qDebug("newFrame: %dx%d", frame.width(), frame.height());
    //m_timer.restart();

    // Shallow copy so the (const) incoming frame can be mapped for reading.
    QVideoFrame copy = frame;
    //if (m_frame.pixelFormat() == QVideoFrame::Format_UYVY) {
    if (copy.map(QAbstractVideoBuffer::ReadOnly)) {
        // Wraps the mapped bytes without copying. NOTE(review): assumes the
        // frame is tightly packed 3-byte BGR (CV_8UC3) with no row padding —
        // confirm against the camera's actual pixel format and stride.
        const cv::Mat mat(copy.height(), copy.width(), CV_8UC3,
                          copy.bits());
        if (m_videoSurface) {
            // Keep an RGBA conversion around for presentation below.
            m_converted.create(copy.height(), copy.width(), CV_8UC4);
            cv::cvtColor(mat, m_converted, CV_BGR2RGBA);
        }

        // Detect tags on the raw frame and publish the pose estimates.
        m_item.setTags(m_chilitags.estimate(mat));

        copy.unmap();
    }
    //qDebug("%lu tags", m_tags.size());

    // NOTE(review): if map() failed above, this presents the previous
    // frame's m_converted contents (a stale image) — verify that is intended.
    if (m_videoSurface) {
        QImage image(m_converted.data,
                     m_converted.cols, m_converted.rows,
                     QImage::Format_ARGB32);
        return m_videoSurface->present(QVideoFrame(image));
    }

    return true;
}
Esempio n. 3
0
/*!
  Stores the frame as member to allow it to be processed on paint.
  Returns false when there is error, otherwise returns true.
*/
bool CustomCamera::updateFrame(const QVideoFrame &frame)
{
    if (!frame.isValid()) {
        qDebug() << "CustomCameras::updateFrame: Invalid frame";
        return false;
    }
    
    if (m_processedFrameCounter != m_incomingFrameCounter) {
        // Discard frame.
        return true;
    }
    
    m_incomingFrameCounter++;
    
    QVideoFrame f = frame;
    
    if (f.map(QAbstractVideoBuffer::ReadOnly)) {
        if (m_imageFrame.isNull() || m_imageFrame.width() != f.width() ||
                m_imageFrame.height() != f.height()) {
            m_imageFrame = QImage(f.width(), f.height(), QImage::Format_RGB32);
        }
        
        memcpy(m_imageFrame.bits(), f.bits(), f.mappedBytes());
        
        f.unmap();
        
        update();
    }
    
    return true;
}
Esempio n. 4
0
// Dash-cam frame hook: optionally saves one snapshot of the stream (when
// sendFrame is set) and optionally starts a new 5-minute video recording
// (when startRecording is set).
void ImageSource::processFrame(QVideoFrame frame)
{
    qDebug() << recorder->duration();
    // One-shot snapshot: convert the next NV21 frame to RGB32, save it as
    // JPEG, emit it, then clear the flag.
    if (sendFrame) {
        if (frame.map(QAbstractVideoBuffer::ReadOnly)) {
            if (frame.pixelFormat() == QVideoFrame::Format_NV21) {
                QImage img(frame.size(), QImage::Format_RGB32);
                // qt_convert_NV21_to_ARGB32 is a private Qt helper; assumes a
                // tightly packed NV21 buffer (no row padding).
                qt_convert_NV21_to_ARGB32((uchar*)frame.bits(), (quint32*)img.bits(), frame.width(), frame.height());
                // NOTE(review): Qt::ISODate timestamps contain ':' characters,
                // which some sdcard filesystems reject — confirm saves succeed.
                img.save(QString("/sdcard/DCIM/DashCam/Images/%1.jpg").arg(QDateTime::currentDateTime().toString(Qt::ISODate)), "JPG");
                emit newFrame(img);
                qDebug() << "Saving Frame" << counter;
                sendFrame = false;
            }
            frame.unmap();
        }
    }

    if (startRecording) {
        // Roll over to a new video file after 5 minutes (300000 ms).
        QTimer::singleShot(300000, this, SLOT(newVideo()));
        recorder->setOutputLocation(QUrl::fromLocalFile(QString("/sdcard/DCIM/DashCam/Video/%1.mp4").arg(QDateTime::currentDateTime().toString(Qt::ISODate))));
        recorder->record();

        qDebug() << recorder->supportedResolutions();
        qDebug() << recorder->state();
        qDebug() << recorder->status();
        qDebug() << recorder->error();
        startRecording = false;
    }
}
Esempio n. 5
0
// Paints the current frame into `target`, honoring bottom-to-top scan-line
// order; paints black when no frame is set, and reports a format error when
// a valid frame cannot be mapped.
QAbstractVideoSurface::Error QVideoSurfaceRasterPainter::paint(
            const QRectF &target, QPainter *painter, const QRectF &source)
{
    // Guard clause: without a mapped frame we either have an unmappable
    // (incorrect-format) frame or no frame at all.
    if (!m_frame.map(QAbstractVideoBuffer::ReadOnly)) {
        if (m_frame.isValid())
            return QAbstractVideoSurface::IncorrectFormatError;
        painter->fillRect(target, Qt::black);
        return QAbstractVideoSurface::NoError;
    }

    // Wrap the mapped bytes without copying; valid only until unmap().
    QImage frameImage(
            m_frame.bits(),
            m_imageSize.width(),
            m_imageSize.height(),
            m_frame.bytesPerLine(),
            m_imageFormat);

    if (m_scanLineDirection == QVideoSurfaceFormat::BottomToTop) {
        // Flip vertically by drawing through a mirrored transform, then
        // restore the painter's previous state.
        const QTransform savedTransform = painter->transform();
        painter->scale(1, -1);
        painter->translate(0, -target.bottom());
        painter->drawImage(
            QRectF(target.x(), 0, target.width(), target.height()),
            frameImage, source);
        painter->setTransform(savedTransform);
    } else {
        painter->drawImage(target, frameImage, source);
    }

    m_frame.unmap();
    return QAbstractVideoSurface::NoError;
}
Esempio n. 6
0
// If conversion succeeds, the caller must call avpicture_free(pOutFrame)
// to release the memory after it is done using pOutFrame.
// Returns 0 on success, non-zero on failure.
int CTool::ConvertFormat(/*[in]*/ const QVideoFrame &inFrame,
                         /*[out]*/AVPicture &outFrame,
                         /*[in]*/ int nOutWidth,
                         /*[in]*/ int nOutHeight,
                         /*[in]*/ AVPixelFormat pixelFormat)
{
    int nRet = 0;
    
    // Wrap the QVideoFrame's pixel data in an AVPicture without copying.
    // NOTE(review): inFrame.bits() is only valid while the frame is mapped —
    // confirm the caller maps it before invoking this overload.
    AVPicture pic;
    nRet = avpicture_fill(&pic, (uint8_t*) inFrame.bits(),
                  QVideoFrameFormatToFFMpegPixFormat(inFrame.pixelFormat()),
                  inFrame.width(),
                  inFrame.height());
    if(nRet < 0)
    {
        LOG_MODEL_DEBUG("Tool", "avpicture_fill fail:%x", nRet);
        return nRet;
    }
    
    // Delegate to the AVPicture-based overload for the actual scaling /
    // pixel-format conversion.
    nRet = ConvertFormat(pic, inFrame.width(), inFrame.height(),
                  QVideoFrameFormatToFFMpegPixFormat(inFrame.pixelFormat()),
                  outFrame, nOutWidth, nOutHeight, pixelFormat);

    return nRet;
}
// Extracts a grayscale snapshot of the incoming frame into `img` (guarded by
// processMutex) and emits finished(). Frames are skipped when the mutex is
// busy or the pixel format is unsupported.
void MyProbe::processFrame(QVideoFrame frame)
{
    // convert input to cv::mat here
    if (!frame.isValid())
        return;

    if (!frame.map(QAbstractVideoBuffer::ReadOnly))
        return;

    if (frame.pixelFormat() == QVideoFrame::Format_YUV420P ||
            frame.pixelFormat() == QVideoFrame::Format_NV12 ||
            frame.pixelFormat() == QVideoFrame::Format_NV21)
    {
        if (processMutex.tryLock())
        {
            // The leading plane of these YUV formats is the 8-bit luma
            // channel, so wrapping it as Grayscale8 extracts gray directly;
            // copy() detaches from the mapped buffer.
            img = QImage(frame.bits(), frame.width(), frame.height(),
                         frame.bytesPerLine(), QImage::Format_Grayscale8).copy();
            processMutex.unlock();
            emit finished();
        }
    }
    else if (frame.pixelFormat() == QVideoFrame::Format_BGR32)
    {
        if (processMutex.tryLock())
        {
            // Wrap as ARGB32, detach with copy(), then reduce to grayscale.
            QImage img_tmp;
            img_tmp = QImage(frame.bits(), frame.width(), frame.height(),
                             frame.bytesPerLine(), QImage::Format_ARGB32).copy();
            img = img_tmp.convertToFormat(QImage::Format_Grayscale8);
            processMutex.unlock();
            emit finished();
        }
    }

    // BUG FIX: the original returned from the unsupported-format branch
    // without unmapping, leaking the mapped frame. Unmap unconditionally
    // once map() has succeeded.
    frame.unmap();
}
bool QtKCaptureBuffer::present(const QVideoFrame &frame)
//qtmultimedia\src\plugins\directshow\camera\dscamerasession.cpp
{
	static int cnt = 0;		
	if(!this->m_doCapture) return false;
	
	m_mutexA.lock();
	QVideoFrame tFrame = frame;	
    if(tFrame.map(QAbstractVideoBuffer::ReadOnly))
    {	
		this->m_doCapture = false;
		if(this->m_widthScale == 0) this->m_widthScale = frame.width();
		switch(this->m_mirrorSetting)
		{
			case mirrorVertical:
				this->m_lastFrame = QImage(frame.bits(), frame.width(), frame.height(), frame.bytesPerLine(), getQImageFormat(tFrame.pixelFormat())).mirrored(0, 1).scaledToWidth(this->m_widthScale, (Qt::TransformationMode)this->m_scaleMode);
				break;

			case mirrorHorizontal:
				this->m_lastFrame = QImage(frame.bits(), frame.width(), frame.height(), frame.bytesPerLine(), getQImageFormat(tFrame.pixelFormat())).mirrored(1, 0).scaledToWidth(this->m_widthScale, (Qt::TransformationMode)this->m_scaleMode);
				break;

			case mirrorAll:
				this->m_lastFrame = QImage(frame.bits(), frame.width(), frame.height(), frame.bytesPerLine(), getQImageFormat(tFrame.pixelFormat())).mirrored(1, 1).scaledToWidth(this->m_widthScale, (Qt::TransformationMode)this->m_scaleMode);
				break;

			case mirrorNone:			
			default:
			this->m_lastFrame = QImage(frame.bits(), frame.width(), frame.height(), frame.bytesPerLine(), getQImageFormat(tFrame.pixelFormat())).scaledToWidth(this->m_widthScale, (Qt::TransformationMode)this->m_scaleMode);
			break;
		}
		
        tFrame.unmap();
		m_mutexA.unlock();	
		
		emit imageCaptured(cnt++, this->m_lastFrame);				
		return true;

	}
	m_mutexA.unlock();
    return false;
}
Esempio n. 9
0
void DCameraView::paint(const QVideoFrame &frame)
{
    QPainter painter(this);

    QImage image(
                frame.bits(),
                frame.width(),
                frame.height(),
                frame.bytesPerLine(),
                QVideoFrame::imageFormatFromPixelFormat(frame.pixelFormat()));

    painter.drawImage(0, 0, image.mirrored(m_mirroredHorizontal, m_mirroredVertical));
}
// Builds a brightness histogram with `levels` buckets from the given frame.
// (Continuation of this function lies outside the visible chunk.)
void FrameProcessor::processFrame(QVideoFrame frame, int levels)
{
    QVector<qreal> histogram(levels);

    do {
        // A zero-level histogram is meaningless; bail out early.
        if (!levels)
            break;

        if (!frame.map(QAbstractVideoBuffer::ReadOnly))
            break;

        if (frame.pixelFormat() == QVideoFrame::Format_YUV420P ||
            frame.pixelFormat() == QVideoFrame::Format_NV12) {
            // Process YUV data: the first plane is 8-bit luma, one byte per
            // pixel; rows are bytesPerLine() apart (stride may be padded).
            uchar *b = frame.bits();
            for (int y = 0; y < frame.height(); y++) {
                uchar *lastPixel = b + frame.width();
                for (uchar *curPixel = b; curPixel < lastPixel; curPixel++)
                    // Map the 0-255 luma value into one of `levels` buckets.
                    histogram[(*curPixel * levels) >> 8] += 1.0;
                b += frame.bytesPerLine();
            }
        } else {
            QImage::Format imageFormat = QVideoFrame::imageFormatFromPixelFormat(frame.pixelFormat());
            if (imageFormat != QImage::Format_Invalid) {
                // Process RGB data: wrap the mapped bytes, then deep-copy
                // into RGB32 so each pixel is one QRgb.
                QImage image(frame.bits(), frame.width(), frame.height(), imageFormat);
                image = image.convertToFormat(QImage::Format_RGB32);

                const QRgb* b = (const QRgb*)image.bits();
                for (int y = 0; y < image.height(); y++) {
                    const QRgb *lastPixel = b + frame.width();
                    for (const QRgb *curPixel = b; curPixel < lastPixel; curPixel++)
                        // Bucket by the pixel's gray level.
                        histogram[(qGray(*curPixel) * levels) >> 8] += 1.0;
                    b = (const QRgb*)((uchar*)b + image.bytesPerLine());
                }
            }
        }
Esempio n. 11
0
// Copies the given left/right RGB24 frame into its half of a double-width
// concatenated frame; emits newFrame() once both halves have arrived.
void DataController::concatenateFrames(DataController::WhichFrame which, QVideoFrame frame)
{
    if (!concatenatingFrameInitialized) {
        int width = frame.width();
        int height = frame.height();
        //concatenatingImage = new QImage(width*2, height, QImage::Format_RGB888);
        //concatenationPainter = new QPainter(concatenatingImage);
        // BUG FIX: bytesPerLine for RGB24 is 3 bytes per pixel; the original
        // passed width*2 (a pixel count, not bytes), giving the frame a bogus
        // stride for any downstream consumer.
        concatenatingFrame = new QVideoFrame(width * 2 * height * 3,
                                             QSize(width*2,height), width * 2 * 3, QVideoFrame::Format_RGB24);
        qDebug() << "Creating a concatenating frame of size " << 2*width << " x " << height;
        concatenatingFrameInitialized = true;
    }

    if (!frame.map(QAbstractVideoBuffer::ReadOnly))
        qDebug() << "Failed to map current frame";
    else {
        if (!concatenatingFrame->map(QAbstractVideoBuffer::WriteOnly))
            qDebug() << "Failed to map concatenating frame";
        else {
            //concatenationPainter->drawImage(frame.width() * (which==right),0,frame);
            // Row-by-row copy into the left or right half of the wide frame.
            // BUG FIX: use each frame's bytesPerLine() as the row stride
            // instead of assuming tightly packed width*3 rows.
            for (int i=0; i < frame.height(); i++) {
                memcpy(concatenatingFrame->bits() + concatenatingFrame->bytesPerLine()*i
                       + frame.width()*3*(which==right),
                       frame.bits() + frame.bytesPerLine()*i, frame.width()*3);
            }
            concatenatingFrame->unmap();

            // Pairing state machine: emit only when the opposite half of the
            // pair arrives; complain about two same-side frames in a row.
            if (frameConcatenationState == NOT_STARTED) {
                frameConcatenationState = (which==left) ? LEFT_READY : RIGHT_READY;
            } else if (frameConcatenationState == LEFT_READY) {
                if (which == left)
                    qDebug() << "Two left frames received before a right frame";
                else {
                    frameConcatenationState = NOT_STARTED;
                    emit newFrame(*concatenatingFrame);
                }
            } else if (frameConcatenationState == RIGHT_READY) {
                if (which == right)
                    // BUG FIX: message said "before a right frame" (copy-paste).
                    qDebug() << "Two right frames received before a left frame";
                else {
                    frameConcatenationState = NOT_STARTED;
                    emit newFrame(*concatenatingFrame);
                }
            }
        }
        frame.unmap();
    }


}
Esempio n. 12
0
void MainWindow::onImageAvailable( int id, const QVideoFrame& buffer )
{
    qDebug() << "Capture image available...";

    QImage::Format imageFormat = QVideoFrame::imageFormatFromPixelFormat( buffer.pixelFormat() );
    QImage img( buffer.bits(), buffer.width(), buffer.height(), buffer.bytesPerLine(), imageFormat );

    QPixmap image = QPixmap::fromImage( img );
    QLabel* l = new QLabel();

    ui->tabWidget->addTab( l, QString( "%1" ).arg( id ) );

    l->setPixmap( image );
    l->show();
}
Esempio n. 13
0
// Encodes one incoming RGB24 frame into the open FFmpeg video stream:
// scales/converts it into the encoder's picture, encodes, and writes the
// resulting packet to the output file.
void VideoWriter::newFrame(QVideoFrame frame)
{
    // Track the current frame geometry for (re)initialization.
    if ((frame.width() != width) || (frame.height() != height)) {
        width = frame.width();
        height = frame.height();
    }
    if (waitingToInitialize) {
        initialize(*vFilename);
    }
    if (currentlyWriting) {
        if (!frame.map(QAbstractVideoBuffer::ReadOnly)) {
            qDebug() << "Failure to map video frame in writer";
            return;
        }

        // Wrap the mapped frame bytes as the conversion source, then
        // scale/convert into the encoder-native `picture`.
        // NOTE(review): assumes tightly packed RGB24 input whose size matches
        // the codec context — confirm against the camera format.
        AVCodecContext *c = video_st->codec;
        avpicture_fill((AVPicture *)tmp_picture, frame.bits(),
                       PIX_FMT_RGB24, c->width, c->height);
        sws_scale(sws_ctx, tmp_picture->data, tmp_picture->linesize,
                  0, c->height, picture->data, picture->linesize);
        picture->pts = frameCounter++;
        frame.unmap();

        /* encode the image */
        /* if zero size, it means the image was buffered */
        /* write the compressed frame in the media file */
        /* XXX: in case of B frames, the pts is not yet valid */
        // Uses the legacy (pre-3.x) avcodec_encode_video API.
        int out_size = avcodec_encode_video(c, video_outbuf, video_outbuf_size, picture);
        if (out_size > 0) {
            AVPacket pkt;
            av_init_packet(&pkt);
            // Rescale the codec timestamp into the stream's time base.
            if (c->coded_frame->pts != AV_NOPTS_VALUE)
                pkt.pts= av_rescale_q(c->coded_frame->pts, c->time_base, video_st->time_base);
            if(c->coded_frame->key_frame)
                pkt.flags |= AV_PKT_FLAG_KEY;
            pkt.stream_index= video_st->index;
            pkt.data= video_outbuf;
            pkt.size= out_size;

            /* write the compressed frame in the media file */
            //ret = av_interleaved_write_frame(oc, &pkt);
            // NOTE(review): the return value is ignored — write errors go
            // undetected here.
            int ret = av_write_frame(oc, &pkt);
        }


        // Save time stamp
    }
}
Esempio n. 14
0
void c3::onBufferAvailable(int id, const QVideoFrame& pFrame)
{
    this->camera->unlock();
    this->camera->stop();

    QVideoFrame lvFrame = pFrame;
    if (!lvFrame.map(QAbstractVideoBuffer::ReadOnly)) {
        return;
    }
    QImage lvImage;
    lvImage.loadFromData((const uchar*)lvFrame.bits(), lvFrame.mappedBytes(), (const char*)"JPEG");
    lvFrame.unmap();

/* here you can process lvImage before saving */
//    lvImage.invertPixels(QImage::InvertRgb);

    lvImage.save(defaultSaveFileName, "JPEG");
    this->camera->start();
}
    // Converts every buffered QVideoFrame to a PNG file named
    // "<mFileName>.NN.png", updating the progress bar as it goes.
    // (The function's closing brace lies outside the visible chunk.)
    void convertAllFrames() {
        mProgressBar->setMaximum(mFrames.length() - 1);
        int count = 0;

        foreach(QVideoFrame frame, mFrames) {
            mProgressBar->setValue(count++);
            QImage image;
            if (frame.pixelFormat() == QVideoFrame::Format_RGB32) {
                // RGB32 frames can be wrapped directly as a QImage.
                // Copy const QVideoFrame to mutable QVideoFrame.
                QVideoFrame nonConstFrame = frame;
                // Unlock for reading the stack frame (increment ref pointer)
                nonConstFrame.map(QAbstractVideoBuffer::ReadOnly);
                // Create new image from the frame bits
                image = QImage(
                        nonConstFrame.bits(),
                        nonConstFrame.width(),
                        nonConstFrame.height(),
                        nonConstFrame.bytesPerLine(),
                        QVideoFrame::imageFormatFromPixelFormat(nonConstFrame.pixelFormat()));
                // NOTE(review): `image` wraps the mapped bytes; it is used
                // after unmap() below — confirm the buffer stays valid or
                // that a copy is taken before saving.
                nonConstFrame.unmap();
            } else {
                // Fall back to the software converter for other formats.
                image = QImage(frame.size(), QImage::Format_RGB32);
                mFrameConverter->convertFrame(frame, &image);
            }

            // Two-digit, zero-padded sequence number in the file name.
            QString imgFileName = QString("%1.%2.png").arg(mFileName).arg(++mCount, 2, 10, QChar('0'));
            //QFile file(imgFileName);
            //file.open(QFile::WriteOnly);

            bool saved = image.save(imgFileName, "png");
            if (saved) {
                log->info("File: %1 saved", imgFileName);
            } else {
                log->info("File: %1 not saved", imgFileName);
            }

        }
    // Software-converts a planar YUV420P frame to RGB, writing pixel by pixel
    // into `image` (which must already be allocated at the frame's size).
    // Frames in any other format are silently left unconverted.
    void convertFrame(QVideoFrame &frame, QImage *image) {
        if(frame.pixelFormat() == QVideoFrame::Format_YUV420P && frame.map(QAbstractVideoBuffer::ReadOnly)) {
            // YUV420P layout: full-resolution Y plane, then quarter-size
            // U and V planes (2x2 chroma subsampling).
            const qint32 frameWidth = frame.width();
            const qint32 frameHeight = frame.height();
            const qint32 frameSize = frameHeight * frameWidth;
            const uchar *lumaYBytes = frame.bits();
            const uchar *chromaUBytes = lumaYBytes + frameSize;
            const uchar *chromaVBytes = chromaUBytes + (frameSize / 4);

            for (int y = 0; y < frameHeight; y++) {
                for (int x = 0; x < frameWidth; x++) {
                    // Each 2x2 block of luma pixels shares one U and one V sample.
                    const int Y = lumaYBytes[y * frameWidth + x];
                    const int U = chromaUBytes[(y / 2) * (frameWidth / 2) + (x / 2)];
                    const int V = chromaVBytes[(y / 2) * (frameWidth / 2) + (x / 2)];
                    // ITU-R BT.601 studio-swing YUV -> RGB conversion,
                    // clamped to the displayable 0..255 range.
                    const int r = qBound(0.0, 1.164 * (Y - 16) + 1.596 * (V - 128), 255.0);
                    const int g = qBound(0.0, 1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128), 255.0);
                    const int b = qBound(0.0, 1.164 * (Y - 16) + 2.018 * (U - 128), 255.0);

                    image->setPixel(x, y, qRgb(r, g, b));
                }
            }
            frame.unmap();
        }
    }
Esempio n. 17
0
// End-to-end test of QCameraImageCapture's capture-to-buffer destination:
// JPEG buffer capture, optional UYVY buffer capture, and simultaneous
// buffer+file capture.
void tst_QCameraBackend::testCaptureToBuffer()
{
    QCamera camera;
    QCameraImageCapture imageCapture(&camera);
    camera.exposure()->setFlashMode(QCameraExposure::FlashOff);

    camera.load();

#ifdef Q_WS_MAEMO_6
    QVERIFY(imageCapture.isCaptureDestinationSupported(QCameraImageCapture::CaptureToBuffer));
#endif

    if (!imageCapture.isCaptureDestinationSupported(QCameraImageCapture::CaptureToBuffer))
        QSKIP("Buffer capture not supported");

    QTRY_COMPARE(camera.status(), QCamera::LoadedStatus);

    QCOMPARE(imageCapture.bufferFormat(), QVideoFrame::Format_Jpeg);

    QVERIFY(imageCapture.isCaptureDestinationSupported(QCameraImageCapture::CaptureToFile));
    QVERIFY(imageCapture.isCaptureDestinationSupported(QCameraImageCapture::CaptureToBuffer));
    QVERIFY(imageCapture.isCaptureDestinationSupported(
                QCameraImageCapture::CaptureToBuffer | QCameraImageCapture::CaptureToFile));

    QSignalSpy destinationChangedSignal(&imageCapture, SIGNAL(captureDestinationChanged(QCameraImageCapture::CaptureDestinations)));

    // Switching the destination must emit captureDestinationChanged once.
    QCOMPARE(imageCapture.captureDestination(), QCameraImageCapture::CaptureToFile);
    imageCapture.setCaptureDestination(QCameraImageCapture::CaptureToBuffer);
    QCOMPARE(imageCapture.captureDestination(), QCameraImageCapture::CaptureToBuffer);
    QCOMPARE(destinationChangedSignal.size(), 1);
    QCOMPARE(destinationChangedSignal.first().first().value<QCameraImageCapture::CaptureDestinations>(),
             QCameraImageCapture::CaptureToBuffer);

    QSignalSpy capturedSignal(&imageCapture, SIGNAL(imageCaptured(int,QImage)));
    QSignalSpy imageAvailableSignal(&imageCapture, SIGNAL(imageAvailable(int,QVideoFrame)));
    QSignalSpy savedSignal(&imageCapture, SIGNAL(imageSaved(int,QString)));
    QSignalSpy errorSignal(&imageCapture, SIGNAL(error(int, QCameraImageCapture::Error,QString)));

    camera.start();
    QTRY_VERIFY(imageCapture.isReadyForCapture());

    // --- Capture to a JPEG buffer: expect captured/available signals but no
    // imageSaved, since nothing is written to disk.
    int id = imageCapture.capture();
    QTRY_VERIFY(!imageAvailableSignal.isEmpty());

    QVERIFY(errorSignal.isEmpty());
    QVERIFY(!capturedSignal.isEmpty());
    QVERIFY(!imageAvailableSignal.isEmpty());

    QTest::qWait(2000);
    QVERIFY(savedSignal.isEmpty());

    QCOMPARE(capturedSignal.first().first().toInt(), id);
    QCOMPARE(imageAvailableSignal.first().first().toInt(), id);

    // The delivered frame must be a mappable JPEG payload that QImageReader
    // can decode.
    QVideoFrame frame = imageAvailableSignal.first().last().value<QVideoFrame>();
    QVERIFY(frame.isValid());
    QCOMPARE(frame.pixelFormat(), QVideoFrame::Format_Jpeg);
    QVERIFY(!frame.size().isEmpty());
    QVERIFY(frame.map(QAbstractVideoBuffer::ReadOnly));
    QByteArray data((const char *)frame.bits(), frame.mappedBytes());
    frame.unmap();
    frame = QVideoFrame();

    QVERIFY(!data.isEmpty());
    QBuffer buffer;
    buffer.setData(data);
    buffer.open(QIODevice::ReadOnly);
    QImageReader reader(&buffer, "JPG");
    reader.setScaledSize(QSize(640,480));
    QImage img(reader.read());
    QVERIFY(!img.isNull());

    capturedSignal.clear();
    imageAvailableSignal.clear();
    savedSignal.clear();

    //Capture to yuv buffer
#ifdef Q_WS_MAEMO_6
    QVERIFY(imageCapture.supportedBufferFormats().contains(QVideoFrame::Format_UYVY));
#endif

    if (imageCapture.supportedBufferFormats().contains(QVideoFrame::Format_UYVY)) {
        imageCapture.setBufferFormat(QVideoFrame::Format_UYVY);
        QCOMPARE(imageCapture.bufferFormat(), QVideoFrame::Format_UYVY);

        id = imageCapture.capture();
        QTRY_VERIFY(!imageAvailableSignal.isEmpty());

        QVERIFY(errorSignal.isEmpty());
        QVERIFY(!capturedSignal.isEmpty());
        QVERIFY(!imageAvailableSignal.isEmpty());
        QVERIFY(savedSignal.isEmpty());

        QTest::qWait(2000);
        QVERIFY(savedSignal.isEmpty());

        frame = imageAvailableSignal.first().last().value<QVideoFrame>();
        QVERIFY(frame.isValid());

        qDebug() << frame.pixelFormat();
        QCOMPARE(frame.pixelFormat(), QVideoFrame::Format_UYVY);
        QVERIFY(!frame.size().isEmpty());
        frame = QVideoFrame();

        capturedSignal.clear();
        imageAvailableSignal.clear();
        savedSignal.clear();

        // Restore JPEG buffer format for the combined test below.
        imageCapture.setBufferFormat(QVideoFrame::Format_Jpeg);
        QCOMPARE(imageCapture.bufferFormat(), QVideoFrame::Format_Jpeg);
    }

    //Try to capture to both buffer and file
#ifdef Q_WS_MAEMO_6
    QVERIFY(imageCapture.isCaptureDestinationSupported(QCameraImageCapture::CaptureToBuffer | QCameraImageCapture::CaptureToFile));
#endif
    if (imageCapture.isCaptureDestinationSupported(QCameraImageCapture::CaptureToBuffer | QCameraImageCapture::CaptureToFile)) {
        imageCapture.setCaptureDestination(QCameraImageCapture::CaptureToBuffer | QCameraImageCapture::CaptureToFile);

        int oldId = id;
        id = imageCapture.capture();
        QVERIFY(id != oldId);
        QTRY_VERIFY(!savedSignal.isEmpty());

        // Both the buffer delivery AND the on-disk save must happen.
        QVERIFY(errorSignal.isEmpty());
        QVERIFY(!capturedSignal.isEmpty());
        QVERIFY(!imageAvailableSignal.isEmpty());
        QVERIFY(!savedSignal.isEmpty());

        QCOMPARE(capturedSignal.first().first().toInt(), id);
        QCOMPARE(imageAvailableSignal.first().first().toInt(), id);

        frame = imageAvailableSignal.first().last().value<QVideoFrame>();
        QVERIFY(frame.isValid());
        QCOMPARE(frame.pixelFormat(), QVideoFrame::Format_Jpeg);
        QVERIFY(!frame.size().isEmpty());

        QString fileName = savedSignal.first().last().toString();
        QVERIFY(QFileInfo(fileName).exists());
    }
}
Esempio n. 18
0
// Uploads the pending YUV frame's planes into the material's GL textures
// (one texture per plane), or rebinds the existing textures when no new
// frame is pending.
void QSGVideoMaterial_YUV::bind()
{
    QOpenGLFunctions *functions = QOpenGLContext::currentContext()->functions();
    QMutexLocker lock(&m_frameMutex);
    if (m_frame.isValid()) {
        if (m_frame.map(QAbstractVideoBuffer::ReadOnly)) {
            int fw = m_frame.width();
            int fh = m_frame.height();

            // Frame has changed size, recreate textures...
            if (m_textureSize != m_frame.size()) {
                if (!m_textureSize.isEmpty())
                    functions->glDeleteTextures(m_planeCount, m_textureIds);
                functions->glGenTextures(m_planeCount, m_textureIds);
                m_textureSize = m_frame.size();
            }

            // Plane rows are not necessarily 4-byte aligned; switch to
            // byte alignment for the uploads and restore it afterwards.
            GLint previousAlignment;
            functions->glGetIntegerv(GL_UNPACK_ALIGNMENT, &previousAlignment);
            functions->glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

            if (m_format.pixelFormat() == QVideoFrame::Format_NV12
                    || m_format.pixelFormat() == QVideoFrame::Format_NV21) {
                // Semi-planar: plane 0 is Y, plane 1 is interleaved UV
                // (uploaded as LUMINANCE_ALPHA pairs at half resolution).
                const int y = 0;
                const int uv = 1;

                // Fraction of each texture row that holds real pixels
                // (the remainder is stride padding).
                m_planeWidth[0] = m_planeWidth[1] = qreal(fw) / m_frame.bytesPerLine(y);

                functions->glActiveTexture(GL_TEXTURE1);
                bindTexture(m_textureIds[1], m_frame.bytesPerLine(uv) / 2, fh / 2, m_frame.bits(uv), GL_LUMINANCE_ALPHA);
                functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
                bindTexture(m_textureIds[0], m_frame.bytesPerLine(y), fh, m_frame.bits(y), GL_LUMINANCE);

            } else { // YUV420P || YV12
                // Fully planar; YV12 swaps the U and V plane order.
                const int y = 0;
                const int u = m_frame.pixelFormat() == QVideoFrame::Format_YUV420P ? 1 : 2;
                const int v = m_frame.pixelFormat() == QVideoFrame::Format_YUV420P ? 2 : 1;

                m_planeWidth[0] = qreal(fw) / m_frame.bytesPerLine(y);
                m_planeWidth[1] = m_planeWidth[2] = qreal(fw) / (2 * m_frame.bytesPerLine(u));

                functions->glActiveTexture(GL_TEXTURE1);
                bindTexture(m_textureIds[1], m_frame.bytesPerLine(u), fh / 2, m_frame.bits(u), GL_LUMINANCE);
                functions->glActiveTexture(GL_TEXTURE2);
                bindTexture(m_textureIds[2], m_frame.bytesPerLine(v), fh / 2, m_frame.bits(v), GL_LUMINANCE);
                functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
                bindTexture(m_textureIds[0], m_frame.bytesPerLine(y), fh, m_frame.bits(y), GL_LUMINANCE);
            }

            functions->glPixelStorei(GL_UNPACK_ALIGNMENT, previousAlignment);
            m_frame.unmap();
        }

        // Data now lives in the textures; release the frame either way.
        m_frame = QVideoFrame();
    } else {
        // No new frame: rebind the existing per-plane textures.
        // Go backwards to finish with GL_TEXTURE0
        for (int i = m_planeCount - 1; i >= 0; --i) {
            functions->glActiveTexture(GL_TEXTURE0 + i);
            functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[i]);
        }
    }
}
Esempio n. 19
0
// Uploads the current camera frame into the camera texture and draws a
// textured quad into the render target via the shader engine.
void VideoFrameSurface::render() {
    if (!g_frame.isValid()) return;
	GLCALL_INIT;
	if(g_frame.map(QAbstractVideoBuffer::ReadOnly))
	{
		// Upload the mapped frame bytes into the camera texture.
		// NOTE(review): assumes the frame data is tightly packed RGBA of
		// size cw x ch — confirm against the camera's pixel format.
		GLuint tid=*((GLuint *)camtex->getNative());
		GLCALL glBindTexture(GL_TEXTURE_2D, tid);
		//qDebug() << "Render:" << g_frame.width()<< g_frame.height() << tid;
		GLCALL glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, cw, ch,0, GL_RGBA, GL_UNSIGNED_BYTE, g_frame.bits());
		// Redirect rendering into the offscreen target and set up an
		// orthographic projection covering the target texture.
		ShaderEngine *engine=gtexture_get_engine();
		engine->reset();
		ShaderBuffer *oldfbo = engine->setFramebuffer(rdrTgt);
		engine->setViewport(0, 0, gtex->width, gtex->height);
		Matrix4 projection = engine->setOrthoFrustum(0,
				gtex->baseWidth, 0, gtex->baseHeight, -1, 1);
		engine->setProjection(projection);
		Matrix4 model;
		engine->setModel(model);
		engine->bindTexture(0,camtex);
		// Feed the cached quad geometry to the shader and draw it.
		shader->setData(ShaderProgram::DataVertex, ShaderProgram::DFLOAT, 2,
				&vertices[0], vertices.size(), vertices.modified,
				&vertices.bufferCache);
		shader->setData(ShaderProgram::DataTexture, ShaderProgram::DFLOAT, 2,
				&texcoords[0], texcoords.size(), texcoords.modified,
				&texcoords.bufferCache);
		shader->drawElements(ShaderProgram::TriangleStrip, indices.size(),
				ShaderProgram::DUSHORT, &indices[0], indices.modified,
				&indices.bufferCache);
		// Geometry is now cached in GPU buffers; clear the dirty flags.
		vertices.modified = false;
		texcoords.modified = false;
		indices.modified = false;

		// Restore the previously bound framebuffer.
		engine->setFramebuffer(oldfbo);
        g_frame.unmap();
	}

}