Example no. 1
/*!
  Stores the frame as a member so it can be processed on paint.
  Returns false on error, otherwise returns true.
*/
bool CustomCamera::updateFrame(const QVideoFrame &frame)
{
    if (!frame.isValid()) {
        qDebug() << "CustomCameras::updateFrame: Invalid frame";
        return false;
    }
    
    if (m_processedFrameCounter != m_incomingFrameCounter) {
        // Discard frame.
        return true;
    }
    
    m_incomingFrameCounter++;
    
    QVideoFrame f = frame;
    
    if (f.map(QAbstractVideoBuffer::ReadOnly)) {
        if (m_imageFrame.isNull() || m_imageFrame.width() != f.width() ||
                m_imageFrame.height() != f.height()) {
            m_imageFrame = QImage(f.width(), f.height(), QImage::Format_RGB32);
        }
        
        // Assumes the frame data is RGB32-compatible, so the mapped bytes can
        // be copied directly into the image.
        memcpy(m_imageFrame.bits(), f.bits(), f.mappedBytes());
        
        f.unmap();
        
        update();
    }
    
    return true;
}
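The stored m_imageFrame is drawn on the next repaint triggered by update(). A minimal sketch of a matching paint handler, assuming CustomCamera is a QWidget subclass and that the counters are reconciled after painting (both assumptions, not shown in the original):

void CustomCamera::paintEvent(QPaintEvent *)
{
    QPainter painter(this);
    if (!m_imageFrame.isNull())
        painter.drawImage(rect(), m_imageFrame);

    // Assumption: painting marks the frame as processed so updateFrame()
    // accepts the next incoming frame.
    m_processedFrameCounter = m_incomingFrameCounter;
}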
Example no. 2
QAbstractVideoSurface::Error QVideoSurfaceRasterPainter::paint(
            const QRectF &target, QPainter *painter, const QRectF &source)
{
    if (m_frame.map(QAbstractVideoBuffer::ReadOnly)) {
        QImage image(
                m_frame.bits(),
                m_imageSize.width(),
                m_imageSize.height(),
                m_frame.bytesPerLine(),
                m_imageFormat);

        if (m_scanLineDirection == QVideoSurfaceFormat::BottomToTop) {
            const QTransform oldTransform = painter->transform();

            painter->scale(1, -1);
            painter->translate(0, -target.bottom());
            painter->drawImage(
                QRectF(target.x(), 0, target.width(), target.height()), image, source);
            painter->setTransform(oldTransform);
        } else {
            painter->drawImage(target, image, source);
        }

        m_frame.unmap();
    } else if (m_frame.isValid()) {
        return QAbstractVideoSurface::IncorrectFormatError;
    } else {
        painter->fillRect(target, Qt::black);
    }
    return QAbstractVideoSurface::NoError;
}
Example no. 3
void DCameraView::paint(const QVideoFrame &frame)
{
    QPainter painter(this);

    // bits() is only valid while the frame is mapped, so map a shallow copy.
    QVideoFrame mappedFrame(frame);
    if (!mappedFrame.map(QAbstractVideoBuffer::ReadOnly))
        return;

    QImage image(
                mappedFrame.bits(),
                mappedFrame.width(),
                mappedFrame.height(),
                mappedFrame.bytesPerLine(),
                QVideoFrame::imageFormatFromPixelFormat(mappedFrame.pixelFormat()));

    painter.drawImage(0, 0, image.mirrored(m_mirroredHorizontal, m_mirroredVertical));

    // mirrored() produced a deep copy, so the mapping can be released now.
    mappedFrame.unmap();
}
void tst_QVideoFrame::createNull()
{
    QVideoFrame frame;

    QVERIFY(!frame.isValid());
    QCOMPARE(frame.handleType(), QAbstractVideoBuffer::NoHandle);
    QCOMPARE(frame.pixelFormat(), QVideoFrame::Format_Invalid);
    QCOMPARE(frame.size(), QSize());
    QCOMPARE(frame.width(), -1);
    QCOMPARE(frame.height(), -1);
    QCOMPARE(frame.fieldType(), QVideoFrame::ProgressiveFrame);
    QCOMPARE(frame.startTime(), qint64(-1));
    QCOMPARE(frame.endTime(), qint64(-1));
}
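For contrast, a frame constructed with a real buffer reports itself as valid. A minimal hedged sketch of such a check (not part of the original test file):

void tst_QVideoFrame::createValid() // hypothetical companion test
{
    // 64x64 RGB32: 4 bytes per pixel, 256 bytes per line, 16384 bytes total.
    QVideoFrame frame(64 * 64 * 4, QSize(64, 64), 64 * 4, QVideoFrame::Format_RGB32);

    QVERIFY(frame.isValid());
    QCOMPARE(frame.pixelFormat(), QVideoFrame::Format_RGB32);
    QCOMPARE(frame.size(), QSize(64, 64));
}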
Example no. 5
bool ChilitagsSurface::present(const QVideoFrame &frame) {

    //qDebug("time: %d", m_timer.elapsed());
    //qDebug("newFrame: %dx%d", frame.width(), frame.height());
    //m_timer.restart();

    QVideoFrame copy = frame;
    //if (m_frame.pixelFormat() == QVideoFrame::Format_UYVY) {
    if (copy.map(QAbstractVideoBuffer::ReadOnly)) {
        const cv::Mat mat(copy.height(), copy.width(), CV_8UC3,
                          copy.bits());
        if (m_videoSurface) {
            m_converted.create(copy.height(), copy.width(), CV_8UC4);
            cv::cvtColor(mat, m_converted, CV_BGR2RGBA);
        }

        m_item.setTags(m_chilitags.estimate(mat));

        copy.unmap();
    }
    //qDebug("%lu tags", m_tags.size());

    if (m_videoSurface) {
        QImage image(m_converted.data,
                     m_converted.cols, m_converted.rows,
                     QImage::Format_ARGB32);
        return m_videoSurface->present(QVideoFrame(image));
    }

    return true;
}
bool AndroidVideoSurface::present(const QVideoFrame &frame)
{
    if (surfaceFormat().pixelFormat() != frame.pixelFormat()
        || surfaceFormat().frameSize() != frame.size()) {
        setError(IncorrectFormatError);
        stop();
        return false;
    } else {
        paintLock.lock();
        m_currentFrame = frame;
        m_widget->update(m_targetRect);
        paintLock.unlock();
        return true;
    }
}
Example no. 7
void MainWindow::onImageAvailable( int id, const QVideoFrame& buffer )
{
    qDebug() << "Capture image available...";

    // bits() is only valid while the frame is mapped; map a shallow copy and
    // deep-copy the image before unmapping.
    QVideoFrame frame( buffer );
    if ( !frame.map( QAbstractVideoBuffer::ReadOnly ) )
        return;

    QImage::Format imageFormat = QVideoFrame::imageFormatFromPixelFormat( frame.pixelFormat() );
    QImage img = QImage( frame.bits(), frame.width(), frame.height(), frame.bytesPerLine(), imageFormat ).copy();
    frame.unmap();

    QPixmap image = QPixmap::fromImage( img );
    QLabel* l = new QLabel();

    ui->tabWidget->addTab( l, QString( "%1" ).arg( id ) );

    l->setPixmap( image );
    l->show();
}
bool CaptureBuffer::present(const QVideoFrame &frame) {

    QList<QVideoFrame::PixelFormat> formatos = supportedPixelFormats();

    if (!formatos.contains(frame.pixelFormat())) {
        return false;
    } else {

        // Copy the frame
        QVideoFrame f(frame);
        // Map it so the buffer contents can be read
        if (!f.map(QAbstractVideoBuffer::ReadOnly))
            return false;
        // Build an image from the frame; copy() detaches the pixels so the
        // image remains valid after unmap()
        QImage imagen = QImage(f.bits(),
                               f.width(),
                               f.height(),
                               f.bytesPerLine(),
                               QVideoFrame::imageFormatFromPixelFormat(f.pixelFormat())).copy();
        // Release the mapping
        f.unmap();
        // Emit the signal
        emit transmitirImagen(imagen);

        return true;
    }
}
bool VideoWidgetSurface::present(const QVideoFrame &frame)
{
    if (surfaceFormat().pixelFormat() != frame.pixelFormat()
            || surfaceFormat().frameSize() != frame.size()) {
        setError(IncorrectFormatError);
        stop();

        return false;
    } else {
        currentFrame = frame;

        widget->repaint(targetRect);

        return true;
    }
}
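present() can only reject formats relative to what was negotiated at start(); the usual counterpart is a supportedPixelFormats() override. A sketch along these lines (restricting the list to RGB32 is an illustrative assumption):

QList<QVideoFrame::PixelFormat> VideoWidgetSurface::supportedPixelFormats(
        QAbstractVideoBuffer::HandleType handleType) const
{
    // Only formats advertised here will be offered to present().
    if (handleType == QAbstractVideoBuffer::NoHandle)
        return QList<QVideoFrame::PixelFormat>() << QVideoFrame::Format_RGB32;
    return QList<QVideoFrame::PixelFormat>();
}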
Example no. 10
	bool WebcamCapture::present(QVideoFrame const & frame)
	{
		if (frame.isValid())
		{
			// QVideoFrame::map() is a non-const method, so we cannot call it on a const frame object.
			// Therefore, we need to clone the original frame to get a non-const object.
			//
			QVideoFrame clonedFrame(frame);

			if (clonedFrame.map(QAbstractVideoBuffer::ReadOnly))
			{
				QImage const frameImage(clonedFrame.bits(), clonedFrame.width(), clonedFrame.height(), clonedFrame.bytesPerLine(), QVideoFrame::imageFormatFromPixelFormat(clonedFrame.pixelFormat()));

				// The previously constructed QImage object doesn't copy the data provided by QVideoFrame object.
				// Instead of that, it relies on the validity of this data throughout its lifetime.
				// Unfortunately, QVideoFrame will be destructed once it leaves this scope.
				// Therefore, this situation forces us to create a deep copy of the existing QImage object.
				//
				// Keeping in mind, that format RGBA8888 will later be used for QOpenGLTexture objects,
				// this circumstance can be used to create the above mentioned deep copy. Additionally,
				// the conversion is performed in advance, sparing some time in the main thread.
				//
				auto capturedImage = std::make_shared<QImage>(frameImage.convertToFormat(QImage::Format_RGBA8888));
				emit imageCaptured(capturedImage);

				clonedFrame.unmap();
				return true;
			}
		}
		return false;
	}
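The map/deep-copy/unmap sequence explained above recurs in most of these examples; it can be factored into a small helper. A sketch, not taken from any of the quoted sources:

// Shallow-copy the frame, map it read-only, wrap the pixels in a QImage,
// and deep-copy before unmapping so the result outlives the mapping.
QImage videoFrameToImage(const QVideoFrame &frame)
{
    QVideoFrame mapped(frame);
    if (!mapped.map(QAbstractVideoBuffer::ReadOnly))
        return QImage();

    const QImage::Format format =
            QVideoFrame::imageFormatFromPixelFormat(mapped.pixelFormat());
    if (format == QImage::Format_Invalid) {
        mapped.unmap();
        return QImage();
    }

    QImage image = QImage(mapped.bits(), mapped.width(), mapped.height(),
                          mapped.bytesPerLine(), format).copy();
    mapped.unmap();
    return image;
}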
bool QCustomVideoSurface::present(const QVideoFrame &frame){

    if(frame.isValid()) {
        QVideoFrame cloneFrame(frame); // makes a shallow copy (QVideoFrame is explicitly shared) to gain access to the pixel data
        if (!cloneFrame.map(QAbstractVideoBuffer::ReadOnly))
            return false;
#ifdef Q_OS_ANDROID
        cv::Mat mat(cloneFrame.height(), cloneFrame.width(), CV_8UC4, (void *)cloneFrame.bits());
        emit frameAvailable(mat, QImage::Format_RGBX8888);
#else
        QImage::Format format = QVideoFrame::imageFormatFromPixelFormat(cloneFrame.pixelFormat());
        int cvtype = CV_8UC1;
        switch(format) {
            case QImage::Format_RGB32:
                cvtype = CV_8UC4;
                break;
            case QImage::Format_RGB888:
                cvtype = CV_8UC3;
                break;
            case QImage::Format_Invalid:
                qWarning("QCustomVideoSurface Warning: image format is QImage::Format_Invalid");
                return false;
            default:
                // TODO: add further formats as they are encountered
                qWarning("QCustomVideoSurface Warning: image format is not implemented (QImage::Format %d)", format);
                return false;
        }
        cv::Mat mat(cloneFrame.height(), cloneFrame.width(), cvtype, (void *)cloneFrame.bits());
        cv::flip(mat,mat,0);
        emit frameAvailable(mat, format);
#endif
        cloneFrame.unmap();
        return true;
    }
    return false;
}
bool MyVideoSurface::present(const QVideoFrame& frame){
    if (frame.isValid()) {
        QVideoFrame cloneFrame(frame);
        cloneFrame.map(QAbstractVideoBuffer::ReadOnly);
        img = QImage(cloneFrame.bits(),
                     cloneFrame.width(),
                     cloneFrame.height(),
                     QVideoFrame::imageFormatFromPixelFormat(cloneFrame.pixelFormat()));

        //do something with the image ...
        //img = &img1;
        //counter++;
        //if (counter % 100 == 0) {
            counter = 0;
            //qDebug() << "PrintImage";
            QRect rect(400, 240, 800, 480);
            img = img.copy(rect);
            img = img.mirrored(true,false);
            //qDebug() << img.width() << " " << img.height();
            QImage image = show->fit500(&img);
            show->setImage(image);
            show->computeMostFitTemplateX(10);
            GT.m_TV = show->getTV();
            GT.m_image = image;
            show->update();
        //}

        cloneFrame.unmap();
        return true;
    }
    return false;
}
Example no. 13
// On success, the caller must call avpicture_free(pOutFrame) to release the memory once it is done with pOutFrame
// Returns 0 on success, non-zero on failure
int CTool::ConvertFormat(/*[in]*/ const QVideoFrame &inFrame,
                         /*[out]*/AVPicture &outFrame,
                         /*[in]*/ int nOutWidth,
                         /*[in]*/ int nOutHeight,
                         /*[in]*/ AVPixelFormat pixelFormat)
{
    int nRet = 0;
    
    AVPicture pic;
    nRet = avpicture_fill(&pic, (uint8_t*) inFrame.bits(),
                  QVideoFrameFormatToFFMpegPixFormat(inFrame.pixelFormat()),
                  inFrame.width(),
                  inFrame.height());
    if(nRet < 0)
    {
        LOG_MODEL_DEBUG("Tool", "avpicture_fill fail:%x", nRet);
        return nRet;
    }
    
    nRet = ConvertFormat(pic, inFrame.width(), inFrame.height(),
                  QVideoFrameFormatToFFMpegPixFormat(inFrame.pixelFormat()),
                  outFrame, nOutWidth, nOutHeight, pixelFormat);

    return nRet;
}
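A hedged usage sketch of the ownership contract stated in the comment above (the input frame and output size are assumptions):

// The caller owns outFrame on success and must release it with avpicture_free().
AVPicture outFrame;
// inFrame is assumed to be a QVideoFrame that is already mapped.
if (CTool::ConvertFormat(inFrame, outFrame, 640, 480, AV_PIX_FMT_YUV420P) == 0) {
    // ... use outFrame ...
    avpicture_free(&outFrame);
}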
Example no. 14
void D3DPresentEngine::presentSample(void *opaque, qint64)
{
    HRESULT hr = S_OK;

    IMFSample *sample = reinterpret_cast<IMFSample*>(opaque);
    IMFMediaBuffer* buffer = NULL;
    IDirect3DSurface9* surface = NULL;

    if (m_surface && m_surface->isActive()) {
        if (sample) {
            // Get the buffer from the sample.
            hr = sample->GetBufferByIndex(0, &buffer);
            if (FAILED(hr))
                goto done;

            // Get the surface from the buffer.
            hr = MFGetService(buffer, MR_BUFFER_SERVICE, IID_PPV_ARGS(&surface));
            if (FAILED(hr))
                goto done;
        }

        if (surface && updateTexture(surface)) {
            QVideoFrame frame = QVideoFrame(new TextureVideoBuffer(m_glTexture),
                                            m_surfaceFormat.frameSize(),
                                            m_surfaceFormat.pixelFormat());

            // WMF uses 100-nanosecond units, Qt uses microseconds
            LONGLONG startTime = -1;
            if (SUCCEEDED(sample->GetSampleTime(&startTime))) {
                frame.setStartTime(startTime * 0.1);

                LONGLONG duration = -1;
                if (SUCCEEDED(sample->GetSampleDuration(&duration)))
                    frame.setEndTime((startTime + duration) * 0.1);
            }

            m_surface->present(frame);
        }
    }

done:
    qt_wmf_safeRelease(&surface);
    qt_wmf_safeRelease(&buffer);
    qt_wmf_safeRelease(&sample);
}
Example no. 15
void DataController::concatenateFrames(DataController::WhichFrame which, QVideoFrame frame)
{
    if (!concatenatingFrameInitialized) {
        int width = frame.width();
        int height = frame.height();
        //concatenatingImage = new QImage(width*2, height, QImage::Format_RGB888);
        //concatenationPainter = new QPainter(concatenatingImage);
        // RGB24 needs 3 bytes per pixel, so the stride of the doubled-width
        // frame is width*2*3 bytes (the memcpy below relies on this).
        concatenatingFrame = new QVideoFrame(width * 2 * height * 3,
                                             QSize(width*2,height), width*2*3, QVideoFrame::Format_RGB24);
        qDebug() << "Creating a concatenating frame of size " << 2*width << " x " << height;
        concatenatingFrameInitialized = true;
    }

    if (!frame.map(QAbstractVideoBuffer::ReadOnly))
        qDebug() << "Failed to map current frame";
    else {
        if (!concatenatingFrame->map(QAbstractVideoBuffer::WriteOnly))
            qDebug() << "Failed to map concatenating frame";
        else {
            //concatenationPainter->drawImage(frame.width() * (which==right),0,frame);
            for (int i=0; i < frame.height(); i++)
                memcpy(concatenatingFrame->bits() + concatenatingFrame->width()*3*i
                       + frame.width()*3*(which==right),
                       frame.bits() + frame.width()*3*i, frame.width()*3);
            concatenatingFrame->unmap();

            if (frameConcatenationState == NOT_STARTED) {
                frameConcatenationState = (which==left) ? LEFT_READY : RIGHT_READY;
            } else if (frameConcatenationState == LEFT_READY) {
                if (which == left)
                    qDebug() << "Two left frames received before a right frame";
                else {
                    frameConcatenationState = NOT_STARTED;
                    emit newFrame(*concatenatingFrame);
                }
            } else if (frameConcatenationState == RIGHT_READY) {
                if (which == right)
                    qDebug() << "Two right frames received before a right frame";
                else {
                    frameConcatenationState = NOT_STARTED;
                    emit newFrame(*concatenatingFrame);
                }
            }
        }
        frame.unmap();
    }
}
Example no. 16
void QPxaVideoOutput::doRenderFrame( const QVideoFrame& frame )
{
    //qWarning() << "QPxaVideoOutput::renderFrame" << geometry();
    if ( frame.isNull() ) {
        if ( d->overlay )
            d->overlay->fill( 16,128,128 ); // yuv:black
        return;
    }

    if ( frame.size() != d->videoSize ) {
        d->videoSize = frame.size();
        setModified(true);
    }

    //if something has changed, recalculate position of the image:
    if ( isModified() ) {
        setModified(false);

        QRegion paintRegion = deviceMappedClipRegion();
        QRect geometry = deviceMappedGeometry();

        QSize imageSize = frame.size();
        //respect frame aspect ratio
        if ( frame.hasCustomAspectRatio() ) {
            imageSize.setWidth( int(imageSize.height() * frame.aspectRatio()) );
        }

        switch ( effectiveRotation() ) {
            case QtopiaVideo::Rotate0:
            case QtopiaVideo::Rotate180:
                break;
            case QtopiaVideo::Rotate90:
            case QtopiaVideo::Rotate270:
                imageSize = QSize( imageSize.height(), imageSize.width() );
        };

        if ( scaleMode() == QtopiaVideo::FitWindow ) {
            double scaleFactor = qMin( double(geometry.width())/imageSize.width(),
                                       double(geometry.height())/imageSize.height() );

            //don't scale if the size is close to required
            if ( scaleFactor < 0.95 || scaleFactor > 1.1 ) {
                imageSize *= scaleFactor;
            }
        }

        d->imageRect = QRect( QPoint(0,0), imageSize );
        d->imageRect.moveCenter( QPoint( geometry.width()/2, geometry.height()/2 ) );

        if ( d->overlay )
            d->overlay->fill( 16, 128, 128 );//black color in yuv
    }

    if ( d->overlay )
        d->overlay->drawFrame( frame,
                               QRect( QPoint(0,0), frame.size() ),
                               d->imageRect,
                               effectiveRotation() );
}
Example no. 17
void c3::onBufferAvailable(int id, const QVideoFrame& pFrame)
{
    this->camera->unlock();
    this->camera->stop();

    QVideoFrame lvFrame = pFrame;
    if (!lvFrame.map(QAbstractVideoBuffer::ReadOnly)) {
        return;
    }
    QImage lvImage;
    lvImage.loadFromData(lvFrame.bits(), lvFrame.mappedBytes(), "JPEG");
    lvFrame.unmap();

/* here you can process lvImage before saving */
//    lvImage.invertPixels(QImage::InvertRgb);

    lvImage.save(defaultSaveFileName, "JPEG");
    this->camera->start();
}
Example no. 18
void CFrmPlayer::slotPresent(const QVideoFrame &frame)
{
    if(frame.pixelFormat() != QVideoFrame::Format_BGR32)
    {
        m_Process.slotFrameConvertedToRGB32(frame);
        return;
    }
    m_VideoFrame = frame;
    update();
}
Example no. 19
bool VideoFrameSurface::present(const QVideoFrame& frame)
{
	if(frame.isValid())
	{
		g_mutex.lock();
		g_frame=QVideoFrame(frame);
		g_mutex.unlock();
	}

	return true;
}
Example no. 20
void ImageSource::processFrame(QVideoFrame frame)
{
  qDebug() << recorder->duration();
  if (sendFrame) {
    if (frame.map(QAbstractVideoBuffer::ReadOnly)) {
      if (frame.pixelFormat() == QVideoFrame::Format_NV21) {
        QImage img(frame.size(), QImage::Format_RGB32);
        qt_convert_NV21_to_ARGB32((uchar*)frame.bits(), (quint32*)img.bits(), frame.width(), frame.height());
        img.save(QString("/sdcard/DCIM/DashCam/Images/%1.jpg").arg(QDateTime::currentDateTime().toString(Qt::ISODate)), "JPG");
        emit newFrame(img);
        qDebug() << "Saving Frame" << counter;
        sendFrame = false;
      }
      frame.unmap();
    }
  }

  if (startRecording) {
    QTimer::singleShot(300000, this, SLOT(newVideo()));
    recorder->setOutputLocation(QUrl::fromLocalFile(QString("/sdcard/DCIM/DashCam/Video/%1.mp4").arg(QDateTime::currentDateTime().toString(Qt::ISODate))));
    recorder->record();

    qDebug() << recorder->supportedResolutions();
    qDebug() << recorder->state();
    qDebug() << recorder->status();
    qDebug() << recorder->error();
    startRecording = false;
  }
}
QVideoFrame MFTransform::makeVideoFrame()
{
    QVideoFrame frame;

    if (!m_format.isValid())
        return frame;

    IMFMediaBuffer *buffer = 0;

    do {
        if (FAILED(m_sample->ConvertToContiguousBuffer(&buffer)))
            break;

        QByteArray array = dataFromBuffer(buffer, m_format.frameHeight(), &m_bytesPerLine);
        if (array.isEmpty())
            break;

        // Wrapping IMFSample or IMFMediaBuffer in a QVideoFrame is not possible because we cannot hold
        // IMFSample for a "long" time without affecting the rest of the topology.
        // If an IMFSample is held for more than 5 frames, the decoder starts to reuse it even though it hasn't been released yet.
        // That is why we copy data from IMFMediaBuffer here.
        frame = QVideoFrame(new QMemoryVideoBuffer(array, m_bytesPerLine), m_format.frameSize(), m_format.pixelFormat());

        // WMF uses 100-nanosecond units, Qt uses microseconds
        LONGLONG startTime = -1;
        if (SUCCEEDED(m_sample->GetSampleTime(&startTime))) {
            frame.setStartTime(startTime * 0.1);

            LONGLONG duration = -1;
            if (SUCCEEDED(m_sample->GetSampleDuration(&duration)))
                frame.setEndTime((startTime + duration) * 0.1);
        }
    } while (false);

    if (buffer)
        buffer->Release();

    return frame;
}
bool VideoSurface_ForQQuickItem::present(const QVideoFrame& frame)
{
    if ((_frameAvailable) || (_context == nullptr))
        return true;
    _context->lock();
    if (!frame.isValid()) {
        _context->unlock();
        return false;
    }
    _frame = frame;
    _frameAvailable = true;
    _context->unlock();
    QMetaObject::invokeMethod(_parentQuickItem, "update");
    return true;
}
Example no. 23
bool VideoWidgetSurface::present(const QVideoFrame &frame)
{
    if (surfaceFormat().pixelFormat() != frame.pixelFormat()
            || surfaceFormat().frameSize() != frame.size())
    {
        setError(IncorrectFormatError);
        stop();

        return false;
    }
    else
    {
        currentFrame = frame;
        currentFrame.map(QAbstractVideoBuffer::ReadOnly);
        QImage image(currentFrame.bits(),
                     currentFrame.width(),
                     currentFrame.height(),
                     currentFrame.bytesPerLine(),
                     imageFormat);
        emit aviImage(image);
        currentFrame.unmap();
        return true;
    }
}
Example no. 24
void VideoFrameSurface::render() {
	if (!g_frame.isValid()) return;
	GLCALL_INIT;
	if(g_frame.map(QAbstractVideoBuffer::ReadOnly))
	{
		GLuint tid=*((GLuint *)camtex->getNative());
		GLCALL glBindTexture(GL_TEXTURE_2D, tid);
		//qDebug() << "Render:" << g_frame.width()<< g_frame.height() << tid;
		GLCALL glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, cw, ch,0, GL_RGBA, GL_UNSIGNED_BYTE, g_frame.bits());
		ShaderEngine *engine=gtexture_get_engine();
		engine->reset();
		ShaderBuffer *oldfbo = engine->setFramebuffer(rdrTgt);
		engine->setViewport(0, 0, gtex->width, gtex->height);
		Matrix4 projection = engine->setOrthoFrustum(0,
				gtex->baseWidth, 0, gtex->baseHeight, -1, 1);
		engine->setProjection(projection);
		Matrix4 model;
		engine->setModel(model);
		engine->bindTexture(0,camtex);
		shader->setData(ShaderProgram::DataVertex, ShaderProgram::DFLOAT, 2,
				&vertices[0], vertices.size(), vertices.modified,
				&vertices.bufferCache);
		shader->setData(ShaderProgram::DataTexture, ShaderProgram::DFLOAT, 2,
				&texcoords[0], texcoords.size(), texcoords.modified,
				&texcoords.bufferCache);
		shader->drawElements(ShaderProgram::TriangleStrip, indices.size(),
				ShaderProgram::DUSHORT, &indices[0], indices.modified,
				&indices.bufferCache);
		vertices.modified = false;
		texcoords.modified = false;
		indices.modified = false;

		engine->setFramebuffer(oldfbo);
		g_frame.unmap();
	}

}
Example no. 25
bool CameraFrameGrabber::present(const QVideoFrame& frame) {
    if(ready) {
        beNotReady();
        if (frame.isValid()) {
            QVideoFrame cloneFrame(frame);
            cloneFrame.map(QAbstractVideoBuffer::ReadOnly);
            QImage image(cloneFrame.bits(), cloneFrame.width(), cloneFrame.height(), QVideoFrame::imageFormatFromPixelFormat(cloneFrame.pixelFormat()));
            emit frameAvailable(image);
            cloneFrame.unmap();
            return true;
        }
        return false;
    }
    return true;
}
Example no. 26
void VideoWriter::newFrame(QVideoFrame frame)
{
    if ((frame.width() != width) || (frame.height() != height)) {
        width = frame.width();
        height = frame.height();
    }
    if (waitingToInitialize) {
        initialize(*vFilename);
    }
    if (currentlyWriting) {
        if (!frame.map(QAbstractVideoBuffer::ReadOnly)) {
            qDebug() << "Failure to map video frame in writer";
            return;
        }

        AVCodecContext *c = video_st->codec;
        avpicture_fill((AVPicture *)tmp_picture, frame.bits(),
                       PIX_FMT_RGB24, c->width, c->height);
        sws_scale(sws_ctx, tmp_picture->data, tmp_picture->linesize,
                  0, c->height, picture->data, picture->linesize);
        picture->pts = frameCounter++;
        frame.unmap();

        /* encode the image */
        /* if zero size, it means the image was buffered */
        /* write the compressed frame in the media file */
        /* XXX: in case of B frames, the pts is not yet valid */
        int out_size = avcodec_encode_video(c, video_outbuf, video_outbuf_size, picture);
        if (out_size > 0) {
            AVPacket pkt;
            av_init_packet(&pkt);
            if (c->coded_frame->pts != AV_NOPTS_VALUE)
                pkt.pts= av_rescale_q(c->coded_frame->pts, c->time_base, video_st->time_base);
            if(c->coded_frame->key_frame)
                pkt.flags |= AV_PKT_FLAG_KEY;
            pkt.stream_index= video_st->index;
            pkt.data= video_outbuf;
            pkt.size= out_size;

            /* write the compressed frame in the media file */
            //ret = av_interleaved_write_frame(oc, &pkt);
            int ret = av_write_frame(oc, &pkt);
        }


        // Save time stamp
    }
}
bool VideoSurface::present(const QVideoFrame &frame)
{
    if (frame.isValid())
    {
        QVideoFrame videoFrame(frame);
        if( videoFrame.map(QAbstractVideoBuffer::ReadOnly) )
        {
            lastFrame = QImage(videoFrame.width(), videoFrame.height(), QImage::Format_RGB888);
            memcpy(lastFrame.bits(), videoFrame.bits(), videoFrame.mappedBytes());

            videoFrame.unmap();

            // Use thread for processing
            emit frameAvailable();
            return true;
        }
    }
    return false;
}
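The frameAvailable() signal is apparently meant to hand lastFrame off to a worker thread. A hypothetical consumer slot (the class, member access, and connection type are assumptions; a real implementation would also need a mutex around lastFrame):

// Connected to VideoSurface::frameAvailable() with a queued connection so the
// processing runs outside the capture thread.
void FrameProcessor::onFrameAvailable()
{
    QImage snapshot = m_surface->lastFrame; // assumed accessible member
    // ... heavy processing on snapshot ...
}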
    void convertAllFrames() {
        mProgressBar->setMaximum(mFrames.length() - 1);
        int count = 0;

        foreach(QVideoFrame frame, mFrames) {
            mProgressBar->setValue(count++);
            QImage image;
            if (frame.pixelFormat() == QVideoFrame::Format_RGB32) {
                // Copy const QVideoFrame to mutable QVideoFrame.
                QVideoFrame nonConstFrame = frame;
                // Map the frame so its pixel data can be read (increments the buffer's map count)
                nonConstFrame.map(QAbstractVideoBuffer::ReadOnly);
                // Create new image from the frame bits
                image = QImage(
                        nonConstFrame.bits(),
                        nonConstFrame.width(),
                        nonConstFrame.height(),
                        nonConstFrame.bytesPerLine(),
                        QVideoFrame::imageFormatFromPixelFormat(nonConstFrame.pixelFormat()));
                nonConstFrame.unmap();
            } else {
                image = QImage(frame.size(), QImage::Format_RGB32);
                mFrameConverter->convertFrame(frame, &image);
            }

            QString imgFileName = QString("%1.%2.png").arg(mFileName).arg(++mCount, 2, 10, QChar('0'));
            //QFile file(imgFileName);
            //file.open(QFile::WriteOnly);

            bool saved = image.save(imgFileName, "png");
            if (saved) {
                log->info("File: %1 saved", imgFileName);
            } else {
                log->info("File: %1 not saved", imgFileName);
            }

        }
    }
    void convertFrame(QVideoFrame &frame, QImage *image) {
        if(frame.pixelFormat() == QVideoFrame::Format_YUV420P && frame.map(QAbstractVideoBuffer::ReadOnly)) {
            const qint32 frameWidth = frame.width();
            const qint32 frameHeight = frame.height();
            const qint32 frameSize = frameHeight * frameWidth;
            const uchar *lumaYBytes = frame.bits();
            const uchar *chromaUBytes = lumaYBytes + frameSize;
            const uchar *chromaVBytes = chromaUBytes + (frameSize / 4);

            for (int y = 0; y < frameHeight; y++) {
                for (int x = 0; x < frameWidth; x++) {
                    const int Y = lumaYBytes[y * frameWidth + x];
                    const int U = chromaUBytes[(y / 2) * (frameWidth / 2) + (x / 2)];
                    const int V = chromaVBytes[(y / 2) * (frameWidth / 2) + (x / 2)];
                    const int r = qBound(0.0, 1.164 * (Y - 16) + 1.596 * (V - 128), 255.0);
                    const int g = qBound(0.0, 1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128), 255.0);
                    const int b = qBound(0.0, 1.164 * (Y - 16) + 2.018 * (U - 128), 255.0);

                    image->setPixel(x, y, qRgb(r, g, b));
                }
            }
            frame.unmap();
        }
    }
Example no. 30
void MyProbe::processFrame(QVideoFrame frame)
{
    // convert input to cv::mat here
    if (!frame.isValid())
        return;

    if (frame.map(QAbstractVideoBuffer::ReadOnly))
    {
        if (frame.pixelFormat() == QVideoFrame::Format_YUV420P ||
                frame.pixelFormat() == QVideoFrame::Format_NV12 ||
                frame.pixelFormat() == QVideoFrame::Format_NV21)
        {
            if (processMutex.tryLock())
            {
                // extracts gray channel from yuv image
                img = QImage(frame.bits(), frame.width(), frame.height(),
                             frame.bytesPerLine(), QImage::Format_Grayscale8).copy();
                processMutex.unlock();
                emit finished();
            }
        }
        else if (frame.pixelFormat() == QVideoFrame::Format_BGR32)
        {
            if (processMutex.tryLock())
            {
                QImage img_tmp;
                // converts the BGR32 frame to grayscale
                img_tmp = QImage(frame.bits(), frame.width(), frame.height(),
                                 frame.bytesPerLine(), QImage::Format_ARGB32).copy();
                img = img_tmp.convertToFormat(QImage::Format_Grayscale8);
                processMutex.unlock();
                emit finished();
            }
        }
        else
        {
            // Unsupported pixel format: unmap before returning so the frame
            // is not left mapped.
            frame.unmap();
            return;
        }

        frame.unmap();
    }
}