Example #1
void QmlVlcMmVideoOutput::video_cleanup_cb()
{
    m_pixelFormat = QVideoFrame::Format_Invalid;
    m_frameWidth = m_frameHeight = 0;
    m_frameSize = 0;
    m_pitchSize = 0;
    m_UPlaneOffset = m_VPlaneOffset = 0;
    m_videoFrame = QVideoFrame();

    QMetaObject::invokeMethod( this, "cleanupVideoSurface" );
}
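For context, a minimal sketch of the slot such a queued invocation might target; the m_videoSurface member and the slot body below are assumptions for illustration, not taken from the QmlVlc sources:

void QmlVlcMmVideoOutput::cleanupVideoSurface()
{
    // Hypothetical slot body: stop the surface on the GUI thread, since
    // libvlc delivers the video callbacks on its own decoding thread.
    if (m_videoSurface && m_videoSurface->isActive())
        m_videoSurface->stop();
}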
Example #2
bool VideoFrameSurface::present(const QVideoFrame& frame)
{
    if (frame.isValid()) {
        // Keep a shallow copy of the latest frame for the consumer thread.
        QMutexLocker locker(&g_mutex);
        g_frame = frame;
    }

    return true;
}
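The consumer side would take the same mutex before reading the stored frame; a minimal sketch, where takeFrame() is a hypothetical helper rather than part of the original surface:

QVideoFrame takeFrame()
{
    // Hypothetical consumer: fetch the most recent frame under the same
    // global mutex and reset the shared slot to an invalid frame.
    QMutexLocker locker(&g_mutex);
    QVideoFrame frame = g_frame;
    g_frame = QVideoFrame();
    return frame;
}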
Example #3
QAbstractVideoSurface::Error QVideoSurfaceRasterPainter::start(const QVideoSurfaceFormat &format)
{
    m_frame = QVideoFrame();
    m_imageFormat = QVideoFrame::imageFormatFromPixelFormat(format.pixelFormat());
    m_imageSize = format.frameSize();
    m_scanLineDirection = format.scanLineDirection();

    return format.handleType() == QAbstractVideoBuffer::NoHandle
            && m_imageFormat != QImage::Format_Invalid
            && !m_imageSize.isEmpty()
            ? QAbstractVideoSurface::NoError
            : QAbstractVideoSurface::UnsupportedFormatError;
}
Example #4
void D3DPresentEngine::presentSample(void *opaque, qint64)
{
    HRESULT hr = S_OK;

    IMFSample *sample = reinterpret_cast<IMFSample*>(opaque);
    IMFMediaBuffer* buffer = NULL;
    IDirect3DSurface9* surface = NULL;

    if (m_surface && m_surface->isActive()) {
        if (sample) {
            // Get the buffer from the sample.
            hr = sample->GetBufferByIndex(0, &buffer);
            if (FAILED(hr))
                goto done;

            // Get the surface from the buffer.
            hr = MFGetService(buffer, MR_BUFFER_SERVICE, IID_PPV_ARGS(&surface));
            if (FAILED(hr))
                goto done;
        }

        if (surface && updateTexture(surface)) {
            QVideoFrame frame = QVideoFrame(new TextureVideoBuffer(m_glTexture),
                                            m_surfaceFormat.frameSize(),
                                            m_surfaceFormat.pixelFormat());

            // WMF uses 100-nanosecond units, Qt uses microseconds
            LONGLONG startTime = -1;
            if (SUCCEEDED(sample->GetSampleTime(&startTime))) {
                frame.setStartTime(startTime * 0.1);

                LONGLONG duration = -1;
                if (SUCCEEDED(sample->GetSampleDuration(&duration)))
                    frame.setEndTime((startTime + duration) * 0.1);
            }

            m_surface->present(frame);
        }
    }

done:
    qt_wmf_safeRelease(&surface);
    qt_wmf_safeRelease(&buffer);
    qt_wmf_safeRelease(&sample);
}
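Both WMF examples convert sample times inline via the factor 0.1; the same arithmetic as an integer helper, with mfTimeToUs being an illustrative name only (1 µs = 10 × 100 ns):

// Convert WMF 100-nanosecond units to Qt's microseconds.
static qint64 mfTimeToUs(LONGLONG mfTime)
{
    return static_cast<qint64>(mfTime / 10);
}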
Example #5
QVideoFrame MFTransform::makeVideoFrame()
{
    QVideoFrame frame;

    if (!m_format.isValid())
        return frame;

    IMFMediaBuffer *buffer = 0;

    do {
        if (FAILED(m_sample->ConvertToContiguousBuffer(&buffer)))
            break;

        QByteArray array = dataFromBuffer(buffer, m_format.frameHeight(), &m_bytesPerLine);
        if (array.isEmpty())
            break;

        // Wrapping the IMFSample or IMFMediaBuffer in a QVideoFrame is not possible
        // because we cannot hold the IMFSample for a "long" time without affecting
        // the rest of the topology. If an IMFSample is held for more than 5 frames,
        // the decoder starts to reuse it even though it hasn't been released yet.
        // That is why we copy the data from the IMFMediaBuffer here.
        frame = QVideoFrame(new QMemoryVideoBuffer(array, m_bytesPerLine), m_format.frameSize(), m_format.pixelFormat());

        // WMF uses 100-nanosecond units, Qt uses microseconds
        LONGLONG startTime = -1;
        if (SUCCEEDED(m_sample->GetSampleTime(&startTime))) {
            frame.setStartTime(startTime * 0.1);

            LONGLONG duration = -1;
            if (SUCCEEDED(m_sample->GetSampleDuration(&duration)))
                frame.setEndTime((startTime + duration) * 0.1);
        }
    } while (false);

    if (buffer)
        buffer->Release();

    return frame;
}
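The dataFromBuffer() helper is not shown above; a simplified sketch of what such a copy might look like using the standard IMFMediaBuffer::Lock/Unlock calls (the real helper also deals with stride and bottom-up image layouts, which this sketch omits):

static QByteArray copyBufferData(IMFMediaBuffer *buffer)
{
    // Sketch only: lock the media buffer, deep-copy its contents into a
    // QByteArray, then unlock. Error handling is reduced to the minimum.
    QByteArray array;
    BYTE *bytes = nullptr;
    DWORD length = 0;
    if (SUCCEEDED(buffer->Lock(&bytes, nullptr, &length))) {
        array = QByteArray(reinterpret_cast<const char *>(bytes), int(length));
        buffer->Unlock();
    }
    return array;
}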
Example #6
void QGstreamerGLTextureRenderer::stopRenderer()
{
#ifdef GL_TEXTURE_SINK_DEBUG
    qDebug() << Q_FUNC_INFO;
#endif

    if (m_surface && m_surface->isActive())
        m_surface->stop();

    if (m_fallbackImage.isNull()) {
        if (!m_nativeSize.isEmpty()) {
            m_nativeSize = QSize();
            emit nativeSizeChanged();
        }
    } else {
        if (m_surface) {
            QVideoSurfaceFormat format(m_fallbackImage.size(), QVideoFrame::Format_RGB32);
            format.setPixelAspectRatio(m_nativeSize.width(), m_fallbackImage.width());

            if (m_surface->start(format))
                m_surface->present(QVideoFrame(m_fallbackImage));
        }
    }
}
Example #7
void VideoSurfaceFilter::flush()
{
    m_surface->present(QVideoFrame());

    m_wait.wakeAll();
}
Example #8
~QSGVideoMaterial_Texture()
{
    m_frame = QVideoFrame();
}
Example #9
void QVideoSurfaceRasterPainter::stop()
{
    m_frame = QVideoFrame();
}
Example #10
    void bind()
    {
        QOpenGLFunctions *functions = QOpenGLContext::currentContext()->functions();

        QMutexLocker lock(&m_frameMutex);
        if (m_frame.isValid()) {
            if (m_frame.map(QAbstractVideoBuffer::ReadOnly)) {
                QSize textureSize = m_frame.size();

                int stride = m_frame.bytesPerLine();
                switch (m_frame.pixelFormat()) {
                case QVideoFrame::Format_RGB565:
                    stride /= 2;
                    break;
                default:
                    stride /= 4;
                }

                m_width = qreal(m_frame.width()) / stride;
                textureSize.setWidth(stride);

                if (m_textureSize != textureSize) {
                    if (!m_textureSize.isEmpty())
                        functions->glDeleteTextures(1, &m_textureId);
                    functions->glGenTextures(1, &m_textureId);
                    m_textureSize = textureSize;
                }

                GLint dataType = GL_UNSIGNED_BYTE;
                GLint dataFormat = GL_RGBA;

                if (m_frame.pixelFormat() == QVideoFrame::Format_RGB565) {
                    dataType = GL_UNSIGNED_SHORT_5_6_5;
                    dataFormat = GL_RGB;
                }

                GLint previousAlignment;
                functions->glGetIntegerv(GL_UNPACK_ALIGNMENT, &previousAlignment);
                functions->glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

                functions->glActiveTexture(GL_TEXTURE0);
                functions->glBindTexture(GL_TEXTURE_2D, m_textureId);
                functions->glTexImage2D(GL_TEXTURE_2D, 0, dataFormat,
                                        m_textureSize.width(), m_textureSize.height(),
                                        0, dataFormat, dataType, m_frame.bits());

                functions->glPixelStorei(GL_UNPACK_ALIGNMENT, previousAlignment);

                functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
                functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
                functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
                functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

                m_frame.unmap();
            }
            m_frame = QVideoFrame();
        } else {
            functions->glActiveTexture(GL_TEXTURE0);
            functions->glBindTexture(GL_TEXTURE_2D, m_textureId);
        }
    }
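The stride handling in bind() is easiest to follow with concrete numbers; a hedged example, assuming an RGB32 frame that is 638 pixels wide but padded to 2560 bytes per line:

// bytesPerLine = 2560, RGB32 (4 bytes per pixel) -> stride = 2560 / 4 = 640.
// The texture is allocated 640 texels wide so each padded line uploads in one
// glTexImage2D call, but only the first 638 columns hold frame data, so
// m_width = 638.0 / 640 = 0.996875 and the shader samples just that fraction.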
Example #11
void tst_QVideoFrame::assign()
{
    QFETCH(QAbstractVideoBuffer::HandleType, handleType);
    QFETCH(QSize, size);
    QFETCH(QVideoFrame::PixelFormat, pixelFormat);
    QFETCH(QVideoFrame::FieldType, fieldType);
    QFETCH(qint64, startTime);
    QFETCH(qint64, endTime);

    QPointer<QtTestVideoBuffer> buffer = new QtTestVideoBuffer(handleType);

    QVideoFrame frame;
    {
        QVideoFrame otherFrame(buffer, size, pixelFormat);
        otherFrame.setFieldType(fieldType);
        otherFrame.setStartTime(startTime);
        otherFrame.setEndTime(endTime);

        frame = otherFrame;

        QVERIFY(!buffer.isNull());

        QVERIFY(otherFrame.isValid());
        QCOMPARE(otherFrame.handleType(), handleType);
        QCOMPARE(otherFrame.pixelFormat(), pixelFormat);
        QCOMPARE(otherFrame.size(), size);
        QCOMPARE(otherFrame.width(), size.width());
        QCOMPARE(otherFrame.height(), size.height());
        QCOMPARE(otherFrame.fieldType(), fieldType);
        QCOMPARE(otherFrame.startTime(), startTime);
        QCOMPARE(otherFrame.endTime(), endTime);

        otherFrame.setStartTime(-1);

        QVERIFY(!buffer.isNull());

        QVERIFY(otherFrame.isValid());
        QCOMPARE(otherFrame.handleType(), handleType);
        QCOMPARE(otherFrame.pixelFormat(), pixelFormat);
        QCOMPARE(otherFrame.size(), size);
        QCOMPARE(otherFrame.width(), size.width());
        QCOMPARE(otherFrame.height(), size.height());
        QCOMPARE(otherFrame.fieldType(), fieldType);
        QCOMPARE(otherFrame.startTime(), qint64(-1));
        QCOMPARE(otherFrame.endTime(), endTime);
    }

    QVERIFY(!buffer.isNull());

    QVERIFY(frame.isValid());
    QCOMPARE(frame.handleType(), handleType);
    QCOMPARE(frame.pixelFormat(), pixelFormat);
    QCOMPARE(frame.size(), size);
    QCOMPARE(frame.width(), size.width());
    QCOMPARE(frame.height(), size.height());
    QCOMPARE(frame.fieldType(), fieldType);
    QCOMPARE(frame.startTime(), qint64(-1));
    QCOMPARE(frame.endTime(), endTime);

    frame = QVideoFrame();

    QVERIFY(buffer.isNull());

    QVERIFY(!frame.isValid());
    QCOMPARE(frame.handleType(), QAbstractVideoBuffer::NoHandle);
    QCOMPARE(frame.pixelFormat(), QVideoFrame::Format_Invalid);
    QCOMPARE(frame.size(), QSize());
    QCOMPARE(frame.width(), -1);
    QCOMPARE(frame.height(), -1);
    QCOMPARE(frame.fieldType(), QVideoFrame::ProgressiveFrame);
    QCOMPARE(frame.startTime(), qint64(-1));
    QCOMPARE(frame.endTime(), qint64(-1));
}
Example #12
void QSGVideoMaterial_YUV::bind()
{
    QOpenGLFunctions *functions = QOpenGLContext::currentContext()->functions();
    QMutexLocker lock(&m_frameMutex);
    if (m_frame.isValid()) {
        if (m_frame.map(QAbstractVideoBuffer::ReadOnly)) {
            int fw = m_frame.width();
            int fh = m_frame.height();

            // Frame has changed size, recreate textures...
            if (m_textureSize != m_frame.size()) {
                if (!m_textureSize.isEmpty())
                    functions->glDeleteTextures(m_planeCount, m_textureIds);
                functions->glGenTextures(m_planeCount, m_textureIds);
                m_textureSize = m_frame.size();
            }

            GLint previousAlignment;
            functions->glGetIntegerv(GL_UNPACK_ALIGNMENT, &previousAlignment);
            functions->glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

            if (m_format.pixelFormat() == QVideoFrame::Format_NV12
                    || m_format.pixelFormat() == QVideoFrame::Format_NV21) {
                const int y = 0;
                const int uv = 1;

                m_planeWidth[0] = m_planeWidth[1] = qreal(fw) / m_frame.bytesPerLine(y);

                functions->glActiveTexture(GL_TEXTURE1);
                bindTexture(m_textureIds[1], m_frame.bytesPerLine(uv) / 2, fh / 2, m_frame.bits(uv), GL_LUMINANCE_ALPHA);
                functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
                bindTexture(m_textureIds[0], m_frame.bytesPerLine(y), fh, m_frame.bits(y), GL_LUMINANCE);

            } else { // YUV420P || YV12
                const int y = 0;
                const int u = m_frame.pixelFormat() == QVideoFrame::Format_YUV420P ? 1 : 2;
                const int v = m_frame.pixelFormat() == QVideoFrame::Format_YUV420P ? 2 : 1;

                m_planeWidth[0] = qreal(fw) / m_frame.bytesPerLine(y);
                m_planeWidth[1] = m_planeWidth[2] = qreal(fw) / (2 * m_frame.bytesPerLine(u));

                functions->glActiveTexture(GL_TEXTURE1);
                bindTexture(m_textureIds[1], m_frame.bytesPerLine(u), fh / 2, m_frame.bits(u), GL_LUMINANCE);
                functions->glActiveTexture(GL_TEXTURE2);
                bindTexture(m_textureIds[2], m_frame.bytesPerLine(v), fh / 2, m_frame.bits(v), GL_LUMINANCE);
                functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
                bindTexture(m_textureIds[0], m_frame.bytesPerLine(y), fh, m_frame.bits(y), GL_LUMINANCE);
            }

            functions->glPixelStorei(GL_UNPACK_ALIGNMENT, previousAlignment);
            m_frame.unmap();
        }

        m_frame = QVideoFrame();
    } else {
        // Go backwards to finish with GL_TEXTURE0
        for (int i = m_planeCount - 1; i >= 0; --i) {
            functions->glActiveTexture(GL_TEXTURE0 + i);
            functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[i]);
        }
    }
}
Example #13
    bool ICLVideoSurface::present(const QVideoFrame &frame)
    {
        if (frame.isValid()) {
            QVideoFrame cloneFrame(frame);
            cloneFrame.map(QAbstractVideoBuffer::ReadWrite);
            lock.lock();
            std::swap(imgWork, imgNextDisplay);
            if (cloneFrame.pixelFormat() == QVideoFrame::Format_RGB24) {
                imgWork->setChannels(3);
                imgWork->setSize(utils::Size(cloneFrame.width(), cloneFrame.height()));
                core::interleavedToPlanar<uchar,icl8u>(cloneFrame.bits(), imgWork, cloneFrame.bytesPerLine());
            } else if (cloneFrame.pixelFormat() == QVideoFrame::Format_YUV420P) {
                imgWork->setChannels(3);
                imgWork->setSize(utils::Size(cloneFrame.bytesPerLine(0), cloneFrame.height()));
                core::convertYUV420ToRGB8(cloneFrame.bits(), utils::Size(cloneFrame.bytesPerLine(), cloneFrame.height()), imgWork);
                if (cloneFrame.bytesPerLine() != cloneFrame.width())
                    imgWork->setROI(utils::Point(0,0), utils::Size(cloneFrame.width(), cloneFrame.height()));
                else
                    imgWork->setFullROI();
            } else if (cloneFrame.pixelFormat() == QVideoFrame::Format_ARGB32) {
                imgWork->setChannels(3);
                imgWork->setSize(utils::Size(cloneFrame.width(), cloneFrame.height()));
                const int dim = imgWork->getDim();
                icl8u *res_r = imgWork->begin(0);
                icl8u *res_g = imgWork->begin(1);
                icl8u *res_b = imgWork->begin(2);
                uchar *src = cloneFrame.bits();

                // The channel order in QVideoFrame seems switched: it is
                // 0xBBGGRRAA rather than 0xAARRGGBB, which is why channel 0
                // maps to blue and channel 2 maps to red.
                for (int i = 0; i < dim; ++i) {
                    res_r[i] = src[i*4+2]; // red channel
                    res_g[i] = src[i*4+1]; // green channel
                    res_b[i] = src[i*4+0]; // blue channel
                }
            } else {
                // Fallback if no native conversion is available: go through
                // QImage and convert to RGB888 first.
                imgWork->setChannels(3);
                imgWork->setSize(utils::Size(cloneFrame.width(), cloneFrame.height()));
                const QImage tmpImg(cloneFrame.bits(),
                                    cloneFrame.width(),
                                    cloneFrame.height(),
                                    QVideoFrame::imageFormatFromPixelFormat(cloneFrame.pixelFormat()));
                QVideoFrame tmp = QVideoFrame(tmpImg.convertToFormat(QImage::Format_RGB888));
                tmp.map(QAbstractVideoBuffer::ReadWrite);
                cloneFrame = tmp;
                core::interleavedToPlanar<uchar,icl8u>(cloneFrame.bits(), imgWork, cloneFrame.bytesPerLine());
            }
            if (useLocking.load()) {
                QMutexLocker waitLock(&waitMutex);
                if (!nextFrameReady) {
                    nextFrameReady = true;
                    waitCondition.wakeOne();
                }
            }
            nextFrameReady = true;
            lock.unlock();
            return true;
        }
        return false;
    }
Example #14
QAndroidSGVideoNode::~QAndroidSGVideoNode()
{
    m_frame = QVideoFrame();
}
Example #15
void VideoWidgetSurface::stop()
{
    currentFrame = QVideoFrame();
    QAbstractVideoSurface::stop();
}
Example #16
void tst_QCameraBackend::testCaptureToBuffer()
{
    QCamera camera;
    QCameraImageCapture imageCapture(&camera);
    camera.exposure()->setFlashMode(QCameraExposure::FlashOff);

    camera.load();

#ifdef Q_WS_MAEMO_6
    QVERIFY(imageCapture.isCaptureDestinationSupported(QCameraImageCapture::CaptureToBuffer));
#endif

    if (!imageCapture.isCaptureDestinationSupported(QCameraImageCapture::CaptureToBuffer))
        QSKIP("Buffer capture not supported");

    QTRY_COMPARE(camera.status(), QCamera::LoadedStatus);

    QCOMPARE(imageCapture.bufferFormat(), QVideoFrame::Format_Jpeg);

    QVERIFY(imageCapture.isCaptureDestinationSupported(QCameraImageCapture::CaptureToFile));
    QVERIFY(imageCapture.isCaptureDestinationSupported(QCameraImageCapture::CaptureToBuffer));
    QVERIFY(imageCapture.isCaptureDestinationSupported(
                QCameraImageCapture::CaptureToBuffer | QCameraImageCapture::CaptureToFile));

    QSignalSpy destinationChangedSignal(&imageCapture, SIGNAL(captureDestinationChanged(QCameraImageCapture::CaptureDestinations)));

    QCOMPARE(imageCapture.captureDestination(), QCameraImageCapture::CaptureToFile);
    imageCapture.setCaptureDestination(QCameraImageCapture::CaptureToBuffer);
    QCOMPARE(imageCapture.captureDestination(), QCameraImageCapture::CaptureToBuffer);
    QCOMPARE(destinationChangedSignal.size(), 1);
    QCOMPARE(destinationChangedSignal.first().first().value<QCameraImageCapture::CaptureDestinations>(),
             QCameraImageCapture::CaptureToBuffer);

    QSignalSpy capturedSignal(&imageCapture, SIGNAL(imageCaptured(int,QImage)));
    QSignalSpy imageAvailableSignal(&imageCapture, SIGNAL(imageAvailable(int,QVideoFrame)));
    QSignalSpy savedSignal(&imageCapture, SIGNAL(imageSaved(int,QString)));
    QSignalSpy errorSignal(&imageCapture, SIGNAL(error(int,QCameraImageCapture::Error,QString)));

    camera.start();
    QTRY_VERIFY(imageCapture.isReadyForCapture());

    int id = imageCapture.capture();
    QTRY_VERIFY(!imageAvailableSignal.isEmpty());

    QVERIFY(errorSignal.isEmpty());
    QVERIFY(!capturedSignal.isEmpty());
    QVERIFY(!imageAvailableSignal.isEmpty());

    QTest::qWait(2000);
    QVERIFY(savedSignal.isEmpty());

    QCOMPARE(capturedSignal.first().first().toInt(), id);
    QCOMPARE(imageAvailableSignal.first().first().toInt(), id);

    QVideoFrame frame = imageAvailableSignal.first().last().value<QVideoFrame>();
    QVERIFY(frame.isValid());
    QCOMPARE(frame.pixelFormat(), QVideoFrame::Format_Jpeg);
    QVERIFY(!frame.size().isEmpty());
    QVERIFY(frame.map(QAbstractVideoBuffer::ReadOnly));
    QByteArray data((const char *)frame.bits(), frame.mappedBytes());
    frame.unmap();
    frame = QVideoFrame();

    QVERIFY(!data.isEmpty());
    QBuffer buffer;
    buffer.setData(data);
    buffer.open(QIODevice::ReadOnly);
    QImageReader reader(&buffer, "JPG");
    reader.setScaledSize(QSize(640,480));
    QImage img(reader.read());
    QVERIFY(!img.isNull());

    capturedSignal.clear();
    imageAvailableSignal.clear();
    savedSignal.clear();

    //Capture to yuv buffer
#ifdef Q_WS_MAEMO_6
    QVERIFY(imageCapture.supportedBufferFormats().contains(QVideoFrame::Format_UYVY));
#endif

    if (imageCapture.supportedBufferFormats().contains(QVideoFrame::Format_UYVY)) {
        imageCapture.setBufferFormat(QVideoFrame::Format_UYVY);
        QCOMPARE(imageCapture.bufferFormat(), QVideoFrame::Format_UYVY);

        id = imageCapture.capture();
        QTRY_VERIFY(!imageAvailableSignal.isEmpty());

        QVERIFY(errorSignal.isEmpty());
        QVERIFY(!capturedSignal.isEmpty());
        QVERIFY(!imageAvailableSignal.isEmpty());
        QVERIFY(savedSignal.isEmpty());

        QTest::qWait(2000);
        QVERIFY(savedSignal.isEmpty());

        frame = imageAvailableSignal.first().last().value<QVideoFrame>();
        QVERIFY(frame.isValid());

        qDebug() << frame.pixelFormat();
        QCOMPARE(frame.pixelFormat(), QVideoFrame::Format_UYVY);
        QVERIFY(!frame.size().isEmpty());
        frame = QVideoFrame();

        capturedSignal.clear();
        imageAvailableSignal.clear();
        savedSignal.clear();

        imageCapture.setBufferFormat(QVideoFrame::Format_Jpeg);
        QCOMPARE(imageCapture.bufferFormat(), QVideoFrame::Format_Jpeg);
    }

    //Try to capture to both buffer and file
#ifdef Q_WS_MAEMO_6
    QVERIFY(imageCapture.isCaptureDestinationSupported(QCameraImageCapture::CaptureToBuffer | QCameraImageCapture::CaptureToFile));
#endif
    if (imageCapture.isCaptureDestinationSupported(QCameraImageCapture::CaptureToBuffer | QCameraImageCapture::CaptureToFile)) {
        imageCapture.setCaptureDestination(QCameraImageCapture::CaptureToBuffer | QCameraImageCapture::CaptureToFile);

        int oldId = id;
        id = imageCapture.capture();
        QVERIFY(id != oldId);
        QTRY_VERIFY(!savedSignal.isEmpty());

        QVERIFY(errorSignal.isEmpty());
        QVERIFY(!capturedSignal.isEmpty());
        QVERIFY(!imageAvailableSignal.isEmpty());
        QVERIFY(!savedSignal.isEmpty());

        QCOMPARE(capturedSignal.first().first().toInt(), id);
        QCOMPARE(imageAvailableSignal.first().first().toInt(), id);

        frame = imageAvailableSignal.first().last().value<QVideoFrame>();
        QVERIFY(frame.isValid());
        QCOMPARE(frame.pixelFormat(), QVideoFrame::Format_Jpeg);
        QVERIFY(!frame.size().isEmpty());

        QString fileName = savedSignal.first().last().toString();
        QVERIFY(QFileInfo(fileName).exists());
    }
}