// SIP-generated Python binding for
// QVideoSurfaceFormat::setProperty(const char *name, const QVariant &value).
// Parses (self, name: str, value: QVariant), forwards to the C++ method and
// returns None; raises TypeError via sipNoMethod() on bad arguments.
static PyObject *meth_QVideoSurfaceFormat_setProperty(PyObject *sipSelf, PyObject *sipArgs)
{
    PyObject *sipParseErr = NULL;

    {
        const char* a0;
        const QVariant* a1;
        int a1State = 0;   // records whether a1 is a converted temporary that must be released
        QVideoSurfaceFormat *sipCpp;

        // "BsJ1": bound C++ self, C string, wrapped QVariant (conversion allowed).
        if (sipParseArgs(&sipParseErr, sipArgs, "BsJ1", &sipSelf, sipType_QVideoSurfaceFormat, &sipCpp, &a0, sipType_QVariant, &a1, &a1State))
        {
            sipCpp->setProperty(a0,*a1);
            // Release any temporary QVariant created during argument conversion.
            sipReleaseType(const_cast<QVariant *>(a1),sipType_QVariant,a1State);

            Py_INCREF(Py_None);
            return Py_None;
        }
    }

    /* Raise an exception if the arguments couldn't be parsed. */
    sipNoMethod(sipParseErr, sipName_QVideoSurfaceFormat, sipName_setProperty, doc_QVideoSurfaceFormat_setProperty);

    return NULL;
}
// Accepts only CPU-memory (NoHandle) frames whose pixel format the raster
// engine can paint and whose frame size is non-empty.
bool QVideoSurfaceRasterPainter::isFormatSupported(
        const QVideoSurfaceFormat &format, QVideoSurfaceFormat *) const
{
    if (format.handleType() != QAbstractVideoBuffer::NoHandle)
        return false;
    if (!m_imagePixelFormats.contains(format.pixelFormat()))
        return false;
    return !format.frameSize().isEmpty();
}
bool myVideoPlayer::presentImage(const QImage &image) { QVideoFrame frame(image); if (!frame.isValid()) return false; QVideoSurfaceFormat currentFormat = surface->surfaceFormat(); if (frame.pixelFormat() != currentFormat.pixelFormat() || frame.size() != currentFormat.frameSize()) { QVideoSurfaceFormat format(frame.size(), frame.pixelFormat()); if (!surface->start(format)) return false; } if (!surface->present(frame)) { surface->stop(); return false; } else { return true; } }
int DirectShowMediaType::bytesPerLine(const QVideoSurfaceFormat &format) { switch (format.pixelFormat()) { // 32 bpp packed formats. case QVideoFrame::Format_RGB32: case QVideoFrame::Format_AYUV444: return format.frameWidth() * 4; // 24 bpp packed formats. case QVideoFrame::Format_RGB24: return PAD_TO_DWORD(format.frameWidth() * 3); // 16 bpp packed formats. case QVideoFrame::Format_RGB565: case QVideoFrame::Format_RGB555: case QVideoFrame::Format_YUYV: case QVideoFrame::Format_UYVY: return PAD_TO_DWORD(format.frameWidth() * 2); // Planar formats. case QVideoFrame::Format_IMC1: case QVideoFrame::Format_IMC2: case QVideoFrame::Format_IMC3: case QVideoFrame::Format_IMC4: case QVideoFrame::Format_YV12: case QVideoFrame::Format_NV12: case QVideoFrame::Format_YUV420P: return PAD_TO_DWORD(format.frameWidth()); default: return 0; } }
// Starts the surface only when the requested pixel format is among those we
// support for the given handle type; otherwise logs the rejection and fails.
bool VideoSurface_ForQQuickItem::start(const QVideoSurfaceFormat& format)
{
    const bool supported =
            supportedPixelFormats(format.handleType()).contains(format.pixelFormat());

    if (!supported) {
        qDebug() << format.handleType() << " " << format.pixelFormat()
                 << " - format is not supported.";
        return false;
    }

    return QAbstractVideoSurface::start(format);
}
bool VideoWidgetSurface::isFormatSupported(const QVideoSurfaceFormat &format, QVideoSurfaceFormat *similar) const { Q_UNUSED(similar); const QImage::Format imageFormat = QVideoFrame::imageFormatFromPixelFormat(format.pixelFormat()); const QSize size = format.frameSize(); return imageFormat != QImage::Format_Invalid && !size.isEmpty() && format.handleType() == QAbstractVideoBuffer::NoHandle; }
QAbstractVideoSurface::Error QVideoSurfaceRasterPainter::start(const QVideoSurfaceFormat &format) { m_frame = QVideoFrame(); m_imageFormat = QVideoFrame::imageFormatFromPixelFormat(format.pixelFormat()); m_imageSize = format.frameSize(); m_scanLineDirection = format.scanLineDirection(); return format.handleType() == QAbstractVideoBuffer::NoHandle && m_imageFormat != QImage::Format_Invalid && !m_imageSize.isEmpty() ? QAbstractVideoSurface::NoError : QAbstractVideoSurface::UnsupportedFormatError; }
// Overridden from QAbstractVideoSurface virtual bool start( const QVideoSurfaceFormat &format ) { m_mutex.lock(); m_flipped = ( format.scanLineDirection() == QVideoSurfaceFormat::BottomToTop ); m_frameFormat = QVideoFrame::imageFormatFromPixelFormat( format.pixelFormat() ); m_viewport = format.viewport(); m_valid = 1; // We want to unlock it before calling the parent function, which may call present() and deadlock m_mutex.unlock(); return QAbstractVideoSurface::start( format ); }
// Starts the surface for the given format.
// Records the format, derives the matching QImage format, and starts the
// base surface. Returns false when the pixel format has no QImage mapping,
// the frame size is empty, or the base-class start fails.
bool MyVideoSurface::start(const QVideoSurfaceFormat &format)
{
    m_videoFormat = format;
    const QImage::Format imageFormat =
            QVideoFrame::imageFormatFromPixelFormat(format.pixelFormat());
    const QSize size = format.frameSize();

    if (imageFormat != QImage::Format_Invalid && !size.isEmpty()) {
        m_imageFormat = imageFormat;
        // Fix: propagate the base-class result instead of unconditionally
        // returning true - QAbstractVideoSurface::start() reports whether
        // the surface actually started.
        return QAbstractVideoSurface::start(format);
    }
    return false;
}
// Starts the surface for a supported format: caches the QImage format and
// frame size, starts the base surface and schedules a geometry update.
bool VideoItem::start(const QVideoSurfaceFormat &format)
{
    if (!isFormatSupported(format))
        return false;

    imageFormat = QVideoFrame::imageFormatFromPixelFormat(format.pixelFormat());
    imageSize = format.frameSize();
    framePainted = true;

    QAbstractVideoSurface::start(format);
    prepareGeometryChange();
    return true;
}
// Starts the video surface with the given format, provided a surface exists
// and the format is valid. An already-active surface is a programming error.
void QmlVlcMmVideoOutput::initVideoSurface( const QVideoSurfaceFormat& format )
{
    assert( !m_videoSurface || !m_videoSurface->isActive() );

    const bool canStart = m_videoSurface && format.isValid();
    if( canStart )
        m_videoSurface->start( format );
}
bool AndroidVideoSurface::start(const QVideoSurfaceFormat &format) { const QImage::Format m_imageFormat = QVideoFrame::imageFormatFromPixelFormat(format.pixelFormat()); const QSize size = format.frameSize(); if (m_imageFormat != QImage::Format_Invalid && !size.isEmpty()) { this->m_imageFormat = m_imageFormat; m_imageSize = size; m_sourceRect = format.viewport(); emit(surfaceStarted(format)); m_widget->updateGeometry(); updateVideoRect(); return true; } else { return false; } }
// Scene-graph material for YUV video frames. The constructor derives the
// number of texture planes from the pixel format and selects a
// YCbCr -> RGB conversion matrix from the format's color space.
QSGVideoMaterial_YUV::QSGVideoMaterial_YUV(const QVideoSurfaceFormat &format) :
    m_format(format),
    m_opacity(1.0)
{
    memset(m_textureIds, 0, sizeof(m_textureIds));

    // NV12/NV21 carry interleaved chroma in a second plane; YUV420P/YV12
    // keep Y, U and V in three separate planes. Anything else: one plane.
    switch (format.pixelFormat()) {
    case QVideoFrame::Format_NV12:
    case QVideoFrame::Format_NV21:
        m_planeCount = 2;
        break;
    case QVideoFrame::Format_YUV420P:
    case QVideoFrame::Format_YV12:
        m_planeCount = 3;
        break;
    default:
        m_planeCount = 1;
        break;
    }

    // 4x4 conversion matrices; the last column holds the offset terms so the
    // shader can apply the transform as a single matrix multiply.
    switch (format.yCbCrColorSpace()) {
    case QVideoSurfaceFormat::YCbCr_JPEG:
        // Full-range (JPEG) YCbCr.
        m_colorMatrix = QMatrix4x4(
                    1.0f,  0.000f,  1.402f, -0.701f,
                    1.0f, -0.344f, -0.714f,  0.529f,
                    1.0f,  1.772f,  0.000f, -0.886f,
                    0.0f,  0.000f,  0.000f,  1.0000f);
        break;
    case QVideoSurfaceFormat::YCbCr_BT709:
    case QVideoSurfaceFormat::YCbCr_xvYCC709:
        // HD (BT.709) coefficients.
        m_colorMatrix = QMatrix4x4(
                    1.164f,  0.000f,  1.793f, -0.5727f,
                    1.164f, -0.534f, -0.213f,  0.3007f,
                    1.164f,  2.115f,  0.000f, -1.1302f,
                    0.0f,    0.000f,  0.000f,  1.0000f);
        break;
    default: //BT 601:
        // SD (BT.601) coefficients - used for all other color spaces.
        m_colorMatrix = QMatrix4x4(
                    1.164f,  0.000f,  1.596f, -0.8708f,
                    1.164f, -0.392f, -0.813f,  0.5296f,
                    1.164f,  2.017f,  0.000f, -1.081f,
                    0.0f,    0.000f,  0.000f,  1.0000f);
    }

    setFlag(Blending, false);   // opaque video; blending handled elsewhere via m_opacity
}
// A surface format is usable by this pool only when it carries the three
// dynamic properties published by QX11VideoSurface::start(): a parseable
// "portId", a non-negative "xvFormatId" and a positive "dataSize".
bool QGstXvImageBufferPool::isFormatSupported(const QVideoSurfaceFormat &surfaceFormat) const
{
    bool ok = true;

    // Only the parse success matters here; the port id value is unused.
    surfaceFormat.property("portId").toULongLong(&ok);
    if (!ok)
        return false;

    const int xvFormatId = surfaceFormat.property("xvFormatId").toInt(&ok);
    if (!ok || xvFormatId < 0)
        return false;

    const int dataSize = surfaceFormat.property("dataSize").toInt(&ok);
    return ok && dataSize > 0;
}
// Starts the surface when the frame size is non-empty: records the image
// size and viewport, starts the base surface and refreshes the widget.
// Returns false for an empty frame size.
bool AVL_QT_DLL_EXPORT WidgetSurface::start(const QVideoSurfaceFormat & format)
{
    const QSize size = format.frameSize();

    // Idiom fix: '!isEmpty()' instead of 'isEmpty() == false'.
    if (size.isEmpty())
        return false;

    imageSize.set(size.width(), size.height());
    sourceRect = format.viewport();
    QAbstractVideoSurface::start(format);
    widget->updateGeometry();
    updateVideoRect();
    return true;
}
// Reacts to a surface format change: adopts the new native size, then
// recomputes the display rectangles and refreshes the widget's layout and
// painting.
void QRendererVideoWidgetBackend::formatChanged(const QVideoSurfaceFormat &format)
{
    m_nativeSize = format.sizeHint();

    updateRects();
    m_widget->updateGeometry();
    m_widget->update();
}
// Starts the surface for the given format.
// NOTE(review): when the surface is already active this stops it and returns
// false WITHOUT restarting with the new format - the caller must invoke
// start() a second time. Confirm this two-step restart is intentional.
bool VideoSurface::start(const QVideoSurfaceFormat &format)
{
    if (isActive()) {
        stop();
    } else if (!format.frameSize().isEmpty()) {
        // Only start for a non-empty frame size.
        return QAbstractVideoSurface::start(format);
    }
    return false;
}
bool MyVideoSurface::start(const QVideoSurfaceFormat &format) { QImage::Format imageFormat = QVideoFrame::imageFormatFromPixelFormat(format.pixelFormat()); if (imageFormat == QImage::Format_Invalid) imageFormat = QImage::Format_RGB32; const QSize frameSize = format.frameSize(); if ((imageFormat != QImage::Format_Invalid) && !frameSize.isEmpty()) { QAbstractVideoSurface::start(format); emit frameSizeChanged(frameSize); return true; } return false; }
// Starts the surface; only the YV12 pixel format is accepted for now.
// (The previous comment said UYVY, but the code checks Format_YV12.)
bool VideoSurface::start(const QVideoSurfaceFormat &format)
{
    mVideoFormat = format;
    // Start only if the format is YV12; other formats are not handled yet.
    if( format.pixelFormat() == QVideoFrame::Format_YV12 ){
        QAbstractVideoSurface::start(format);
        return true;
    } else {
        return false;
    }
}
//! [2] bool VideoWidgetSurface::start(const QVideoSurfaceFormat &format) { const QImage::Format imageFormat = QVideoFrame::imageFormatFromPixelFormat(format.pixelFormat()); const QSize size = format.frameSize(); if (imageFormat != QImage::Format_Invalid && !size.isEmpty()) { this->imageFormat = imageFormat; imageSize = size; sourceRect = format.viewport(); QAbstractVideoSurface::start(format); widget->updateGeometry(); updateVideoRect(); return true; } else { return false; } }
bool ChilitagsSurface::start(const QVideoSurfaceFormat &format) { QVideoSurfaceFormat outputFormat(format.frameSize(), QVideoFrame::Format_ARGB32); //if (m_videoSurface) { // qDebug() << m_videoSurface->supportedPixelFormats(); // qDebug() << m_videoSurface->isFormatSupported(outputFormat); // qDebug() << outputFormat; //} if (m_videoSurface) return m_videoSurface->start(outputFormat) && QAbstractVideoSurface::start(format); return QAbstractVideoSurface::start(format); }
// SIP-generated Python binding for
// QVideoSurfaceFormat::setYCbCrColorSpace(YCbCrColorSpace).
// Parses (self, colorSpace: enum), forwards to the C++ method, returns None.
static PyObject *meth_QVideoSurfaceFormat_setYCbCrColorSpace(PyObject *sipSelf, PyObject *sipArgs)
{
    PyObject *sipParseErr = NULL;

    {
        QVideoSurfaceFormat::YCbCrColorSpace a0;
        QVideoSurfaceFormat *sipCpp;

        // "BE": bound C++ self plus a wrapped enum value.
        if (sipParseArgs(&sipParseErr, sipArgs, "BE", &sipSelf, sipType_QVideoSurfaceFormat, &sipCpp, sipType_QVideoSurfaceFormat_YCbCrColorSpace, &a0))
        {
            sipCpp->setYCbCrColorSpace(a0);

            Py_INCREF(Py_None);
            return Py_None;
        }
    }

    /* Raise an exception if the arguments couldn't be parsed. */
    sipNoMethod(sipParseErr, sipName_QVideoSurfaceFormat, sipName_setYCbCrColorSpace, doc_QVideoSurfaceFormat_setYCbCrColorSpace);

    return NULL;
}
// SIP-generated Python binding for QVideoSurfaceFormat::setViewport(const QRect &).
// Parses (self, rect: QRect), forwards to the C++ method, returns None.
static PyObject *meth_QVideoSurfaceFormat_setViewport(PyObject *sipSelf, PyObject *sipArgs)
{
    PyObject *sipParseErr = NULL;

    {
        const QRect* a0;
        QVideoSurfaceFormat *sipCpp;

        // "BJ9": bound C++ self plus a wrapped QRect (no transfer of ownership).
        if (sipParseArgs(&sipParseErr, sipArgs, "BJ9", &sipSelf, sipType_QVideoSurfaceFormat, &sipCpp, sipType_QRect, &a0))
        {
            sipCpp->setViewport(*a0);

            Py_INCREF(Py_None);
            return Py_None;
        }
    }

    /* Raise an exception if the arguments couldn't be parsed. */
    sipNoMethod(sipParseErr, sipName_QVideoSurfaceFormat, sipName_setViewport, doc_QVideoSurfaceFormat_setViewport);

    return NULL;
}
// Streams a human-readable description of the format, including any dynamic
// properties, to a QDebug stream.
QDebug operator<<(QDebug dbg, const QVideoSurfaceFormat &f)
{
    dbg.nospace() << "QVideoSurfaceFormat(" << f.pixelFormat()
                  << ", " << f.frameSize()
                  << ", viewport=" << f.viewport()
                  << ", pixelAspectRatio=" << f.pixelAspectRatio()
                  << ", handleType=" << f.handleType()
                  << ", yCbCrColorSpace=" << f.yCbCrColorSpace()
                  << ")";

    // Dynamic properties, one per line.
    foreach (const QByteArray &propertyName, f.propertyNames())
        dbg << "\n    " << propertyName.data()
            << " = " << f.property(propertyName.data());

    return dbg.space();
}
// Starts the XVideo surface: looks up the Xv format id matching the requested
// pixel format, allocates an XvImage of the frame size, and republishes the
// port id, Xv format id and buffer size as dynamic properties on the format
// handed to the base class (downstream consumers read them back).
bool QX11VideoSurface::start(const QVideoSurfaceFormat &format)
{
    // Free any image left from a previous start(). Note m_image is NOT nulled
    // here: the failure path at the bottom relies on the stale pointer to
    // detect that the surface had previously been started.
    if (m_image)
        XFree(m_image);

    // Map the requested pixel format onto the X server's Xv format id.
    int xvFormatId = 0;
    for (int i = 0; i < m_supportedPixelFormats.count(); ++i) {
        if (m_supportedPixelFormats.at(i) == format.pixelFormat()) {
            xvFormatId = m_formatIds.at(i);
            break;
        }
    }

    if (xvFormatId == 0) {
        setError(UnsupportedFormatError);
    } else {
        XvImage *image = XvCreateImage(
                QX11Info::display(),
                m_portId,
                xvFormatId,
                0,
                format.frameWidth(),
                format.frameHeight());

        if (!image) {
            setError(ResourceError);
        } else {
            m_viewport = format.viewport();
            m_image = image;

            // Attach port/format/buffer-size information for downstream code.
            QVideoSurfaceFormat newFormat = format;
            newFormat.setProperty("portId", QVariant(quint64(m_portId)));
            newFormat.setProperty("xvFormatId", xvFormatId);
            newFormat.setProperty("dataSize", image->data_size);

            return QAbstractVideoSurface::start(newFormat);
        }
    }

    // Failure path: if a previous session had started the surface, clear the
    // (already freed) image pointer and stop the base surface.
    if (m_image) {
        m_image = 0;
        QAbstractVideoSurface::stop();
    }
    return false;
}
// SIP-generated Python binding for the two overloads of
// QVideoSurfaceFormat::setPixelAspectRatio: (const QSize &) and (int, int).
// Tries each overload in turn; returns None on success.
static PyObject *meth_QVideoSurfaceFormat_setPixelAspectRatio(PyObject *sipSelf, PyObject *sipArgs)
{
    PyObject *sipParseErr = NULL;

    // Overload 1: setPixelAspectRatio(const QSize &).
    {
        const QSize* a0;
        QVideoSurfaceFormat *sipCpp;

        if (sipParseArgs(&sipParseErr, sipArgs, "BJ9", &sipSelf, sipType_QVideoSurfaceFormat, &sipCpp, sipType_QSize, &a0))
        {
            sipCpp->setPixelAspectRatio(*a0);

            Py_INCREF(Py_None);
            return Py_None;
        }
    }

    // Overload 2: setPixelAspectRatio(int horizontal, int vertical).
    {
        int a0;
        int a1;
        QVideoSurfaceFormat *sipCpp;

        if (sipParseArgs(&sipParseErr, sipArgs, "Bii", &sipSelf, sipType_QVideoSurfaceFormat, &sipCpp, &a0, &a1))
        {
            sipCpp->setPixelAspectRatio(a0,a1);

            Py_INCREF(Py_None);
            return Py_None;
        }
    }

    /* Raise an exception if the arguments couldn't be parsed. */
    sipNoMethod(sipParseErr, sipName_QVideoSurfaceFormat, sipName_setPixelAspectRatio, doc_QVideoSurfaceFormat_setPixelAspectRatio);

    return NULL;
}
/*!
    \since 1.2
*/
// Starts the surface. Formats that are not EGLImageTextureHandle based are
// delegated to a QPainter-based fallback surface whose signals are forwarded
// as this surface's own.
// NOTE(review): in this excerpt the brace opened for the non-EGL branch is
// never closed and no EGL-handle path is visible - the remainder of this
// function appears to lie outside the excerpt; tokens kept byte-identical.
bool QEglImageTextureSurface::start(const QVideoSurfaceFormat &format)
{
#ifdef DEBUG_OMAPFB_SURFACE
    qDebug() << Q_FUNC_INFO << format;
#endif
    m_fallbackSurfaceActive = false;
    if (format.handleType() != EGLImageTextureHandle) {
        qWarning() << Q_FUNC_INFO
                << "Non EGLImageTextureHandle based format requested, fallback to QPainterVideoSurface";

        // Forward the fallback surface's state changes and signals as ours.
        connect(m_fallbackSurface, SIGNAL(activeChanged(bool)),
                this, SIGNAL(activeChanged(bool)));
        connect(m_fallbackSurface, SIGNAL(surfaceFormatChanged(QVideoSurfaceFormat)),
                this, SIGNAL(surfaceFormatChanged(QVideoSurfaceFormat)));
        connect(m_fallbackSurface, SIGNAL(supportedFormatsChanged()),
                this, SIGNAL(supportedFormatsChanged()));
        connect(m_fallbackSurface, SIGNAL(nativeResolutionChanged(QSize)),
                this, SIGNAL(nativeResolutionChanged(QSize)));
        connect(m_fallbackSurface, SIGNAL(frameChanged()),
                this, SIGNAL(frameChanged()));

        if (m_fallbackSurface->start(format)) {
            m_fallbackSurfaceActive = true;
            QAbstractVideoSurface::start(format);
        } else {
            qWarning() << Q_FUNC_INFO << "failed to start video surface:"
                    << m_fallbackSurface->error();
            setError(m_fallbackSurface->error());

            // Undo the signal forwarding when the fallback fails to start.
            disconnect(m_fallbackSurface, SIGNAL(activeChanged(bool)),
                    this, SIGNAL(activeChanged(bool)));
            disconnect(m_fallbackSurface, SIGNAL(surfaceFormatChanged(QVideoSurfaceFormat)),
                    this, SIGNAL(surfaceFormatChanged(QVideoSurfaceFormat)));
            disconnect(m_fallbackSurface, SIGNAL(supportedFormatsChanged()),
                    this, SIGNAL(supportedFormatsChanged()));
            disconnect(m_fallbackSurface, SIGNAL(nativeResolutionChanged(QSize)),
                    this, SIGNAL(nativeResolutionChanged(QSize)));
            disconnect(m_fallbackSurface, SIGNAL(frameChanged()),
                    this, SIGNAL(frameChanged()));
        }

    return m_fallbackSurfaceActive;
}
/* Test case for api isValid */
void tst_QVideoSurfaceFormat::isValid()
{
    // Default-constructed format: neither pixel format nor frame size is set.
    QVideoSurfaceFormat format;
    QVERIFY(!format.isValid());

    // A valid frame size alone is not sufficient.
    format.setFrameSize(64, 64);
    QVERIFY(format.frameSize() == QSize(64, 64));
    QVERIFY(!format.pixelFormat());
    QVERIFY(!format.isValid());

    // Both pixel format and frame size valid -> format is valid.
    QVideoSurfaceFormat format1(QSize(32, 32), QVideoFrame::Format_AYUV444);
    QVERIFY(format1.isValid());

    // A valid pixel format with an invalid frame size is not valid.
    format1.setFrameSize(-1, -1);
    QVERIFY(!format1.frameSize().isValid());
    QVERIFY(!format1.isValid());
}
bool QAbstractVideoSurface::isFormatSupported(const QVideoSurfaceFormat &format) const { return supportedPixelFormats(format.handleType()).contains(format.pixelFormat()); }
bool CameraFormatProxy::isFormatSupported(const QVideoSurfaceFormat &format) const { return QVideoFrame::imageFormatFromPixelFormat(format.pixelFormat())!=QImage::Format_Invalid; }