QVideoSurfaceFormat MFTransform::videoFormatForMFMediaType(IMFMediaType *mediaType, int *bytesPerLine)
{
    UINT32 stride;
    if (FAILED(mediaType->GetUINT32(MF_MT_DEFAULT_STRIDE, &stride))) {
        *bytesPerLine = 0;
        return QVideoSurfaceFormat();
    }
    *bytesPerLine = (int)stride;

    QSize size;
    UINT32 width, height;
    if (FAILED(MFGetAttributeSize(mediaType, MF_MT_FRAME_SIZE, &width, &height)))
        return QVideoSurfaceFormat();
    size.setWidth(width);
    size.setHeight(height);

    GUID subtype = GUID_NULL;
    if (FAILED(mediaType->GetGUID(MF_MT_SUBTYPE, &subtype)))
        return QVideoSurfaceFormat();

    QVideoFrame::PixelFormat pixelFormat = formatFromSubtype(subtype);
    QVideoSurfaceFormat format(size, pixelFormat);

    UINT32 num, den;
    if (SUCCEEDED(MFGetAttributeRatio(mediaType, MF_MT_PIXEL_ASPECT_RATIO, &num, &den)))
        format.setPixelAspectRatio(num, den);
    if (SUCCEEDED(MFGetAttributeRatio(mediaType, MF_MT_FRAME_RATE, &num, &den)))
        format.setFrameRate(qreal(num) / den);

    return format;
}
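// Hedged sketch: formatFromSubtype() is called above but not shown in this excerpt.
// A plausible implementation maps a few common Media Foundation subtype GUIDs
// (from <mfapi.h>) to QVideoFrame pixel formats; the exact set of subtypes handled
// by the original backend is an assumption.
static QVideoFrame::PixelFormat formatFromSubtype(const GUID &subtype)
{
    if (subtype == MFVideoFormat_ARGB32)
        return QVideoFrame::Format_ARGB32;
    if (subtype == MFVideoFormat_RGB32)
        return QVideoFrame::Format_RGB32;
    if (subtype == MFVideoFormat_RGB24)
        return QVideoFrame::Format_RGB24;
    if (subtype == MFVideoFormat_YV12)
        return QVideoFrame::Format_YV12;
    if (subtype == MFVideoFormat_NV12)
        return QVideoFrame::Format_NV12;
    if (subtype == MFVideoFormat_YUY2)
        return QVideoFrame::Format_YUYV;
    return QVideoFrame::Format_Invalid;
}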
void MmRendererPlayerVideoRendererControl::updateScene(const QSize &size)
{
    if (m_surface) {
        if (!m_surface->isActive()) {
            if (m_windowGrabber->eglImageSupported()) {
                m_surface->start(QVideoSurfaceFormat(size, QVideoFrame::Format_BGR32,
                                                     QAbstractVideoBuffer::GLTextureHandle));
            } else {
                m_surface->start(QVideoSurfaceFormat(size, QVideoFrame::Format_ARGB32));
            }
        } else if (m_surface->surfaceFormat().frameSize() != size) {
            m_surface->stop();
            if (m_windowGrabber->eglImageSupported()) {
                m_surface->start(QVideoSurfaceFormat(size, QVideoFrame::Format_BGR32,
                                                     QAbstractVideoBuffer::GLTextureHandle));
            } else {
                m_surface->start(QVideoSurfaceFormat(size, QVideoFrame::Format_ARGB32));
            }
        }

        // Depending on the support of EGL images on the current platform we either pass a
        // texture handle or a copy of the image data
        if (m_windowGrabber->eglImageSupported()) {
            QnxTextureBuffer *textBuffer = new QnxTextureBuffer(m_windowGrabber);
            QVideoFrame actualFrame(textBuffer, size, QVideoFrame::Format_BGR32);
            m_surface->present(actualFrame);
        } else {
            m_surface->present(m_windowGrabber->getNextImage().copy());
        }
    }
}
QAbstractVideoSurface::QAbstractVideoSurface(QObject *parent)
    : QObject(parent)
{
    setProperty("_q_surfaceFormat", QVariant::fromValue(QVideoSurfaceFormat()));
    setProperty("_q_active", false);
    setProperty("_q_error", QVariant::fromValue(QAbstractVideoSurface::NoError));
    setProperty("_q_nativeResolution", QSize());
}
void MmRendererPlayerVideoRendererControl::frameGrabbed(const QImage &frame, int handle)
{
    if (m_surface) {
        if (!m_surface->isActive()) {
            if (m_windowGrabber->eglImageSupported()) {
                if (QOpenGLContext::currentContext())
                    m_windowGrabber->createEglImages();
                else
                    m_surface->setProperty("_q_GLThreadCallback", QVariant::fromValue<QObject*>(this));
                m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoFrame::Format_BGR32,
                                                     QAbstractVideoBuffer::GLTextureHandle));
            } else {
                m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoFrame::Format_ARGB32));
            }
        } else if (m_surface->surfaceFormat().frameSize() != frame.size()) {
            QAbstractVideoBuffer::HandleType type = m_surface->surfaceFormat().handleType();
            m_surface->stop();
            if (type != QAbstractVideoBuffer::NoHandle) {
                m_surface->setProperty("_q_GLThreadCallback", QVariant::fromValue<QObject*>(this));
                m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoFrame::Format_BGR32,
                                                     QAbstractVideoBuffer::GLTextureHandle));
            } else {
                m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoFrame::Format_ARGB32));
            }
        }

        // Depending on the support of EGL images on the current platform we either pass a
        // texture handle or a copy of the image data
        if (m_surface->surfaceFormat().handleType() != QAbstractVideoBuffer::NoHandle) {
            if (m_windowGrabber->eglImagesInitialized()
                    && m_surface->property("_q_GLThreadCallback") != 0)
                m_surface->setProperty("_q_GLThreadCallback", 0);

            BBTextureBuffer *textBuffer = new BBTextureBuffer(handle);
            QVideoFrame actualFrame(textBuffer, frame.size(), QVideoFrame::Format_BGR32);
            m_surface->present(actualFrame);
        } else {
            m_surface->present(frame.copy());
        }
    }
}
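// Hedged sketch: BBTextureBuffer is constructed above from an integer texture handle
// and presented with QAbstractVideoBuffer::GLTextureHandle. A minimal buffer class
// consistent with that usage could look like this; the real implementation in the
// BlackBerry/QNX plugin is not shown here, so the details are assumptions.
class BBTextureBuffer : public QAbstractVideoBuffer
{
public:
    explicit BBTextureBuffer(int handle)
        : QAbstractVideoBuffer(QAbstractVideoBuffer::GLTextureHandle)
        , m_handle(handle)
    {
    }

    MapMode mapMode() const override { return NotMapped; }

    uchar *map(MapMode, int *numBytes, int *bytesPerLine) override
    {
        // The pixel data lives in a GL texture, so it cannot be mapped to CPU memory.
        if (numBytes)
            *numBytes = 0;
        if (bytesPerLine)
            *bytesPerLine = 0;
        return nullptr;
    }

    void unmap() override {}

    QVariant handle() const override { return m_handle; }

private:
    int m_handle;
};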
void QAbstractVideoSurface::stop()
{
    if (property("_q_active").toBool()) {
        setProperty("_q_format", QVariant::fromValue(QVideoSurfaceFormat()));
        setProperty("_q_active", false);

        emit activeChanged(false);
        emit surfaceFormatChanged(surfaceFormat());
    }
}
QVideoSurfaceFormat DirectShowMediaType::formatFromType(const AM_MEDIA_TYPE &type)
{
    // UWEBKIT
    // This patched path assumes an RGB32 VIDEOINFOHEADER and returns immediately,
    // so the generic lookup below is unreachable while the patch is active.
    VIDEOINFOHEADER *uwkheader = reinterpret_cast<VIDEOINFOHEADER *>(type.pbFormat);
    QVideoSurfaceFormat uwkformat(
            QSize(uwkheader->bmiHeader.biWidth, qAbs(uwkheader->bmiHeader.biHeight)),
            QVideoFrame::Format_RGB32);
    if (uwkheader->AvgTimePerFrame > 0) // AvgTimePerFrame is in 100 ns units
        uwkformat.setFrameRate(10000000.0 / uwkheader->AvgTimePerFrame);
    uwkformat.setScanLineDirection(scanLineDirection(uwkformat.pixelFormat(), uwkheader->bmiHeader));
    return uwkformat;
    // END UWEBKIT

    const int count = sizeof(qt_typeLookup) / sizeof(TypeLookup);

    for (int i = 0; i < count; ++i) {
        if (IsEqualGUID(qt_typeLookup[i].mediaType, type.subtype) && type.cbFormat > 0) {
            if (IsEqualGUID(type.formattype, FORMAT_VideoInfo)) {
                VIDEOINFOHEADER *header = reinterpret_cast<VIDEOINFOHEADER *>(type.pbFormat);
                QVideoSurfaceFormat format(
                        QSize(header->bmiHeader.biWidth, qAbs(header->bmiHeader.biHeight)),
                        qt_typeLookup[i].pixelFormat);
                if (header->AvgTimePerFrame > 0) // 100 ns units
                    format.setFrameRate(10000000.0 / header->AvgTimePerFrame);
                format.setScanLineDirection(scanLineDirection(format.pixelFormat(), header->bmiHeader));
                return format;
            } else if (IsEqualGUID(type.formattype, FORMAT_VideoInfo2)) {
                VIDEOINFOHEADER2 *header = reinterpret_cast<VIDEOINFOHEADER2 *>(type.pbFormat);
                QVideoSurfaceFormat format(
                        QSize(header->bmiHeader.biWidth, qAbs(header->bmiHeader.biHeight)),
                        qt_typeLookup[i].pixelFormat);
                if (header->AvgTimePerFrame > 0) // 100 ns units
                    format.setFrameRate(10000000.0 / header->AvgTimePerFrame);
                format.setScanLineDirection(scanLineDirection(format.pixelFormat(), header->bmiHeader));
                return format;
            }
        }
    }
    return QVideoSurfaceFormat();
}
/*!
    Stops a video surface presenting frames and releases any resources acquired
    in start().

    \note You must call the base class implementation of stop() at the start of
    your implementation.

    \sa isActive(), start()
*/
void QAbstractVideoSurface::stop()
{
    Q_D(QAbstractVideoSurface);

    if (d->active) {
        d->surfaceFormat = QVideoSurfaceFormat();
        d->active = false;

        emit activeChanged(false);
        emit surfaceFormatChanged(surfaceFormat());
    }
}
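// Hedged usage sketch: a derived surface honoring the note above by calling the base
// class implementation at the start of its own stop(). The class and member names are
// illustrative, not from the original source.
class FrameHoldingSurface : public QAbstractVideoSurface
{
public:
    QList<QVideoFrame::PixelFormat> supportedPixelFormats(
            QAbstractVideoBuffer::HandleType type = QAbstractVideoBuffer::NoHandle) const override
    {
        return type == QAbstractVideoBuffer::NoHandle
                ? QList<QVideoFrame::PixelFormat>() << QVideoFrame::Format_RGB32
                : QList<QVideoFrame::PixelFormat>();
    }

    bool present(const QVideoFrame &frame) override
    {
        m_currentFrame = frame;
        return true;
    }

    void stop() override
    {
        QAbstractVideoSurface::stop();   // base implementation first, as the note requires
        m_currentFrame = QVideoFrame();  // then release the surface's own resources
    }

private:
    QVideoFrame m_currentFrame;
};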
void AVL_QT_DLL_EXPORT WidgetSurface::renderFrame(Image::Image<Pixel::PixelRGBi1u> *frame, const I8u &frameIndex)
{
    if (currentFrame != nullptr)
        delete currentFrame;
    currentFrame = frame;
    currentQFrame = QImage(static_cast<uchar *>(static_cast<void *>(currentFrame->getDataPtr())),
                           currentFrame->getWidth(), currentFrame->getHeight(),
                           currentFrame->getWidth() * 3, QImage::Format_RGB888);

    if (imageSize != frame->getSize()) {
        if (!start(QVideoSurfaceFormat(QSize(frame->getWidth(), frame->getHeight()),
                                       QVideoFrame::pixelFormatFromImageFormat(QImage::Format_RGB888)))) {
            // TODO: ERROR
            return;
        }
        widget->repaint();
    } else {
        widget->repaint(targetRect);
    }

    emit frameChanged(frameIndex);
}
QVideoSurfaceFormat DirectShowMediaType::formatFromType(const AM_MEDIA_TYPE &type)
{
    const int count = sizeof(qt_typeLookup) / sizeof(TypeLookup);

    for (int i = 0; i < count; ++i) {
        if (IsEqualGUID(qt_typeLookup[i].mediaType, type.subtype) && type.cbFormat > 0) {
            if (IsEqualGUID(type.formattype, FORMAT_VideoInfo)) {
                VIDEOINFOHEADER *header = reinterpret_cast<VIDEOINFOHEADER *>(type.pbFormat);
                QVideoSurfaceFormat format(
                        QSize(header->bmiHeader.biWidth, qAbs(header->bmiHeader.biHeight)),
                        qt_typeLookup[i].pixelFormat);
                if (header->AvgTimePerFrame > 0) // 100 ns units
                    format.setFrameRate(10000000.0 / header->AvgTimePerFrame);
                format.setScanLineDirection(header->bmiHeader.biHeight < 0
                        ? QVideoSurfaceFormat::TopToBottom
                        : QVideoSurfaceFormat::BottomToTop);
                return format;
            } else if (IsEqualGUID(type.formattype, FORMAT_VideoInfo2)) {
                VIDEOINFOHEADER2 *header = reinterpret_cast<VIDEOINFOHEADER2 *>(type.pbFormat);
                QVideoSurfaceFormat format(
                        QSize(header->bmiHeader.biWidth, qAbs(header->bmiHeader.biHeight)),
                        qt_typeLookup[i].pixelFormat);
                if (header->AvgTimePerFrame > 0) // 100 ns units
                    format.setFrameRate(10000000.0 / header->AvgTimePerFrame);
                format.setScanLineDirection(header->bmiHeader.biHeight < 0
                        ? QVideoSurfaceFormat::TopToBottom
                        : QVideoSurfaceFormat::BottomToTop);
                return format;
            }
        }
    }
    return QVideoSurfaceFormat();
}
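// Hedged sketch: the other variant of formatFromType() above calls a scanLineDirection()
// helper instead of the inline ternary used here. A helper consistent with that inline
// logic could look like this; the pixel-format parameter is kept only to match the call
// site and its real use, if any, is an assumption.
static QVideoSurfaceFormat::Direction scanLineDirection(QVideoFrame::PixelFormat /*pixelFormat*/,
                                                        const BITMAPINFOHEADER &bmiHeader)
{
    // A negative biHeight indicates a top-down DIB.
    return bmiHeader.biHeight < 0
            ? QVideoSurfaceFormat::TopToBottom
            : QVideoSurfaceFormat::BottomToTop;
}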
QVideoSurfaceFormat QAbstractVideoSurface::nearestFormat(const QVideoSurfaceFormat &format) const
{
    return isFormatSupported(format) ? format : QVideoSurfaceFormat();
}
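// Hedged usage sketch: probing a surface for a workable format before starting it.
// The surface pointer and the preferred format are illustrative only.
QVideoSurfaceFormat preferred(QSize(1280, 720), QVideoFrame::Format_YUV420P);
const QVideoSurfaceFormat usable = surface->nearestFormat(preferred);
if (usable.isValid())
    surface->start(usable);
else
    qWarning("No suitable surface format for 1280x720 YUV420P");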
    doClear();
}

void QGstXvImageBufferPool::doClear()
{
    foreach (QGstXvImageBuffer *xvBuffer, m_allBuffers) {
        xvBuffer->markedForDeletion = true;
    }
    m_allBuffers.clear();

    foreach (QGstXvImageBuffer *xvBuffer, m_pool) {
        gst_buffer_unref(GST_BUFFER(xvBuffer));
    }
    m_pool.clear();

    m_format = QVideoSurfaceFormat();
}

void QGstXvImageBufferPool::queuedDestroy()
{
    QMutexLocker lock(&m_destroyMutex);

    XSync(QX11Info::display(), false);

    foreach (XvShmImage xvImage, m_imagesToDestroy) {
        if (xvImage.shmInfo.shmaddr != ((void *) -1)) {
            XShmDetach(QX11Info::display(), &xvImage.shmInfo);
            XSync(QX11Info::display(), false);

            shmdt(xvImage.shmInfo.shmaddr);
        }
QVideoSurfaceFormat ChilitagsSurface::nearestFormat(const QVideoSurfaceFormat &) const
{
    qDebug() << "Not implemented: ChilitagsSurface::nearestFormat";
    return QVideoSurfaceFormat();
}