bool CameraVideoSurface::present (const QVideoFrame &frame) { m_frame = frame; if (surfaceFormat().pixelFormat() != m_frame.pixelFormat() || surfaceFormat().frameSize() != m_frame.size()) { stop(); return false; } else { m_frame.map(QAbstractVideoBuffer::ReadOnly); frWidth = m_frame.width(); frHeight = m_frame.height(); int line = m_frame.bytesPerLine(); // build QImage from frame m_completeImage = QImage( m_frame.bits(), frWidth, frHeight, line, m_frame.imageFormatFromPixelFormat(m_frame.pixelFormat())); m_frame.unmap(); QImage dstImage = scaleImage(m_completeImage); m_frame = QVideoFrame(dstImage); // enlarge faces m_target->updateVideo(); return true; } }
bool MyVideoSurface::present(const QVideoFrame &frame)
{
    // Accept only frames matching the format negotiated in start(); on a
    // mismatch the surface shuts down and the frame is rejected.
    m_frame = frame;

    const bool formatMatches =
            surfaceFormat().pixelFormat() == m_frame.pixelFormat()
            && surfaceFormat().frameSize() == m_frame.size();

    if (!formatMatches) {
        stop();
        return false;
    }

    // Frame stored; let the target repaint with it.
    m_target->updateVideo();
    return true;
}
bool AndroidVideoSurface::present(const QVideoFrame &frame)
{
    // Stores the incoming frame under paintLock (shared with the paint path)
    // and schedules a repaint of the target rectangle.
    if (surfaceFormat().pixelFormat() != frame.pixelFormat()
            || surfaceFormat().frameSize() != frame.size()) {
        // Frame no longer matches the negotiated surface format:
        // report the error and deactivate the surface.
        setError(IncorrectFormatError);
        stop();
        return false;
    } else {
        paintLock.lock();   // NOTE(review): manual lock/unlock — an RAII
                            // locker would be exception-safe; confirm
                            // update() cannot throw before changing.
        m_currentFrame = frame;
        m_widget->update(m_targetRect);   // async repaint request, not immediate
        paintLock.unlock();
        return true;
    }
}
//! [4] bool VideoWidgetSurface::present(const QVideoFrame &frame) { if (surfaceFormat().pixelFormat() != frame.pixelFormat() || surfaceFormat().frameSize() != frame.size()) { setError(IncorrectFormatError); stop(); return false; } else { currentFrame = frame; widget->repaint(targetRect); return true; } }
void VideoItem::paint(QPainter *painter, const QStyleOptionGraphicsItem *option, QWidget *widget)
{
    // Draws the current video frame into the item's bounding rect, flipping
    // vertically when the frame is stored bottom-up.
    Q_UNUSED(option);
    Q_UNUSED(widget);

    if (currentFrame.map(QAbstractVideoBuffer::ReadOnly)) {
        const QTransform oldTransform = painter->transform();

        // Bottom-up frames must be mirrored around the horizontal axis.
        if (surfaceFormat().scanLineDirection() == QVideoSurfaceFormat::BottomToTop) {
            painter->scale(1, -1);
            painter->translate(0, -boundingRect().height());
        }

        // BUG FIX: pass the frame's bytesPerLine() stride. The original
        // QImage constructor assumed tightly packed scanlines; frames with
        // padded strides would render sheared. This also matches the other
        // paint() implementations in this file, which pass the stride.
        painter->drawImage(boundingRect(),
                           QImage(currentFrame.bits(),
                                  imageSize.width(),
                                  imageSize.height(),
                                  currentFrame.bytesPerLine(),
                                  imageFormat));

        painter->setTransform(oldTransform);
        framePainted = true;
        currentFrame.unmap();
    }
}
//! [6] void VideoWidgetSurface::paint(QPainter *painter) { if (currentFrame.map(QAbstractVideoBuffer::ReadOnly)) { const QTransform oldTransform = painter->transform(); if (surfaceFormat().scanLineDirection() == QVideoSurfaceFormat::BottomToTop) { painter->scale(1, -1); painter->translate(0, -widget->height()); } currentImage = QImage(currentFrame.bits(), currentFrame.width(), currentFrame.height(), currentFrame.bytesPerLine(), imageFormat); //Tiny hack to bring Opencv Into the game //For now it should be fine imp->readImage(currentImage); imp->loop(); QImage image = imp->getQImage(); //QImage image = currentImage; painter->drawImage(targetRect, image, sourceRect); painter->setTransform(oldTransform); currentFrame.unmap(); } }
void QX11VideoSurface::setWinId(WId id)
{
    // Re-targets the XVideo output to a new window: releases the per-window
    // X resources (image, GC, grabbed Xv port), then re-acquires them for
    // the new window and restarts the surface if it was previously active.
    //qDebug() << "setWinID:" << id;
    if (id == m_winId)
        return;

    // Tear down resources tied to the previous window.
    if (m_image)
        XFree(m_image);   // NOTE(review): m_image is not nulled here; the later
                          // `if (m_image)` checks use the stale pointer purely
                          // as a "surface was active" flag — confirm intended.
    if (m_gc) {
        XFreeGC(QX11Info::display(), m_gc);
        m_gc = 0;
    }
    if (m_portId != 0)
        XvUngrabPort(QX11Info::display(), m_portId, 0);

    m_supportedPixelFormats.clear();
    m_formatIds.clear();

    m_winId = id;

    if (m_winId && findPort()) {
        querySupportedFormats();
        m_gc = XCreateGC(QX11Info::display(), m_winId, 0, 0);
        // If a frame image existed, the surface was running: restart it
        // with the previously negotiated format.
        if (m_image) {
            m_image = 0;
            if (!start(surfaceFormat())) {
                QAbstractVideoSurface::stop();
                qWarning() << "Failed to start video surface with format" << surfaceFormat();
            }
        }
    } else {
        qWarning() << "Failed to find XVideo port";
        // No usable port on the new window: deactivate if we were active.
        if (m_image) {
            m_image = 0;
            QAbstractVideoSurface::stop();
        }
    }

    emit supportedFormatsChanged();
}
void VideoWidgetSurface::updateVideoRect() { QSize size = surfaceFormat().sizeHint(); size.scale(widget->size().boundedTo(size), Qt::KeepAspectRatio); targetRect = QRect(QPoint(0, 0), size); targetRect.moveCenter(widget->rect().center()); }
void QAbstractVideoSurface::stop()
{
    // Deactivate the surface; a surface that is not active is left untouched.
    // State lives in the "_q_active"/"_q_format" dynamic properties.
    if (!property("_q_active").toBool())
        return;

    // Clear the stored format and flip to inactive before notifying listeners.
    setProperty("_q_format", QVariant::fromValue(QVideoSurfaceFormat()));
    setProperty("_q_active", false);

    emit activeChanged(false);
    emit surfaceFormatChanged(surfaceFormat());
}
void AVL_QT_DLL_EXPORT WidgetSurface::updateVideoRect()
{
    // Scale the video's suggested size to fill the widget (aspect ratio
    // preserved) and center the resulting rectangle in the widget.
    const QSize frameSize = surfaceFormat().sizeHint();

    QSize fitted = frameSize;
    fitted.scale(widget->size(), Qt::KeepAspectRatio);

    targetRect = QRect(QPoint(0, 0), fitted);
    targetRect.moveCenter(widget->rect().center());
}
/*!
    Stops a video surface presenting frames and releases any resources
    acquired in start().

    \note You must call the base class implementation of stop() at the
    start of your implementation.

    \sa isActive(), start()
*/
void QAbstractVideoSurface::stop()
{
    Q_D(QAbstractVideoSurface);

    // Guard clause: a surface that is not active has nothing to release.
    if (!d->active)
        return;

    // Reset the negotiated format and deactivate, then notify listeners.
    d->surfaceFormat = QVideoSurfaceFormat();
    d->active = false;

    emit activeChanged(false);
    emit surfaceFormatChanged(surfaceFormat());
}
bool VideoWidgetSurface::present(const QVideoFrame &frame) { if (surfaceFormat().pixelFormat() != frame.pixelFormat() || surfaceFormat().frameSize() != frame.size()) { setError(IncorrectFormatError); stop(); return false; } else { currentFrame = frame; currentFrame.map(QAbstractVideoBuffer::ReadOnly); QImage image(currentFrame.bits(), currentFrame.width(), currentFrame.height(), imageFormat); emit aviImage(image); currentFrame.unmap(); return true; } }
void VideoWidgetSurface::paint(QPainter *painter)
{
    // Nothing to draw unless the frame's buffer can be mapped for reading.
    if (!currentFrame.map(QAbstractVideoBuffer::ReadOnly))
        return;

    const QTransform savedTransform = painter->transform();

    // Frames stored bottom-up are flipped vertically before drawing.
    if (surfaceFormat().scanLineDirection() == QVideoSurfaceFormat::BottomToTop) {
        painter->scale(1, -1);
        painter->translate(0, -widget->height());
    }

    // Wrap the mapped buffer (no copy); valid only until unmap() below.
    const QImage frameImage(currentFrame.bits(),
                            currentFrame.width(),
                            currentFrame.height(),
                            currentFrame.bytesPerLine(),
                            imageFormat);
    painter->drawImage(targetRect, frameImage, sourceRect);

    painter->setTransform(savedTransform);
    currentFrame.unmap();
}
QRectF VideoItem::boundingRect() const
{
    // The item occupies exactly the area suggested by the surface format.
    const QSizeF itemSize = surfaceFormat().sizeHint();
    return QRectF(QPointF(0, 0), itemSize);
}