bool MyVideoSurface::present(const QVideoFrame& frame){
    if (frame.isValid()) {
        QVideoFrame cloneFrame(frame);
        cloneFrame.map(QAbstractVideoBuffer::ReadOnly);
        img = QImage(cloneFrame.bits(),
                     cloneFrame.width(),
                     cloneFrame.height(),
                     QVideoFrame::imageFormatFromPixelFormat(cloneFrame.pixelFormat()));

        //do something with the image ...
        //img = &img1;
        //counter++;
        //if (counter % 100 == 0) {
            counter = 0;
            //qDebug() << "PrintImage";
            QRect rect(400, 240, 800, 480);
            img = img.copy(rect);
            img = img.mirrored(true,false);
            //qDebug() << img.width() << " " << img.height();
            QImage image = show->fit500(&img);
            show->setImage(image);
            show->computeMostFitTemplateX(10);
            GT.m_TV = show->getTV();
            GT.m_image = image;
            show->update();
        //}

        cloneFrame.unmap();
        return true;
    }
    return false;
}
bool QCustomVideoSurface::present(const QVideoFrame &frame){
    // Wraps the frame's pixel data in a cv::Mat (no copy) and forwards it
    // through the frameAvailable() signal. Returns true when a frame was
    // handled, false for invalid frames or unsupported pixel formats.
    if(!frame.isValid())
        return false;

    QVideoFrame cloneFrame(frame); // makes a shallow copy (since QVideoFrame is explicitly shared), to get the access to the pixel data
    cloneFrame.map(QAbstractVideoBuffer::ReadOnly);
#ifdef Q_OS_ANDROID
    // Android always delivers RGBX8888; wrap it directly as a 4-channel 8-bit Mat.
    cv::Mat mat(cloneFrame.height(), cloneFrame.width(), CV_8UC4, (void *)cloneFrame.bits());
    emit frameAvailable(mat, QImage::Format_RGBX8888);
#else
    QImage::Format format = QVideoFrame::imageFormatFromPixelFormat(cloneFrame.pixelFormat());
    int cvtype = CV_8UC1;
    switch(format) {
        case QImage::Format_RGB32:
            cvtype = CV_8UC4;
            break;
        case QImage::Format_RGB888:
            cvtype = CV_8UC3;
            break;
        case QImage::Format_Invalid:
            qWarning("QCustomVideoSurface Warning: image format is QImage::Format_Invalid");
            cloneFrame.unmap(); // BUGFIX: frame was previously left mapped on this early return
            return false;
        default:
            // TO DO add the new formats if find
            qWarning("QCustomVideoSurface Warning: image format is not implemented (QImage::Format %d)", format);
            cloneFrame.unmap(); // BUGFIX: frame was previously left mapped on this early return
            return false;
    }
    cv::Mat mat(cloneFrame.height(), cloneFrame.width(), cvtype, (void *)cloneFrame.bits());
    cv::flip(mat, mat, 0); // flip vertically (source apparently arrives upside-down)
    emit frameAvailable(mat, format);
#endif
    cloneFrame.unmap();
    return true;
}
Example #3
0
bool CameraFrameGrabber::present(const QVideoFrame& frame) {
    // Grabs at most one frame per "ready" window: when ready, converts the
    // frame to a QImage and emits it, otherwise silently drops the frame.
    if (!ready)
        return true; // not ready: drop the frame but report success to the pipeline

    beNotReady();
    if (!frame.isValid())
        return false;

    QVideoFrame cloneFrame(frame); // shallow copy so the frame can be mapped for CPU access
    cloneFrame.map(QAbstractVideoBuffer::ReadOnly);
    // Two fixes over the naive conversion:
    //  * pass bytesPerLine() explicitly — frame rows may be padded, and the
    //    stride-less QImage constructor would shear such frames;
    //  * .copy() deep-copies the pixels, so the emitted image stays valid
    //    after unmap() (the wrapping QImage would otherwise reference freed
    //    memory, e.g. with a queued signal connection).
    QImage image = QImage(cloneFrame.bits(),
                          cloneFrame.width(),
                          cloneFrame.height(),
                          cloneFrame.bytesPerLine(),
                          QVideoFrame::imageFormatFromPixelFormat(cloneFrame.pixelFormat())).copy();
    cloneFrame.unmap();
    emit frameAvailable(image);
    return true;
}
Example #4
0
unsigned QmlVlcVideoOutput::video_format_cb( char *chroma,
                                             unsigned *width, unsigned *height,
                                             unsigned *pitches, unsigned *lines )
{
    // libvlc video-format callback: requests I420 output, describes the plane
    // layout (pitches/lines), and allocates the decode frame accordingly.
    // Returns the number of planes (3: Y, U, V).
    memcpy( chroma, "I420", 4 );

    // Round both dimensions up to even values (4:2:0 chroma subsampling needs
    // even luma dimensions). BUGFIX: use full 'unsigned' — the previous
    // uint16_t locals silently truncated dimensions >= 65536.
    const unsigned evenWidth  = *width  + ( *width  & 1 );
    const unsigned evenHeight = *height + ( *height & 1 );

    // Row pitches rounded up to a multiple of 4 bytes.
    pitches[0] = evenWidth; if( pitches[0] % 4 ) pitches[0] += 4 - pitches[0] % 4;
    pitches[1] = evenWidth / 2; if( pitches[1] % 4 ) pitches[1] += 4 - pitches[1] % 4;
    pitches[2] = pitches[1]; // U and V planes share the same layout

    lines[0] = evenHeight;
    lines[1] = evenHeight / 2;
    lines[2] = lines[1];

    // One contiguous buffer holding all three planes back-to-back.
    m_decodeFrame.reset( new QmlVlcI420Frame );

    m_decodeFrame->frameBuf.resize( pitches[0] * lines[0] + pitches[1] * lines[1] + pitches[2] * lines[2] );

    m_decodeFrame->width = evenWidth;
    m_decodeFrame->height = evenHeight;

    char* fb = m_decodeFrame->frameBuf.data();

    m_decodeFrame->yPlane = fb;
    m_decodeFrame->yPlaneSize = pitches[0] * lines[0];

    m_decodeFrame->uPlane = fb + m_decodeFrame->yPlaneSize;
    m_decodeFrame->uPlaneSize = pitches[1] * lines[1];

    m_decodeFrame->vPlane = fb + m_decodeFrame->yPlaneSize + m_decodeFrame->uPlaneSize;
    m_decodeFrame->vPlaneSize = pitches[2] * lines[2];

    // Separate render frame — presumably double-buffering between the decode
    // and render threads; cloneFrame() is defined elsewhere — TODO confirm.
    m_renderFrame = cloneFrame( m_decodeFrame );

    return 3; // plane count for I420
}
Example #5
0
    // Converts each incoming QVideoFrame into the planar ICL image 'imgWork',
    // swaps it into the display slot, and (optionally) wakes a consumer thread
    // waiting for the next frame. Returns true when a frame was consumed.
    bool ICLVideoSurface::present(const QVideoFrame &frame)
    {
        if (frame.isValid()) {
            QVideoFrame cloneFrame(frame); // shallow copy so the frame can be mapped
            // ReadWrite: the fallback branch below re-seats 'cloneFrame' with a
            // converted frame that is also mapped ReadWrite.
            cloneFrame.map(QAbstractVideoBuffer::ReadWrite);
            lock.lock();
            // Rotate buffers: the previously displayed image becomes the new work target.
            std::swap(imgWork,imgNextDisplay);
            if(cloneFrame.pixelFormat() == QVideoFrame::Format_RGB24) {
                imgWork->setChannels(3);
                imgWork->setSize(utils::Size(cloneFrame.width(),cloneFrame.height()));
              // De-interleave RGBRGB... rows (stride = bytesPerLine) into planar channels.
              core::interleavedToPlanar<uchar,icl8u>(cloneFrame.bits(),imgWork,cloneFrame.bytesPerLine());
            }
            else if(cloneFrame.pixelFormat() == QVideoFrame::Format_YUV420P) {
              imgWork->setChannels(3);
              // Width is taken from the Y-plane pitch (bytesPerLine(0)), which may
              // exceed the visible width when rows are padded.
              imgWork->setSize(utils::Size(cloneFrame.bytesPerLine(0),cloneFrame.height()));
              core::convertYUV420ToRGB8(cloneFrame.bits(),utils::Size(cloneFrame.bytesPerLine(),cloneFrame.height()),imgWork);
              // If the pitch differs from the visible width, restrict the ROI to
              // the visible area; otherwise use the full image.
              if(cloneFrame.bytesPerLine()!=cloneFrame.width()) {
                imgWork->setROI(utils::Point(0,0),utils::Size(cloneFrame.width(),cloneFrame.height()));
              }
              else
                imgWork->setFullROI();
            } else if(cloneFrame.pixelFormat() == QVideoFrame::Format_ARGB32) {
              imgWork->setChannels(3);
              imgWork->setSize(utils::Size(cloneFrame.width(),cloneFrame.height()));
              const int dim = imgWork->getDim();
              icl8u *res_r = imgWork->begin(0);
              icl8u *res_g = imgWork->begin(1);
              icl8u *res_b = imgWork->begin(2);
              uchar *src = cloneFrame.bits();

              // channel order in QVideoFrame seems switched
              // its rather 0xBBGGRRAA than 0xAARRBBGG
              // that is why we match Channel 0 -> Blue and 2 -> Reds
              for(int i=0;i<dim;++i){
                res_r[i] = src[i*4+2]; // red channel
                res_g[i] = src[i*4+1]; // green channel
                res_b[i] = src[i*4+0]; // blue channel
              } 
            } else {
              // fallback if no native conversion is available
              imgWork->setChannels(3);
              imgWork->setSize(utils::Size(cloneFrame.width(),cloneFrame.height()));
              // Wrap the mapped bits in a QImage (no copy) and let Qt convert to RGB888.
              // NOTE(review): no bytesPerLine is passed, so this assumes tightly
              // packed rows — padded formats would shear here; confirm upstream formats.
              const QImage tmpImg(cloneFrame.bits(),
                           cloneFrame.width(),
                           cloneFrame.height(),
                           QVideoFrame::imageFormatFromPixelFormat(cloneFrame.pixelFormat()));
              QVideoFrame tmp = QVideoFrame(tmpImg.convertToFormat(QImage::Format_RGB888));  
              tmp.map(QAbstractVideoBuffer::ReadWrite);
              // Re-seat cloneFrame with the converted RGB888 frame and de-interleave it.
              cloneFrame = tmp;
              imgWork->setChannels(3);
              imgWork->setSize(utils::Size(cloneFrame.width(),cloneFrame.height()));
              core::interleavedToPlanar<uchar,icl8u>(cloneFrame.bits(),imgWork,cloneFrame.bytesPerLine());
            }
            // When locking mode is on, wake up a consumer blocked on waitCondition.
            if(useLocking.load()) {
				QMutexLocker waitLock(&waitMutex);// boost::mutex::scoped_lock waitLock(waitMutex);
              if(!nextFrameReady)
              {
                  nextFrameReady=true;
				  waitCondition.wakeOne();// notify_one();
              }
            }
            nextFrameReady=true; // also set outside the guarded section for the non-locking path
            lock.unlock();
            return true;
        }
        return false;
    }