unsigned VlcVideoOutput::video_format_cb( char* chroma,
                                          unsigned* width, unsigned* height,
                                          unsigned* pitches, unsigned* lines )
{
    std::unique_ptr<VideoEvent> frameSetupEvent;
    switch( _pixelFormat ) {
        case PixelFormat::RV32: {
            std::shared_ptr<RV32VideoFrame> videoFrame( new RV32VideoFrame() );
            frameSetupEvent.reset( new RV32FrameSetupEvent( videoFrame ) );
            _videoFrame = videoFrame;
            break;
        }
        case PixelFormat::I420:
        default: {
            std::shared_ptr<I420VideoFrame> videoFrame( new I420VideoFrame() );
            frameSetupEvent.reset( new I420FrameSetupEvent( videoFrame ) );
            _videoFrame = videoFrame;
            break;
        }
    }

    const unsigned planeCount = _videoFrame->video_format_cb( chroma,
                                                              width, height,
                                                              pitches, lines );

    _guard.lock();
    _videoEvents.push_back( std::move( frameSetupEvent ) );
    _guard.unlock();
    uv_async_send( &_async );

    _videoFrame->waitBuffer();

    return planeCount;
}
void USBCamReader::readCurFrameGray(unsigned char* grayImgData) {
	assert(videoCap);
	IplImage* img = cvRetrieveFrame(videoCap);
	cv::Mat rawFrame(img);
	cv::Mat videoFrame(_h, _w, CV_8UC1, grayImgData);
	cv::cvtColor(rawFrame, videoFrame, CV_RGB2GRAY);
}
void AVIReader::readCurFrameGray(unsigned char* grayImgData) {
	assert(videoCap);
	IplImage* img = cvRetrieveFrame(videoCap);
	cv::Mat rawFrame(img);
	cv::Mat videoFrame(_h, _w, CV_8UC1, grayImgData);
	if(rawFrame.type() == CV_8UC1)
		rawFrame.copyTo(videoFrame);
	else
		cv::cvtColor(rawFrame, videoFrame, CV_RGB2GRAY);
}
void USBCamReader::readCurFrame(unsigned char* rgbdata,
		unsigned char* graydata) {
	assert(videoCap);
	IplImage* img = cvRetrieveFrame(videoCap);
	cv::Mat rawFrame(img);

	cv::Mat rgbImg(_h, _w, CV_8UC3, rgbdata);
	cv::cvtColor(rawFrame, rgbImg, CV_BGR2RGB);

	cv::Mat videoFrame(_h, _w, CV_8UC1, graydata);
	cv::cvtColor(rawFrame, videoFrame, CV_RGB2GRAY);
}
// 示例#5 (Example #5)
// 0
// Callback from webrtc::DesktopCapturer with one captured desktop frame.
// Converts the ARGB desktop frame to I420, optionally rescales it to the
// configured m_width/m_height (keeping aspect ratio when only one is set),
// and forwards the result to broadcaster_.
void DesktopCapturer::OnCaptureResult(webrtc::DesktopCapturer::Result result, std::unique_ptr<webrtc::DesktopFrame> frame) {

	RTC_LOG(INFO) << "DesktopCapturer:OnCaptureResult";

	if (result == webrtc::DesktopCapturer::Result::SUCCESS) {
		int width = frame->rect().width();
		int height = frame->rect().height();
		rtc::scoped_refptr<webrtc::I420Buffer> I420buffer = webrtc::I420Buffer::Create(width, height);

		// Desktop frames are 32-bit ARGB; stride is in pixels, so scale by
		// kBytesPerPixel to get the byte stride ConvertToI420 expects.
		const int conversionResult = libyuv::ConvertToI420(frame->data(), frame->stride()*webrtc::DesktopFrame::kBytesPerPixel,
			I420buffer->MutableDataY(), I420buffer->StrideY(),
			I420buffer->MutableDataU(), I420buffer->StrideU(),
			I420buffer->MutableDataV(), I420buffer->StrideV(),
			0, 0,
			width, height,
			width, height,
			libyuv::kRotate0, ::libyuv::FOURCC_ARGB);

		if (conversionResult >= 0) {
			webrtc::VideoFrame videoFrame(I420buffer, webrtc::VideoRotation::kVideoRotation_0, rtc::TimeMicros());
			if ( (m_height == 0) && (m_width == 0) ) {
				// No target size configured: pass the frame through as-is.
				broadcaster_.OnFrame(videoFrame);
			} else {
				// Derive the missing dimension from the source aspect ratio.
				int outHeight = m_height;
				int outWidth = m_width;
				if (outHeight == 0) {
					outHeight = (videoFrame.height() * outWidth) / videoFrame.width();
				}
				else if (outWidth == 0) {
					outWidth = (videoFrame.width() * outHeight) / videoFrame.height();
				}
				int stride_y = outWidth;
				int stride_uv = (outWidth + 1) / 2;
				rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer = webrtc::I420Buffer::Create(outWidth, outHeight, stride_y, stride_uv, stride_uv);
				scaled_buffer->ScaleFrom(*videoFrame.video_frame_buffer()->ToI420());
				webrtc::VideoFrame scaledFrame = webrtc::VideoFrame(scaled_buffer, webrtc::kVideoRotation_0, rtc::TimeMicros());

				// BUG FIX: the original broadcast the *unscaled* videoFrame
				// here, silently discarding the scaled frame it had just
				// built (the local was also named `frame`, shadowing the
				// parameter). Forward the scaled frame instead.
				broadcaster_.OnFrame(scaledFrame);
			}
		} else {
			RTC_LOG(LS_ERROR) << "DesktopCapturer:OnCaptureResult conversion error:" << conversionResult;
		}

	} else {
		RTC_LOG(LS_ERROR) << "DesktopCapturer:OnCaptureResult capture error:" << (int)result;
	}
}
bool VideoSurface::present(const QVideoFrame &frame)
{
    if (frame.isValid())
    {
        QVideoFrame videoFrame(frame);
        if( videoFrame.map(QAbstractVideoBuffer::ReadOnly) )
        {
            lastFrame = QImage(videoFrame.width(), videoFrame.height(), QImage::Format_RGB888);
            memcpy(lastFrame.bits(), videoFrame.bits(), videoFrame.mappedBytes());

            videoFrame.unmap();

            // Use thread for processing
            emit frameAvailable();
            return true;
        }
    }
    return false;
}
// 示例#7 (Example #7)
// 0
void QGstreamerGLTextureRenderer::renderGLFrame(int frame)
{
#if defined(GL_TEXTURE_SINK_DEBUG) && GL_TEXTURE_SINK_DEBUG > 1
    qDebug() << Q_FUNC_INFO << "frame:" << frame << "surface active:" << m_surface->isActive();
#endif
    QMutexLocker locker(&m_mutex);

    if (!m_surface || !m_glEnabled) {
        m_renderCondition.wakeAll();
        return;
    }

    MeegoGstVideoTexture *textureSink = MEEGO_GST_VIDEO_TEXTURE(m_videoSink);

    if (m_context)
        m_context->makeCurrent();

    //don't try to render the frame if state is changed to NULL or READY
    GstState pendingState = GST_STATE_NULL;
    GstState newState = GST_STATE_NULL;
    GstStateChangeReturn res = gst_element_get_state(m_videoSink,
                                                     &newState,
                                                     &pendingState,
                                                     0);//don't block and return immediately

    if (res == GST_STATE_CHANGE_FAILURE ||
            newState == GST_STATE_NULL ||
            pendingState == GST_STATE_NULL) {
        stopRenderer();
        m_renderCondition.wakeAll();
        return;
    }

    if (m_surface->isActive() && m_surface->surfaceFormat().handleType() != EGLImageTextureHandle)
        m_surface->stop();

    if (!m_surface->isActive()) {
        //find the native video size
        GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
        GstCaps *caps = gst_pad_get_negotiated_caps(pad);

        if (caps) {
            QSize newNativeSize = QGstUtils::capsCorrectedResolution(caps);
            if (m_nativeSize != newNativeSize) {
                m_nativeSize = newNativeSize;
                emit nativeSizeChanged();
            }
            gst_caps_unref(caps);
        }

        //start the surface...
        QVideoSurfaceFormat format(m_nativeSize, QVideoFrame::Format_RGB32, EGLImageTextureHandle);
        if (!m_surface->start(format)) {
            qWarning() << Q_FUNC_INFO << "failed to start video surface" << format;
            m_renderCondition.wakeAll();
            return;
        }
    }

    QGStreamerGLTextureBuffer *buffer = new QGStreamerGLTextureBuffer(textureSink, frame);
    QVideoFrame videoFrame(buffer,
                           m_surface->surfaceFormat().frameSize(),
                           m_surface->surfaceFormat().pixelFormat());
    m_surface->present(videoFrame);
    m_renderCondition.wakeAll();
}
/**
 * Show the "Built-in Support" dialog: a tabbed read-only summary of which
 * optional codecs, libraries and CPU features this binary was compiled with.
 * Each flag below is set to 1 when the corresponding build-time option was
 * enabled (and, for libdca/amrnb, when the runtime loader reports the
 * library as available).
 * @return always 1 (the dialog was displayed).
 */
uint8_t DIA_builtin(void)
{
  // One flag per optional feature; all default to "not built in".
  uint32_t altivec=0,mad=0,a52dec=0,xvid4=0,X264=0,freetype=0,esd=0,arts=0,vorbis=0,win32=0;
  uint32_t faac=0,faad=0,libdca=0,aften=0,libamrnb=0,lame=0,sdl=0,oss=0,xvideo=0,x86=0,x86_64=0,alsa=0;
  uint32_t adm_powerpc=0,adm_gettext=0,adm_fontconfig=0;
#ifdef USE_FONTCONFIG
  adm_fontconfig=1;
#endif
#ifdef USE_ALTIVEC
        altivec=1;
#endif
#ifdef USE_MP3
        mad=1;
#endif
#ifdef USE_AC3
        a52dec=1;
#endif
#ifdef USE_XVID_4
        xvid4=1;
#endif
#ifdef USE_X264
        X264=1;
#endif
#ifdef USE_FREETYPE
        freetype=1;
#endif
#ifdef USE_ESD
        esd=1;
#endif
#ifdef USE_ARTS
        arts=1;
#endif
#ifdef USE_VORBIS
        vorbis=1;
#endif
#ifdef ADM_WIN32
        win32=1;
#endif
#ifdef USE_FAAC
        faac=1;
#endif
#ifdef USE_FAAD
        faad=1;
#endif
#ifdef USE_LIBDCA
	// libdca is dynamically loaded; report it only if actually loadable.
	if (dca->isAvailable())
        libdca=1;
#endif
#ifdef USE_AFTEN
        aften=1;
#endif
#ifdef USE_AMR_NB
	// amrnb is dynamically loaded; report it only if actually loadable.
	if (amrnb->isAvailable())
		libamrnb=1;
#endif
#ifdef HAVE_LIBMP3LAME
	lame=1;
#endif
#ifdef USE_SDL
	sdl=1;
#endif
#ifdef OSS_SUPPORT
	oss=1;
#endif
#ifdef ALSA_SUPPORT
	alsa=1;
#endif

#ifdef USE_XV
	xvideo=1;
#endif
#ifdef ARCH_X86
	x86=1;
#endif
#ifdef ARCH_X86_64
	x86_64=1;
#endif
#ifdef ARCH_POWERPC
	adm_powerpc=1;
#endif
#ifdef HAVE_GETTEXT
	adm_gettext=1;
#endif
    
	// --- "Codecs" tab: video codec notches grouped in a frame. ---
	diaElemFrame videoFrame(QT_TR_NOOP("Video Codecs"));
	diaElemNotch tXvid4(xvid4, QT_TR_NOOP("Xvid"));
	diaElemNotch tX264(X264, QT_TR_NOOP("x264"));

	videoFrame.swallow(&tXvid4);
	videoFrame.swallow(&tX264);

	// Audio codec notches grouped in their own frame on the same tab.
	diaElemFrame audioFrame(QT_TR_NOOP("Audio Codecs"));	
	diaElemNotch tAften(aften, QT_TR_NOOP("Aften"));	
	diaElemNotch tLibamrnb(libamrnb, QT_TR_NOOP("amrnb"));
    diaElemNotch tFaac(faac, QT_TR_NOOP("FAAC"));
    diaElemNotch tFaad(faad, QT_TR_NOOP("FAAD2"));
	diaElemNotch tLame(lame, QT_TR_NOOP("LAME"))
	diaElemNotch tA52dec(a52dec, QT_TR_NOOP("liba52"));
	diaElemNotch tLibdca(libdca, QT_TR_NOOP("libdca"));
	diaElemNotch tMad(mad, QT_TR_NOOP("MAD"));
	diaElemNotch tVorbis(vorbis, QT_TR_NOOP("Vorbis"));

	audioFrame.swallow(&tAften);
	audioFrame.swallow(&tLibamrnb);	
	audioFrame.swallow(&tFaac);
	audioFrame.swallow(&tFaad);
	audioFrame.swallow(&tLame);
	audioFrame.swallow(&tA52dec);
	audioFrame.swallow(&tLibdca);
	audioFrame.swallow(&tMad);
	audioFrame.swallow(&tVorbis);

	// --- "Libraries" tab notches. ---
	diaElemNotch tArts(arts, QT_TR_NOOP("aRts"));
	diaElemNotch tEsd(esd, QT_TR_NOOP("ESD"));
        diaElemNotch tFontConfig(adm_fontconfig, QT_TR_NOOP("Fontconfig"));
	diaElemNotch tFreetype(freetype, QT_TR_NOOP("FreeType 2"));
	diaElemNotch tGettext(adm_gettext, QT_TR_NOOP("Gettext"));
        diaElemNotch tAlsa(alsa, QT_TR_NOOP("ALSA"));
	diaElemNotch tOss(oss, QT_TR_NOOP("OSS"));
	diaElemNotch tSdl(sdl, QT_TR_NOOP("SDL"));
	diaElemNotch tXvideo(xvideo, QT_TR_NOOP("XVideo"));

	// --- "CPU" tab notches. ---
	diaElemNotch tAltivec(altivec, QT_TR_NOOP("AltiVec"));
	diaElemNotch tPowerPc(adm_powerpc, QT_TR_NOOP("PowerPC"));
	diaElemNotch tX86(x86, QT_TR_NOOP("x86"));
	diaElemNotch tX86_64(x86_64, QT_TR_NOOP("x86-64"));


	// Array sizes below must match the element counts passed to diaElemTabs.
	diaElem *codecElems[] = {&videoFrame, &audioFrame};
	diaElem *libsElems[] = {&tArts, &tEsd, &tFontConfig, &tFreetype, &tGettext, &tAlsa, &tOss, &tSdl, &tXvideo};
	diaElem *CPUElems[] = {&tAltivec, &tPowerPc, &tX86, &tX86_64};

	diaElemTabs tabCodec(QT_TR_NOOP("Codecs"), 2, codecElems);
	diaElemTabs tabLibs(QT_TR_NOOP("Libraries"), 9, libsElems);
	diaElemTabs tabCPU(QT_TR_NOOP("CPU"), 4, CPUElems);

	diaElemTabs *tabs[] = {&tabCodec, &tabLibs, &tabCPU};

    // Modal dialog; blocks until the user closes it.
    diaFactoryRunTabs(QT_TR_NOOP("Built-in Support"), 3, tabs);

    return 1;
}
/**
 * Decode one video packet and hand every resulting frame to the render queue,
 * maintaining the video clock and applying A/V-sync frame skipping.
 *
 * @param packet      compressed packet to send to the decoder.
 * @param videoClock  in/out presentation clock (seconds); advanced per frame.
 * @param context     parse state shared across calls (initialized flag,
 *                    skipped-frame counter).
 * @return false if the decoder rejected the packet, true otherwise.
 *
 * NOTE(review): thread-safety relies on the exact ordering of the
 * m_isPausedMutex / m_videoFramesMutex sections below — do not reorder.
 */
bool FFmpegDecoder::handleVideoPacket(
    const AVPacket& packet,
    double& videoClock,
    VideoParseContext& context)
{
    enum { MAX_SKIPPED = 4 };        // hard-skip at most this many frames in a row
    const double MAX_DELAY = 0.2;    // seconds of lateness before resyncing the start clock

    // Send/receive decoding model: one packet in, zero or more frames out.
    const int ret = avcodec_send_packet(m_videoCodecContext, &packet);
    if (ret < 0)
        return false;

    AVFramePtr videoFrame(av_frame_alloc());
    while (avcodec_receive_frame(m_videoCodecContext, videoFrame.get()) == 0)
    {
		const int64_t duration_stamp =
			videoFrame->best_effort_timestamp; //av_frame_get_best_effort_timestamp(m_videoFrame);

        // compute the exact PTS for the picture if it is omitted in the stream
        // pts1 is the dts of the pkt / pts of the frame
        if (duration_stamp != AV_NOPTS_VALUE)
        {
            videoClock = duration_stamp * av_q2d(m_videoStream->time_base);
        }
        const double pts = videoClock;

        // update video clock for next frame
        // for MPEG2, the frame can be repeated, so we update the clock accordingly
        const double frameDelay = av_q2d(m_videoCodecContext->time_base) *
            (1. + videoFrame->repeat_pict * 0.5);
        videoClock += frameDelay;

        // td stays +infinity unless the skip logic computes a wait deadline.
        boost::posix_time::time_duration td(boost::posix_time::pos_infin);
        bool inNextFrame = false;
        const bool haveVideoPackets = !m_videoPacketsQueue.empty();

        {
            boost::lock_guard<boost::mutex> locker(m_isPausedMutex);

            // "Step one frame while paused" mode.
            inNextFrame = m_isPaused && m_isVideoSeekingWhilePaused;
            if (!context.initialized || inNextFrame)
            {
                // (Re)anchor the wall-clock origin so this frame's pts maps to "now".
                m_videoStartClock = (m_isPaused ? m_pauseTimer : GetHiResTime()) - pts;
            }

            // Skipping frames
            if (context.initialized && !inNextFrame && haveVideoPackets)
            {
                const double curTime = GetHiResTime();
                if (m_videoStartClock + pts <= curTime)
                {
                    // Frame is already late.
                    if (m_videoStartClock + pts < curTime - MAX_DELAY)
                    {
                        // Very late: shift the start clock forward rather than
                        // trying to catch up frame by frame.
                        InterlockedAdd(m_videoStartClock, MAX_DELAY);
                    }

                    if (++context.numSkipped > MAX_SKIPPED)
                    {
                        // Force at least one frame through every MAX_SKIPPED+1.
                        context.numSkipped = 0;
                    }
                    else
                    {
                        CHANNEL_LOG(ffmpeg_sync) << "Hard skip frame";

                        // pause
                        if (m_isPaused && !m_isVideoSeekingWhilePaused)
                        {
                            break;
                        }

                        continue;
                    }
                }
                else
                {
                    // Frame is early: wait until its display time, scaled by
                    // the current playback speed ratio.
                    int speedNumerator, speedDenominator;
                    std::tie(speedNumerator, speedDenominator) = static_cast<const std::pair<int, int>&>(m_speedRational);
                    context.numSkipped = 0;
                    td = boost::posix_time::milliseconds(
                        int((m_videoStartClock + pts - curTime) * 1000.  * speedDenominator / speedNumerator) + 1);
                }
            }
        }

        context.initialized = true;

        {
            boost::unique_lock<boost::mutex> locker(m_videoFramesMutex);

            // NOTE(review): `&&` binds tighter than `||`, so the predicate is
            // (paused && !seeking) || canPush — wake on pause OR on queue
            // space. That looks intended; parenthesizing would silence the
            // compiler warning.
            if (!m_videoFramesCV.timed_wait(locker, td, [this]
                {
                    return m_isPaused && !m_isVideoSeekingWhilePaused ||
                        m_videoFramesQueue.canPush();
                }))
            {
                // Timed out waiting for queue space: drop this frame.
                continue;
            }
        }

        {
            boost::lock_guard<boost::mutex> locker(m_isPausedMutex);
            if (m_isPaused && !m_isVideoSeekingWhilePaused)
            {
                break;
            }

            // Single-step completed; clear the flag before delivering the frame.
            m_isVideoSeekingWhilePaused = false;
        }

        if (inNextFrame)
        {
            // Tell the paused UI thread that its requested frame is ready.
            m_isPausedCV.notify_all();
        }

        // Write directly into the queue's back slot; pushBack() below commits it.
        VideoFrame& current_frame = m_videoFramesQueue.back();
        handleDirect3dData(videoFrame.get());
        if (!frameToImage(current_frame, videoFrame, m_imageCovertContext, m_pixelFormat))
        {
            continue;
        }

        current_frame.m_pts = pts;
        current_frame.m_duration = duration_stamp;

        {
            boost::lock_guard<boost::mutex> locker(m_videoFramesMutex);
            m_videoFramesQueue.pushBack();
        }
        m_videoFramesCV.notify_all();
    }

    return true;
}