Code example #1
void VideoDecoder::setRate(const Common::Rational &rate) {
	if (!isVideoLoaded() || _playbackRate == rate)
		return;

	if (rate == 0) {
		stop();
		return;
	} else if (rate != 1 && hasAudio()) {
		warning("Cannot set custom rate in videos with audio");
		return;
	}

	Common::Rational targetRate = rate;

	if (rate < 0) {
		// TODO: Implement support for this
		warning("Cannot set custom rate to backwards");
		targetRate = 1;

		if (_playbackRate == targetRate)
			return;
	}

	if (_playbackRate != 0)
		_lastTimeChange = getTime();

	_playbackRate = targetRate;
	_startTime = g_system->getMillis();

	// Adjust start time if we've seeked to something besides zero time
	if (_lastTimeChange.totalNumberOfFrames() != 0)
		_startTime -= (_lastTimeChange.msecs() / _playbackRate).toInt();

	startAudio();
}
Code example #2
File: MediaPlayer.cpp  Project: Skiminok/vlc-qt
void VlcMediaPlayer::emitStatus()
{
    Vlc::State s = state();
    bool audio_count;
    bool video_count;

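    // Track counts are only meaningful while buffering, playing, or paused; otherwise report no tracks.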
    if (s == Vlc::Buffering ||
        s == Vlc::Playing ||
        s == Vlc::Paused) {
        audio_count = _vlcAudio->trackCount() > 0;
        video_count = _vlcVideo->trackCount() > 0;
    } else {
        audio_count = false;
        video_count = false;
    }

    VlcError::errmsg();

    emit currentState(s);
    emit hasAudio(audio_count);
    emit hasVideo(video_count);

    // Deprecated
    bool play = s == Vlc::Playing;
    bool buffering = s == Vlc::Buffering;
    emit playing(play, buffering);
}
Code example #3
QString MmRendererMetaData::mediaType() const
{
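    // Prefer "video" when a video track is present, then "audio"; return an empty string for neither.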
    if (hasVideo())
        return QLatin1String("video");
    else if (hasAudio())
        return QLatin1String("audio");
    else
        return QString();
}
Code example #4
bool DirectShowVideoWrapper::isAudioEnabled(void) const
{
    if(isInitialized() && hasAudio())
    {
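        // Audio is considered enabled when the muting "NULL Audio Renderer" filter is absent from the graph.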
        IBaseFilter* pAudioRenderer = NULL;
        _pGraphBuilder->FindFilterByName(L"NULL Audio Renderer",&pAudioRenderer);

        return (pAudioRenderer == NULL);
    }

    return false;
}
Code example #5
File: video_decoder.cpp  Project: 86400/scummvm
bool VideoDecoder::setReverse(bool reverse) {
	// Can only reverse video-only videos
	if (reverse && hasAudio())
		return false;

	// Attempt to make sure all the tracks are in the requested direction
	for (TrackList::iterator it = _tracks.begin(); it != _tracks.end(); it++) {
		if ((*it)->getTrackType() == Track::kTrackTypeVideo && ((VideoTrack *)*it)->isReversed() != reverse) {
			if (!((VideoTrack *)*it)->setReverse(reverse))
				return false;

			_needsUpdate = true; // force an update
		}
	}

	findNextVideoTrack();
	return true;
}
Code example #6
File: Capability.cpp  Project: spsu/image-research
void Capability::printAll()
{
	// A lot of information to output...
	printf(
		"Basic Info:\n"					\
		"    Driver:\t\t %s\n"			\
		"    Card:\t\t %s\n"			\
		"    Bus info:\t\t %s\n"		\
		"    Version:\t\t %2d\n"		\
		"\nCapabilities:\n"				\
		"    video capture:\t %s\n"		\
		"    video overlay:\t %s\n"		\
		"    VBI capture:\t %s\n"		\
		"    VBI output:\t\t %s\n"		\
		"    sliced VBI capture:\t %s\n"\
		"    sliced VBI output:\t %s\n"	\
		"    RDS capture:\t %s\n"		\
		"    video output overlay: %s\n"\
		"    has tuner:\t\t %s\n"		\
		"    has audio:\t\t %s\n"		\
		"    has radio:\t\t %s\n"		\
		"    async IO:\t\t %s\n"		\
		"    streaming:\t\t %s\n",
		
		driver(),
		card(),
		busInfo(),
		version(),
		hasVideoCapture()? "YES" : "no",
		hasVideoOverlay()? "YES" : "no",
		hasVbiCapture()? "YES" : "no",
		hasVbiOutput()? "YES" : "no",
		hasSlicedVbiCapture()? "YES" : "no",
		hasSlicedVbiOutput()? "YES" : "no",
		hasRdsCapture()? "YES" : "no",
		hasVideoOutputOverlay()? "YES" : "no",
		hasTuner()? "YES" : "no",
		hasAudio()? "YES" : "no",
		hasRadio()? "YES" : "no",
		hasAsyncIo()? "YES" : "no",
		hasStreaming()? "YES" : "no"
	);
}
Code example #7
File: video_decoder.cpp  Project: 86400/scummvm
void VideoDecoder::setRate(const Common::Rational &rate) {
	if (!isVideoLoaded() || _playbackRate == rate)
		return;

	if (rate == 0) {
		stop();
		return;
	} else if (rate != 1 && hasAudio()) {
		warning("Cannot set custom rate in videos with audio");
		return;
	}

	Common::Rational targetRate = rate;

	// Attempt to set the reverse
	if (!setReverse(rate < 0)) {
		assert(rate < 0); // We shouldn't fail for forward.
		warning("Cannot set custom rate to backwards");
		setReverse(false);
		targetRate = 1;

		if (_playbackRate == targetRate)
			return;
	}

	if (_playbackRate != 0)
		_lastTimeChange = getTime();

	_playbackRate = targetRate;
	_startTime = g_system->getMillis();

	// Adjust start time if we've seeked to something besides zero time
	if (_lastTimeChange != 0)
		_startTime -= (_lastTimeChange.msecs() / _playbackRate).toInt();

	startAudio();
}
Code example #8
void DirectShowVideoWrapper::disableAudio(void)
{
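    // Swap the graph's audio renderer for a NULL renderer so the audio stream is silently discarded.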
    if(isInitialized() && hasAudio())
    {
		HRESULT hr;
        TCHAR szErr[MAX_ERROR_TEXT_LEN];

        IBaseFilter* pAudioRenderer = NULL;
        hr = FindAudioRenderer(_pGraphBuilder,&pAudioRenderer);
        if (FAILED(hr))
        {
            AMGetErrorText(hr, szErr, MAX_ERROR_TEXT_LEN);
            SWARNING << "Failed to find audio renderer, error: " << szErr << std::endl;
            return;
        }

        if(pAudioRenderer != NULL)
        {
            stop();
            //Remove the Renderer
		    IPin* ipin;
            hr = GetPin(pAudioRenderer, PINDIR_INPUT, 0, &ipin);
			if (FAILED(hr))
			{
				AMGetErrorText(hr, szErr, MAX_ERROR_TEXT_LEN);
				SWARNING << "Get Audio Renderer in pin, error: " << szErr << std::endl;
				return;
			}
		    IPin* opin = NULL;
		    //find out who the renderer is connected to and disconnect from them
		    hr = ipin->ConnectedTo(&opin);
            if (FAILED(hr))
            {
                AMGetErrorText(hr, szErr, MAX_ERROR_TEXT_LEN);
                SWARNING << "error: " << szErr << std::endl;
                return;
            }
		    hr = ipin->Disconnect();
            if (FAILED(hr))
            {
                AMGetErrorText(hr, szErr, MAX_ERROR_TEXT_LEN);
                SWARNING << "error: " << szErr << std::endl;
                return;
            }
		    hr = opin->Disconnect();
            if (FAILED(hr))
            {
                AMGetErrorText(hr, szErr, MAX_ERROR_TEXT_LEN);
                SWARNING << "error: " << szErr << std::endl;
                return;
            }

            //Remove the Renderer		
		    hr = _pGraphBuilder->RemoveFilter(pAudioRenderer);
            if (FAILED(hr))
            {
                AMGetErrorText(hr, szErr, MAX_ERROR_TEXT_LEN);
                SWARNING << "error: " << szErr << std::endl;
                return;
            }

            
            //Add the NULL Audio renderer to the graph
		    hr = _pGraphBuilder->AddFilter(_pNullAudioFilter, L"NULL Audio Renderer");
            if (FAILED(hr))
            {
                AMGetErrorText(hr, szErr, MAX_ERROR_TEXT_LEN);
                SWARNING << "error: " << szErr << std::endl;
                return;
            }

		    //get the input pin of the NULL Audio Renderer
		    hr = GetPin(_pNullAudioFilter, PINDIR_INPUT, 0, &ipin);
			if (FAILED(hr))
			{
				AMGetErrorText(hr, szErr, MAX_ERROR_TEXT_LEN);
				SWARNING << "Get Null Audio in pin, error: " << szErr << std::endl;
				return;
			}

            //Connect the upstream output pin to the NULL Audio renderer
		    hr = _pGraphBuilder->Connect(opin, ipin);
            if (FAILED(hr))
            {
                AMGetErrorText(hr, szErr, MAX_ERROR_TEXT_LEN);
                SWARNING << "error: " << szErr << std::endl;
                return;
            }
        }
    }
}
Code example #9
File: source.cpp  Project: libyuni/libyuni
	bool Source::updateDispatched(uint source)
	{
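		// Keep the OpenAL source fed: recycle processed buffers with fresh stream data and verify video frames remain.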
		if (!valid())
			return false;

		// Audio
		if (hasAudio())
		{
			// Update time progression in the current buffer
			::alGetSourcef(source, AL_SEC_OFFSET, &pSecondsCurrent);
			// Check if a buffer has finished playing
			ALint processed = 0;
			::alGetSourcei(source, AL_BUFFERS_PROCESSED, &processed);
			if (!processed)
				return true;

			// A buffer has finished playing, unqueue it
			ALuint buffer = Private::Media::OpenAL::UnqueueBufferFromSource(source);
			// Reset current buffer time
			pSecondsCurrent = 0.0f;
			uint bits = pAStream->bits();
			uint channels = pAStream->channels();
			uint frequency = pAStream->rate();
			int bufferSize;
			::alGetBufferi(buffer, AL_SIZE, &bufferSize);
			pSecondsElapsed += bufferSize * 8.0f / bits / frequency;
			// Get the next data to feed the buffer
			uint size = fillBuffer();
			if (!size)
				return false;

			// Buffer the data with OpenAL and queue the buffer onto the source
			if (!Private::Media::OpenAL::SetBufferData(buffer, pAStream->alFormat(), pData.data(),
				size, frequency / channels))
				return false;
			if (!Private::Media::OpenAL::QueueBufferToSource(buffer, source))
				return false;
		}

		// Video
		if (hasVideo())
		{
			/*
			if (hasAudio() and Private::Media::OpenAL::IsSourcePlaying(source))
			{
				std::cout << "Video and audio sync !" << std::endl;
				// Try to sync with audio
				ALfloat elapsed;
				::alGetSourcef(source, AL_SEC_OFFSET, &elapsed);
				while (!pFrames.empty() and elapsed > pFrames.front()->timestamp())
				{
					pFrames.pop_front();
					if (pFrames.empty())
						fillQueue();
				}
			}
			*/

			// TEMPORARY
			// The sync code is not working yet, just get some frames when we need them for now
			// if (pFrames.empty())
			// 	fillQueue();

			if (pFrames.empty())
				// No more frames could be loaded
				return false;
		}

		return true;
	}
Code example #10
bool GStreamerWrapper::open( std::string strFilename, bool bGenerateVideoBuffer, bool bGenerateAudioBuffer )
{
	if( m_bFileIsOpen )
	{
		stop();
		close();
	}

	// init property variables
	m_iNumVideoStreams = 0;
	m_iNumAudioStreams = 0;
	m_iCurrentVideoStream = 0;
	m_iCurrentAudioStream = 0;
	m_iWidth = m_iHeight = 0;
	m_iCurrentFrameNumber = 0;		// set to invalid, as it is not decoded yet
	m_dCurrentTimeInMs = 0;			// set to invalid, as it is not decoded yet
	m_bIsAudioSigned = false;
	m_bIsNewVideoFrame = false;
	m_iNumAudioChannels = 0;
	m_iAudioSampleRate = 0;
	m_iAudioBufferSize = 0;
	m_iAudioWidth = 0;
	m_AudioEndianness = LITTLE_ENDIAN;
	m_fFps = 0;
	m_dDurationInMs = 0;
	m_iNumberOfFrames = 0;

	m_fVolume = 1.0f;
	m_fSpeed = 1.0f;
	m_PlayDirection = FORWARD;
	m_CurrentPlayState = NOT_INITIALIZED;
	m_LoopMode = LOOP;
	m_strFilename = strFilename;

#ifdef THREADED_MESSAGE_HANDLER
		m_MsgHandlingThread = std::thread( std::bind( threadedMessageHandler, this ) );
#endif


	////////////////////////////////////////////////////////////////////////// PIPELINE
	// Init main pipeline --> playbin2
	m_GstPipeline = gst_element_factory_make( "playbin2", "pipeline" );

	// Check and re-arrange filename string
	if ( strFilename.find( "file:/", 0 ) == std::string::npos &&
		 strFilename.find( "file:///", 0 ) == std::string::npos &&
		 strFilename.find( "http://", 0 ) == std::string::npos )
	{
		strFilename = "file:/" + strFilename;
	}

	// Open Uri
	g_object_set( m_GstPipeline, "uri", strFilename.c_str(), NULL );


	////////////////////////////////////////////////////////////////////////// VIDEO SINK
	// Extract and Config Video Sink
	if ( bGenerateVideoBuffer )
	{
		// Create the video appsink and configure it
		m_GstVideoSink = gst_element_factory_make( "appsink", "videosink" );
		gst_base_sink_set_sync( GST_BASE_SINK( m_GstVideoSink ), true );
		gst_app_sink_set_max_buffers( GST_APP_SINK( m_GstVideoSink ), 8 );
		gst_app_sink_set_drop( GST_APP_SINK( m_GstVideoSink ),true );
		gst_base_sink_set_max_lateness( GST_BASE_SINK( m_GstVideoSink ), -1);

		// Set some fixed caps for the video sink
		// It would seem that GStreamer then tries to transform any incoming video stream according to these caps
		GstCaps* caps = gst_caps_new_simple( "video/x-raw-rgb",
			"bpp", G_TYPE_INT, 24,
			"depth", G_TYPE_INT, 24,
			"endianness",G_TYPE_INT,4321,
			"red_mask",G_TYPE_INT,0xff0000,
			"green_mask",G_TYPE_INT,0x00ff00,
			"blue_mask",G_TYPE_INT,0x0000ff,
			"alpha_mask",G_TYPE_INT,0x000000ff,
			NULL );


		gst_app_sink_set_caps( GST_APP_SINK( m_GstVideoSink ), caps );
		gst_caps_unref( caps );

		// Set the configured video appsink to the main pipeline
		g_object_set( m_GstPipeline, "video-sink", m_GstVideoSink, (void*)NULL );
		// Tell the video appsink that it should not emit signals as the buffer retrieving is handled via callback methods
		g_object_set( m_GstVideoSink, "emit-signals", false, "sync", true, (void*)NULL );

		// Set Video Sink callback methods
		m_GstVideoSinkCallbacks.eos = &GStreamerWrapper::onEosFromVideoSource;
		m_GstVideoSinkCallbacks.new_preroll = &GStreamerWrapper::onNewPrerollFromVideoSource;
		m_GstVideoSinkCallbacks.new_buffer = &GStreamerWrapper::onNewBufferFromVideoSource;
		gst_app_sink_set_callbacks( GST_APP_SINK( m_GstVideoSink ), &m_GstVideoSinkCallbacks, this, NULL );
	}
	else
	{
#if defined _WIN32 // Use direct show as playback plugin if on Windows; Needed for features like play direction and playback speed to work correctly
		GstElement* videoSink = gst_element_factory_make( "directdrawsink", NULL );
		g_object_set( m_GstPipeline, "video-sink", videoSink, NULL );
#elif defined LINUX
		GstElement* videoSink = gst_element_factory_make( "xvimagesink", NULL );    //possible alternatives: ximagesink (no (gpu) fancy stuff) or better: cluttersink
		g_object_set( m_GstPipeline, "video-sink", videoSink, NULL );
#else // Use Mac OSX plugin otherwise
		GstElement* videoSink = gst_element_factory_make( "osxvideosink", NULL );
		g_object_set( m_GstPipeline, "video-sink", videoSink, NULL );
#endif
	}

	////////////////////////////////////////////////////////////////////////// AUDIO SINK
	// Extract and config Audio Sink
	if ( bGenerateAudioBuffer )
	{
		// Create and configure audio appsink
		m_GstAudioSink = gst_element_factory_make( "appsink", "audiosink" );
		gst_base_sink_set_sync( GST_BASE_SINK( m_GstAudioSink ), true );
		// Set the configured audio appsink to the main pipeline
		g_object_set( m_GstPipeline, "audio-sink", m_GstAudioSink, (void*)NULL );
		// Tell the audio appsink that it should not emit signals as the buffer retrieving is handled via callback methods
		g_object_set( m_GstAudioSink, "emit-signals", false, "sync", true, (void*)NULL );

		// Set Audio Sink callback methods
		m_GstAudioSinkCallbacks.eos = &GStreamerWrapper::onEosFromAudioSource;
		m_GstAudioSinkCallbacks.new_preroll = &GStreamerWrapper::onNewPrerollFromAudioSource;
		m_GstAudioSinkCallbacks.new_buffer = &GStreamerWrapper::onNewBufferFromAudioSource;
		gst_app_sink_set_callbacks( GST_APP_SINK( m_GstAudioSink ), &m_GstAudioSinkCallbacks, this, NULL );
	}
	else
	{
#if defined _WIN32 // Use direct sound plugin if on Windows; Needed for features like play direction and playback speed to work correctly
		GstElement* audioSink = gst_element_factory_make( "directsoundsink", NULL );
		g_object_set ( m_GstPipeline, "audio-sink", audioSink, NULL );
#elif defined LINUX
		GstElement* audioSink = gst_element_factory_make( "pulsesink", NULL );  //alternative: alsasink
		g_object_set ( m_GstPipeline, "audio-sink", audioSink, NULL );
#else // Use Mac OSX plugin otherwise
		GstElement* audioSink = gst_element_factory_make( "osxaudiosink", NULL );
		g_object_set ( m_GstPipeline,"audio-sink", audioSink, NULL );
#endif
	}

	////////////////////////////////////////////////////////////////////////// BUS
	// Set GstBus
	m_GstBus = gst_pipeline_get_bus( GST_PIPELINE( m_GstPipeline ) );

	if ( m_GstPipeline != NULL )
	{
//just add this callback for threaded message handling
#ifdef THREADED_MESSAGE_HANDLER
		gst_bus_add_watch (m_GstBus, onHandleGstMessages, this );
#endif
		// We need to stream the file a little bit in order to be able to retrieve information from it
		gst_element_set_state( m_GstPipeline, GST_STATE_READY );
		gst_element_set_state( m_GstPipeline, GST_STATE_PAUSED );

		// For some reason this is needed in order to gather video information such as size, framerate etc ...
		GstState state;
		gst_element_get_state( m_GstPipeline, &state, NULL, 2 * GST_SECOND );
		m_CurrentPlayState = OPENED;
	}

	// Retrieve and store all relevant Media Information
	retrieveVideoInfo();

	if( !hasVideo() && !hasAudio() )	// is a valid multimedia file?
	{
		close();
		return false;
	}

	// Print Media Info
	printMediaFileInfo();

	// TODO: Check if everything was initialized correctly
	// A file has been opened
	m_bFileIsOpen = true;

	return true;
}