Example #1
 bool VideoImpl::loadMovie(const QString& filename) {
   // Verify that the file exists. (Keep the QByteArray in scope: a pointer
   // taken from a temporary toUtf8() would dangle after this statement.)
   const QByteArray filetestpath_ba = filename.toUtf8();
   const gchar* filetestpath = (const gchar*) filetestpath_ba.constData();
   if (FALSE == g_file_test(filetestpath, G_FILE_TEST_EXISTS))
   {
     qDebug() << "File " << filename << " does not exist";
     return false;
   }

   qDebug() << "Opening movie: " << filename << ".";

   // Assign URI.
   _uri = filename;

   // Free previously allocated structures
   unloadMovie();

   // Prepare handler data.
   _videoIsConnected = false;
   _audioIsConnected = false;

   // Create the empty pipeline.
   _pipeline = gst_pipeline_new ( "video-source-pipeline" );
   if (!_pipeline)
   {
     qWarning() << "Pipeline could not be created.";
     unloadMovie();
     return false;
   }

   // Create and link video components.
   if (!createVideoComponents())
   {
     qWarning() << "Video components could not be initialized.";
     unloadMovie();
     return false;
   }

   //setVolume(0);

   // Listen to the bus.
   _bus = gst_element_get_bus (_pipeline);

   // Start playing.
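   // A sketch of the missing step, assuming VideoImpl exposes a
   // setPlayState() analogous to MediaImpl::setPlayState() in example #4
   // (an assumption; the snippet as published returns without starting
   // the pipeline).
   if (!setPlayState(true))
   {
     unloadMovie();
     return false;
   }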

   return true;
 }
Example #2
    DirectShowMovieTexture::~DirectShowMovieTexture()
    {
        // 1) DEINITIALIZE DIRECT SHOW
        unloadMovie();
        CoUninitialize();
 
        // 2) DESTROY TEXTURE
        Ogre::TextureManager::getSingleton().remove(mTexture->getName());
 
        // 3) DELETE DSDATA
        delete dsdata;
    }
Example #3
bool MoviePlayer::loadMovie(const Common::String &filename, uint z) {
	if (isMovieLoaded())
		unloadMovie();
	// Get the file and load it into the decoder
	Common::SeekableReadStream *in = Kernel::getInstance()->getPackage()->getStream(filename);
	_decoder.loadStream(in);
	_decoder.start();

	GraphicEngine *pGfx = Kernel::getInstance()->getGfx();

#ifdef THEORA_INDIRECT_RENDERING
	_outputBitmap = pGfx->getMainPanel()->addDynamicBitmap(_decoder.getWidth(), _decoder.getHeight());
	if (!_outputBitmap.isValid()) {
		error("Output bitmap for movie playback could not be created.");
		return false;
	}

	// Compute the scaling of the output bitmap so that it fills as much
	// of the screen as possible
	float screenToVideoWidth = (float)pGfx->getDisplayWidth() / (float)_outputBitmap->getWidth();
	float screenToVideoHeight = (float)pGfx->getDisplayHeight() / (float)_outputBitmap->getHeight();
	float scaleFactor = MIN(screenToVideoWidth, screenToVideoHeight);

	// Snap the scale factor to exactly 1.0 when it is within epsilon of it
	if (fabs(scaleFactor - 1.0f) < FLT_EPSILON)
		scaleFactor = 1.0f;

	_outputBitmap->setScaleFactor(scaleFactor);
	_outputBitmap->setZ(z);

	// Center bitmap on screen
	_outputBitmap->setX((pGfx->getDisplayWidth() - _outputBitmap->getWidth()) / 2);
	_outputBitmap->setY((pGfx->getDisplayHeight() - _outputBitmap->getHeight()) / 2);
#else
	_backSurface = pGfx->getSurface();

	_outX = (pGfx->getDisplayWidth() - _decoder.getWidth()) / 2;
	_outY = (pGfx->getDisplayHeight() - _decoder.getHeight()) / 2;

	if (_outX < 0)
		_outX = 0;
	if (_outY < 0)
		_outY = 0;
#endif

	return true;
}
Example #4
bool MediaImpl::setPlayState(bool play)
{
  if (_pipeline == NULL)
    return false;

  GstStateChangeReturn ret = gst_element_set_state (_pipeline, (play ? GST_STATE_PLAYING : GST_STATE_PAUSED));
  if (ret == GST_STATE_CHANGE_FAILURE)
  {
    qDebug() << "Unable to set the pipeline to the" << (play ? "playing" : "paused") << "state.";
    unloadMovie();
    return false;
  }

  _setReady(play);
  return true;
}
Example #5
void MoviePlayer::update() {
	if (_decoder.isVideoLoaded()) {
		if (_decoder.endOfVideo()) {
			// Movie complete, so unload the movie
			unloadMovie();
		} else if (_decoder.needsUpdate()) {
			const Graphics::Surface *s = _decoder.decodeNextFrame();
			if (s) {
				// Transfer the next frame
				assert(s->format.bytesPerPixel == 4);

#ifdef THEORA_INDIRECT_RENDERING
				const byte *frameData = (const byte *)s->getPixels();
				_outputBitmap->setContent(frameData, s->pitch * s->h, 0, s->pitch);
#else
				g_system->copyRectToScreen(s->getPixels(), s->pitch, _outX, _outY, MIN(s->w, _backSurface->w), MIN(s->h, _backSurface->h));
				g_system->updateScreen();
#endif
			}
		}
	}
}
Example #6
    void DirectShowMovieTexture::loadMovie(
        const Ogre::String& moviePath, bool horizontalMirroring)
    {
        HRESULT hr;
 
        // log it!
        Ogre::LogManager::getSingletonPtr()->logMessage(
            Ogre::String("[DSHOW] Loading movie named '")+
            moviePath+"'.");
 
        // destroy previous movie objects (if any)
        unloadMovie();
 
        // create filter graph and get interfaces
        hr=CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
            IID_IGraphBuilder, (void**) &dsdata->pGraph);
        if (FAILED(hr)) throw("[DSHOW] Error in creating graph");
 
        hr=dsdata->pGraph->QueryInterface(IID_IMediaControl, (void**) & dsdata->pControl);
        if (FAILED(hr)) throw("[DSHOW] Error in querying media control");
 
        hr=dsdata->pGraph->QueryInterface(IID_IMediaEvent, (void**) & dsdata->pEvent);
        if (FAILED(hr)) throw("[DSHOW] Error in querying media event");
 
        hr=dsdata->pGraph->QueryInterface(IID_IMediaSeeking, (void**) & dsdata->pSeeking);
        if (FAILED(hr)) throw("[DSHOW] Error in querying seeking interface");
 
        // create sample grabber
        hr=CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
            IID_IBaseFilter, (void**)&dsdata->pGrabberF);
        if (FAILED(hr)) throw("[DSHOW] Error in creating sample grabber");
 
        // add sample grabber to the graph
        hr=dsdata->pGraph->AddFilter(dsdata->pGrabberF, L"Sample Grabber");
        if (FAILED(hr)) throw("[DSHOW] Error in adding sample grabber to the graph");
 
        // get sample grabber object
        hr=dsdata->pGrabberF->QueryInterface(IID_ISampleGrabber,
            (void**)&dsdata->pGrabber);
        if (FAILED(hr)) throw("[DSHOW] Error in querying sample grabber interface");
 
        // set sample grabber media type
        AM_MEDIA_TYPE mt;
        ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
        mt.majortype = MEDIATYPE_Video;
        mt.subtype = MEDIASUBTYPE_RGB24;
        mt.formattype = FORMAT_VideoInfo;
        hr=dsdata->pGrabber->SetMediaType(&mt);
        if (FAILED(hr)) throw("[DSHOW] Error in setting sample grabber media type");
 
                //--------------- Seregvan's modification 
        IBaseFilter* srcFilter; 
        WCHAR* filepath = util_convertCStringToWString(moviePath.c_str());    
        hr = dsdata->pGraph->AddSourceFilter(filepath, L"Source", &srcFilter); 
        if(FAILED(hr)) throw ("[DSHOW] Unsupported media type!"); 
 
        // Connect the src and grabber 
        hr = ConnectFilters(dsdata->pGraph, srcFilter, dsdata->pGrabberF); 
        if(FAILED(hr)) throw ("[DSHOW] Unsupported media type!"); 
 
        IBaseFilter * render;
        hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&render);
        if(FAILED(hr)) throw ("[DSHOW] Error in creating null renderer!");

        hr = dsdata->pGraph->AddFilter(render, L"Render");
        if(FAILED(hr)) throw ("[DSHOW] Error in adding null renderer to the graph!");

        hr = ConnectFilters(dsdata->pGraph, dsdata->pGrabberF, render);
        if(FAILED(hr)) throw ("[DSHOW] Can't connect sample grabber to null renderer!");
 
        //--------------- End of modification
 
        // open the file! (reuse the filepath converted above)
        hr=dsdata->pGraph->RenderFile(filepath, NULL);
        if (FAILED(hr)) throw("[DSHOW] Error opening video file!");
 
        // disable auto show
        // (wouldn't be needed if we used the null renderer)
        hr=dsdata->pGraph->QueryInterface(IID_IVideoWindow, (void**) & dsdata->pWindow);
        if (FAILED(hr)) throw("[DSHOW] Error getting video window interface");
        dsdata->pWindow->put_AutoShow(OAFALSE);
 
        // get video information
        AM_MEDIA_TYPE mtt;
        hr=dsdata->pGrabber->GetConnectedMediaType(&mtt);
        if (FAILED(hr)) throw("[DSHOW] Error getting connected media type info");
 
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*) mtt.pbFormat;
        dsdata->videoWidth=vih->bmiHeader.biWidth;
        dsdata->videoHeight=vih->bmiHeader.biHeight;
        // Free the media type, following the FreeMediaType helper from Microsoft's documentation
        if (mtt.cbFormat != 0)
        {
            CoTaskMemFree((PVOID)mtt.pbFormat);
            mtt.cbFormat = 0;
            mtt.pbFormat = NULL;
        }
        if (mtt.pUnk != NULL)
        {
            mtt.pUnk->Release();
            mtt.pUnk = NULL;
        }
 
        // log it
        Ogre::LogManager::getSingletonPtr()->logMessage(
            Ogre::String("[DSHOW] -> This movie has dimensions: ")+
            Ogre::StringConverter::toString(dsdata->videoWidth)+"x"+
            Ogre::StringConverter::toString(dsdata->videoHeight)+".");
 
        // set sampling options
        dsdata->pGrabber->SetOneShot(FALSE);
        dsdata->pGrabber->SetBufferSamples(TRUE);
 
        // set some basic data
        mHorizontalMirroring=horizontalMirroring;
 
        // clean the texture, so that it's ready for rendering this video
        cleanTextureContents();
    }
Example #7
	void DirectShowMovieTexture::loadMovie(
		const Ogre::String& moviePath, bool horizontalMirroring)
	{
		HRESULT hr;

		// log it!
		Ogre::LogManager::getSingletonPtr()->logMessage(
			Ogre::String("[DSHOW] Loading movie named '")+
			moviePath+"'.");

		// destroy previous movie objects (if any)
		unloadMovie();

		// create filter graph and get interfaces
		hr=CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
			IID_IGraphBuilder, (void**) &dsdata->pGraph);
		if (FAILED(hr)) throw("[DSHOW] Error in creating graph");

		hr=dsdata->pGraph->QueryInterface(IID_IMediaControl, (void**) & dsdata->pControl);
		if (FAILED(hr)) throw("[DSHOW] Error in querying media control");

		hr=dsdata->pGraph->QueryInterface(IID_IMediaEvent, (void**) & dsdata->pEvent);
		if (FAILED(hr)) throw("[DSHOW] Error in querying media event");

		hr=dsdata->pGraph->QueryInterface(IID_IMediaSeeking, (void**) & dsdata->pSeeking);
		if (FAILED(hr)) throw("[DSHOW] Error in querying seeking interface");

		// create sample grabber
		hr=CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
			IID_IBaseFilter, (void**)&dsdata->pGrabberF);
		if (FAILED(hr)) throw("[DSHOW] Error in creating sample grabber");

		// add sample grabber to the graph
		hr=dsdata->pGraph->AddFilter(dsdata->pGrabberF, L"Sample Grabber");
		if (FAILED(hr)) throw("[DSHOW] Error in adding sample grabber to the graph");

		// get sample grabber object
		hr=dsdata->pGrabberF->QueryInterface(IID_ISampleGrabber,
			(void**)&dsdata->pGrabber);
		if (FAILED(hr)) throw("[DSHOW] Error in querying sample grabber interface");

		// set sample grabber media type
		AM_MEDIA_TYPE mt;
		ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
		mt.majortype = MEDIATYPE_Video;
		mt.subtype = MEDIASUBTYPE_RGB24;
		mt.formattype = FORMAT_VideoInfo;
		hr=dsdata->pGrabber->SetMediaType(&mt);
		if (FAILED(hr)) throw("[DSHOW] Error in setting sample grabber media type");

		// open the file!
		WCHAR* filepath=util_convertCStringToWString(moviePath.c_str());
		hr=dsdata->pGraph->RenderFile(filepath, NULL);
		if (FAILED(hr)) throw("[DSHOW] Error opening video file!");

		// disable auto show
		// (wouldn't be needed if we used the null renderer)
		hr=dsdata->pGraph->QueryInterface(IID_IVideoWindow, (void**) & dsdata->pWindow);
		if (FAILED(hr)) throw("[DSHOW] Error getting video window interface");
		dsdata->pWindow->put_AutoShow(OAFALSE);

		// get video information
		AM_MEDIA_TYPE mtt;
		hr=dsdata->pGrabber->GetConnectedMediaType(&mtt);
		if (FAILED(hr)) throw("[DSHOW] Error getting connected media type info");

		VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*) mtt.pbFormat;
		dsdata->videoWidth=vih->bmiHeader.biWidth;
		dsdata->videoHeight=vih->bmiHeader.biHeight;


		ResetSize(dsdata->videoWidth,dsdata->videoHeight);
		// Texture creation was moved here from the constructor so the
		// texture can be sized from the video dimensions, letting the
		// video fill the quad completely.
		mTexture=Ogre::TextureManager::getSingleton().createManual(
			"DirectShowManualTexture",// name
			Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME,
			Ogre::TEX_TYPE_2D,// texture type
			mTexWidth,
			mTexHeight,
			0,// number of mipmaps
			Ogre::PF_BYTE_BGRA,// pixel format
			Ogre::TU_DYNAMIC_WRITE_ONLY_DISCARDABLE// usage
			);

		// Free the media type, following the FreeMediaType helper from Microsoft's documentation
		if (mtt.cbFormat != 0)
		{
			CoTaskMemFree((PVOID)mtt.pbFormat);
			mtt.cbFormat = 0;
			mtt.pbFormat = NULL;
		}
		if (mtt.pUnk != NULL)
		{
			mtt.pUnk->Release();
			mtt.pUnk = NULL;
		}

		// log it
		Ogre::LogManager::getSingletonPtr()->logMessage(
			Ogre::String("[DSHOW] -> This movie has dimensions: ")+
			Ogre::StringConverter::toString(dsdata->videoWidth)+"x"+
			Ogre::StringConverter::toString(dsdata->videoHeight)+".");


		// set sampling options
		dsdata->pGrabber->SetOneShot(FALSE);
		dsdata->pGrabber->SetBufferSamples(TRUE);

		// set some basic data
		mHorizontalMirroring=horizontalMirroring;

		// clean the texture, so that it's ready for rendering this video
		cleanTextureContents();
	}
Example #8
bool MediaImpl::loadMovie(QString filename)
{
    // Keep the QByteArray in scope: a pointer taken from a temporary
    // toUtf8() would dangle after this statement.
    QByteArray filetestpath_ba = filename.toUtf8();
    const gchar* filetestpath = (const gchar*) filetestpath_ba.constData();
    if (FALSE == g_file_test(filetestpath, G_FILE_TEST_EXISTS))
    {
        std::cout << "File " << filetestpath << " does not exist" << std::endl;
        return false;
    }
    _uri = filename;

    qDebug() << "Opening movie: " << filename << ".";

    // Free previously allocated structures
    unloadMovie();

    // Initialize GStreamer.
    GstElement *capsfilter0 = NULL;
    GstElement *videoscale0 = NULL;

    // Create the elements.
    if (_isSharedMemorySource)
    {
        _shmsrc0 = gst_element_factory_make ("shmsrc", "shmsrc0");
        _gdpdepay0 = gst_element_factory_make ("gdpdepay", "gdpdepay0");
        _pollSource = g_timeout_source_new (500);
        g_source_set_callback (_pollSource,
                               gstPollShmsrc,
                               this,
                               NULL);
        g_source_attach (_pollSource, g_main_context_default());
        g_source_unref (_pollSource);
    }
    else {
        _uridecodebin0 = gst_element_factory_make ("uridecodebin", "uridecodebin0");
    }
    _queue0 = gst_element_factory_make ("queue", "queue0");
    _videoconvert0 = gst_element_factory_make ("videoconvert", "videoconvert0");
    videoscale0 = gst_element_factory_make ("videoscale", "videoscale0");
    capsfilter0 = gst_element_factory_make ("capsfilter", "capsfilter0");
    _appsink0 = gst_element_factory_make ("appsink", "appsink0");

    // Prepare handler data.
    _padHandlerData.videoToConnect = _queue0;
    _padHandlerData.videoSink = _appsink0;
    _padHandlerData.videoIsConnected = false;

    _audioqueue0 = gst_element_factory_make ("queue", "audioqueue0");
    _audioconvert0 = gst_element_factory_make ("audioconvert", "audioconvert0");
    _audioresample0 = gst_element_factory_make ("audioresample", "audioresample0");
    _audiovolume0 = gst_element_factory_make ("volume", "audiovolume0");
    _audiosink0 = gst_element_factory_make ("autoaudiosink", "audiosink0");

    _padHandlerData.audioToConnect = _audioqueue0;

    // Create the empty pipeline.
    _pipeline = gst_pipeline_new ( "video-source-pipeline" );

    if (!_pipeline ||
            !_queue0 || !_videoconvert0 || ! videoscale0 || ! capsfilter0 ||
            !_appsink0 || !_audioqueue0 || !_audioconvert0 || !_audioresample0 ||
            !_audiovolume0 || !_audiosink0)
    {
        g_printerr ("Not all elements could be created.\n");

        if (! _pipeline) g_printerr("_pipeline");
        if (! _queue0) g_printerr("_queue0");
        if (! _videoconvert0) g_printerr("_videoconvert0");
        if (! videoscale0) g_printerr("videoscale0");
        if (! capsfilter0) g_printerr("capsfilter0");
        if (! _appsink0) g_printerr("_appsink0");
        if (! _audioqueue0) g_printerr("_audioqueue0");
        if (! _audioconvert0) g_printerr("_audioconvert0");
        if (! _audioresample0) g_printerr("_audioresample0");
        if (! _audiovolume0) g_printerr("_audiovolume0");
        if (! _audiosink0) g_printerr("_audiosink0");

        unloadMovie();
        return false;
    }

    if (_isSharedMemorySource)
    {
        if (! _shmsrc0 || ! _gdpdepay0)
        {
            g_printerr ("Not all elements could be created.\n");
            if (! _shmsrc0) g_printerr("_shmsrc0");
            if (! _gdpdepay0) g_printerr("_gdpdepay0");
            unloadMovie();
            return false;
        }
    }
    else
    {
        if (! _uridecodebin0)
        {
            g_printerr ("Not all elements could be created.\n");
            if (! _uridecodebin0) g_printerr("_uridecodebin0");
            unloadMovie();
            return false;
        }
    }

    // Build the pipeline. Note that we are NOT linking the source at this
    // point. We will do it later.
    gst_bin_add_many (GST_BIN (_pipeline),
                      _isSharedMemorySource ? _shmsrc0 : _uridecodebin0, _queue0,
                      _videoconvert0, videoscale0, capsfilter0, _appsink0,
//    _audioqueue0, _audioconvert0, _audioresample0, _audiovolume0, _audiosink0,
                      NULL);

    // special case for shmsrc
    if (_isSharedMemorySource)
    {
        gst_bin_add (GST_BIN(_pipeline), _gdpdepay0);
        if (! gst_element_link_many (_shmsrc0, _gdpdepay0, _queue0, NULL))
        {
            g_printerr ("Could not link shmsrc, deserializer and video queue.\n");
        }
    }
    // link uridecodebin -> queue will be performed by callback

    if (! gst_element_link_many (_queue0, _videoconvert0, capsfilter0, videoscale0, _appsink0, NULL))
    {
        g_printerr ("Could not link video queue, colorspace converter, caps filter, scaler and app sink.\n");
        unloadMovie();
        return false;
    }

//  if (! gst_element_link_many (_audioqueue0, _audioconvert0, _audioresample0,
//        _audiovolume0, _audiosink0, NULL))
//  {
//    g_printerr ("Could not link audio queue, converter, resampler and audio sink.\n");
//    unloadMovie();
//    return false;
//  }

    // Process URI.
    QByteArray ba = filename.toLocal8Bit();
    gchar* uri = g_strdup((gchar*) filename.toUtf8().constData());
    if (! _isSharedMemorySource && ! gst_uri_is_valid(uri))
    {
        // Try to convert filename to URI.
        GError* error = NULL;
        qDebug() << "Calling gst_filename_to_uri : " << uri;
        gchar* converted = gst_filename_to_uri(uri, &error);
        g_free(uri);
        if (error)
        {
            qDebug() << "Filename to URI error: " << error->message;
            g_error_free(error);
            freeResources();
            return false;
        }
        uri = converted;
    }

    if (_isSharedMemorySource)
    {
        // Duplicate so the g_free(uri) below is valid for this branch too.
        g_free(uri);
        uri = g_strdup((gchar*) ba.data());
    }

    // Set URI to be played.
    qDebug() << "URI for uridecodebin: " << uri;
    // FIXME: sometimes it's just the path to the directory that is given, not the file itself.

    // Connect to the pad-added signal
    if (! _isSharedMemorySource)
    {
        g_signal_connect (_uridecodebin0, "pad-added", G_CALLBACK (MediaImpl::gstPadAddedCallback), &_padHandlerData);
        g_object_set (_uridecodebin0, "uri", uri, NULL);
    }
    else
    {
        //qDebug() << "LIVE mode" << uri;
        g_object_set (_shmsrc0, "socket-path", uri, NULL);
        g_object_set (_shmsrc0, "is-live", TRUE, NULL);
        _padHandlerData.videoIsConnected = true;
    }
    g_free(uri);

    // Configure audio appsink.
    // TODO: change from mono to stereo
//  gchar* audioCapsText = g_strdup_printf ("audio/x-raw-float,channels=1,rate=%d,signed=(boolean)true,width=%d,depth=%d,endianness=BYTE_ORDER",
//                                          Engine::signalInfo().sampleRate(), (int)(sizeof(Signal_T)*8), (int)(sizeof(Signal_T)*8) );
//  GstCaps* audioCaps = gst_caps_from_string (audioCapsText);
//  g_object_set (_audioSink, "emit-signals", TRUE,
//                            "caps", audioCaps,
////                            "max-buffers", 1,     // only one buffer (the last) is maintained in the queue
////                            "drop", TRUE,         // ... other buffers are dropped
//                            NULL);
//  g_signal_connect (_audioSink, "new-buffer", G_CALLBACK (VideoImpl::gstNewAudioBufferCallback), &_newAudioBufferHandlerData);
//  gst_caps_unref (audioCaps);
//  g_free (audioCapsText);


    // Configure video appsink.
    GstCaps *videoCaps = gst_caps_from_string ("video/x-raw,format=RGBA");
    g_object_set (capsfilter0, "caps", videoCaps, NULL);
    g_object_set (_appsink0, "emit-signals", TRUE,
                  "max-buffers", 1,     // only one buffer (the last) is maintained in the queue
                  "drop", TRUE,         // ... other buffers are dropped
                  "sync", TRUE,
                  NULL);
    g_signal_connect (_appsink0, "new-sample", G_CALLBACK (MediaImpl::gstNewSampleCallback), this);
    gst_caps_unref (videoCaps);

    g_object_set (_audiovolume0, "mute", false, NULL);
    g_object_set (_audiovolume0, "volume", 0.0, NULL);

    // Listen to the bus.
    _bus = gst_element_get_bus (_pipeline);

    // Start playing.
    if (! _isSharedMemorySource && ! setPlayState(true))
    {
        return false;
    }

    return true;
}
Example #9
DShowMoviePlayer::~DShowMoviePlayer (void)
{
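	// Tear down the loaded movie and release its DirectShow resources.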
	unloadMovie ();
}
Example #10
bool MediaImpl::loadMovie(QString filename)
{
  _uri = filename;

  qDebug() << "Opening movie: " << filename << ".";
  this->_frame = NULL;

  // Free previously allocated structures
  unloadMovie();

  //_firstFrameTime=_formatContext->start_time;

  // Initialize GStreamer.
  gst_init (NULL, NULL);
  GstElement *capsFilter = NULL;
  GstElement *videoScale = NULL;

  // Create the elements.
  _source =          gst_element_factory_make ("uridecodebin", "source");

//  _audioQueue =      gst_element_factory_make ("queue", "aqueue");
//  _audioConvert =    gst_element_factory_make ("audioconvert", "aconvert");
//  _audioResample =   gst_element_factory_make ("audioresample", "aresample");
//  _audioSink =       gst_element_factory_make ("appsink", "asink");
//
  _videoQueue =      gst_element_factory_make ("queue", "vqueue");
  _videoColorSpace = gst_element_factory_make ("videoconvert", "vcolorspace");
  videoScale = gst_element_factory_make ("videoscale", "videoscale0");
  capsFilter = gst_element_factory_make ("capsfilter", "capsfilter0");
  _videoSink =       gst_element_factory_make ("appsink", "vsink");

  // Prepare handler data.
//  _padHandlerData.audioToConnect   = _audioQueue;
  _padHandlerData.videoToConnect   = _videoQueue;
  _padHandlerData.videoSink        = _videoSink;
  //_padHandlerData.audioIsConnected = false;
  _padHandlerData.videoIsConnected = false;

//  _newAudioBufferHandlerData.audioSink          = _audioSink;
//  _newAudioBufferHandlerData.audioBufferAdapter = _audioBufferAdapter;

  // Create the empty pipeline.
  _pipeline = gst_pipeline_new ( "video-source-pipeline" );

  if (!_pipeline || !_source ||
//      !_audioQueue || !_audioConvert || !_audioResample || !_audioSink ||
      !_videoQueue || !_videoColorSpace || ! videoScale || ! capsFilter || ! _videoSink)
  {
    g_printerr ("Not all elements could be created.\n");
    unloadMovie();
    return false;
  }

  // Build the pipeline. Note that we are NOT linking the source at this
  // point. We will do it later.
  gst_bin_add_many (GST_BIN (_pipeline), _source,
//                    _audioQueue, _audioConvert, _audioResample, _audioSink,
                    _videoQueue, _videoColorSpace, videoScale, capsFilter, _videoSink, NULL);

//  if (!gst_element_link_many(_audioQueue, _audioConvert, _audioResample, _audioSink, NULL)) {
//    g_printerr ("Audio elements could not be linked.\n");
//    unloadMovie();
//    return false;
//  }

  if (!gst_element_link_many (_videoQueue, _videoColorSpace, capsFilter, videoScale, _videoSink, NULL)) {
    g_printerr ("Video elements could not be linked.\n");
    unloadMovie();
    return false;
  }

  // Process URI. (Keep the QByteArray alive and duplicate the string, so the
  // pointer does not dangle and can be freed uniformly below.)
  QByteArray uriBytes = filename.toUtf8();
  gchar* uri = g_strdup(uriBytes.constData());
  if (!gst_uri_is_valid(uri))
  {
    // Try to convert filename to URI.
    GError* error = NULL;
    gchar* converted = gst_filename_to_uri(uri, &error);
    g_free(uri);
    if (error) {
      qDebug() << "Filename to URI error: " << error->message;
      g_error_free(error);
      freeResources();
      return false;
    }
    uri = converted;
  }

  // Set URI to be played.
  qDebug() << "URI for uridecodebin: " << uri;
  // FIXME: sometimes it's just the path to the directory that is given, not the file itself.
  g_object_set (_source, "uri", uri, NULL);
  g_free(uri);
  // Connect to the pad-added signal
  g_signal_connect (_source, "pad-added", G_CALLBACK (MediaImpl::gstPadAddedCallback), &_padHandlerData);

  // Configure audio appsink.
  // TODO: change from mono to stereo
//  gchar* audioCapsText = g_strdup_printf ("audio/x-raw-float,channels=1,rate=%d,signed=(boolean)true,width=%d,depth=%d,endianness=BYTE_ORDER",
//                                          Engine::signalInfo().sampleRate(), (int)(sizeof(Signal_T)*8), (int)(sizeof(Signal_T)*8) );
//  GstCaps* audioCaps = gst_caps_from_string (audioCapsText);
//  g_object_set (_audioSink, "emit-signals", TRUE,
//                            "caps", audioCaps,
////                            "max-buffers", 1,     // only one buffer (the last) is maintained in the queue
////                            "drop", TRUE,         // ... other buffers are dropped
//                            NULL);
//  g_signal_connect (_audioSink, "new-buffer", G_CALLBACK (VideoImpl::gstNewAudioBufferCallback), &_newAudioBufferHandlerData);
//  gst_caps_unref (audioCaps);
//  g_free (audioCapsText);

  // Configure video appsink.
//  GstCaps *videoCaps = gst_caps_from_string ("video/x-raw-rgb");
  GstCaps *videoCaps = gst_caps_from_string ("video/x-raw,format=RGBA");
  g_object_set (capsFilter, "caps", videoCaps, NULL);
  g_object_set (_videoSink, "emit-signals", TRUE,
                            "max-buffers", 1,     // only one buffer (the last) is maintained in the queue
                            "drop", TRUE,         // ... other buffers are dropped
                            NULL);
  g_signal_connect (_videoSink, "new-sample", G_CALLBACK (MediaImpl::gstNewSampleCallback), this);
  gst_caps_unref (videoCaps);

  // Listen to the bus.
  _bus = gst_element_get_bus (_pipeline);

  // Start playing.
  if (!setPlayState(true))
    return false;

  qDebug() << "Pipeline started.";

  //_movieReady = true;
  return true;
}