// Esempio n. 1 (Example no. 1)
// 0
// Builds the DirectShow preview graph for the current video input device and
// starts playback.
// Returns TRUE if the graph is running, FALSE on any failure.
BOOL CBoxView::VPlaying()
{
	if (m_pGraph == NULL)
	{
		return FALSE;
	}

	CComPtr<IBaseFilter> pVideoInputFilter;
	HRESULT hr = FindInputFilters((void**)&pVideoInputFilter, CLSID_VideoInputDeviceCategory);
	if (pVideoInputFilter != NULL)
	{
		hr = m_pGraph->AddFilter(pVideoInputFilter, L"VCapture");
		// Apply the stored video-proc-amp settings (brightness etc.).
		SetVideoProcAmp(pVideoInputFilter);

		CComPtr<IPin>			pAudio			= NULL;

		CComPtr<IPin>			pPinTemp		= NULL;
		CComPtr<IEnumPins>		pienumpins		= NULL;
		pVideoInputFilter->EnumPins(&pienumpins);
		while (pienumpins != NULL && pienumpins->Next(1, &pPinTemp, NULL) == S_OK)
		{
			PIN_DIRECTION ThisPinDir;
			pPinTemp->QueryDirection(&ThisPinDir);
			if (ThisPinDir == PINDIR_OUTPUT)
			{
				PIN_INFO pininfo;
				pininfo.pFilter = NULL;
				pPinTemp->QueryPinInfo(&pininfo);
				// FIX: QueryPinInfo AddRef's pininfo.pFilter; release it or it leaks.
				if (pininfo.pFilter != NULL)
				{
					pininfo.pFilter->Release();
					pininfo.pFilter = NULL;
				}
				if (StrStr(pininfo.achName, _T("Audio")))
				{	// Audio pin found: render it directly.
					hr = m_pGraph->Render(pPinTemp);
					pAudio = pPinTemp;
					break;
				}
				else if (StrStr(pininfo.achName, _T("捕获"))     ||
						StrStr(pininfo.achName, _T("Capture"))
					)
				{	// Force the capture filter's output to the desired width/height
					// if the pin advertises a matching media type.
					CComPtr<IEnumMediaTypes> 	ptypes			= NULL;
					AM_MEDIA_TYPE				*ptype			= NULL;
					VIDEOINFOHEADER				*pvideoInfo		= NULL;

					pPinTemp->EnumMediaTypes(&ptypes);
					while (ptypes != NULL && ptypes->Next(1, &ptype, NULL) == S_OK)
					{
						if (ptype->majortype == MEDIATYPE_Video)
						{
							pvideoInfo = (VIDEOINFOHEADER *)ptype->pbFormat;
							if (m_DesiredHeight != 0									&&
								m_DesiredWidth  != 0									&&
								abs(pvideoInfo->rcSource.right)  == m_DesiredWidth		&&
								abs(pvideoInfo->rcSource.bottom) == m_DesiredHeight
								)
							{	// Prefer the pin's own media type when it matches the
								// requested dimensions.
								CComPtr<IAMStreamConfig>		pVSC = NULL;	// for video cap
								hr = m_pCGB->FindInterface(&PIN_CATEGORY_CAPTURE,
									&MEDIATYPE_Video, pVideoInputFilter,
									IID_IAMStreamConfig, (void **)&pVSC);

								if (pVSC)
								{
									// NOTE: AvgTimePerFrame is the frame *duration* in
									// 100 ns units (REFERENCE_TIME), not a frame rate.
									LONGLONG nFrame = pvideoInfo->AvgTimePerFrame;
									CString str;
									str.Format(_T("FRAME: %d\n"), (int)nFrame);
									OutputDebugString(str);

									pVSC->SetFormat(ptype);
								}
							}
						}
						// FIX: delete every enumerated media type, not only the matching
						// one; each AM_MEDIA_TYPE returned by Next() is caller-owned.
						DeleteMediaType(ptype);
						ptype = NULL;
					}
				}
			}
			pPinTemp.Release();
		}


		if (pAudio)
		{	// A capture card: let the capture graph builder render the preview stream.
			hr = m_pCGB->RenderStream(&PIN_CATEGORY_PREVIEW,
				&MEDIATYPE_Video,
				pVideoInputFilter,
				NULL,
				NULL);
			if (FAILED(hr))
				return FALSE;
		}
		else
		{	// PCI capture cards expose a video interface; the box filter does not,
			// so build the preview chain manually: capture -> tee -> MJPEG -> renderer.
			CComPtr<IPin>			pInPin			= NULL;
			CComPtr<IPin>			pOutPin			= NULL;
			CComPtr<IBaseFilter>	smarttee		= NULL;
			CComPtr<IBaseFilter>	decompression	= NULL;
			CComPtr<IBaseFilter>	pPreview		= NULL;

			hr = CoCreateInstance(CLSID_VideoRenderer, 0, CLSCTX_ALL, IID_IBaseFilter, (void**)&pPreview);
			if (FAILED(hr) || pPreview == NULL)		// FIX: result was unchecked
				return FALSE;
			//hr = CoCreateInstance(CLSID_FFDSHOW, 0, CLSCTX_ALL, IID_IBaseFilter, (void**)&decompression);
			if (decompression == NULL)
			{
				hr = CoCreateInstance(CLSID_SmartTee, 0, CLSCTX_ALL, IID_IBaseFilter, (void**)&smarttee);
				if (FAILED(hr) || smarttee == NULL)	// FIX: result was unchecked
					return FALSE;
				hr = CoCreateInstance(CLSID_MjpegDec, 0, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&decompression);
				if (hr != S_OK)
					return FALSE;

				{
					// Locate the smart tee's unconnected "Preview" output pin.
					CComPtr<IEnumPins>		pEnum = 0;
					CComPtr<IPin>			pPin = 0;
					HRESULT hrPins = smarttee->EnumPins(&pEnum);
					if (FAILED(hrPins))
					{
						// FIX: this function returns BOOL; returning the raw HRESULT
						// here would map a failure code to TRUE.
						return FALSE;
					}
					while (pEnum->Next(1, &pPin, NULL) == S_OK)
					{
						PIN_DIRECTION ThisPinDir;
						pPin->QueryDirection(&ThisPinDir);
						if (ThisPinDir == PINDIR_OUTPUT)
						{
							CComPtr<IPin> pTmp = 0;
							hrPins = pPin->ConnectedTo(&pTmp);
							if (SUCCEEDED(hrPins)) // Already connected--not the pin we want
							{
								pTmp.Release();
							}
							else // Unconnected--this is the pin we want
							{
								PIN_INFO info;
								info.pFilter = NULL;
								pPin->QueryPinInfo(&info);
								// FIX: release the AddRef'd filter from QueryPinInfo.
								if (info.pFilter != NULL)
								{
									info.pFilter->Release();
									info.pFilter = NULL;
								}
								if (_tcsstr(info.achName, _T("Preview")))
								{
									pOutPin = pPin;
									break;
								}
							}
						}
						pPin.Release();
					}
					pEnum.Release();
				}

				hr = m_pGraph->AddFilter(smarttee, L"CLSID_SmartTee");
				hr = m_pGraph->AddFilter(decompression, L"CLSID_MJPEG");
				hr = m_pGraph->AddFilter(pPreview, L"CLSID_VideoRenderer");

				ConnectFilters(m_pGraph, pVideoInputFilter, smarttee);
				ConnectFilters(m_pGraph, pOutPin, decompression);
				ConnectFilters(m_pGraph, decompression, pPreview);
			}
			else
			{	// FFDShow decoder path (its creation above is currently disabled).
				hr = m_pGraph->AddFilter(decompression, L"CLSID_FFDSHOW");
				hr = m_pGraph->AddFilter(pPreview, L"CLSID_VideoRenderer");

				ConnectFilters(m_pGraph, pVideoInputFilter, decompression);
				ConnectFilters(m_pGraph, decompression, pPreview);
			}
		}

		// Route device-lost notifications to the control window.
		hr = m_pGraph->QueryInterface(IID_IMediaEventEx, (void **)&m_pME);
		if (hr == NOERROR)
		{
			m_pME->SetNotifyWindow((OAHWND)m_pCtrlWnd->GetSafeHwnd(), WM_FGNOTIFY, 0);
		}

		hr = m_pGraph->QueryInterface(IID_IVideoWindow, (void **)&m_pVW);
		if (FAILED(hr) || m_pVW == NULL)	// FIX: guard before dereferencing m_pVW
			return FALSE;
		hr = m_pVW->put_AutoShow(OAFALSE);

		if (FAILED(hr))
			return FALSE;

		// Re-parent the video window into the display window and show it.
		m_pVW->put_Visible(OAFALSE);
		m_pVW->put_WindowStyle(WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
		m_pVW->put_WindowStyleEx(WS_EX_ACCEPTFILES);
		m_pVW->put_Owner((OAHWND)m_pShowWnd->GetSafeHwnd());
		m_pVW->put_Visible(OATRUE);

		ReSize(m_vedioRect);

		hr = m_pControl->Run();

		return SUCCEEDED(hr);
	}

	return FALSE;
}
// Initializes capture from camera number `cameraID` (1-based: use 1 for the
// first device) into an OpenGL texture via a SampleGrabber -> NullRenderer
// graph. Returns S_OK on success, a failure HRESULT otherwise.
HRESULT VideoTexture::init(int cameraID)
{
	if (cameraID <= 0) return S_FALSE;

	glEnable(GL_TEXTURE_2D);

	// Texture -> This will be put into the camera module
	glGenTextures(1, textures);					// Create The Texture
	// Typical Texture Generation Using Data From The Bitmap
	for (int i = 0; i < 1; i++)
	{
		glBindTexture(GL_TEXTURE_2D, textures[i]);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);	// Linear Filtering
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);	// Linear Filtering
		// Allocate texture storage now; frame pixels are uploaded later.
		glTexImage2D(GL_TEXTURE_2D, 0, 3, TEXTURE_WIDTH, TEXTURE_HEIGHT, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
	}

	// Video stuff:
	// Create capture graph builder:
	HRESULT hr = InitCaptureGraphBuilder(&pGraph, &pBuild);
	if (FAILED(hr)) return hr;
	IEnumMoniker *enumerator = NULL;
	hr = EnumerateDevices(CLSID_VideoInputDeviceCategory, &enumerator);
	// FIX: the enumerator was used without checking it exists.
	if (FAILED(hr) || enumerator == NULL) return FAILED(hr) ? hr : E_FAIL;
	// Skip ahead to the requested camera (1-based index).
	IMoniker *pMoniker = NULL;
	for (int i = 0; i < cameraID; i++)
	{
		if (pMoniker != NULL)
		{
			pMoniker->Release();	// FIX: release monikers we skip past
			pMoniker = NULL;
		}
		// FIX: Next() returns S_FALSE when fewer devices remain; the old code
		// ignored the result and dereferenced a NULL moniker below.
		if (enumerator->Next(1, &pMoniker, NULL) != S_OK)
		{
			pMoniker = NULL;
			break;
		}
	}
	enumerator->Release();			// FIX: enumerator was leaked
	if (pMoniker == NULL) return E_FAIL;	// fewer cameras than cameraID
	IBaseFilter *pCap = NULL;
	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pCap);
	pMoniker->Release();			// FIX: moniker was leaked
	if (SUCCEEDED(hr))
	{
		hr = pGraph->AddFilter(pCap, L"Capture Filter");
		if (FAILED(hr)) return hr;
	}
	else return hr;

	// Create the Sample Grabber which we will use
	// To take each frame for texture generation
	hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
							IID_ISampleGrabber, (void **)&pGrabber);
	if (FAILED(hr)) return hr;
	hr = pGrabber->QueryInterface(IID_IBaseFilter, (void **)&pGrabberBase);
	if (FAILED(hr)) return hr;	// FIX: was unchecked
	// We have to set the 24-bit RGB desire here
	// So that the proper conversion filters
	// Are added automatically.
	AM_MEDIA_TYPE desiredType;
	memset(&desiredType, 0, sizeof(desiredType));
	desiredType.majortype = MEDIATYPE_Video;
	desiredType.subtype = MEDIASUBTYPE_RGB24;
	desiredType.formattype = FORMAT_VideoInfo;
	pGrabber->SetMediaType(&desiredType);
	pGrabber->SetBufferSamples(TRUE);
	// add to Graph
	pGraph->AddFilter(pGrabberBase, L"Grabber");

	/* Null render filter */
	hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&pNullRender);
	if (FAILED(hr)) return hr;
	pGraph->AddFilter(pNullRender, L"Render");

	// Connect the graph
	hr = ConnectFilters(pGraph, pCap, pGrabberBase);
	if (FAILED(hr)) return hr;
	hr = ConnectFilters(pGraph, pGrabberBase, pNullRender);
	if (FAILED(hr)) return hr;	// FIX: was unchecked

	// Set output format of capture:
	IAMStreamConfig *pConfig = NULL;
	hr = pBuild->FindInterface(
				&PIN_CATEGORY_CAPTURE, // Capture pin.
				0,    // Any media type.
				pCap, // Pointer to the capture filter.
				IID_IAMStreamConfig, (void**)&pConfig);
	if (FAILED(hr)) return hr;

	// Try and find a good video format
	int iCount = 0, iSize = 0;
	hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);
	// Check the size to make sure we pass in the correct structure.
	if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
	{
		// Use the video capabilities structure.
		for (int iFormat = 0; iFormat < iCount; iFormat++)
		{
			VIDEO_STREAM_CONFIG_CAPS scc;
			AM_MEDIA_TYPE *pmtConfig;
			hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
			if (SUCCEEDED(hr))
			{
				VIDEOINFOHEADER *hdr = (VIDEOINFOHEADER *)pmtConfig->pbFormat;
				if (hdr->bmiHeader.biWidth == CAM_WIDTH &&
					hdr->bmiHeader.biHeight == CAM_HEIGHT &&
					hdr->bmiHeader.biBitCount == 24)
				{
					pConfig->SetFormat(pmtConfig);
				}
				// FIX: each media type returned by GetStreamCaps is caller-owned
				// and was leaked once per iteration.
				DeleteMediaType(pmtConfig);
			}
		}
	}
	pConfig->Release();

	// Set camera stuff
	IAMCameraControl *pCamControl = NULL;
	hr = pCap->QueryInterface(IID_IAMCameraControl, (void **)&pCamControl);
	if (FAILED(hr)) return hr;
	// Get the range and default value.
	long Min, Max, Step, Default, Flags;
	// For getting: long Val;
	hr = pCamControl->GetRange(CameraControl_Focus, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pCamControl->Set(CameraControl_Focus, 0, CameraControl_Flags_Manual);
#if 0
	hr = pCamControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pCamControl->Set(CameraControl_Exposure, -4, CameraControl_Flags_Manual);
#endif
	pCamControl->Release();
	IAMVideoProcAmp *pProcAmp = 0;
	hr = pCap->QueryInterface(IID_IAMVideoProcAmp, (void**)&pProcAmp);
	if (FAILED(hr)) return hr;
#if 0
	hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Brightness, 30, VideoProcAmp_Flags_Manual);
	hr = pProcAmp->GetRange(VideoProcAmp_Gain, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Gain, 30, VideoProcAmp_Flags_Manual);
	hr = pProcAmp->GetRange(VideoProcAmp_WhiteBalance, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_WhiteBalance, 4500, VideoProcAmp_Flags_Manual);
	hr = pProcAmp->GetRange(VideoProcAmp_Saturation, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Saturation, 100, VideoProcAmp_Flags_Manual);
	hr = pProcAmp->GetRange(VideoProcAmp_Contrast, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Contrast, 6, VideoProcAmp_Flags_Manual);
#endif
	pProcAmp->Release();
	// FIX: drop our local reference; the graph keeps its own from AddFilter.
	// NOTE(review): earlier error returns still leak pCap/pGrabber etc. — a full
	// cleanup would need RAII wrappers (CComPtr); confirm before refactoring.
	pCap->Release();

	hr = pMediaControl->Run();
	return hr;
}
// Builds a DirectShow filter graph that decodes an MP3 stream
// (SourceFilter -> MPEG-1 splitter -> MP3 decoder -> audio sink filter),
// starts the graph running, and fills *aInfo with the negotiated audio
// parameters (channels, rate, bit depth) and the parsed duration.
// *aTags is always set to nullptr (ID3v2 tags are stripped by the source).
// Returns NS_OK on success; NS_ERROR_FAILURE if any graph-building step fails.
nsresult
DirectShowReader::ReadMetadata(MediaInfo* aInfo,
                               MetadataTags** aTags)
{
  MOZ_ASSERT(OnTaskQueue());
  HRESULT hr;
  nsresult rv;

  // Create the filter graph, reference it by the GraphBuilder interface,
  // to make graph building more convenient.
  hr = CoCreateInstance(CLSID_FilterGraph,
                        nullptr,
                        CLSCTX_INPROC_SERVER,
                        IID_IGraphBuilder,
                        reinterpret_cast<void**>(static_cast<IGraphBuilder**>(byRef(mGraph))));
  NS_ENSURE_TRUE(SUCCEEDED(hr) && mGraph, NS_ERROR_FAILURE);

  // Parse the MP3 frame headers up front; the parser supplies the MP3 data
  // offset for the source filter and the duration reported below.
  rv = ParseMP3Headers(&mMP3FrameParser, mDecoder->GetResource());
  NS_ENSURE_SUCCESS(rv, rv);

  #ifdef DEBUG
  // Add the graph to the Running Object Table so that we can connect
  // to this graph with GraphEdit/GraphStudio. Note: on Vista and up you must
  // also regsvr32 proppage.dll from the Windows SDK.
  // See: http://msdn.microsoft.com/en-us/library/ms787252(VS.85).aspx
  hr = AddGraphToRunningObjectTable(mGraph, &mRotRegister);
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
  #endif

  // Extract the interface pointers we'll need from the filter graph.
  hr = mGraph->QueryInterface(static_cast<IMediaControl**>(byRef(mControl)));
  NS_ENSURE_TRUE(SUCCEEDED(hr) && mControl, NS_ERROR_FAILURE);

  hr = mGraph->QueryInterface(static_cast<IMediaSeeking**>(byRef(mMediaSeeking)));
  NS_ENSURE_TRUE(SUCCEEDED(hr) && mMediaSeeking, NS_ERROR_FAILURE);

  // Build the graph. Create the filters we need, and connect them. We
  // build the entire graph ourselves to prevent other decoders installed
  // on the system being created and used.

  // Our source filters, wraps the MediaResource.
  mSourceFilter = new SourceFilter(MEDIATYPE_Stream, MEDIASUBTYPE_MPEG1Audio);
  NS_ENSURE_TRUE(mSourceFilter, NS_ERROR_FAILURE);

  rv = mSourceFilter->Init(mDecoder->GetResource(), mMP3FrameParser.GetMP3Offset());
  NS_ENSURE_SUCCESS(rv, rv);

  hr = mGraph->AddFilter(mSourceFilter, L"MozillaDirectShowSource");
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  // The MPEG demuxer.
  RefPtr<IBaseFilter> demuxer;
  hr = CreateAndAddFilter(mGraph,
                          CLSID_MPEG1Splitter,
                          L"MPEG1Splitter",
                          byRef(demuxer));
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  // Platform MP3 decoder.
  RefPtr<IBaseFilter> decoder;
  // Firstly try to create the MP3 decoder filter that ships with WinXP
  // directly. This filter doesn't normally exist on later versions of
  // Windows.
  hr = CreateAndAddFilter(mGraph,
                          CLSID_MPEG_LAYER_3_DECODER_FILTER,
                          L"MPEG Layer 3 Decoder",
                          byRef(decoder));
  if (FAILED(hr)) {
    // Failed to create MP3 decoder filter. Try to instantiate
    // the MP3 decoder DMO.
    hr = AddMP3DMOWrapperFilter(mGraph, byRef(decoder));
    NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
  }

  // Sink, captures audio samples and inserts them into our pipeline.
  static const wchar_t* AudioSinkFilterName = L"MozAudioSinkFilter";
  mAudioSinkFilter = new AudioSinkFilter(AudioSinkFilterName, &hr);
  NS_ENSURE_TRUE(mAudioSinkFilter && SUCCEEDED(hr), NS_ERROR_FAILURE);
  hr = mGraph->AddFilter(mAudioSinkFilter, AudioSinkFilterName);
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  // Join the filters.
  hr = ConnectFilters(mGraph, mSourceFilter, demuxer);
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  hr = ConnectFilters(mGraph, demuxer, decoder);
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  hr = ConnectFilters(mGraph, decoder, mAudioSinkFilter);
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  // Query the format the sink connected with; only uncompressed PCM is
  // supported by the rest of this reader.
  WAVEFORMATEX format;
  mAudioSinkFilter->GetSampleSink()->GetAudioFormat(&format);
  NS_ENSURE_TRUE(format.wFormatTag == WAVE_FORMAT_PCM, NS_ERROR_FAILURE);

  mInfo.mAudio.mChannels = mNumChannels = format.nChannels;
  mInfo.mAudio.mRate = mAudioRate = format.nSamplesPerSec;
  mInfo.mAudio.mBitDepth = format.wBitsPerSample;
  mBytesPerSample = format.wBitsPerSample / 8;

  // Begin decoding!
  hr = mControl->Run();
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  DWORD seekCaps = 0;
  hr = mMediaSeeking->GetCapabilities(&seekCaps);

  // NOTE(review): the duration is only published when GetCapabilities()
  // succeeded, even though it comes from the MP3 parser, not the graph —
  // presumably duration is only meaningful alongside seeking; confirm.
  int64_t duration = mMP3FrameParser.GetDuration();
  if (SUCCEEDED(hr)) {
    mInfo.mMetadataDuration.emplace(TimeUnit::FromMicroseconds(duration));
  }

  LOG("Successfully initialized DirectShow MP3 decoder.");
  LOG("Channels=%u Hz=%u duration=%lld bytesPerSample=%d",
      mInfo.mAudio.mChannels,
      mInfo.mAudio.mRate,
      RefTimeToUsecs(duration),
      mBytesPerSample);

  *aInfo = mInfo;
  // Note: The SourceFilter strips ID3v2 tags out of the stream.
  *aTags = nullptr;

  return NS_OK;
}
// Esempio n. 4 (Example no. 4)
// 0
bool CVideoRenderer::Initialize ( const char * szFile )
{
    IBaseFilter * pDSound, * pXVID, * pVorbis;
    IBaseFilter * pSource;
    IFileSourceFilter * pFileSource;
    HRESULT hr;

    // Get the codecs
    CVideoManager *pManager = CVideoManager::GetSingletonPtr ();
    if ( pManager->CreateCodecSource ( &pSource ) != S_OK ) return false;
    if ( pManager->CreateCodecVorbis ( &pVorbis ) != S_OK ) return false;
    if ( pManager->CreateCodecXVID ( &pXVID ) != S_OK ) return false;

    // Check for a valid device
    if ( !m_pDevice ) return false;

    // Lock so we don't f**k up
    Lock ();

    CCore::GetSingleton ().GetConsole ()->Printf ( "Creating DirectShow graph instance" );

    // Initialize the graph builder
    CoCreateInstance ( CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, reinterpret_cast < void** > ( &m_pGraph ) );
    if ( m_pGraph == NULL ) return false;

    CCore::GetSingleton ().GetConsole ()->Printf ( "Creating DirectSound renderer instance" );

    // Initialize the DirectSound filter
    CoCreateInstance ( CLSID_DSoundRender, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, reinterpret_cast < void** > ( &pDSound ) );
    if ( pDSound == NULL ) return false;

#ifdef MTA_DEBUG
    CCore::GetSingleton ().GetConsole ()->Printf ( "Adding ROT for debug stuff" );

    // Enable GraphView debugging
    AddToROT(m_pGraph);
#endif

    CCore::GetSingleton ().GetConsole ()->Printf ( "Creating video renderer instance" );

    // Create an instance of the texture renderer and add it to the graph
    m_pFilter = CreateTextureRenderer ( &hr, m_pDevice, this );
    if ( hr != S_OK ) return false;

    // Add the source file filter to the grap h
    int iBufferSize = MultiByteToWideChar ( CP_ACP, 0, szFile, -1, NULL, 0 );
    wchar_t *wszFile = new wchar_t[iBufferSize];
    MultiByteToWideChar ( CP_ACP, 0, szFile, -1, wszFile, iBufferSize );

    CCore::GetSingleton ().GetConsole ()->Printf ( "Registering filter (Matroska)" );

    // Add the filters to the graph
    m_pGraph->AddFilter         ( pSource,      L"[MTA] MKV source" );
    
    CCore::GetSingleton ().GetConsole ()->Printf ( "Loading video file" );

    pSource->QueryInterface ( IID_IFileSourceFilter, reinterpret_cast < void** > ( &pFileSource ) );
    if ( pFileSource->Load ( wszFile, NULL ) != S_OK ) return false;

    CCore::GetSingleton ().GetConsole ()->Printf ( "Registering filter (Output)" );

    m_pGraph->AddFilter         ( m_pFilter,    L"[MTA] Texture renderer" );
    CCore::GetSingleton ().GetConsole ()->Printf ( "Registering filter (Vorbis)" );
    m_pGraph->AddFilter         ( pVorbis,      L"[MTA] Vorbis decoder" );
    CCore::GetSingleton ().GetConsole ()->Printf ( "Registering filter (XVID)" );
    m_pGraph->AddFilter         ( pXVID,        L"[MTA] XVID codec" );
    CCore::GetSingleton ().GetConsole ()->Printf ( "Registering filter (DirectSound)" );
    m_pGraph->AddFilter         ( pDSound,      L"[MTA] DirectSound renderer" );

    CCore::GetSingleton ().GetConsole ()->Printf ( "Connecting video renderer" );

    // Connect the video pins
    IPin *pOut, *pSourceOut;
    hr = ConnectFilters ( m_pGraph, pSource, pXVID, &pSourceOut );      // MKV Source       -> XVID
    assert ( hr == S_OK );
    hr = ConnectFilters ( m_pGraph, pXVID, m_pFilter, &pOut );          // XVID             -> Texture Renderer
    assert ( hr == S_OK );

    // Connect the audio pins (not necessary)
    hr = ConnectFilters ( m_pGraph, pSource, pVorbis, &pOut );          // MKV Source       -> Vorbis Decoder
    hr = ConnectFilters ( m_pGraph, pVorbis, pDSound, &pOut );          // Vorbis Decoder   -> DirectSound renderer

    m_pGraph->QueryInterface ( IID_IMediaSeeking, reinterpret_cast < void** > ( &m_pMediaSeeking ) );
    assert ( m_pMediaSeeking != NULL );
    m_pGraph->QueryInterface ( IID_IMediaControl, reinterpret_cast < void** > ( &m_pMediaControl ) );
    if ( m_pMediaControl == NULL || m_pMediaSeeking == NULL ) return false;

    m_pGraph->QueryInterface ( IID_IBasicAudio, reinterpret_cast < void** > ( &m_pBasicAudio ) );
    if ( m_pBasicAudio == NULL ) return false;

    CCore::GetSingleton ().GetConsole ()->Printf ( "Successfully loaded video renderer" );

    m_pBasicAudio->get_Volume ( &lDefaultVolume );

    // Clean up
    delete [] wszFile;
//  m_pGraph->Release ();

    // Unlock the mutex
    Unlock ();

    return true;
}