Example #1
void CVisualPage::OnCamSetupButton()
{
  CComboBox * box = (CComboBox*)(GetDlgItem(IDC_RECORDING_COMBO));
  int i = box->GetCurSel();
  int n = box->GetLBTextLen(i);
  CString s;
  box->GetLBText(i, s.GetBuffer(n));
  PString setupDeviceName = s;
  s.ReleaseBuffer();

  if (setupDeviceName.IsEmpty()) return;
  if (setupDeviceName.Find("fake") == 0) return;
  if (setupDeviceName.Find("monitor") == 0) return;
  if (setupDeviceName.Find("zmonitor") == 0) return;
  PTRACE(4,"PVidDirectShow\tCurrent device: " << setupDeviceName);

  HRESULT hr;
  IBaseFilter * pFilter = NULL;
  IMoniker *pMoniker =NULL;
  ICreateDevEnum *pDevEnum =NULL;
  IEnumMoniker *pClassEnum = NULL;
  ULONG cFetched;

  ::CoInitialize(NULL);

  // Create the system device enumerator
  hr = CoCreateInstance (CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC, IID_ICreateDevEnum, (void **) &pDevEnum);
  if (FAILED(hr)) { ::CoUninitialize(); return; }

  // Create an enumerator for the video capture devices
  hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
  if (FAILED(hr)) { ::CoUninitialize(); return; }

  if (pClassEnum == NULL) { ::CoUninitialize(); return; }

  PTRACE(4,"PVidDirectShow\tEntering device enumeration loop...");
  while (1)
  { // Get the next device
    hr = pClassEnum->Next(1, &pMoniker, &cFetched);
    if (hr != S_OK) { PTRACE(4, "PVidDirectShow\tGetInputDeviceNames() No more video capture devices"); break; }

    // Get the property bag
    IPropertyBag *pPropBag;

    hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)(&pPropBag));
    if (FAILED(hr))
    { PTRACE(4,"PVidDerectShow\tBindToStorage failed, continue");
      pMoniker->Release();
      continue;
    }

    // Find the description or friendly name.
    VARIANT DeviceName;
    DeviceName.vt = VT_BSTR;
    hr = pPropBag->Read(L"Description", &DeviceName, NULL);
    if (FAILED(hr)) hr = pPropBag->Read(L"FriendlyName", &DeviceName, NULL);
    if (SUCCEEDED(hr))
    { char *pDeviceName = BSTR_to_ANSI(DeviceName.bstrVal);
      if (pDeviceName)
      { PTRACE(4, "PVidDirectShow\tGetInputDeviceNames() Found this capture device '"<< pDeviceName <<"'");
        if(PString(pDeviceName) == setupDeviceName)
        {
          PTRACE(4, "PVidDirectShow\tCamera Setup: device found");
          hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**) &pFilter);
          if( SUCCEEDED(hr) && pFilter )
          {
            ISpecifyPropertyPages *p_spec; CAUUID cauuid;
            hr = pFilter->QueryInterface( IID_ISpecifyPropertyPages, (void **)&p_spec );
            if( SUCCEEDED(hr) )
            {
              if( SUCCEEDED(p_spec->GetPages( &cauuid )) && cauuid.cElems > 0 )
              { HWND hwnd_desktop = ::GetDesktopWindow();
                OleCreatePropertyFrame( hwnd_desktop, 30, 30, NULL, 1, (LPUNKNOWN *)(&pFilter), cauuid.cElems, cauuid.pElems, 0, 0, NULL );
                CoTaskMemFree( cauuid.pElems );
              }
              p_spec->Release();  // release even when GetPages fails
            }
            pFilter->Release();   // release the bound filter
            pFilter = NULL;
          }
        }
        free(pDeviceName);
      }
      VariantClear(&DeviceName);  // release the BSTR allocated by the property bag
    }
    pPropBag->Release();
    pMoniker->Release();
  }

  ::CoUninitialize();
}
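A minimal sketch of the same device enumeration using ATL smart pointers (this assumes atlbase.h is available; EnumCaptureDevices is a hypothetical helper name). CComPtr releases each interface automatically, which removes the manual Release() bookkeeping above:

#include <atlbase.h>
#include <dshow.h>

HRESULT EnumCaptureDevices()
{
    // CComPtr calls Release() automatically when it goes out of scope.
    CComPtr<ICreateDevEnum> pDevEnum;
    HRESULT hr = pDevEnum.CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC);
    if (FAILED(hr)) return hr;

    CComPtr<IEnumMoniker> pClassEnum;
    hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
    if (hr != S_OK) return hr;  // S_FALSE means the category is empty

    CComPtr<IMoniker> pMoniker;
    while (pClassEnum->Next(1, &pMoniker, NULL) == S_OK)
    {
        // ... bind to IPropertyBag / IBaseFilter exactly as above ...
        pMoniker.Release();  // reset the smart pointer before the next iteration
    }
    return S_OK;
}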
Example #2
HRESULT Enumerate()
{
    HRESULT hr;
	IBaseFilter  *pSrc = NULL;
    CComPtr <IMoniker> pMoniker =NULL;
    ULONG cFetched;
	char str_tmp[MAX_PATH];
   
	printf("Enumerating WDM device drivers...\n\n");

    // Create the system device enumerator
    CComPtr <ICreateDevEnum> pDevEnum =NULL;

    hr = CoCreateInstance (CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
        IID_ICreateDevEnum, (void ** ) &pDevEnum);
    if (FAILED(hr))
    {
		printf("Couldn't create system enumerator!\n");
        return(hr);
    }

    // Create an enumerator for the video capture devices
    CComPtr <IEnumMoniker> pClassEnum = NULL;

    hr = pDevEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
    if (FAILED(hr))
    {
		printf("Couldn't create system enumerator!\n");
        return(hr);
    }

    // If there are no enumerators for the requested type, then 
    // CreateClassEnumerator will succeed, but pClassEnum will be NULL.
    if (pClassEnum == NULL)
    {
		printf("No video capture hardware detected!\n");
        return(E_FAIL);
    }

	while(S_OK == (pClassEnum->Next (1, &pMoniker, &cFetched)))
	{
		CComPtr<IPropertyBag> pProp;
        pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pProp);
        VARIANT varName;
        VariantInit(&varName);
        hr = pProp->Read(L"FriendlyName", &varName, 0); 
		if(SUCCEEDED(hr))
		{
			strcpy(str_tmp,_bstr_t(varName.bstrVal));
			printf("--Device name: %s\n",str_tmp);
	        // Bind Moniker to a filter object
		    hr = pMoniker->BindToObject(0,0,IID_IBaseFilter, (void**)&pSrc);
			if(FAILED(hr)) 
			{
				printf("  Error: could not bind to filter object.\n");
				goto next;
			}

			// -----------------------------------------------------------------------
			IAMExtDevice *pExtDev = NULL;
			hr = pSrc->QueryInterface(IID_IAMExtDevice, (void**)&pExtDev);
			if(SUCCEEDED(hr)) 
			{
				printf("  IAMExtDevice properties:\n");

				long l;
				hr = pExtDev->get_DevicePort(&l);
				if(SUCCEEDED(hr))
				{
					printf("    DevicePort: ",l);
					switch(l)
					{
					  case(DEV_PORT_1394): printf("IEEE 1394 Bus"); 
					                       break;
					  default :            printf("(%ld) non IEEE 1394\n",l);
										   pExtDev->Release();
										   goto next;
						                   break;
					};
					printf("\n");
				}

				LPOLESTR ole_str = NULL;
				hr = pExtDev->get_ExternalDeviceID(&ole_str);
				if(SUCCEEDED(hr))
				{
					unsigned __int64 msdv_id = *((unsigned __int64*) ole_str);
					printf("    ExternalDeviceID: %s (hexadecimal)\n",_ui64toa(msdv_id,str_tmp,16));
					printf("      >> Unique 64-bit identifier, as defined by IEEE 1394.\n");

					CoTaskMemFree(ole_str);
				}

				hr = pExtDev->get_ExternalDeviceVersion(&ole_str);
				if(SUCCEEDED(hr))
				{
					strcpy(str_tmp,_bstr_t(ole_str));
					printf("    ExternalDeviceVersion: %s\n",str_tmp);
					CoTaskMemFree(ole_str);
				}

				hr = pExtDev->get_DevicePower(&l);
				if(SUCCEEDED(hr))
				{
					printf("    DevicePower: ",l);
					switch(l)
					{
					  case(ED_POWER_OFF):     printf("off"); break;
					  case(ED_POWER_ON):      printf("on"); break;
					  case(ED_POWER_STANDBY): printf("standby"); break;
					  default :               printf("unknown"); break;
					};
					printf("\n");
				}

				pExtDev->Release();
			}
			else
			{
				printf("  IAMExtDevice not supported.\n");
			}
	
			pSrc->Release();
		}
		printf("\n");
		next: // LABEL next:

        VariantClear(&varName);
        pMoniker = NULL; // Release for the next loop.
    }

	pClassEnum->Reset();

    return hr;
}
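Enumerate() assumes COM is already initialized on the calling thread; a minimal, hypothetical driver would be:

int main()
{
	// COM must be initialized before any DirectShow call.
	HRESULT hr = CoInitialize(NULL);
	if (FAILED(hr)) return 1;

	hr = Enumerate();

	CoUninitialize();
	return SUCCEEDED(hr) ? 0 : 1;
}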
Example #3
// Use cameraID 1 for the first camera, and so on.
HRESULT VideoTexture::init(int cameraID)
{
	if (cameraID <= 0) return S_FALSE;

	glEnable(GL_TEXTURE_2D);

	// Texture -> This will be put into the camera module	
	glGenTextures(1, textures);					// Create The Texture
	// Typical Texture Generation Using Data From The Bitmap
	for (int i = 0; i < 1; i++)
	{
		//glActiveTexture(GL_TEXTURE0 + i);
		glBindTexture(GL_TEXTURE_2D, textures[i]);
		// Generate The Texture (640x480... make changeable!)
		//glTexImage2D(GL_TEXTURE_2D, 0, 3, 640, 480, 0, GL_RGB, GL_UNSIGNED_BYTE, ...THe data111!!!);
		glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);	// Linear Filtering
		glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);	// Linear Filtering
		// Enable Texture Mapping
		glTexImage2D(GL_TEXTURE_2D, 0, 3, TEXTURE_WIDTH, TEXTURE_HEIGHT, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
	}

	// Video stuff:
	// Create captue graph builder:
	HRESULT hr = InitCaptureGraphBuilder(&pGraph, &pBuild);
	if (FAILED(hr)) return hr;
	IEnumMoniker *enumerator;
	hr = EnumerateDevices(CLSID_VideoInputDeviceCategory, &enumerator);
	//DisplayDeviceInformation(enumerator);
	// Take the first camera:
	IMoniker *pMoniker = NULL;
	for (int i = 0; i < cameraID; i++)
	{
		enumerator->Next(1, &pMoniker, NULL);
	}
	if (pMoniker == NULL) return E_FAIL;	// fewer devices than cameraID
	IBaseFilter *pCap = NULL;
	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pCap);
	if (SUCCEEDED(hr))
	{
		hr = pGraph->AddFilter(pCap, L"Capture Filter");
		if (FAILED(hr)) return hr;
	}
	else return hr;

	// Create the Sample Grabber which we will use
	// To take each frame for texture generation
	hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
							IID_ISampleGrabber, (void **)&pGrabber);
	if (FAILED(hr)) return hr;
	hr = pGrabber->QueryInterface(IID_IBaseFilter, (void **)&pGrabberBase);
		// We have to set the 24-bit RGB desire here
	// So that the proper conversion filters
	// Are added automatically.
	AM_MEDIA_TYPE desiredType;
	memset(&desiredType, 0, sizeof(desiredType));
	desiredType.majortype = MEDIATYPE_Video;
	desiredType.subtype = MEDIASUBTYPE_RGB24;
	desiredType.formattype = FORMAT_VideoInfo;
	pGrabber->SetMediaType(&desiredType);
	pGrabber->SetBufferSamples(TRUE);
	// add to Graph
	pGraph->AddFilter(pGrabberBase, L"Grabber");

    /* Null render filter */
    hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&pNullRender);
    if(FAILED(hr)) return hr;
	pGraph->AddFilter(pNullRender, L"Render");

	// Connect the graph
    hr = ConnectFilters(pGraph, pCap, pGrabberBase); 
    if(FAILED(hr)) return hr;
	hr = ConnectFilters(pGraph, pGrabberBase, pNullRender);

	// Set output format of capture:
	IAMStreamConfig *pConfig = NULL;
    hr = pBuild->FindInterface(
                &PIN_CATEGORY_CAPTURE, // Capture pin.
                0,    // Any media type.
                pCap, // Pointer to the capture filter.
                IID_IAMStreamConfig, (void**)&pConfig);
	if (FAILED(hr)) return hr;
	AM_MEDIA_TYPE *pmtConfig;
	hr = pConfig->GetFormat(&pmtConfig);
	if (FAILED(hr)) return hr;
		
	// Try and find a good video format
    int iCount = 0, iSize = 0;
    hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);               
    // Check the size to make sure we pass in the correct structure.
    if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
    {
		// Use the video capabilities structure.               
        for (int iFormat = 0; iFormat < iCount; iFormat++)
        {
			VIDEO_STREAM_CONFIG_CAPS scc;
			AM_MEDIA_TYPE *pmtConfig;
			hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
			if (SUCCEEDED(hr))
			{
				VIDEOINFOHEADER *hdr = (VIDEOINFOHEADER *)pmtConfig->pbFormat;
				if (hdr->bmiHeader.biWidth == CAM_WIDTH &&
					hdr->bmiHeader.biHeight == CAM_HEIGHT &&
					hdr->bmiHeader.biBitCount == 24)
				{
					pConfig->SetFormat(pmtConfig);
				}
			}
		}
	}
	pConfig->Release();

	// Set camera stuff
	IAMCameraControl *pCamControl = NULL;
	hr = pCap->QueryInterface(IID_IAMCameraControl, (void **)&pCamControl);
	if (FAILED(hr)) return hr;
	// Get the range and default value. 
	long Min, Max, Step, Default, Flags;
	// For getting: long Val;
	hr = pCamControl->GetRange(CameraControl_Focus, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pCamControl->Set(CameraControl_Focus, 0, CameraControl_Flags_Manual);
#if 0
	hr = pCamControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pCamControl->Set(CameraControl_Exposure, -4, CameraControl_Flags_Manual);
#endif
	pCamControl->Release();
	IAMVideoProcAmp *pProcAmp = 0;
	hr = pCap->QueryInterface(IID_IAMVideoProcAmp, (void**)&pProcAmp);
	if (FAILED(hr)) return hr;
#if 0
	hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Brightness, 30, VideoProcAmp_Flags_Manual);
	hr = pProcAmp->GetRange(VideoProcAmp_Gain, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Gain, 30, VideoProcAmp_Flags_Manual);
	hr = pProcAmp->GetRange(VideoProcAmp_WhiteBalance, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_WhiteBalance, 4500, VideoProcAmp_Flags_Manual);
	hr = pProcAmp->GetRange(VideoProcAmp_Saturation, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Saturation, 100, VideoProcAmp_Flags_Manual);		
	hr = pProcAmp->GetRange(VideoProcAmp_Contrast, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Contrast, 6, VideoProcAmp_Flags_Manual);		
#endif
	pProcAmp->Release();

	hr = pMediaControl->Run();
	return hr;
}
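Since init() configures the SampleGrabber with SetBufferSamples(TRUE), the per-frame texture update could look like the following sketch. updateTexture() is a hypothetical member; it assumes pGrabber and textures are the members initialized above and that the grabbed RGB24 frame is CAM_WIDTH x CAM_HEIGHT (no larger than the texture):

HRESULT VideoTexture::updateTexture()
{
	// First call with a NULL buffer returns the required size.
	long cbBuffer = 0;
	HRESULT hr = pGrabber->GetCurrentBuffer(&cbBuffer, NULL);
	if (FAILED(hr) || cbBuffer <= 0) return hr;

	static long cbAllocated = 0;
	static BYTE *pBuffer = NULL;
	if (cbBuffer > cbAllocated)
	{
		delete [] pBuffer;
		pBuffer = new BYTE[cbBuffer];
		cbAllocated = cbBuffer;
	}

	hr = pGrabber->GetCurrentBuffer(&cbBuffer, (long *)pBuffer);
	if (FAILED(hr)) return hr;

	glBindTexture(GL_TEXTURE_2D, textures[0]);
	// DirectShow RGB24 frames are stored bottom-up in BGR order.
	glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, CAM_WIDTH, CAM_HEIGHT,
		GL_BGR_EXT, GL_UNSIGNED_BYTE, pBuffer);
	return S_OK;
}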
Example #4
// A very simple program to capture a webcam & audio to a file using DirectShow
//
int main(int argc, char* argv[])
{
    ICaptureGraphBuilder2 *pCaptureGraph = NULL;	// Capture graph builder object
	IGraphBuilder *pGraph = NULL;	// Graph builder object
    IMediaControl *pControl = NULL;	// Media control object
	IFileSinkFilter *pSink = NULL;	// File sink object
	IBaseFilter *pAudioInputFilter = NULL; // Audio Capture filter
	IBaseFilter *pVideoInputFilter = NULL; // Video Capture filter
	IBaseFilter *pASFWriter = NULL;	// WM ASF File config interface

    // Initialize the COM library.
    HRESULT hr = CoInitialize(NULL);
    if (FAILED(hr))
    {
	     // We’ll send our error messages to the console.
        printf("ERROR - Could not initialize COM library");
        return hr;
    }

    // Create the filter graph manager and query for interfaces.
    hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER,
                          IID_ICaptureGraphBuilder2, (void **)&pCaptureGraph);
    if (FAILED(hr))	// FAILED is a macro that tests the return value
    {
        printf("ERROR - Could not create the Filter Graph Manager.");
        return hr;
    }

	// Use a method of the capture graph builder
	// To create an output path for the stream 
	hr = pCaptureGraph->SetOutputFileName(&MEDIASUBTYPE_Asf, 
		L"C:\\MyWebcam.ASF", &pASFWriter, &pSink);

	// Now configure the ASF Writer
	// Present the property pages for this filter
	hr = ShowFilterPropertyPages(pASFWriter);

	// Now get the filter graph manager
	// That's part of the capture graph builder
	hr = pCaptureGraph->GetFiltergraph(&pGraph);

	 // Using QueryInterface on the graph builder, 
    // Get the Media Control object.
    hr = pGraph->QueryInterface(IID_IMediaControl, (void **)&pControl);
    if (FAILED(hr))
    {
        printf("ERROR - Could not create the Media Control object.");
        pGraph->Release();	// Clean up after ourselves.
		CoUninitialize();  // And uninitalize COM
        return hr;
    }

	// Get an AudioCapture filter.
	// But there are several to choose from
	// So we need to enumerate them, and pick one.
	// Then add the audio capture filter to the filter graph. 
	hr = GetAudioInputFilter(&pAudioInputFilter, L"Logitech");
	if (SUCCEEDED(hr)) {
		hr = pGraph->AddFilter(pAudioInputFilter, L"Webcam Audio Capture");
	}

	// Now create the video input filter from the webcam
	hr = GetVideoInputFilter(&pVideoInputFilter, L"Logitech");
	if (SUCCEEDED(hr)) {
		hr = pGraph->AddFilter(pVideoInputFilter, L"Webcam Video Capture");
	}

	// Add a video renderer
	//IBaseFilter *pVideoRenderer = NULL;
	//hr = AddFilterByCLSID(pGraph, CLSID_VideoRenderer, L"Video Renderer", &pVideoRenderer);

	// Use another method of the capture graph builder
	// To provide a render path for video preview
	IBaseFilter *pIntermediate = NULL;
	hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
		pVideoInputFilter, NULL, NULL);

	// Now add the video capture to the output file
	hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
		pVideoInputFilter, NULL, pASFWriter);
	
	// And do the same for the audio
	hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio,
		pAudioInputFilter, NULL, pASFWriter);

    if (SUCCEEDED(hr))
    {
        // Run the graph.
        hr = pControl->Run();
        if (SUCCEEDED(hr))
        {
			// Wait patiently for completion of the recording
			wprintf(L"Started recording...press Enter to stop recording.\n");

            // Wait for completion.
			char ch;
			ch = getchar();		// We wait for keyboard input
        }

		// And let's stop the filter graph
		hr = pControl->Stop();

		wprintf(L"Stopped recording.\n");	// To the console

		// Before we finish up, save the filter graph to a file.
		SaveGraphFile(pGraph, L"C:\\MyGraph.GRF");
    }

	// Now release everything, and clean up.
	pSink->Release();
	pASFWriter->Release();
	pVideoInputFilter->Release();
	pAudioInputFilter->Release();
    pControl->Release();
    pGraph->Release();
	pCaptureGraph->Release();
    CoUninitialize();

	return 0;
}
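SaveGraphFile() is not shown above; a sketch of the standard DirectShow SDK helper, which serializes the graph through IPersistStream into a compound file, would be:

HRESULT SaveGraphFile(IGraphBuilder *pGraph, WCHAR *wszPath)
{
	const WCHAR wszStreamName[] = L"ActiveMovieGraph";
	IStorage *pStorage = NULL;

	// Create a compound-file storage to hold the serialized graph.
	HRESULT hr = StgCreateDocfile(wszPath,
		STGM_CREATE | STGM_TRANSACTED | STGM_READWRITE | STGM_SHARE_EXCLUSIVE,
		0, &pStorage);
	if (FAILED(hr)) return hr;

	IStream *pStream = NULL;
	hr = pStorage->CreateStream(wszStreamName,
		STGM_WRITE | STGM_CREATE | STGM_SHARE_EXCLUSIVE, 0, 0, &pStream);
	if (SUCCEEDED(hr))
	{
		// The filter graph manager knows how to serialize itself.
		IPersistStream *pPersist = NULL;
		hr = pGraph->QueryInterface(IID_IPersistStream, (void **)&pPersist);
		if (SUCCEEDED(hr))
		{
			hr = pPersist->Save(pStream, TRUE);
			pPersist->Release();
		}
		pStream->Release();
	}
	if (SUCCEEDED(hr))
		hr = pStorage->Commit(STGC_DEFAULT);
	pStorage->Release();
	return hr;
}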
Example #5
bool CPlaylist::Load()
{
    IGraphBuilder * pGraph       = NULL;
    IAMPlayList   * pPlaylist    = NULL;
    HRESULT         hr;
    bool            bResult;

    if (NULL != m_pList || true == m_bTransient)
    {
        return true;
    }

    //
    // Make sure that this is one of our playlists and read the last played element
    //
    bResult = LoadParam();

    hr = CoCreateInstance(CLSID_FilterGraph,
                          NULL,
                          CLSCTX_INPROC_SERVER,
                          IID_IGraphBuilder,
                          (void**) &pGraph);

    if (SUCCEEDED(hr))
    {
        hr = pGraph->RenderFile(m_pszPath, NULL);
    }

    if (SUCCEEDED(hr))
    {
        IEnumFilters * pEnum   = NULL;
        IBaseFilter  * pFilter = NULL;

        hr = pGraph->EnumFilters(&pEnum);

        if (pEnum)
        {
            while (!pPlaylist && pEnum->Next(1, &pFilter, NULL) == S_OK)
            {
                hr = pFilter->QueryInterface(IID_IAMPlayList, (void**)&pPlaylist);
                pFilter->Release();
            }

            if (!pPlaylist)
            {
                hr = E_NOINTERFACE;
            }

            pEnum->Release();
        }
    }

    if (SUCCEEDED(hr))
    {
        DWORD             dwCount;
        IAMPlayListItem * pItem   = NULL;

        if(pPlaylist)
            hr = pPlaylist->GetItemCount(&dwCount);
        else
            hr = E_FAIL;

        if (SUCCEEDED(hr))
        {
            for (DWORD i = 0; i < dwCount; i++)
            {
                hr = pPlaylist->GetItem(i, &pItem);

                if (SUCCEEDED(hr))
                {
                    BSTR pszSource = NULL;

                    hr = pItem->GetSourceURL(0, &pszSource);

                    if (SUCCEEDED(hr))
                    {
                        InsertTrack(i, pszSource);
                    }

                    pItem->Release();
                }
            }
        }
    }

    if (pPlaylist)
    {
        pPlaylist->Release();
    }

    if (pGraph)
    {
        pGraph->Release();
    }

    if (SUCCEEDED(hr))
    {
        return true;
    }
    else
    {
        return false;
    }
}
Example #6
static gboolean
gst_dshowvideodec_create_graph_and_filters (GstDshowVideoDec * vdec)
{
  HRESULT hres = S_FALSE;
  GstDshowVideoDecClass *klass =
      (GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (vdec);
  IBaseFilter *srcfilter = NULL;
  IBaseFilter *sinkfilter = NULL;
  gboolean ret = FALSE;

  /* create the filter graph manager object */
  hres = CoCreateInstance (CLSID_FilterGraph, NULL, CLSCTX_INPROC,
      IID_IFilterGraph, (LPVOID *) & vdec->filtergraph);
  if (hres != S_OK || !vdec->filtergraph) {
    GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't create an instance "
            "of the directshow graph manager (error=%d)", hres), (NULL));
    goto error;
  }

  hres = vdec->filtergraph->QueryInterface(IID_IMediaFilter,
      (void **) &vdec->mediafilter);
  if (hres != S_OK || !vdec->mediafilter) {
    GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
        ("Can't get IMediacontrol interface "
            "from the graph manager (error=%d)", hres), (NULL));
    goto error;
  }

  /* create fake src filter */
  vdec->fakesrc = new FakeSrc();
  /* Created with a refcount of zero, so increment that */
  vdec->fakesrc->AddRef();

  hres = vdec->fakesrc->QueryInterface(IID_IBaseFilter,
      (void **) &srcfilter);
  if (FAILED (hres)) {
    GST_WARNING_OBJECT (vdec, "Failed to QI fakesrc to IBaseFilter");
    goto error;
  }

  /* search a decoder filter and create it */
  vdec->decfilter = gst_dshow_find_filter (
          klass->entry->input_majortype,
          klass->entry->input_subtype,
          klass->entry->output_majortype,
          klass->entry->output_subtype,
          klass->entry->preferred_filters);
  if (vdec->decfilter == NULL) {
    GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't create an instance "
            "of the decoder filter"), (NULL));
    goto error;
  }

  /* create fake sink filter */
  vdec->fakesink = new VideoFakeSink(vdec);
  /* Created with a refcount of zero, so increment that */
  vdec->fakesink->AddRef();

  hres = vdec->fakesink->QueryInterface(IID_IBaseFilter,
      (void **) &sinkfilter);
  if (FAILED (hres)) {
    GST_WARNING_OBJECT (vdec, "Failed to QI fakesink to IBaseFilter");
    goto error;
  }

  /* add filters to the graph */
  hres = vdec->filtergraph->AddFilter (srcfilter, L"src");
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't add fakesrc filter "
            "to the graph (error=%d)", hres), (NULL));
    goto error;
  }

  hres = vdec->filtergraph->AddFilter(vdec->decfilter, L"decoder");
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't add decoder filter "
            "to the graph (error=%d)", hres), (NULL));
    goto error;
  }

  hres = vdec->filtergraph->AddFilter(sinkfilter, L"sink");
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't add fakesink filter "
            "to the graph (error=%d)", hres), (NULL));
    goto error;
  }

  vdec->setup = TRUE;

  ret = TRUE;

done:
  if (srcfilter)
    srcfilter->Release();
  if (sinkfilter)
    sinkfilter->Release();
  return ret;

error:
  if (vdec->fakesrc) {
    vdec->fakesrc->Release();
    vdec->fakesrc = NULL;
  }
  if (vdec->decfilter) {
    vdec->decfilter->Release();
    vdec->decfilter = NULL;
  }
  if (vdec->fakesink) {
    vdec->fakesink->Release();
    vdec->fakesink = NULL;
  }
  if (vdec->mediafilter) {
    vdec->mediafilter->Release();
    vdec->mediafilter = NULL;
  }
  if (vdec->filtergraph) {
    vdec->filtergraph->Release();
    vdec->filtergraph = NULL;
  }

  goto done;
}
Example #7
HRESULT InitializeWindowlessVMR(IBaseFilter **ppVmr9)
{
    IBaseFilter* pVmr = NULL;

    if (!ppVmr9)
        return E_POINTER;
    *ppVmr9 = NULL;

    // Create the VMR and add it to the filter graph.
    HRESULT hr = CoCreateInstance(CLSID_VideoMixingRenderer9, NULL,
                     CLSCTX_INPROC, IID_IBaseFilter, (void**)&pVmr);
    if (SUCCEEDED(hr)) 
    {
        hr = pGB->AddFilter(pVmr, L"Video Mixing Renderer 9");
        if (SUCCEEDED(hr)) 
        {
            // Set the rendering mode and number of streams
            SmartPtr <IVMRFilterConfig9> pConfig;

            JIF(pVmr->QueryInterface(IID_IVMRFilterConfig9, (void**)&pConfig));
            JIF(pConfig->SetRenderingMode(VMR9Mode_Windowless));

            hr = pVmr->QueryInterface(IID_IVMRWindowlessControl9, (void**)&pWC);
            if( SUCCEEDED(hr)) 
            {
                hr = pWC->SetVideoClippingWindow(ghApp);
                hr = pWC->SetBorderColor(RGB(0,0,0));
            }

#ifndef BILINEAR_FILTERING
            // Request point filtering (instead of bilinear filtering)
            // to improve the text quality.  In general, if you are 
            // not scaling the app Image, you should use point filtering.
            // This is very important if you are doing source color keying.
            IVMRMixerControl9 *pMix;

            hr = pVmr->QueryInterface(IID_IVMRMixerControl9, (void**)&pMix);
            if( SUCCEEDED(hr)) 
            {
                DWORD dwPrefs=0;
                hr = pMix->GetMixingPrefs(&dwPrefs);

                if (SUCCEEDED(hr))
                {
                    dwPrefs |= MixerPref_PointFiltering;
                    dwPrefs &= ~(MixerPref_BiLinearFiltering);

                    hr = pMix->SetMixingPrefs(dwPrefs);
                }
                pMix->Release();
            }
#endif

            // Get alpha-blended bitmap interface
            hr = pVmr->QueryInterface(IID_IVMRMixerBitmap9, (void**)&pBMP);
        }
        else
            Msg(TEXT("Failed to add VMR to graph!  hr=0x%x\r\n"), hr);

        // Don't release the pVmr interface because we are copying it into
        // the caller's ppVmr9 pointer
        *ppVmr9 = pVmr;
    }
    else
        Msg(TEXT("Failed to create VMR!  hr=0x%x\r\n"), hr);

   return hr;
}
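In windowless mode the application repaints the video itself; a sketch of the matching WM_PAINT handling in the window procedure (assuming the same ghApp and pWC globals) would be:

case WM_PAINT:
{
	PAINTSTRUCT ps;
	HDC hdc = BeginPaint(ghApp, &ps);
	if (pWC)
		pWC->RepaintVideo(ghApp, hdc);  // let the VMR9 redraw the current frame
	EndPaint(ghApp, &ps);
	break;
}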
Example #8
//
// OnReceiveMessage
//
// This is the derived class window message handler methods
//
LRESULT CVideoText::OnReceiveMessage(HWND hwnd,          // Window handle
                                     UINT uMsg,          // Message ID
                                     WPARAM wParam,      // First parameter
                                     LPARAM lParam)      // Other parameter
{
    IBaseFilter *pFilter = NULL;
    RECT ClientRect;

    // Blank out the window background

    if (uMsg == WM_ERASEBKGND) {
        EXECUTE_ASSERT(GetClientRect(m_hwnd,&ClientRect));
        HBRUSH hBrush = CreateSolidBrush(RGB(0,0,0));
        EXECUTE_ASSERT(FillRect(m_hdc,&ClientRect,hBrush));
        EXECUTE_ASSERT(DeleteObject(hBrush));
        return (LRESULT) 0;
    }

    // Handle WM_CLOSE by aborting the playback

    if (uMsg == WM_CLOSE) {
        m_pRenderer->NotifyEvent(EC_USERABORT,0,0);
        DoShowWindow(SW_HIDE);
        return CBaseWindow::OnClose();
    }

    // We pass on WM_ACTIVATEAPP messages to the filtergraph so that the
    // IVideoWindow plug in distributor can switch us out of fullscreen
    // mode where appropriate. These messages may also be used by the
    // resource manager to keep track of which renderer has the focus

    if (uMsg == WM_ACTIVATEAPP) {
        NOTE1("Notification of EC_ACTIVATE (%d)",(BOOL) wParam);
        m_pRenderer->QueryInterface(IID_IBaseFilter,(void **) &pFilter);
        m_pRenderer->NotifyEvent(EC_ACTIVATE,wParam,(LPARAM) pFilter);
        pFilter->Release();
        return (LRESULT) 0;
    }

    // Treat clicks on text as requests to move window

    if (uMsg == WM_NCHITTEST) {
        LRESULT Result = DefWindowProc(hwnd,uMsg,wParam,lParam);
        if (Result == HTCLIENT) {
            Result = HTCAPTION;
        }
        return Result;
    }

    // The base class that implements IVideoWindow looks after a flag
    // that says whether or not the cursor should be hidden. If so we
    // hide the cursor and return (LRESULT) 1. Otherwise we pass to
    // the DefWindowProc to show the cursor as normal. This is used
    // when our window is made fullscreen to imitate the Modex filter

    if (uMsg == WM_SETCURSOR) {
        if (IsCursorHidden() == TRUE) {
            SetCursor(NULL);
            return (LRESULT) 1;
        }
    }

    // When we detect a display change we send an EC_DISPLAY_CHANGED
    // message along with our input pin. The filtergraph will stop
    // everyone and reconnect our input pin. When being reconnected
    // we can then accept the media type that matches the new display
    // mode since we may no longer be able to draw the current format

    if (uMsg == WM_DISPLAYCHANGE) {
        m_pRenderer->m_Display.RefreshDisplayType(NULL);
        m_pRenderer->OnDisplayChange();
        NOTE("Sent EC_DISPLAY_CHANGED event");
        return (LRESULT) 0;
    }
    return CBaseWindow::OnReceiveMessage(hwnd,uMsg,wParam,lParam);

} // OnReceiveMessage
Example #9
// Open multimedia stream.
BOOL CDShow::Open(ZString& pFileName, IDirectDraw7 *pDD)
{
	// Multimedia stream pointer
	IAMMultiMediaStream *pAMStream;
	IGraphBuilder *pGb; // 7/10 #110
	IEnumFilters *pEfs;
	IBasicAudio *pBa;

    
	//7/29/09 we can now do stuff while the video plays
	CoInitializeEx(NULL,COINIT_MULTITHREADED); 
	

    // Create Multimedia stream object
	if (FAILED(CoCreateInstance(CLSID_AMMultiMediaStream, NULL, CLSCTX_INPROC_SERVER,
				 IID_IAMMultiMediaStream, (void **)&pAMStream)))
	{		
		// Return FALSE to let caller know we failed.	
		return FALSE; 
	}
    
	// Initialize Multimedia stream object
	if (FAILED(pAMStream->Initialize(STREAMTYPE_READ, 0, NULL)))
	{	
		// There are a lot of possibilities to fail...
		return FALSE; 
	}

    // Add primary video stream.
	if (FAILED((pAMStream->AddMediaStream(pDD, &MSPID_PrimaryVideo, 0, NULL))))
	{		
		return FALSE; 
	}

    // Add default sound render to primary video stream,
	// so sound will be played back automatically.
	if (FAILED(pAMStream->AddMediaStream(NULL, &MSPID_PrimaryAudio, AMMSF_ADDDEFAULTRENDERER, NULL)))
	{
		// Return FALSE to let caller know we failed.		
		return FALSE; 
	}

    // Convert filename to UNICODE.
	// Notice the safe way to get the actual size of a string.
	WCHAR wPath[MAX_PATH];
    MultiByteToWideChar(CP_ACP, 0, pFileName, -1, wPath, sizeof(wPath)/sizeof(wPath[0]));   
    
	// Build the filter graph for our multimedia stream.
	if (FAILED((pAMStream->OpenFile(wPath, 0))))
	{	
		// Return FALSE to let caller know we failed.		
		return FALSE; 
	}

	//7/10 #110
	pAMStream->GetFilterGraph(&pGb);
	pGb->EnumFilters(&pEfs);

    IBaseFilter *pFilter;
	unsigned long cFetched;
    while(pEfs->Next(1, &pFilter, &cFetched) == S_OK) {
		FILTER_INFO FilterInfo;
		pFilter->QueryFilterInfo(&FilterInfo);
		char szName[MAX_FILTER_NAME];
		long cch = WideCharToMultiByte(CP_ACP,0,FilterInfo.achName,MAX_FILTER_NAME,szName,MAX_FILTER_NAME,0,0);
		if (cch > 0) {
			if (!strcmp("WMAudio Decoder DMO",szName)) {
				// set the volume to music level
				FilterInfo.pGraph->QueryInterface(IID_IBasicAudio,(void**)&pBa);
				HKEY hKey;
				DWORD dwResult = 0;
				if (ERROR_SUCCESS == ::RegOpenKeyEx(HKEY_LOCAL_MACHINE, ALLEGIANCE_REGISTRY_KEY_ROOT,0, KEY_READ, &hKey)) {
					DWORD dwSize = sizeof(dwResult);
					DWORD dwType = REG_DWORD;
					::RegQueryValueEx(hKey, "MusicGain", NULL, &dwType, (BYTE*)&dwResult, &dwSize);
					::RegCloseKey(hKey);
					if (dwType != REG_DWORD)
						dwResult = 0;
				}
				long vol = -((long)dwResult) * 100;
				if (vol < -5000) {
					vol = -10000;
				}
				pBa->put_Volume(vol);
				pBa->Release();
			}
		}
		if (FilterInfo.pGraph != NULL)
			FilterInfo.pGraph->Release();
		pFilter->Release();	// release the filter even if the name conversion failed
	}
	pEfs->Release();
	pGb->Release();

    // Assign the temporary stream pointer to the member; the reference
	// from CoCreateInstance transfers here, so no extra AddRef is needed.
	m_pMMStream = pAMStream;
    	
	// Get media stream interface
	if (FAILED(m_pMMStream->GetMediaStream(MSPID_PrimaryVideo, &m_pPrimaryVideoStream)))
	{	
		return FALSE;
	}
	
	// Get DirectDraw media stream interface
	if (FAILED(m_pPrimaryVideoStream->QueryInterface(IID_IDirectDrawMediaStream,(void **)&m_pDDStream)))
	{				
		return FALSE;
	}	
	
	// Create stream sample
	if (FAILED(m_pDDStream->CreateSample(NULL,NULL,0,&m_pSample)))
	{			
		return FALSE;
	}	
	
	// Get DirectDraw surface interface from Sample.
	if (FAILED(m_pSample->GetSurface(&m_pDDSurface,&m_rSrcRect)))
	{			

		return FALSE;
	}	
	
	// Get DirectDraw surface7 interface
	if (FAILED(m_pDDSurface->QueryInterface(IID_IDirectDrawSurface7,(void**)&m_pDDSurface7)))
	{		
		return FALSE;
	}	
	
	// Ok. Media is open now.
	m_bMediaOpen=TRUE;
	
	// If we are here, everything turned out ok. Return TRUE.
	return TRUE;
}
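A matching Close() would release the interfaces in reverse order of acquisition (a sketch using the member names from Open(); each pointer may be NULL if Open() failed part-way):

void CDShow::Close()
{
	if (m_pDDSurface7)         { m_pDDSurface7->Release();         m_pDDSurface7 = NULL; }
	if (m_pDDSurface)          { m_pDDSurface->Release();          m_pDDSurface = NULL; }
	if (m_pSample)             { m_pSample->Release();             m_pSample = NULL; }
	if (m_pDDStream)           { m_pDDStream->Release();           m_pDDStream = NULL; }
	if (m_pPrimaryVideoStream) { m_pPrimaryVideoStream->Release(); m_pPrimaryVideoStream = NULL; }
	if (m_pMMStream)           { m_pMMStream->Release();           m_pMMStream = NULL; }

	m_bMediaOpen = FALSE;
	CoUninitialize();
}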
Example #10
HRESULT FindRenderer(IGraphBuilder *pGB, const GUID *mediatype, IBaseFilter **ppFilter)
{
    HRESULT hr;
    IEnumFilters *pEnum = NULL;
    IBaseFilter *pFilter = NULL;
    IPin *pPin;
    ULONG ulFetched, ulInPins, ulOutPins;
    BOOL bFound=FALSE;

    // Verify graph builder interface
    if (!pGB)
        return E_NOINTERFACE;

    // Verify that a media type was passed
    if (!mediatype)
        return E_POINTER;

    // Clear the filter pointer in case there is no match
    if (ppFilter)
        *ppFilter = NULL;

    // Get filter enumerator
    hr = pGB->EnumFilters(&pEnum);
    if (FAILED(hr))
        return hr;

    pEnum->Reset();

    // Enumerate all filters in the graph
    while(!bFound && (pEnum->Next(1, &pFilter, &ulFetched) == S_OK))
    {
#ifdef DEBUG
        // Read filter name for debugging purposes
        FILTER_INFO FilterInfo;
        TCHAR szName[256];

        hr = pFilter->QueryFilterInfo(&FilterInfo);
        if (SUCCEEDED(hr))
        {
            // Show filter name in debugger
#ifdef UNICODE
            lstrcpy(szName, FilterInfo.achName);
#else
            WideCharToMultiByte(CP_ACP, 0, FilterInfo.achName, -1, szName, 256, 0, 0);
#endif
            FilterInfo.pGraph->Release();
        }
#endif

        // Find a filter with one input and no output pins
        hr = CountFilterPins(pFilter, &ulInPins, &ulOutPins);
        if (FAILED(hr))
            break;

        if ((ulInPins == 1) && (ulOutPins == 0))
        {
            // Get the first pin on the filter
            pPin=0;
            pPin = GetInPin(pFilter, 0);

            // Read this pin's major media type
            AM_MEDIA_TYPE type= {0};
            hr = pPin->ConnectionMediaType(&type);
            if (FAILED(hr))
                break;

            // Is this pin's media type the requested type?
            // If so, then this is the renderer for which we are searching.
            // Copy the interface pointer and return.
            if (type.majortype == *mediatype)
            {
                // Found our filter
                *ppFilter = pFilter;
                bFound = TRUE;
            }
            // This is not the renderer, so release the interface.
            else
                pFilter->Release();

            // Delete memory allocated by ConnectionMediaType()
            FreeMediaType(type);
        }
        else
        {
            // No match, so release the interface
            pFilter->Release();
        }
    }

    pEnum->Release();
    return hr;
}
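FindRenderer() hands back an owned reference, so the caller must release it; typical use (a sketch, assuming pGB is the graph builder) looks like this:

IBaseFilter *pAudioRenderer = NULL;
HRESULT hr = FindRenderer(pGB, &MEDIATYPE_Audio, &pAudioRenderer);
if (SUCCEEDED(hr) && pAudioRenderer)
{
    // ... inspect or reconnect the audio renderer here ...
    pAudioRenderer->Release();
}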
Example #11
//---------------------------------------------------------------------------------------
// Initialise all the DirectShow structures we need
//---------------------------------------------------------------------------------------
int init_dshow(wchar_t *FileName, int render_in_own_window)
{
	HRESULT err;
	FILTER_STATE state = State_Stopped;

	// Create a new graph
    err = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (void **)&pGB);
	if (err != S_OK)
		return 0;

	// Get access to the video controls
	err = pGB->QueryInterface(IID_IMediaControl, (void **)&pMC);
	if (err != S_OK)
		return 0;

	err = pGB->QueryInterface(IID_IMediaSeeking, (void **)&pMS);
	if (err != S_OK)
		return 0;

	err = pGB->QueryInterface(IID_IMediaEventEx, (void **)&pME);
	if (err != S_OK)
		return 0;

	if (render_in_own_window)
	{
		IBaseFilter *pVMR;
		IVMRFilterConfig *pFC; 
		long lWidth, lHeight; 
		RECT rcSrc, rcDest;

		err = CoCreateInstance(CLSID_VideoMixingRenderer, NULL, CLSCTX_INPROC, IID_IBaseFilter, (void**)&pVMR); 
		if (err != S_OK)
			return 0;

		// Add the VMR to the filter graph.
		err = pGB->AddFilter(pVMR, L"VMR");
		if (err != S_OK)
			return 0;

		// Set the rendering mode.  
		err = pVMR->QueryInterface(IID_IVMRFilterConfig, (void**)&pFC); 
		if (err != S_OK)
			return 0;

		err = pFC->SetRenderingMode(VMRMode_Windowless); 
		if (err != S_OK)
			return 0;

		pFC->Release();

		// Set the window. 
		err = pVMR->QueryInterface(IID_IVMRWindowlessControl, (void**)&pWC);
		if (err != S_OK)
			return 0;

		err = pWC->SetVideoClippingWindow(game_window); 
		if (err != S_OK)
			return 0;

		pVMR->Release();

		// Find the native video size.
		err = pWC->GetNativeVideoSize(&lWidth, &lHeight, NULL, NULL); 
		// Set the source rectangle.
		SetRect(&rcSrc, 0, 0, lWidth/2, lHeight/2); 
    
		// Get the window client area.
		GetClientRect(game_window, &rcDest); 
		// Set the destination rectangle.
		SetRect(&rcDest, 0, 0, rcDest.right, rcDest.bottom); 
   
			// Set the video position.
		err = pWC->SetVideoPosition(&rcSrc, &rcDest); 
	}

	// Add the source file
	err = pGB->RenderFile(FileName, NULL);
	if (err != S_OK)
		return 0;

	// Have the graph signal event via window callbacks for performance
	err = pME->SetNotifyWindow((OAHWND)game_window, WM_GRAPHNOTIFY0, 0);
	if (err != S_OK)
		return 0;

	err = pMS->SetTimeFormat(&TIME_FORMAT_FRAME);
	if(err != S_OK)
	{
		err = pMS->SetTimeFormat(&TIME_FORMAT_MEDIA_TIME);
	}

	err = pMC->Run();
	do
	{
		err = pMC->GetState(0, (long *)&state);
	} while (state != State_Running);

	return 1;
}
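init_dshow() registers WM_GRAPHNOTIFY0 with SetNotifyWindow(), so the game window procedure has to drain the graph's event queue; a sketch of that handler (using the same pME global) would be:

case WM_GRAPHNOTIFY0:
{
	long evCode;
	LONG_PTR param1, param2;

	// Drain every pending event; FreeEventParams must be called for each one.
	while (pME && SUCCEEDED(pME->GetEvent(&evCode, &param1, &param2, 0)))
	{
		pME->FreeEventParams(evCode, param1, param2);
		if (evCode == EC_COMPLETE)
		{
			// playback finished - stop, rewind or loop here
		}
	}
	break;
}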
Example #12
/////////////////////////////////////////////////////////
// really open the file ! (OS dependent)
//
/////////////////////////////////////////////////////////
void pix_movieDS::realOpen(char *filename)
{
	WCHAR			WideFileName[MAXPDSTRING];
	HRESULT			RetVal;
	AM_MEDIA_TYPE	MediaType;
	BOOL			bFrameTime	= TRUE;
	GUID			Guid;

	// Convert c-string to Wide string.
	memset(WideFileName, 0, sizeof(WideFileName));

	if (0 == MultiByteToWideChar(CP_ACP, 0, filename, strlen(filename), WideFileName,
		MAXPDSTRING))
	{
		error("Unable to load %s", filename);

		return;
	}

	// Add a file source filter to the filter graph.
	RetVal	= FilterGraph->AddSourceFilter(WideFileName, L"SOURCE", &VideoFilter);

	if (RetVal != S_OK || NULL == VideoFilter)
	{
		error("Unable to render %s", filename);

		return;
	}

	// Create an instance of the sample grabber filter. The filter allows frames to be
	// buffered from a video source.
	RetVal	= CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
		IID_IBaseFilter, (void**)&SampleFilter);

	if (RetVal != S_OK || NULL == SampleFilter)
	{
		error("Unable to create SampleFilter interface %d", RetVal);

		return;
	}

 	// Add sample grabber filter to the filter graph.
	RetVal	= FilterGraph->AddFilter(SampleFilter, L"Sample Grabber");

	if (RetVal != S_OK)
	{
		error("Unable to add SampleFilter %d", RetVal);

		return;
	}

	// Find an interface to the SampleGrabber from the SampleGrabber filter. The
	// SampleGrabber allows frames to be grabbed from the filter. SetBufferSamples(TRUE)
	// tells the SampleGrabber to buffer the frames. SetOneShot(FALSE) tells the
	// SampleGrabber to continuously grab frames.  has GetCurrentBuffer() method
	RetVal	= SampleFilter->QueryInterface(IID_ISampleGrabber, (void **)&SampleGrabber);

	if (RetVal != S_OK || NULL == SampleGrabber)
	{
		error("Unable to create SampleGrabber interface %d", RetVal);

		return;
	}

	// Set the media type that the SampleGrabber wants.
	// MEDIATYPE_Video selects only video and not interleaved audio and video
	// MEDIASUBTYPE_RGB24 is the colorspace and format to deliver frames
	// MediaType.formattype is GUID_NULLsince it is handled later to get file info
	memset(&MediaType, 0, sizeof(AM_MEDIA_TYPE));
	MediaType.majortype		= MEDIATYPE_Video;
	MediaType.subtype		= MEDIASUBTYPE_RGB24;
	MediaType.formattype	= GUID_NULL;
	RetVal					= SampleGrabber->SetMediaType(&MediaType);

	// Set the SampleGrabber to return continuous frames
	RetVal	= SampleGrabber->SetOneShot(FALSE);

	if (RetVal != S_OK)
	{
		error("Unable to setup sample grabber %d", RetVal);

		return;
	}

	// Set the SampleGrabber to copy the data to a buffer. This only set to FALSE when a
	// callback is used.
	RetVal	= SampleGrabber->SetBufferSamples(TRUE);

	if (RetVal != S_OK)
	{
		error("Unable to setup sample grabber %d", RetVal);

		return;
	}

	// Create the Null Renderer interface. The Null Renderer is used to disable rendering of a
	// video stream to a window.
	RetVal	= CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER,
		IID_IBaseFilter, (void**)&NullFilter);

	if (RetVal != S_OK || NULL == NullFilter)
	{
		error("Unable to create NullFilter interface %d", RetVal);

		return;
	}

	// Add the Null Renderer filter to the FilterGraph
	RetVal	= FilterGraph->AddFilter(NullFilter, L"NullRenderer");

	if (RetVal != S_OK)
	{
		error("Unable to add NullFilter %d", RetVal);

		return;
	}

	// DS filter chain is FileSource -> SampleGrabber -> NullRenderer
	// DS can put any needed filters in the chain for format or colorspace conversion
	// decompression or other transforms

	// Connect the VideoFilter to the SampleFilter
	RetVal	= movieConnectFilters(FilterGraph, VideoFilter, SampleFilter);

	if (RetVal != S_OK)
	{
		error("Unable to connect filters %d", RetVal);

		return;
	}

	// Connect the SampleFilter to the NullFilter
	RetVal	= movieConnectFilters(FilterGraph, SampleFilter, NullFilter);

	if (RetVal != S_OK)
	{
		error("Unable to connect filters %d", RetVal);

		return;
	}

	// Set the time format to frames
	Guid	= TIME_FORMAT_FRAME;

	RetVal	= MediaSeeking->SetTimeFormat(&Guid);

	if (RetVal != S_OK)
	{
		// If frame time format not available, default to 100 nanosecond increments.
		bFrameTime	= FALSE;

		Guid	= TIME_FORMAT_MEDIA_TIME;

		RetVal	= MediaSeeking->SetTimeFormat(&Guid);

		if (RetVal != S_OK)
		{
			error("Unable to set video time format %d", RetVal);

			return;
		}
	}

	// Get the duration of the video. Format will be in previously set time format. This is
	// compatible with the value returned from GetCurrentPosition
	RetVal	= MediaSeeking->GetDuration(&m_Duration);

	if (RetVal != S_OK)
	{
		error("Unable to get video duration %d", RetVal);

		return;
	}

	// Set the number of frames based on the time format used.
	if (TRUE == bFrameTime)
	{
		m_numFrames	= m_Duration;
	}

	else
	{
		LONGLONG	OutFormat;
		GUID		OutGuid;

		OutGuid	= TIME_FORMAT_FRAME;
		Guid	= TIME_FORMAT_MEDIA_TIME;

		//converts from 100 nanosecond format to number of frames
		MediaSeeking->ConvertTimeFormat(&OutFormat, &OutGuid, m_Duration, &Guid);

		m_numFrames	= OutFormat;
	}

	// Get the format of the connected media.
	RetVal	= SampleGrabber->GetConnectedMediaType(&MediaType);

	if (RetVal != S_OK)
	{
		error("Unable to get media type %d", RetVal);

		return;
	}

	// The SampleGrabber will only return video of the 'FORMAT_VideoInfo' type.
	if (FORMAT_VideoInfo == MediaType.formattype && MediaType.pbFormat != NULL)
	{
		// Format returned is specific to the formattype.
		VIDEOINFOHEADER	*VideoInfo	= (VIDEOINFOHEADER *)MediaType.pbFormat;

		// Get size of the image from the BitmapInfoHeader returned in the VIDEOINFOHEADER.
		m_xsize		= VideoInfo->bmiHeader.biWidth;
		m_ysize		= VideoInfo->bmiHeader.biHeight;
		m_csize		= 3;
	}

	else
	{
		error("Invalid media type returned %s", filename);

		return;
	}

	// Allocate video buffer if valid sizes returned.
	if (m_xsize > 0 && m_ysize > 0 && m_csize > 0)
	{
		if (m_frame != NULL)
		{
			delete [] m_frame;
		}

		m_frame		= new BYTE[m_xsize * m_ysize * m_csize];

		if (NULL == m_frame)
		{
			error("Unable to allocate memory for the video buffer %s", filename);

			return;
		}
	}

	// Release the MediaType.pbFormat data
	FreeMediaType(MediaType);

	IBaseFilter	*DVFilter;

	// If DV video is used, set the quality to 720 x 480.
	RetVal	= FilterGraph->FindFilterByName(L"DV Video Decoder", &DVFilter);

	if (S_OK == RetVal && DVFilter != NULL)
	{
		IIPDVDec	*IPDVDec;

		// Find the IIPDVDec interface
		RetVal	= DVFilter->QueryInterface(IID_IIPDVDec, (void **)&IPDVDec);

		if (S_OK == RetVal && IPDVDec != NULL)
		{
			// Set the property to DVRESOLUTION_FULL
			IPDVDec->put_IPDisplay(DVRESOLUTION_FULL);

			// Release the interface
			IPDVDec->Release();
		}

		// Release the interface
		DVFilter->Release();
	}

	post("xsize %d ysize %d csize %",m_xsize, m_ysize, m_csize);

	// Setup the pixBlock data based on the media type.
	// this is a guess at the fast past for pixels on Windows
	m_pixBlock.image.xsize	= m_xsize;
	m_pixBlock.image.ysize	= m_ysize;
	m_pixBlock.image.csize	= m_csize;
	m_pixBlock.image.format	= GL_BGR_EXT;
	m_pixBlock.image.type	= GL_UNSIGNED_BYTE;

	// Start the video stream
	RetVal	= MediaControl->Run();

	if (RetVal != S_OK && RetVal != S_FALSE)
	{
		error("Unable to start video %d", RetVal);

		return;
	}

	// Wait for the video to begin playing.
	while (TRUE)
	{
		OAFilterState	FilterState;

		// Get the state and ensure it's not in an intermediate state
		RetVal	= MediaControl->GetState(0, &FilterState);

		if (RetVal != S_OK && RetVal != VFW_S_STATE_INTERMEDIATE)
		{
			error("Unable to run video %d", RetVal);

			return;
		}

		// Ensure the video is running
		else if (RetVal == S_OK && State_Running == FilterState)
		{
			break;
		}
	}

	// Sets the tex coords
	prepareTexture();

	// Set the last frame to -1 so it will show the first frame.
	m_LastFrame	= -1;

	m_haveMovie	= TRUE;
}
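With the time format set to TIME_FORMAT_FRAME above, seeking reduces to a single IMediaSeeking call; seekFrame() below is a hypothetical helper using the same MediaSeeking member:

void pix_movieDS::seekFrame(LONGLONG frame)
{
	// Frame number, or 100 ns units if the frame format was unavailable.
	LONGLONG Current = frame;

	MediaSeeking->SetPositions(&Current, AM_SEEKING_AbsolutePositioning,
		NULL, AM_SEEKING_NoPositioning);
}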
Example #13
std::vector<CameraConfig> videoInputCamera::getCameraConfigs(int dev_id) {

	std::vector<CameraConfig> cfg_list;

	int count = getDeviceCount();
	if (count==0) return cfg_list;

	comInit();

	HRESULT hr;
	ICaptureGraphBuilder2 *lpCaptureGraphBuilder;
	IGraphBuilder *lpGraphBuilder;
	IBaseFilter *lpInputFilter;
	IAMStreamConfig *lpStreamConfig;

	char 	nDeviceName[255];
	WCHAR 	wDeviceName[255];

	for (int cam_id=0;cam_id<count;cam_id++) {
		if ((dev_id>=0) && (dev_id!=cam_id)) continue;
		hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void **)&lpCaptureGraphBuilder);
		if (FAILED(hr))	// FAILED is a macro that tests the return value
		{
			printf("ERROR - Could not create the Filter Graph Manager\n");
			comUnInit();
			return cfg_list;
		}

		// Create the Filter Graph Manager.
		hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void**)&lpGraphBuilder);
		if (FAILED(hr))
		{
			printf("ERROR - Could not add the graph builder!\n");
			lpCaptureGraphBuilder->Release();
			comUnInit();
			return cfg_list;
		}

		hr = lpCaptureGraphBuilder->SetFiltergraph(lpGraphBuilder);
		if (FAILED(hr))
		{
			printf("ERROR - Could not set filtergraph\n");
			lpGraphBuilder->Release();
			lpCaptureGraphBuilder->Release();
			comUnInit();
			return cfg_list;
		}

		memset(wDeviceName, 0, sizeof(WCHAR) * 255);
		memset(nDeviceName, 0, sizeof(char) * 255);
		hr = getDevice(&lpInputFilter, cam_id, wDeviceName, nDeviceName);

		if (SUCCEEDED(hr)){
			hr = lpGraphBuilder->AddFilter(lpInputFilter, wDeviceName);
		}else{
			printf("ERROR - Could not find specified video device\n");
			lpGraphBuilder->Release();
			lpCaptureGraphBuilder->Release();
			comUnInit();
			return cfg_list;
		}

		hr = lpCaptureGraphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, lpInputFilter, IID_IAMStreamConfig, (void **)&lpStreamConfig);
		if(FAILED(hr)){
			printf("ERROR: Couldn't config the stream!\n");
			lpInputFilter->Release();
			lpGraphBuilder->Release();
			lpCaptureGraphBuilder->Release();
			comUnInit();
			return cfg_list;
		}

		CameraConfig cam_cfg;
		CameraTool::initCameraConfig(&cam_cfg);

		cam_cfg.driver = DRIVER_DEFAULT;
		cam_cfg.device = cam_id;
		sprintf(cam_cfg.name, "%s", nDeviceName);

		int iCount = 0;
		int iSize = 0;
		hr = lpStreamConfig->GetNumberOfCapabilities(&iCount, &iSize);
		std::vector<CameraConfig> fmt_list;

		if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
		{
			GUID lastFormat = MEDIASUBTYPE_None;
			for (int iFormat = 0; iFormat < iCount; iFormat+=2)
			{
				VIDEO_STREAM_CONFIG_CAPS scc;
				AM_MEDIA_TYPE *pmtConfig;
				hr =  lpStreamConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
				if (SUCCEEDED(hr)){

					if ( pmtConfig->subtype != lastFormat) {

						if (fmt_list.size()>0) {
							std::sort(fmt_list.begin(), fmt_list.end());
							cfg_list.insert( cfg_list.end(), fmt_list.begin(), fmt_list.end() );
							fmt_list.clear();
						}
						cam_cfg.cam_format = getMediaSubtype(pmtConfig->subtype);
						lastFormat = pmtConfig->subtype;
					}

					int stepX = scc.OutputGranularityX;
					int stepY = scc.OutputGranularityY;
					if(stepX < 1 || stepY < 1) continue;

					else if ((stepX==1) && (stepY==1)) {

						cam_cfg.cam_width = scc.InputSize.cx;
						cam_cfg.cam_height = scc.InputSize.cy;

						int maxFrameInterval = scc.MaxFrameInterval;
						if (maxFrameInterval==0) maxFrameInterval = 10000000;
						float last_fps=-1;
						VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
						for (int iv=scc.MinFrameInterval;iv<=maxFrameInterval;iv=iv*2) {
							pVih->AvgTimePerFrame = iv;
							hr = lpStreamConfig->SetFormat(pmtConfig);
							if (hr==S_OK) { hr = lpStreamConfig->GetFormat(&pmtConfig);
							float fps = ((int)floor(100000000.0f/(float)pVih->AvgTimePerFrame + 0.5f))/10.0f;
							if (fps!=last_fps) {
								cam_cfg.cam_fps = fps;
								fmt_list.push_back(cam_cfg);
								last_fps=fps;
							} }
						}

					} else {
						int x,y;
						for (x=scc.MinOutputSize.cx,y=scc.MinOutputSize.cy;x<=scc.MaxOutputSize.cx && y<=scc.MaxOutputSize.cy;x+=stepX,y+=stepY) {

							cam_cfg.cam_width = x;
							cam_cfg.cam_height = y;

							int maxFrameInterval = scc.MaxFrameInterval;
							if (maxFrameInterval==0) maxFrameInterval = 10000000;
							float last_fps=-1;
							VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
							for (int iv=scc.MinFrameInterval;iv<=maxFrameInterval;iv=iv*2) {
								pVih->AvgTimePerFrame = iv;
								hr = lpStreamConfig->SetFormat(pmtConfig);
								if (hr==S_OK) { hr = lpStreamConfig->GetFormat(&pmtConfig);
								float fps = ((int)floor(100000000.0f/(float)pVih->AvgTimePerFrame + 0.5f))/10.0f;
								if (fps!=last_fps) {
									cam_cfg.cam_fps = fps;
									fmt_list.push_back(cam_cfg);
									last_fps=fps;
								} }
							}

						}
					}

					deleteMediaType(pmtConfig);
				}
			}
		}

		if (fmt_list.size()>0) {
			std::sort(fmt_list.begin(), fmt_list.end());
			cfg_list.insert( cfg_list.end(), fmt_list.begin(), fmt_list.end() );
			fmt_list.clear();
		}

		lpStreamConfig->Release();
		lpInputFilter->Release();
		lpGraphBuilder->Release();
		lpCaptureGraphBuilder->Release();
	}

	comUnInit();
	return cfg_list;
}
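Typical use of getCameraConfigs() (a sketch; it assumes the method is static, the CameraConfig field names are the ones written by the code above, and -1 requests all devices):

std::vector<CameraConfig> cfg_list = videoInputCamera::getCameraConfigs(-1);
for (size_t i = 0; i < cfg_list.size(); i++) {
	printf("%s: %dx%d @ %.1f fps\n", cfg_list[i].name,
		cfg_list[i].cam_width, cfg_list[i].cam_height, cfg_list[i].cam_fps);
}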
Example #14
int	main()
{
	// for playing
	IGraphBuilder *pGraphBuilder;
	ICaptureGraphBuilder2 *pCaptureGraphBuilder2;
	IMediaControl *pMediaControl;
	IBaseFilter *pDeviceFilter = NULL;

	// to select a video input device
	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;
	ULONG nFetched = 0;

	// initialize COM
	CoInitialize(NULL);

	//
	// selecting a device
	//

	// Create CreateDevEnum to list device
	CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);

	// Create EnumMoniker to list VideoInputDevice 
	pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (pEnumMoniker == NULL) {
		// this will be shown if there is no capture device
		printf("no device\n");
		return 0;
	}

	// reset EnumMoniker
	pEnumMoniker->Reset();

	// get each Moniker
	while (pEnumMoniker->Next(1, &pMoniker, &nFetched) == S_OK)
	{
		IPropertyBag *pPropertyBag;
		TCHAR devname[256];

		// bind to IPropertyBag
		pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
			(void **)&pPropertyBag);

		VARIANT var;

		// get FriendlyName
		var.vt = VT_BSTR;
		pPropertyBag->Read(L"FriendlyName", &var, 0);
		WideCharToMultiByte(CP_ACP, 0,
			var.bstrVal, -1, devname, sizeof(devname), 0, 0);
		VariantClear(&var);

		printf("%s\r\n", devname);
		printf("  select this device ? [y] or [n]\r\n");
		int ch = getchar();

		// you can start playing by 'y' + return key
		// if you press the other key, it will not be played.
		if (ch == 'y')
		{
			// Bind the Moniker to a Filter
			pMoniker->BindToObject(0, 0, IID_IBaseFilter,
				(void**)&pDeviceFilter );
		}
		else
		{
			getchar();
		}

		// release
		pMoniker->Release();
		pPropertyBag->Release();

		if (pDeviceFilter != NULL)
		{
			// go out of loop if getchar() returns 'y'
			break;
		}
	}

	if (pDeviceFilter != NULL) {
		//
		// PLAY
		//

		// create FilterGraph
		CoCreateInstance(CLSID_FilterGraph,
			NULL,
			CLSCTX_INPROC,
			IID_IGraphBuilder,
			(LPVOID *)&pGraphBuilder);

		// create CaptureGraphBuilder2
		CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC, 
			IID_ICaptureGraphBuilder2, 
			(LPVOID *)&pCaptureGraphBuilder2);

		//============================================================
		//===========  MY CODE  ======================================
		//=============================================================
		HRESULT hr = S_OK;	// COM was already initialized above
		IAMStreamConfig *pConfig = NULL;
		hr = pCaptureGraphBuilder2->FindInterface(&PIN_CATEGORY_CAPTURE, 0, pDeviceFilter, IID_IAMStreamConfig, (void**)&pConfig);

		int iCount = 0, iSize = 0;
		hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);

		// Check the size to make sure we pass in the correct structure.
		if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
		{
			// Use the video capabilities structure.

			for (int iFormat = 0; iFormat < iCount; iFormat++)
			{
				VIDEO_STREAM_CONFIG_CAPS scc;
				AM_MEDIA_TYPE *pmtConfig;
				hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
				if (SUCCEEDED(hr))
				{
					/* Examine the format, and possibly use it. */
					if ((pmtConfig->majortype == MEDIATYPE_Video) &&
						(pmtConfig->subtype == MEDIASUBTYPE_RGB24) &&
						(pmtConfig->formattype == FORMAT_VideoInfo) &&
						(pmtConfig->cbFormat >= sizeof (VIDEOINFOHEADER)) &&
						(pmtConfig->pbFormat != NULL))
					{
						VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
						// pVih contains the detailed format information.
						LONG lWidth = pVih->bmiHeader.biWidth;
						LONG lHeight = pVih->bmiHeader.biHeight;
						if( lWidth == 1280 )
							//					if (iFormat == 26)
						{ //2 = '1280x720YUV' YUV, 22 = '1280x800YUV', 26 = '1280x720RGB'
							hr = pConfig->SetFormat(pmtConfig);
						}
					}
					// Delete the media type when you are done.
					DeleteMediaType(pmtConfig);
				}
			}
		}


		// Query the capture filter for the IAMCameraControl interface.
		IAMCameraControl *pCameraControl = 0;
		hr = pDeviceFilter->QueryInterface(IID_IAMCameraControl, (void**)&pCameraControl);
		if (FAILED(hr))
		{
			// The device does not support IAMCameraControl
		}
		else
		{
			long Min, Max, Step, Default, Flags, Val;

			// Get the range and default values 
			hr = pCameraControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step, &Default, &Flags);
			hr = pCameraControl->GetRange(CameraControl_Focus, &Min, &Max, &Step, &Default, &Flags);
			if (SUCCEEDED(hr))
			{
				hr = pCameraControl->Set(CameraControl_Exposure, -11, CameraControl_Flags_Manual ); // Min = -11, Max = 1, Step = 1
				hr = pCameraControl->Set(CameraControl_Focus, 12, CameraControl_Flags_Manual );
			}
		}


		// Query the capture filter for the IAMVideoProcAmp interface.
		IAMVideoProcAmp *pProcAmp = 0;
		hr = pDeviceFilter->QueryInterface(IID_IAMVideoProcAmp, (void**)&pProcAmp);
		if (FAILED(hr))
		{
			// The device does not support IAMVideoProcAmp
		}
		else
		{
			long Min, Max, Step, Default, Flags, Val;

			// Get the range and default values 
			hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step, &Default, &Flags);
			hr = pProcAmp->GetRange(VideoProcAmp_BacklightCompensation, &Min, &Max, &Step, &Default, &Flags);
			hr = pProcAmp->GetRange(VideoProcAmp_Contrast, &Min, &Max, &Step, &Default, &Flags);
			hr = pProcAmp->GetRange(VideoProcAmp_Saturation, &Min, &Max, &Step, &Default, &Flags);
			hr = pProcAmp->GetRange(VideoProcAmp_Sharpness, &Min, &Max, &Step, &Default, &Flags);
			hr = pProcAmp->GetRange(VideoProcAmp_WhiteBalance, &Min, &Max, &Step, &Default, &Flags);
			if (SUCCEEDED(hr))
			{
				hr = pProcAmp->Set(VideoProcAmp_Brightness, 142, VideoProcAmp_Flags_Manual);
				hr = pProcAmp->Set(VideoProcAmp_BacklightCompensation, 0, VideoProcAmp_Flags_Manual);
				hr = pProcAmp->Set(VideoProcAmp_Contrast, 4, VideoProcAmp_Flags_Manual);
				hr = pProcAmp->Set(VideoProcAmp_Saturation, 100, VideoProcAmp_Flags_Manual);
				hr = pProcAmp->Set(VideoProcAmp_Sharpness, 0, VideoProcAmp_Flags_Manual);
				hr = pProcAmp->Set(VideoProcAmp_WhiteBalance, 2800, VideoProcAmp_Flags_Manual);
			}
		}


		//============================================================
		//=========== END MY CODE  ======================================
		//=============================================================

		hr = S_OK;
		CTransformer* trans = new CTransformer( "Dif trans", 0, CLSID_DIFFilter, &hr );
		IBaseFilter * ttt = 0;
		trans->QueryInterface(IID_IBaseFilter, (LPVOID *)&ttt);
		// set FilterGraph
		hr = pCaptureGraphBuilder2->SetFiltergraph(pGraphBuilder);

		// get MediaControl interface
		hr = pGraphBuilder->QueryInterface(IID_IMediaControl,
			(LPVOID *)&pMediaControl);

		// add device filter to FilterGraph
		hr = pGraphBuilder->AddFilter(ttt, L"Dif trans");
		hr = pGraphBuilder->AddFilter(pDeviceFilter, L"Device Filter");

		// create Graph
		hr = pCaptureGraphBuilder2->RenderStream(&PIN_CATEGORY_CAPTURE,
			NULL, pDeviceFilter, NULL, NULL);

		// start playing
		hr = pMediaControl->Run();

		// Block execution: once this function returns, the graph objects are
		// released (which stops the graph), so keep a message box up meanwhile.
		MessageBox(NULL,
			"Block Execution",
			"Block",
			MB_OK);

		// release
		pTransFilter->Release();
		pMediaControl->Release();
		pCaptureGraphBuilder2->Release();
		pGraphBuilder->Release();
	}

	// release
	pEnumMoniker->Release();
	pCreateDevEnum->Release();

	// finalize COM
	CoUninitialize();

	return 0;
}
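The message box above is just a crude way to keep the function alive while the graph runs. A less intrusive alternative is to wait on the graph's event interface; a minimal sketch reusing pGraphBuilder and pMediaControl from the listing above, with error handling elided:

// Sketch: run the graph for a bounded time instead of blocking on a
// MessageBox. pGraphBuilder / pMediaControl as in the listing above.
IMediaEvent * pEvent = NULL;
if (SUCCEEDED(pGraphBuilder->QueryInterface(IID_IMediaEvent, (LPVOID *)&pEvent)))
{
	long evCode = 0;
	// A live capture graph never raises EC_COMPLETE, so the timeout
	// (30 s here) effectively sets the capture duration.
	pEvent->WaitForCompletion(30 * 1000, &evCode);
	pEvent->Release();
}
pMediaControl->Stop();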
Example No. 15
void Video::play( char *fileName, DWORD )
{
	WCHAR wPath[100];
	HRESULT hr;
	IMediaControl *pMC;

	if(!init_success)
		return;

	MultiByteToWideChar( CP_ACP, 0, fileName, -1, wPath, 100 );

	if( (hr = pGraph->RenderFile(wPath, NULL)) == 0)
	{

		// use full screen video interface
		// try to change display mode
		IVideoWindow *iVideoWindow = NULL;
		if( (hr = pGraph->QueryInterface(IID_IVideoWindow, (void **) &iVideoWindow)) == 0)
		{
#ifdef CREATE_DUMMY_WINDOW
			if(hwnd)
			{
				HRESULT hr2 = iVideoWindow->put_MessageDrain((OAHWND) hwnd);
				hr2 = 0;
			}
#endif

#ifdef FULL_SCREEN_VIDEO
			IBaseFilter *iFilter;
			if( pGraph->FindFilterByName(L"Video Renderer", &iFilter) == 0)
			{
				IBasicVideo *iBasicVideo;
				if( iFilter->QueryInterface(IID_IBasicVideo, (void **)&iBasicVideo) == 0)
				{
					IFullScreenVideo *iFullScreenVideo;
					IDirectDrawVideo *iDirectDrawVideo;
					if( iFilter->QueryInterface(IID_IFullScreenVideo, (void **)&iFullScreenVideo) == 0)
					{
						iFullScreenVideo->Release();
					}
					else if( iFilter->QueryInterface(IID_IDirectDrawVideo, (void **)&iDirectDrawVideo) == 0)
					{
						HRESULT hr2;
						hr2 = iDirectDrawVideo->UseWhenFullScreen(OATRUE);
						iDirectDrawVideo->Release();
					}

					iBasicVideo->Release();
				}
				iFilter->Release();
			}
			hr=iVideoWindow->put_FullScreenMode(OATRUE);
#endif

			/* // code to find all the filters in the filter graph
			{
				IEnumFilters *iEnumFilters;
				pGraph->EnumFilters(&iEnumFilters);

				ULONG filterCount = 16;
				IBaseFilter *iFilters[16];
				iEnumFilters->Next(filterCount, iFilters, &filterCount);

				for( ULONG j = 0; j < filterCount; ++j )
				{
					FILTER_INFO filterInfo;
					iFilters[j]->QueryFilterInfo(&filterInfo);
					filterInfo.pGraph->Release();
					iFilters[j]->Release();
				}

				iEnumFilters->Release();
			}*/

			iVideoWindow->HideCursor(OATRUE);
			iVideoWindow->put_Visible( OAFALSE );
			iVideoWindow->put_AutoShow( OAFALSE );
			LONG windowStyle;
			iVideoWindow->get_WindowStyle( &windowStyle);
			windowStyle &= ~WS_BORDER & ~WS_CAPTION & ~WS_SIZEBOX & ~WS_THICKFRAME &
				~WS_HSCROLL & ~WS_VSCROLL & ~WS_VISIBLE;
			iVideoWindow->put_WindowStyle( windowStyle);
		}
		else
			iVideoWindow = NULL;
		
		if( (hr = pGraph->QueryInterface(IID_IMediaControl, (void **) &pMC)) == 0)
		{
			pMC->Run();					// may return S_FALSE (1) while still cueing; that is still success
			state = PLAYING;
			pMC->Release();
		}

		if( iVideoWindow )
		{
			iVideoWindow->put_Visible( OAFALSE );
			LONG windowStyle;
			iVideoWindow->get_WindowStyle( &windowStyle);
			windowStyle &= ~WS_BORDER & ~WS_CAPTION & ~WS_SIZEBOX & ~WS_THICKFRAME &
				~WS_HSCROLL & ~WS_VSCROLL & ~WS_VISIBLE;
			iVideoWindow->put_WindowStyle( windowStyle);

			LONG maxWidth;
			LONG maxHeight;
			hr=iVideoWindow->GetMaxIdealImageSize( &maxWidth, &maxHeight);
#ifdef FULL_SCREEN_VIDEO
#else
			iVideoWindow->put_BorderColor( RGB(0,0,0) );
			iVideoWindow->put_WindowState(SW_MAXIMIZE);

			IBaseFilter *iFilter;
			if( pGraph->FindFilterByName(L"Video Renderer", &iFilter) == 0)
			{
				IBasicVideo *iBasicVideo;
				if( iFilter->QueryInterface(IID_IBasicVideo, (void **)&iBasicVideo) == 0)
				{
					LONG screenWidth;
					LONG screenHeight;
					LONG videoWidth;
					LONG videoHeight;
					if( iVideoWindow->get_Width(&screenWidth) == 0 &&
						iVideoWindow->get_Height(&screenHeight) == 0 &&
						iBasicVideo->GetVideoSize(&videoWidth, &videoHeight) == 0)
					{
						// zoom in by 2 if possible
						if( screenWidth >= videoWidth * 2 &&
							screenHeight >= videoHeight * 2)
						{
							videoWidth *= 2;
							videoHeight *= 2;
						}

						// center the video client area
						iBasicVideo->SetDestinationPosition(
							(screenWidth-videoWidth)/2, (screenHeight-videoHeight)/2,
							videoWidth, videoHeight);
					}

					iBasicVideo->Release();
				}
				iFilter->Release();
			}
#endif
			iVideoWindow->HideCursor(OATRUE);
			iVideoWindow->SetWindowForeground(OATRUE);
		}

		if(iVideoWindow)
		{
			iVideoWindow->Release();
			iVideoWindow = NULL;
		}
	}

	if( hr && !skip_on_fail_flag)
		err.run("video.play error %d", hr );
}
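As the comment above notes, IMediaControl::Run can return S_FALSE (1) while the graph is still cueing; that is success, not failure. When the caller needs to know the graph actually reached State_Running, polling via IMediaControl::GetState is the documented approach. A small sketch, with pMC standing in for the IMediaControl pointer from the listing:

// Sketch: confirm the graph reached State_Running after Run() returned
// S_FALSE (i.e. the transition was still in progress).
OAFilterState fs = State_Stopped;
HRESULT hrRun = pMC->Run();
if (hrRun == S_FALSE)
	hrRun = pMC->GetState(2000 /* ms */, &fs); // VFW_S_STATE_INTERMEDIATE on timeout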
Example No. 16
	void TestCamera()
	{
		InitOpenCL();
		//TCHAR szDeviceName[80];
		//TCHAR szDeviceVersion[80];

		//for (int wIndex = 0; wIndex < 10; wIndex++) 
		//{
		//	if (capGetDriverDescription(
		//		wIndex, 
		//		szDeviceName, 
		//		sizeof (szDeviceName), 
		//		szDeviceVersion, 
		//		sizeof (szDeviceVersion)
		//		)) 
		//	{
		//		// Append name to list of installed capture drivers
		//		// and then let the user select a driver to use.
		//	}
		//} 

		//HWND hWndC = capCreateCaptureWindow(TEXT("PunkCapture"),
		//	WS_CHILD | WS_VISIBLE, 0, 0, 160, 120, *System::Window::Instance(), 1);

		//SendMessage (hWndC, WM_CAP_DRIVER_CONNECT, 0, 0L); 
		//// 
		//// Or, use the macro to connect to the MSVIDEO driver: 
		//// fOK = capDriverConnect(hWndC, 0); 
		//// 
		//// Place code to set up and capture video here. 
		//// 
		////capDriverDisconnect (hWndC); 

		//CAPDRIVERCAPS CapDriverCaps = { }; 
		//CAPSTATUS     CapStatus = { };

		//capDriverGetCaps(hWndC, &CapDriverCaps, sizeof(CAPDRIVERCAPS)); 

		//// Video source dialog box. 
		//if (CapDriverCaps.fHasDlgVideoSource)
		//{
		//	capDlgVideoSource(hWndC); 
		//}

		//// Video format dialog box. 
		//if (CapDriverCaps.fHasDlgVideoFormat) 
		//{
		//	capDlgVideoFormat(hWndC); 

		//	// Are there new image dimensions?
		//	capGetStatus(hWndC, &CapStatus, sizeof (CAPSTATUS));

		//	// If so, notify the parent of a size change.
		//} 

		//// Video display dialog box. 
		//if (CapDriverCaps.fHasDlgVideoDisplay)
		//{
		//	capDlgVideoDisplay(hWndC); 
		//}


		HRESULT hr;
		IGraphBuilder*  graph= 0;  hr = CoCreateInstance( CLSID_FilterGraph, 0, CLSCTX_INPROC,IID_IGraphBuilder, (void **)&graph );
		IMediaControl*  ctrl = 0;  hr = graph->QueryInterface( IID_IMediaControl, (void **)&ctrl );

		ICreateDevEnum* devs = 0;  hr = CoCreateInstance (CLSID_SystemDeviceEnum, 0, CLSCTX_INPROC, IID_ICreateDevEnum, (void **) &devs);
		IEnumMoniker*   cams = 0;  hr = devs?devs->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, &cams, 0):0;  
		IMoniker*       mon  = 0;  hr = cams?cams->Next (1,&mon,0):0;  // get the first capture device found (usually the webcam)
		IBaseFilter*    cam  = 0;  hr = mon?mon->BindToObject(0,0,IID_IBaseFilter, (void**)&cam):0;
		hr = graph->AddFilter(cam, L"Capture Source"); // add web cam to graph as source
		IEnumPins*      pins = 0;  hr = cam?cam->EnumPins(&pins):0;   // we need output pin to autogenerate rest of the graph
		IPin*           pin  = 0;  hr = pins?pins->Next(1,&pin, 0):0; // via graph->Render
		hr = graph->Render(pin); // graph builder now builds whole filter chain including MJPG decompression on some webcams
		IEnumFilters*   fil  = 0;  hr = graph->EnumFilters(&fil); // enumerate all filters now in the graph
		IBaseFilter*    rnd  = 0;  hr = fil->Next(1,&rnd,0); // in practice the newest filter (the renderer) comes first
		hr = rnd->EnumPins(&pins);  // the data we are interested in is pumped into the renderer's input pin
		hr = pins->Next(1,&pin, 0); // via the Receive member of its IMemInputPin interface
		IMemInputPin*   mem  = 0;  hr = pin->QueryInterface(IID_IMemInputPin,(void**)&mem);

		DsHook(mem,6,Receive); // redirect vtable slot 6 (IMemInputPin::Receive) to our own proc to grab image data

		hr = ctrl->Run();   

	};
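DsHook itself is not shown in this listing; conceptually it patches entry 6 of the IMemInputPin vtable, which (after the three IUnknown slots and GetAllocator, NotifyAllocator, GetAllocatorRequirements) is Receive. A minimal sketch of such a patch, assuming the usual MSVC COM vtable layout; HookVtableEntry is a hypothetical name, not the listing's actual helper:

// Sketch: replace one COM vtable entry and return the original so the
// hook can forward to it. iface = interface pointer, slot = zero-based
// vtable index (6 == IMemInputPin::Receive), newFn = replacement.
void * HookVtableEntry(void * iface, unsigned slot, void * newFn)
{
	void ** vtbl = *(void ***)iface;
	DWORD oldProt;
	VirtualProtect(&vtbl[slot], sizeof(void *), PAGE_EXECUTE_READWRITE, &oldProt);
	void * orig = vtbl[slot];
	vtbl[slot] = newFn;
	VirtualProtect(&vtbl[slot], sizeof(void *), oldProt, &oldProt);
	return orig;
}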
Example No. 17
static gboolean
gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  gboolean ret = FALSE;
  HRESULT hres;
  GstStructure *s = gst_caps_get_structure (caps, 0);
  GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
  GstDshowVideoDecClass *klass =
      (GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (vdec);
  GstBuffer *extradata = NULL;
  const GValue *v = NULL;
  guint size = 0;
  GstCaps *caps_out;
  AM_MEDIA_TYPE output_mediatype, input_mediatype;
  VIDEOINFOHEADER *input_vheader = NULL, *output_vheader = NULL;
  CComPtr<IPin> output_pin;
  CComPtr<IPin> input_pin;
  IBaseFilter *srcfilter = NULL;
  IBaseFilter *sinkfilter = NULL;
  const GValue *fps;

  /* read data */
  if (!gst_structure_get_int (s, "width", &vdec->width) ||
      !gst_structure_get_int (s, "height", &vdec->height)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("error getting video width or height from caps"), (NULL));
    goto end;
  }
  fps = gst_structure_get_value (s, "framerate");
  if (fps) {
    vdec->fps_n = gst_value_get_fraction_numerator (fps);
    vdec->fps_d = gst_value_get_fraction_denominator (fps);
  }
  else {
    /* Invent a sane default framerate; the timestamps matter
     * more anyway. */
    vdec->fps_n = 25;
    vdec->fps_d = 1;
  }

  if ((v = gst_structure_get_value (s, "codec_data")))
    extradata = gst_value_get_buffer (v);

  /* define the input type format */
  memset (&input_mediatype, 0, sizeof (AM_MEDIA_TYPE));
  input_mediatype.majortype = klass->entry->input_majortype;
  input_mediatype.subtype = klass->entry->input_subtype;
  input_mediatype.bFixedSizeSamples = FALSE;
  input_mediatype.bTemporalCompression = TRUE;

  if (strstr (klass->entry->sinkcaps, "video/mpeg, mpegversion= (int) 1")) {
    size =
        sizeof (MPEG1VIDEOINFO) + (extradata ? GST_BUFFER_SIZE (extradata) -
        1 : 0);
    input_vheader = (VIDEOINFOHEADER *)g_malloc0 (size);

    input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
    if (extradata) {
      MPEG1VIDEOINFO *mpeg_info = (MPEG1VIDEOINFO *) input_vheader;

      memcpy (mpeg_info->bSequenceHeader,
          GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata));
      mpeg_info->cbSequenceHeader = GST_BUFFER_SIZE (extradata);
    }
    input_mediatype.formattype = FORMAT_MPEGVideo;
  } else {
    size =
        sizeof (VIDEOINFOHEADER) +
        (extradata ? GST_BUFFER_SIZE (extradata) : 0);
    input_vheader = (VIDEOINFOHEADER *)g_malloc0 (size);

    input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
    if (extradata) {            /* Codec data is appended after our header */
      memcpy (((guchar *) input_vheader) + sizeof (VIDEOINFOHEADER),
          GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata));
      input_vheader->bmiHeader.biSize += GST_BUFFER_SIZE (extradata);
    }
    input_mediatype.formattype = FORMAT_VideoInfo;
  }
  input_vheader->rcSource.top = input_vheader->rcSource.left = 0;
  input_vheader->rcSource.right = vdec->width;
  input_vheader->rcSource.bottom = vdec->height;
  input_vheader->rcTarget = input_vheader->rcSource;
  input_vheader->bmiHeader.biWidth = vdec->width;
  input_vheader->bmiHeader.biHeight = vdec->height;
  input_vheader->bmiHeader.biPlanes = 1;
  input_vheader->bmiHeader.biBitCount = 16;
  input_vheader->bmiHeader.biCompression = klass->entry->format;
  input_vheader->bmiHeader.biSizeImage =
      (vdec->width * vdec->height) * (input_vheader->bmiHeader.biBitCount / 8);

  input_mediatype.cbFormat = size;
  input_mediatype.pbFormat = (BYTE *) input_vheader;
  input_mediatype.lSampleSize = input_vheader->bmiHeader.biSizeImage;

  vdec->fakesrc->GetOutputPin()->SetMediaType(&input_mediatype);

  /* set the sample size for fakesrc filter to the output buffer size */
  vdec->fakesrc->GetOutputPin()->SetSampleSize(input_mediatype.lSampleSize);

  /* connect our fake src to decoder */
  hres = vdec->fakesrc->QueryInterface(IID_IBaseFilter,
      (void **) &srcfilter);
  if (FAILED (hres)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
      ("Can't QT fakesrc to IBaseFilter: %x", hres), (NULL));
    goto end;
  }

  output_pin = gst_dshow_get_pin_from_filter (srcfilter, PINDIR_OUTPUT);
  if (!output_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get output pin from our directshow fakesrc filter"), (NULL));
    goto end;
  }
  input_pin = gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_INPUT);
  if (!input_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get input pin from decoder filter"), (NULL));
    goto end;
  }

  hres = vdec->filtergraph->ConnectDirect (output_pin, input_pin, NULL);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't connect fakesrc with decoder (error=%x)", hres), (NULL));
    goto end;
  }

  /* get decoder output video format */
  if (!gst_dshowvideodec_get_filter_output_format (vdec,
          klass->entry->output_subtype, &output_vheader, &size)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get decoder output video format"), (NULL));
    goto end;
  }

  memset (&output_mediatype, 0, sizeof (AM_MEDIA_TYPE));
  output_mediatype.majortype = klass->entry->output_majortype;
  output_mediatype.subtype = klass->entry->output_subtype;
  output_mediatype.bFixedSizeSamples = TRUE;
  output_mediatype.bTemporalCompression = FALSE;
  output_mediatype.lSampleSize = output_vheader->bmiHeader.biSizeImage;
  output_mediatype.formattype = FORMAT_VideoInfo;
  output_mediatype.cbFormat = size;
  output_mediatype.pbFormat = (BYTE *) output_vheader;

  vdec->fakesink->SetMediaType (&output_mediatype);

  /* connect decoder to our fake sink */
  output_pin = gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT);
  if (!output_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get output pin from our decoder filter"), (NULL));
    goto end;
  }

  hres = vdec->fakesink->QueryInterface(IID_IBaseFilter,
      (void **) &sinkfilter);
  if (FAILED (hres)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
      ("Can't QT fakesink to IBaseFilter: %x", hres), (NULL));
    goto end;
  }

  input_pin = gst_dshow_get_pin_from_filter (sinkfilter, PINDIR_INPUT);
  if (!input_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get input pin from our directshow fakesink filter"), (NULL));
    goto end;
  }

  hres = vdec->filtergraph->ConnectDirect(output_pin, input_pin,
      &output_mediatype);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't connect decoder with fakesink (error=%x)", hres), (NULL));
    goto end;
  }

  /* negotiate output */
  caps_out = gst_caps_from_string (klass->entry->srccaps);
  gst_caps_set_simple (caps_out,
      "width", G_TYPE_INT, vdec->width,
      "height", G_TYPE_INT, vdec->height, NULL);

  if (vdec->fps_n && vdec->fps_d) {
      gst_caps_set_simple (caps_out, 
          "framerate", GST_TYPE_FRACTION, vdec->fps_n, vdec->fps_d, NULL);
  }

  if (!gst_pad_set_caps (vdec->srcpad, caps_out)) {
    gst_caps_unref (caps_out);
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Failed to negotiate output"), (NULL));
    goto end;
  }
  gst_caps_unref (caps_out);

  hres = vdec->mediafilter->Run (-1);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't run the directshow graph (error=%d)", hres), (NULL));
    goto end;
  }

  ret = TRUE;
end:
  gst_object_unref (vdec);
  if (input_vheader)
    g_free (input_vheader);
  if (srcfilter)
    srcfilter->Release();
  if (sinkfilter)
    sinkfilter->Release();
  return ret;
}
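gst_dshow_get_pin_from_filter is a project helper, but functionally it just walks the filter's pins and returns the first one with the requested direction. A hedged sketch of the equivalent DirectShow calls, not the project's actual implementation:

/* Sketch: find a filter's first pin with the given direction. The caller
 * owns the returned reference and must Release() it. */
static IPin *
get_pin_by_direction (IBaseFilter * filter, PIN_DIRECTION wanted)
{
  IEnumPins *pins = NULL;
  IPin *pin = NULL;
  if (FAILED (filter->EnumPins (&pins)))
    return NULL;
  while (pins->Next (1, &pin, NULL) == S_OK) {
    PIN_DIRECTION dir;
    pin->QueryDirection (&dir);
    if (dir == wanted)
      break;                    /* keep the reference for the caller */
    pin->Release ();
    pin = NULL;
  }
  pins->Release ();
  return pin;
}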
Example No. 18
/*
 * Class:     sage_PVR350OSDRenderingPlugin
 * Method:    updateOSD0
 * Signature: (J[IIILjava/awt/Rectangle;Ljava/awt/Rectangle;)Z
 */
JNIEXPORT jboolean JNICALL Java_sage_PVR350OSDRenderingPlugin_updateOSD0(JNIEnv *env, jobject jo,
																	   jlong osdHandle, jintArray jimage,
																	   jint width, jint height,
																	   jobject targetRect,
																	   jobject videoRect)
{
	static jclass rectClass = (jclass) env->NewGlobalRef(env->FindClass("java/awt/Rectangle"));
	static jfieldID fid_rectx = env->GetFieldID(rectClass, "x", "I");
	static jfieldID fid_recty = env->GetFieldID(rectClass, "y", "I");
	static jfieldID fid_rectwidth = env->GetFieldID(rectClass, "width", "I");
	static jfieldID fid_rectheight = env->GetFieldID(rectClass, "height", "I");

	PVR350OSDPluginNativeData* osdData = (PVR350OSDPluginNativeData*) osdHandle;
	jboolean rv = JNI_TRUE;
	IBaseFilter* pOSD = osdData->pOSD;
	if (WaitForSingleObject(osdData->mutex350OSD, MUTEX350WAITTIME) != WAIT_OBJECT_0)
		return JNI_FALSE; // couldn't acquire the 350 OSD mutex within the timeout

	IKsPropertySet* ksProp = NULL;
	HRESULT hr = pOSD->QueryInterface(IID_IKsPropertySet, (void**)&ksProp);
	if (FAILED(hr))
	{
		ReleaseMutex(osdData->mutex350OSD);
		return JNI_FALSE;
	}

	DWORD arrLen = width*height;

	/*
	 * PROPRIETARY CODE BLOCK DISABLED FOR OPEN SOURCE RELEASE - START
	OSD_BUFF_LOC osdBuf;
	osdBuf.memCopySize = arrLen*4;
	jint* critArr = (jint*)env->GetPrimitiveArrayCritical(jimage, NULL);
	OSD_USER_BUFPTR pBuf = critArr;

	if (osdData->ntscModeFor350)
	{
		osdBuf.OSDMemOffset = osdData->lastOSDMemWrite == 0 ? 1400000 : 0;

		hr = ksProp->Set(PROPSETID_IVAC_DISPLAY, IVAC_OSD_MEMCOPY, &osdBuf,
			sizeof(OSD_BUFF_LOC), pBuf, arrLen*4);
		TEST_AND_PRINT
		env->ReleasePrimitiveArrayCritical(jimage, critArr, JNI_ABORT);
		rv = rv && SUCCEEDED(hr);
		if (width == 720 && height >= 480)
		{
			// Full OSD update, just do a video pointer flip, don't waste time with a BLT
			osdData->lastOSDMemWrite = osdBuf.OSDMemOffset;
		}
		else
		{

			BLT_BASE_PARAM m_baseParam;
			OSD_BLT_COPY m_BltCopy;
				
			m_baseParam.ROPCode = 0xA;
			m_baseParam.alphaBlend = 1;
			m_baseParam.pixelBlend = 0;
			m_baseParam.destPixelMask = 0xFFFFFFFF;
			m_baseParam.bltWidth = width;
			m_baseParam.bltHeight = height;
			m_baseParam.destRectAddr = osdData->lastOSDMemWrite +
				(env->GetIntField(targetRect, fid_rectx) + env->GetIntField(targetRect, fid_recty)*720)*4;//0;
			m_baseParam.destStride = 720;//width;

			memcpy(&(m_BltCopy.bltBaseParam), &m_baseParam, sizeof(BLT_BASE_PARAM));
			m_BltCopy.srcStride = width;
			m_BltCopy.srcRectAddr = osdBuf.OSDMemOffset;

			hr = ksProp->Set(PROPSETID_IVAC_DISPLAY, IVAC_OSD_BLT_COPY, &m_BltCopy,
				sizeof(OSD_BLT_COPY), &m_BltCopy, sizeof(OSD_BLT_COPY));
			TEST_AND_PRINT
			rv = rv && SUCCEEDED(hr);

			// IMPORTANT: This was what was causing a hang in V2RC3 because I was doing it before
			// the BLT block above. The hang was caused by the OSD still performing a BLT while
			// I made a seek call.  If I wait here for the BLT to complete before I release the
			// mutex on the 350 then I should prevent those hangs
			OSD_BLT_STATE osdState = 1;
			int numWaits = 0;
			while (osdState && SUCCEEDED(hr) && (numWaits++ < 500))
			{
				DWORD dwReturned;
				hr = ksProp->Get(PROPSETID_IVAC_DISPLAY, IVAC_OSD_BLT_STATE, &osdState, sizeof(OSD_BLT_STATE),
					&osdState, sizeof(OSD_BLT_STATE), &dwReturned);
				TEST_AND_PRINT
				slog((env, "OSDBLTState=%d\r\n", osdState));
				Sleep(10);
			}
		}
	}
	else
	{
		if (width == 720 && height >= 480)
		{
			osdBuf.OSDMemOffset = 0;

			hr = ksProp->Set(PROPSETID_IVAC_DISPLAY, IVAC_OSD_MEMCOPY, &osdBuf,
				sizeof(OSD_BUFF_LOC), pBuf, arrLen*4);
			TEST_AND_PRINT
			env->ReleasePrimitiveArrayCritical(jimage, critArr, JNI_ABORT);
			rv = rv && SUCCEEDED(hr);

			// Full OSD update, just do a video pointer flip, don't waste time with a BLT
			osdData->lastOSDMemWrite = 0;
		}
		else
		{
			osdBuf.OSDMemOffset = 1700000;

			hr = ksProp->Set(PROPSETID_IVAC_DISPLAY, IVAC_OSD_MEMCOPY, &osdBuf,
				sizeof(OSD_BUFF_LOC), pBuf, arrLen*4);
			TEST_AND_PRINT
			env->ReleasePrimitiveArrayCritical(jimage, critArr, JNI_ABORT);
			rv = rv && SUCCEEDED(hr);

			BLT_BASE_PARAM m_baseParam;
			OSD_BLT_COPY m_BltCopy;
					
			m_baseParam.ROPCode = 0xA;
			m_baseParam.alphaBlend = 1;
			m_baseParam.pixelBlend = 0;
			m_baseParam.destPixelMask = 0xFFFFFFFF;
			m_baseParam.bltWidth = width;
			m_baseParam.bltHeight = height;
			m_baseParam.destRectAddr = osdData->lastOSDMemWrite +
				(env->GetIntField(targetRect, fid_rectx) + env->GetIntField(targetRect, fid_recty)*720)*4;
			m_baseParam.destStride = 720;

			m_BltCopy.bltBaseParam = m_baseParam;
			m_BltCopy.srcStride = width;
			m_BltCopy.srcRectAddr = osdBuf.OSDMemOffset;
			hr = ksProp->Set(PROPSETID_IVAC_DISPLAY, IVAC_OSD_BLT_COPY, &m_BltCopy,
				sizeof(OSD_BLT_COPY), &m_BltCopy, sizeof(OSD_BLT_COPY));
			TEST_AND_PRINT
			rv = rv && SUCCEEDED(hr);

		}
	}

	if (videoRect)
	{
		VIDEO_SCREEN_COORD vCoord;
		vCoord.topLeftX = env->GetIntField(videoRect, fid_rectx);
		vCoord.topLeftY = env->GetIntField(videoRect, fid_recty);
		vCoord.videoWinWidth = env->GetIntField(videoRect, fid_rectwidth);
		vCoord.videoWinHeight = env->GetIntField(videoRect, fid_rectheight);
		hr = ksProp->Set(PROPSETID_IVAC_DISPLAY, IVAC_VIDEO_SCREEN_COORD, &vCoord, 
			sizeof(VIDEO_SCREEN_COORD), &vCoord, sizeof(VIDEO_SCREEN_COORD));
		TEST_AND_PRINT
		rv = rv && SUCCEEDED(hr);
	}

	OSD_DISPLAYED_BUFFER dispBuf;
	dispBuf.OSDBuffStart = osdData->lastOSDMemWrite;
	dispBuf.OSDInitXOffset = 0;
	dispBuf.OSDInitYOffset = 0;
	dispBuf.OSDLines = osdData->ntscModeFor350 ? (480*2) : (576*2);
	dispBuf.OSDStride = 1440;
	hr = ksProp->Set(PROPSETID_IVAC_DISPLAY, IVAC_OSD_DISPLAYED_BUFFER, &dispBuf,
		sizeof(OSD_DISPLAYED_BUFFER), &dispBuf, sizeof(OSD_DISPLAYED_BUFFER));
	TEST_AND_PRINT
	rv = rv && SUCCEEDED(hr);

 * PROPRIETARY CODE BLOCK DISABLED FOR OPEN SOURCE RELEASE - END
 */
	SAFE_RELEASE(ksProp);

	ReleaseMutex(osdData->mutex350OSD);
	return rv;
}
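The property sets used above are vendor specific, so before issuing Get/Set calls it can be worth probing what the driver actually supports via IKsPropertySet::QuerySupported. A short sketch with the same ksProp pointer; PROPSETID_IVAC_DISPLAY and IVAC_OSD_MEMCOPY come from the vendor headers this listing relies on:

// Sketch: check driver support for a property before using it.
DWORD support = 0;
HRESULT hrq = ksProp->QuerySupported(PROPSETID_IVAC_DISPLAY,
	IVAC_OSD_MEMCOPY, &support);
if (SUCCEEDED(hrq) && (support & KSPROPERTY_SUPPORT_SET))
{
	// The driver accepts Set() for this property.
}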
Example No. 19
bool CVideoRenderer::Initialize ( const char * szFile )
{
    IBaseFilter * pDSound, * pXVID, * pVorbis;
    IBaseFilter * pSource;
    IFileSourceFilter * pFileSource;
    HRESULT hr;

    // Get the codecs
    CVideoManager *pManager = CVideoManager::GetSingletonPtr ();
    if ( pManager->CreateCodecSource ( &pSource ) != S_OK ) return false;
    if ( pManager->CreateCodecVorbis ( &pVorbis ) != S_OK ) return false;
    if ( pManager->CreateCodecXVID ( &pXVID ) != S_OK ) return false;

    // Check for a valid device
    if ( !m_pDevice ) return false;

    // Lock so concurrent calls can't corrupt the graph while we build it
    Lock ();

    CCore::GetSingleton ().GetConsole ()->Printf ( "Creating DirectShow graph instance" );

    // Initialize the graph builder
    CoCreateInstance ( CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, reinterpret_cast < void** > ( &m_pGraph ) );
    if ( m_pGraph == NULL ) { Unlock (); return false; }

    CCore::GetSingleton ().GetConsole ()->Printf ( "Creating DirectSound renderer instance" );

    // Initialize the DirectSound filter
    CoCreateInstance ( CLSID_DSoundRender, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, reinterpret_cast < void** > ( &pDSound ) );
    if ( pDSound == NULL ) { Unlock (); return false; }

#ifdef MTA_DEBUG
    CCore::GetSingleton ().GetConsole ()->Printf ( "Adding ROT for debug stuff" );

    // Enable GraphView debugging
    AddToROT(m_pGraph);
#endif

    CCore::GetSingleton ().GetConsole ()->Printf ( "Creating video renderer instance" );

    // Create an instance of the texture renderer and add it to the graph
    m_pFilter = CreateTextureRenderer ( &hr, m_pDevice, this );
    if ( hr != S_OK ) { Unlock (); return false; }

    // Add the source file filter to the graph
    int iBufferSize = MultiByteToWideChar ( CP_ACP, 0, szFile, -1, NULL, 0 );
    wchar_t *wszFile = new wchar_t[iBufferSize];
    MultiByteToWideChar ( CP_ACP, 0, szFile, -1, wszFile, iBufferSize );

    CCore::GetSingleton ().GetConsole ()->Printf ( "Registering filter (Matroska)" );

    // Add the filters to the graph
    m_pGraph->AddFilter         ( pSource,      L"[MTA] MKV source" );
    
    CCore::GetSingleton ().GetConsole ()->Printf ( "Loading video file" );

    pSource->QueryInterface ( IID_IFileSourceFilter, reinterpret_cast < void** > ( &pFileSource ) );
    if ( pFileSource->Load ( wszFile, NULL ) != S_OK ) { delete [] wszFile; Unlock (); return false; }

    CCore::GetSingleton ().GetConsole ()->Printf ( "Registering filter (Output)" );

    m_pGraph->AddFilter         ( m_pFilter,    L"[MTA] Texture renderer" );
    CCore::GetSingleton ().GetConsole ()->Printf ( "Registering filter (Vorbis)" );
    m_pGraph->AddFilter         ( pVorbis,      L"[MTA] Vorbis decoder" );
    CCore::GetSingleton ().GetConsole ()->Printf ( "Registering filter (XVID)" );
    m_pGraph->AddFilter         ( pXVID,        L"[MTA] XVID codec" );
    CCore::GetSingleton ().GetConsole ()->Printf ( "Registering filter (DirectSound)" );
    m_pGraph->AddFilter         ( pDSound,      L"[MTA] DirectSound renderer" );

    CCore::GetSingleton ().GetConsole ()->Printf ( "Connecting video renderer" );

    // Connect the video pins
    IPin *pOut, *pSourceOut;
    hr = ConnectFilters ( m_pGraph, pSource, pXVID, &pSourceOut );      // MKV Source       -> XVID
    assert ( hr == S_OK );
    hr = ConnectFilters ( m_pGraph, pXVID, m_pFilter, &pOut );          // XVID             -> Texture Renderer
    assert ( hr == S_OK );

    // Connect the audio pins (not necessary)
    hr = ConnectFilters ( m_pGraph, pSource, pVorbis, &pOut );          // MKV Source       -> Vorbis Decoder
    hr = ConnectFilters ( m_pGraph, pVorbis, pDSound, &pOut );          // Vorbis Decoder   -> DirectSound renderer

    m_pGraph->QueryInterface ( IID_IMediaSeeking, reinterpret_cast < void** > ( &m_pMediaSeeking ) );
    assert ( m_pMediaSeeking != NULL );
    m_pGraph->QueryInterface ( IID_IMediaControl, reinterpret_cast < void** > ( &m_pMediaControl ) );
    if ( m_pMediaControl == NULL || m_pMediaSeeking == NULL ) { delete [] wszFile; Unlock (); return false; }

    m_pGraph->QueryInterface ( IID_IBasicAudio, reinterpret_cast < void** > ( &m_pBasicAudio ) );
    if ( m_pBasicAudio == NULL ) { delete [] wszFile; Unlock (); return false; }

    CCore::GetSingleton ().GetConsole ()->Printf ( "Successfully loaded video renderer" );

    m_pBasicAudio->get_Volume ( &lDefaultVolume );

    // Clean up
    delete [] wszFile;
//  m_pGraph->Release ();

    // Unlock the mutex
    Unlock ();

    return true;
}
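AddToROT, used above under MTA_DEBUG, is not part of this listing. The usual form, close to the one in the DirectShow documentation, registers the graph in the Running Object Table so GraphEdit can attach to the running process; the listing's one-argument AddToROT presumably stores the registration cookie itself:

// Sketch: register a filter graph in the ROT for GraphEdit debugging.
// Returns the registration cookie via pdwRegister for later removal.
HRESULT AddToROT ( IUnknown * pUnkGraph, DWORD * pdwRegister )
{
    IMoniker * pMoniker = NULL;
    IRunningObjectTable * pROT = NULL;
    if ( FAILED ( GetRunningObjectTable ( 0, &pROT ) ) )
        return E_FAIL;

    WCHAR wsz [ 128 ];
    swprintf_s ( wsz, L"FilterGraph %08x pid %08x",
                 (DWORD)(DWORD_PTR) pUnkGraph, GetCurrentProcessId () );

    HRESULT hr = CreateItemMoniker ( L"!", wsz, &pMoniker );
    if ( SUCCEEDED ( hr ) )
    {
        hr = pROT->Register ( ROTFLAGS_REGISTRATIONKEEPSALIVE, pUnkGraph,
                              pMoniker, pdwRegister );
        pMoniker->Release ();
    }
    pROT->Release ();
    return hr;
}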