HRESULT CFLVConverter::CreateVideoCompressor(IBaseFilter** ppFilter)
{
	// The {33D9A760-90C8-11D0-BD43-00A0C911CE86} moniker names the video compressor
	// category (not the capture category); "\\msvc" selects the Microsoft Video 1 codec.
	static WCHAR szMon[] = L"@device:cm:{33D9A760-90C8-11D0-BD43-00A0C911CE86}\\msvc";
	IBindCtx *pBindCtx = NULL;
	ULONG chEaten = 0;
	IMoniker *pMoniker = NULL;
	if(!ppFilter) return E_POINTER;
	*ppFilter = NULL;
	HRESULT hr = CreateBindCtx(0, &pBindCtx);
	if (FAILED(hr)) return hr;

	hr = MkParseDisplayName(pBindCtx, szMon, &chEaten, &pMoniker);
	pBindCtx->Release();
	if (SUCCEEDED(hr))
	{
		hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**) ppFilter);
		//not needed here: the caller adds the filter to its graph, e.g. pGB->AddFilter(*ppFilter, L"MSVC1");
		pMoniker->Release();
	}
	return hr;
}
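// An alternative sketch (not part of the original code): instead of hard-coding the
// moniker display name, enumerate CLSID_VideoCompressorCategory and pick a compressor
// by (partial) friendly name. Requires <dshow.h> and strmiids.lib; the function name
// and matching rule are illustrative assumptions.
HRESULT FindVideoCompressorByName(LPCWSTR name, IBaseFilter** ppFilter)
{
	if (!ppFilter) return E_POINTER;
	*ppFilter = NULL;
	ICreateDevEnum *pDevEnum = NULL;
	HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
		IID_ICreateDevEnum, (void **)&pDevEnum);
	if (FAILED(hr)) return hr;
	IEnumMoniker *pEnum = NULL;
	hr = pDevEnum->CreateClassEnumerator(CLSID_VideoCompressorCategory, &pEnum, 0);
	if (hr == S_OK)
	{
		IMoniker *pMoniker = NULL;
		while (*ppFilter == NULL && pEnum->Next(1, &pMoniker, NULL) == S_OK)
		{
			IPropertyBag *pBag = NULL;
			if (SUCCEEDED(pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pBag)))
			{
				VARIANT var;
				VariantInit(&var);
				if (SUCCEEDED(pBag->Read(L"FriendlyName", &var, 0)) &&
					wcsstr(var.bstrVal, name) != NULL)
				{
					// match: bind the moniker to the compressor filter
					pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void **)ppFilter);
				}
				VariantClear(&var);
				pBag->Release();
			}
			pMoniker->Release();
		}
		pEnum->Release();
	}
	pDevEnum->Release();
	return *ppFilter ? S_OK : E_FAIL;
}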
IBaseFilter *GetAudioDevice (){
	// Create the system device enumerator.
	ICreateDevEnum *pDevEnum = NULL;
	HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC, 
		IID_ICreateDevEnum, (void **)&pDevEnum);
	if (FAILED(hr))
		return NULL;

	// Create an enumerator for audio input (capture) devices.
	IEnumMoniker *pClassEnum = NULL;
	pDevEnum->CreateClassEnumerator(CLSID_AudioInputDeviceCategory, &pClassEnum, 0);
	if (pClassEnum == NULL)	// S_FALSE with a NULL enumerator means no audio devices
	{
		pDevEnum->Release();
		return NULL;
	}

	ULONG cFetched;
	IMoniker *pMoniker = NULL;
	IBaseFilter *pSrc = NULL;
	if (pClassEnum->Next(1, &pMoniker, &cFetched) == S_OK)
	{
	  // Bind the first moniker to a filter object.
	 pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pSrc);
		pMoniker->Release();
	}
	pClassEnum->Release();
	pDevEnum->Release();
	return pSrc;
}
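// A usage sketch for GetAudioDevice() (illustrative, not from the original project):
// the returned filter carries the reference taken by BindToObject and may be NULL if
// no audio capture device is installed. The helper name is hypothetical.
HRESULT AddDefaultAudioCapture(IGraphBuilder *pGraph)
{
	IBaseFilter *pAudio = GetAudioDevice();
	if (!pAudio) return E_FAIL;
	HRESULT hr = pGraph->AddFilter(pAudio, L"Audio Capture");
	pAudio->Release();	// the graph keeps its own reference after AddFilter
	return hr;
}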
Example #3
Camera::Camera(bool Show,bool Start) : eHandler(this),_realData(false),_UpdateWindow(Show),_LastData(0),_CurData(0) {
	DWORD no;
	IGraphBuilder *graph = 0;
	ctrl = 0;
	ICreateDevEnum *devs = 0;
	IEnumMoniker *cams = 0;
	IMoniker *mon = 0;
	IBaseFilter *cam = 0;
	IEnumPins *pins = 0;
	IPin *pin = 0;
	IEnumFilters *fil = 0;
	IBaseFilter *rnd = 0;
	IMemInputPin *mem = 0;
	curCamera = this;
	_isOn = Start;

	CoCreateInstance( CLSID_FilterGraph, 0, CLSCTX_INPROC,IID_IGraphBuilder, (void **)&graph );
	graph->QueryInterface( IID_IMediaControl, (void **)&ctrl );
	CoCreateInstance (CLSID_SystemDeviceEnum, 0, CLSCTX_INPROC, IID_ICreateDevEnum, (void **) &devs);
	devs->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, &cams, 0);
	cams->Next (1,&mon,0);												// get first found capture device (webcam)    
	mon->BindToObject(0,0,IID_IBaseFilter, (void**)&cam);
	
	graph->AddFilter(cam, L"Capture Source");							// add the webcam to the graph as the source
	cam->EnumPins(&pins);												// we need its output pin to autogenerate the rest of the graph
	pins->Next(1,&pin, 0);												// via graph->Render
	graph->Render(pin);													// the graph builder now builds the whole filter chain, including MJPG decompression on some webcams
	graph->EnumFilters(&fil);											// from all newly added filters
	fil->Next(1,&rnd,0);												// we find the last one (the renderer)
	rnd->EnumPins(&pins);												// because the data we are interested in is pumped into the renderer's input pin
	pins->Next(1,&pin, 0);												// via the Receive method of the IMemInputPin interface
	pin->QueryInterface(IID_IMemInputPin,(void**)&mem);

	DsHook(mem,6,Receive);												// so we redirect it to our own proc to grab image data

	if (Start) this->Start();
}
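// DsHook() is not defined in this snippet. A minimal sketch of one way such a hook
// could work, assuming it simply overwrites one vtable entry of the COM object
// (entry 6 of IMemInputPin is Receive). This is an assumption about DsHook, not its
// actual implementation.
#include <windows.h>

static void *HookVtableEntry(IUnknown *obj, unsigned index, void *replacement) {
	void **vtable = *(void ***)obj;				// first pointer-sized field of a COM object is its vtable
	DWORD oldProtect;
	VirtualProtect(&vtable[index], sizeof(void *), PAGE_EXECUTE_READWRITE, &oldProtect);
	void *original = vtable[index];				// keep the original so the hook can forward to it
	vtable[index] = replacement;
	VirtualProtect(&vtable[index], sizeof(void *), oldProtect, &oldProtect);
	return original;
}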
Example #4
HRESULT CaptureGraph::BuildGraphFromListBox(CListBox *captureFilterList, int selectedIndex)
{
	if(pGraph != NULL)
	{
		HRESULT hr;
		//bind the capture device moniker stored in the selected list-box item
		IMoniker *pSelectedCaptureMoniker = (IMoniker *)captureFilterList->GetItemDataPtr(selectedIndex);
		IBaseFilter *pCaptureFilter = NULL;
		hr = pSelectedCaptureMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void **)&pCaptureFilter);
		if(FAILED(hr))
		{
			return hr;
		}
		pGraph->AddFilter(pCaptureFilter, L"Video Capture");
		hr = pCaptureBuilder2->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, pCaptureFilter, NULL, NULL);
		if(FAILED(hr))
		{
			return hr;
		}
		hr = SaveGraphFile(pGraph, L"D:\\VideoCapture.grf");
		return hr;
	}
	return E_FAIL;
}
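// SaveGraphFile() is called above but not defined in this snippet. A sketch of the
// standard DirectShow pattern for writing a GraphEdit-compatible .grf file (the
// project's own helper may differ):
HRESULT SaveGraphFile(IGraphBuilder *pGraph, const WCHAR *wszPath)
{
	const WCHAR wszStreamName[] = L"ActiveMovieGraph";
	IStorage *pStorage = NULL;
	HRESULT hr = StgCreateDocfile(wszPath,
		STGM_CREATE | STGM_TRANSACTED | STGM_READWRITE | STGM_SHARE_EXCLUSIVE,
		0, &pStorage);
	if (FAILED(hr))
	{
		return hr;
	}
	IStream *pStream = NULL;
	hr = pStorage->CreateStream(wszStreamName,
		STGM_WRITE | STGM_CREATE | STGM_SHARE_EXCLUSIVE, 0, 0, &pStream);
	if (FAILED(hr))
	{
		pStorage->Release();
		return hr;
	}
	IPersistStream *pPersist = NULL;
	hr = pGraph->QueryInterface(IID_IPersistStream, (void **)&pPersist);
	if (SUCCEEDED(hr))
	{
		hr = pPersist->Save(pStream, TRUE);		// serialize the graph into the stream
		pPersist->Release();
	}
	pStream->Release();
	if (SUCCEEDED(hr))
	{
		hr = pStorage->Commit(STGC_DEFAULT);
	}
	pStorage->Release();
	return hr;
}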
Example #5
	void TestCamera()
	{
		InitOpenCL();
		//TCHAR szDeviceName[80];
		//TCHAR szDeviceVersion[80];

		//for (int wIndex = 0; wIndex < 10; wIndex++) 
		//{
		//	if (capGetDriverDescription(
		//		wIndex, 
		//		szDeviceName, 
		//		sizeof (szDeviceName), 
		//		szDeviceVersion, 
		//		sizeof (szDeviceVersion)
		//		)) 
		//	{
		//		// Append name to list of installed capture drivers
		//		// and then let the user select a driver to use.
		//	}
		//} 

		//HWND hWndC = capCreateCaptureWindow(TEXT("PunkCapture"),
		//	WS_CHILD | WS_VISIBLE, 0, 0, 160, 120, *System::Window::Instance(), 1);

		//SendMessage (hWndC, WM_CAP_DRIVER_CONNECT, 0, 0L); 
		//// 
		//// Or, use the macro to connect to the MSVIDEO driver: 
		//// fOK = capDriverConnect(hWndC, 0); 
		//// 
		//// Place code to set up and capture video here. 
		//// 
		////capDriverDisconnect (hWndC); 

		//CAPDRIVERCAPS CapDriverCaps = { }; 
		//CAPSTATUS     CapStatus = { };

		//capDriverGetCaps(hWndC, &CapDriverCaps, sizeof(CAPDRIVERCAPS)); 

		//// Video source dialog box. 
		//if (CapDriverCaps.fHasDlgVideoSource)
		//{
		//	capDlgVideoSource(hWndC); 
		//}

		//// Video format dialog box. 
		//if (CapDriverCaps.fHasDlgVideoFormat) 
		//{
		//	capDlgVideoFormat(hWndC); 

		//	// Are there new image dimensions?
		//	capGetStatus(hWndC, &CapStatus, sizeof (CAPSTATUS));

		//	// If so, notify the parent of a size change.
		//} 

		//// Video display dialog box. 
		//if (CapDriverCaps.fHasDlgVideoDisplay)
		//{
		//	capDlgVideoDisplay(hWndC); 
		//}


		HRESULT hr;
		IGraphBuilder*  graph= 0;  hr = CoCreateInstance( CLSID_FilterGraph, 0, CLSCTX_INPROC,IID_IGraphBuilder, (void **)&graph );
		IMediaControl*  ctrl = 0;  hr = graph->QueryInterface( IID_IMediaControl, (void **)&ctrl );

		ICreateDevEnum* devs = 0;  hr = CoCreateInstance (CLSID_SystemDeviceEnum, 0, CLSCTX_INPROC, IID_ICreateDevEnum, (void **) &devs);
		IEnumMoniker*   cams = 0;  hr = devs?devs->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, &cams, 0):0;  
		IMoniker*       mon  = 0;  hr = cams->Next (1,&mon,0);  // get first found capture device (webcam?)    
		IBaseFilter*    cam  = 0;  hr = mon->BindToObject(0,0,IID_IBaseFilter, (void**)&cam);
		hr = graph->AddFilter(cam, L"Capture Source"); // add the webcam to the graph as the source
		IEnumPins*      pins = 0;  hr = cam?cam->EnumPins(&pins):0;   // we need its output pin to autogenerate the rest of the graph
		IPin*           pin  = 0;  hr = pins?pins->Next(1,&pin, 0):0; // via graph->Render
		hr = graph->Render(pin); // the graph builder now builds the whole filter chain, including MJPG decompression on some webcams
		IEnumFilters*   fil  = 0;  hr = graph->EnumFilters(&fil); // from all newly added filters
		IBaseFilter*    rnd  = 0;  hr = fil->Next(1,&rnd,0); // we find the last one (the renderer)
		hr = rnd->EnumPins(&pins);  // because the data we are interested in is pumped into the renderer's input pin
		hr = pins->Next(1,&pin, 0); // via the Receive method of the IMemInputPin interface
		IMemInputPin*   mem  = 0;  hr = pin->QueryInterface(IID_IMemInputPin,(void**)&mem);

		DsHook(mem,6,Receive); // so we redirect it to our own proc to grab image data

		hr = ctrl->Run();   

	};
Example #6
HRESULT RecordGraph::BuildGraph(CString recordFileName)
{
	HRESULT hr;
	ICreateDevEnum *pSysAudioCaptureEnum = NULL;
	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void **)&pSysAudioCaptureEnum);
	if (FAILED(hr))
	{
		return hr;
	}

	IBaseFilter *pAudioCapture = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	hr = pSysAudioCaptureEnum->CreateClassEnumerator(CLSID_AudioInputDeviceCategory, &pEnumMoniker, 0);
	if (hr == S_OK)
	{
		IMoniker *pMoniker = NULL;
		ULONG fetched;
		BOOL findMicrophone = FALSE;
		while(pEnumMoniker->Next(1, &pMoniker, &fetched) == S_OK && !findMicrophone)
		{
			IPropertyBag *pPropBag;
			hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag);
			if(SUCCEEDED(hr))
			{
				VARIANT varName;
				VariantInit(&varName);
				hr = pPropBag->Read(L"FriendlyName", &varName, 0);
				if(SUCCEEDED(hr))
				{
					ASSERT(varName.vt == VT_BSTR);
					CString friendlyName(varName.bstrVal);
					if(friendlyName.Find(L"Microphone") >= 0)
					{
						hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void **)&pAudioCapture);
						findMicrophone = TRUE;
					}
					VariantClear(&varName);
				}
				pPropBag->Release();
			}
			pMoniker->Release();
		}
		pEnumMoniker->Release();
	}
	pSysAudioCaptureEnum->Release();
	if(pAudioCapture == NULL)
	{
		return S_FALSE;
	}
	pBuilder->AddFilter(pAudioCapture, L"Audio Capture");

	IBaseFilter *pWaveDest = NULL;
	hr = CoCreateInstance(CLSID_WavDest, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void **)&pWaveDest);
	if(FAILED(hr))
	{
		return hr;
	}
	pBuilder->AddFilter(pWaveDest, L"Wave Dest");

	IBaseFilter *pFileWriter = NULL;
	hr = CoCreateInstance(CLSID_FileWriter, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void **)&pFileWriter);
	if(FAILED(hr))
	{
		return hr;
	}
	pBuilder->AddFilter(pFileWriter, L"File Writer");
	IFileSinkFilter *pFileSetter = NULL;
	hr = pFileWriter->QueryInterface(IID_IFileSinkFilter, (void **)&pFileSetter);
	if(FAILED(hr))
	{
		return hr;
	}
	AM_MEDIA_TYPE pmt;
	ZeroMemory(&pmt, sizeof(AM_MEDIA_TYPE));
	pmt.majortype = MEDIATYPE_Stream;
	pmt.subtype = MEDIASUBTYPE_WAVE;
	pmt.formattype = FORMAT_WaveFormatEx;
	hr = pFileSetter->SetFileName(recordFileName, &pmt);

	hr = ConnectFilters(pBuilder, pAudioCapture, pWaveDest, MEDIATYPE_NULL);
	if(FAILED(hr)) return hr;
	hr = ConnectFilters(pBuilder, pWaveDest, pFileWriter, MEDIATYPE_NULL);//if(FAILED(hr)) return hr;
	SaveGraphFile(pBuilder, L"D:\\Record.grf");

	pFileSetter->Release();
	pFileWriter->Release();
	pWaveDest->Release();
	pAudioCapture->Release();
	return hr;
}
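// ConnectFilters() is not shown in these examples. A sketch of the common DirectShow
// pattern: find the first unconnected output pin on the upstream filter and the first
// unconnected input pin on the downstream filter, then let the graph connect them.
// The media-type argument of the project's helper is not reproduced here; treat this
// as an assumption, not the project's actual implementation.
HRESULT GetUnconnectedPin(IBaseFilter *pFilter, PIN_DIRECTION dir, IPin **ppPin)
{
	*ppPin = NULL;
	IEnumPins *pEnum = NULL;
	HRESULT hr = pFilter->EnumPins(&pEnum);
	if (FAILED(hr)) return hr;
	IPin *pPin = NULL;
	while (pEnum->Next(1, &pPin, NULL) == S_OK)
	{
		PIN_DIRECTION pinDir;
		pPin->QueryDirection(&pinDir);
		if (pinDir == dir)
		{
			IPin *pConnectedTo = NULL;
			if (pPin->ConnectedTo(&pConnectedTo) == VFW_E_NOT_CONNECTED)
			{
				pEnum->Release();
				*ppPin = pPin;			// caller releases
				return S_OK;
			}
			if (pConnectedTo) pConnectedTo->Release();
		}
		pPin->Release();
	}
	pEnum->Release();
	return VFW_E_NOT_FOUND;
}

HRESULT ConnectFiltersSketch(IGraphBuilder *pGraph, IBaseFilter *pSrc, IBaseFilter *pDest)
{
	IPin *pOut = NULL, *pIn = NULL;
	HRESULT hr = GetUnconnectedPin(pSrc, PINDIR_OUTPUT, &pOut);
	if (FAILED(hr)) return hr;
	hr = GetUnconnectedPin(pDest, PINDIR_INPUT, &pIn);
	if (FAILED(hr)) { pOut->Release(); return hr; }
	hr = pGraph->Connect(pOut, pIn);	// intelligent connect; may insert intermediate filters
	pIn->Release();
	pOut->Release();
	return hr;
}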
Example #7
File: f_ds_vfile.cpp  Project: d-zenju/aws
IBaseFilter * GetFilter(const char * fname)
{
	HRESULT hr;
	IFilterMapper3 * pFM3 = NULL;
	hr = CoCreateInstance(CLSID_FilterMapper2, NULL, CLSCTX_INPROC_SERVER, 
		IID_IFilterMapper3, (void **)&pFM3);
	if (FAILED(hr))
	{
		return NULL;
	}

	ICreateDevEnum *pSysDevEnum = NULL;
	hr = pFM3->GetICreateDevEnum(&pSysDevEnum);
	/*
	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
		IID_ICreateDevEnum, (void **)&pSysDevEnum);
	*/

	if (FAILED(hr))
	{
		pFM3->Release();
		return NULL;
	}

	IEnumMoniker *pEnumCat = NULL;
	hr = pSysDevEnum->CreateClassEnumerator(CLSID_LegacyAmFilterCategory, &pEnumCat, 0);
	char str[BUFSIZE_MONIKER_INF];
	IBaseFilter *pFilter = NULL;

	if (hr == S_OK) 
	{
		// Enumerate the monikers.
		IMoniker *pMoniker = NULL;
		ULONG cFetched;
//		cout << "Seeking ffdshow Video Decoder" << endl;
		int ifilter = 0;
		while(pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK && pFilter == NULL)
		{
			IPropertyBag *pPropBag;
			hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, 
				(void **)&pPropBag);
			if (SUCCEEDED(hr))
			{
				VARIANT varName;
				VariantInit(&varName);
				hr = pPropBag->Read(L"FriendlyName", &varName, 0);
				if (SUCCEEDED(hr))
				{
					wcstombs(str, varName.bstrVal, BUFSIZE_MONIKER_INF);
//					cout << ifilter << ":" << str << endl;
					if(strcmp(str, fname) == 0){
						hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter,
							(void**)&pFilter);
					}

				}
				VariantClear(&varName);
				
				pPropBag->Release();
			}
			ifilter++;
			pMoniker->Release();
		}
		pEnumCat->Release();
	}
	pSysDevEnum->Release();
	pFM3->Release();
	return pFilter;
}
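Example #8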
static GstCaps *
gst_dshowaudiosrc_get_caps (GstBaseSrc * basesrc)
{
  HRESULT hres = S_OK;
  IBindCtx *lpbc = NULL;
  IMoniker *audiom = NULL;
  DWORD dwEaten;
  GstDshowAudioSrc *src = GST_DSHOWAUDIOSRC (basesrc);
  gunichar2 *unidevice = NULL;

  if (src->device) {
    g_free (src->device);
    src->device = NULL;
  }

  src->device =
      gst_dshow_getdevice_from_devicename (&CLSID_AudioInputDeviceCategory,
      &src->device_name);
  if (!src->device) {
    GST_ERROR ("No audio device found.");
    return NULL;
  }
  unidevice =
      g_utf8_to_utf16 (src->device, strlen (src->device), NULL, NULL, NULL);

  if (!src->audio_cap_filter) {
    hres = CreateBindCtx (0, &lpbc);
    if (SUCCEEDED (hres)) {
      hres =
          MkParseDisplayName (lpbc, (LPCOLESTR) unidevice, &dwEaten, &audiom);
      if (SUCCEEDED (hres)) {
        hres = audiom->BindToObject (lpbc, NULL, IID_IBaseFilter,
            (LPVOID *) & src->audio_cap_filter);
        audiom->Release ();
      }
      lpbc->Release ();
    }
  }

  if (src->audio_cap_filter && !src->caps) {
    /* get the capture pins supported types */
    IPin *capture_pin = NULL;
    IEnumPins *enumpins = NULL;
    HRESULT hres;

    hres = src->audio_cap_filter->EnumPins (&enumpins);
    if (SUCCEEDED (hres)) {
      while (enumpins->Next (1, &capture_pin, NULL) == S_OK) {
        IKsPropertySet *pKs = NULL;

        hres =
            capture_pin->QueryInterface (IID_IKsPropertySet, (LPVOID *) & pKs);
        if (SUCCEEDED (hres) && pKs) {
          DWORD cbReturned;
          GUID pin_category;
          RPC_STATUS rpcstatus;

          hres =
              pKs->Get (AMPROPSETID_Pin,
              AMPROPERTY_PIN_CATEGORY, NULL, 0, &pin_category, sizeof (GUID),
              &cbReturned);

          /* we only want capture pins */
          if (UuidCompare (&pin_category, (UUID *) & PIN_CATEGORY_CAPTURE,
                  &rpcstatus) == 0) {
            IAMStreamConfig *streamcaps = NULL;

            if (SUCCEEDED (capture_pin->QueryInterface (IID_IAMStreamConfig,
                        (LPVOID *) & streamcaps))) {
              src->caps =
                  gst_dshowaudiosrc_getcaps_from_streamcaps (src, capture_pin,
                  streamcaps);
              streamcaps->Release ();
            }
          }
          pKs->Release ();
        }
        capture_pin->Release ();
      }
      enumpins->Release ();
    }
  }

  if (unidevice) {
    g_free (unidevice);
  }

  if (src->caps) {
    return gst_caps_ref (src->caps);
  }

  return NULL;
}
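/* The pin-category check above can be factored into a small helper. A sketch (not
 * part of the original file) of the same IKsPropertySet query; the helper name is
 * illustrative only: */
static HRESULT
get_pin_category (IPin * pin, GUID * category)
{
  IKsPropertySet *pKs = NULL;
  DWORD cbReturned = 0;
  HRESULT hres = pin->QueryInterface (IID_IKsPropertySet, (LPVOID *) & pKs);

  if (FAILED (hres) || !pKs)
    return E_NOINTERFACE;

  /* AMPROPERTY_PIN_CATEGORY returns the pin category GUID, e.g. PIN_CATEGORY_CAPTURE */
  hres = pKs->Get (AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0,
      category, sizeof (GUID), &cbReturned);
  pKs->Release ();
  return hres;
}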
Example #9
static int v4w_open_videodevice(V4wState *s)
{
	// Initialize COM
	CoInitialize(NULL);

	// get a Graph
	HRESULT hr= CoCreateInstance (CLSID_FilterGraph,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IGraphBuilder, //IID_IBaseFilter,
		(void **)&s->m_pGraph);
	if(FAILED(hr))
	{
		return -1;
	}

	// get a CaptureGraphBuilder2
	hr= CoCreateInstance (CLSID_CaptureGraphBuilder2,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_ICaptureGraphBuilder2, //IID_IBaseFilter,
		(void **)&s->m_pBuilder);
	if(FAILED(hr))
	{
		return -2;
	}

	// connect capture graph builder with the graph
	s->m_pBuilder->SetFiltergraph(s->m_pGraph);

	// get mediacontrol so we can start and stop the filter graph
	hr=s->m_pGraph->QueryInterface (IID_IMediaControl, (void **)&s->m_pControl);
	if(FAILED(hr))
	{
		return -3;
	}


	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;

	ULONG nFetched = 0;

	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
	if(FAILED(hr))
	{
		return -4;
	}

	hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (FAILED(hr) || pEnumMoniker == NULL) {
		//printf("no device\n");
		return -5;
	}

	pEnumMoniker->Reset();

	int pos=0;
	while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
	{
		IPropertyBag *pBag;
		hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag );
		if( hr != S_OK )
			continue; 

		if (s->dev[0]=='\0')
			break;

		VARIANT var;
		VariantInit(&var);
		hr = pBag->Read( L"FriendlyName", &var, NULL ); 
		if( hr != S_OK )
		{
			pMoniker->Release();
			continue;
		}
		//USES_CONVERSION;
		char szName[256];

		WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0);
		VariantClear(&var); 

		if (strcmp(szName, s->dev)==0)
			break;

		pMoniker->Release();
		pBag->Release();
		pMoniker=NULL;
		pBag=NULL;
	}

	if(pMoniker==NULL)
	{
		return -6;
	}

	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
	if(FAILED(hr))
	{
		return -7;
	}

	s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

	pMoniker->Release();
	pEnumMoniker->Release();
	pCreateDevEnum->Release();


	GUID pPinCategory;

	if (try_format(s->m_pDeviceFilter, s->pix_fmt, &pPinCategory)==0)
		s->pix_fmt = s->pix_fmt; /* the requested format is supported; keep it */
	else if (try_format(s->m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0)
		s->pix_fmt = MS_YUV420P;
	else if (try_format(s->m_pDeviceFilter,MS_YUY2, &pPinCategory)==0)
		s->pix_fmt = MS_YUY2;
	else if (try_format(s->m_pDeviceFilter,MS_YUYV, &pPinCategory)==0)
		s->pix_fmt = MS_YUYV;
	else if (try_format(s->m_pDeviceFilter,MS_UYVY, &pPinCategory)==0)
		s->pix_fmt = MS_UYVY;
	else if (try_format(s->m_pDeviceFilter,MS_RGB24, &pPinCategory)==0)
		s->pix_fmt = MS_RGB24;
	else
	{
		ms_error("Unsupported video pixel format.");
		return -8;
	}

	if (s->pix_fmt == MS_YUV420P)
		ms_message("Driver supports YUV420P, using that format.");
	else if (s->pix_fmt == MS_YUY2)
		ms_message("Driver supports YUY2 (YUYV), using that format.");
	else if (s->pix_fmt == MS_YUYV)
		ms_message("Driver supports YUV422, using that format.");
	else if (s->pix_fmt == MS_UYVY)
		ms_message("Driver supports UYVY, using that format.");
	else if (s->pix_fmt == MS_RGB24)
		ms_message("Driver supports RGB24, using that format.");

	if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H);
	else
	{
		ms_error("No supported size found for format.");
		/* size not supported? */
		return -9;
	}

	// get DXFilter
	s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
	if(s->m_pDXFilter==NULL)
	{
		return -10;
	}
	s->m_pDXFilter->AddRef();

	CMediaType mt;
	mt.SetType(&MEDIATYPE_Video);

	GUID m = MEDIASUBTYPE_RGB24;
	if (s->pix_fmt == MS_YUV420P)
		m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
	else if (s->pix_fmt == MS_YUY2)
		m = MEDIASUBTYPE_YUY2;
	else if (s->pix_fmt == MS_YUYV)
		m = MEDIASUBTYPE_YUYV;
	else if (s->pix_fmt == MS_UYVY)
		m = MEDIASUBTYPE_UYVY;
	else if (s->pix_fmt == MS_RGB24)
		m = MEDIASUBTYPE_RGB24;
	mt.SetSubtype(&m);

	mt.formattype = FORMAT_VideoInfo;
	mt.SetTemporalCompression(FALSE);

	VIDEOINFO *pvi = (VIDEOINFO *)
		mt.AllocFormatBuffer(sizeof(VIDEOINFO));
	if (NULL == pvi)
		return -11;
	ZeroMemory(pvi, sizeof(VIDEOINFO));

	if (s->pix_fmt == MS_YUV420P)
		pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
	else if (s->pix_fmt == MS_YUY2)
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
	else if (s->pix_fmt == MS_YUYV)
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','V');
	else if (s->pix_fmt == MS_UYVY)
		pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
	else if (s->pix_fmt == MS_RGB24)
		pvi->bmiHeader.biCompression = BI_RGB;

	if (s->pix_fmt == MS_YUV420P)
		pvi->bmiHeader.biBitCount = 12;
	else if (s->pix_fmt == MS_YUY2)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_YUYV)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_UYVY)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_RGB24)
		pvi->bmiHeader.biBitCount = 24;

	pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	pvi->bmiHeader.biWidth = s->vsize.width;
	pvi->bmiHeader.biHeight = s->vsize.height;
	pvi->bmiHeader.biPlanes = 1;
	pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
	pvi->bmiHeader.biClrImportant = 0;
	mt.SetSampleSize(pvi->bmiHeader.biSizeImage);

	mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

	hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
	if(FAILED(hr))
	{
		return -12;
	}

	hr = s->m_pDXFilter->SetCallback(Callback); 
	if(FAILED(hr))
	{
		return -13;
	}

	hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
		(LPVOID *)&s->m_pIDXFilter);
	if(FAILED(hr))
	{
		return -14;
	}

	hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
	if(FAILED(hr))
	{
		return -15;
	}


	// get null renderer
	hr=CoCreateInstance (CLSID_NullRenderer,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IBaseFilter,
		(void **)&s->m_pNullRenderer);
	if(FAILED(hr))
	{
		return -16;
	}
	if (s->m_pNullRenderer!=NULL)
	{
		s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
	}

	hr = s->m_pBuilder->RenderStream(&pPinCategory,
		&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
	if (FAILED(hr))
	{
		return -17;
	}

	IAMStreamConfig *pConfig = NULL;
	hr = s->m_pBuilder->FindInterface(
		&pPinCategory, // Preview pin.
		&MEDIATYPE_Video,    // Any media type.
		s->m_pDeviceFilter, // Pointer to the capture filter.
		IID_IAMStreamConfig, (void**)&pConfig); 
	if (pConfig!=NULL)
	{
		AM_MEDIA_TYPE *pType = NULL;
		int iCount, iSize;
		pConfig->GetNumberOfCapabilities(&iCount, &iSize);

		for (int i = 0; i < iCount; i++) {
			VIDEO_STREAM_CONFIG_CAPS scc;
			pType = NULL;
			pConfig->GetStreamCaps(i, &pType, (BYTE *)&scc);

			if (!((pType->formattype == FORMAT_VideoInfo) &&
				(pType->cbFormat >= sizeof(VIDEOINFOHEADER)) &&
				(pType->pbFormat != NULL)))
				continue;

			VIDEOINFOHEADER & videoInfo = *(VIDEOINFOHEADER *)pType->pbFormat;

			if (m != pType->subtype)
				continue;

			if (videoInfo.bmiHeader.biWidth != s->vsize.width)
				continue;

			if (videoInfo.bmiHeader.biHeight != s->vsize.height)
				continue;

			if (videoInfo.bmiHeader.biBitCount != pvi->bmiHeader.biBitCount)
				continue;

			if (videoInfo.bmiHeader.biCompression != pvi->bmiHeader.biCompression)
				continue;

			videoInfo.AvgTimePerFrame = UNITS / (LONGLONG)s->fps;
			pConfig->SetFormat(pType);    
		}

		pConfig->GetFormat(&pType);
		if (pType!=NULL)
		{
			VIDEOINFO *pvi;
			pvi = (VIDEOINFO *)pType->pbFormat;
			ms_message("v4w: camera asked fps=%.2f // real fps=%.2f", s->fps, ((float)UNITS / (float)pvi->AvgTimePerFrame));
		}

		pConfig->Release();
	}

	//m_pDXFilter->SetBufferSamples(TRUE);

	s_callback = s;
	hr = s->m_pControl->Run();
	if(FAILED(hr))
	{
		return -18;
	}


	s->rotregvalue=1;
	return 0;
}
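// Note on the frame-rate math used above: AvgTimePerFrame is expressed in
// 100-nanosecond units (UNITS == 10,000,000 per second), so fps and the frame
// interval convert as in these two small helpers (a sketch for clarity, not part
// of the original file):
static inline REFERENCE_TIME fps_to_avg_time_per_frame(float fps)
{
	return (REFERENCE_TIME)((double)UNITS / (double)fps);
}
static inline float avg_time_per_frame_to_fps(REFERENCE_TIME avg)
{
	return (float)((double)UNITS / (double)avg);
}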
Example #10
bool directx_camera_server::open_and_find_parameters(const int which, unsigned width, unsigned height)
{
  HRESULT hr;

  //-------------------------------------------------------------------
  // Create COM and DirectX objects needed to access a video stream.

  // Initialize COM.  This must have a matching uninitialize somewhere before
  // the object is destroyed.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoInitialize\n");
#endif
  CoInitialize(NULL);

  // Create the filter graph manager
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance FilterGraph\n");
#endif
  CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, 
		      IID_IGraphBuilder, (void **)&_pGraph);
  if (_pGraph == NULL) {
    fprintf(stderr, "directx_camera_server::open_and_find_parameters(): Can't create graph manager\n");
    return false;
  }
  _pGraph->QueryInterface(IID_IMediaControl, (void **)&_pMediaControl);
  _pGraph->QueryInterface(IID_IMediaEvent, (void **)&_pEvent);

  // Create the Capture Graph Builder.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance CaptureGraphBuilder2\n");
#endif
  CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC, 
      IID_ICaptureGraphBuilder2, (void **)&_pBuilder);
  if (_pBuilder == NULL) {
    fprintf(stderr, "directx_camera_server::open_and_find_parameters(): Can't create graph builder\n");
    return false;
  }

  // Associate the graph with the builder.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before SetFilterGraph\n");
#endif
  _pBuilder->SetFiltergraph(_pGraph);

  //-------------------------------------------------------------------
  // Go find a video device to use: in this case, we are using the Nth
  // one we find, where the number N is the "which" parameter.

  // Create the system device enumerator.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance SystemDeviceEnum\n");
#endif
  ICreateDevEnum *pDevEnum = NULL;
  CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC, 
      IID_ICreateDevEnum, (void **)&pDevEnum);
  if (pDevEnum == NULL) {
    fprintf(stderr, "directx_camera_server::open_and_find_parameters(): Can't create device enumerator\n");
    return false;
  }

  // Create an enumerator for video capture devices.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CreateClassEnumerator\n");
#endif
  IEnumMoniker *pClassEnum = NULL;
  pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
  if (pClassEnum == NULL) {
    fprintf(stderr, "directx_camera_server::open_and_find_parameters(): Can't create video enumerator (no cameras?)\n");
    pDevEnum->Release();
    return false;
  }

#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before Loop over enumerators\n");
#endif
  ULONG cFetched;
  IMoniker *pMoniker = NULL;
  IBaseFilter *pSrc = NULL;
  // Skip (which - 1) cameras
  int i;
  for (i = 0; i < which-1 ; i++) {
    if (pClassEnum->Next(1, &pMoniker, &cFetched) != S_OK) {
      fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't open camera (not enough cameras)\n");
      pClassEnum->Release();
      pDevEnum->Release();
      return false;
    }
    pMoniker->Release();  // release the moniker of each skipped camera
  }
  // Take the next camera and bind it
  if (pClassEnum->Next(1, &pMoniker, &cFetched) == S_OK) {
    // Bind the first moniker to a filter object.
    pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pSrc);
    pMoniker->Release();
  } else {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't open camera (not enough cameras)\n");
    pClassEnum->Release();
    pDevEnum->Release();
    return false;
  }

  pClassEnum->Release();
  pDevEnum->Release();

  //-------------------------------------------------------------------
  // Construct the sample grabber callback handler that will be used
  // to receive image data from the sample grabber.
  if ( (_pCallback = new directx_samplegrabber_callback()) == NULL) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't create sample grabber callback handler (out of memory?)\n");
    return false;
  }

  //-------------------------------------------------------------------
  // Construct the sample grabber that will be used to snatch images from
  // the video stream as they go by.  Set its media type and callback.

  // Create the Sample Grabber.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance SampleGrabber\n");
#endif
  CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
      IID_IBaseFilter, reinterpret_cast<void**>(&_pSampleGrabberFilter));
  if (_pSampleGrabberFilter == NULL) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't get SampleGrabber filter (not DirectX 8.1+?)\n");
    return false;
  }
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before QueryInterface\n");
#endif
  _pSampleGrabberFilter->QueryInterface(IID_ISampleGrabber,
      reinterpret_cast<void**>(&_pGrabber));

  // Set the media type to video
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before SetMediaType\n");
#endif
  AM_MEDIA_TYPE mt;
  // Ask for video media producers that produce 24-bit RGB (8 bits per channel)
  ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
  mt.majortype = MEDIATYPE_Video;	  // Ask for video media producers
  mt.subtype = MEDIASUBTYPE_RGB24;	  // Ask for 24-bit RGB
  _pGrabber->SetMediaType(&mt);

  //-------------------------------------------------------------------
  // Ask for the video resolution that has been passed in.
  // This code is based on
  // intuiting that we need to use the SetFormat call on the IAMStreamConfig
  // interface; this interface is described in the help pages.
  // If the width and height are specified as 0, then they are not set
  // in the header, letting them use whatever is the default.
  if ( (width != 0) && (height != 0) ) {
    _pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, pSrc,
			      IID_IAMStreamConfig, (void **)&_pStreamConfig);
    if (_pStreamConfig == NULL) {
      fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't get StreamConfig interface\n");
      return false;
    }

    ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
    mt.majortype = MEDIATYPE_Video;	  // Ask for video media producers
    mt.subtype = MEDIASUBTYPE_RGB24;	  // Ask for 24-bit RGB (8 bits per channel)
    mt.pbFormat = (BYTE*)CoTaskMemAlloc(sizeof(VIDEOINFOHEADER));
    VIDEOINFOHEADER *pVideoHeader = (VIDEOINFOHEADER*)mt.pbFormat;
    ZeroMemory(pVideoHeader, sizeof(VIDEOINFOHEADER));
    pVideoHeader->bmiHeader.biBitCount = 24;
    pVideoHeader->bmiHeader.biWidth = width;
    pVideoHeader->bmiHeader.biHeight = height;
    pVideoHeader->bmiHeader.biPlanes = 1;
    pVideoHeader->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    pVideoHeader->bmiHeader.biSizeImage = DIBSIZE(pVideoHeader->bmiHeader);

    // Set the format type and size.
    mt.formattype = FORMAT_VideoInfo;
    mt.cbFormat = sizeof(VIDEOINFOHEADER);

    // Set the sample size.
    mt.bFixedSizeSamples = TRUE;
    mt.lSampleSize = DIBSIZE(pVideoHeader->bmiHeader);

    // Make the call to actually set the video type to what we want.
    if (_pStreamConfig->SetFormat(&mt) != S_OK) {
      fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't set resolution to %dx%d\n",
	pVideoHeader->bmiHeader.biWidth, pVideoHeader->bmiHeader.biHeight);
      return false;
    }

    // Clean up the pbFormat header memory we allocated above.
    CoTaskMemFree(mt.pbFormat);
  }

  //-------------------------------------------------------------------
  // Create a NULL renderer that will be used to discard the video frames
  // on the output pin of the sample grabber

#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance NullRenderer\n");
#endif
  IBaseFilter *pNull = NULL;
  CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER,
      IID_IBaseFilter, reinterpret_cast<void**>(&pNull));

  //-------------------------------------------------------------------
  // Build the filter graph.  First add the filters and then connect them.

  // pSrc is the capture filter for the video device we found above.
  _pGraph->AddFilter(pSrc, L"Video Capture");

  // Add the sample grabber filter
  _pGraph->AddFilter(_pSampleGrabberFilter, L"SampleGrabber");

  // Add the null renderer filter
  _pGraph->AddFilter(pNull, L"NullRenderer");

  // Connect the output of the video reader to the sample grabber input
  ConnectTwoFilters(_pGraph, pSrc, _pSampleGrabberFilter);

  // Connect the output of the sample grabber to the NULL renderer input
  ConnectTwoFilters(_pGraph, _pSampleGrabberFilter, pNull);

  //-------------------------------------------------------------------
  // XXX See if this is a video tuner card by querying for that interface.
  // Set it to read the video channel if it is one.
  IAMTVTuner  *pTuner = NULL;
  hr = _pBuilder->FindInterface(NULL, NULL, pSrc, IID_IAMTVTuner, (void**)&pTuner);
  if (pTuner != NULL) {
#ifdef	DEBUG
    printf("directx_camera_server::open_and_find_parameters(): Found a TV Tuner!\n");
#endif

    //XXX Put code here.
    // Set the first input pin to use the cable as input
    hr = pTuner->put_InputType(0, TunerInputCable);
    if (FAILED(hr)) {
      fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't set input to cable\n");
    }

    // Set the channel on the video to be baseband (is this channel zero?)
    hr = pTuner->put_Channel(0, -1, -1);
    if (FAILED(hr)) {
      fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't set channel\n");
    }

    pTuner->Release();
  }
  

  //-------------------------------------------------------------------
  // Find _num_rows and _num_columns in the video stream.
  _pGrabber->GetConnectedMediaType(&mt);
  VIDEOINFOHEADER *pVih;
  if (mt.formattype == FORMAT_VideoInfo) {
      pVih = reinterpret_cast<VIDEOINFOHEADER*>(mt.pbFormat);
  } else {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't get video header type\n");
    return false;
  }

  // Number of rows and columns.  This is different if we are using a target
  // rectangle (rcTarget) than if we are not.
  if (IsRectEmpty(&pVih->rcTarget)) {
    _num_columns = pVih->bmiHeader.biWidth;
    _num_rows = pVih->bmiHeader.biHeight;
  } else {
    _num_columns = pVih->rcTarget.right;
    _num_rows = pVih->bmiHeader.biHeight;
    printf("XXX directx_camera_server::open_and_find_parameters(): Warning: may not work correctly with target rectangle\n");
  }
  _minX = 0;
  _maxX = _num_columns - 1;
  _minY = 0;
  _maxY = _num_rows - 1;
#ifdef DEBUG
  printf("Got %dx%d video\n", _num_columns, _num_rows);
#endif

  // Make sure that the image is not compressed and that we have 8 bits
  // per pixel.
  if (pVih->bmiHeader.biCompression != BI_RGB) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Compression not RGB\n");
    switch (pVih->bmiHeader.biCompression) {
      case BI_RLE8:
	fprintf(stderr,"  (It is BI_RLE8)\n");
	break;
      case BI_RLE4:
	fprintf(stderr,"  (It is BI_RLE4)\n");
	break;
      case BI_BITFIELDS:
	fprintf(stderr,"  (It is BI_BITFIELDS)\n");
	break;
      default:
	fprintf(stderr,"  (Unknown compression type)\n");
    }
    return false;
  }
  int BytesPerPixel = pVih->bmiHeader.biBitCount / 8;
  if (BytesPerPixel != 3) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Not 3 bytes per pixel (%d)\n",
      pVih->bmiHeader.biBitCount);
    return false;
  }

  // A negative height indicates that the images are stored non-inverted in Y
  // Not sure what to do with images that have negative height -- need to
  // read the book some more to find out.
  if (_num_rows < 0) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Num Rows is negative (internal error)\n");
    return false;
  }

  // Find the stride to take when moving from one row of video to the
  // next.  This is rounded up to the nearest DWORD.
  _stride = (_num_columns * BytesPerPixel + 3) & ~3;

  // Set the callback, where '0' means 'use the SampleCB callback'
  _pGrabber->SetCallback(_pCallback, 0);

  //-------------------------------------------------------------------
  // Release resources that won't be used later and return
  pSrc->Release();
  pNull->Release();
  return true;
}
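// The directx_samplegrabber_callback class used above is not shown. A minimal sketch
// of an ISampleGrabberCB implementation (qedit.h) that receives frames through
// SampleCB, which is the '0' mode passed to SetCallback() above. The class and member
// names here are illustrative, not the project's own code.
class minimal_samplegrabber_callback : public ISampleGrabberCB {
public:
  minimal_samplegrabber_callback() : _ref(1) {}

  // IUnknown
  STDMETHODIMP QueryInterface(REFIID riid, void **ppv) {
    if (riid == IID_IUnknown || riid == IID_ISampleGrabberCB) {
      *ppv = static_cast<ISampleGrabberCB *>(this);
      AddRef();
      return S_OK;
    }
    *ppv = NULL;
    return E_NOINTERFACE;
  }
  STDMETHODIMP_(ULONG) AddRef()  { return InterlockedIncrement(&_ref); }
  STDMETHODIMP_(ULONG) Release() {
    LONG r = InterlockedDecrement(&_ref);
    if (r == 0) { delete this; }
    return r;
  }

  // Called once per media sample when SetCallback(cb, 0) is used.
  STDMETHODIMP SampleCB(double SampleTime, IMediaSample *pSample) {
    BYTE *pBuffer = NULL;
    if (SUCCEEDED(pSample->GetPointer(&pBuffer))) {
      // pBuffer now points at one RGB24 frame (stride x rows bytes).
    }
    return S_OK;
  }

  // Only used with SetCallback(cb, 1); not needed here.
  STDMETHODIMP BufferCB(double, BYTE *, long) { return E_NOTIMPL; }

private:
  LONG _ref;
};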
Example #11
static void vfw_detect(MSWebCamManager *obj) {
    ICreateDevEnum *pCreateDevEnum = NULL;
    IEnumMoniker *pEnumMoniker = NULL;
    IMoniker *pMoniker = NULL;
    HRESULT hr;

    ULONG nFetched = 0;

    // Initialize COM
    CoInitialize(NULL);

    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                          IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
    if(FAILED(hr))
    {
        CoUninitialize();
        return ;
    }

    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
            &pEnumMoniker, 0);
    if (FAILED(hr) || pEnumMoniker == NULL) {
        //printf("no device\n");
        CoUninitialize();
        return ;
    }

    pEnumMoniker->Reset();

    int pos=0;
    while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
    {
        IPropertyBag *pBag;
        hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag );
        if( hr != S_OK )
            continue;

        VARIANT var;
        VariantInit(&var);
        hr = pBag->Read( L"FriendlyName", &var, NULL );
        if( hr != S_OK )
        {
            pMoniker->Release();
            continue;
        }
        //USES_CONVERSION;
        char szName[256];

        WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0);
        VariantClear(&var);

        IBaseFilter *m_pDeviceFilter;
        hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&m_pDeviceFilter );
        if(SUCCEEDED(hr))
        {
            GUID pPinCategory;
            int fmt_supported = 0;

            dump_format(m_pDeviceFilter);

            //basic testing for the device.
            if (try_format(m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0)
                fmt_supported = 1;
            else if (try_format(m_pDeviceFilter,MS_YUY2, &pPinCategory)==0)
                fmt_supported = 1;
            else if (try_format(m_pDeviceFilter,MS_YUYV, &pPinCategory)==0)
                fmt_supported = 1;
            else if (try_format(m_pDeviceFilter,MS_UYVY, &pPinCategory)==0)
                fmt_supported = 1;
            else if (try_format(m_pDeviceFilter,MS_RGB24, &pPinCategory)==0)
                fmt_supported = 1;
            else
            {
                ms_warning("Unsupported video pixel format/refuse camera (%s).", szName);
            }

            if (fmt_supported==1)
            {
                MSWebCam *cam=ms_web_cam_new(&ms_directx_cam_desc);
                cam->name=ms_strdup(szName);
                ms_web_cam_manager_add_cam(obj,cam);
            }
            m_pDeviceFilter->Release();
            m_pDeviceFilter=NULL;
        }


        pMoniker->Release();
        pBag->Release();
        pMoniker=NULL;
        pBag=NULL;
    }

    pEnumMoniker->Release();
    pCreateDevEnum->Release();
    CoUninitialize();
}
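Example #12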
void
VideoDeviceImpl::setup()
{
    HRESULT hr = CoInitializeEx(nullptr, COINIT_MULTITHREADED);
    if (FAILED(hr))
        return fail("Could not initialize video device.");

    hr = CoCreateInstance(
        CLSID_CaptureGraphBuilder2,
        nullptr,
        CLSCTX_INPROC_SERVER,
        IID_ICaptureGraphBuilder2,
        (void**) &cInterface->captureGraph_);
    if (FAILED(hr))
        return fail("Could not create the Filter Graph Manager");

    hr = CoCreateInstance(CLSID_FilterGraph,
        nullptr,
        CLSCTX_INPROC_SERVER,IID_IGraphBuilder,
        (void**) &cInterface->graph_);
    if (FAILED(hr))
        return fail("Could not add the graph builder!");

    hr = cInterface->captureGraph_->SetFiltergraph(cInterface->graph_);
    if (FAILED(hr))
        return fail("Could not set filtergraph.");

    ICreateDevEnum *pSysDevEnum = nullptr;
    hr = CoCreateInstance(CLSID_SystemDeviceEnum,
        nullptr,
        CLSCTX_INPROC_SERVER,
        IID_ICreateDevEnum,
        (void **)&pSysDevEnum);
    if (FAILED(hr))
        return fail("Could not create the enumerator!");

    IEnumMoniker* pEnumCat = nullptr;
    hr = pSysDevEnum->CreateClassEnumerator(
        CLSID_VideoInputDeviceCategory,
        &pEnumCat,
        0);
    if (SUCCEEDED(hr)) {
        // Auto-release when the enclosing if {} scope exits, or on exception
        auto IEnumMonikerDeleter = [](IEnumMoniker* p){ p->Release(); };
        std::unique_ptr<IEnumMoniker, decltype(IEnumMonikerDeleter)&> pEnumCatGuard {pEnumCat, IEnumMonikerDeleter};

        IMoniker *pMoniker = nullptr;
        ULONG cFetched;
        unsigned int deviceCounter = 0;
        while ((pEnumCatGuard->Next(1, &pMoniker, &cFetched) == S_OK))
        {
            if (deviceCounter == this->id) {
                IPropertyBag *pPropBag;
                hr = pMoniker->BindToStorage(
                    0,
                    0,
                    IID_IPropertyBag,
                    (void **)&pPropBag);
                if (SUCCEEDED(hr)) {
                    VARIANT varName;
                    VariantInit(&varName);
                    hr = pPropBag->Read(L"FriendlyName", &varName, 0);
                    if (SUCCEEDED(hr)) {
                        int l = WideCharToMultiByte(
                            CP_UTF8,
                            0,
                            varName.bstrVal,
                            -1,
                            0, 0, 0, 0);
                        auto tmp = new char[l];
                        WideCharToMultiByte(
                            CP_UTF8,
                            0,
                            varName.bstrVal,
                            -1,
                            tmp,
                            l,
                            0, 0);
                        this->name = std::string(tmp);
                        delete[] tmp;
                        this->device = std::string("video=") + this->name;
                        hr = pMoniker->BindToObject(
                            nullptr, nullptr,
                            IID_IBaseFilter,
                            (void**)&cInterface->videoInputFilter_);
                        if (SUCCEEDED(hr))
                            hr = cInterface->graph_->AddFilter(
                                cInterface->videoInputFilter_,
                                varName.bstrVal);
                        else {
                            fail("Could not add filter to video device.");
                        }
                        hr = cInterface->captureGraph_->FindInterface(
                            &PIN_CATEGORY_PREVIEW,
                            &MEDIATYPE_Video,
                            cInterface->videoInputFilter_,
                            IID_IAMStreamConfig,
                            (void **)&cInterface->streamConf_);
                        if(FAILED(hr)) {
                            hr = cInterface->captureGraph_->FindInterface(
                                &PIN_CATEGORY_CAPTURE,
                                &MEDIATYPE_Video,
                                cInterface->videoInputFilter_,
                                IID_IAMStreamConfig,
                                (void **)&cInterface->streamConf_);
                            if (FAILED(hr)) {
                                fail("Couldn't config the stream!");
                            }
                        }
                        break; // Device found
                    }
                    VariantClear(&varName);
                    pPropBag->Release();
                    pPropBag = nullptr;
                    pMoniker->Release();
                    pMoniker = nullptr;
                }
            }
            deviceCounter++;
        }
        if (SUCCEEDED(hr)) {
            int piCount;
            int piSize;
            cInterface->streamConf_->GetNumberOfCapabilities(&piCount, &piSize);
            AM_MEDIA_TYPE *pmt;
            VIDEO_STREAM_CONFIG_CAPS pSCC;
            for (int i = 0; i < piCount; i++) {
                cInterface->streamConf_->GetStreamCaps(i, &pmt, (BYTE*)&pSCC);
                if (pmt->formattype == FORMAT_VideoInfo) {
                    auto videoInfo = (VIDEOINFOHEADER*) pmt->pbFormat;
                    sizeList_.emplace_back(videoInfo->bmiHeader.biWidth, videoInfo->bmiHeader.biHeight);
                    rateList_[sizeList_.back()].emplace_back(1e7, pSCC.MinFrameInterval);
                    rateList_[sizeList_.back()].emplace_back(1e7, pSCC.MaxFrameInterval);
                    capMap_[sizeList_.back()] = pmt;
                }
            }
        }
    }
    pSysDevEnum->Release();
    pSysDevEnum = NULL;
}
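// The AM_MEDIA_TYPE pointers returned by GetStreamCaps() (and kept in capMap_ above)
// are allocated by the filter and are eventually freed with the DirectShow base-class
// helper DeleteMediaType(). If those base classes are not linked in, a minimal
// equivalent looks like this (a sketch mirroring the documented pattern; the names
// carry a "Sketch" suffix to mark them as illustrative):
static void FreeMediaTypeFieldsSketch(AM_MEDIA_TYPE& mt)
{
    if (mt.cbFormat != 0) {
        CoTaskMemFree((PVOID)mt.pbFormat);   // the format block is CoTaskMem-allocated
        mt.cbFormat = 0;
        mt.pbFormat = nullptr;
    }
    if (mt.pUnk != nullptr) {
        mt.pUnk->Release();                  // release any attached IUnknown
        mt.pUnk = nullptr;
    }
}
static void DeleteMediaTypeSketch(AM_MEDIA_TYPE* pmt)
{
    if (pmt != nullptr) {
        FreeMediaTypeFieldsSketch(*pmt);
        CoTaskMemFree(pmt);                  // the structure itself is also CoTaskMem-allocated
    }
}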
Example #13
File: DShow.cpp  Project: GWARDAR/OpenPLi-1
// enumerate all needed  devices 
//
HRESULT EnumerateDevices(char *videodriver[], 
                              int *vcount, 
                              IGraphBuilder *pIGB, 
                              ICaptureGraphBuilder *pICGB, 
                              int devicenumber ,
                              IBaseFilter **pCap,
                              BOOL isVideo)
{
    ICreateDevEnum *pCreateDevEnum=NULL;
    IEnumMoniker *pEm= NULL;
    IMoniker *pM     = NULL;
    UINT     uIndex  = 0;
    HRESULT  hr      = NOERROR;
    int      cc      = 0;

	int oldv=0x7FFFFFFF;

    if (vcount!=NULL)
        {
        oldv=*vcount;
    	*vcount=-1;
        }

//    for(cc=0;cc<2;cc++)
    for(cc=1;cc<2;cc++)
        {
        hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void**)&pCreateDevEnum);
        if (SUCCEEDED(hr))
            {
            if (cc==0)
                hr = pCreateDevEnum->CreateClassEnumerator(AM_KSCATEGORY_CAPTURE/*AM_KSCATEGORY_VIDEO*/,	&pEm, 0);
            else
                {
                if (isVideo)
                    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,	&pEm, 0);
                else
                    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_AudioInputDeviceCategory,	&pEm, 0);
                }

            pCreateDevEnum->Release();
            if (SUCCEEDED(hr)&&(pEm!=NULL)) 
                {
                pEm->Reset();
                ULONG cFetched=0;
                while( (hr = pEm->Next(1, &pM, &cFetched), hr==S_OK) )
                    {
	                IPropertyBag *pBag=NULL;
	                hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pBag);
	                if(SUCCEEDED(hr)) 
                        {
	                    VARIANT var;
	                    var.vt = VT_BSTR;
	                    hr = pBag->Read(L"FriendlyName", &var, NULL);
	                    if ( (hr == NOERROR) && (videodriver!=NULL) && (vcount!=NULL) )
                            {
		                    char achName[80];
		                    WideCharToMultiByte(CP_ACP, 0, var.bstrVal, -1, achName, 80, NULL, NULL);
		                    *vcount=(*vcount)+1;
                            #if 0
                            if (cc==0)
                                lstrcpy(videodriver[*vcount],"(WDM)");
                            else
                                lstrcpy(videodriver[*vcount],"(VfW)");
                            #else
                            lstrcpy(videodriver[*vcount],"");
                            #endif        
                            lstrcat(videodriver[*vcount],achName);
		                    SysFreeString(var.bstrVal);
	                        }
	                    pBag->Release();
                        if (vcount!=NULL)
                            {
                            if ((pCap!=NULL)&&(devicenumber==*vcount))
                                hr = pM->BindToObject(0, 0, IID_IBaseFilter, (void**)pCap);
                            }
    	                }
	                pM->Release();
	                uIndex++;
                    if (vcount!=NULL)
                        {
	                    if (*vcount>=oldv)
		                    break;
                        }
                    }
                pEm->Release();
                }    
            }
        }

    return(NOERROR);
}
Example #14
UVCPrivate::UVCPrivate(DevicePtr usb)
{
  if (usb->interfaceID() != Device::USB)
    return;

  USBSystem sys;
  String devicePath = sys.getDeviceNode((USBDevice &)*usb);

  if (!devicePath.size())
  {
    logger(LOG_ERROR) << "UVC: Could not get device path for device '" << usb->id() << "'" << std::endl;
    return;
  }

  DWORD devInst;

  if (!sys.getUSBSystemPrivate().getDevInst(devicePath, devInst))
  {
    logger(LOG_ERROR) << "UVC: Could not get devInst for device '" << usb->id() << "'" << std::endl;
    return;
  }

  HRESULT hr = S_OK;
  IMoniker *moniker;
  Ptr<ICreateDevEnum> devEnum;
  Ptr<IEnumMoniker> classEnum;

  void *p;

  CoInitializeEx(NULL, COINIT_MULTITHREADED);

  // Create the system device enumerator
  if ((hr = (CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC, IID_ICreateDevEnum, (void **)&p))) != S_OK)
  {
    logger(LOG_ERROR) << "UVC: Failed to get ICreateDevEnum enumerator" << std::endl;
    return;
  }

  devEnum = Ptr<ICreateDevEnum>((ICreateDevEnum *)p, [](ICreateDevEnum *i) { i->Release(); });

  // Create an enumerator for the video capture devices
  if ((hr = devEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, (IEnumMoniker **)&p, 0)) != S_OK)
  {
    logger(LOG_ERROR) << "UVC: Failed to get VideoInputDeviceCategory class enumerator" << std::endl;
    return;
  }

  
  // If there are no enumerators for the requested type, then
  // CreateClassEnumerator will succeed, but pClassEnum will be NULL.
  if (p == NULL) 
  {
    logger(LOG_ERROR) << "UVC: Failed to get any video input devices in the enumerator" << std::endl;
    return;
  }

  classEnum = Ptr<IEnumMoniker>((IEnumMoniker *)p, [](IEnumMoniker *i) { i->Release(); });

  while ((hr = classEnum->Next(1, &moniker, NULL)) == S_OK)
  {
    IPropertyBag *propBag;
    if ((hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)&propBag)) != S_OK)
    {
      logger(LOG_WARNING) << "UVC: Could not get properties for current moniker" << std::endl;
      continue;
    }

    VARIANT varName;
    VariantInit(&varName);

    if ((hr = propBag->Read(L"DevicePath", &varName, 0)) != S_OK)
    {
      logger(LOG_WARNING) << "UVC: Could not get DevicePath for current moniker" << std::endl;
      continue;
    }

    _bstr_t b(varName.bstrVal);
    String devPath = b;
    VariantClear(&varName);

    DWORD dInst;

    DWORD parentDevInst;

    if (!sys.getUSBSystemPrivate().getDevInst(devPath, dInst, (LPGUID)&AM_KSCATEGORY_CAPTURE)) //AM_KSCATEGORY_VIDEO
    {
      logger(LOG_WARNING) << "UVC: Could not get DevInst for DevicePath = " << devPath << std::endl;
      continue;
    }
    
    if (CM_Get_Parent(&parentDevInst, dInst, 0) != CR_SUCCESS)
    {
      logger(LOG_WARNING) << "UVC: Could not get parent DevInst for DevicePath = " << devPath << std::endl;
      continue;
    }

    if (parentDevInst == devInst) // Check whether our USBDevice's DevInst matches the IMoniker's parent DevInst
    {
      if (usb->channelID() >= 0)
      {
        auto x = devPath.find("&mi_");

        if (x != String::npos)
        {
          char *endptr;
          int channel = strtol(devPath.c_str() + x + 4, &endptr, 10);

          if (channel != usb->channelID())
            continue;
        }
        else
        {
          logger(LOG_ERROR) << "UVC: Could not find channel ID of current device '" << devPath << "'" << std::endl;
          continue;
        }
      }

      if ((hr = moniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&p)) != S_OK)
      {
        logger(LOG_ERROR) << "UVC: Could not get the IBaseFilter for the current video input device" << std::endl;
        continue;
      }

      _captureFilter = Ptr<IBaseFilter>((IBaseFilter *)p, [](IBaseFilter *d) { d->Release(); });
      _captureFilter->AddRef();
      break;
    }
  }
}
Example #15
/*
 * Class:     sage_PVR350OSDRenderingPlugin
 * Method:    openOSD0
 * Signature: ()J
 */
JNIEXPORT jlong JNICALL Java_sage_PVR350OSDRenderingPlugin_openOSD0
  (JNIEnv *env, jobject jo)
{
	CoInitializeEx(NULL, COM_THREADING_MODE);
	HRESULT hr;
	slog((env, "BVF open350OSD0 called\r\n"));
	PVR350OSDPluginNativeData rv;
	ZeroMemory(&rv, sizeof(PVR350OSDPluginNativeData));
	rv.ntscModeFor350 = 1;
	// We can't load this by the GUID just like some of the KS CC stuff
    ICreateDevEnum *pSysDevEnum = NULL;
    IEnumMoniker *pEnum = NULL;
    IMoniker *pMoniker = NULL;
    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, 
        CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, 
        (void**)&pSysDevEnum);
	if (FAILED(hr)) return 0;
    hr = pSysDevEnum->CreateClassEnumerator(KSCATEGORY_DATADECOMPRESSOR, &pEnum, 0);
	if (hr != S_OK)
	{
		// Nothing to enumerate; return 0 so the caller does not mistake S_FALSE for a handle
		SAFE_RELEASE(pSysDevEnum);
		return 0;
	}
    while (S_OK == pEnum->Next(1, &pMoniker, NULL))
    {
        IPropertyBag *pPropBag = NULL;
        hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, 
                                (void **)&pPropBag);
		if (SUCCEEDED(hr))
		{
			VARIANT var;
			VariantInit(&var);
			hr = pPropBag->Read(L"FriendlyName", &var, 0);
			if (SUCCEEDED(hr))
			{
				char conv[512];
				WideCharToMultiByte(CP_ACP, 0, var.bstrVal, -1, conv, 512, 0, 0);
				if (strstr(conv, "PVR"))
				{
        			hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter,
        				(void**)(&(rv.pOSD)));
        			if (SUCCEEDED(hr))
					{
						VariantClear(&var);
						SAFE_RELEASE(pPropBag);
						SAFE_RELEASE(pMoniker);
						break;
					}
				}
			}
			VariantClear(&var);
		}
        SAFE_RELEASE(pPropBag);
        SAFE_RELEASE(pMoniker);
    }

    SAFE_RELEASE(pSysDevEnum);
    SAFE_RELEASE(pEnum);
	if (rv.pOSD)
	{
		hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
			IID_IGraphBuilder, (void **)&(rv.pGraph));
		TEST_AND_BAIL
		if (!rv.mutex350OSD)
		{
			rv.mutex350OSD = CreateMutex(NULL, FALSE, "Global\\SageTV350Sync");
			WaitForSingleObject(rv.mutex350OSD, MUTEX350WAITTIME);
		}
		hr = rv.pGraph->AddFilter(rv.pOSD, L"OSD");
		if (FAILED(hr))
		{
			SAFE_RELEASE(rv.pOSD);
			SAFE_RELEASE(rv.pGraph);
			ReleaseMutex(rv.mutex350OSD);
			CloseHandle(rv.mutex350OSD);
			elog((env, "Error opening 350 OSD hr=0x%x\r\n", hr));
			return 0;
		}
		DWORD holder;
		HKEY myKey;
		DWORD readType;
		DWORD hsize = sizeof(holder);
		if (RegCreateKeyEx(HKEY_LOCAL_MACHINE, "SOFTWARE\\Frey Technologies\\Common", 0, 0,
			REG_OPTION_NON_VOLATILE, KEY_ALL_ACCESS, 0, &myKey, 0) == ERROR_SUCCESS)
		{
			if (RegQueryValueEx(myKey, "NTSC", 0, &readType, (LPBYTE) &holder, &hsize) == ERROR_SUCCESS)
			{
				rv.ntscModeFor350 = holder;
			}
			else
			{
				RegSetValueEx(myKey, "NTSC", 0, REG_DWORD, (LPBYTE) &(rv.ntscModeFor350), sizeof(rv.ntscModeFor350));
			}

			RegCloseKey(myKey);
		}

		PVR350OSDPluginNativeData* realRV = new PVR350OSDPluginNativeData;
		memcpy(realRV, &rv, sizeof(PVR350OSDPluginNativeData));
		if (Print350OSDInfo(realRV, env, TRUE))
		{
			ReleaseMutex(realRV->mutex350OSD);
			return (jlong) realRV;
		}
		else
		{
			ReleaseMutex(realRV->mutex350OSD);
			Java_sage_PVR350OSDRenderingPlugin_closeOSD0(env, jo, (jlong)realRV);
			return 0;
		}
	}

	// No PVR-350 OSD decoder filter was found
	return 0;
}
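The OSD path above serializes access to the PVR-350 hardware across processes with the named mutex "Global\\SageTV350Sync". A minimal sketch of that generic Win32 pattern (not SageTV-specific; the 5000 ms timeout is a stand-in for MUTEX350WAITTIME):

// Sketch: cross-process serialization around a shared hardware resource.
HANDLE hMutex = CreateMutex(NULL, FALSE, "Global\\SageTV350Sync"); // opens the mutex if it already exists
if (hMutex)
{
	DWORD wait = WaitForSingleObject(hMutex, 5000); // 5000 ms stands in for MUTEX350WAITTIME
	if (wait == WAIT_OBJECT_0 || wait == WAIT_ABANDONED)
	{
		// ... touch the shared OSD state here ...
		ReleaseMutex(hMutex);
	}
	CloseHandle(hMutex);
}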
Example #16
/* Try to intelligently fetch a default video input device */
static HRESULT
GetDefaultInputDevice(IBaseFilter **ppSrcFilter)
{
    HRESULT hr = S_OK;
    IBaseFilter *pSrc = NULL;
    IMoniker *pMoniker = NULL;
    ICreateDevEnum *pDevEnum = NULL;
    IEnumMoniker *pClassEnum = NULL;

    if (!ppSrcFilter) {
        return E_POINTER;
    }

    hr = CoCreateInstance(
        CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
        IID_ICreateDevEnum, (void **)&pDevEnum
    );
    if (FAILED(hr)) return hr;

    hr = pDevEnum->CreateClassEnumerator(
        CLSID_VideoInputDeviceCategory, &pClassEnum, 0
    );
    if (FAILED(hr)) {
        SAFE_RELEASE(pDevEnum);
        return hr;
    }

    if (pClassEnum == NULL) {
        /* No devices available */
        SAFE_RELEASE(pDevEnum);
        return E_FAIL;
    }

    /* Pick the first device from the list.
     * Note that if the Next() call succeeds but there are no monikers,
     * it will return S_FALSE (which is not a failure).
     */
    hr = pClassEnum->Next (1, &pMoniker, NULL);
    if (hr == S_FALSE) {
        SAFE_RELEASE(pDevEnum);
        SAFE_RELEASE(pClassEnum);
        return E_FAIL;
    }

    hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pSrc);
    if (FAILED(hr)) {
        SAFE_RELEASE(pDevEnum);
        SAFE_RELEASE(pClassEnum);
        SAFE_RELEASE(pMoniker);
        return hr;
    }

    *ppSrcFilter = pSrc;
    (*ppSrcFilter)->AddRef();

    SAFE_RELEASE(pSrc);
    SAFE_RELEASE(pMoniker);
    SAFE_RELEASE(pDevEnum);
    SAFE_RELEASE(pClassEnum);

    return hr;
}
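SAFE_RELEASE is used throughout these samples but never defined in any of the snippets; the usual DirectShow-sample helper (an assumption here) is a null-checking Release that also clears the pointer:

// Sketch: typical SAFE_RELEASE helper assumed by the samples above.
#ifndef SAFE_RELEASE
#define SAFE_RELEASE(p) { if (p) { (p)->Release(); (p) = NULL; } }
#endif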
Example #17
static int v4w_configure_videodevice(V4wState *s)
{
	// Initialize COM
	CoInitialize(NULL);

	// get a Graph
	HRESULT hr= CoCreateInstance (CLSID_FilterGraph,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IGraphBuilder, //IID_IBaseFilter,
		(void **)&s->m_pGraph);
	if(FAILED(hr))
	{
		return -1;
	}

	// get a CaptureGraphBuilder2
	hr= CoCreateInstance (CLSID_CaptureGraphBuilder2,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_ICaptureGraphBuilder2, //IID_IBaseFilter,
		(void **)&s->m_pBuilder);
	if(FAILED(hr))
	{
		return -2;
	}

	// connect capture graph builder with the graph
	s->m_pBuilder->SetFiltergraph(s->m_pGraph);

	// get mediacontrol so we can start and stop the filter graph
	hr=s->m_pGraph->QueryInterface (IID_IMediaControl, (void **)&s->m_pControl);
	if(FAILED(hr))
	{
		return -3;
	}


	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;

	ULONG nFetched = 0;

	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
	if(FAILED(hr))
	{
		return -4;
	}

	hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (FAILED(hr) || pEnumMoniker == NULL) {
		//printf("no device\n");
		return -5;
	}

	pEnumMoniker->Reset();

	int pos=0;
	while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
	{
		IPropertyBag *pBag;
		hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag );
		if( hr != S_OK )
			continue; 

		if (s->dev[0]=='\0')
			break;

		VARIANT var;
		VariantInit(&var);
		hr = pBag->Read( L"FriendlyName", &var, NULL ); 
		if( hr != S_OK )
		{
			pMoniker->Release();
			continue;
		}
		//USES_CONVERSION;
		char szName[256];

		WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0);
		VariantClear(&var); 

		if (strcmp(szName, s->dev)==0)
			break;

		pMoniker->Release();
		pBag->Release();
		pMoniker=NULL;
		pBag=NULL;
	}

	if(pMoniker==NULL)
	{
		// No device matched the requested name, so fall back to the first capture device.
		pEnumMoniker->Reset();
		if (S_OK != pEnumMoniker->Next(1, &pMoniker, &nFetched))
			pMoniker = NULL;
	}

	if(pMoniker==NULL)
	{
		return -6;
	}

	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
	if(FAILED(hr))
	{
		return -7;
	}

	s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

	pMoniker->Release();
	pEnumMoniker->Release();
	pCreateDevEnum->Release();


	GUID pPinCategory;

	if (try_format(s->m_pDeviceFilter, s->pix_fmt, &pPinCategory)==0)
		s->pix_fmt = s->pix_fmt;	/* the requested format is supported; keep it */
	else if (try_format(s->m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0)
		s->pix_fmt = MS_YUV420P;
	else if (try_format(s->m_pDeviceFilter,MS_YUY2, &pPinCategory)==0)
		s->pix_fmt = MS_YUY2;
	else if (try_format(s->m_pDeviceFilter,MS_YUYV, &pPinCategory)==0)
		s->pix_fmt = MS_YUYV;
	else if (try_format(s->m_pDeviceFilter,MS_UYVY, &pPinCategory)==0)
		s->pix_fmt = MS_UYVY;
	else if (try_format(s->m_pDeviceFilter,MS_RGB24, &pPinCategory)==0)
		s->pix_fmt = MS_RGB24;
	else
	{
		ms_error("Unsupported video pixel format.");
		return -8;
	}

	if (s->pix_fmt == MS_YUV420P)
		ms_message("Driver supports YUV420P, using that format.");
	else if (s->pix_fmt == MS_YUY2)
		ms_message("Driver supports YUY2 (YUYV), using that format.");
	else if (s->pix_fmt == MS_YUYV)
		ms_message("Driver supports YUV422, using that format.");
	else if (s->pix_fmt == MS_UYVY)
		ms_message("Driver supports UYVY, using that format.");
	else if (s->pix_fmt == MS_RGB24)
		ms_message("Driver supports RGB24, using that format.");

	if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H);
	else
	{
		ms_error("No supported size found for format.");
		/* size not supported? */
		return -9;
	}

	return 0;
}
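The long if/else ladder of try_format_size() calls above could also be table-driven; a sketch under the assumption that try_format_size() keeps the signature used above:

/* Sketch: probe candidate resolutions from a table instead of an if/else ladder. */
static const struct { int w, h; } candidate_sizes[] = {
	{ MS_VIDEO_SIZE_QCIF_W,  MS_VIDEO_SIZE_QCIF_H  },
	{ MS_VIDEO_SIZE_CIF_W,   MS_VIDEO_SIZE_CIF_H   },
	{ MS_VIDEO_SIZE_QVGA_W,  MS_VIDEO_SIZE_QVGA_H  },
	{ MS_VIDEO_SIZE_VGA_W,   MS_VIDEO_SIZE_VGA_H   },
	{ MS_VIDEO_SIZE_720P_W,  MS_VIDEO_SIZE_720P_H  },
	/* ... remaining sizes in order of preference ... */
};

static int v4w_select_size(V4wState *s, GUID *pPinCategory)
{
	int i;
	/* try the size requested by the caller first */
	if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height, pPinCategory) == 0)
		return 0;
	for (i = 0; i < (int)(sizeof(candidate_sizes)/sizeof(candidate_sizes[0])); ++i) {
		if (try_format_size(s, s->pix_fmt, candidate_sizes[i].w, candidate_sizes[i].h, pPinCategory) == 0) {
			ms_message("Selected Size: %ix%i.", candidate_sizes[i].w, candidate_sizes[i].h);
			return 0;
		}
	}
	return -1; /* no supported size */
}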
Example #18
HRESULT FindCaptureDevice(IBaseFilter ** ppSrcFilter)
{
	HRESULT hr;
	IBaseFilter * pSrc = NULL;
	IMoniker *pMoniker = NULL;
	ULONG cFetched;

	if (!ppSrcFilter)
		return E_POINTER;

	// Create the system device enumerator
	ICreateDevEnum *pDevEnum = NULL;

	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
		IID_ICreateDevEnum, (void **)&pDevEnum);
	if (FAILED(hr))
	{
		return hr;
	}

	// Create an enumerator for the video capture devices
	IEnumMoniker *pClassEnum = NULL;

	hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
	if (FAILED(hr))
	{
		pDevEnum->Release();
		return hr;
	}

	// If there are no enumerators for the requested type, then 
	// CreateClassEnumerator will succeed, but pClassEnum will be NULL.
	if (pClassEnum == NULL)
	{
		pDevEnum->Release();
		return E_FAIL;
	}

	// Use the first video capture device on the device list.
	// Note that if the Next() call succeeds but there are no monikers,
	// it will return S_FALSE (which is not a failure).  Therefore, we
	// check that the return code is S_OK instead of using SUCCEEDED() macro.
	if (S_OK == (pClassEnum->Next(1, &pMoniker, &cFetched)))
	{
		IBindCtx *pbc = NULL;

		CreateBindCtx(0, &pbc);
		// Bind Moniker to a filter object
		hr = pMoniker->BindToObject(pbc, 0, IID_IBaseFilter, (void**)&pSrc);
		pbc->Release();
		pMoniker->Release();
		if (FAILED(hr))
		{
			pClassEnum->Release();
			pDevEnum->Release();
			return hr;
		}
	}
	else
	{
		pClassEnum->Release();
		pDevEnum->Release();
		return E_FAIL;
	}

	pClassEnum->Release();
	pDevEnum->Release();

	// Copy the found filter pointer to the output parameter.
	// Do NOT Release() the reference, since it will still be used
	// by the calling function.
	*ppSrcFilter = pSrc;

	return hr;
}
Example #19
HRESULT FindCaptureDevice(IBaseFilter ** ppSrcFilter)
{
    HRESULT hr = S_OK;
    IBaseFilter * pSrc = NULL;
    IMoniker* pMoniker =NULL;
    ICreateDevEnum *pDevEnum =NULL;
    IEnumMoniker *pClassEnum = NULL;

    if (!ppSrcFilter)
	{
        return E_POINTER;
	}
   
    // Create the system device enumerator
    hr = CoCreateInstance (CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
                           IID_ICreateDevEnum, (void **) &pDevEnum);
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't create system enumerator!  hr=0x%x"), hr);
    }

    // Create an enumerator for the video capture devices

	if (SUCCEEDED(hr))
	{
	    hr = pDevEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
		if (FAILED(hr))
		{
			Msg(TEXT("Couldn't create class enumerator!  hr=0x%x"), hr);
	    }
	}

	if (SUCCEEDED(hr))
	{
		// If there are no enumerators for the requested type, then 
		// CreateClassEnumerator will succeed, but pClassEnum will be NULL.
		if (pClassEnum == NULL)
		{
			MessageBox(ghApp,TEXT("No video capture device was detected.\r\n\r\n")
				TEXT("This sample requires a video capture device, such as a USB WebCam,\r\n")
				TEXT("to be installed and working properly.  The sample will now close."),
				TEXT("No Video Capture Hardware"), MB_OK | MB_ICONINFORMATION);
			hr = E_FAIL;
		}
	}

    // Use the first video capture device on the device list.
    // Note that if the Next() call succeeds but there are no monikers,
    // it will return S_FALSE (which is not a failure).  Therefore, we
    // check that the return code is S_OK instead of using SUCCEEDED() macro.

	if (SUCCEEDED(hr))
	{
		hr = pClassEnum->Next (1, &pMoniker, NULL);
		if (hr == S_FALSE)
		{
	        Msg(TEXT("Unable to access video capture device!"));   
			hr = E_FAIL;
		}
	}

	if (SUCCEEDED(hr))
    {
        // Bind Moniker to a filter object
        hr = pMoniker->BindToObject(0,0,IID_IBaseFilter, (void**)&pSrc);
        if (FAILED(hr))
        {
            Msg(TEXT("Couldn't bind moniker to filter object!  hr=0x%x"), hr);
        }
    }

    // Copy the found filter pointer to the output parameter.
	if (SUCCEEDED(hr))
	{
	    *ppSrcFilter = pSrc;
		(*ppSrcFilter)->AddRef();
	}

	SAFE_RELEASE(pSrc);
    SAFE_RELEASE(pMoniker);
    SAFE_RELEASE(pDevEnum);
    SAFE_RELEASE(pClassEnum);

    return hr;
}
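A typical caller of the routine above looks like this (a hedged sketch; pGraph is assumed to be an already-created IGraphBuilder):

// Sketch: bind the first capture device and hand it to the filter graph.
IBaseFilter *pSrcFilter = NULL;
HRESULT hr = FindCaptureDevice(&pSrcFilter);
if (SUCCEEDED(hr))
{
	hr = pGraph->AddFilter(pSrcFilter, L"Video Capture Source"); // the graph takes its own reference
	// ... connect or render the capture pins here ...
	SAFE_RELEASE(pSrcFilter); // drop the reference returned by FindCaptureDevice
}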
Example #20
DirectShowScanner::DirectShowScanner() {
   ICreateDevEnum *pDevEnum      = 0;
   int             hr;
   int             devNum;
   char            nameBuf[80];
   
   // Reference:  Pesce, pp 54-56.   

   debug_msg("new DirectShowScanner()\n");

   // Initialize the COM subsystem
   hr=CoInitialize(NULL);
   if (FAILED(hr)) {
	   debug_msg("Failed COM subsystem initialisation.\n");
	   	return;
   }

   // Create a helper object to find the capture devices.
   hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (LPVOID*)&pDevEnum);
   if (FAILED(hr)) {
   		debug_msg("Failed to Create a helper object to find the DS capture devices.\n");
		CoUninitialize();
		return;
   }

   IEnumMoniker *pEnum    = 0;
   IMoniker     *pMoniker = 0;
   IPropertyBag *pPropBag = 0;
   VARIANT      varName;

   // Get an enumerator over video capture filters
   hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0);
   //showErrorMessage(hr);
   if (FAILED(hr) || pEnum == 0) {
   		debug_msg("Failed to Get an enumerator over DS video capture filters.\n");
		CoUninitialize();
		return;
   }

   // Get the capture filter for each device installed, up to NUM_DEVS devices
   for( devNum=0; devNum < NUM_DEVS; ++devNum) {
      if ( pEnum->Next(1, &pMoniker, NULL) == S_OK ) {

         hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag);
         if (FAILED(hr)) {
            debug_msg("Failed to Get propbag bound to storage on DS dev: %d\n", devNum);
            pMoniker->Release();
            continue;
         }
         //showErrorMessage(hr);
         debug_msg("propbag bound to storage ok= %d\n", hr);

         VariantInit(&varName);
         hr = pPropBag->Read(L"FriendlyName", &varName, 0);
         if (FAILED(hr)) {
            debug_msg("Failed to Get friendly name read on DS dev: %d\n", devNum);
            pPropBag->Release();
            pMoniker->Release();
            continue;
         }
         //showErrorMessage(hr);
         debug_msg("friendly name read ok= %d\n", hr);

         // Need this macro in atlconv.h to go from bStr to char* - msp
         USES_CONVERSION;
         strcpy(nameBuf, W2A(varName.bstrVal));

         debug_msg("DirectShowScanner::DirectShowScanner():  found nameBuf/FriendlyName=%s\n", nameBuf);

         // needs work, but don't add drivers that look like VFW drivers - msp
         if( (strstr(nameBuf, "VFW") == NULL) ) {
            hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void **)(pCaptureFilter+devNum));
            //showErrorMessage(hr);
            if (FAILED(hr)) {
               debug_msg("Failed to bind the capture filter on DS dev: %d\n", devNum);
               VariantClear(&varName);
               pPropBag->Release();
               pMoniker->Release();
               continue;
            }
            debug_msg("capture filter bound ok= %d\n", hr);
            devs_[devNum] = new DirectShowDevice(strdup(nameBuf), pCaptureFilter[devNum]);
         } else {
            debug_msg("discarding an apparent VFW device= %s\n", nameBuf);
            devs_[devNum] = NULL;
         }

         VariantClear(&varName);
         pPropBag->Release();
         pMoniker->Release();
      }
   }

   // Release the enumerator objects so COM can release their memory
   pEnum->Release();
   pDevEnum->Release();
}
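USES_CONVERSION/W2A are ATL conversion macros; several other snippets in this collection do the same job with WideCharToMultiByte. A self-contained helper along those lines (an assumption, not part of the scanner):

// Sketch: convert a BSTR friendly name to a narrow UTF-8 string without ATL.
#include <string>
#include <windows.h>

static std::string NarrowFromBSTR(const BSTR bstr)
{
   if (!bstr) return std::string();
   int len = WideCharToMultiByte(CP_UTF8, 0, bstr, -1, NULL, 0, NULL, NULL); // includes the terminating NUL
   if (len <= 1) return std::string();
   std::string out(len, '\0');
   WideCharToMultiByte(CP_UTF8, 0, bstr, -1, &out[0], len, NULL, NULL);
   out.resize(len - 1); // drop the embedded NUL
   return out;
}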
Example #21
gboolean
gst_dshow_find_filter (CLSID input_majortype, CLSID input_subtype,
    CLSID output_majortype, CLSID output_subtype,
    gchar * prefered_filter_name, IBaseFilter ** filter)
{
  gboolean ret = FALSE;
  HRESULT hres;
  GUID arrayInTypes[2];
  GUID arrayOutTypes[2];
  IFilterMapper2 *mapper = NULL;
  IEnumMoniker *enum_moniker = NULL;
  IMoniker *moniker = NULL;
  ULONG fetched;
  gchar *prefered_filter_upper = NULL;
  gboolean exit = FALSE;

  /* initialize output parameter */
  if (filter)
    *filter = NULL;

  /* create a private copy of prefered filter substring in upper case */
  if (prefered_filter_name) {
    prefered_filter_upper = g_strdup (prefered_filter_name);
    _strupr (prefered_filter_upper);
  }

  hres = CoCreateInstance (CLSID_FilterMapper2, NULL, CLSCTX_INPROC,
      IID_IFilterMapper2, (void **) &mapper);
  if (FAILED (hres))
    goto clean;

  memcpy (&arrayInTypes[0], &input_majortype, sizeof (CLSID));
  memcpy (&arrayInTypes[1], &input_subtype, sizeof (CLSID));
  memcpy (&arrayOutTypes[0], &output_majortype, sizeof (CLSID));
  memcpy (&arrayOutTypes[1], &output_subtype, sizeof (CLSID));

  hres =
      mapper->EnumMatchingFilters (&enum_moniker, 0, FALSE,
      MERIT_DO_NOT_USE + 1, TRUE, 1, arrayInTypes, NULL, NULL, FALSE, TRUE, 1,
      arrayOutTypes, NULL, NULL);
  if (FAILED (hres))
    goto clean;

  enum_moniker->Reset ();

  while (hres = enum_moniker->Next (1, &moniker, &fetched), hres == S_OK
      && !exit) {
    IBaseFilter *filter_temp = NULL;
    IPropertyBag *property_bag = NULL;
    gchar *friendly_name = NULL;

    hres =
        moniker->BindToStorage (NULL, NULL, IID_IPropertyBag,
        (void **) &property_bag);
    if (SUCCEEDED (hres) && property_bag) {
      VARIANT varFriendlyName;
      VariantInit (&varFriendlyName);

      hres = property_bag->Read (L"FriendlyName", &varFriendlyName, NULL);
      if (hres == S_OK && varFriendlyName.bstrVal) {
        friendly_name =
            g_utf16_to_utf8 ((const gunichar2 *) varFriendlyName.bstrVal,
            wcslen (varFriendlyName.bstrVal), NULL, NULL, NULL);
        if (friendly_name)
          _strupr (friendly_name);
        SysFreeString (varFriendlyName.bstrVal);
      }
      property_bag->Release ();
    }

    hres =
        moniker->BindToObject (NULL, NULL, IID_IBaseFilter,
        (void **) &filter_temp);
    if (SUCCEEDED (hres) && filter_temp) {
      ret = TRUE;
      if (filter) {
        if (*filter)
          (*filter)->Release ();

        *filter = filter_temp;
        (*filter)->AddRef ();

        if (prefered_filter_upper && friendly_name &&
            strstr (friendly_name, prefered_filter_upper))
          exit = TRUE;
      }

      /* if we just want to know if the formats are supported OR
         if we don't care about what will be the filter used
         => we can stop enumeration */
      if (!filter || !prefered_filter_upper)
        exit = TRUE;

      filter_temp->Release ();
    }

    g_free (friendly_name);
    moniker->Release ();
  }

clean:
  g_free (prefered_filter_upper);
  if (enum_moniker)
    enum_moniker->Release ();
  if (mapper)
    mapper->Release ();

  return ret;
}
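As an example, the helper above can be asked for a filter that decodes MJPEG video to RGB24 (a hedged sketch; the preferred-name string is purely illustrative):

/* Sketch: look up a DirectShow filter that accepts MJPEG video and outputs RGB24. */
IBaseFilter *decoder = NULL;
if (gst_dshow_find_filter (MEDIATYPE_Video, MEDIASUBTYPE_MJPG,
        MEDIATYPE_Video, MEDIASUBTYPE_RGB24,
        (gchar *) "MJPEG DECOMPRESSOR", &decoder)) {
  /* ... add 'decoder' to the graph, then decoder->Release () when finished ... */
}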
Example #22
HRESULT CAccessSys::FindCaptureDevice(void)
{
	HRESULT hr = S_OK;
	IBaseFilter * pSrc = NULL;
	IMoniker* pMoniker = NULL;
	ICreateDevEnum *pDevEnum = NULL;
	IEnumMoniker *pClassEnum = NULL;


	// Create the system device enumerator
	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
		IID_ICreateDevEnum, (void **)&pDevEnum);
	if (FAILED(hr))
	{
		Msg(TEXT("Couldn't create system enumerator!  hr=0x%x"), hr);
	}

	// Create an enumerator for the video capture devices

	if (SUCCEEDED(hr))
	{
		hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
		if (FAILED(hr))
		{
			Msg(TEXT("Couldn't create class enumerator!  hr=0x%x"), hr);
		}
	}

	if (SUCCEEDED(hr))
	{
		// If there are no enumerators for the requested type, then 
		// CreateClassEnumerator will succeed, but pClassEnum will be NULL.
		if (pClassEnum == NULL)
		{
			MessageBox(NULL, TEXT("No video capture device was detected.\r\n\r\n")
				TEXT("This sample requires a video capture device, such as a USB WebCam,\r\n")
				TEXT("to be installed and working properly.  The sample will now close."),
				TEXT("No Video Capture Hardware"), MB_OK | MB_ICONINFORMATION);
			hr = E_FAIL;
		}
	}

	// Use the first video capture device on the device list.
	// Note that if the Next() call succeeds but there are no monikers,
	// it will return S_FALSE (which is not a failure).  Therefore, we
	// check that the return code is S_OK instead of using SUCCEEDED() macro.

	if (SUCCEEDED(hr))
	{
		hr = pClassEnum->Next(1, &pMoniker, NULL);
		if (hr == S_FALSE)
		{
			Msg(TEXT("Unable to access video capture device!"));
			hr = E_FAIL;
		}
	}

	if (SUCCEEDED(hr))
	{
		// Bind Moniker to a filter object
		hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pSrc);
		if (FAILED(hr))
		{
			Msg(TEXT("Couldn't bind moniker to filter object!  hr=0x%x"), hr);
		}
	}

	// Copy the found filter pointer to the output parameter.
	ULONG ref;
	if (SUCCEEDED(hr))
	{
		p_streams[0].p_device_filter = pSrc;
		ref = p_streams[0].p_device_filter->AddRef();
	}

	hr = p_capture_graph_builder2->FindInterface(&PIN_CATEGORY_CAPTURE,
		&MEDIATYPE_Video, pSrc,
		IID_IAMStreamConfig, (void **)&p_VSC);
	if (FAILED(hr))
	{
		Msg(TEXT("Couldn't find IAMStreamConfig!  hr=0x%x"), hr);
	}
	else {
		AM_MEDIA_TYPE *pmt;
		//VIDEO_STREAM_CONFIG_CAPS scc;
		BYTE* scc = NULL;
		int piCount, piSize;

		hr = p_VSC->GetNumberOfCapabilities(&piCount, &piSize);
		if (hr == S_OK){
			for (int i = 0; i < piCount; i++){
				scc = new BYTE[piSize];
				hr = p_VSC->GetStreamCaps(i, &pmt, scc/*reinterpret_cast<BYTE*>(&scc)*/);
				//hr = p_VSC->GetFormat(&pmt);

				double FrameRate = 15.0;
				if (hr == NOERROR)
				{
					if (pmt->subtype == MEDIASUBTYPE_RGB24 ||
						pmt->subtype == MEDIASUBTYPE_I420 ||
						pmt->subtype == MEDIASUBTYPE_YUY2){
						if (pmt->formattype == FORMAT_VideoInfo)
						{
							//pmt->subtype = MEDIASUBTYPE_RGB24;
							VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)pmt->pbFormat;
							if (pvi->bmiHeader.biHeight == 240 && pvi->bmiHeader.biWidth == 320){
								pvi->AvgTimePerFrame = (LONGLONG)(10000000 / FrameRate);
								//pvi->bmiHeader.biHeight = 240;
								//pvi->bmiHeader.biWidth = 320;
								hr = p_VSC->SetFormat(pmt);
								if (FAILED(hr)){
									Msg(TEXT("couldn't set video format! hr = 0x%x"), hr);
								}
								DeleteMediaType(pmt);
								delete[] scc;
								break;
							}
						}
					}
					DeleteMediaType(pmt);
				}
				delete[] scc;
			}
		}
		// note: do not Release() pSrc here; p_streams[0].p_device_filter still owns the AddRef'd reference
	}
	SAFE_RELEASE(pSrc);
	SAFE_RELEASE(pMoniker);
	SAFE_RELEASE(pClassEnum);

	// Create an enumerator for the audio capture devices

	if (SUCCEEDED(hr))
	{
		hr = pDevEnum->CreateClassEnumerator(CLSID_AudioInputDeviceCategory, &pClassEnum, 0);
		if (FAILED(hr))
		{
			Msg(TEXT("Couldn't create class enumerator!  hr=0x%x"), hr);
		}
	}

	if (SUCCEEDED(hr))
	{
		// If there are no enumerators for the requested type, then 
		// CreateClassEnumerator will succeed, but pClassEnum will be NULL.
		if (pClassEnum == NULL)
		{
			MessageBox(NULL, TEXT("No audio capture device was detected.\r\n\r\n")
				TEXT("This sample requires a audio capture device\r\n")
				TEXT("to be installed and working properly.  The sample will now close."),
				TEXT("No Audio Capture Hardware"), MB_OK | MB_ICONINFORMATION);
			hr = E_FAIL;
		}
	}

	// Use the first video capture device on the device list.
	// Note that if the Next() call succeeds but there are no monikers,
	// it will return S_FALSE (which is not a failure).  Therefore, we
	// check that the return code is S_OK instead of using SUCCEEDED() macro.

	if (SUCCEEDED(hr))
	{
		hr = pClassEnum->Next(1, &pMoniker, NULL);
		if (hr == S_FALSE)
		{
			Msg(TEXT("Unable to access audio capture device!"));
			hr = E_FAIL;
		}
	}

	if (SUCCEEDED(hr))
	{
		// Bind Moniker to a filter object
		hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pSrc);
		if (FAILED(hr))
		{
			Msg(TEXT("Couldn't bind moniker to filter object!  hr=0x%x"), hr);
		}
	}

	// Copy the found filter pointer to the output parameter.
	if (SUCCEEDED(hr))
	{
		p_streams[1].p_device_filter = pSrc;
		ref = p_streams[1].p_device_filter->AddRef();
	}

	SAFE_RELEASE(pSrc);
	SAFE_RELEASE(pMoniker);
	SAFE_RELEASE(pClassEnum);

	SAFE_RELEASE(pDevEnum);

	return hr;
}
Example #23
File: main.cpp  Project: 7m4mon/Q200_PTR
/*
 * Enumerate all video devices
 *
 * See also:
 *
 * Using the System Device Enumerator:
 *     http://msdn2.microsoft.com/en-us/library/ms787871.aspx
 */
int enum_devices()
{
	HRESULT hr;

	printf("Enumerating video input devices ...\n");

	// Create the System Device Enumerator.
	ICreateDevEnum *pSysDevEnum = NULL;
	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
		IID_ICreateDevEnum, (void **)&pSysDevEnum);
	if(FAILED(hr))
	{
		fprintf(stderr, "ERROR: Unable to create system device enumerator.\n");
		return hr;
	}

	// Obtain a class enumerator for the video input device category.
	IEnumMoniker *pEnumCat = NULL;
	hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0);

	if(hr == S_OK) 
	{
		// Enumerate the monikers.
		IMoniker *pMoniker = NULL;
		ULONG cFetched;
		while(pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK)
		{
			IPropertyBag *pPropBag;
			hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, 
				(void **)&pPropBag);
			if(SUCCEEDED(hr))
			{
				// To retrieve the filter's friendly name, do the following:
				VARIANT varName;
				VariantInit(&varName);
				hr = pPropBag->Read(L"FriendlyName", &varName, 0);
				if (SUCCEEDED(hr))
				{
					// Display the name in your UI somehow.
					wprintf(L"  Found device: %s\n", varName.bstrVal);
				}
				VariantClear(&varName);

				// To create an instance of the filter, do the following:
				IBaseFilter *pFilter = NULL;
				hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter,
					(void**)&pFilter);
				if (SUCCEEDED(hr))
					process_filter(pFilter);

				//Remember to release pFilter later.
				pPropBag->Release();

			}
			pMoniker->Release();
		}
		pEnumCat->Release();
	}
	pSysDevEnum->Release();

	return 0;
}
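process_filter() is referenced but not shown in this snippet; a hypothetical version that merely lists the filter's pins could look like the following (an assumption, not the project's actual implementation):

// Hypothetical process_filter(): print the name and direction of each pin.
static void process_filter(IBaseFilter *pFilter)
{
	if (!pFilter) return;
	IEnumPins *pEnumPins = NULL;
	if (SUCCEEDED(pFilter->EnumPins(&pEnumPins)))
	{
		IPin *pPin = NULL;
		while (pEnumPins->Next(1, &pPin, NULL) == S_OK)
		{
			PIN_INFO info;
			if (SUCCEEDED(pPin->QueryPinInfo(&info)))
			{
				wprintf(L"    pin: %s (%s)\n", info.achName,
					info.dir == PINDIR_OUTPUT ? L"output" : L"input");
				if (info.pFilter) info.pFilter->Release(); // QueryPinInfo AddRefs the owning filter
			}
			pPin->Release();
		}
		pEnumPins->Release();
	}
	pFilter->Release(); // balance the BindToObject reference taken by the caller
}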
Example #24
bool GetAudioCaptureDevices(std::vector<IBaseFilter*>& captureSourceFilterList, std::vector<std::wstring>& captureDeviceNameList)
{
	HRESULT hr;
	CoInitialize(NULL);
	ComUninitial comUninitial;

	ICreateDevEnum* createDevEnum;
	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void**)&createDevEnum);
	if (FAILED(hr))
	{
		ErrorPrint("Create system device enum error",hr);
		return false;
	}
	ComReleaser createDevEnumReleaser(createDevEnum);

	IEnumMoniker *enumMoniker = NULL;
	hr = createDevEnum->CreateClassEnumerator(CLSID_AudioInputDeviceCategory,&enumMoniker, 0);
	if (hr != S_OK)
	{
		ErrorPrint("Create class enumerator error",hr);
		return false;
	}
	std::cout<<"HRESULT VALUE:"<<std::hex<<hr<<std::endl;
	std::cout<<"Enum moniker value:"<<std::hex<<(int)enumMoniker<<std::endl;
	ComReleaser enumMonikerReleaser(enumMoniker);

	IMoniker* moniker;
	int i = 0;
	while (S_OK == enumMoniker->Next(1, &moniker, NULL))
	{
		std::cout<<"Find audio count:"<<++i<<std::endl;
		ComReleaser monikerReleaser(moniker);
		IPropertyBag* propBag;
		hr = moniker->BindToStorage(NULL,NULL,IID_IPropertyBag, (void**)&propBag);
		if (FAILED(hr))
		{
			ErrorPrint("Bind to storage error",hr);
		}
		else
		{
			ComReleaser propBagReleaser(propBag);

			VARIANT varName;
			VariantInit(&varName);
			hr = propBag->Read(L"FriendlyName", &varName, NULL);
			if (FAILED(hr))
			{
				ErrorPrint("Get audio input device friendly name error",hr);
				VariantClear(&varName);
				continue;
			}
			IBaseFilter* audioCaptureInputDevice;
			hr = moniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&audioCaptureInputDevice);
			if (FAILED(hr))
			{
				ErrorPrint("Bind to object error",hr);
				VariantClear(&varName);
				continue;
			}

			captureSourceFilterList.push_back(audioCaptureInputDevice);
			captureDeviceNameList.push_back(varName.bstrVal); // copy the friendly name before clearing the VARIANT
			VariantClear(&varName);
		}
	}

#ifdef _DEBUG
	std::cout<<"Start print all capture device name:\n";
	using namespace boost::lambda;
	std::for_each(captureDeviceNameList.begin(), captureDeviceNameList.end(), wcout<<_1<<"\n");
#endif

	return true;
}
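ComReleaser and ErrorPrint are project helpers that never appear in these snippets; a minimal sketch of the scope-bound releaser they imply (an assumption about its shape):

// Sketch: RAII wrapper that calls Release() on a COM interface when the scope ends.
class ComReleaser
{
public:
	explicit ComReleaser(IUnknown* p) : m_p(p) {}
	~ComReleaser() { if (m_p) m_p->Release(); }
private:
	IUnknown* m_p;
	ComReleaser(const ComReleaser&);            // non-copyable
	ComReleaser& operator=(const ComReleaser&);
};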
Example #25
static int v4w_open_videodevice(V4wState *s, int format, MSVideoSize *vsize)
{
	// Initialize COM
	CoInitialize(NULL);

	// get a Graph
	HRESULT hr=s->m_pGraph.CoCreateInstance(CLSID_FilterGraph);
	if(FAILED(hr))
	{
		return -1;
	}

	// get a CaptureGraphBuilder2
#if !defined(_WIN32_WCE)
	hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder2);
#else
	hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder);
#endif
	if(FAILED(hr))
	{
		return -2;
	}

	// connect capture graph builder with the graph
	s->m_pBuilder->SetFiltergraph(s->m_pGraph);

	// get mediacontrol so we can start and stop the filter graph
	hr=s->m_pGraph.QueryInterface(&(s->m_pControl));
	if(FAILED(hr))
	{
		return -3;
	}

	// get DXFilter
	s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
	if(s->m_pDXFilter==NULL)
	{
		return -4;
	}
	s->m_pDXFilter->AddRef();
	if(FAILED(hr))
	{
		return -4;
	}

	CMediaType mt;
	mt.SetType(&MEDIATYPE_Video);

	if (format==MS_YUV420P)
	{
		GUID m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
		mt.SetSubtype(&m);
		mt.SetSubtype(&MEDIASUBTYPE_YV12);
	}
	else //if (format==MS_RGB24)
	{
		mt.SetSubtype(&MEDIASUBTYPE_RGB24);
	}

	//mt.SetSubtype(&MEDIASUBTYPE_IYUV);
	//mt.SetSubtype(&MEDIASUBTYPE_YUYV);
	//mt.SetSubtype(&MEDIASUBTYPE_RGB24);
	//mt.SetSampleSize();
	mt.formattype = FORMAT_VideoInfo;
	mt.SetTemporalCompression(FALSE);

	VIDEOINFO *pvi = (VIDEOINFO *)
	mt.AllocFormatBuffer(sizeof(VIDEOINFO));
	if (NULL == pvi)
		return E_OUTOFMEMORY;
	ZeroMemory(pvi, sizeof(VIDEOINFO));
	if (format==MS_YUV420P)
	{
		pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','V','1','2');
		pvi->bmiHeader.biBitCount = 12;
	}
	else
	{
		pvi->bmiHeader.biCompression = BI_RGB;
		pvi->bmiHeader.biBitCount = 24;
	}
	pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	pvi->bmiHeader.biWidth = vsize->width;
	pvi->bmiHeader.biHeight = vsize->height;
	pvi->bmiHeader.biPlanes = 1;
	pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
	pvi->bmiHeader.biClrImportant = 0;
	mt.SetSampleSize(pvi->bmiHeader.biSizeImage);
	mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

	hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
	if(FAILED(hr))
	{
		return -5;
	}

	hr = s->m_pDXFilter->SetCallback(Callback); 
	if(FAILED(hr))
	{
		return -6;
	}

	hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
	 (LPVOID *)&s->m_pIDXFilter);
	if(FAILED(hr))
	{
		return -7;
	}

	hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
	if(FAILED(hr))
	{
		return -8;
	}

#ifdef WM6
	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;

	ULONG nFetched = 0;

	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
	if(FAILED(hr))
	{
		return -9;
	}

	hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (FAILED(hr) || pEnumMoniker == NULL) {
		//printf("no device\n");
		return -10;
	}

	pEnumMoniker->Reset();

	hr = pEnumMoniker->Next(1, &pMoniker, &nFetched);
	if(FAILED(hr) || pMoniker==NULL)
	{
		return -11;
	}

	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
	if(FAILED(hr))
	{
		return -12;
	}

	s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

	pMoniker->Release();
	pEnumMoniker->Release();
	pCreateDevEnum->Release();
#else
	WCHAR wzDeviceName[ MAX_PATH + 1 ];
	CComVariant   varCamName;
	CPropertyBag PropBag;
    CComPtr<IPersistPropertyBag>    pPropertyBag;
	GetFirstCameraDriver(wzDeviceName);

	hr = s->m_pDeviceFilter.CoCreateInstance( CLSID_VideoCapture ); 
	if (FAILED(hr))
	{
		return -8;
	}

	s->m_pDeviceFilter.QueryInterface( &pPropertyBag );
	varCamName = wzDeviceName;
	if( varCamName.vt != VT_BSTR ) {
	  return E_OUTOFMEMORY;
	}
	PropBag.Write( L"VCapName", &varCamName );   
	pPropertyBag->Load( &PropBag, NULL );
	pPropertyBag.Release();

	hr = s->m_pGraph->AddFilter( s->m_pDeviceFilter, L"Video capture source" );
#endif

	if (FAILED(hr))
	{
		return -8;
	}

	// get null renderer
	s->m_pNullRenderer = NULL;
#if 0
	hr=s->m_pNullRenderer.CoCreateInstance(CLSID_NullRenderer);
	if(FAILED(hr))
	{
		return -13;
	}
#endif
	if (s->m_pNullRenderer!=NULL)
	{
		s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
	}

	hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_PREVIEW,
		&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
	if (FAILED(hr))
	{
		//hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_CAPTURE,
		//	&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
		if (FAILED(hr))
		{
			return -14;
		}
	}
	
	//m_pDXFilter->SetBufferSamples(TRUE);


		// Create the System Device Enumerator.
IFilterMapper *pMapper = NULL;
//IEnumMoniker *pEnum = NULL;
IEnumRegFilters *pEnum = NULL;

hr = CoCreateInstance(CLSID_FilterMapper, 
    NULL, CLSCTX_INPROC, IID_IFilterMapper, 
    (void **) &pMapper);

if (FAILED(hr))
{
    // Error handling omitted for clarity.
}

GUID arrayInTypes[2];
arrayInTypes[0] = MEDIATYPE_Video;
arrayInTypes[1] = MEDIASUBTYPE_dvsd;

hr = pMapper->EnumMatchingFilters(
        &pEnum,
        MERIT_HW_COMPRESSOR, // Minimum merit.
        FALSE,               // At least one input pin?
        MEDIATYPE_NULL,
        MEDIASUBTYPE_NULL,
        FALSE,              // Must be a renderer?
        FALSE,               // At least one output pin?
        MEDIATYPE_NULL,                  
        MEDIASUBTYPE_NULL);              

// Enumerate the monikers.
//IMoniker *pMoniker;
REGFILTER *pMoniker;
ULONG cFetched;
while (pEnum->Next(1, &pMoniker, &cFetched) == S_OK)
{
    IPropertyBag *pPropBag = NULL;
#if 0
	hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, 
       (void **)&pPropBag);

    if (SUCCEEDED(hr))
    {
        // To retrieve the friendly name of the filter, do the following:
        VARIANT varName;
        VariantInit(&varName);
        hr = pPropBag->Read(L"FriendlyName", &varName, 0);
        if (SUCCEEDED(hr))
        {
            // Display the name in your UI somehow.
        }
        VariantClear(&varName);

        // To create an instance of the filter, do the following:
        IBaseFilter *pFilter;
        hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&pFilter);
        // Now add the filter to the graph. Remember to release pFilter later.
    
        // Clean up.
        pPropBag->Release();
    }
    pMoniker->Release();
#endif

}

// Clean up.
pMapper->Release();
pEnum->Release();




	s_callback = s;
	hr = s->m_pControl->Run();
	if(FAILED(hr))
	{
		return -15;
	}

	s->rotregvalue=1;
	s->pix_fmt = format;
	s->vsize.height = vsize->height;
	s->vsize.width = vsize->width;
	return 0;
}
Example #26
HRESULT FindVideoCompressor(const std::wstring& format, IBaseFilter *&result)
{
	HRESULT hr = S_OK;

	CoInitialize(NULL);
	ComUninitial comUninitial;

	ICreateDevEnum *createDevEnum;
	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void**)&createDevEnum);
	if(FAILED(hr))
	{
		ErrorPrint("Create system device enum error", hr);
		return hr;
	}
	ComReleaser createDevEnumReleaser(createDevEnum);

	IEnumMoniker *enumMoniker = NULL;
	hr = createDevEnum->CreateClassEnumerator(CLSID_VideoCompressorCategory, &enumMoniker, 0);
	if (hr != S_OK) // S_FALSE means the category is empty and enumMoniker stays NULL
	{
		ErrorPrint("Create Video compressor enum error", hr);
		return FAILED(hr) ? hr : E_FAIL;
	}
	ComReleaser enumMonikerReleaser(enumMoniker);

	IMoniker *moniker;
	while(S_OK == enumMoniker->Next(1, &moniker, NULL))
	{
		ComReleaser monikerReleaser(moniker);

		LPOLESTR oleDisplayName = NULL;
		hr = moniker->GetDisplayName(NULL, NULL, &oleDisplayName);
		if(FAILED(hr))
		{
			ErrorPrint("Get display name error", hr);
			continue;
		}
		std::wstring displayName(oleDisplayName);
		CoTaskMemFree(oleDisplayName); // GetDisplayName allocates with the COM allocator

		size_t slashPos;
		for (slashPos = 0; slashPos < displayName.size(); ++slashPos)
		{
			if(displayName[slashPos] == L'\\')
				break;
		}
		if(slashPos == displayName.size())
		{
			ErrorPrint("Find slash error");
			continue;
		}

		displayName = displayName.substr(slashPos+1);
		if (displayName == format)
		{
			hr = moniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&result);
			if(FAILED(hr))
			{
				ErrorPrint("Create video compressor filter error", hr);
				continue;
			}
			return S_OK;
		}

	}

	return E_FAIL;
}
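Once located, the compressor is normally wired between the capture source and the downstream mux or renderer. A hedged sketch assuming an existing IGraphBuilder (pGraph), ICaptureGraphBuilder2 (pBuilder), and an already-bound capture filter (pCaptureFilter); the "msvc" display-name tail is illustrative only:

// Sketch: insert the located compressor into a capture graph.
IBaseFilter *pCompressor = NULL;
HRESULT hr = FindVideoCompressor(L"msvc", pCompressor);
if (SUCCEEDED(hr))
{
	hr = pGraph->AddFilter(pCompressor, L"Video Compressor");
	if (SUCCEEDED(hr))
		hr = pBuilder->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
			pCaptureFilter, pCompressor, NULL);
	pCompressor->Release(); // the graph holds its own reference
}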
Example #27
//-----------------------------------------------------------------------------
// AddFilter2
// Attempts to locate a filter of a given class category and name
HRESULT CDSUtils::AddFilter2(IGraphBuilder* pGraph, const GUID &clsid, LPCWSTR pName, IBaseFilter** ppFilter)
{
	HRESULT hr = S_OK;

	if (pGraph && pName && ppFilter)
	{
		// first enumerate the system devices for the specifed class and filter name
		CComPtr<ICreateDevEnum> pSysDevEnum = NULL;
		hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, reinterpret_cast<void**>(&pSysDevEnum));

		if (SUCCEEDED(hr))
		{
			CComPtr<IEnumMoniker> pEnumCat = NULL;
			hr = pSysDevEnum->CreateClassEnumerator(clsid, &pEnumCat, 0);

			if (S_OK == hr)
			{
				IMoniker* pMoniker = NULL;
				bool Loop = true;
				while ((S_OK == pEnumCat->Next(1, &pMoniker, NULL)) && Loop)
				{
					IPropertyBag* pPropBag = NULL;
					hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, reinterpret_cast<void**>(&pPropBag));

					if (SUCCEEDED(hr))
					{
						VARIANT varName;
						VariantInit(&varName);
						hr = pPropBag->Read(L"FriendlyName", &varName, 0);
						if (SUCCEEDED(hr))
						{
							if (0 == wcscmp(varName.bstrVal, pName))
							{
								hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, reinterpret_cast<void**>(ppFilter));
								Loop = false;
							}
						}

						VariantClear(&varName);

						// contained within a loop, decrement the reference count
						SAFE_RELEASE(pPropBag);
					}
					SAFE_RELEASE(pMoniker);
				}
			}
		}

		// if a filter has been located add it to the graph
		if (*ppFilter)
		{
			hr = pGraph->AddFilter(reinterpret_cast<IBaseFilter*>(*ppFilter), pName);
		}
		else
		{
			hr = E_FAIL;
		}
	}
	else
	{
		hr = E_INVALIDARG;
	}

	return hr;
}
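Calling AddFilter2 is then a one-liner; a hedged sketch that pulls a compressor into an existing graph by its friendly name (the name string is illustrative, and AddFilter2 is assumed to be a static helper):

// Sketch: locate a filter by category + friendly name and add it to the graph.
CComPtr<IBaseFilter> pCompressor;
HRESULT hr = CDSUtils::AddFilter2(pGraph, CLSID_VideoCompressorCategory,
	L"MJPEG Compressor", &pCompressor);
if (SUCCEEDED(hr))
{
	// pCompressor is now part of pGraph and can be connected like any other filter.
}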
Example #28
bool GetCaptureCrossbarDevices(std::vector<IBaseFilter*>& crossbarFilterList, std::vector<std::wstring>& crossbarNameList)
{
	HRESULT hr;

	ICreateDevEnum* createDevEnum; // system device enumerator
	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void**)&createDevEnum);
	if(FAILED(hr))
	{
		ErrorPrint("Crate system device enum error", hr);
		return false;
	}
	ComReleaser createDevEnumReleaser(createDevEnum);

	IEnumMoniker *enumMoniker;
	hr = createDevEnum->CreateClassEnumerator(AM_KSCATEGORY_CROSSBAR, &enumMoniker, 0); // create an enumerator for the crossbar category
	if(hr != S_OK)
	{
		ErrorPrint("Create class enumerator error", hr);
		return false;
	}
	ComReleaser enumMonikerReleaser(enumMoniker);

	IMoniker *moniker;
	while(S_OK == enumMoniker->Next(1, &moniker, NULL))  // enumerate the crossbar devices
	{
		ComReleaser monikerReleaser(moniker);
		IPropertyBag* propBag;
		hr = moniker->BindToStorage(NULL, NULL, IID_IPropertyBag, (void**)&propBag);
		if(FAILED(hr))
		{
			ErrorPrint("Bind to storage error", hr);
			return false;
		}
		ComReleaser propBagReleaser(propBag);

		VARIANT varName;
		VariantInit(&varName);
		hr = propBag->Read(L"FriendlyName", &varName, NULL);
		if(FAILED(hr))
		{
			ErrorPrint("Read friendly name error", hr);
			VariantClear(&varName);
			continue;
		}

		IBaseFilter* captureSourceFilter;
		hr = moniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&captureSourceFilter); // create the source filter for this crossbar device; the caller must release it
		if (FAILED(hr))
		{
			ErrorPrint(wstring(L"Get capture device ") + varName.bstrVal + L" source filter error", hr);
			VariantClear(&varName);
			continue;
		}

		crossbarNameList.push_back(varName.bstrVal); // copy the friendly name before clearing the VARIANT
		crossbarFilterList.push_back(captureSourceFilter);
		VariantClear(&varName);
	}

#ifdef _DEBUG
	std::cout<<"Start print all  crossbar device name:\n";
	using namespace boost::lambda;
	std::for_each(crossbarNameList.begin(), crossbarNameList.end(), wcout<<_1<<"\n");
#endif

	return true;
}
Example #29
void DSManager::initCaptureDevices()
{
    HRESULT ret = 0;
    VARIANT name;
    ICreateDevEnum* devEnum = NULL;
    IEnumMoniker* monikerEnum = NULL;
    IMoniker* moniker = NULL;

    if(m_devices.size() > 0)
    {
        /* clean up our list in case of reinitialization */
        for(std::list<DSCaptureDevice*>::iterator it = m_devices.begin() ; it != m_devices.end() ; ++it)
        {
            delete *it;
        }
        m_devices.clear();
    }

    /* get the available devices list */
    ret = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
            IID_ICreateDevEnum, (void**)&devEnum);

    if(FAILED(ret))
    {
        return;
    }

    ret = devEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, 
            &monikerEnum, 0);

    /* error or no devices */
    if(FAILED(ret) || ret == S_FALSE)
    {
        devEnum->Release();
        return;
    }

    /* loop and initialize all available capture devices */
    while(monikerEnum->Next(1, &moniker, 0) == S_OK)
    {
        DSCaptureDevice* captureDevice = NULL;
        IPropertyBag* propertyBag = NULL;

        {
          /* skip legacy VfW wrapper devices, releasing everything bound along the way */
          IBaseFilter* cp = NULL;
          if(!FAILED(moniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&cp)))
          {
            IAMVfwCaptureDialogs* vfw = NULL;
            bool isVfwWrapper =
              !FAILED(cp->QueryInterface(IID_IAMVfwCaptureDialogs, (void**)&vfw)) && vfw != NULL;
            if(vfw)
              vfw->Release();
            cp->Release();
            if(isVfwWrapper)
            {
              moniker->Release();
              continue;
            }
          }
        }

        /* get properties of the device */
        ret = moniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)&propertyBag);
        if(!FAILED(ret))
        {
            VariantInit(&name);

            ret = propertyBag->Read(L"FriendlyName", &name, 0);
            if(FAILED(ret))
            {
                VariantClear(&name);
                propertyBag->Release();
                moniker->Release();
                continue;
            }

            /* create a new capture device */
            captureDevice = new DSCaptureDevice(name.bstrVal);
            /* wprintf(L"%ws\n", name.bstrVal); */

            if(captureDevice && captureDevice->initDevice(moniker))
            {
                /* initialization success, add to the list */
                m_devices.push_back(captureDevice);
            }
            else
            {
                /* printf("failed to initialize device\n"); */
                delete captureDevice;
            }

            /* clean up */
            VariantClear(&name);
            propertyBag->Release();
        }
        moniker->Release();
    }

    /* cleanup */
    monikerEnum->Release();
    devEnum->Release();
}
static GstCaps *
gst_dshowvideosrc_get_caps (GstBaseSrc * basesrc, GstCaps *filter)
{
    HRESULT hres = S_OK;
    IBindCtx *lpbc = NULL;
    IMoniker *videom;
    DWORD dwEaten;
    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (basesrc);
    gunichar2 *unidevice = NULL;

    if (src->caps) {
        return gst_caps_ref (src->caps);
    }

    if (!src->device) {
        src->device =
            gst_dshow_getdevice_from_devicename (&CLSID_VideoInputDeviceCategory,
                    &src->device_name);
        if (!src->device) {
            GST_ERROR ("No video device found.");
            return NULL;
        }
    }

    unidevice =
        g_utf8_to_utf16 (src->device, strlen (src->device), NULL, NULL, NULL);

    if (!src->video_cap_filter) {
        hres = CreateBindCtx (0, &lpbc);
        if (SUCCEEDED (hres)) {
            hres =
                MkParseDisplayName (lpbc, (LPCOLESTR) unidevice, &dwEaten, &videom);
            if (SUCCEEDED (hres)) {
                hres = videom->BindToObject (lpbc, NULL, IID_IBaseFilter,
                                             (LPVOID *) & src->video_cap_filter);
                videom->Release ();
            }
            lpbc->Release ();
        }
    }

    if (!src->caps) {
        src->caps = gst_caps_new_empty ();
    }

    if (src->video_cap_filter && gst_caps_is_empty (src->caps)) {
        /* get the capture pins supported types */
        IPin *capture_pin = NULL;
        IEnumPins *enumpins = NULL;
        HRESULT hres;

        hres = src->video_cap_filter->EnumPins (&enumpins);
        if (SUCCEEDED (hres)) {
            while (enumpins->Next (1, &capture_pin, NULL) == S_OK) {
                IKsPropertySet *pKs = NULL;
                hres =
                    capture_pin->QueryInterface (IID_IKsPropertySet, (LPVOID *) & pKs);
                if (SUCCEEDED (hres) && pKs) {
                    DWORD cbReturned;
                    GUID pin_category;
                    RPC_STATUS rpcstatus;

                    hres =
                        pKs->Get (AMPROPSETID_Pin,
                                  AMPROPERTY_PIN_CATEGORY, NULL, 0, &pin_category, sizeof (GUID),
                                  &cbReturned);

                    /* we only want capture pins */
                    if (UuidCompare (&pin_category, (UUID *) & PIN_CATEGORY_CAPTURE,
                                     &rpcstatus) == 0) {
                        {
                            GstCaps *caps =
                                gst_dshowvideosrc_getcaps_from_streamcaps (src, capture_pin);
                            if (caps) {
                                gst_caps_append (src->caps, caps);
                            } else {
                                caps = gst_dshowvideosrc_getcaps_from_enum_mediatypes (src, capture_pin);
                                if (caps)
                                    gst_caps_append (src->caps, caps);
                            }
                        }
                    }
                    pKs->Release ();
                }
                capture_pin->Release ();
            }
            enumpins->Release ();
        }
    }

    if (unidevice) {
        g_free (unidevice);
    }

    if (src->caps) {
        if (filter) {
            return gst_caps_intersect_full (filter, src->caps,
                                            GST_CAPS_INTERSECT_FIRST);
        } else {
            return gst_caps_ref (src->caps);
        }
    }

    return NULL;
}