HRESULT videoInputCamera::getDevice(IBaseFilter** gottaFilter, int deviceId, WCHAR * wDeviceName, char * nDeviceName){
	BOOL done = false;
	int deviceCounter = 0;

	// Create the System Device Enumerator.
	ICreateDevEnum *pSysDevEnum = NULL;
	HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void **)&pSysDevEnum);
	if (FAILED(hr))
	{
		return hr;
	}

	// Obtain a class enumerator for the video input category.
	IEnumMoniker *pEnumCat = NULL;
	hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0);

	if (hr == S_OK)
	{
		// Enumerate the monikers.
		IMoniker *pMoniker = NULL;
		ULONG cFetched;
		while ((pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) && (!done))
		{
			if(deviceCounter == deviceId)
			{
				// Bind the first moniker to an object
				IPropertyBag *pPropBag;
				hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag);
				if (SUCCEEDED(hr))
				{
					// To retrieve the filter's friendly name, do the following:
					VARIANT varName;
					VariantInit(&varName);
					hr = pPropBag->Read(L"FriendlyName", &varName, 0);
					if (SUCCEEDED(hr))
					{

						//copy the name to nDeviceName & wDeviceName
						int count = 0;
						while( varName.bstrVal[count] != 0x00 ) {
							wDeviceName[count] = varName.bstrVal[count];
							nDeviceName[count] = (char)varName.bstrVal[count];
							count++;
						}
						wDeviceName[count] = 0;
						nDeviceName[count] = 0;

						// We found it, so send it back to the caller
						hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)gottaFilter);
						done = true;
					}
					VariantClear(&varName);
					pPropBag->Release();
					pPropBag = NULL;
				}
			}
			// Release every enumerated moniker, not just the matching one, so the others are not leaked.
			pMoniker->Release();
			pMoniker = NULL;
			deviceCounter++;
		}
		pEnumCat->Release();
		pEnumCat = NULL;
	}
	pSysDevEnum->Release();
	pSysDevEnum = NULL;

	if (done) {
		return hr;	// found it, return native error
	} else {
		return VFW_E_NOT_FOUND;	// didn't find it error
	}
}
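A minimal usage sketch for the helper above (the videoInputCamera instance, buffer sizes, and graph handling are illustrative assumptions, not part of the original example):

// Hedged usage sketch (assumes a videoInputCamera instance named "cam" and 256-char buffers).
void ExampleGetDeviceUsage(videoInputCamera* cam)
{
	CoInitialize(NULL);					// COM must be initialized on this thread

	IBaseFilter* pCaptureFilter = NULL;
	WCHAR wName[256] = {0};
	char  nName[256] = {0};

	HRESULT hr = cam->getDevice(&pCaptureFilter, 0, wName, nName);	// device index 0
	if (SUCCEEDED(hr)) {
		// ... add pCaptureFilter to a filter graph here ...
		pCaptureFilter->Release();		// release the caller-owned reference when done
	}

	CoUninitialize();
}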
Example #2
HRESULT CKTVDlg::initCapture()
{
    int mixerNum = 0;
    int microphoneNum = 0;
    ICreateDevEnum* sysDevEnum = NULL;
    HRESULT r = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, 
                                 CLSCTX_INPROC, IID_ICreateDevEnum, 
                                 (void **)&sysDevEnum);

    if (FAILED(r))
        return r;

    IEnumMoniker *pEnumCat = NULL;
    r = sysDevEnum->CreateClassEnumerator(CLSID_AudioInputDeviceCategory, &pEnumCat, 0);

    if (SUCCEEDED(r))
    {
        // Enumerate all filters using the category enumerator
        r = EnumFiltersAndMonikersToList(pEnumCat);

        pEnumCat->Release();
    }
    sysDevEnum->Release();


    IMoniker *pMoniker=0;
    IBaseFilter* filter;
    for (int i = 0; i < (int)m_captureFilterVec.size(); i++)
    {
        pMoniker = m_captureFilterVec[i].Moniker;
        // Use the moniker to create the specified audio capture device
        r = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&filter);   
        if (FAILED(r))
            return r;

        r = EnumPinsOnFilter(filter, PINDIR_INPUT, i);
        if (FAILED(r))
        {
            filter->Release();
            return r;
        }

        filter->Release();
    }
    
    return S_OK;
    

//     if (GetWinVersion() >= WINVERSION_VISTA)
//     {
//         IMMDeviceEnumerator* pEnumerator = NULL;
//         HRESULT hr = CoCreateInstance(__uuidof(MMDeviceEnumerator), NULL,
//             CLSCTX_ALL, __uuidof(IMMDeviceEnumerator),
//             (void**)&pEnumerator);
// 
//         IMMDeviceCollection* pCollection;
//         hr = pEnumerator->EnumAudioEndpoints(
//             eCapture, DEVICE_STATE_ACTIVE,
//             &pCollection);
// 
// 
//         UINT  count;
//         hr = pCollection->GetCount(&count);
//         IMMDevice  *pEndpoint = NULL;
//         LPWSTR pwszID = NULL;
//         IPropertyStore  *pProps = NULL;
// 
//         // Each loop prints the name of an endpoint device.
//         for (ULONG i = 0; i < count; i++)
//         {
//             // Get pointer to endpoint number i.
//             hr = pCollection->Item(i, &pEndpoint);
// 
// 
//             // Get the endpoint ID string.
//             hr = pEndpoint->GetId(&pwszID);
// 
// 
//             hr = pEndpoint->OpenPropertyStore(
//                 STGM_READ, &pProps);
// 
//             PROPVARIANT varName;
//             // Initialize container for property value.
//             PropVariantInit(&varName);
// 
//             // Get the endpoint's friendly-name property.
//             hr = pProps->GetValue(
//                 PKEY_Device_FriendlyName, &varName);
// 
//             // Print endpoint friendly name and endpoint ID.
//             //         printf("Endpoint %d: \"%S\" (%S)\n",
//             //             i, varName.pwszVal, pwszID);
//             wstring str = varName.pwszVal;
//             if (str.find(L"混音") != wstring::npos || 
//                 str.find(L"Stereo Mix") != wstring::npos)
//             {
//                 mixerNum++;
//             }
// 
//             CoTaskMemFree(pwszID);
//             pwszID = NULL;
//             PropVariantClear(&varName);
//             SAFE_RELEASE(pProps);
//             SAFE_RELEASE(pEndpoint);
//         }
//         SAFE_RELEASE(pEnumerator);
//         SAFE_RELEASE(pCollection);
//     }
//     else
//     {
//         HMIXER m_hmx;
//         UINT m_uMxId; // mixer ID
//         MMRESULT err = mixerOpen(&m_hmx, 0,(DWORD)0, 0, 0);
//         if (MMSYSERR_NOERROR != err)
//         {
//             return E_FAIL;
//         }
//         err = mixerGetID((HMIXEROBJ)m_hmx, &m_uMxId, MIXER_OBJECTF_HMIXER);
//         if (MMSYSERR_NOERROR != err)
//         {
//             return E_FAIL;
//         }
//         MIXERCAPS     mixcaps;
//         unsigned long iNumDevs;
// 
//         /* Get the number of Mixer devices in this computer */
//         iNumDevs = mixerGetNumDevs();
// 
//         /* Go through all of those devices, displaying their IDs/names */
//         for (int i = 0; i < iNumDevs; i++)
//         {
//             /* Get info about the next device */
//             if (!mixerGetDevCaps(i, &mixcaps, sizeof(MIXERCAPS)))
//             {
//                 /* Display its ID number and name */
//                 wstring str =  mixcaps.szPname;
//                 if (str.find(L"立体声混音") != wstring::npos || 
//                     str.find(L"Stereo Mix") != wstring::npos)
//                 {
//                     mixerNum++;
//                 }
//                 //OutputDebugString(str);
//             }
//         }
// 
//         int num = waveInGetNumDevs();
//         WAVEINCAPS waveinCaps;
//         for (int i = 0; i <num; i++)
//         {
//             if (!waveInGetDevCaps(i, &waveinCaps, sizeof(WAVEINCAPS)))
//             {
//                 wchar_t str[1024];
//                 _sntprintf_s(str, 1024, L"Wavein ID #%u: %s\r\n", i, waveinCaps.szPname);
// 
//             }
//         }
//     }
//     if (mixerNum == 0)
//         return E_FAIL;
// 
// 
//     if (mixerNum != 1)
//         return S_FALSE;
// 
// 
//     return S_OK;
}
Example #3
DirectShowScanner::DirectShowScanner() {
   ICreateDevEnum *pDevEnum      = 0;
   HRESULT         hr;
   int             devNum;
   char            nameBuf[80];
   
   // Reference:  Pesce, pp 54-56.   

   debug_msg("new DirectShowScanner()\n");

   // Initialize the COM subsystem
   hr=CoInitialize(NULL);
   if (FAILED(hr)) {
	   debug_msg("Failed COM subsystem initialisation.\n");
	   	return;
   }

   // Create a helper object to find the capture devices.
   hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (LPVOID*)&pDevEnum);
   if (FAILED(hr)) {
   		debug_msg("Failed to Create a helper object to find the DS capture devices.\n");
		CoUninitialize();
		return;
   }

   IEnumMoniker *pEnum    = 0;
   IMoniker     *pMoniker = 0;
   IPropertyBag *pPropBag = 0;
   VARIANT      varName;

   // Get an enumerator over video capture filters
   hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0);
   //showErrorMessage(hr);
   if (FAILED(hr) || pEnum == 0) {
   		debug_msg("Failed to Get an enumerator over DS video capture filters.\n");
		CoUninitialize();
		return;
   }

   // Get the capture filter for each device installed, up to NUM_DEVS devices
   for( devNum=0; devNum < NUM_DEVS; ++devNum) {
      if ( pEnum->Next(1, &pMoniker, NULL) == S_OK ) {

         hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag);
	     if (FAILED(hr)) {
			debug_msg("Failed to Get propbag bound to storage on DS dev: %d\n", devNum);
			pMoniker->Release();
			continue;
		 }
         //showErrorMessage(hr);
         debug_msg("propbag bound to storage ok= %d\n", hr);

         VariantInit(&varName);
         hr = pPropBag->Read(L"FriendlyName", &varName, 0);
		 if (FAILED(hr)) {
			debug_msg("Failed to Get friendly name read on DS dev: %d\n", devNum);
			pPropBag->Release();
			pMoniker->Release();
			continue;
		 }
         //showErrorMessage(hr);
         debug_msg("friendly name read ok= %d\n", hr);

         // Need this macro in atlconv.h to go from bStr to char* - msp
         USES_CONVERSION;
         strncpy(nameBuf, W2A(varName.bstrVal), sizeof(nameBuf) - 1);
         nameBuf[sizeof(nameBuf) - 1] = '\0';

         debug_msg("DirectShowScanner::DirectShowScanner():  found nameBuf/FriendlyName=%s\n", nameBuf);

         // needs work, but don't add drivers that look like VFW drivers - msp
         if( (strstr(nameBuf, "VFW") == NULL) ) {
            hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void **)(pCaptureFilter+devNum));
            //showErrorMessage(hr);
			if (FAILED(hr)) {
				debug_msg("Failed to bind the capture filter on DS dev: %d\n", devNum);
				VariantClear(&varName);
				pPropBag->Release();
				pMoniker->Release();
				continue;
			}
			debug_msg("capture filter bound ok= %d\n", hr);
            devs_[devNum] = new DirectShowDevice(strdup(nameBuf), pCaptureFilter[devNum]);
         } else {
            debug_msg("discarding an apparent VFW device= %s\n", nameBuf);
			devs_[devNum] = NULL;
         }

         VariantClear(&varName);
         pPropBag->Release();
         pMoniker->Release();
      }
   }

   // Release these objects so COM can release their memory
   pEnum->Release();
   pDevEnum->Release();
}
bool GetVideoCaptureDevices(std::vector<IBaseFilter*>& captureSourceFilterList, std::vector<std::wstring>& captureDeviceNameList)
{
	HRESULT hr;
	CoInitialize(NULL);
	ComUninitial comUninitial;

	ICreateDevEnum* createDevEnum; // create the system device enumerator
	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void**)&createDevEnum);
	if(FAILED(hr))
	{
		ErrorPrint("Crate system device enum error", hr);
		return false;
	}
	ComReleaser createDevEnumReleaser(createDevEnum);

	IEnumMoniker *enumMoniker;
	hr = createDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &enumMoniker, 0); // create an enumerator for video input devices
	if(hr != S_OK)
	{
		ErrorPrint("Create class enumerator error", hr);
		return false;
	}
	ComReleaser enumMonikerReleaser(enumMoniker);

	IMoniker *moniker;
	while(S_OK == enumMoniker->Next(1, &moniker, NULL))  // enumerate the video input devices
	{
		ComReleaser monikerReleaser(moniker);
		IPropertyBag* propBag;
		hr = moniker->BindToStorage(NULL, NULL, IID_IPropertyBag, (void**)&propBag);
		if(FAILED(hr))
		{
			ErrorPrint("Bind to storage error", hr);
			return false;
		}
		ComReleaser propBagReleaser(propBag);

		VARIANT varName;
		VariantInit(&varName);
		hr = propBag->Read(L"FriendlyName", &varName, NULL);
		if(FAILED(hr))
		{
			ErrorPrint("Read friendly name error", hr);
			VariantClear(&varName);
			continue;
		}

		IBaseFilter* captureSourceFilter;
		hr = moniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&captureSourceFilter); // create the source filter for this video input device; the caller must release it manually
		if (FAILED(hr))
		{
			ErrorPrint(wstring(L"Get capture device ") + varName.bstrVal + L" source filter error", hr);
			VariantClear(&varName);
			continue;
		}

		captureDeviceNameList.push_back(varName.bstrVal);
		captureSourceFilterList.push_back(captureSourceFilter);
		VariantClear(&varName);
	}

#ifdef _DEBUG
	std::cout << "Start printing all capture device names:\n";
	using namespace boost::lambda;
	std::for_each(captureDeviceNameList.begin(), captureDeviceNameList.end(), std::wcout << _1 << L"\n");
#endif

	return true;
}
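The comment in the snippet notes that each returned source filter has to be released manually; a minimal caller sketch (names are illustrative) could look like this:

// Hedged usage sketch: enumerate the capture devices, use them, then release every returned filter.
void ListAndReleaseCaptureDevices()
{
	std::vector<IBaseFilter*> filters;
	std::vector<std::wstring> names;

	if (GetVideoCaptureDevices(filters, names))
	{
		for (size_t i = 0; i < filters.size(); ++i)
		{
			// ... use names[i] / filters[i], e.g. add the filter to a graph ...
			filters[i]->Release();	// the caller owns each filter reference
		}
	}
}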
static int v4w_configure_videodevice(V4wState *s)
{
    // Initialize COM
    CoInitialize(NULL);

    // get a Graph
    HRESULT hr= CoCreateInstance (CLSID_FilterGraph,
                                  NULL,
                                  CLSCTX_INPROC_SERVER,
                                  IID_IGraphBuilder, //IID_IBaseFilter,
                                  (void **)&s->m_pGraph);
    if(FAILED(hr))
    {
        return -1;
    }

    // get a CaptureGraphBuilder2
    hr= CoCreateInstance (CLSID_CaptureGraphBuilder2,
                          NULL,
                          CLSCTX_INPROC_SERVER,
                          IID_ICaptureGraphBuilder2, //IID_IBaseFilter,
                          (void **)&s->m_pBuilder);
    if(FAILED(hr))
    {
        return -2;
    }

    // connect capture graph builder with the graph
    s->m_pBuilder->SetFiltergraph(s->m_pGraph);

    // get mediacontrol so we can start and stop the filter graph
    hr=s->m_pGraph->QueryInterface (IID_IMediaControl, (void **)&s->m_pControl);
    if(FAILED(hr))
    {
        return -3;
    }


    ICreateDevEnum *pCreateDevEnum = NULL;
    IEnumMoniker *pEnumMoniker = NULL;
    IMoniker *pMoniker = NULL;

    ULONG nFetched = 0;

    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                          IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
    if(FAILED(hr))
    {
        return -4;
    }

    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
            &pEnumMoniker, 0);
    if (FAILED(hr) || pEnumMoniker == NULL) {
        //printf("no device\n");
        return -5;
    }

    pEnumMoniker->Reset();

    int pos=0;
    while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
    {
        IPropertyBag *pBag;
        hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag );
        if( hr != S_OK )
            continue;

        if (s->dev[0]=='\0')
        {
            pBag->Release();
            break;
        }

        VARIANT var;
        VariantInit(&var);
        hr = pBag->Read( L"FriendlyName", &var, NULL );
        if( hr != S_OK )
        {
            pMoniker->Release();
            continue;
        }
        //USES_CONVERSION;
        char szName[256];

        WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0);
        VariantClear(&var);

        if (strcmp(szName, s->dev)==0)
        {
            pBag->Release();
            break;
        }

        pMoniker->Release();
        pBag->Release();
        pMoniker=NULL;
        pBag=NULL;
    }

    if(pMoniker==NULL)
    {
        // No device matched the requested name: fall back to the first capture device.
        pEnumMoniker->Reset();
        if (S_OK != pEnumMoniker->Next(1, &pMoniker, &nFetched))
            pMoniker = NULL;
    }

    if(pMoniker==NULL)
    {
        return -6;
    }

    hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
    if(FAILED(hr))
    {
        return -7;
    }

    s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

    pMoniker->Release();
    pEnumMoniker->Release();
    pCreateDevEnum->Release();


    GUID pPinCategory;

    if (try_format(s->m_pDeviceFilter, s->pix_fmt, &pPinCategory)==0)
        s->pix_fmt = s->pix_fmt;
    else if (try_format(s->m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0)
        s->pix_fmt = MS_YUV420P;
    else if (try_format(s->m_pDeviceFilter,MS_YUY2, &pPinCategory)==0)
        s->pix_fmt = MS_YUY2;
    else if (try_format(s->m_pDeviceFilter,MS_YUYV, &pPinCategory)==0)
        s->pix_fmt = MS_YUYV;
    else if (try_format(s->m_pDeviceFilter,MS_UYVY, &pPinCategory)==0)
        s->pix_fmt = MS_UYVY;
    else if (try_format(s->m_pDeviceFilter,MS_RGB24, &pPinCategory)==0)
        s->pix_fmt = MS_RGB24;
    else
    {
        ms_error("Unsupported video pixel format.");
        return -8;
    }

    if (s->pix_fmt == MS_YUV420P)
        ms_message("Driver supports YUV420P, using that format.");
    else if (s->pix_fmt == MS_YUY2)
        ms_message("Driver supports YUY2 (YUYV), using that format.");
    else if (s->pix_fmt == MS_YUYV)
        ms_message("Driver supports YUV422, using that format.");
    else if (s->pix_fmt == MS_UYVY)
        ms_message("Driver supports UYVY, using that format.");
    else if (s->pix_fmt == MS_RGB24)
        ms_message("Driver supports RGB24, using that format.");

    if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H);
    else
    {
        ms_error("No supported size found for format.");
        /* size not supported? */
        return -9;
    }

    return 0;
}
bool directx_camera_server::open_and_find_parameters(const int which, unsigned width, unsigned height)
{
  HRESULT hr;

  //-------------------------------------------------------------------
  // Create COM and DirectX objects needed to access a video stream.

  // Initialize COM.  This must have a matching uninitialize somewhere before
  // the object is destroyed.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoInitialize\n");
#endif
  CoInitialize(NULL);

  // Create the filter graph manager
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance FilterGraph\n");
#endif
  CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, 
		      IID_IGraphBuilder, (void **)&_pGraph);
  if (_pGraph == NULL) {
    fprintf(stderr, "directx_camera_server::open_and_find_parameters(): Can't create graph manager\n");
    return false;
  }
  _pGraph->QueryInterface(IID_IMediaControl, (void **)&_pMediaControl);
  _pGraph->QueryInterface(IID_IMediaEvent, (void **)&_pEvent);

  // Create the Capture Graph Builder.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance CaptureGraphBuilder2\n");
#endif
  CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC, 
      IID_ICaptureGraphBuilder2, (void **)&_pBuilder);
  if (_pBuilder == NULL) {
    fprintf(stderr, "directx_camera_server::open_and_find_parameters(): Can't create graph builder\n");
    return false;
  }

  // Associate the graph with the builder.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before SetFilterGraph\n");
#endif
  _pBuilder->SetFiltergraph(_pGraph);

  //-------------------------------------------------------------------
  // Go find a video device to use: in this case, we are using the Nth
  // one we find, where the number N is the "which" parameter.

  // Create the system device enumerator.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance SystemDeviceEnum\n");
#endif
  ICreateDevEnum *pDevEnum = NULL;
  CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC, 
      IID_ICreateDevEnum, (void **)&pDevEnum);
  if (pDevEnum == NULL) {
    fprintf(stderr, "directx_camera_server::open_and_find_parameters(): Can't create device enumerator\n");
    return false;
  }

  // Create an enumerator for video capture devices.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CreateClassEnumerator\n");
#endif
  IEnumMoniker *pClassEnum = NULL;
  pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
  if (pClassEnum == NULL) {
    fprintf(stderr, "directx_camera_server::open_and_find_parameters(): Can't create video enumerator (no cameras?)\n");
    pDevEnum->Release();
    return false;
  }

#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before Loop over enumerators\n");
#endif
  ULONG cFetched;
  IMoniker *pMoniker = NULL;
  IBaseFilter *pSrc = NULL;
  // Skip (which - 1) cameras
  int i;
  for (i = 0; i < which-1 ; i++) {
    if (pClassEnum->Next(1, &pMoniker, &cFetched) != S_OK) {
      fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't open camera (not enough cameras)\n");
      pClassEnum->Release();
      pDevEnum->Release();
      return false;
    }
    pMoniker->Release();	// release each skipped device's moniker
  }
  // Take the next camera and bind it
  if (pClassEnum->Next(1, &pMoniker, &cFetched) == S_OK) {
    // Bind the first moniker to a filter object.
    pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pSrc);
    pMoniker->Release();
  } else {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't open camera (not enough cameras)\n");
    pClassEnum->Release();
    pDevEnum->Release();
    return false;
  }

  pClassEnum->Release();
  pDevEnum->Release();

  //-------------------------------------------------------------------
  // Construct the sample grabber callback handler that will be used
  // to receive image data from the sample grabber.
  if ( (_pCallback = new directx_samplegrabber_callback()) == NULL) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't create sample grabber callback handler (out of memory?)\n");
    return false;
  }

  //-------------------------------------------------------------------
  // Construct the sample grabber that will be used to snatch images from
  // the video stream as they go by.  Set its media type and callback.

  // Create the Sample Grabber.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance SampleGrabber\n");
#endif
  CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
      IID_IBaseFilter, reinterpret_cast<void**>(&_pSampleGrabberFilter));
  if (_pSampleGrabberFilter == NULL) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't get SampleGrabber filter (not DirectX 8.1+?)\n");
    return false;
  }
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before QueryInterface\n");
#endif
  _pSampleGrabberFilter->QueryInterface(IID_ISampleGrabber,
      reinterpret_cast<void**>(&_pGrabber));

  // Set the media type to video
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before SetMediaType\n");
#endif
  AM_MEDIA_TYPE mt;
  // Ask for video media producers that produce 8-bit RGB
  ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
  mt.majortype = MEDIATYPE_Video;	  // Ask for video media producers
  mt.subtype = MEDIASUBTYPE_RGB24;	  // Ask for 24-bit RGB (8 bits per channel)
  _pGrabber->SetMediaType(&mt);

  //-------------------------------------------------------------------
  // Ask for the video resolution that has been passed in.
  // This code is based on
  // intuiting that we need to use the SetFormat call on the IAMStreamConfig
  // interface; this interface is described in the help pages.
  // If the width and height are specified as 0, then they are not set
  // in the header, letting them use whatever is the default.
  if ( (width != 0) && (height != 0) ) {
    _pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, pSrc,
			      IID_IAMStreamConfig, (void **)&_pStreamConfig);
    if (_pStreamConfig == NULL) {
      fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't get StreamConfig interface\n");
      return false;
    }

    ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
    mt.majortype = MEDIATYPE_Video;	  // Ask for video media producers
    mt.subtype = MEDIASUBTYPE_RGB24;	  // Ask for 24-bit RGB (8 bits per channel)
    mt.pbFormat = (BYTE*)CoTaskMemAlloc(sizeof(VIDEOINFOHEADER));
    VIDEOINFOHEADER *pVideoHeader = (VIDEOINFOHEADER*)mt.pbFormat;
    ZeroMemory(pVideoHeader, sizeof(VIDEOINFOHEADER));
    pVideoHeader->bmiHeader.biBitCount = 24;
    pVideoHeader->bmiHeader.biWidth = width;
    pVideoHeader->bmiHeader.biHeight = height;
    pVideoHeader->bmiHeader.biPlanes = 1;
    pVideoHeader->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    pVideoHeader->bmiHeader.biSizeImage = DIBSIZE(pVideoHeader->bmiHeader);

    // Set the format type and size.
    mt.formattype = FORMAT_VideoInfo;
    mt.cbFormat = sizeof(VIDEOINFOHEADER);

    // Set the sample size.
    mt.bFixedSizeSamples = TRUE;
    mt.lSampleSize = DIBSIZE(pVideoHeader->bmiHeader);

    // Make the call to actually set the video type to what we want.
    if (_pStreamConfig->SetFormat(&mt) != S_OK) {
      fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't set resolution to %dx%d\n",
	pVideoHeader->bmiHeader.biWidth, pVideoHeader->bmiHeader.biHeight);
      return false;
    }

    // Clean up the pbFormat header memory we allocated above.
    CoTaskMemFree(mt.pbFormat);
  }

  //-------------------------------------------------------------------
  // Create a NULL renderer that will be used to discard the video frames
  // on the output pin of the sample grabber

#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance NullRenderer\n");
#endif
  IBaseFilter *pNull = NULL;
  CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER,
      IID_IBaseFilter, reinterpret_cast<void**>(&pNull));

  //-------------------------------------------------------------------
  // Build the filter graph.  First add the filters and then connect them.

  // pSrc is the capture filter for the video device we found above.
  _pGraph->AddFilter(pSrc, L"Video Capture");

  // Add the sample grabber filter
  _pGraph->AddFilter(_pSampleGrabberFilter, L"SampleGrabber");

  // Add the null renderer filter
  _pGraph->AddFilter(pNull, L"NullRenderer");

  // Connect the output of the video reader to the sample grabber input
  ConnectTwoFilters(_pGraph, pSrc, _pSampleGrabberFilter);

  // Connect the output of the sample grabber to the NULL renderer input
  ConnectTwoFilters(_pGraph, _pSampleGrabberFilter, pNull);

  //-------------------------------------------------------------------
  // XXX See if this is a video tuner card by querying for that interface.
  // Set it to read the video channel if it is one.
  IAMTVTuner  *pTuner = NULL;
  hr = _pBuilder->FindInterface(NULL, NULL, pSrc, IID_IAMTVTuner, (void**)&pTuner);
  if (pTuner != NULL) {
#ifdef	DEBUG
    printf("directx_camera_server::open_and_find_parameters(): Found a TV Tuner!\n");
#endif

    //XXX Put code here.
    // Set the first input pin to use the cable as input
    hr = pTuner->put_InputType(0, TunerInputCable);
    if (FAILED(hr)) {
      fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't set input to cable\n");
    }

    // Set the channel on the video to be baseband (is this channel zero?)
    hr = pTuner->put_Channel(0, -1, -1);
    if (FAILED(hr)) {
      fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't set channel\n");
    }

    pTuner->Release();
  }
  

  //-------------------------------------------------------------------
  // Find _num_rows and _num_columns in the video stream.
  _pGrabber->GetConnectedMediaType(&mt);
  VIDEOINFOHEADER *pVih;
  if (mt.formattype == FORMAT_VideoInfo) {
      pVih = reinterpret_cast<VIDEOINFOHEADER*>(mt.pbFormat);
  } else {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't get video header type\n");
    return false;
  }

  // Number of rows and columns.  This is different if we are using a target
  // rectangle (rcTarget) than if we are not.
  if (IsRectEmpty(&pVih->rcTarget)) {
    _num_columns = pVih->bmiHeader.biWidth;
    _num_rows = pVih->bmiHeader.biHeight;
  } else {
    _num_columns = pVih->rcTarget.right;
    _num_rows = pVih->bmiHeader.biHeight;
    printf("XXX directx_camera_server::open_and_find_parameters(): Warning: may not work correctly with target rectangle\n");
  }
  _minX = 0;
  _maxX = _num_columns - 1;
  _minY = 0;
  _maxY = _num_rows - 1;
#ifdef DEBUG
  printf("Got %dx%d video\n", _num_columns, _num_rows);
#endif

  // Make sure that the image is not compressed and that we have 8 bits
  // per pixel.
  if (pVih->bmiHeader.biCompression != BI_RGB) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Compression not RGB\n");
    switch (pVih->bmiHeader.biCompression) {
      case BI_RLE8:
	fprintf(stderr,"  (It is BI_RLE8)\n");
	break;
      case BI_RLE4:
	fprintf(stderr,"  (It is BI_RLE4)\n");
	break;
      case BI_BITFIELDS:
	fprintf(stderr,"  (It is BI_BITFIELDS)\n");
	break;
      default:
	fprintf(stderr,"  (Unknown compression type)\n");
    }
    return false;
  }
  int BytesPerPixel = pVih->bmiHeader.biBitCount / 8;
  if (BytesPerPixel != 3) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Not 3 bytes per pixel (%d)\n",
      pVih->bmiHeader.biBitCount);
    return false;
  }

  // A negative height indicates that the images are stored non-inverted in Y
  // Not sure what to do with images that have negative height -- need to
  // read the book some more to find out.
  if (_num_rows < 0) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Num Rows is negative (internal error)\n");
    return false;
  }

  // Find the stride to take when moving from one row of video to the
  // next.  This is rounded up to the nearest DWORD.
  _stride = (_num_columns * BytesPerPixel + 3) & ~3;

  // Set the callback, where '0' means 'use the SampleCB callback'
  _pGrabber->SetCallback(_pCallback, 0);

  //-------------------------------------------------------------------
  // Release resources that won't be used later and return
  pSrc->Release();
  pNull->Release();
  return true;
}
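The example above calls a ConnectTwoFilters helper that is not shown. A sketch in the style of the classic DirectShow SDK helper (find an unconnected output pin on the upstream filter, an unconnected input pin on the downstream filter, and let the graph connect them) might look like this:

// Sketch of a ConnectTwoFilters helper; not part of the example as given.
HRESULT GetUnconnectedPin(IBaseFilter *pFilter, PIN_DIRECTION dir, IPin **ppPin)
{
    *ppPin = NULL;
    IEnumPins *pEnum = NULL;
    HRESULT hr = pFilter->EnumPins(&pEnum);
    if (FAILED(hr)) return hr;

    IPin *pPin = NULL;
    while (pEnum->Next(1, &pPin, NULL) == S_OK) {
        PIN_DIRECTION thisDir;
        pPin->QueryDirection(&thisDir);
        if (thisDir == dir) {
            IPin *pTmp = NULL;
            if (pPin->ConnectedTo(&pTmp) == S_OK) {   // already connected, skip it
                pTmp->Release();
            } else {                                  // unconnected pin of the right direction
                pEnum->Release();
                *ppPin = pPin;                        // caller takes the reference
                return S_OK;
            }
        }
        pPin->Release();
    }
    pEnum->Release();
    return E_FAIL;   // no matching unconnected pin found
}

HRESULT ConnectTwoFilters(IGraphBuilder *pGraph, IBaseFilter *pSrc, IBaseFilter *pDest)
{
    IPin *pOut = NULL, *pIn = NULL;
    HRESULT hr = GetUnconnectedPin(pSrc, PINDIR_OUTPUT, &pOut);
    if (FAILED(hr)) return hr;
    hr = GetUnconnectedPin(pDest, PINDIR_INPUT, &pIn);
    if (FAILED(hr)) { pOut->Release(); return hr; }
    hr = pGraph->Connect(pOut, pIn);   // intelligent connect inserts converters if needed
    pOut->Release();
    pIn->Release();
    return hr;
}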
	void TestCamera()
	{
		InitOpenCL();
		//TCHAR szDeviceName[80];
		//TCHAR szDeviceVersion[80];

		//for (int wIndex = 0; wIndex < 10; wIndex++) 
		//{
		//	if (capGetDriverDescription(
		//		wIndex, 
		//		szDeviceName, 
		//		sizeof (szDeviceName), 
		//		szDeviceVersion, 
		//		sizeof (szDeviceVersion)
		//		)) 
		//	{
		//		// Append name to list of installed capture drivers
		//		// and then let the user select a driver to use.
		//	}
		//} 

		//HWND hWndC = capCreateCaptureWindow(TEXT("PunkCapture"),
		//	WS_CHILD | WS_VISIBLE, 0, 0, 160, 120, *System::Window::Instance(), 1);

		//SendMessage (hWndC, WM_CAP_DRIVER_CONNECT, 0, 0L); 
		//// 
		//// Or, use the macro to connect to the MSVIDEO driver: 
		//// fOK = capDriverConnect(hWndC, 0); 
		//// 
		//// Place code to set up and capture video here. 
		//// 
		////capDriverDisconnect (hWndC); 

		//CAPDRIVERCAPS CapDriverCaps = { }; 
		//CAPSTATUS     CapStatus = { };

		//capDriverGetCaps(hWndC, &CapDriverCaps, sizeof(CAPDRIVERCAPS)); 

		//// Video source dialog box. 
		//if (CapDriverCaps.fHasDlgVideoSource)
		//{
		//	capDlgVideoSource(hWndC); 
		//}

		//// Video format dialog box. 
		//if (CapDriverCaps.fHasDlgVideoFormat) 
		//{
		//	capDlgVideoFormat(hWndC); 

		//	// Are there new image dimensions?
		//	capGetStatus(hWndC, &CapStatus, sizeof (CAPSTATUS));

		//	// If so, notify the parent of a size change.
		//} 

		//// Video display dialog box. 
		//if (CapDriverCaps.fHasDlgVideoDisplay)
		//{
		//	capDlgVideoDisplay(hWndC); 
		//}


		HRESULT hr;
		IGraphBuilder*  graph= 0;  hr = CoCreateInstance( CLSID_FilterGraph, 0, CLSCTX_INPROC,IID_IGraphBuilder, (void **)&graph );
		IMediaControl*  ctrl = 0;  hr = graph->QueryInterface( IID_IMediaControl, (void **)&ctrl );

		ICreateDevEnum* devs = 0;  hr = CoCreateInstance (CLSID_SystemDeviceEnum, 0, CLSCTX_INPROC, IID_ICreateDevEnum, (void **) &devs);
		IEnumMoniker*   cams = 0;  hr = devs?devs->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, &cams, 0):0;  
		IMoniker*       mon  = 0;  hr = cams?cams->Next (1,&mon,0):0;  // get first found capture device (webcam?)
		IBaseFilter*    cam  = 0;  hr = mon?mon->BindToObject(0,0,IID_IBaseFilter, (void**)&cam):0;
		hr = graph->AddFilter(cam, L"Capture Source"); // add web cam to graph as source
		IEnumPins*      pins = 0;  hr = cam?cam->EnumPins(&pins):0;   // we need output pin to autogenerate rest of the graph
		IPin*           pin  = 0;  hr = pins?pins->Next(1,&pin, 0):0; // via graph->Render
		hr = graph->Render(pin); // graph builder now builds whole filter chain including MJPG decompression on some webcams
		IEnumFilters*   fil  = 0;  hr = graph->EnumFilters(&fil); // from all newly added filters
		IBaseFilter*    rnd  = 0;  hr = fil->Next(1,&rnd,0); // we find last one (renderer)
		hr = rnd->EnumPins(&pins);  // because the data we are interested in is pumped to the renderer's input pin
		hr = pins->Next(1,&pin, 0); // via Receive member of IMemInputPin interface
		IMemInputPin*   mem  = 0;  hr = pin->QueryInterface(IID_IMemInputPin,(void**)&mem);

		DsHook(mem,6,Receive); // so we redirect it to our own proc to grab image data

		hr = ctrl->Run();   

	};
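The snippet relies on a DsHook helper that is not shown; it is not a DirectShow API. A common way such a helper is implemented is a raw COM vtable patch, and entry 6 of IMemInputPin's vtable is Receive. A minimal sketch under that assumption:

// Hedged sketch of a vtable patch like DsHook (assumption: a user-supplied helper, not DirectShow).
// It swaps vtable entry n of a COM object for our own function and remembers the original so the
// replacement can forward to it. The call site may need a cast such as (void*)Receive.
#include <windows.h>

static void *g_originalProc = NULL;	// original IMemInputPin::Receive, saved for forwarding

void DsHook(void *comObject, int n, void *newProc)
{
	void **vtable = *(void***)comObject;	// the first pointer-sized member is the vtable
	DWORD oldProtect;
	VirtualProtect(&vtable[n], sizeof(void*), PAGE_EXECUTE_READWRITE, &oldProtect);
	g_originalProc = vtable[n];				// keep the original entry (index 6 == Receive)
	vtable[n] = newProc;					// redirect it to our callback
	VirtualProtect(&vtable[n], sizeof(void*), oldProtect, &oldProtect);
}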
int  Captura::Enumerar(HWND hList)
{
	if (!hList)
		return  -1;

	
	int id = 0;

	//CComPtr<ICreateDevEnum> pCreateDevEnum;
    ICreateDevEnum * pCreateDevEnum;
    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
			  IID_ICreateDevEnum, (void**)&pCreateDevEnum);
    if (hr != NOERROR)
	{

		return -1;
	}



//    CComPtr<IEnumMoniker> pEm;
    IEnumMoniker *pEm;
    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
								&pEm, 0);
    if (hr != NOERROR) 
	{
		pCreateDevEnum->Release();
		return -1;
	}

    pEm->Reset();
    ULONG cFetched;
    IMoniker *pM;
    while(hr = pEm->Next(1, &pM, &cFetched), hr==S_OK)
    {
		IPropertyBag *pBag;
		hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pBag);
		if(SUCCEEDED(hr)) 
		{
			VARIANT var;
			var.vt = VT_BSTR;
			hr = pBag->Read(L"FriendlyName", &var, NULL);
			if (hr == NOERROR) 
			{
				TCHAR str[2048];		


				id++;
				WideCharToMultiByte(CP_ACP,0,var.bstrVal, -1, str, 2048, NULL, NULL);


				(long)SendMessage(hList, CB_ADDSTRING, 0,(LPARAM)str);

				SysFreeString(var.bstrVal);
			}
			pBag->Release();
		}
		pM->Release();
    }
	pEm->Release();
	pCreateDevEnum->Release();
	return id;
}
Example #9
static int v4w_open_videodevice(V4wState *s)
{
    // Initialize COM
    CoInitialize(NULL);

    // get a Graph
    HRESULT hr=s->m_pGraph.CoCreateInstance(CLSID_FilterGraph);
    if(FAILED(hr))
    {
        return -1;
    }

    // get a CaptureGraphBuilder2
    hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder2);
    if(FAILED(hr))
    {
        return -2;
    }

    // connect capture graph builder with the graph
    s->m_pBuilder->SetFiltergraph(s->m_pGraph);

    // get mediacontrol so we can start and stop the filter graph
    hr=s->m_pGraph.QueryInterface(&(s->m_pControl));
    if(FAILED(hr))
    {
        return -3;
    }


#ifdef _DEBUG
    HANDLE m_hLogFile=CreateFile(L"DShowGraphLog.txt",GENERIC_READ|GENERIC_WRITE,FILE_SHARE_READ,NULL,OPEN_ALWAYS,FILE_ATTRIBUTE_NORMAL,NULL);
    if(m_hLogFile!=INVALID_HANDLE_VALUE)
    {
        hr=s->m_pGraph->SetLogFile((DWORD_PTR)m_hLogFile);
        /* ASSERT(SUCCEEDED(hr)); */
    }

    //AddGraphToRot(s->m_pGraph, &s->rotregvalue);
#endif

    ICreateDevEnum *pCreateDevEnum = NULL;
    IEnumMoniker *pEnumMoniker = NULL;
    IMoniker *pMoniker = NULL;

    ULONG nFetched = 0;

    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                          IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
    if(FAILED(hr))
    {
        return -4;
    }

    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
            &pEnumMoniker, 0);
    if (FAILED(hr) || pEnumMoniker == NULL) {
        //printf("no device\n");
        return -5;
    }

    pEnumMoniker->Reset();

    int pos=0;
    while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
    {
        if (pos>=s->devidx)
            break;
        pos++;
        pMoniker->Release();
        pMoniker=NULL;
    }
    if(pMoniker==NULL)
    {
        return -6;
    }

    hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
    if(FAILED(hr))
    {
        return -7;
    }

    s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

    pMoniker->Release();
    pEnumMoniker->Release();
    pCreateDevEnum->Release();

    if (try_format(s, s->pix_fmt)==0)
        s->pix_fmt = s->pix_fmt;
    else if (try_format(s,MS_YUV420P)==0)
        s->pix_fmt = MS_YUV420P;
    else if (try_format(s,MS_YUY2)==0)
        s->pix_fmt = MS_YUY2;
    else if (try_format(s,MS_YUYV)==0)
        s->pix_fmt = MS_YUYV;
    else if (try_format(s,MS_UYVY)==0)
        s->pix_fmt = MS_UYVY;
    else if (try_format(s,MS_RGB24)==0)
        s->pix_fmt = MS_RGB24;
    else
    {
        ms_error("Unsupported video pixel format.");
        return -8;
    }

    if (s->pix_fmt == MS_YUV420P)
        ms_message("Driver supports YUV420P, using that format.");
    else if (s->pix_fmt == MS_YUY2)
        ms_message("Driver supports YUY2 (YUYV), using that format.");
    else if (s->pix_fmt == MS_YUYV)
        ms_message("Driver supports YUV422, using that format.");
    else if (s->pix_fmt == MS_UYVY)
        ms_message("Driver supports UYVY, using that format.");
    else if (s->pix_fmt == MS_RGB24)
        ms_message("Driver supports RGB24, using that format.");

    if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height)==0)
        ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H);
    else
    {
        ms_error("No supported size found for format.");
        /* size not supported? */
        return -9;
    }

    // get DXFilter
    s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
    if(s->m_pDXFilter==NULL)
    {
        return -10;
    }
    s->m_pDXFilter->AddRef();

    CMediaType mt;
    mt.SetType(&MEDIATYPE_Video);

    GUID m = MEDIASUBTYPE_RGB24;
    if (s->pix_fmt == MS_YUV420P)
        m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
    else if (s->pix_fmt == MS_YUY2)
        m = MEDIASUBTYPE_YUY2;
    else if (s->pix_fmt == MS_YUYV)
        m = MEDIASUBTYPE_YUYV;
    else if (s->pix_fmt == MS_UYVY)
        m = MEDIASUBTYPE_UYVY;
    else if (s->pix_fmt == MS_RGB24)
        m = MEDIASUBTYPE_RGB24;
    mt.SetSubtype(&m);

    mt.formattype = FORMAT_VideoInfo;
    mt.SetTemporalCompression(FALSE);

    VIDEOINFO *pvi = (VIDEOINFO *)
                     mt.AllocFormatBuffer(sizeof(VIDEOINFO));
    if (NULL == pvi)
        return -11;
    ZeroMemory(pvi, sizeof(VIDEOINFO));

    if (s->pix_fmt == MS_YUV420P)
        pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
    else if (s->pix_fmt == MS_YUY2)
        pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
    else if (s->pix_fmt == MS_YUYV)
        pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','V');
    else if (s->pix_fmt == MS_UYVY)
        pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
    else if (s->pix_fmt == MS_RGB24)
        pvi->bmiHeader.biCompression = BI_RGB;

    if (s->pix_fmt == MS_YUV420P)
        pvi->bmiHeader.biBitCount = 12;
    else if (s->pix_fmt == MS_YUY2)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_YUYV)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_UYVY)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_RGB24)
        pvi->bmiHeader.biBitCount = 24;

    pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    pvi->bmiHeader.biWidth = s->vsize.width;
    pvi->bmiHeader.biHeight = s->vsize.height;
    pvi->bmiHeader.biPlanes = 1;
    pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
    pvi->bmiHeader.biClrImportant = 0;
    mt.SetSampleSize(pvi->bmiHeader.biSizeImage);
    mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

    hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
    if(FAILED(hr))
    {
        return -12;
    }

    hr = s->m_pDXFilter->SetCallback(Callback);
    if(FAILED(hr))
    {
        return -13;
    }

    hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
                                        (LPVOID *)&s->m_pIDXFilter);
    if(FAILED(hr))
    {
        return -14;
    }

    hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
    if(FAILED(hr))
    {
        return -15;
    }


    // get null renderer
    hr=s->m_pNullRenderer.CoCreateInstance(CLSID_NullRenderer);
    if(FAILED(hr))
    {
        return -16;
    }
    if (s->m_pNullRenderer!=NULL)
    {
        s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
    }

    hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_PREVIEW,
                                     &MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
    if (FAILED(hr))
    {
        //hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_CAPTURE,
        //	&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
        if (FAILED(hr))
        {
            return -17;
        }
    }

    //m_pDXFilter->SetBufferSamples(TRUE);

    s_callback = s;
    hr = s->m_pControl->Run();
    if(FAILED(hr))
    {
        return -18;
    }

    s->rotregvalue=1;
    return 0;
}
Example #10
int	main()
{
	// for playing
	IGraphBuilder *pGraphBuilder;
	ICaptureGraphBuilder2 *pCaptureGraphBuilder2;
	IMediaControl *pMediaControl;
	IBaseFilter *pDeviceFilter = NULL;

	// to select a video input device
	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;
	ULONG nFetched = 0;

	// initialize COM
	CoInitialize(NULL);

	//
	// selecting a device
	//

	// Create CreateDevEnum to list device
	CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);

	// Create EnumMoniker to list VideoInputDevice 
	pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (pEnumMoniker == NULL) {
		// this will be shown if there is no capture device
		printf("no device\n");
		return 0;
	}

	// reset EnumMoniker
	pEnumMoniker->Reset();

	// get each Moniker
	while (pEnumMoniker->Next(1, &pMoniker, &nFetched) == S_OK)
	{
		IPropertyBag *pPropertyBag;
		TCHAR devname[256];

		// bind to IPropertyBag
		pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
			(void **)&pPropertyBag);

		VARIANT var;

		// get FriendlyName
		var.vt = VT_BSTR;
		pPropertyBag->Read(L"FriendlyName", &var, 0);
		WideCharToMultiByte(CP_ACP, 0,
			var.bstrVal, -1, devname, sizeof(devname), 0, 0);
		VariantClear(&var);

		printf("%s\r\n", devname);
		printf("  select this device ? [y] or [n]\r\n");
		int ch = getchar();

		// you can start playing by 'y' + return key
		// if you press the other key, it will not be played.
		if (ch == 'y')
		{
			// Bind Moniker to Filter
			pMoniker->BindToObject(0, 0, IID_IBaseFilter,
				(void**)&pDeviceFilter );
		}
		else
		{
			getchar();
		}

		// release
		pMoniker->Release();
		pPropertyBag->Release();

		if (pDeviceFilter != NULL)
		{
			// go out of loop if getchar() returns 'y'
			break;
		}
	}

	if (pDeviceFilter != NULL) {
		//
		// PLAY
		//

		// create FilterGraph
		CoCreateInstance(CLSID_FilterGraph,
			NULL,
			CLSCTX_INPROC,
			IID_IGraphBuilder,
			(LPVOID *)&pGraphBuilder);

		// create CaptureGraphBuilder2
		CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC, 
			IID_ICaptureGraphBuilder2, 
			(LPVOID *)&pCaptureGraphBuilder2);

		//============================================================
		//===========  MY CODE  ======================================
		//=============================================================
		HRESULT hr = S_OK; // COM was already initialized above
		IAMStreamConfig *pConfig = NULL;
		hr = pCaptureGraphBuilder2->FindInterface(&PIN_CATEGORY_CAPTURE, 0, pDeviceFilter, IID_IAMStreamConfig, (void**)&pConfig);

		int iCount = 0, iSize = 0;
		hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);

		// Check the size to make sure we pass in the correct structure.
		if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
		{
			// Use the video capabilities structure.

			for (int iFormat = 0; iFormat < iCount; iFormat++)
			{
				VIDEO_STREAM_CONFIG_CAPS scc;
				AM_MEDIA_TYPE *pmtConfig;
				hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
				if (SUCCEEDED(hr))
				{
					/* Examine the format, and possibly use it. */
					if ((pmtConfig->majortype == MEDIATYPE_Video) &&
						(pmtConfig->subtype == MEDIASUBTYPE_RGB24) &&
						(pmtConfig->formattype == FORMAT_VideoInfo) &&
						(pmtConfig->cbFormat >= sizeof (VIDEOINFOHEADER)) &&
						(pmtConfig->pbFormat != NULL))
					{
						VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
						// pVih contains the detailed format information.
						LONG lWidth = pVih->bmiHeader.biWidth;
						LONG lHeight = pVih->bmiHeader.biHeight;
						if( lWidth == 1280 )
							//					if (iFormat == 26)
						{ //2 = '1280x720YUV' YUV, 22 = '1280x800YUV', 26 = '1280x720RGB'
							hr = pConfig->SetFormat(pmtConfig);
						}
					}
					// Delete the media type when you are done.
					DeleteMediaType(pmtConfig);
				}
			}
		}


		// Query the capture filter for the IAMCameraControl interface.
		IAMCameraControl *pCameraControl = 0;
		hr = pDeviceFilter->QueryInterface(IID_IAMCameraControl, (void**)&pCameraControl);
		if (FAILED(hr))
		{
			// The device does not support IAMCameraControl
		}
		else
		{
			long Min, Max, Step, Default, Flags, Val;

			// Get the range and default values 
			hr = pCameraControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step, &Default, &Flags);
			hr = pCameraControl->GetRange(CameraControl_Focus, &Min, &Max, &Step, &Default, &Flags);
			if (SUCCEEDED(hr))
			{
				hr = pCameraControl->Set(CameraControl_Exposure, -11, CameraControl_Flags_Manual ); // Min = -11, Max = 1, Step = 1
				hr = pCameraControl->Set(CameraControl_Focus, 12, CameraControl_Flags_Manual );
			}
		}


		// Query the capture filter for the IAMVideoProcAmp interface.
		IAMVideoProcAmp *pProcAmp = 0;
		hr = pDeviceFilter->QueryInterface(IID_IAMVideoProcAmp, (void**)&pProcAmp);
		if (FAILED(hr))
		{
			// The device does not support IAMVideoProcAmp
		}
		else
		{
			long Min, Max, Step, Default, Flags, Val;

			// Get the range and default values 
			hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step, &Default, &Flags);
			hr = pProcAmp->GetRange(VideoProcAmp_BacklightCompensation, &Min, &Max, &Step, &Default, &Flags);
			hr = pProcAmp->GetRange(VideoProcAmp_Contrast, &Min, &Max, &Step, &Default, &Flags);
			hr = pProcAmp->GetRange(VideoProcAmp_Saturation, &Min, &Max, &Step, &Default, &Flags);
			hr = pProcAmp->GetRange(VideoProcAmp_Sharpness, &Min, &Max, &Step, &Default, &Flags);
			hr = pProcAmp->GetRange(VideoProcAmp_WhiteBalance, &Min, &Max, &Step, &Default, &Flags);
			if (SUCCEEDED(hr))
			{
				hr = pProcAmp->Set(VideoProcAmp_Brightness, 142, VideoProcAmp_Flags_Manual);
				hr = pProcAmp->Set(VideoProcAmp_BacklightCompensation, 0, VideoProcAmp_Flags_Manual);
				hr = pProcAmp->Set(VideoProcAmp_Contrast, 4, VideoProcAmp_Flags_Manual);
				hr = pProcAmp->Set(VideoProcAmp_Saturation, 100, VideoProcAmp_Flags_Manual);
				hr = pProcAmp->Set(VideoProcAmp_Sharpness, 0, VideoProcAmp_Flags_Manual);
				hr = pProcAmp->Set(VideoProcAmp_WhiteBalance, 2800, VideoProcAmp_Flags_Manual);
			}
		}


		//============================================================
		//=========== END MY CODE  ======================================
		//=============================================================

		hr = S_OK;
		CTransformer* trans = new CTransformer( "Dif trans", 0, CLSID_DIFFilter, &hr );
		IBaseFilter * ttt = 0;
		trans->QueryInterface(IID_IBaseFilter, (LPVOID *)&ttt);
		// set FilterGraph
		hr = pCaptureGraphBuilder2->SetFiltergraph(pGraphBuilder);

		// get MediaControl interface
		hr = pGraphBuilder->QueryInterface(IID_IMediaControl,
			(LPVOID *)&pMediaControl);

		// add device filter to FilterGraph
		hr = pGraphBuilder->AddFilter(ttt, L"Dif trans");
		hr = pGraphBuilder->AddFilter(pDeviceFilter, L"Device Filter");

		// create Graph
		hr = pCaptureGraphBuilder2->RenderStream(&PIN_CATEGORY_CAPTURE,
			NULL, pDeviceFilter, NULL, NULL);

		// start playing
		hr = pMediaControl->Run();

		// to block execution
		// without this messagebox, the graph will be stopped immediately
		MessageBox(NULL,
			"Block Execution",
			"Block",
			MB_OK);

		// release
		pMediaControl->Release();
		pCaptureGraphBuilder2->Release();
		pGraphBuilder->Release();
	}

	// release
	pEnumMoniker->Release();
	pCreateDevEnum->Release();

	// finalize COM
	CoUninitialize();

	return 0;
}
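The loop above calls DeleteMediaType, which comes from the DirectShow base classes; if those are not linked, an equivalent helper (following the documented pattern) looks roughly like this:

// Sketch of a DeleteMediaType equivalent (names prefixed "My" to mark them as local helpers).
void MyFreeMediaType(AM_MEDIA_TYPE& mt)
{
	if (mt.cbFormat != 0)
	{
		CoTaskMemFree((PVOID)mt.pbFormat);	// free the format block
		mt.cbFormat = 0;
		mt.pbFormat = NULL;
	}
	if (mt.pUnk != NULL)
	{
		mt.pUnk->Release();					// release any attached IUnknown
		mt.pUnk = NULL;
	}
}

void MyDeleteMediaType(AM_MEDIA_TYPE *pmt)
{
	if (pmt != NULL)
	{
		MyFreeMediaType(*pmt);				// free the contents, then the structure itself
		CoTaskMemFree(pmt);
	}
}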
Example #11
void CVisualPage::OnCamSetupButton()
{
  CComboBox * box = (CComboBox*)(GetDlgItem(IDC_RECORDING_COMBO));
  int i = box->GetCurSel();
  int n = box->GetLBTextLen(i);
  CString s;
  box->GetLBText(i, s.GetBuffer(n));
  PString setupDeviceName = s;
  s.ReleaseBuffer();

  if (setupDeviceName.IsEmpty()) return;
  if (setupDeviceName.Find("fake") == 0) return;
  if (setupDeviceName.Find("monitor") == 0) return;
  if (setupDeviceName.Find("zmonitor") == 0) return;
  PTRACE(4,"PVidDirectShow\tCurrent device: " << setupDeviceName);

  HRESULT hr;
  IBaseFilter * pFilter = NULL;
  IMoniker *pMoniker =NULL;
  ICreateDevEnum *pDevEnum =NULL;
  IEnumMoniker *pClassEnum = NULL;
  ULONG cFetched;

  ::CoInitialize(NULL);

  // Create the system device enumerator
  hr = CoCreateInstance (CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC, IID_ICreateDevEnum, (void **) &pDevEnum);
  if (FAILED(hr)) { ::CoUninitialize(); return; }

  // Create an enumerator for the video capture devices
  hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
  if (FAILED(hr)) { ::CoUninitialize(); return; }

  if (pClassEnum == NULL) { ::CoUninitialize(); return; }

  PTRACE(4,"PVidDirectShow\tEntering device enumeration loop...");
  while (1)
  { // Get the next device
    hr = pClassEnum->Next(1, &pMoniker, &cFetched);
    if (hr != S_OK) { PTRACE(4, "PVidDirectShow\tGetInputDeviceNames() No more video capture device"); break; }

    // Get the property bag
    IPropertyBag *pPropBag;

    hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)(&pPropBag));
    if (FAILED(hr))
    { PTRACE(4,"PVidDirectShow\tBindToStorage failed, continue");
      pMoniker->Release();
      continue;
    }

    // Find the description or friendly name.
    VARIANT DeviceName;
    DeviceName.vt = VT_BSTR;
    hr = pPropBag->Read(L"Description", &DeviceName, NULL);
    if (FAILED(hr)) hr = pPropBag->Read(L"FriendlyName", &DeviceName, NULL);
    if (SUCCEEDED(hr))
    { char *pDeviceName = BSTR_to_ANSI(DeviceName.bstrVal);
      if (pDeviceName)
      { PTRACE(4, "PVidDirectShow\tGetInputDeviceNames() Found this capture device '"<< pDeviceName <<"'");
        if(PString(pDeviceName) == setupDeviceName)
        {
          PTRACE(4, "PVidDirectShow\tCamera Setup: device found");
          if ( SUCCEEDED(pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**) &pFilter)) && pFilter != NULL )
          {
          ISpecifyPropertyPages *p_spec; CAUUID cauuid;
          HRESULT hr = pFilter->QueryInterface( IID_ISpecifyPropertyPages, (void **)&p_spec );
          if( !FAILED(hr) )
          if( SUCCEEDED(p_spec->GetPages( &cauuid )) )
          { if( cauuid.cElems > 0 )
            { HWND hwnd_desktop = ::GetDesktopWindow();
              OleCreatePropertyFrame( hwnd_desktop, 30, 30, NULL, 1, (LPUNKNOWN *)(&pFilter), cauuid.cElems, cauuid.pElems, 0, 0, NULL );
              CoTaskMemFree( cauuid.pElems );
            }
            p_spec->Release();
          }
          pFilter->Release();
          }
        }
        free(pDeviceName);
      }
    }
    pPropBag->Release();
    pMoniker->Release();
  }

  ::CoUninitialize();
}
Example #12
// Enumerate all of the video input devices
// Return the filter with a matching friendly name
HRESULT GetVideoInputFilter(IBaseFilter** gottaFilter, wchar_t* matchName)
{
	BOOL done = false;

	// Create the System Device Enumerator.
	ICreateDevEnum *pSysDevEnum = NULL;
	HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
		IID_ICreateDevEnum, (void **)&pSysDevEnum);
	if (FAILED(hr))
	{
		return hr;
	}

	// Obtain a class enumerator for the video input category.
	IEnumMoniker *pEnumCat = NULL;
	hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0);

	if (hr == S_OK) 
	{
		// Enumerate the monikers.
		IMoniker *pMoniker = NULL;
		ULONG cFetched;
		while ((pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) && (!done))
		{
			// Bind the first moniker to an object
			IPropertyBag *pPropBag;
			hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, 
				(void **)&pPropBag);
			if (SUCCEEDED(hr))
			{
				// To retrieve the filter's friendly name, do the following:
				VARIANT varName;
				VariantInit(&varName);
				hr = pPropBag->Read(L"FriendlyName", &varName, 0);
				if (SUCCEEDED(hr))
				{
					wprintf(L"Testing Video Input Device: %s\n", varName.bstrVal);

					// Do a comparison, find out if it's the right one
					if (wcsncmp(varName.bstrVal, matchName, 
						wcslen(matchName)) == 0) {

						// We found it, so send it back to the caller
						hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**) gottaFilter);
						done = true;
					}
				}
				VariantClear(&varName);	
				pPropBag->Release();
			}
			pMoniker->Release();
		}
		pEnumCat->Release();
	}
	pSysDevEnum->Release();
	if (done) {
		return hr;	// found it, return native error
	} else {
		return VFW_E_NOT_FOUND;	// didn't find it error
	}
}
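
// A minimal, hypothetical caller for the GetVideoInputFilter() helper above.
// Assumptions: it is built in the same translation unit, linked against
// ole32.lib/oleaut32.lib/strmiids.lib, and "USB Video Device" is only a
// placeholder for a real friendly name.
#include <dshow.h>
#include <cstdio>

int TestGetVideoInputFilter()
{
	HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
	if (FAILED(hr))
		return 1;

	wchar_t matchName[] = L"USB Video Device";	// placeholder friendly name
	IBaseFilter *pCam = NULL;
	hr = GetVideoInputFilter(&pCam, matchName);
	if (hr == VFW_E_NOT_FOUND)
		printf("No matching video capture device found.\n");
	else if (SUCCEEDED(hr) && pCam != NULL)
	{
		// At this point the filter could be added to a graph with
		// IGraphBuilder::AddFilter(); here we just drop the reference.
		pCam->Release();
	}

	CoUninitialize();
	return 0;
}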
// delicious copypasta
static QList<QString> get_camera_names(void) {
    QList<QString> ret;
#if defined(_WIN32)
	// Create the System Device Enumerator.
	HRESULT hr;
	ICreateDevEnum *pSysDevEnum = NULL;
	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void **)&pSysDevEnum);
	if (FAILED(hr))
	{
		return ret;
	}
	// Obtain a class enumerator for the video compressor category.
	IEnumMoniker *pEnumCat = NULL;
	hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0);

	if (hr == S_OK) {
		// Enumerate the monikers.
		IMoniker *pMoniker = NULL;
		ULONG cFetched;
		while (pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) {
			IPropertyBag *pPropBag;
			hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag);
			if (SUCCEEDED(hr))	{
				// To retrieve the filter's friendly name, do the following:
				VARIANT varName;
				VariantInit(&varName);
				hr = pPropBag->Read(L"FriendlyName", &varName, 0);
				if (SUCCEEDED(hr))
				{
					// Display the name in your UI somehow.
					QString str((QChar*)varName.bstrVal, wcslen(varName.bstrVal));
					ret.append(str);
				}
				VariantClear(&varName);

				////// To create an instance of the filter, do the following:
				////IBaseFilter *pFilter;
				////hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter,
				////	(void**)&pFilter);
				// Now add the filter to the graph. 
				//Remember to release pFilter later.
				pPropBag->Release();
			}
			pMoniker->Release();
		}
		pEnumCat->Release();
	}
	pSysDevEnum->Release();
#else
    for (int i = 0; i < 16; i++) {
        char buf[128];
        sprintf(buf, "/dev/video%d", i);
        if (access(buf, R_OK | W_OK) == 0) {
            ret.append(buf);
        } else {
            continue;
        }
    }
#endif
    return ret;
}
QStringList VideoInput::list_devices_dshow(bool silent)
{
    if (!silent) { printf("\nVIDEOINPUT SPY MODE!\n\n"); }

    QStringList list;

#ifdef _MSC_VER
    ICreateDevEnum *pDevEnum = NULL;
    IEnumMoniker *pEnum = NULL;
    int deviceCounter = 0;
    
    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
        CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
        reinterpret_cast<void**>(&pDevEnum));

    if (SUCCEEDED(hr))
    {
        // Create an enumerator for the video capture category.
        hr = pDevEnum->CreateClassEnumerator(
            CLSID_VideoInputDeviceCategory,
            &pEnum, 0);

        if(hr == S_OK){

            if (!silent) { printf("SETUP: Looking For Capture Devices\n"); }
            IMoniker *pMoniker = NULL;

            while (pEnum->Next(1, &pMoniker, NULL) == S_OK){

                IPropertyBag *pPropBag;
                hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
                    (void**)(&pPropBag));

                if (FAILED(hr)){
                    pMoniker->Release();
                    continue;  // Skip this one, maybe the next one will work.
                }


                 // Find the description or friendly name.
                VARIANT varName;
                VariantInit(&varName);
                hr = pPropBag->Read(L"Description", &varName, 0);

                if (FAILED(hr)) hr = pPropBag->Read(L"FriendlyName", &varName, 0);

                if (SUCCEEDED(hr)){

                    // varName already holds the Description/FriendlyName read above.
                    char deviceName[255] = {0};

                    int count = 0;
                    int maxLen = sizeof(deviceName)/sizeof(deviceName[0]) - 2;
                    while( varName.bstrVal[count] != 0x00 && count < maxLen) {
                        deviceName[count] = (char)varName.bstrVal[count];
                        count++;
                    }
                    deviceName[count] = 0;
                    list.append(deviceName);

                    if (!silent) { printf("SETUP: %i) %s \n",deviceCounter, deviceName); }
                }
                VariantClear(&varName);

                pPropBag->Release();
                pPropBag = NULL;

                pMoniker->Release();
                pMoniker = NULL;

                deviceCounter++;
            }

            pEnum->Release();
            pEnum = NULL;
        }

        pDevEnum->Release();
        pDevEnum = NULL;

        if (!silent) { printf("SETUP: %i Device(s) found\n\n", deviceCounter); }
    }
#endif //_MSC_VER

    return list;
}
Beispiel #15
QStringList getCaptureDevices(GUID catGuid) //, QList<IMoniker*>& monList)
{
    // Create the System Device Enumerator.
    QStringList list;
    HRESULT hr;
    ICreateDevEnum *pDevEnum = NULL;
    IEnumMoniker *pEnum = NULL;

    // placeholder test sources
    if( catGuid == CLSID_VideoInputDeviceCategory){
        list.append("VS-A");
        list.append("VS-B");
    }else{
        list.append("AS-A");
        list.append("AS-B");
    }

    //

    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                          IID_ICreateDevEnum, (void **)&pDevEnum);
    //CHECK(hr, "create SystemDeviceEnum");
    if (FAILED(hr) || pDevEnum == NULL)
        return list;

    // populate the list of capture sources for the requested category
    hr = pDevEnum->CreateClassEnumerator(catGuid, &pEnum, 0);
    //CHECK(hr, "create ClassEnumerator");

    if(pEnum == NULL)
        qDebug() << "bad pEnum";

    IMoniker *pMoniker = NULL;
    while ( (pEnum != NULL) && pEnum->Next(1, &pMoniker, NULL) == S_OK)
    {
        IPropertyBag *pPropBag;
        hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag);
        if (SUCCEEDED(hr))
        {
                    // To retrieve the filter's friendly name, do the following:
            VARIANT varName;
            VariantInit(&varName);
            hr = pPropBag->Read(L"FriendlyName", &varName, 0);
            if (SUCCEEDED(hr))
            {
                char* pN = _com_util::ConvertBSTRToString(varName.bstrVal);
                QString txt = QString::fromLocal8Bit(pN); // otherwise Cyrillic device names turn into mojibake
                delete[] pN; // ConvertBSTRToString allocates with new[], so the caller must free it

                list.append(txt);
            }

            VariantClear(&varName);

            pPropBag->Release();
        }
        pMoniker->Release();
    }

    if(pEnum != NULL)
        pEnum->Release();

    if(pDevEnum != NULL)
        pDevEnum->Release();

    return list;
}
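
// A small, hypothetical usage sketch for getCaptureDevices() above. The helper
// is parameterised only by the category GUID, so the same call enumerates video
// or audio sources; it assumes COM has already been initialised by the caller
// and that the hard-coded "VS-A"/"AS-A" placeholder entries are acceptable.
#include <QDebug>
#include <QStringList>

static void dumpCaptureDevices()
{
    const QStringList videoSources = getCaptureDevices(CLSID_VideoInputDeviceCategory);
    const QStringList audioSources = getCaptureDevices(CLSID_AudioInputDeviceCategory);

    qDebug() << "Video sources:";
    for (const QString &name : videoSources)
        qDebug() << " " << name;

    qDebug() << "Audio sources:";
    for (const QString &name : audioSources)
        qDebug() << " " << name;
}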
void
VideoDeviceImpl::setup()
{
    HRESULT hr = CoInitializeEx(nullptr, COINIT_MULTITHREADED);
    if (FAILED(hr))
        return fail("Could not initialize video device.");

    hr = CoCreateInstance(
        CLSID_CaptureGraphBuilder2,
        nullptr,
        CLSCTX_INPROC_SERVER,
        IID_ICaptureGraphBuilder2,
        (void**) &cInterface->captureGraph_);
    if (FAILED(hr))
        return fail("Could not create the Filter Graph Manager");

    hr = CoCreateInstance(CLSID_FilterGraph,
        nullptr,
        CLSCTX_INPROC_SERVER,IID_IGraphBuilder,
        (void**) &cInterface->graph_);
    if (FAILED(hr))
        return fail("Could not add the graph builder!");

    hr = cInterface->captureGraph_->SetFiltergraph(cInterface->graph_);
    if (FAILED(hr))
        return fail("Could not set filtergraph.");

    ICreateDevEnum *pSysDevEnum = nullptr;
    hr = CoCreateInstance(CLSID_SystemDeviceEnum,
        nullptr,
        CLSCTX_INPROC_SERVER,
        IID_ICreateDevEnum,
        (void **)&pSysDevEnum);
    if (FAILED(hr))
        return fail("Could not create the enumerator!");

    IEnumMoniker* pEnumCat = nullptr;
    hr = pSysDevEnum->CreateClassEnumerator(
        CLSID_VideoInputDeviceCategory,
        &pEnumCat,
        0);
    if (SUCCEEDED(hr)) {
        // Automatically released when the enclosing if-block exits or an exception is thrown
        auto IEnumMonikerDeleter = [](IEnumMoniker* p){ p->Release(); };
        std::unique_ptr<IEnumMoniker, decltype(IEnumMonikerDeleter)&> pEnumCatGuard {pEnumCat, IEnumMonikerDeleter};

        IMoniker *pMoniker = nullptr;
        ULONG cFetched;
        unsigned int deviceCounter = 0;
        while ((pEnumCatGuard->Next(1, &pMoniker, &cFetched) == S_OK))
        {
            if (deviceCounter == this->id) {
                IPropertyBag *pPropBag;
                hr = pMoniker->BindToStorage(
                    0,
                    0,
                    IID_IPropertyBag,
                    (void **)&pPropBag);
                if (SUCCEEDED(hr)) {
                    VARIANT varName;
                    VariantInit(&varName);
                    hr = pPropBag->Read(L"FriendlyName", &varName, 0);
                    if (SUCCEEDED(hr)) {
                        int l = WideCharToMultiByte(
                            CP_UTF8,
                            0,
                            varName.bstrVal,
                            -1,
                            0, 0, 0, 0);
                        auto tmp = new char[l];
                        WideCharToMultiByte(
                            CP_UTF8,
                            0,
                            varName.bstrVal,
                            -1,
                            tmp,
                            l,
                            0, 0);
                        this->name = std::string(tmp);
                        delete[] tmp; // the temporary UTF-8 buffer is no longer needed
                        this->device = std::string("video=") + this->name;
                        hr = pMoniker->BindToObject(
                            nullptr, nullptr,
                            IID_IBaseFilter,
                            (void**)&cInterface->videoInputFilter_);
                        if (SUCCEEDED(hr))
                            hr = cInterface->graph_->AddFilter(
                                cInterface->videoInputFilter_,
                                varName.bstrVal);
                        else {
                            fail("Could not add filter to video device.");
                        }
                        hr = cInterface->captureGraph_->FindInterface(
                            &PIN_CATEGORY_PREVIEW,
                            &MEDIATYPE_Video,
                            cInterface->videoInputFilter_,
                            IID_IAMStreamConfig,
                            (void **)&cInterface->streamConf_);
                        if(FAILED(hr)) {
                            hr = cInterface->captureGraph_->FindInterface(
                                &PIN_CATEGORY_CAPTURE,
                                &MEDIATYPE_Video,
                                cInterface->videoInputFilter_,
                                IID_IAMStreamConfig,
                                (void **)&cInterface->streamConf_);
                            if (FAILED(hr)) {
                                fail("Couldn't config the stream!");
                            }
                        }
                        // Release the property bag and moniker before leaving
                        // the loop, otherwise they leak when the device is found.
                        VariantClear(&varName);
                        pPropBag->Release();
                        pPropBag = nullptr;
                        pMoniker->Release();
                        pMoniker = nullptr;
                        break; // Device found
                    }
                    VariantClear(&varName);
                    pPropBag->Release();
                    pPropBag = nullptr;
                    pMoniker->Release();
                    pMoniker = nullptr;
                }
            }
            deviceCounter++;
        }
        if (SUCCEEDED(hr) && cInterface->streamConf_) {
            int piCount;
            int piSize;
            cInterface->streamConf_->GetNumberOfCapabilities(&piCount, &piSize);
            AM_MEDIA_TYPE *pmt;
            VIDEO_STREAM_CONFIG_CAPS pSCC;
            for (int i = 0; i < piCount; i++) {
                cInterface->streamConf_->GetStreamCaps(i, &pmt, (BYTE*)&pSCC);
                if (pmt->formattype == FORMAT_VideoInfo) {
                    auto videoInfo = (VIDEOINFOHEADER*) pmt->pbFormat;
                    sizeList_.emplace_back(videoInfo->bmiHeader.biWidth, videoInfo->bmiHeader.biHeight);
                    rateList_[sizeList_.back()].emplace_back(1e7, pSCC.MinFrameInterval);
                    rateList_[sizeList_.back()].emplace_back(1e7, pSCC.MaxFrameInterval);
                    capMap_[sizeList_.back()] = pmt;
                }
            }
        }
    }
    pSysDevEnum->Release();
    pSysDevEnum = NULL;
}
Beispiel #17
/*
 * Class:     sage_PVR350OSDRenderingPlugin
 * Method:    openOSD0
 * Signature: ()J
 */
JNIEXPORT jlong JNICALL Java_sage_PVR350OSDRenderingPlugin_openOSD0
  (JNIEnv *env, jobject jo)
{
	CoInitializeEx(NULL, COM_THREADING_MODE);
	HRESULT hr;
	slog((env, "BVF open350OSD0 called\r\n"));
	PVR350OSDPluginNativeData rv;
	ZeroMemory(&rv, sizeof(PVR350OSDPluginNativeData));
	rv.ntscModeFor350 = 1;
	// We can't load this directly by GUID, just like some of the KS closed-captioning filters
    ICreateDevEnum *pSysDevEnum = NULL;
    IEnumMoniker *pEnum = NULL;
    IMoniker *pMoniker = NULL;
    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, 
        CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, 
        (void**)&pSysDevEnum);
	if (FAILED(hr)) return 0;
    hr = pSysDevEnum->CreateClassEnumerator(KSCATEGORY_DATADECOMPRESSOR, &pEnum, 0);
	if (hr != S_OK)
	{
		// Nothing to enumerate; return 0 so the caller sees "no native handle"
		// instead of a stray HRESULT value.
		SAFE_RELEASE(pSysDevEnum);
		return 0;
	}
    while (S_OK == pEnum->Next(1, &pMoniker, NULL))
    {
        IPropertyBag *pPropBag = NULL;
        hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, 
                                (void **)&pPropBag);
		if (SUCCEEDED(hr))
		{
			VARIANT var;
			VariantInit(&var);
			hr = pPropBag->Read(L"FriendlyName", &var, 0);
			if (SUCCEEDED(hr))
			{
				char conv[512];
				WideCharToMultiByte(CP_ACP, 0, var.bstrVal, -1, conv, 512, 0, 0);
				if (strstr(conv, "PVR"))
				{
        			hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter,
        				(void**)(&(rv.pOSD)));
        			if (SUCCEEDED(hr))
					{
						VariantClear(&var);
						SAFE_RELEASE(pPropBag);
						SAFE_RELEASE(pMoniker);
						break;
					}
				}
			}
			VariantClear(&var);
		}
        SAFE_RELEASE(pPropBag);
        SAFE_RELEASE(pMoniker);
    }

    SAFE_RELEASE(pSysDevEnum);
    SAFE_RELEASE(pEnum);
	if (rv.pOSD)
	{
		hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
			IID_IGraphBuilder, (void **)&(rv.pGraph));
		TEST_AND_BAIL
		if (!rv.mutex350OSD)
		{
			rv.mutex350OSD = CreateMutex(NULL, FALSE, "Global\\SageTV350Sync");
			WaitForSingleObject(rv.mutex350OSD, MUTEX350WAITTIME);
		}
		hr = rv.pGraph->AddFilter(rv.pOSD, L"OSD");
		if (FAILED(hr))
		{
			SAFE_RELEASE(rv.pOSD);
			SAFE_RELEASE(rv.pGraph);
			ReleaseMutex(rv.mutex350OSD);
			CloseHandle(rv.mutex350OSD);
			elog((env, "Error opening 350 OSD hr=0x%x\r\n", hr));
			return 0;
		}
		DWORD holder;
		HKEY myKey;
		DWORD readType;
		DWORD hsize = sizeof(holder);
		if (RegCreateKeyEx(HKEY_LOCAL_MACHINE, "SOFTWARE\\Frey Technologies\\Common", 0, 0,
			REG_OPTION_NON_VOLATILE, KEY_ALL_ACCESS, 0, &myKey, 0) == ERROR_SUCCESS)
		{
			if (RegQueryValueEx(myKey, "NTSC", 0, &readType, (LPBYTE) &holder, &hsize) == ERROR_SUCCESS)
			{
				rv.ntscModeFor350 = holder;
			}
			else
			{
				RegSetValueEx(myKey, "NTSC", 0, REG_DWORD, (LPBYTE) &(rv.ntscModeFor350), sizeof(rv.ntscModeFor350));
			}

			RegCloseKey(myKey);
		}

		PVR350OSDPluginNativeData* realRV = new PVR350OSDPluginNativeData;
		memcpy(realRV, &rv, sizeof(PVR350OSDPluginNativeData));
		if (Print350OSDInfo(realRV, env, TRUE))
		{
			ReleaseMutex(realRV->mutex350OSD);
			return (jlong) realRV;
		}
		else
		{
			ReleaseMutex(realRV->mutex350OSD);
			Java_sage_PVR350OSDRenderingPlugin_closeOSD0(env, jo, (jlong)realRV);
			return 0;
		}
	}
	return 0;
}
static void vfw_detect(MSWebCamManager *obj) {
    ICreateDevEnum *pCreateDevEnum = NULL;
    IEnumMoniker *pEnumMoniker = NULL;
    IMoniker *pMoniker = NULL;
    HRESULT hr;

    ULONG nFetched = 0;

    // Initialize COM
    CoInitialize(NULL);

    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                          IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
    if(FAILED(hr))
    {
        CoUninitialize();
        return ;
    }

    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
            &pEnumMoniker, 0);
    if (FAILED(hr) || pEnumMoniker == NULL) {
        //printf("no device\n");
        CoUninitialize();
        return ;
    }

    pEnumMoniker->Reset();

    int pos=0;
    while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
    {
        IPropertyBag *pBag;
        hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag );
        if( hr != S_OK )
        {
            pMoniker->Release();
            continue;
        }

        VARIANT var;
        VariantInit(&var);
        hr = pBag->Read( L"FriendlyName", &var, NULL );
        if( hr != S_OK )
        {
            pBag->Release();
            pMoniker->Release();
            continue;
        }
        //USES_CONVERSION;
        char szName[256];

        WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0);
        VariantClear(&var);

        IBaseFilter *m_pDeviceFilter;
        hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&m_pDeviceFilter );
        if(SUCCEEDED(hr))
        {
            GUID pPinCategory;
            int fmt_supported = 0;

            dump_format(m_pDeviceFilter);

            //basic testing for the device.
            if (try_format(m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0)
                fmt_supported = 1;
            else if (try_format(m_pDeviceFilter,MS_YUY2, &pPinCategory)==0)
                fmt_supported = 1;
            else if (try_format(m_pDeviceFilter,MS_YUYV, &pPinCategory)==0)
                fmt_supported = 1;
            else if (try_format(m_pDeviceFilter,MS_UYVY, &pPinCategory)==0)
                fmt_supported = 1;
            else if (try_format(m_pDeviceFilter,MS_RGB24, &pPinCategory)==0)
                fmt_supported = 1;
            else
            {
                ms_warning("Unsupported video pixel format/refuse camera (%s).", szName);
            }

            if (fmt_supported==1)
            {
                MSWebCam *cam=ms_web_cam_new(&ms_directx_cam_desc);
                cam->name=ms_strdup(szName);
                ms_web_cam_manager_add_cam(obj,cam);
            }
            m_pDeviceFilter->Release();
            m_pDeviceFilter=NULL;
        }


        pMoniker->Release();
        pBag->Release();
        pMoniker=NULL;
        pBag=NULL;
    }

    pEnumMoniker->Release();
    pCreateDevEnum->Release();
    CoUninitialize();
}
HRESULT cDxCapture::FindCaptureDevice(const int cameraIndex, IBaseFilter ** ppSrcFilter)
{
	HRESULT hr = S_OK;
	IBaseFilter * pSrc = NULL;
	IMoniker* pMoniker = NULL;
	ICreateDevEnum *pDevEnum = NULL;
	IEnumMoniker *pClassEnum = NULL;

	if (!ppSrcFilter)
	{
		return E_POINTER;
	}

	// Create the system device enumerator
	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
		IID_ICreateDevEnum, (void **)&pDevEnum);
	if (FAILED(hr))
	{
		Msg(TEXT("Couldn't create system enumerator!  hr=0x%x"), hr);
	}

	// Create an enumerator for the video capture devices

	if (SUCCEEDED(hr))
	{
		hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
		if (FAILED(hr))
		{
			Msg(TEXT("Couldn't create class enumerator!  hr=0x%x"), hr);
		}
	}

	if (SUCCEEDED(hr))
	{
		// If there are no enumerators for the requested type, then 
		// CreateClassEnumerator will succeed, but pClassEnum will be NULL.
		if (pClassEnum == NULL)
		{
			// 			MessageBox(ghApp, TEXT("No video capture device was detected.\r\n\r\n")
			// 				TEXT("This sample requires a video capture device, such as a USB WebCam,\r\n")
			// 				TEXT("to be installed and working properly.  The sample will now close."),
			// 				TEXT("No Video Capture Hardware"), MB_OK | MB_ICONINFORMATION);
			hr = E_FAIL;
		}
	}

	// Use the first video capture device on the device list.
	// Note that if the Next() call succeeds but there are no monikers,
	// it will return S_FALSE (which is not a failure).  Therefore, we
	// check that the return code is S_OK instead of using SUCCEEDED() macro.

	if (SUCCEEDED(hr))
	{
		// Select the camera at index cameraIndex.
		int cnt = 0;
		while (cameraIndex >= cnt++)
		{
			SAFE_RELEASE(pMoniker);	// drop the moniker of any skipped device
			hr = pClassEnum->Next(1, &pMoniker, NULL);
			if (hr == S_FALSE)
			{
				Msg(TEXT("Unable to access video capture device!"));
				hr = E_FAIL;
				break;
			}
		}
	}

	if (SUCCEEDED(hr))
	{
		// Bind Moniker to a filter object
		hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pSrc);
		if (FAILED(hr))
		{
			Msg(TEXT("Couldn't bind moniker to filter object!  hr=0x%x"), hr);
		}
	}

	// Copy the found filter pointer to the output parameter.
	if (SUCCEEDED(hr))
	{
		*ppSrcFilter = pSrc;
		(*ppSrcFilter)->AddRef();
	}

	SAFE_RELEASE(pSrc);
	SAFE_RELEASE(pMoniker);
	SAFE_RELEASE(pDevEnum);
	SAFE_RELEASE(pClassEnum);

	return hr;
}
static int v4w_open_videodevice(V4wState *s)
{
    // Initialize COM
    CoInitialize(NULL);

    // get a Graph
    HRESULT hr= CoCreateInstance (CLSID_FilterGraph,
                                  NULL,
                                  CLSCTX_INPROC_SERVER,
                                  IID_IGraphBuilder, //IID_IBaseFilter,
                                  (void **)&s->m_pGraph);
    if(FAILED(hr))
    {
        return -1;
    }

    // get a CaptureGraphBuilder2
    hr= CoCreateInstance (CLSID_CaptureGraphBuilder2,
                          NULL,
                          CLSCTX_INPROC_SERVER,
                          IID_ICaptureGraphBuilder2, //IID_IBaseFilter,
                          (void **)&s->m_pBuilder);
    if(FAILED(hr))
    {
        return -2;
    }

    // connect capture graph builder with the graph
    s->m_pBuilder->SetFiltergraph(s->m_pGraph);

    // get mediacontrol so we can start and stop the filter graph
    hr=s->m_pGraph->QueryInterface (IID_IMediaControl, (void **)&s->m_pControl);
    if(FAILED(hr))
    {
        return -3;
    }


    ICreateDevEnum *pCreateDevEnum = NULL;
    IEnumMoniker *pEnumMoniker = NULL;
    IMoniker *pMoniker = NULL;

    ULONG nFetched = 0;

    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                          IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
    if(FAILED(hr))
    {
        return -4;
    }

    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
            &pEnumMoniker, 0);
    if (FAILED(hr) || pEnumMoniker == NULL) {
        //printf("no device\n");
        return -5;
    }

    pEnumMoniker->Reset();

    int pos=0;
    while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
    {
        IPropertyBag *pBag;
        hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag );
        if( hr != S_OK )
        {
            pMoniker->Release();
            continue;
        }

        if (s->dev[0]=='\0')
        {
            pBag->Release();
            break;
        }

        VARIANT var;
        VariantInit(&var);
        hr = pBag->Read( L"FriendlyName", &var, NULL );
        if( hr != S_OK )
        {
            pBag->Release();
            pMoniker->Release();
            continue;
        }
        //USES_CONVERSION;
        char szName[256];

        WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0);
        VariantClear(&var);

        if (strcmp(szName, s->dev)==0)
        {
            pBag->Release();
            break;
        }

        pMoniker->Release();
        pBag->Release();
        pMoniker=NULL;
        pBag=NULL;
    }

    if(pMoniker==NULL)
    {
        return -6;
    }

    hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
    if(FAILED(hr))
    {
        return -7;
    }

    s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

    pMoniker->Release();
    pEnumMoniker->Release();
    pCreateDevEnum->Release();


    GUID pPinCategory;

    if (try_format(s->m_pDeviceFilter, s->pix_fmt, &pPinCategory)==0)
        s->pix_fmt = s->pix_fmt;
    else if (try_format(s->m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0)
        s->pix_fmt = MS_YUV420P;
    else if (try_format(s->m_pDeviceFilter,MS_YUY2, &pPinCategory)==0)
        s->pix_fmt = MS_YUY2;
    else if (try_format(s->m_pDeviceFilter,MS_YUYV, &pPinCategory)==0)
        s->pix_fmt = MS_YUYV;
    else if (try_format(s->m_pDeviceFilter,MS_UYVY, &pPinCategory)==0)
        s->pix_fmt = MS_UYVY;
    else if (try_format(s->m_pDeviceFilter,MS_RGB24, &pPinCategory)==0)
        s->pix_fmt = MS_RGB24;
    else
    {
        ms_error("Unsupported video pixel format.");
        return -8;
    }

    if (s->pix_fmt == MS_YUV420P)
        ms_message("Driver supports YUV420P, using that format.");
    else if (s->pix_fmt == MS_YUY2)
        ms_message("Driver supports YUY2 (YUYV), using that format.");
    else if (s->pix_fmt == MS_YUYV)
        ms_message("Driver supports YUV422, using that format.");
    else if (s->pix_fmt == MS_UYVY)
        ms_message("Driver supports UYVY, using that format.");
    else if (s->pix_fmt == MS_RGB24)
        ms_message("Driver supports RGB24, using that format.");

    if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H);
    else
    {
        ms_error("No supported size found for format.");
        /* size not supported? */
        return -9;
    }

    // get DXFilter
    s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
    if(s->m_pDXFilter==NULL)
    {
        return -10;
    }
    s->m_pDXFilter->AddRef();

    CMediaType mt;
    mt.SetType(&MEDIATYPE_Video);

    GUID m = MEDIASUBTYPE_RGB24;
    if (s->pix_fmt == MS_YUV420P)
        m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
    else if (s->pix_fmt == MS_YUY2)
        m = MEDIASUBTYPE_YUY2;
    else if (s->pix_fmt == MS_YUYV)
        m = MEDIASUBTYPE_YUYV;
    else if (s->pix_fmt == MS_UYVY)
        m = MEDIASUBTYPE_UYVY;
    else if (s->pix_fmt == MS_RGB24)
        m = MEDIASUBTYPE_RGB24;
    mt.SetSubtype(&m);

    mt.formattype = FORMAT_VideoInfo;
    mt.SetTemporalCompression(FALSE);

    VIDEOINFO *pvi = (VIDEOINFO *)
                     mt.AllocFormatBuffer(sizeof(VIDEOINFO));
    if (NULL == pvi)
        return -11;
    ZeroMemory(pvi, sizeof(VIDEOINFO));

    if (s->pix_fmt == MS_YUV420P)
        pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
    else if (s->pix_fmt == MS_YUY2)
        pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
    else if (s->pix_fmt == MS_YUYV)
        pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','V');
    else if (s->pix_fmt == MS_UYVY)
        pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
    else if (s->pix_fmt == MS_RGB24)
        pvi->bmiHeader.biCompression = BI_RGB;

    if (s->pix_fmt == MS_YUV420P)
        pvi->bmiHeader.biBitCount = 12;
    else if (s->pix_fmt == MS_YUY2)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_YUYV)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_UYVY)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_RGB24)
        pvi->bmiHeader.biBitCount = 24;

    pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    pvi->bmiHeader.biWidth = s->vsize.width;
    pvi->bmiHeader.biHeight = s->vsize.height;
    pvi->bmiHeader.biPlanes = 1;
    pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
    pvi->bmiHeader.biClrImportant = 0;
    mt.SetSampleSize(pvi->bmiHeader.biSizeImage);

    mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

    hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
    if(FAILED(hr))
    {
        return -12;
    }

    hr = s->m_pDXFilter->SetCallback(Callback);
    if(FAILED(hr))
    {
        return -13;
    }

    hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
                                        (LPVOID *)&s->m_pIDXFilter);
    if(FAILED(hr))
    {
        return -14;
    }

    hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
    if(FAILED(hr))
    {
        return -15;
    }


    // get null renderer
    hr=CoCreateInstance (CLSID_NullRenderer,
                         NULL,
                         CLSCTX_INPROC_SERVER,
                         IID_IBaseFilter,
                         (void **)&s->m_pNullRenderer);
    if(FAILED(hr))
    {
        return -16;
    }
    if (s->m_pNullRenderer!=NULL)
    {
        s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
    }

    hr = s->m_pBuilder->RenderStream(&pPinCategory,
                                     &MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
    if (FAILED(hr))
    {
        return -17;
    }

    IAMStreamConfig *pConfig = NULL;
    hr = s->m_pBuilder->FindInterface(
             &pPinCategory, // Pin category selected above (preview or capture).
             &MEDIATYPE_Video,    // Video media type.
             s->m_pDeviceFilter, // Pointer to the capture filter.
             IID_IAMStreamConfig, (void**)&pConfig);
    if (pConfig!=NULL)
    {
        AM_MEDIA_TYPE *pType = NULL;
        int iCount, iSize;
        pConfig->GetNumberOfCapabilities(&iCount, &iSize);

        for (int i = 0; i < iCount; i++) {
            VIDEO_STREAM_CONFIG_CAPS scc;
            pType = NULL;
            pConfig->GetStreamCaps(i, &pType, (BYTE *)&scc);

            if (!((pType->formattype == FORMAT_VideoInfo) &&
                    (pType->cbFormat >= sizeof(VIDEOINFOHEADER)) &&
                    (pType->pbFormat != NULL)))
                continue;

            VIDEOINFOHEADER & videoInfo = *(VIDEOINFOHEADER *)pType->pbFormat;

            if (m != pType->subtype)
                continue;

            if (videoInfo.bmiHeader.biWidth != s->vsize.width)
                continue;

            if (videoInfo.bmiHeader.biHeight != s->vsize.height)
                continue;

            if (videoInfo.bmiHeader.biBitCount != pvi->bmiHeader.biBitCount)
                continue;

            if (videoInfo.bmiHeader.biCompression != pvi->bmiHeader.biCompression)
                continue;

            videoInfo.AvgTimePerFrame = UNITS / (LONGLONG)s->fps;
            pConfig->SetFormat(pType);
        }

        pConfig->GetFormat(&pType);
        if (pType!=NULL)
        {
            VIDEOINFO *pvi;
            pvi = (VIDEOINFO *)pType->pbFormat;
            ms_message("v4w: camera asked fps=%.2f // real fps=%.2f", s->fps, ((float)UNITS / (float)pvi->AvgTimePerFrame));
        }

        pConfig->Release();
    }

    //m_pDXFilter->SetBufferSamples(TRUE);

    s_callback = s;
    hr = s->m_pControl->Run();
    if(FAILED(hr))
    {
        return -18;
    }


    s->rotregvalue=1;
    return 0;
}
Beispiel #21
/*
 * Enumerate all video devices
 *
 * See also:
 *
 * Using the System Device Enumerator:
 *     http://msdn2.microsoft.com/en-us/library/ms787871.aspx
 */
int enum_devices()
{
	HRESULT hr;

	printf("Enumerating video input devices ...\n");

	// Create the System Device Enumerator.
	ICreateDevEnum *pSysDevEnum = NULL;
	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
		IID_ICreateDevEnum, (void **)&pSysDevEnum);
	if(FAILED(hr))
	{
		fprintf(stderr, "ERROR: Unable to create system device enumerator.\n");
		return hr;
	}

	// Obtain a class enumerator for the video input device category.
	IEnumMoniker *pEnumCat = NULL;
	hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0);

	if(hr == S_OK) 
	{
		// Enumerate the monikers.
		IMoniker *pMoniker = NULL;
		ULONG cFetched;
		while(pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK)
		{
			IPropertyBag *pPropBag;
			hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, 
				(void **)&pPropBag);
			if(SUCCEEDED(hr))
			{
				// To retrieve the filter's friendly name, do the following:
				VARIANT varName;
				VariantInit(&varName);
				hr = pPropBag->Read(L"FriendlyName", &varName, 0);
				if (SUCCEEDED(hr))
				{
					// Display the name in your UI somehow.
					wprintf(L"  Found device: %s\n", varName.bstrVal);
				}
				VariantClear(&varName);

				// To create an instance of the filter, do the following:
				IBaseFilter *pFilter;
				hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter,
					(void**)&pFilter);
				
				process_filter(pFilter);

				//Remember to release pFilter later.
				pPropBag->Release();
			}
			pMoniker->Release();
		}
		pEnumCat->Release();
	}
	pSysDevEnum->Release();

	return 0;
}
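
/*
 * A minimal, hypothetical main() wrapper for the enum_devices() example above.
 * Assumptions: a single-threaded apartment is sufficient, the program links
 * against ole32.lib and strmiids.lib, and process_filter() is defined elsewhere
 * in the same project.
 */
#include <dshow.h>
#include <stdio.h>

int main(void)
{
	HRESULT hr = CoInitialize(NULL);
	if (FAILED(hr))
	{
		fprintf(stderr, "ERROR: CoInitialize failed (hr=0x%08lx).\n", (unsigned long)hr);
		return 1;
	}

	int result = enum_devices();

	CoUninitialize();
	return result;
}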
Beispiel #22
HRESULT FindCaptureDevice(IBaseFilter ** ppSrcFilter)
{
	HRESULT hr;
	IBaseFilter * pSrc = NULL;
	IMoniker *pMoniker = NULL;
	ULONG cFetched;

	if (!ppSrcFilter)
		return E_POINTER;

	// Create the system device enumerator
	ICreateDevEnum *pDevEnum = NULL;

	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
		IID_ICreateDevEnum, (void **)&pDevEnum);
	if (FAILED(hr))
	{
		return hr;
	}

	// Create an enumerator for the video capture devices
	IEnumMoniker *pClassEnum = NULL;

	hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
	if (FAILED(hr))
	{
		pDevEnum->Release();
		return hr;
	}

	// If there are no enumerators for the requested type, then 
	// CreateClassEnumerator will succeed, but pClassEnum will be NULL.
	if (pClassEnum == NULL)
	{
		pDevEnum->Release();
		return E_FAIL;
	}

	// Use the first video capture device on the device list.
	// Note that if the Next() call succeeds but there are no monikers,
	// it will return S_FALSE (which is not a failure).  Therefore, we
	// check that the return code is S_OK instead of using SUCCEEDED() macro.
	if (S_OK == (pClassEnum->Next(1, &pMoniker, &cFetched)))
	{
		IBindCtx *pbc = NULL;

		CreateBindCtx(0, &pbc);
		// Bind Moniker to a filter object
		hr = pMoniker->BindToObject(pbc, 0, IID_IBaseFilter, (void**)&pSrc);
		pbc->Release();
		if (FAILED(hr))
		{
			pMoniker->Release();
			pClassEnum->Release();
			pDevEnum->Release();
			return hr;
		}
	}
	else
	{
		pClassEnum->Release();
		pDevEnum->Release();
		return E_FAIL;
	}

	// Copy the found filter pointer to the output parameter.
	// Do NOT Release() the pSrc reference here, since it is handed to
	// (and later released by) the calling function.
	*ppSrcFilter = pSrc;

	pMoniker->Release();
	pClassEnum->Release();
	pDevEnum->Release();

	return hr;
}
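
// A short, hypothetical caller for the FindCaptureDevice() helper above. Because
// the helper hands back a referenced IBaseFilter, the caller owns that reference
// and must Release() it when done (the graph keeps its own reference once the
// filter has been added). Assumes COM is already initialised.
static HRESULT AddFirstCaptureDeviceToGraph(IGraphBuilder *pGraph)
{
	IBaseFilter *pSrcFilter = NULL;
	HRESULT hr = FindCaptureDevice(&pSrcFilter);
	if (FAILED(hr))
		return hr;

	// Add the capture source to an existing filter graph.
	hr = pGraph->AddFilter(pSrcFilter, L"Video Capture");

	// Drop the reference returned by the helper; the graph holds its own.
	pSrcFilter->Release();
	return hr;
}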
Beispiel #23
////////////////////////////////////////////////////////
// enumerate message
//
/////////////////////////////////////////////////////////
std::vector<std::string>videoDS :: enumerate(void)
{
  std::vector<std::string>result;

  HRESULT hr;
  IBaseFilter * pSrc = NULL;

  IMoniker* pMoniker =NULL;
  ULONG cFetched;

  ICreateDevEnum* pDevEnum =NULL;
  IEnumMoniker* pClassEnum = NULL;

  do {
    // Create the system device enumerator
    hr = CoCreateInstance (CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
                           IID_ICreateDevEnum, (void ** ) &pDevEnum);
    if (FAILED(hr)) {
      verbose(0, "[GEM:videoDS] Couldn't create system enumerator!");
      break;
    }

    // Create an enumerator for the video capture devices
    hr = pDevEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory,
                                          &pClassEnum, 0);
    if (FAILED(hr)) {
      verbose(0, "[GEM:videoDS] Couldn't create class enumerator!");
      break;
    }

    // If there are no enumerators for the requested type, then
    // CreateClassEnumerator will succeed, but pClassEnum will be NULL.
    if (pClassEnum == NULL) {
      verbose(0, "[GEM:videoDS] No video capture devices found!");
      break;
    }

    // Use the first video capture device on the device list.
    // Note that if the Next() call succeeds but there are no monikers,
    // it will return S_FALSE (which is not a failure).  Therefore, we
    // check that the return code is S_OK instead of using SUCCEEDED() macro.
    int devIndex = 0;
    while (S_OK == (pClassEnum->Next (1, &pMoniker, &cFetched))) {
      IPropertyBag *pPropBag;
      if (SUCCEEDED(hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
                         (void **)&pPropBag))) {
        // To retrieve the friendly name of the filter, do the following:
        VARIANT varName;
        VariantInit(&varName);
        hr = pPropBag->Read(L"FriendlyName", &varName, 0);
        if (SUCCEEDED(hr)) {
          std::string s=_bstr_t(varName.bstrVal);
          result.push_back(s);
        }
        VariantClear(&varName);

        COMRELEASE(pPropBag);
      }
      COMRELEASE(pMoniker);
      devIndex++;
    }
  } while (0);

  // Copy the found filter pointer to the output parameter.
  // Do NOT Release() the reference, since it will still be used
  // by the calling function.
  COMRELEASE(pDevEnum);
  COMRELEASE(pClassEnum);

  return result;
}
Beispiel #24
HRESULT CAccessSys::FindCaptureDevice(void)
{
	HRESULT hr = S_OK;
	IBaseFilter * pSrc = NULL;
	IMoniker* pMoniker = NULL;
	ICreateDevEnum *pDevEnum = NULL;
	IEnumMoniker *pClassEnum = NULL;


	// Create the system device enumerator
	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
		IID_ICreateDevEnum, (void **)&pDevEnum);
	if (FAILED(hr))
	{
		Msg(TEXT("Couldn't create system enumerator!  hr=0x%x"), hr);
	}

	// Create an enumerator for the video capture devices

	if (SUCCEEDED(hr))
	{
		hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
		if (FAILED(hr))
		{
			Msg(TEXT("Couldn't create class enumerator!  hr=0x%x"), hr);
		}
	}

	if (SUCCEEDED(hr))
	{
		// If there are no enumerators for the requested type, then 
		// CreateClassEnumerator will succeed, but pClassEnum will be NULL.
		if (pClassEnum == NULL)
		{
			MessageBox(NULL, TEXT("No video capture device was detected.\r\n\r\n")
				TEXT("This sample requires a video capture device, such as a USB WebCam,\r\n")
				TEXT("to be installed and working properly.  The sample will now close."),
				TEXT("No Video Capture Hardware"), MB_OK | MB_ICONINFORMATION);
			hr = E_FAIL;
		}
	}

	// Use the first video capture device on the device list.
	// Note that if the Next() call succeeds but there are no monikers,
	// it will return S_FALSE (which is not a failure).  Therefore, we
	// check that the return code is S_OK instead of using SUCCEEDED() macro.

	if (SUCCEEDED(hr))
	{
		hr = pClassEnum->Next(1, &pMoniker, NULL);
		if (hr == S_FALSE)
		{
			Msg(TEXT("Unable to access video capture device!"));
			hr = E_FAIL;
		}
	}

	if (SUCCEEDED(hr))
	{
		// Bind Moniker to a filter object
		hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pSrc);
		if (FAILED(hr))
		{
			Msg(TEXT("Couldn't bind moniker to filter object!  hr=0x%x"), hr);
		}
	}

	// Copy the found filter pointer to the output parameter.
	ULONG ref;
	if (SUCCEEDED(hr))
	{
		p_streams[0].p_device_filter = pSrc;
		ref = p_streams[0].p_device_filter->AddRef();
	}

	hr = p_capture_graph_builder2->FindInterface(&PIN_CATEGORY_CAPTURE,
		&MEDIATYPE_Video, pSrc,
		IID_IAMStreamConfig, (void **)&p_VSC);
	if (FAILED(hr))
	{
		Msg(TEXT("Couldn't find IAMStreamConfig!  hr=0x%x"), hr);
	}
	else {
		AM_MEDIA_TYPE *pmt;
		//VIDEO_STREAM_CONFIG_CAPS scc;
		BYTE* scc = NULL;
		int piCount, piSize;

		hr = p_VSC->GetNumberOfCapabilities(&piCount, &piSize);
		if (hr == S_OK){
			for (int i = 0; i < piCount; i++){
				scc = new BYTE[piSize];
				hr = p_VSC->GetStreamCaps(i, &pmt, scc/*reinterpret_cast<BYTE*>(&scc)*/);
				//hr = p_VSC->GetFormat(&pmt);

				double FrameRate = 15.0;
				if (hr == NOERROR)
				{
					if (pmt->subtype == MEDIASUBTYPE_RGB24 ||
						pmt->subtype == MEDIASUBTYPE_I420 ||
						pmt->subtype == MEDIASUBTYPE_YUY2){
						if (pmt->formattype == FORMAT_VideoInfo)
						{
							//pmt->subtype = MEDIASUBTYPE_RGB24;
							VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)pmt->pbFormat;
							if (pvi->bmiHeader.biHeight == 240 && pvi->bmiHeader.biWidth == 320){
								pvi->AvgTimePerFrame = (LONGLONG)(10000000 / FrameRate);
								//pvi->bmiHeader.biHeight = 240;
								//pvi->bmiHeader.biWidth = 320;
								hr = p_VSC->SetFormat(pmt);
								if (FAILED(hr)){
									Msg(TEXT("couldn't set video format! hr = 0x%x"), hr);
								}
								DeleteMediaType(pmt);
								delete[] scc;
								break;
							}
						}
					}
					DeleteMediaType(pmt);
				}
				delete[] scc;
			}
		}
		// Do not Release() pSrc here: p_streams[0].p_device_filter still holds
		// the AddRef'ed reference, and SAFE_RELEASE(pSrc) below drops the local one.
	}
	SAFE_RELEASE(pSrc);
	SAFE_RELEASE(pMoniker);
	SAFE_RELEASE(pClassEnum);

	// Create an enumerator for the audio capture devices

	if (SUCCEEDED(hr))
	{
		hr = pDevEnum->CreateClassEnumerator(CLSID_AudioInputDeviceCategory, &pClassEnum, 0);
		if (FAILED(hr))
		{
			Msg(TEXT("Couldn't create class enumerator!  hr=0x%x"), hr);
		}
	}

	if (SUCCEEDED(hr))
	{
		// If there are no enumerators for the requested type, then 
		// CreateClassEnumerator will succeed, but pClassEnum will be NULL.
		if (pClassEnum == NULL)
		{
			MessageBox(NULL, TEXT("No audio capture device was detected.\r\n\r\n")
				TEXT("This sample requires a audio capture device\r\n")
				TEXT("to be installed and working properly.  The sample will now close."),
				TEXT("No Audio Capture Hardware"), MB_OK | MB_ICONINFORMATION);
			hr = E_FAIL;
		}
	}

	// Use the first audio capture device on the device list.
	// Note that if the Next() call succeeds but there are no monikers,
	// it will return S_FALSE (which is not a failure).  Therefore, we
	// check that the return code is S_OK instead of using SUCCEEDED() macro.

	if (SUCCEEDED(hr))
	{
		hr = pClassEnum->Next(1, &pMoniker, NULL);
		if (hr == S_FALSE)
		{
			Msg(TEXT("Unable to access audio capture device!"));
			hr = E_FAIL;
		}
	}

	if (SUCCEEDED(hr))
	{
		// Bind Moniker to a filter object
		hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pSrc);
		if (FAILED(hr))
		{
			Msg(TEXT("Couldn't bind moniker to filter object!  hr=0x%x"), hr);
		}
	}

	// Copy the found filter pointer to the output parameter.
	if (SUCCEEDED(hr))
	{
		p_streams[1].p_device_filter = pSrc;
		ref = p_streams[1].p_device_filter->AddRef();
	}

	SAFE_RELEASE(pSrc);
	SAFE_RELEASE(pMoniker);
	SAFE_RELEASE(pClassEnum);

	SAFE_RELEASE(pDevEnum);

	return hr;
}
Beispiel #25
static int v4w_open_videodevice(V4wState *s, int format, MSVideoSize *vsize)
{
	// Initialize COM
	CoInitialize(NULL);

	// get a Graph
	HRESULT hr=s->m_pGraph.CoCreateInstance(CLSID_FilterGraph);
	if(FAILED(hr))
	{
		return -1;
	}

	// get a CaptureGraphBuilder2
#if !defined(_WIN32_WCE)
	hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder2);
#else
	hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder);
#endif
	if(FAILED(hr))
	{
		return -2;
	}

	// connect capture graph builder with the graph
	s->m_pBuilder->SetFiltergraph(s->m_pGraph);

	// get mediacontrol so we can start and stop the filter graph
	hr=s->m_pGraph.QueryInterface(&(s->m_pControl));
	if(FAILED(hr))
	{
		return -3;
	}

	// get DXFilter
	s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
	if(s->m_pDXFilter==NULL)
	{
		return -4;
	}
	s->m_pDXFilter->AddRef();
	if(FAILED(hr))
	{
		return -4;
	}

	CMediaType mt;
	mt.SetType(&MEDIATYPE_Video);

	if (format==MS_YUV420P)
	{
		GUID m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
		mt.SetSubtype(&m);
		mt.SetSubtype(&MEDIASUBTYPE_YV12);
	}
	else //if (format==MS_RGB24)
	{
		mt.SetSubtype(&MEDIASUBTYPE_RGB24);
	}

	//mt.SetSubtype(&MEDIASUBTYPE_IYUV);
	//mt.SetSubtype(&MEDIASUBTYPE_YUYV);
	//mt.SetSubtype(&MEDIASUBTYPE_RGB24);
	//mt.SetSampleSize();
	mt.formattype = FORMAT_VideoInfo;
	mt.SetTemporalCompression(FALSE);

	VIDEOINFO *pvi = (VIDEOINFO *)
	mt.AllocFormatBuffer(sizeof(VIDEOINFO));
	if (NULL == pvi)
		return E_OUTOFMEMORY;
	ZeroMemory(pvi, sizeof(VIDEOINFO));
	if (format==MS_YUV420P)
	{
		pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','V','1','2');
		pvi->bmiHeader.biBitCount = 12;
	}
	else
	{
		pvi->bmiHeader.biCompression = BI_RGB;
		pvi->bmiHeader.biBitCount = 24;
	}
	pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	pvi->bmiHeader.biWidth = vsize->width;
	pvi->bmiHeader.biHeight = vsize->height;
	pvi->bmiHeader.biPlanes = 1;
	pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
	pvi->bmiHeader.biClrImportant = 0;
	mt.SetSampleSize(pvi->bmiHeader.biSizeImage);
	mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

	hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
	if(FAILED(hr))
	{
		return -5;
	}

	hr = s->m_pDXFilter->SetCallback(Callback); 
	if(FAILED(hr))
	{
		return -6;
	}

	hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
	 (LPVOID *)&s->m_pIDXFilter);
	if(FAILED(hr))
	{
		return -7;
	}

	hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
	if(FAILED(hr))
	{
		return -8;
	}

#ifdef WM6
	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;

	ULONG nFetched = 0;

	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
	if(FAILED(hr))
	{
		return -9;
	}

	hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (FAILED(hr) || pEnumMoniker == NULL) {
		//printf("no device\n");
		return -10;
	}

	pEnumMoniker->Reset();

	hr = pEnumMoniker->Next(1, &pMoniker, &nFetched);
	if(FAILED(hr) || pMoniker==NULL)
	{
		return -11;
	}

	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
	if(FAILED(hr))
	{
		return -12;
	}

	s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

	pMoniker->Release();
	pEnumMoniker->Release();
	pCreateDevEnum->Release();
#else
	WCHAR wzDeviceName[ MAX_PATH + 1 ];
	CComVariant   varCamName;
	CPropertyBag PropBag;
    CComPtr<IPersistPropertyBag>    pPropertyBag;
	GetFirstCameraDriver(wzDeviceName);

	hr = s->m_pDeviceFilter.CoCreateInstance( CLSID_VideoCapture ); 
	if (FAILED(hr))
	{
		return -8;
	}

	s->m_pDeviceFilter.QueryInterface( &pPropertyBag );
	varCamName = wzDeviceName;
	if( varCamName.vt != VT_BSTR ) {
	  return E_OUTOFMEMORY;	// BSTR allocation for the camera name failed
	}
	PropBag.Write( L"VCapName", &varCamName );   
	pPropertyBag->Load( &PropBag, NULL );
	pPropertyBag.Release();

	hr = s->m_pGraph->AddFilter( s->m_pDeviceFilter, L"Video capture source" );
#endif

	if (FAILED(hr))
	{
		return -8;
	}

	// get null renderer
	s->m_pNullRenderer = NULL;
#if 0
	hr=s->m_pNullRenderer.CoCreateInstance(CLSID_NullRenderer);
	if(FAILED(hr))
	{
		return -13;
	}
#endif
	if (s->m_pNullRenderer!=NULL)
	{
		s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
	}

	hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_PREVIEW,
		&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
	if (FAILED(hr))
	{
		//hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_CAPTURE,
		//	&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
		if (FAILED(hr))
		{
			return -14;
		}
	}
	
	//m_pDXFilter->SetBufferSamples(TRUE);


		// Create the System Device Enumerator.
	IFilterMapper *pMapper = NULL;
	//IEnumMoniker *pEnum = NULL;
	IEnumRegFilters *pEnum = NULL;

	hr = CoCreateInstance(CLSID_FilterMapper,
		NULL, CLSCTX_INPROC, IID_IFilterMapper,
		(void **) &pMapper);

	if (FAILED(hr))
	{
		// Error handling omitted for clarity.
	}

	GUID arrayInTypes[2];
	arrayInTypes[0] = MEDIATYPE_Video;
	arrayInTypes[1] = MEDIASUBTYPE_dvsd;

	hr = pMapper->EnumMatchingFilters(
		&pEnum,
		MERIT_HW_COMPRESSOR, // Minimum merit.
		FALSE,               // At least one input pin?
		MEDIATYPE_NULL,
		MEDIASUBTYPE_NULL,
		FALSE,              // Must be a renderer?
		FALSE,               // At least one output pin?
		MEDIATYPE_NULL,
		MEDIASUBTYPE_NULL);

	// Enumerate the registered filters.
	//IMoniker *pMoniker;
	REGFILTER *pMoniker;
	ULONG cFetched;
	while (pEnum->Next(1, &pMoniker, &cFetched) == S_OK)
	{
		IPropertyBag *pPropBag = NULL;
#if 0
		hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
			(void **)&pPropBag);

		if (SUCCEEDED(hr))
		{
			// To retrieve the friendly name of the filter, do the following:
			VARIANT varName;
			VariantInit(&varName);
			hr = pPropBag->Read(L"FriendlyName", &varName, 0);
			if (SUCCEEDED(hr))
			{
				// Display the name in your UI somehow.
			}
			VariantClear(&varName);

			// To create an instance of the filter, do the following:
			IBaseFilter *pFilter;
			hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&pFilter);
			// Now add the filter to the graph. Remember to release pFilter later.

			// Clean up.
			pPropBag->Release();
		}
		pMoniker->Release();
#endif
	}

	// Clean up.
	pMapper->Release();
	pEnum->Release();




	s_callback = s;
	hr = s->m_pControl->Run();
	if(FAILED(hr))
	{
		return -15;
	}

	s->rotregvalue=1;
	s->pix_fmt = format;
	s->vsize.height = vsize->height;
	s->vsize.width = vsize->width;
	return 0;
}
bool GetAudioCaptureDevices(std::vector<IBaseFilter*>& captureSourceFilterList, std::vector<std::wstring>& captureDeviceNameList)
{
	HRESULT hr;
	CoInitialize(NULL);
	ComUninitial comUninitial;

	ICreateDevEnum* createDevEnum;
	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void**)&createDevEnum);
	if (FAILED(hr))
	{
		ErrorPrint("Create system device enum error",hr);
		return false;
	}
	ComReleaser createDevEnumReleaser(createDevEnum);

	IEnumMoniker *enumMoniker = NULL;
	hr = createDevEnum->CreateClassEnumerator(CLSID_AudioInputDeviceCategory,&enumMoniker, 0);
	if (hr != S_OK)
	{
		ErrorPrint("Create class enumerator error",hr);
		return false;
	}
	std::cout<<"HRESULT VALUE:"<<std::hex<<hr<<std::endl;
	std::cout<<"Enum moniker value:"<<std::hex<<(int)enumMoniker<<std::endl;
	ComReleaser enumMonikerReleaser(enumMoniker);

	IMoniker* moniker;
	int i = 0;
	while (S_OK == enumMoniker->Next(1, &moniker, NULL))
	{
		std::cout<<"Find audio count:"<<++i<<std::endl;
		ComReleaser monikerReleaser(moniker);
		IPropertyBag* propBag;
		hr = moniker->BindToStorage(NULL,NULL,IID_IPropertyBag, (void**)&propBag);
		if (FAILED(hr))
		{
			ErrorPrint("Bind to storage error",hr);
		}
		else
		{
			ComReleaser propBagReleaser(propBag);

			VARIANT varName;
			VariantInit(&varName);
			hr = propBag->Read(L"FriendlyName", &varName, NULL);
			if (FAILED(hr))
			{
				ErrorPrint("Get audio input device friendly name error",hr);
				VariantClear(&varName);
				continue;
			}
			VariantClear(&varName);

			IBaseFilter* audioCaptureInputDevice;
			hr = moniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&audioCaptureInputDevice);
			if (FAILED(hr))
			{
				ErrorPrint("Bind to object error",hr);
				continue;
			}

			captureSourceFilterList.push_back(audioCaptureInputDevice);
			captureDeviceNameList.push_back(varName.bstrVal);
		}
	}

#ifdef _DEBUG
	std::cout<<"Start print all capture device name:\n";
	using namespace boost::lambda;
	std::for_each(captureDeviceNameList.begin(), captureDeviceNameList.end(), std::wcout<<_1<<L"\n");
#endif

	return true;
}
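
// A brief, hypothetical usage sketch for GetAudioCaptureDevices() above. Each
// entry in the filter vector carries its own COM reference, so the caller has
// to Release() every filter once it is finished with it.
#include <iostream>
#include <string>
#include <vector>

static void ListAndReleaseAudioCaptureDevices()
{
	std::vector<IBaseFilter*> filters;
	std::vector<std::wstring> names;

	if (!GetAudioCaptureDevices(filters, names))
	{
		std::cout << "No audio capture devices enumerated." << std::endl;
		return;
	}

	for (size_t i = 0; i < names.size(); ++i)
		std::wcout << L"Audio device " << i << L": " << names[i] << std::endl;

	// Drop the references handed out by the enumeration helper.
	for (size_t i = 0; i < filters.size(); ++i)
		filters[i]->Release();
}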
Beispiel #27
/* Try to intelligently fetch a default video input device */
static HRESULT
GetDefaultInputDevice(IBaseFilter **ppSrcFilter)
{
    HRESULT hr = S_OK;
    IBaseFilter *pSrc = NULL;
    IMoniker *pMoniker = NULL;
    ICreateDevEnum *pDevEnum = NULL;
    IEnumMoniker *pClassEnum = NULL;

    if (!ppSrcFilter) {
        return E_POINTER;
    }

    hr = CoCreateInstance(
        CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
        IID_ICreateDevEnum, (void **)&pDevEnum
    );
    if (FAILED(hr)) return hr;

    hr = pDevEnum->CreateClassEnumerator(
        CLSID_VideoInputDeviceCategory, &pClassEnum, 0
    );
    if (FAILED(hr)) {
        SAFE_RELEASE(pDevEnum);
        return hr;
    }

    if (pClassEnum == NULL) {
        /* No devices available */
        SAFE_RELEASE(pDevEnum);
        return E_FAIL;
    }

    /* Pick the first device from the list.
     * Note that if the Next() call succeeds but there are no monikers,
     * it will return S_FALSE (which is not a failure).
     */
    hr = pClassEnum->Next (1, &pMoniker, NULL);
    if (hr == S_FALSE) {
        SAFE_RELEASE(pDevEnum);
        SAFE_RELEASE(pClassEnum);
        return E_FAIL;
    }

    hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pSrc);
    if (FAILED(hr)) {
        SAFE_RELEASE(pDevEnum);
        SAFE_RELEASE(pClassEnum);
        SAFE_RELEASE(pMoniker);
        return hr;
    }

    *ppSrcFilter = pSrc;
    (*ppSrcFilter)->AddRef();

    SAFE_RELEASE(pSrc);
    SAFE_RELEASE(pMoniker);
    SAFE_RELEASE(pDevEnum);
    SAFE_RELEASE(pClassEnum);

    return hr;
}
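
/* A compact, hypothetical caller for GetDefaultInputDevice() above: it only
 * checks whether a default video source exists and then releases it again.
 * SAFE_RELEASE and COM initialisation are assumed to be available exactly as
 * in the example itself. */
static bool HasDefaultVideoInput(void)
{
    IBaseFilter *pSrc = NULL;
    HRESULT hr = GetDefaultInputDevice(&pSrc);
    if (FAILED(hr) || pSrc == NULL)
        return false;

    /* In a real application the filter would now be added to a capture graph;
     * here we simply drop the reference. */
    SAFE_RELEASE(pSrc);
    return true;
}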