QStringList VideoInput::list_devices_dshow(bool silent)
{
    if (!silent) {
        printf("\nVIDEOINPUT SPY MODE!\n\n");
    }

    QStringList list;

#ifdef _MSC_VER
    ICreateDevEnum *pDevEnum = NULL;
    IEnumMoniker *pEnum = NULL;
    int deviceCounter = 0;

    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
                                  CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
                                  reinterpret_cast<void**>(&pDevEnum));
    if (SUCCEEDED(hr)) {
        // Create an enumerator for the video capture category.
        hr = pDevEnum->CreateClassEnumerator(
                 CLSID_VideoInputDeviceCategory, &pEnum, 0);
        if (hr == S_OK) {
            if (!silent) {
                printf("SETUP: Looking For Capture Devices\n");
            }
            IMoniker *pMoniker = NULL;
            while (pEnum->Next(1, &pMoniker, NULL) == S_OK) {
                IPropertyBag *pPropBag;
                hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
                                             (void**)(&pPropBag));
                if (FAILED(hr)) {
                    pMoniker->Release();
                    continue;  // Skip this one, maybe the next one will work.
                }

                // Find the description or friendly name.
                VARIANT varName;
                VariantInit(&varName);
                hr = pPropBag->Read(L"Description", &varName, 0);
                if (FAILED(hr))
                    hr = pPropBag->Read(L"FriendlyName", &varName, 0);

                if (SUCCEEDED(hr)) {
                    // Narrow the BSTR into a char buffer (ASCII only).
                    char deviceName[255] = {0};
                    int count = 0;
                    int maxLen = sizeof(deviceName)/sizeof(deviceName[0]) - 2;
                    while (varName.bstrVal[count] != 0x00 && count < maxLen) {
                        deviceName[count] = (char)varName.bstrVal[count];
                        count++;
                    }
                    deviceName[count] = 0;

                    list.append(deviceName);

                    if (!silent) {
                        printf("SETUP: %i) %s \n", deviceCounter, deviceName);
                    }
                }
                VariantClear(&varName);

                pPropBag->Release();
                pPropBag = NULL;
                pMoniker->Release();
                pMoniker = NULL;

                deviceCounter++;
            }

            pDevEnum->Release();
            pDevEnum = NULL;
            pEnum->Release();
            pEnum = NULL;
        }
        if (!silent) {
            printf("SETUP: %i Device(s) found\n\n", deviceCounter);
        }
    }
#endif  // _MSC_VER

    return list;
}
static int get_device(struct vidsrc_st *st, const char *name)
{
	ICreateDevEnum *dev_enum = NULL;
	IEnumMoniker *enum_mon = NULL;
	IMoniker *mon;
	ULONG fetched;
	HRESULT res;
	int id = 0;
	bool found = false;

	if (!st)
		return EINVAL;

	res = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
			       CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
			       (void**)&dev_enum);
	if (res != NOERROR)
		return ENOENT;

	res = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
					      &enum_mon, 0);
	if (res != NOERROR) {
		dev_enum->Release();
		return ENOENT;
	}

	enum_mon->Reset();
	while (enum_mon->Next(1, &mon, &fetched) == S_OK && !found) {
		IPropertyBag *bag;
		VARIANT var;
		char dev_name[256];
		int len = 0;

		res = mon->BindToStorage(0, 0, IID_IPropertyBag,
					 (void **)&bag);
		if (!SUCCEEDED(res)) {
			mon->Release();
			continue;
		}

		VariantInit(&var);
		res = bag->Read(L"FriendlyName", &var, NULL);
		if (NOERROR != res) {
			bag->Release();
			mon->Release();
			continue;
		}

		len = WideCharToMultiByte(CP_ACP, 0, var.bstrVal, -1,
					  dev_name, sizeof(dev_name),
					  NULL, NULL);
		if (len > 0) {
			found = !str_isset(name) ||
				!str_casecmp(dev_name, name);

			if (found) {
				info("dshow: got device '%s' id=%d\n",
				     name, id);
				/* keep the moniker reference for later use */
				st->dev_moniker = mon;
			}
		}

		VariantClear(&var);
		bag->Release();

		if (!found) {
			mon->Release();
			++id;
		}
	}

	enum_mon->Release();
	dev_enum->Release();

	return found ? 0 : ENOENT;
}
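// The two functions above repeat the same BindToStorage / Read(L"FriendlyName")
// / release sequence on every exit path. Below is a minimal sketch of a helper
// that factors that out, assuming a COM-initialized thread and the standard
// Windows/DirectShow headers; the name read_friendly_name() is hypothetical
// and not part of either code base above.
#include <windows.h>
#include <dshow.h>
#include <string>

static bool read_friendly_name(IMoniker *mon, std::string &out)
{
    IPropertyBag *bag = NULL;
    HRESULT hr = mon->BindToStorage(0, 0, IID_IPropertyBag, (void **)&bag);
    if (FAILED(hr))
        return false;

    VARIANT var;
    VariantInit(&var);
    bool ok = false;

    if (SUCCEEDED(bag->Read(L"FriendlyName", &var, NULL)) &&
        var.vt == VT_BSTR && var.bstrVal) {
        // Convert the BSTR to UTF-8; the first call only asks for the size.
        int len = WideCharToMultiByte(CP_UTF8, 0, var.bstrVal, -1,
                                      NULL, 0, NULL, NULL);
        if (len > 0) {
            out.resize(len);
            WideCharToMultiByte(CP_UTF8, 0, var.bstrVal, -1,
                                &out[0], len, NULL, NULL);
            out.resize(len - 1);   // drop the embedded terminator
            ok = true;
        }
    }

    VariantClear(&var);   // frees the BSTR on every path
    bag->Release();
    return ok;
}

// Usage inside an enumeration loop like the ones above:
//   std::string name;
//   if (read_friendly_name(pMoniker, name)) { /* compare or list `name` */ }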
static GstCaps * gst_dshowvideosrc_get_caps (GstBaseSrc * basesrc) { HRESULT hres = S_OK; IBindCtx *lpbc = NULL; IMoniker *videom; DWORD dwEaten; GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (basesrc); gunichar2 *unidevice = NULL; if (src->caps) { return gst_caps_ref (src->caps); } if (!src->device) { src->device = gst_dshow_getdevice_from_devicename (&CLSID_VideoInputDeviceCategory, &src->device_name); if (!src->device) { GST_ERROR ("No video device found."); return NULL; } } unidevice = g_utf8_to_utf16 (src->device, strlen (src->device), NULL, NULL, NULL); if (!src->video_cap_filter) { hres = CreateBindCtx (0, &lpbc); if (SUCCEEDED (hres)) { hres = MkParseDisplayName (lpbc, (LPCOLESTR) unidevice, &dwEaten, &videom); if (SUCCEEDED (hres)) { hres = videom->BindToObject (lpbc, NULL, IID_IBaseFilter, (LPVOID *) & src->video_cap_filter); videom->Release (); } lpbc->Release (); } } if (!src->caps) { src->caps = gst_caps_new_empty (); } if (src->video_cap_filter && gst_caps_is_empty (src->caps)) { /* get the capture pins supported types */ IPin *capture_pin = NULL; IEnumPins *enumpins = NULL; HRESULT hres; hres = src->video_cap_filter->EnumPins (&enumpins); if (SUCCEEDED (hres)) { while (enumpins->Next (1, &capture_pin, NULL) == S_OK) { IKsPropertySet *pKs = NULL; hres = capture_pin->QueryInterface (IID_IKsPropertySet, (LPVOID *) & pKs); if (SUCCEEDED (hres) && pKs) { DWORD cbReturned; GUID pin_category; RPC_STATUS rpcstatus; hres = pKs->Get (AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0, &pin_category, sizeof (GUID), &cbReturned); /* we only want capture pins */ if (UuidCompare (&pin_category, (UUID *) & PIN_CATEGORY_CAPTURE, &rpcstatus) == 0) { { GstCaps *caps = gst_dshowvideosrc_getcaps_from_streamcaps (src, capture_pin); if (caps) { gst_caps_append (src->caps, caps); } else { caps = gst_dshowvideosrc_getcaps_from_enum_mediatypes (src, capture_pin); if (caps) gst_caps_append (src->caps, caps); } } } pKs->Release (); } capture_pin->Release (); } enumpins->Release (); } } if (unidevice) { g_free (unidevice); } if (src->caps) { return gst_caps_ref (src->caps); } return NULL; }
DirectShowScanner::DirectShowScanner() { ICreateDevEnum *pDevEnum = 0; int hr; int devNum; char nameBuf[80]; // Reference: Pesce, pp 54-56. debug_msg("new DirectShowScanner()\n"); // Initialize the COM subsystem hr=CoInitialize(NULL); if (FAILED(hr)) { debug_msg("Failed COM subsystem initialisation.\n"); return; } // Create a helper object to find the capture devices. hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (LPVOID*)&pDevEnum); if (FAILED(hr)) { debug_msg("Failed to Create a helper object to find the DS capture devices.\n"); CoUninitialize(); return; } IEnumMoniker *pEnum = 0; IMoniker *pMoniker = 0; IPropertyBag *pPropBag = 0; VARIANT varName; // Get an enumerator over video capture filters hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0); //showErrorMessage(hr); if (FAILED(hr) || pEnum == 0) { debug_msg("Failed to Get an enumerator over DS video capture filters.\n"); CoUninitialize(); return; } // Get the capture filter for each device installed, up to NUM_DEVS devices for( devNum=0; devNum < NUM_DEVS; ++devNum) { if ( pEnum->Next(1, &pMoniker, NULL) == S_OK ) { hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag); if (FAILED(hr)) { debug_msg("Failed to Get propbag bound to storage on DS dev: %d\n", devNum); continue; } //showErrorMessage(hr); debug_msg("propbag bound to storage ok= %d\n", hr); VariantInit(&varName); hr = pPropBag->Read(L"FriendlyName", &varName, 0); if (FAILED(hr)) { debug_msg("Failed to Get friendly name read on DS dev: %d\n", devNum); continue; } //showErrorMessage(hr); debug_msg("friendly name read ok= %d\n", hr); // Need this macro in atlconv.h to go from bStr to char* - msp USES_CONVERSION; strcpy(nameBuf, W2A(varName.bstrVal)); debug_msg("DirectShowScanner::DirectShowScanner(): found nameBuf/FriendlyName=%s\n", nameBuf); // needs work, but don't add drivers that look like VFW drivers - msp if( (strstr(nameBuf, "VFW") == NULL) ) { hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void **)(pCaptureFilter+devNum)); //showErrorMessage(hr); if (FAILED(hr)) { debug_msg("Failed to Get friendly name read on DS dev: %d\n", devNum); continue; } debug_msg("capture filter bound ok= %d\n", hr); devs_[devNum] = new DirectShowDevice(strdup(nameBuf), pCaptureFilter[devNum]); } else { debug_msg("discarding an apparent VFW device= %s\n", nameBuf); devs_[devNum] = NULL; } VariantClear(&varName); pPropBag->Release(); } } // Release these objects so COM can release their memory pMoniker->Release(); pEnum->Release(); pDevEnum->Release(); }
gboolean gst_dshow_find_filter (CLSID input_majortype, CLSID input_subtype, CLSID output_majortype, CLSID output_subtype, gchar * prefered_filter_name, IBaseFilter ** filter) { gboolean ret = FALSE; HRESULT hres; GUID arrayInTypes[2]; GUID arrayOutTypes[2]; IFilterMapper2 *mapper = NULL; IEnumMoniker *enum_moniker = NULL; IMoniker *moniker = NULL; ULONG fetched; gchar *prefered_filter_upper = NULL; gboolean exit = FALSE; /* initialize output parameter */ if (filter) *filter = NULL; /* create a private copy of prefered filter substring in upper case */ if (prefered_filter_name) { prefered_filter_upper = g_strdup (prefered_filter_name); _strupr (prefered_filter_upper); } hres = CoCreateInstance (CLSID_FilterMapper2, NULL, CLSCTX_INPROC, IID_IFilterMapper2, (void **) &mapper); if (FAILED (hres)) goto clean; memcpy (&arrayInTypes[0], &input_majortype, sizeof (CLSID)); memcpy (&arrayInTypes[1], &input_subtype, sizeof (CLSID)); memcpy (&arrayOutTypes[0], &output_majortype, sizeof (CLSID)); memcpy (&arrayOutTypes[1], &output_subtype, sizeof (CLSID)); hres = mapper->EnumMatchingFilters (&enum_moniker, 0, FALSE, MERIT_DO_NOT_USE + 1, TRUE, 1, arrayInTypes, NULL, NULL, FALSE, TRUE, 1, arrayOutTypes, NULL, NULL); if (FAILED (hres)) goto clean; enum_moniker->Reset (); while (hres = enum_moniker->Next (1, &moniker, &fetched), hres == S_OK && !exit) { IBaseFilter *filter_temp = NULL; IPropertyBag *property_bag = NULL; gchar *friendly_name = NULL; hres = moniker->BindToStorage (NULL, NULL, IID_IPropertyBag, (void **) &property_bag); if (SUCCEEDED (hres) && property_bag) { VARIANT varFriendlyName; VariantInit (&varFriendlyName); hres = property_bag->Read (L"FriendlyName", &varFriendlyName, NULL); if (hres == S_OK && varFriendlyName.bstrVal) { friendly_name = g_utf16_to_utf8 ((const gunichar2 *) varFriendlyName.bstrVal, wcslen (varFriendlyName.bstrVal), NULL, NULL, NULL); if (friendly_name) _strupr (friendly_name); SysFreeString (varFriendlyName.bstrVal); } property_bag->Release (); } hres = moniker->BindToObject (NULL, NULL, IID_IBaseFilter, (void **) &filter_temp); if (SUCCEEDED (hres) && filter_temp) { ret = TRUE; if (filter) { if (*filter) (*filter)->Release (); *filter = filter_temp; (*filter)->AddRef (); if (prefered_filter_upper && friendly_name && strstr (friendly_name, prefered_filter_upper)) exit = TRUE; } /* if we just want to know if the formats are supported OR if we don't care about what will be the filter used => we can stop enumeration */ if (!filter || !prefered_filter_upper) exit = TRUE; filter_temp->Release (); } g_free (friendly_name); moniker->Release (); } clean: g_free (prefered_filter_upper); if (enum_moniker) enum_moniker->Release (); if (mapper) mapper->Release (); return ret; }
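/* A hypothetical call of gst_dshow_find_filter() above, looking for a decoder
 * that accepts MPEG-2 video and outputs planar YV12. The media types chosen
 * here are only an illustration, and the caller is assumed to have initialized
 * COM already; this helper is not part of the GStreamer code base. */
static IBaseFilter *
find_mpeg2_decoder (void)
{
  IBaseFilter *decoder = NULL;

  gboolean ok = gst_dshow_find_filter (
      MEDIATYPE_Video, MEDIASUBTYPE_MPEG2_VIDEO,  /* input major/sub type  */
      MEDIATYPE_Video, MEDIASUBTYPE_YV12,         /* output major/sub type */
      NULL,                                       /* no preferred filter   */
      &decoder);

  /* on success an AddRef'ed filter is returned in `decoder`; the caller owns
   * that reference and must Release() it eventually */
  return ok ? decoder : NULL;
}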
static void vfw_detect(MSWebCamManager *obj) { ICreateDevEnum *pCreateDevEnum = NULL; IEnumMoniker *pEnumMoniker = NULL; IMoniker *pMoniker = NULL; HRESULT hr; ULONG nFetched = 0; // Initialize COM CoInitialize(NULL); hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum); if(FAILED(hr)) { CoUninitialize(); return ; } hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumMoniker, 0); if (FAILED(hr) || pEnumMoniker == NULL) { //printf("no device\n"); CoUninitialize(); return ; } pEnumMoniker->Reset(); int pos=0; while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) ) { IPropertyBag *pBag; hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag ); if( hr != S_OK ) continue; VARIANT var; VariantInit(&var); hr = pBag->Read( L"FriendlyName", &var, NULL ); if( hr != S_OK ) { pMoniker->Release(); continue; } //USES_CONVERSION; char szName[256]; WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0); VariantClear(&var); IBaseFilter *m_pDeviceFilter; hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&m_pDeviceFilter ); if(SUCCEEDED(hr)) { GUID pPinCategory; int fmt_supported = 0; dump_format(m_pDeviceFilter); //basic testing for the device. if (try_format(m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0) fmt_supported = 1; else if (try_format(m_pDeviceFilter,MS_YUY2, &pPinCategory)==0) fmt_supported = 1; else if (try_format(m_pDeviceFilter,MS_YUYV, &pPinCategory)==0) fmt_supported = 1; else if (try_format(m_pDeviceFilter,MS_UYVY, &pPinCategory)==0) fmt_supported = 1; else if (try_format(m_pDeviceFilter,MS_RGB24, &pPinCategory)==0) fmt_supported = 1; else { ms_warning("Unsupported video pixel format/refuse camera (%s).", szName); } if (fmt_supported==1) { MSWebCam *cam=ms_web_cam_new(&ms_directx_cam_desc); cam->name=ms_strdup(szName); ms_web_cam_manager_add_cam(obj,cam); } m_pDeviceFilter->Release(); m_pDeviceFilter=NULL; } pMoniker->Release(); pBag->Release(); pMoniker=NULL; pBag=NULL; } pEnumMoniker->Release(); pCreateDevEnum->Release(); CoUninitialize(); }
static int v4w_open_videodevice(V4wState *s) { // Initialize COM CoInitialize(NULL); // get a Graph HRESULT hr= CoCreateInstance (CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, //IID_IBaseFilter, (void **)&s->m_pGraph); if(FAILED(hr)) { return -1; } // get a CaptureGraphBuilder2 hr= CoCreateInstance (CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, //IID_IBaseFilter, (void **)&s->m_pBuilder); if(FAILED(hr)) { return -2; } // connect capture graph builder with the graph s->m_pBuilder->SetFiltergraph(s->m_pGraph); // get mediacontrol so we can start and stop the filter graph hr=s->m_pGraph->QueryInterface (IID_IMediaControl, (void **)&s->m_pControl); if(FAILED(hr)) { return -3; } ICreateDevEnum *pCreateDevEnum = NULL; IEnumMoniker *pEnumMoniker = NULL; IMoniker *pMoniker = NULL; ULONG nFetched = 0; hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum); if(FAILED(hr)) { return -4; } hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumMoniker, 0); if (FAILED(hr) || pEnumMoniker == NULL) { //printf("no device\n"); return -5; } pEnumMoniker->Reset(); int pos=0; while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) ) { IPropertyBag *pBag; hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag ); if( hr != S_OK ) continue; if (s->dev[0]=='\0') break; VARIANT var; VariantInit(&var); hr = pBag->Read( L"FriendlyName", &var, NULL ); if( hr != S_OK ) { pMoniker->Release(); continue; } //USES_CONVERSION; char szName[256]; WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0); VariantClear(&var); if (strcmp(szName, s->dev)==0) break; pMoniker->Release(); pBag->Release(); pMoniker=NULL; pBag=NULL; } if(pMoniker==NULL) { return -6; } hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter ); if(FAILED(hr)) { return -7; } s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter"); pMoniker->Release(); pEnumMoniker->Release(); pCreateDevEnum->Release(); GUID pPinCategory; if (try_format(s->m_pDeviceFilter, s->pix_fmt, &pPinCategory)==0) s->pix_fmt = s->pix_fmt; else if (try_format(s->m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0) s->pix_fmt = MS_YUV420P; else if (try_format(s->m_pDeviceFilter,MS_YUY2, &pPinCategory)==0) s->pix_fmt = MS_YUY2; else if (try_format(s->m_pDeviceFilter,MS_YUYV, &pPinCategory)==0) s->pix_fmt = MS_YUYV; else if (try_format(s->m_pDeviceFilter,MS_UYVY, &pPinCategory)==0) s->pix_fmt = MS_UYVY; else if (try_format(s->m_pDeviceFilter,MS_RGB24, &pPinCategory)==0) s->pix_fmt = MS_RGB24; else { ms_error("Unsupported video pixel format."); return -8; } if (s->pix_fmt == MS_YUV420P) ms_message("Driver supports YUV420P, using that format."); else if (s->pix_fmt == MS_YUY2) ms_message("Driver supports YUY2 (YUYV), using that format."); else if (s->pix_fmt == MS_YUYV) ms_message("Driver supports YUV422, using that format."); else if (s->pix_fmt == MS_UYVY) ms_message("Driver supports UYVY, using that format."); else if (s->pix_fmt == MS_RGB24) ms_message("Driver supports RGB24, using that format."); if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, 
MS_VIDEO_SIZE_CIF_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H); else { ms_error("No supported size found for format."); /* size not supported? 
*/ return -9; } // get DXFilter s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE); if(s->m_pDXFilter==NULL) { return -10; } s->m_pDXFilter->AddRef(); CMediaType mt; mt.SetType(&MEDIATYPE_Video); GUID m = MEDIASUBTYPE_RGB24; if (s->pix_fmt == MS_YUV420P) m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0')); else if (s->pix_fmt == MS_YUY2) m = MEDIASUBTYPE_YUY2; else if (s->pix_fmt == MS_YUYV) m = MEDIASUBTYPE_YUYV; else if (s->pix_fmt == MS_UYVY) m = MEDIASUBTYPE_UYVY; else if (s->pix_fmt == MS_RGB24) m = MEDIASUBTYPE_RGB24; mt.SetSubtype(&m); mt.formattype = FORMAT_VideoInfo; mt.SetTemporalCompression(FALSE); VIDEOINFO *pvi = (VIDEOINFO *) mt.AllocFormatBuffer(sizeof(VIDEOINFO)); if (NULL == pvi) return -11; ZeroMemory(pvi, sizeof(VIDEOINFO)); if (s->pix_fmt == MS_YUV420P) pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0'); else if (s->pix_fmt == MS_YUY2) pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2'); else if (s->pix_fmt == MS_YUYV) pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','V'); else if (s->pix_fmt == MS_UYVY) pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y'); else if (s->pix_fmt == MS_RGB24) pvi->bmiHeader.biCompression = BI_RGB; if (s->pix_fmt == MS_YUV420P) pvi->bmiHeader.biBitCount = 12; else if (s->pix_fmt == MS_YUY2) pvi->bmiHeader.biBitCount = 16; else if (s->pix_fmt == MS_YUYV) pvi->bmiHeader.biBitCount = 16; else if (s->pix_fmt == MS_UYVY) pvi->bmiHeader.biBitCount = 16; else if (s->pix_fmt == MS_RGB24) pvi->bmiHeader.biBitCount = 24; pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER); pvi->bmiHeader.biWidth = s->vsize.width; pvi->bmiHeader.biHeight = s->vsize.height; pvi->bmiHeader.biPlanes = 1; pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader); pvi->bmiHeader.biClrImportant = 0; mt.SetSampleSize(pvi->bmiHeader.biSizeImage); mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO)); hr = s->m_pDXFilter->SetAcceptedMediaType(&mt); if(FAILED(hr)) { return -12; } hr = s->m_pDXFilter->SetCallback(Callback); if(FAILED(hr)) { return -13; } hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter, (LPVOID *)&s->m_pIDXFilter); if(FAILED(hr)) { return -14; } hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter"); if(FAILED(hr)) { return -15; } // get null renderer hr=CoCreateInstance (CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void **)&s->m_pNullRenderer); if(FAILED(hr)) { return -16; } if (s->m_pNullRenderer!=NULL) { s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer"); } hr = s->m_pBuilder->RenderStream(&pPinCategory, &MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer); if (FAILED(hr)) { return -17; } IAMStreamConfig *pConfig = NULL; hr = s->m_pBuilder->FindInterface( &pPinCategory, // Preview pin. &MEDIATYPE_Video, // Any media type. s->m_pDeviceFilter, // Pointer to the capture filter. 
IID_IAMStreamConfig, (void**)&pConfig); if (pConfig!=NULL) { AM_MEDIA_TYPE *pType = NULL; int iCount, iSize; pConfig->GetNumberOfCapabilities(&iCount, &iSize); for (int i = 0; i < iCount; i++) { VIDEO_STREAM_CONFIG_CAPS scc; pType = NULL; pConfig->GetStreamCaps(i, &pType, (BYTE *)&scc); if (!((pType->formattype == FORMAT_VideoInfo) && (pType->cbFormat >= sizeof(VIDEOINFOHEADER)) && (pType->pbFormat != NULL))) continue; VIDEOINFOHEADER & videoInfo = *(VIDEOINFOHEADER *)pType->pbFormat; if (m != pType->subtype) continue; if (videoInfo.bmiHeader.biWidth != s->vsize.width) continue; if (videoInfo.bmiHeader.biHeight != s->vsize.height) continue; if (videoInfo.bmiHeader.biBitCount != pvi->bmiHeader.biBitCount) continue; if (videoInfo.bmiHeader.biCompression != pvi->bmiHeader.biCompression) continue; videoInfo.AvgTimePerFrame = UNITS / (LONGLONG)s->fps; pConfig->SetFormat(pType); } pConfig->GetFormat(&pType); if (pType!=NULL) { VIDEOINFO *pvi; pvi = (VIDEOINFO *)pType->pbFormat; ms_message("v4w: camera asked fps=%.2f // real fps=%.2f", s->fps, ((float)UNITS / (float)pvi->AvgTimePerFrame)); } pConfig->Release(); } //m_pDXFilter->SetBufferSamples(TRUE); s_callback = s; hr = s->m_pControl->Run(); if(FAILED(hr)) { return -18; } s->rotregvalue=1; return 0; }
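// AvgTimePerFrame in the code above is expressed in DirectShow reference time,
// i.e. units of 100 nanoseconds (10,000,000 per second), which is why the code
// writes UNITS / fps and reports UNITS / AvgTimePerFrame. A minimal sketch of
// the two conversions, independent of the capture code; the constant name
// below is local so the sketch does not rely on the base-class headers.
#include <dshow.h>   // REFERENCE_TIME

static const LONGLONG UNITS_PER_SEC = 10000000;   // 100-ns units per second

static REFERENCE_TIME fps_to_frame_interval(double fps)
{
    // e.g. 30 fps  ->  333333 (about 33.3 ms per frame)
    return (REFERENCE_TIME)(UNITS_PER_SEC / fps);
}

static double frame_interval_to_fps(REFERENCE_TIME avg_time_per_frame)
{
    // e.g. 333333  ->  ~30.0 fps
    return (double)UNITS_PER_SEC / (double)avg_time_per_frame;
}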
static int v4w_open_videodevice(V4wState *s) { // Initialize COM CoInitialize(NULL); // get a Graph HRESULT hr=s->m_pGraph.CoCreateInstance(CLSID_FilterGraph); if(FAILED(hr)) { return -1; } // get a CaptureGraphBuilder2 hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder2); if(FAILED(hr)) { return -2; } // connect capture graph builder with the graph s->m_pBuilder->SetFiltergraph(s->m_pGraph); // get mediacontrol so we can start and stop the filter graph hr=s->m_pGraph.QueryInterface(&(s->m_pControl)); if(FAILED(hr)) { return -3; } #ifdef _DEBUG HANDLE m_hLogFile=CreateFile(L"DShowGraphLog.txt",GENERIC_READ|GENERIC_WRITE,FILE_SHARE_READ,NULL,OPEN_ALWAYS,FILE_ATTRIBUTE_NORMAL,NULL); if(m_hLogFile!=INVALID_HANDLE_VALUE) { hr=s->m_pGraph->SetLogFile((DWORD_PTR)m_hLogFile); /* ASSERT(SUCCEEDED(hr)); */ } //AddGraphToRot(s->m_pGraph, &s->rotregvalue); #endif ICreateDevEnum *pCreateDevEnum = NULL; IEnumMoniker *pEnumMoniker = NULL; IMoniker *pMoniker = NULL; ULONG nFetched = 0; hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum); if(FAILED(hr)) { return -4; } hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumMoniker, 0); if (FAILED(hr) || pEnumMoniker == NULL) { //printf("no device\n"); return -5; } pEnumMoniker->Reset(); int pos=0; while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) ) { if (pos>=s->devidx) break; pos++; pMoniker->Release(); pMoniker=NULL; } if(pMoniker==NULL) { return -6; } hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter ); if(FAILED(hr)) { return -7; } s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter"); pMoniker->Release(); pEnumMoniker->Release(); pCreateDevEnum->Release(); if (try_format(s, s->pix_fmt)==0) s->pix_fmt = s->pix_fmt; else if (try_format(s,MS_YUV420P)==0) s->pix_fmt = MS_YUV420P; else if (try_format(s,MS_YUY2)==0) s->pix_fmt = MS_YUY2; else if (try_format(s,MS_YUYV)==0) s->pix_fmt = MS_YUYV; else if (try_format(s,MS_UYVY)==0) s->pix_fmt = MS_UYVY; else if (try_format(s,MS_RGB24)==0) s->pix_fmt = MS_RGB24; else { ms_error("Unsupported video pixel format."); return -8; } if (s->pix_fmt == MS_YUV420P) ms_message("Driver supports YUV420P, using that format."); else if (s->pix_fmt == MS_YUY2) ms_message("Driver supports YUY2 (UYVY), using that format."); else if (s->pix_fmt == MS_YUYV) ms_message("Driver supports YUV422, using that format."); else if (s->pix_fmt == MS_UYVY) ms_message("Driver supports UYVY, using that format."); else if (s->pix_fmt == MS_RGB24) ms_message("Driver supports RGB24, using that format."); if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height)==0) ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H)==0) 
ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H); else { ms_error("No supported size found for format."); /* size not supported? */ return -9; } // get DXFilter s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE); if(s->m_pDXFilter==NULL) { return -10; } s->m_pDXFilter->AddRef(); CMediaType mt; mt.SetType(&MEDIATYPE_Video); GUID m = MEDIASUBTYPE_RGB24; if (s->pix_fmt == MS_YUV420P) m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0')); else if (s->pix_fmt == MS_YUY2) m = MEDIASUBTYPE_YUY2; else if (s->pix_fmt == MS_YUYV) m = MEDIASUBTYPE_YUYV; else if (s->pix_fmt == MS_UYVY) m = MEDIASUBTYPE_UYVY; else if (s->pix_fmt == MS_RGB24) m = MEDIASUBTYPE_RGB24; mt.SetSubtype(&m); mt.formattype = FORMAT_VideoInfo; mt.SetTemporalCompression(FALSE); VIDEOINFO *pvi = (VIDEOINFO *) mt.AllocFormatBuffer(sizeof(VIDEOINFO)); if (NULL == pvi) return -11; ZeroMemory(pvi, sizeof(VIDEOINFO)); if (s->pix_fmt == MS_YUV420P) pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0'); else if (s->pix_fmt == MS_YUY2) pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2'); else if (s->pix_fmt == MS_YUYV) pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','V'); else if (s->pix_fmt == MS_UYVY) pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y'); else if (s->pix_fmt == MS_RGB24) pvi->bmiHeader.biCompression = BI_RGB; if (s->pix_fmt == MS_YUV420P) pvi->bmiHeader.biBitCount = 12; else if (s->pix_fmt == MS_YUY2) pvi->bmiHeader.biBitCount = 16; else if (s->pix_fmt == MS_YUYV) pvi->bmiHeader.biBitCount = 16; else if (s->pix_fmt == MS_UYVY) pvi->bmiHeader.biBitCount = 16; else if (s->pix_fmt == MS_RGB24) pvi->bmiHeader.biBitCount = 24; pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER); pvi->bmiHeader.biWidth = s->vsize.width; pvi->bmiHeader.biHeight = s->vsize.height; pvi->bmiHeader.biPlanes = 1; pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader); pvi->bmiHeader.biClrImportant = 0; mt.SetSampleSize(pvi->bmiHeader.biSizeImage); mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO)); hr = s->m_pDXFilter->SetAcceptedMediaType(&mt); if(FAILED(hr)) { return -12; } hr = s->m_pDXFilter->SetCallback(Callback); if(FAILED(hr)) { return -13; } hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter, (LPVOID *)&s->m_pIDXFilter); if(FAILED(hr)) { return -14; } hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter"); if(FAILED(hr)) { return -15; } // get null renderer hr=s->m_pNullRenderer.CoCreateInstance(CLSID_NullRenderer); if(FAILED(hr)) { return -16; } if (s->m_pNullRenderer!=NULL) { s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer"); } hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer); if (FAILED(hr)) { //hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_CAPTURE, // &MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer); if (FAILED(hr)) { return -17; } } //m_pDXFilter->SetBufferSamples(TRUE); s_callback = s; hr = s->m_pControl->Run(); if(FAILED(hr)) { return -18; } s->rotregvalue=1; return 0; }
int main() { // for playing IGraphBuilder *pGraphBuilder; ICaptureGraphBuilder2 *pCaptureGraphBuilder2; IMediaControl *pMediaControl; IBaseFilter *pDeviceFilter = NULL; // to select a video input device ICreateDevEnum *pCreateDevEnum = NULL; IEnumMoniker *pEnumMoniker = NULL; IMoniker *pMoniker = NULL; ULONG nFetched = 0; // initialize COM CoInitialize(NULL); // // selecting a device // // Create CreateDevEnum to list device CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum); // Create EnumMoniker to list VideoInputDevice pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumMoniker, 0); if (pEnumMoniker == NULL) { // this will be shown if there is no capture device printf("no device\n"); return 0; } // reset EnumMoniker pEnumMoniker->Reset(); // get each Moniker while (pEnumMoniker->Next(1, &pMoniker, &nFetched) == S_OK) { IPropertyBag *pPropertyBag; TCHAR devname[256]; // bind to IPropertyBag pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropertyBag); VARIANT var; // get FriendlyName var.vt = VT_BSTR; pPropertyBag->Read(L"FriendlyName", &var, 0); WideCharToMultiByte(CP_ACP, 0, var.bstrVal, -1, devname, sizeof(devname), 0, 0); VariantClear(&var); printf("%s\r\n", devname); printf(" select this device ? [y] or [n]\r\n"); int ch = getchar(); // you can start playing by 'y' + return key // if you press the other key, it will not be played. if (ch == 'y') { // Bind Monkier to Filter pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pDeviceFilter ); } else { getchar(); } // release pMoniker->Release(); pPropertyBag->Release(); if (pDeviceFilter != NULL) { // go out of loop if getchar() returns 'y' break; } } if (pDeviceFilter != NULL) { // // PLAY // // create FilterGraph CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC, IID_IGraphBuilder, (LPVOID *)&pGraphBuilder); // create CaptureGraphBuilder2 CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC, IID_ICaptureGraphBuilder2, (LPVOID *)&pCaptureGraphBuilder2); //============================================================ //=========== MY CODE ====================================== //============================================================= HRESULT hr = CoInitialize(0); IAMStreamConfig *pConfig = NULL; hr = pCaptureGraphBuilder2->FindInterface(&PIN_CATEGORY_CAPTURE, 0, pDeviceFilter, IID_IAMStreamConfig, (void**)&pConfig); int iCount = 0, iSize = 0; hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize); // Check the size to make sure we pass in the correct structure. if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS)) { // Use the video capabilities structure. for (int iFormat = 0; iFormat < iCount; iFormat++) { VIDEO_STREAM_CONFIG_CAPS scc; AM_MEDIA_TYPE *pmtConfig; hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc); if (SUCCEEDED(hr)) { /* Examine the format, and possibly use it. */ if ((pmtConfig->majortype == MEDIATYPE_Video) && (pmtConfig->subtype == MEDIASUBTYPE_RGB24) && (pmtConfig->formattype == FORMAT_VideoInfo) && (pmtConfig->cbFormat >= sizeof (VIDEOINFOHEADER)) && (pmtConfig->pbFormat != NULL)) { VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat; // pVih contains the detailed format information. LONG lWidth = pVih->bmiHeader.biWidth; LONG lHeight = pVih->bmiHeader.biHeight; if( lWidth == 1280 ) // if (iFormat == 26) { //2 = '1280x720YUV' YUV, 22 = '1280x800YUV', 26 = '1280x720RGB' hr = pConfig->SetFormat(pmtConfig); } } // Delete the media type when you are done. 
DeleteMediaType(pmtConfig); } } } // Query the capture filter for the IAMCameraControl interface. IAMCameraControl *pCameraControl = 0; hr = pDeviceFilter->QueryInterface(IID_IAMCameraControl, (void**)&pCameraControl); if (FAILED(hr)) { // The device does not support IAMCameraControl } else { long Min, Max, Step, Default, Flags, Val; // Get the range and default values hr = pCameraControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step, &Default, &Flags); hr = pCameraControl->GetRange(CameraControl_Focus, &Min, &Max, &Step, &Default, &Flags); if (SUCCEEDED(hr)) { hr = pCameraControl->Set(CameraControl_Exposure, -11, CameraControl_Flags_Manual ); // Min = -11, Max = 1, Step = 1 hr = pCameraControl->Set(CameraControl_Focus, 12, CameraControl_Flags_Manual ); } } // Query the capture filter for the IAMVideoProcAmp interface. IAMVideoProcAmp *pProcAmp = 0; hr = pDeviceFilter->QueryInterface(IID_IAMVideoProcAmp, (void**)&pProcAmp); if (FAILED(hr)) { // The device does not support IAMVideoProcAmp } else { long Min, Max, Step, Default, Flags, Val; // Get the range and default values hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step, &Default, &Flags); hr = pProcAmp->GetRange(VideoProcAmp_BacklightCompensation, &Min, &Max, &Step, &Default, &Flags); hr = pProcAmp->GetRange(VideoProcAmp_Contrast, &Min, &Max, &Step, &Default, &Flags); hr = pProcAmp->GetRange(VideoProcAmp_Saturation, &Min, &Max, &Step, &Default, &Flags); hr = pProcAmp->GetRange(VideoProcAmp_Sharpness, &Min, &Max, &Step, &Default, &Flags); hr = pProcAmp->GetRange(VideoProcAmp_WhiteBalance, &Min, &Max, &Step, &Default, &Flags); if (SUCCEEDED(hr)) { hr = pProcAmp->Set(VideoProcAmp_Brightness, 142, VideoProcAmp_Flags_Manual); hr = pProcAmp->Set(VideoProcAmp_BacklightCompensation, 0, VideoProcAmp_Flags_Manual); hr = pProcAmp->Set(VideoProcAmp_Contrast, 4, VideoProcAmp_Flags_Manual); hr = pProcAmp->Set(VideoProcAmp_Saturation, 100, VideoProcAmp_Flags_Manual); hr = pProcAmp->Set(VideoProcAmp_Sharpness, 0, VideoProcAmp_Flags_Manual); hr = pProcAmp->Set(VideoProcAmp_WhiteBalance, 2800, VideoProcAmp_Flags_Manual); } } //============================================================ //=========== END MY CODE ====================================== //============================================================= hr = S_OK; CTransformer* trans = new CTransformer( "Dif trans", 0, CLSID_DIFFilter, &hr ); IBaseFilter * ttt = 0; trans->QueryInterface(IID_IBaseFilter, (LPVOID *)&ttt); // set FilterGraph hr = pCaptureGraphBuilder2->SetFiltergraph(pGraphBuilder); // get MediaControl interface hr = pGraphBuilder->QueryInterface(IID_IMediaControl, (LPVOID *)&pMediaControl); // add device filter to FilterGraph hr = pGraphBuilder->AddFilter(ttt, L"Dif trans"); hr = pGraphBuilder->AddFilter(pDeviceFilter, L"Device Filter"); // create Graph hr = pCaptureGraphBuilder2->RenderStream(&PIN_CATEGORY_CAPTURE, NULL, pDeviceFilter, NULL, NULL); // start playing hr = pMediaControl->Run(); // to block execution // without this messagebox, the graph will be stopped immediately MessageBox(NULL, "Block Execution", "Block", MB_OK); // release pMediaControl->Release(); pCaptureGraphBuilder2->Release(); pGraphBuilder->Release(); } // release pEnumMoniker->Release(); pCreateDevEnum->Release(); // finalize COM CoUninitialize(); return 0; }
// Enumerate all of the video input devices
// Return the filter with a matching friendly name
HRESULT GetVideoInputFilter(IBaseFilter** gottaFilter, wchar_t* matchName)
{
    BOOL done = false;

    // Create the System Device Enumerator.
    ICreateDevEnum *pSysDevEnum = NULL;
    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
                                  CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
                                  (void **)&pSysDevEnum);
    if (FAILED(hr)) {
        return hr;
    }

    // Obtain a class enumerator for the video input category.
    IEnumMoniker *pEnumCat = NULL;
    hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
                                            &pEnumCat, 0);

    if (hr == S_OK) {
        // Enumerate the monikers.
        IMoniker *pMoniker = NULL;
        ULONG cFetched;
        while ((pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) && (!done)) {
            // Bind the first moniker to an object
            IPropertyBag *pPropBag;
            hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
                                         (void **)&pPropBag);
            if (SUCCEEDED(hr)) {
                // To retrieve the filter's friendly name, do the following:
                VARIANT varName;
                VariantInit(&varName);
                hr = pPropBag->Read(L"FriendlyName", &varName, 0);
                if (SUCCEEDED(hr)) {
                    wprintf(L"Testing Video Input Device: %s\n",
                            varName.bstrVal);

                    // Do a comparison, find out if it's the right one
                    if (wcsncmp(varName.bstrVal, matchName,
                                wcslen(matchName)) == 0) {
                        // We found it, so send it back to the caller
                        hr = pMoniker->BindToObject(NULL, NULL,
                                                    IID_IBaseFilter,
                                                    (void**)gottaFilter);
                        done = true;
                    }
                }
                VariantClear(&varName);
                pPropBag->Release();
            }
            pMoniker->Release();
        }
        pEnumCat->Release();
    }
    pSysDevEnum->Release();

    if (done) {
        return hr;  // found it, return native error
    }
    else {
        return VFW_E_NOT_FOUND;  // didn't find it error
    }
}
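// How the helper above might be called; the device name and the surrounding
// graph object are placeholders, not part of the original code.
void AddNamedCameraToGraph(IGraphBuilder *pGraph)
{
    wchar_t matchName[] = L"USB Video Device";   // hypothetical friendly name
    IBaseFilter *pCam = NULL;

    HRESULT hr = GetVideoInputFilter(&pCam, matchName);
    if (SUCCEEDED(hr) && pCam) {
        pGraph->AddFilter(pCam, L"Capture");   // the graph holds its own reference
        pCam->Release();
    }
}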
void CVisualPage::OnCamSetupButton() { CComboBox * box = (CComboBox*)(GetDlgItem(IDC_RECORDING_COMBO)); int i = box->GetCurSel(); int n = box->GetLBTextLen(i); CString s; box->GetLBText(i, s.GetBuffer(n)); PString setupDeviceName = s; s.ReleaseBuffer(); if (setupDeviceName.IsEmpty()) return; if (setupDeviceName.Find("fake") == 0) return; if (setupDeviceName.Find("monitor") == 0) return; if (setupDeviceName.Find("zmonitor") == 0) return; PTRACE(4,"PVidDirectShow\tCurrent device: " << setupDeviceName); HRESULT hr; IBaseFilter * pFilter = NULL; IMoniker *pMoniker =NULL; ICreateDevEnum *pDevEnum =NULL; IEnumMoniker *pClassEnum = NULL; ULONG cFetched; ::CoInitialize(NULL); // Create the system device enumerator hr = CoCreateInstance (CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC, IID_ICreateDevEnum, (void **) &pDevEnum); if (FAILED(hr)) { ::CoUninitialize(); return; } // Create an enumerator for the video capture devices hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0); if (FAILED(hr)) { ::CoUninitialize(); return; } if (pClassEnum == NULL) { ::CoUninitialize(); return; } PTRACE(4,"PVidDirectShow\tEntering device enumeration loop..."); while (1) { // Get the next device hr = pClassEnum->Next(1, &pMoniker, &cFetched); if (hr != S_OK) { PTRACE(4, "PVidDirectShow\tGetInputDeviceNames() No more video capture device"); break; } // Get the property bag IPropertyBag *pPropBag; hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)(&pPropBag)); if (FAILED(hr)) { PTRACE(4,"PVidDerectShow\tBindToStorage failed, continue"); pMoniker->Release(); continue; } // Find the description or friendly name. VARIANT DeviceName; DeviceName.vt = VT_BSTR; hr = pPropBag->Read(L"Description", &DeviceName, NULL); if (FAILED(hr)) hr = pPropBag->Read(L"FriendlyName", &DeviceName, NULL); if (SUCCEEDED(hr)) { char *pDeviceName = BSTR_to_ANSI(DeviceName.bstrVal); if (pDeviceName) { PTRACE(4, "PVidDirectShow\tGetInputDeviceNames() Found this capture device '"<< pDeviceName <<"'"); if(PString(pDeviceName) == setupDeviceName) { PTRACE(4, "PVidDirectShow\tCamera Setup: device found"); pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**) &pFilter); ISpecifyPropertyPages *p_spec; CAUUID cauuid; HRESULT hr = pFilter->QueryInterface( IID_ISpecifyPropertyPages, (void **)&p_spec ); if( !FAILED(hr) ) if( SUCCEEDED(p_spec->GetPages( &cauuid )) ) { if( cauuid.cElems > 0 ) { HWND hwnd_desktop = ::GetDesktopWindow(); OleCreatePropertyFrame( hwnd_desktop, 30, 30, NULL, 1, (LPUNKNOWN *)(&pFilter), cauuid.cElems, cauuid.pElems, 0, 0, NULL ); CoTaskMemFree( cauuid.pElems ); } p_spec->Release(); } } free(pDeviceName); } } pPropBag->Release(); pMoniker->Release(); } ::CoUninitialize(); }
static gboolean gst_dshowvideosrc_start (GstBaseSrc * bsrc) { HRESULT hres = S_FALSE; GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc); DshowDeviceEntry *device_entry; IMoniker *moniker = NULL; device_entry = gst_dshow_select_device (&CLSID_VideoInputDeviceCategory, src->device, src->device_name, src->device_index); if (device_entry == NULL) { GST_ELEMENT_ERROR (src, RESOURCE, FAILED, ("Failed to find device"), (NULL)); return FALSE; } g_free (src->device); g_free (src->device_name); src->device = g_strdup (device_entry->device); src->device_name = g_strdup (device_entry->device_name); src->device_index = device_entry->device_index; moniker = device_entry->moniker; device_entry->moniker = NULL; gst_dshow_device_entry_free (device_entry); src->video_cap_filter = gst_dshow_create_capture_filter (moniker); moniker->Release (); if (src->video_cap_filter == NULL) { GST_ELEMENT_ERROR (src, RESOURCE, FAILED, ("Failed to create capture filter for device"), (NULL)); return FALSE; } src->caps = gst_dshowvideosrc_getcaps_from_capture_filter ( src->video_cap_filter, (GList**)&src->pins_mediatypes); if (gst_caps_is_empty (src->caps)) { GST_ELEMENT_ERROR (src, RESOURCE, FAILED, ("Failed to get any caps from devce"), (NULL)); return FALSE; } /* The filter graph now is created via the IGraphBuilder Interface Code added to build upstream filters, needed for USB Analog TV Tuners / DVD Maker, based on AMCap code. by Fabrice Costa <*****@*****.**> */ hres = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (LPVOID *) & src->graph_builder ); if (hres != S_OK || !src->graph_builder ) { GST_ERROR ("Can't create an instance of the dshow graph builder (error=0x%x)", hres); goto error; } else { /*graph builder is derived from IFilterGraph so we can assign it to the old src->filter_graph*/ src->filter_graph = (IFilterGraph*) src->graph_builder; } /*adding capture graph builder to correctly create upstream filters, Analog TV, TV Tuner */ hres = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (LPVOID *) & src->capture_builder); if ( hres != S_OK || !src->capture_builder ) { GST_ERROR ("Can't create an instance of the dshow capture graph builder manager (error=0x%x)", hres); goto error; } else { src->capture_builder->SetFiltergraph(src->graph_builder); } hres = src->filter_graph->QueryInterface (IID_IMediaFilter, (LPVOID *) & src->media_filter); if (hres != S_OK || !src->media_filter) { GST_ERROR ("Can't get IMediacontrol interface from the graph manager (error=0x%x)", hres); goto error; } src->dshow_fakesink = new CDshowFakeSink; src->dshow_fakesink->AddRef (); hres = src->filter_graph->AddFilter (src->video_cap_filter, L"capture"); if (hres != S_OK) { GST_ERROR ("Can't add video capture filter to the graph (error=0x%x)", hres); goto error; } /* Finding interfaces really creates the upstream filters */ hres = src->capture_builder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Interleaved, src->video_cap_filter, IID_IAMVideoCompression, (LPVOID *)&src->pVC); if(hres != S_OK) { hres = src->capture_builder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, src->video_cap_filter, IID_IAMVideoCompression, (LPVOID *)&src->pVC); } hres = src->capture_builder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Interleaved, src->video_cap_filter, IID_IAMStreamConfig, (LPVOID *)&src->pVSC); if(hres != S_OK) { hres = src->capture_builder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, src->video_cap_filter, IID_IAMStreamConfig, 
(LPVOID *)&src->pVSC); if (hres != S_OK) { /* this means we can't set frame rate (non-DV only) */ GST_ERROR ("Error %x: Cannot find VCapture:IAMStreamConfig", hres); goto error; } } hres = src->filter_graph->AddFilter (src->dshow_fakesink, L"sink"); if (hres != S_OK) { GST_ERROR ("Can't add our fakesink filter to the graph (error=0x%x)", hres); goto error; } return TRUE; error: GST_ELEMENT_ERROR (src, RESOURCE, FAILED, ("Failed to build filter graph"), (NULL)); if (src->dshow_fakesink) { src->dshow_fakesink->Release (); src->dshow_fakesink = NULL; } if (src->media_filter) { src->media_filter->Release (); src->media_filter = NULL; } if (src->graph_builder) { src->graph_builder->Release (); src->graph_builder = NULL; } if (src->capture_builder) { src->capture_builder->Release (); src->capture_builder = NULL; } if (src->pVC) { src->pVC->Release (); src->pVC = NULL; } if (src->pVSC) { src->pVSC->Release (); src->pVSC = NULL; } return FALSE; }
HRESULT TffdshowVideoInputPin::CheckMediaType(const CMediaType* mt) { if (mt->majortype != MEDIATYPE_Video && !(mt->majortype == MEDIATYPE_DVD_ENCRYPTED_PACK && supdvddec)) { return VFW_E_TYPE_NOT_ACCEPTED; } if (mt->subtype == MEDIASUBTYPE_DVD_SUBPICTURE) { return VFW_E_TYPE_NOT_ACCEPTED; } BITMAPINFOHEADER *hdr = NULL, hdr0; if (mt->formattype == FORMAT_VideoInfo) { VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)mt->pbFormat; hdr = &vih->bmiHeader; fixMPEGinAVI(hdr->biCompression); } else if (mt->formattype == FORMAT_VideoInfo2) { VIDEOINFOHEADER2 *vih2 = (VIDEOINFOHEADER2*)mt->pbFormat; hdr = &vih2->bmiHeader; fixMPEGinAVI(hdr->biCompression); } else if (mt->formattype == FORMAT_MPEGVideo) { MPEG1VIDEOINFO *mpeg1info = (MPEG1VIDEOINFO*)mt->pbFormat; hdr = &(hdr0 = mpeg1info->hdr.bmiHeader); hdr->biCompression = FOURCC_MPG1; } else if (mt->formattype == FORMAT_MPEG2Video) { MPEG2VIDEOINFO *mpeg2info = (MPEG2VIDEOINFO*)mt->pbFormat; hdr = &(hdr0 = mpeg2info->hdr.bmiHeader); if (hdr->biCompression == 0 || hdr->biCompression == 0x0038002d) { if (mt->subtype == MEDIASUBTYPE_H264_TRANSPORT) { hdr->biCompression = FOURCC_H264; } else if (mt->subtype == MEDIASUBTYPE_AVC1 || mt->subtype == MEDIASUBTYPE_avc1 || mt->subtype == MEDIASUBTYPE_H264 || mt->subtype == MEDIASUBTYPE_h264 || mt->subtype == MEDIASUBTYPE_CCV1) { hdr->biCompression = FOURCC_H264; } else { hdr->biCompression = FOURCC_MPG2; } } } else if (mt->formattype == FORMAT_TheoraIll) { sTheoraFormatBlock *oggFormat = (sTheoraFormatBlock*)mt->pbFormat; hdr = &hdr0; hdr->biWidth = oggFormat->width; hdr->biHeight = oggFormat->height; hdr->biCompression = FOURCC_THEO; } else if (mt->formattype == FORMAT_RLTheora) { hdr = &hdr0; hdr->biCompression = FOURCC_THEO; } else { return VFW_E_TYPE_NOT_ACCEPTED; } char_t pomS[60]; DPRINTF(_l("TffdshowVideoInputPin::CheckMediaType: %s, %i, %i"), fourcc2str(hdr2fourcc(hdr, &mt->subtype), pomS, 60), hdr->biWidth, hdr->biHeight); /* Information : WMP 11 and Media Center under Vista do not check for uncompressed format anymore, so no way to get ffdshow raw video decoder for postprocessing on uncompressed. So instead of saying "Media Type not supported", we says it is but only if there is an existing filter that can take this format in charge, and then ffdshow will be plugged after this codec (plug is done by TffdshowDecVideo::ConnectCompatibleFilter). */ int res = getVideoCodecId(hdr, &mt->subtype, NULL); OSVERSIONINFO osvi; ZeroMemory(&osvi, sizeof(OSVERSIONINFO)); osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFO); GetVersionEx(&osvi); ffstring exeFilename(fv->getExefilename()); exeFilename.ConvertToLowerCase(); if (res == 0 && pCompatibleFilter == NULL && fv->deci->getParam2(IDFF_alternateUncompressed) == 1 && // Enable WMP11 postprocessing fv->deci->getParam2(IDFF_rawv) != 0 && // Raw video not on disabled (exeFilename == _l("wmplayer.exe") || exeFilename == _l("ehshell.exe"))) { // Only WMP and Media Center are concerned bool doPostProcessing = false; if (osvi.dwMajorVersion > 5) { // OS >= VISTA doPostProcessing = true; } else if (osvi.dwMajorVersion == 5 // If OS=XP, check version of WMP && exeFilename == _l("ehshell.exe")) { // But only for Media Center // Read WMP version from the aRegistry HKEY hKey = NULL; LONG regErr; // Read WMP version from the following registry key regErr = RegOpenKeyEx(HKEY_LOCAL_MACHINE, _l("SOFTWARE\\Microsoft\\MediaPlayer\\Setup\\Installed Versions"), 0, KEY_READ, &hKey); if (regErr != ERROR_SUCCESS) { return res == AV_CODEC_ID_NONE ? 
VFW_E_TYPE_NOT_ACCEPTED : S_OK; } DWORD dwType; BYTE buf[4096] = { '\0' }; // make it big enough for any kind of values DWORD dwSize = sizeof(buf); regErr = RegQueryValueEx(hKey, _T("wmplayer.exe"), 0, &dwType, buf, &dwSize); if (hKey) { RegCloseKey(hKey); } if (regErr != ERROR_SUCCESS || dwType != REG_BINARY) { return res == AV_CODEC_ID_NONE ? VFW_E_TYPE_NOT_ACCEPTED : S_OK; } if (buf[2] >= 0x0b) { // Third byte is the major version number doPostProcessing = true; } } if (doPostProcessing) { DPRINTF(_l("TffdshowVideoInputPin::CheckMediaType: input format disabled or not supported. Trying to maintain in the graph...")); IFilterMapper2 *pMapper = NULL; IEnumMoniker *pEnum = NULL; HRESULT hr = CoCreateInstance(CLSID_FilterMapper2, NULL, CLSCTX_INPROC, IID_IFilterMapper2, (void **) &pMapper); if (FAILED(hr)) { // Error handling omitted for clarity. } GUID arrayInTypes[2]; arrayInTypes[0] = mt->majortype;//MEDIATYPE_Video; arrayInTypes[1] = mt->subtype;//MEDIASUBTYPE_dvsd; hr = pMapper->EnumMatchingFilters( &pEnum, 0, // Reserved. TRUE, // Use exact match? MERIT_DO_NOT_USE + 1, // Minimum merit. TRUE, // At least one input pin? 1, // Number of major type/subtype pairs for input. arrayInTypes, // Array of major type/subtype pairs for input. NULL, // Input medium. NULL, // Input pin category. FALSE, // Must be a renderer? TRUE, // At least one output pin? 0, // Number of major type/subtype pairs for output. NULL, // Array of major type/subtype pairs for output. NULL, // Output medium. NULL); // Output pin category. // Enumerate the monikers. IMoniker *pMoniker; ULONG cFetched; while (pEnum->Next(1, &pMoniker, &cFetched) == S_OK) { IPropertyBag *pPropBag = NULL; hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag); if (SUCCEEDED(hr)) { // To retrieve the friendly name of the filter, do the following: VARIANT varName; VariantInit(&varName); hr = pPropBag->Read(L"FriendlyName", &varName, 0); if (SUCCEEDED(hr)) { if (varName.pbstrVal == NULL || _strnicmp(FFDSHOW_NAME_L, varName.bstrVal, 22) != 0) { // Display the name in your UI somehow. DPRINTF(_l("TffdshowVideoInputPin::CheckMediaType: compatible filter found (%s)"), varName.pbstrVal); hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&pCompatibleFilter); } } // Now add the filter to the graph. Remember to release pFilter later. IFilterGraph *pGraph = NULL; fv->deci->getGraph(&pGraph); IGraphBuilder *pGraphBuilder = NULL; hr = pGraph->QueryInterface(IID_IGraphBuilder, (void **)&pGraphBuilder); if (hr == S_OK) { pGraphBuilder->AddFilter(pCompatibleFilter, varName.bstrVal); } else { pCompatibleFilter->Release(); pCompatibleFilter = NULL; } // Clean up. VariantClear(&varName); pGraphBuilder->Release(); pPropBag->Release(); } pMoniker->Release(); if (pCompatibleFilter != NULL) { break; } } // Clean up. pMapper->Release(); pEnum->Release(); } } if (pCompatibleFilter != NULL) { return S_OK; } return res == AV_CODEC_ID_NONE ? VFW_E_TYPE_NOT_ACCEPTED : S_OK; }
// delicious copypasta
static QList<QString> get_camera_names(void)
{
    QList<QString> ret;
#if defined(_WIN32)
    // Create the System Device Enumerator.
    HRESULT hr;
    ICreateDevEnum *pSysDevEnum = NULL;
    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                          IID_ICreateDevEnum, (void **)&pSysDevEnum);
    if (FAILED(hr)) {
        return ret;
    }

    // Obtain a class enumerator for the video input device category.
    IEnumMoniker *pEnumCat = NULL;
    hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
                                            &pEnumCat, 0);

    if (hr == S_OK) {
        // Enumerate the monikers.
        IMoniker *pMoniker = NULL;
        ULONG cFetched;
        while (pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) {
            IPropertyBag *pPropBag;
            hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
                                         (void **)&pPropBag);
            if (SUCCEEDED(hr)) {
                // To retrieve the filter's friendly name, do the following:
                VARIANT varName;
                VariantInit(&varName);
                hr = pPropBag->Read(L"FriendlyName", &varName, 0);
                if (SUCCEEDED(hr)) {
                    // Display the name in your UI somehow.
                    QString str((QChar*)varName.bstrVal,
                                wcslen(varName.bstrVal));
                    ret.append(str);
                }
                VariantClear(&varName);

                ////// To create an instance of the filter, do the following:
                ////IBaseFilter *pFilter;
                ////hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter,
                ////                            (void**)&pFilter);
                ////// Then add the filter to the graph and remember to
                ////// release pFilter later.

                pPropBag->Release();
            }
            pMoniker->Release();
        }
        pEnumCat->Release();
    }
    pSysDevEnum->Release();
#else
    for (int i = 0; i < 16; i++) {
        char buf[128];
        sprintf(buf, "/dev/video%d", i);
        if (access(buf, R_OK | W_OK) == 0) {
            ret.append(buf);
        }
    }
#endif
    return ret;
}
void VideoDeviceImpl::setup() { HRESULT hr = CoInitializeEx(nullptr, COINIT_MULTITHREADED); if (FAILED(hr)) return fail("Could not initialize video device."); hr = CoCreateInstance( CLSID_CaptureGraphBuilder2, nullptr, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void**) &cInterface->captureGraph_); if (FAILED(hr)) return fail("Could not create the Filter Graph Manager"); hr = CoCreateInstance(CLSID_FilterGraph, nullptr, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void**) &cInterface->graph_); if (FAILED(hr)) return fail("Could not add the graph builder!"); hr = cInterface->captureGraph_->SetFiltergraph(cInterface->graph_); if (FAILED(hr)) return fail("Could not set filtergraph."); ICreateDevEnum *pSysDevEnum = nullptr; hr = CoCreateInstance(CLSID_SystemDeviceEnum, nullptr, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void **)&pSysDevEnum); if (FAILED(hr)) return fail("Could not create the enumerator!"); IEnumMoniker* pEnumCat = nullptr; hr = pSysDevEnum->CreateClassEnumerator( CLSID_VideoInputDeviceCategory, &pEnumCat, 0); if (SUCCEEDED(hr)) { // Auto-deletion at if {} exist or at exception auto IEnumMonikerDeleter = [](IEnumMoniker* p){ p->Release(); }; std::unique_ptr<IEnumMoniker, decltype(IEnumMonikerDeleter)&> pEnumCatGuard {pEnumCat, IEnumMonikerDeleter}; IMoniker *pMoniker = nullptr; ULONG cFetched; unsigned int deviceCounter = 0; while ((pEnumCatGuard->Next(1, &pMoniker, &cFetched) == S_OK)) { if (deviceCounter == this->id) { IPropertyBag *pPropBag; hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void **)&pPropBag); if (SUCCEEDED(hr)) { VARIANT varName; VariantInit(&varName); hr = pPropBag->Read(L"FriendlyName", &varName, 0); if (SUCCEEDED(hr)) { int l = WideCharToMultiByte( CP_UTF8, 0, varName.bstrVal, -1, 0, 0, 0, 0); auto tmp = new char[l]; WideCharToMultiByte( CP_UTF8, 0, varName.bstrVal, -1, tmp, l, 0, 0); this->name = std::string(tmp); this->device = std::string("video=") + this->name; hr = pMoniker->BindToObject( nullptr, nullptr, IID_IBaseFilter, (void**)&cInterface->videoInputFilter_); if (SUCCEEDED(hr)) hr = cInterface->graph_->AddFilter( cInterface->videoInputFilter_, varName.bstrVal); else { fail("Could not add filter to video device."); } hr = cInterface->captureGraph_->FindInterface( &PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, cInterface->videoInputFilter_, IID_IAMStreamConfig, (void **)&cInterface->streamConf_); if(FAILED(hr)) { hr = cInterface->captureGraph_->FindInterface( &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, cInterface->videoInputFilter_, IID_IAMStreamConfig, (void **)&cInterface->streamConf_); if (FAILED(hr)) { fail("Couldn't config the stream!"); } } break; // Device found } VariantClear(&varName); pPropBag->Release(); pPropBag = nullptr; pMoniker->Release(); pMoniker = nullptr; } } deviceCounter++; } if (SUCCEEDED(hr)) { int piCount; int piSize; cInterface->streamConf_->GetNumberOfCapabilities(&piCount, &piSize); AM_MEDIA_TYPE *pmt; VIDEO_STREAM_CONFIG_CAPS pSCC; for (int i = 0; i < piCount; i++) { cInterface->streamConf_->GetStreamCaps(i, &pmt, (BYTE*)&pSCC); if (pmt->formattype == FORMAT_VideoInfo) { auto videoInfo = (VIDEOINFOHEADER*) pmt->pbFormat; sizeList_.emplace_back(videoInfo->bmiHeader.biWidth, videoInfo->bmiHeader.biHeight); rateList_[sizeList_.back()].emplace_back(1e7, pSCC.MinFrameInterval); rateList_[sizeList_.back()].emplace_back(1e7, pSCC.MaxFrameInterval); capMap_[sizeList_.back()] = pmt; } } } } pSysDevEnum->Release(); pSysDevEnum = NULL; }
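// VideoDeviceImpl::setup() above guards IEnumMoniker with a std::unique_ptr and
// a custom deleter so the Release() cannot be forgotten, while most other
// snippets in this collection release everything by hand. A minimal C++11
// sketch of extending that guard to any COM pointer; the alias name com_ptr is
// an assumption and not part of any of the code bases shown here.
#include <windows.h>
#include <dshow.h>
#include <memory>

struct ComReleaser {
    void operator()(IUnknown *p) const { if (p) p->Release(); }
};

template <typename T>
using com_ptr = std::unique_ptr<T, ComReleaser>;

// Example: obtain a video-input enumerator with no manual Release() calls.
inline com_ptr<IEnumMoniker> make_video_input_enum()
{
    ICreateDevEnum *devEnumRaw = nullptr;
    if (FAILED(CoCreateInstance(CLSID_SystemDeviceEnum, nullptr,
                                CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
                                (void **)&devEnumRaw)))
        return nullptr;
    com_ptr<ICreateDevEnum> devEnum{devEnumRaw};

    IEnumMoniker *enumRaw = nullptr;
    if (devEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
                                       &enumRaw, 0) != S_OK)
        return nullptr;
    return com_ptr<IEnumMoniker>{enumRaw};
}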
QStringList getCaptureDevices(GUID catGuid) //, QList<IMoniker*>& monList)
{
    // Create the System Device Enumerator.
    QStringList list;
    HRESULT hr;
    ICreateDevEnum *pDevEnum = NULL;
    IEnumMoniker *pEnum = NULL;

    // test sources
    if (catGuid == CLSID_VideoInputDeviceCategory) {
        list.append("VS-A");
        list.append("VS-B");
    } else {
        list.append("AS-A");
        list.append("AS-B");
    }

    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                          IID_ICreateDevEnum, (void **)&pDevEnum);
    //CHECK(hr, "create SystemDeviceEnum");

    // populate the list of video sources
    hr = pDevEnum->CreateClassEnumerator(catGuid, &pEnum, 0);
    //CHECK(hr, "create ClassEnumerator");
    if (pEnum == NULL)
        qDebug() << "bad pEnum";

    IMoniker *pMoniker = NULL;
    while ((pEnum != NULL) && pEnum->Next(1, &pMoniker, NULL) == S_OK)
    {
        IPropertyBag *pPropBag;
        hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag);
        if (SUCCEEDED(hr)) {
            // To retrieve the filter's friendly name, do the following:
            VARIANT varName;
            VariantInit(&varName);
            hr = pPropBag->Read(L"FriendlyName", &varName, 0);
            if (SUCCEEDED(hr))
            {
                char* pN = _com_util::ConvertBSTRToString(varName.bstrVal);
                QString txt = QString::fromLocal8Bit(pN); // otherwise mojibake instead of Cyrillic
                delete[] pN; // ConvertBSTRToString allocates with new[]
                list.append(txt);
            }
            VariantClear(&varName);
            pPropBag->Release();
        }
        pMoniker->Release();
    }
    if (pEnum != NULL) pEnum->Release();
    if (pDevEnum != NULL) pDevEnum->Release();
    return list;
}
int Captura::Enumerar(HWND hList)
{
    if (!hList)
        return -1;

    int id = 0;

    //CComPtr<ICreateDevEnum> pCreateDevEnum;
    ICreateDevEnum *pCreateDevEnum;
    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                                  IID_ICreateDevEnum, (void**)&pCreateDevEnum);
    if (hr != NOERROR) {
        return -1;
    }

    // CComPtr<IEnumMoniker> pEm;
    IEnumMoniker *pEm;
    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEm, 0);
    if (hr != NOERROR) {
        pCreateDevEnum->Release();
        return -1;
    }

    pEm->Reset();
    ULONG cFetched;
    IMoniker *pM;
    while (hr = pEm->Next(1, &pM, &cFetched), hr == S_OK)
    {
        IPropertyBag *pBag;
        hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pBag);
        if (SUCCEEDED(hr)) {
            VARIANT var;
            var.vt = VT_BSTR;
            hr = pBag->Read(L"FriendlyName", &var, NULL);
            if (hr == NOERROR) {
                char str[2048];
                id++;
                WideCharToMultiByte(CP_ACP, 0, var.bstrVal, -1, str, 2048, NULL, NULL);
                SendMessage(hList, CB_ADDSTRING, 0, (LPARAM)str);
                SysFreeString(var.bstrVal);
            }
            pBag->Release();
        }
        pM->Release();
    }

    // Release the enumerators so they are not leaked.
    pEm->Release();
    pCreateDevEnum->Release();
    return id;
}
/** Accesses the correct visual studio instance if possible. */ EAccessVisualStudioResult AccessVisualStudioViaDTE(TComPtr<EnvDTE::_DTE>& OutDTE, const FString& InSolutionPath, const TArray<FVisualStudioSourceCodeAccessor::VisualStudioLocation>& InLocations) { EAccessVisualStudioResult AccessResult = EAccessVisualStudioResult::VSInstanceIsNotOpen; // Open the Running Object Table (ROT) IRunningObjectTable* RunningObjectTable; if(SUCCEEDED(GetRunningObjectTable(0, &RunningObjectTable)) && RunningObjectTable) { IEnumMoniker* MonikersTable; if(SUCCEEDED(RunningObjectTable->EnumRunning(&MonikersTable))) { MonikersTable->Reset(); // Look for all visual studio instances in the ROT IMoniker* CurrentMoniker; while(AccessResult != EAccessVisualStudioResult::VSInstanceIsOpen && MonikersTable->Next(1, &CurrentMoniker, NULL) == S_OK) { IBindCtx* BindContext; LPOLESTR OutName; if(SUCCEEDED(CreateBindCtx(0, &BindContext)) && SUCCEEDED(CurrentMoniker->GetDisplayName(BindContext, NULL, &OutName))) { if(IsVisualStudioDTEMoniker(FString(OutName), InLocations)) { TComPtr<IUnknown> ComObject; if(SUCCEEDED(RunningObjectTable->GetObject(CurrentMoniker, &ComObject))) { TComPtr<EnvDTE::_DTE> TempDTE; if (SUCCEEDED(TempDTE.FromQueryInterface(__uuidof(EnvDTE::_DTE), ComObject))) { // Get the solution path for this instance // If it equals the solution we would have opened above in RunVisualStudio(), we'll take that TComPtr<EnvDTE::_Solution> Solution; BSTR OutPath = nullptr; if (SUCCEEDED(TempDTE->get_Solution(&Solution)) && SUCCEEDED(Solution->get_FullName(&OutPath))) { FString Filename(OutPath); FPaths::NormalizeFilename(Filename); if (Filename == InSolutionPath) { OutDTE = TempDTE; AccessResult = EAccessVisualStudioResult::VSInstanceIsOpen; } SysFreeString(OutPath); } else { UE_LOG(LogVSAccessor, Warning, TEXT("Visual Studio is open but could not be queried - it may be blocked by a modal operation")); AccessResult = EAccessVisualStudioResult::VSInstanceIsBlocked; } } else { UE_LOG(LogVSAccessor, Warning, TEXT("Could not get DTE interface from returned Visual Studio instance")); AccessResult = EAccessVisualStudioResult::VSInstanceIsBlocked; } } else { UE_LOG(LogVSAccessor, Warning, TEXT("Couldn't get Visual Studio COM object")); AccessResult = EAccessVisualStudioResult::VSInstanceUnknown; } } } else { UE_LOG(LogVSAccessor, Warning, TEXT("Couldn't get display name")); AccessResult = EAccessVisualStudioResult::VSInstanceUnknown; } BindContext->Release(); CurrentMoniker->Release(); } MonikersTable->Release(); } else { UE_LOG(LogVSAccessor, Warning, TEXT("Couldn't enumerate ROT table")); AccessResult = EAccessVisualStudioResult::VSInstanceUnknown; } RunningObjectTable->Release(); } else { UE_LOG(LogVSAccessor, Warning, TEXT("Couldn't get ROT table")); AccessResult = EAccessVisualStudioResult::VSInstanceUnknown; } return AccessResult; }
static int v4w_configure_videodevice(V4wState *s) { // Initialize COM CoInitialize(NULL); // get a Graph HRESULT hr= CoCreateInstance (CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, //IID_IBaseFilter, (void **)&s->m_pGraph); if(FAILED(hr)) { return -1; } // get a CaptureGraphBuilder2 hr= CoCreateInstance (CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, //IID_IBaseFilter, (void **)&s->m_pBuilder); if(FAILED(hr)) { return -2; } // connect capture graph builder with the graph s->m_pBuilder->SetFiltergraph(s->m_pGraph); // get mediacontrol so we can start and stop the filter graph hr=s->m_pGraph->QueryInterface (IID_IMediaControl, (void **)&s->m_pControl); if(FAILED(hr)) { return -3; } ICreateDevEnum *pCreateDevEnum = NULL; IEnumMoniker *pEnumMoniker = NULL; IMoniker *pMoniker = NULL; ULONG nFetched = 0; hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum); if(FAILED(hr)) { return -4; } hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumMoniker, 0); if (FAILED(hr) || pEnumMoniker == NULL) { //printf("no device\n"); return -5; } pEnumMoniker->Reset(); int pos=0; while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) ) { IPropertyBag *pBag; hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag ); if( hr != S_OK ) continue; if (s->dev[0]=='\0') break; VARIANT var; VariantInit(&var); hr = pBag->Read( L"FriendlyName", &var, NULL ); if( hr != S_OK ) { pMoniker->Release(); continue; } //USES_CONVERSION; char szName[256]; WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0); VariantClear(&var); if (strcmp(szName, s->dev)==0) break; pMoniker->Release(); pBag->Release(); pMoniker=NULL; pBag=NULL; } if(pMoniker==NULL) { int pos=0; while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) ) { IPropertyBag *pBag; hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag ); if( hr != S_OK ) continue; } } if(pMoniker==NULL) { return -6; } hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter ); if(FAILED(hr)) { return -7; } s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter"); pMoniker->Release(); pEnumMoniker->Release(); pCreateDevEnum->Release(); GUID pPinCategory; if (try_format(s->m_pDeviceFilter, s->pix_fmt, &pPinCategory)==0) s->pix_fmt = s->pix_fmt; else if (try_format(s->m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0) s->pix_fmt = MS_YUV420P; else if (try_format(s->m_pDeviceFilter,MS_YUY2, &pPinCategory)==0) s->pix_fmt = MS_YUY2; else if (try_format(s->m_pDeviceFilter,MS_YUYV, &pPinCategory)==0) s->pix_fmt = MS_YUYV; else if (try_format(s->m_pDeviceFilter,MS_UYVY, &pPinCategory)==0) s->pix_fmt = MS_UYVY; else if (try_format(s->m_pDeviceFilter,MS_RGB24, &pPinCategory)==0) s->pix_fmt = MS_RGB24; else { ms_error("Unsupported video pixel format."); return -8; } if (s->pix_fmt == MS_YUV420P) ms_message("Driver supports YUV420P, using that format."); else if (s->pix_fmt == MS_YUY2) ms_message("Driver supports YUY2 (YUYV), using that format."); else if (s->pix_fmt == MS_YUYV) ms_message("Driver supports YUV422, using that format."); else if (s->pix_fmt == MS_UYVY) ms_message("Driver supports UYVY, using that format."); else if (s->pix_fmt == MS_RGB24) ms_message("Driver supports RGB24, using that format."); if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height); else if (try_format_size(s, 
s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H, &pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H); else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H, 
&pPinCategory)==0) ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H); else { ms_error("No supported size found for format."); /* size not supported? */ return -9; } return 0; }
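The long try_format_size ladder above walks a fixed set of candidate resolutions in priority order. The same selection can be expressed as a loop over a table, which keeps the priority order in one place. The sketch below assumes the same try_format_size signature, V4wState fields and MS_VIDEO_SIZE_* macros used above; only a few table entries are shown and the sketch_ name is illustrative.

// Candidate sizes, in the same priority order as the ladder above.
static const struct { int w, h; } kCandidateSizes[] = {
    { MS_VIDEO_SIZE_QCIF_W,  MS_VIDEO_SIZE_QCIF_H  },
    { MS_VIDEO_SIZE_CIF_W,   MS_VIDEO_SIZE_CIF_H   },
    { MS_VIDEO_SIZE_4CIF_W,  MS_VIDEO_SIZE_4CIF_H  },
    { MS_VIDEO_SIZE_QVGA_W,  MS_VIDEO_SIZE_QVGA_H  },
    { MS_VIDEO_SIZE_VGA_W,   MS_VIDEO_SIZE_VGA_H   },
    { MS_VIDEO_SIZE_720P_W,  MS_VIDEO_SIZE_720P_H  },
    { MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H },
    /* ... remaining MS_VIDEO_SIZE_* entries, in the order used above ... */
};

// Sketch: try the requested size first, then fall back through the table.
static int sketch_select_size(V4wState *s, GUID *pPinCategory)
{
    if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height, pPinCategory) == 0) {
        ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
        return 0;
    }
    for (size_t i = 0; i < sizeof(kCandidateSizes) / sizeof(kCandidateSizes[0]); ++i) {
        if (try_format_size(s, s->pix_fmt, kCandidateSizes[i].w, kCandidateSizes[i].h, pPinCategory) == 0) {
            ms_message("Selected Size: %ix%i.", kCandidateSizes[i].w, kCandidateSizes[i].h);
            return 0;
        }
    }
    ms_error("No supported size found for format.");
    return -9;
}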
/* * Enumerate all video devices * * See also: * * Using the System Device Enumerator: * http://msdn2.microsoft.com/en-us/library/ms787871.aspx */ int enum_devices() { HRESULT hr; printf("Enumerating video input devices ...\n"); // Create the System Device Enumerator. ICreateDevEnum *pSysDevEnum = NULL; hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void **)&pSysDevEnum); if(FAILED(hr)) { fprintf(stderr, "ERROR: Unable to create system device enumerator.\n"); return hr; } // Obtain a class enumerator for the video input device category. IEnumMoniker *pEnumCat = NULL; hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0); if(hr == S_OK) { // Enumerate the monikers. IMoniker *pMoniker = NULL; ULONG cFetched; while(pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) { IPropertyBag *pPropBag; hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag); if(SUCCEEDED(hr)) { // To retrieve the filter's friendly name, do the following: VARIANT varName; VariantInit(&varName); hr = pPropBag->Read(L"FriendlyName", &varName, 0); if (SUCCEEDED(hr)) { // Display the name in your UI somehow. wprintf(L" Found device: %s\n", varName.bstrVal); } VariantClear(&varName); // To create an instance of the filter, do the following: IBaseFilter *pFilter; hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&pFilter); process_filter(pFilter); //Remember to release pFilter later. pPropBag->Release(); } pMoniker->Release(); } pEnumCat->Release(); } pSysDevEnum->Release(); return 0; }
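The enum_devices() routine above binds each moniker to an IBaseFilter and hands it to process_filter(), but never releases it, so unless process_filter() takes ownership, every enumerated device leaks a filter reference. A minimal sketch of the guarded call, assuming process_filter() does not keep the pointer:

IBaseFilter *pFilter = NULL;
hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&pFilter);
if (SUCCEEDED(hr) && pFilter) {
    process_filter(pFilter);
    pFilter->Release();   // drop our reference once process_filter() is done with it
    pFilter = NULL;
}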
bool directx_camera_server::open_and_find_parameters(const int which, unsigned width, unsigned height) { HRESULT hr; //------------------------------------------------------------------- // Create COM and DirectX objects needed to access a video stream. // Initialize COM. This must have a matching uninitialize somewhere before // the object is destroyed. #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Before CoInitialize\n"); #endif CoInitialize(NULL); // Create the filter graph manager #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance FilterGraph\n"); #endif CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (void **)&_pGraph); if (_pGraph == NULL) { fprintf(stderr, "directx_camera_server::open_and_find_parameters(): Can't create graph manager\n"); return false; } _pGraph->QueryInterface(IID_IMediaControl, (void **)&_pMediaControl); _pGraph->QueryInterface(IID_IMediaEvent, (void **)&_pEvent); // Create the Capture Graph Builder. #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance CaptureGraphBuilder2\n"); #endif CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC, IID_ICaptureGraphBuilder2, (void **)&_pBuilder); if (_pBuilder == NULL) { fprintf(stderr, "directx_camera_server::open_and_find_parameters(): Can't create graph builder\n"); return false; } // Associate the graph with the builder. #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Before SetFilterGraph\n"); #endif _pBuilder->SetFiltergraph(_pGraph); //------------------------------------------------------------------- // Go find a video device to use: in this case, we are using the Nth // one we find, where the number N is the "which" parameter. // Create the system device enumerator. #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance SystemDeviceEnum\n"); #endif ICreateDevEnum *pDevEnum = NULL; CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC, IID_ICreateDevEnum, (void **)&pDevEnum); if (pDevEnum == NULL) { fprintf(stderr, "directx_camera_server::open_and_find_parameters(): Can't create device enumerator\n"); return false; } // Create an enumerator for video capture devices. #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Before CreateClassEnumerator\n"); #endif IEnumMoniker *pClassEnum = NULL; pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0); if (pClassEnum == NULL) { fprintf(stderr, "directx_camera_server::open_and_find_parameters(): Can't create video enumerator (no cameras?)\n"); pDevEnum->Release(); return false; } #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Before Loop over enumerators\n"); #endif ULONG cFetched; IMoniker *pMoniker = NULL; IBaseFilter *pSrc = NULL; // Skip (which - 1) cameras int i; for (i = 0; i < which-1 ; i++) { if (pClassEnum->Next(1, &pMoniker, &cFetched) != S_OK) { fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't open camera (not enough cameras)\n"); pMoniker->Release(); return false; } } // Take the next camera and bind it if (pClassEnum->Next(1, &pMoniker, &cFetched) == S_OK) { // Bind the first moniker to a filter object. 
pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pSrc); pMoniker->Release(); } else { fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't open camera (not enough cameras)\n"); pMoniker->Release(); return false; } pClassEnum->Release(); pDevEnum->Release(); //------------------------------------------------------------------- // Construct the sample grabber callback handler that will be used // to receive image data from the sample grabber. if ( (_pCallback = new directx_samplegrabber_callback()) == NULL) { fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't create sample grabber callback handler (out of memory?)\n"); return false; } //------------------------------------------------------------------- // Construct the sample grabber that will be used to snatch images from // the video stream as they go by. Set its media type and callback. // Create the Sample Grabber. #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance SampleGrabber\n"); #endif CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, reinterpret_cast<void**>(&_pSampleGrabberFilter)); if (_pSampleGrabberFilter == NULL) { fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't get SampleGrabber filter (not DirectX 8.1+?)\n"); return false; } #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Before QueryInterface\n"); #endif _pSampleGrabberFilter->QueryInterface(IID_ISampleGrabber, reinterpret_cast<void**>(&_pGrabber)); // Set the media type to video #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Before SetMediaType\n"); #endif AM_MEDIA_TYPE mt; // Ask for video media producers that produce 8-bit RGB ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE)); mt.majortype = MEDIATYPE_Video; // Ask for video media producers mt.subtype = MEDIASUBTYPE_RGB24; // Ask for 8 bit RGB _pGrabber->SetMediaType(&mt); //------------------------------------------------------------------- // Ask for the video resolution that has been passed in. // This code is based on // intuiting that we need to use the SetFormat call on the IAMStreamConfig // interface; this interface is described in the help pages. // If the width and height are specified as 0, then they are not set // in the header, letting them use whatever is the default. if ( (width != 0) && (height != 0) ) { _pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, pSrc, IID_IAMStreamConfig, (void **)&_pStreamConfig); if (_pStreamConfig == NULL) { fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't get StreamConfig interface\n"); return false; } ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE)); mt.majortype = MEDIATYPE_Video; // Ask for video media producers mt.subtype = MEDIASUBTYPE_RGB24; // Ask for 8 bit RGB mt.pbFormat = (BYTE*)CoTaskMemAlloc(sizeof(VIDEOINFOHEADER)); VIDEOINFOHEADER *pVideoHeader = (VIDEOINFOHEADER*)mt.pbFormat; ZeroMemory(pVideoHeader, sizeof(VIDEOINFOHEADER)); pVideoHeader->bmiHeader.biBitCount = 24; pVideoHeader->bmiHeader.biWidth = width; pVideoHeader->bmiHeader.biHeight = height; pVideoHeader->bmiHeader.biPlanes = 1; pVideoHeader->bmiHeader.biSize = sizeof(BITMAPINFOHEADER); pVideoHeader->bmiHeader.biSizeImage = DIBSIZE(pVideoHeader->bmiHeader); // Set the format type and size. mt.formattype = FORMAT_VideoInfo; mt.cbFormat = sizeof(VIDEOINFOHEADER); // Set the sample size. 
mt.bFixedSizeSamples = TRUE; mt.lSampleSize = DIBSIZE(pVideoHeader->bmiHeader); // Make the call to actually set the video type to what we want. if (_pStreamConfig->SetFormat(&mt) != S_OK) { fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't set resolution to %dx%d\n", pVideoHeader->bmiHeader.biWidth, pVideoHeader->bmiHeader.biHeight); return false; } // Clean up the pbFormat header memory we allocated above. CoTaskMemFree(mt.pbFormat); } //------------------------------------------------------------------- // Create a NULL renderer that will be used to discard the video frames // on the output pin of the sample grabber #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance NullRenderer\n"); #endif IBaseFilter *pNull = NULL; CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, reinterpret_cast<void**>(&pNull)); //------------------------------------------------------------------- // Build the filter graph. First add the filters and then connect them. // pSrc is the capture filter for the video device we found above. _pGraph->AddFilter(pSrc, L"Video Capture"); // Add the sample grabber filter _pGraph->AddFilter(_pSampleGrabberFilter, L"SampleGrabber"); // Add the null renderer filter _pGraph->AddFilter(pNull, L"NullRenderer"); // Connect the output of the video reader to the sample grabber input ConnectTwoFilters(_pGraph, pSrc, _pSampleGrabberFilter); // Connect the output of the sample grabber to the NULL renderer input ConnectTwoFilters(_pGraph, _pSampleGrabberFilter, pNull); //------------------------------------------------------------------- // XXX See if this is a video tuner card by querying for that interface. // Set it to read the video channel if it is one. IAMTVTuner *pTuner = NULL; hr = _pBuilder->FindInterface(NULL, NULL, pSrc, IID_IAMTVTuner, (void**)&pTuner); if (pTuner != NULL) { #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Found a TV Tuner!\n"); #endif //XXX Put code here. // Set the first input pin to use the cable as input hr = pTuner->put_InputType(0, TunerInputCable); if (FAILED(hr)) { fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't set input to cable\n"); } // Set the channel on the video to be baseband (is this channel zero?) hr = pTuner->put_Channel(0, -1, -1); if (FAILED(hr)) { fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't set channel\n"); } pTuner->Release(); } //------------------------------------------------------------------- // Find _num_rows and _num_columns in the video stream. _pGrabber->GetConnectedMediaType(&mt); VIDEOINFOHEADER *pVih; if (mt.formattype == FORMAT_VideoInfo) { pVih = reinterpret_cast<VIDEOINFOHEADER*>(mt.pbFormat); } else { fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't get video header type\n"); return false; } // Number of rows and columns. This is different if we are using a target // rectangle (rcTarget) than if we are not. 
if (IsRectEmpty(&pVih->rcTarget)) {
    _num_columns = pVih->bmiHeader.biWidth;
    _num_rows = pVih->bmiHeader.biHeight;
  } else {
    _num_columns = pVih->rcTarget.right;
    _num_rows = pVih->bmiHeader.biHeight;
    printf("XXX directx_camera_server::open_and_find_parameters(): Warning: may not work correctly with target rectangle\n");
  }
  _minX = 0; _maxX = _num_columns - 1;
  _minY = 0; _maxY = _num_rows - 1;
#ifdef DEBUG
  printf("Got %dx%d video\n", _num_columns, _num_rows);
#endif

  // Make sure that the image is not compressed and that we have 8 bits
  // per pixel.
  if (pVih->bmiHeader.biCompression != BI_RGB) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Compression not RGB\n");
    switch (pVih->bmiHeader.biCompression) {
      case BI_RLE8:
        fprintf(stderr," (It is BI_RLE8)\n");
        break;
      case BI_RLE4:
        fprintf(stderr," (It is BI_RLE4)\n");
        break;
      case BI_BITFIELDS:
        fprintf(stderr," (It is BI_BITFIELDS)\n");
        break;
      default:
        fprintf(stderr," (Unknown compression type)\n");
    }
    return false;
  }
  int BytesPerPixel = pVih->bmiHeader.biBitCount / 8;
  if (BytesPerPixel != 3) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Not 3 bytes per pixel (%d)\n",
      pVih->bmiHeader.biBitCount);
    return false;
  }

  // A negative height indicates that the images are stored non-inverted in Y.
  // Not sure what to do with images that have negative height -- need to
  // read the book some more to find out.
  if (_num_rows < 0) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Num Rows is negative (internal error)\n");
    return false;
  }

  // Find the stride to take when moving from one row of video to the
  // next. This is rounded up to the nearest DWORD.
  _stride = (_num_columns * BytesPerPixel + 3) & ~3;

  // Set the callback, where '0' means 'use the SampleCB callback'
  _pGrabber->SetCallback(_pCallback, 0);

  //-------------------------------------------------------------------
  // Release resources that won't be used later and return
  pSrc->Release();
  pNull->Release();
  return true;
}
static int v4w_open_videodevice(V4wState *s, int format, MSVideoSize *vsize) { // Initialize COM CoInitialize(NULL); // get a Graph HRESULT hr=s->m_pGraph.CoCreateInstance(CLSID_FilterGraph); if(FAILED(hr)) { return -1; } // get a CaptureGraphBuilder2 #if !defined(_WIN32_WCE) hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder2); #else hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder); #endif if(FAILED(hr)) { return -2; } // connect capture graph builder with the graph s->m_pBuilder->SetFiltergraph(s->m_pGraph); // get mediacontrol so we can start and stop the filter graph hr=s->m_pGraph.QueryInterface(&(s->m_pControl)); if(FAILED(hr)) { return -3; } // get DXFilter s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE); if(s->m_pDXFilter==NULL) { return -4; } s->m_pDXFilter->AddRef(); if(FAILED(hr)) { return -4; } CMediaType mt; mt.SetType(&MEDIATYPE_Video); if (format==MS_YUV420P) { GUID m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0')); mt.SetSubtype(&m); mt.SetSubtype(&MEDIASUBTYPE_YV12); } else //if (format==MS_RGB24) { mt.SetSubtype(&MEDIASUBTYPE_RGB24); } //mt.SetSubtype(&MEDIASUBTYPE_IYUV); //mt.SetSubtype(&MEDIASUBTYPE_YUYV); //mt.SetSubtype(&MEDIASUBTYPE_RGB24); //mt.SetSampleSize(); mt.formattype = FORMAT_VideoInfo; mt.SetTemporalCompression(FALSE); VIDEOINFO *pvi = (VIDEOINFO *) mt.AllocFormatBuffer(sizeof(VIDEOINFO)); if (NULL == pvi) return E_OUTOFMEMORY; ZeroMemory(pvi, sizeof(VIDEOINFO)); if (format==MS_YUV420P) { pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0'); pvi->bmiHeader.biCompression = MAKEFOURCC('Y','V','1','2'); pvi->bmiHeader.biBitCount = 12; } else { pvi->bmiHeader.biCompression = BI_RGB; pvi->bmiHeader.biBitCount = 24; } pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER); pvi->bmiHeader.biWidth = vsize->width; pvi->bmiHeader.biHeight = vsize->height; pvi->bmiHeader.biPlanes = 1; pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader); pvi->bmiHeader.biClrImportant = 0; mt.SetSampleSize(pvi->bmiHeader.biSizeImage); mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO)); hr = s->m_pDXFilter->SetAcceptedMediaType(&mt); if(FAILED(hr)) { return -5; } hr = s->m_pDXFilter->SetCallback(Callback); if(FAILED(hr)) { return -6; } hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter, (LPVOID *)&s->m_pIDXFilter); if(FAILED(hr)) { return -7; } hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter"); if(FAILED(hr)) { return -8; } #ifdef WM6 ICreateDevEnum *pCreateDevEnum = NULL; IEnumMoniker *pEnumMoniker = NULL; IMoniker *pMoniker = NULL; ULONG nFetched = 0; hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum); if(FAILED(hr)) { return -9; } hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumMoniker, 0); if (FAILED(hr) || pEnumMoniker == NULL) { //printf("no device\n"); return -10; } pEnumMoniker->Reset(); hr = pEnumMoniker->Next(1, &pMoniker, &nFetched); if(FAILED(hr) || pMoniker==NULL) { return -11; } hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter ); if(FAILED(hr)) { return -12; } s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter"); pMoniker->Release(); pEnumMoniker->Release(); pCreateDevEnum->Release(); #else WCHAR wzDeviceName[ MAX_PATH + 1 ]; CComVariant varCamName; CPropertyBag PropBag; CComPtr<IPersistPropertyBag> pPropertyBag; GetFirstCameraDriver(wzDeviceName); hr = s->m_pDeviceFilter.CoCreateInstance( CLSID_VideoCapture ); if (FAILED(hr)) { return -8; } s->m_pDeviceFilter.QueryInterface( &pPropertyBag ); 
varCamName = wzDeviceName; if(( varCamName.vt == VT_BSTR ) == NULL ) { return E_OUTOFMEMORY; } PropBag.Write( L"VCapName", &varCamName ); pPropertyBag->Load( &PropBag, NULL ); pPropertyBag.Release(); hr = s->m_pGraph->AddFilter( s->m_pDeviceFilter, L"Video capture source" ); #endif if (FAILED(hr)) { return -8; } // get null renderer s->m_pNullRenderer = NULL; #if 0 hr=s->m_pNullRenderer.CoCreateInstance(CLSID_NullRenderer); if(FAILED(hr)) { return -13; } #endif if (s->m_pNullRenderer!=NULL) { s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer"); } hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer); if (FAILED(hr)) { //hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_CAPTURE, // &MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer); if (FAILED(hr)) { return -14; } } //m_pDXFilter->SetBufferSamples(TRUE); // Create the System Device Enumerator. IFilterMapper *pMapper = NULL; //IEnumMoniker *pEnum = NULL; IEnumRegFilters *pEnum = NULL; hr = CoCreateInstance(CLSID_FilterMapper, NULL, CLSCTX_INPROC, IID_IFilterMapper, (void **) &pMapper); if (FAILED(hr)) { // Error handling omitted for clarity. } GUID arrayInTypes[2]; arrayInTypes[0] = MEDIATYPE_Video; arrayInTypes[1] = MEDIASUBTYPE_dvsd; hr = pMapper->EnumMatchingFilters( &pEnum, MERIT_HW_COMPRESSOR, // Minimum merit. FALSE, // At least one input pin? MEDIATYPE_NULL, MEDIASUBTYPE_NULL, FALSE, // Must be a renderer? FALSE, // At least one output pin? MEDIATYPE_NULL, MEDIASUBTYPE_NULL); // Enumerate the monikers. //IMoniker *pMoniker; REGFILTER *pMoniker; ULONG cFetched; while (pEnum->Next(1, &pMoniker, &cFetched) == S_OK) { IPropertyBag *pPropBag = NULL; #if 0 hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag); if (SUCCEEDED(hr)) { // To retrieve the friendly name of the filter, do the following: VARIANT varName; VariantInit(&varName); hr = pPropBag->Read(L"FriendlyName", &varName, 0); if (SUCCEEDED(hr)) { // Display the name in your UI somehow. } VariantClear(&varName); // To create an instance of the filter, do the following: IBaseFilter *pFilter; hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&pFilter); // Now add the filter to the graph. Remember to release pFilter later. // Clean up. pPropBag->Release(); } pMoniker->Release(); #endif } // Clean up. pMapper->Release(); pEnum->Release(); s_callback = s; hr = s->m_pControl->Run(); if(FAILED(hr)) { return -15; } s->rotregvalue=1; s->pix_fmt = format; s->vsize.height = vsize->height; s->vsize.width = vsize->width; return 0; }
static GstCaps * gst_dshowaudiosrc_get_caps (GstBaseSrc * basesrc, GstCaps * filter) { HRESULT hres = S_OK; IBindCtx *lpbc = NULL; IMoniker *audiom = NULL; DWORD dwEaten; GstDshowAudioSrc *src = GST_DSHOWAUDIOSRC (basesrc); gunichar2 *unidevice = NULL; if (src->device) { g_free (src->device); src->device = NULL; } src->device = gst_dshow_getdevice_from_devicename (&CLSID_AudioInputDeviceCategory, &src->device_name); if (!src->device) { GST_ERROR ("No audio device found."); return NULL; } unidevice = g_utf8_to_utf16 (src->device, strlen (src->device), NULL, NULL, NULL); if (!src->audio_cap_filter) { hres = CreateBindCtx (0, &lpbc); if (SUCCEEDED (hres)) { hres = MkParseDisplayName (lpbc, (LPCOLESTR) unidevice, &dwEaten, &audiom); if (SUCCEEDED (hres)) { hres = audiom->BindToObject (lpbc, NULL, IID_IBaseFilter, (LPVOID *) & src->audio_cap_filter); audiom->Release (); } lpbc->Release (); } } if (src->audio_cap_filter && !src->caps) { /* get the capture pins supported types */ IPin *capture_pin = NULL; IEnumPins *enumpins = NULL; HRESULT hres; hres = src->audio_cap_filter->EnumPins (&enumpins); if (SUCCEEDED (hres)) { while (enumpins->Next (1, &capture_pin, NULL) == S_OK) { IKsPropertySet *pKs = NULL; hres = capture_pin->QueryInterface (IID_IKsPropertySet, (LPVOID *) & pKs); if (SUCCEEDED (hres) && pKs) { DWORD cbReturned; GUID pin_category; RPC_STATUS rpcstatus; hres = pKs->Get (AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0, &pin_category, sizeof (GUID), &cbReturned); /* we only want capture pins */ if (UuidCompare (&pin_category, (UUID *) & PIN_CATEGORY_CAPTURE, &rpcstatus) == 0) { IAMStreamConfig *streamcaps = NULL; if (SUCCEEDED (capture_pin->QueryInterface (IID_IAMStreamConfig, (LPVOID *) & streamcaps))) { src->caps = gst_dshowaudiosrc_getcaps_from_streamcaps (src, capture_pin, streamcaps); streamcaps->Release (); } } pKs->Release (); } capture_pin->Release (); } enumpins->Release (); } } if (unidevice) { g_free (unidevice); } if (src->caps) { GstCaps *caps; if (filter) { caps = gst_caps_intersect_full (filter, src->caps, GST_CAPS_INTERSECT_FIRST); } else { caps = gst_caps_ref (src->caps); } return caps; } return NULL; }
void showhelppp(const char *anchor = 0) //Alone Coder 0.36.6 { if (!hMSHTML) hMSHTML = LoadLibrary("MSHTML.DLL"); if (!hURLMON) hURLMON = LoadLibrary("URLMON.DLL"); tShowHTMLDialog Show = hMSHTML? (tShowHTMLDialog)GetProcAddress(hMSHTML, "ShowHTMLDialog") : 0; tCreateURLMoniker CreateMoniker = hURLMON? (tCreateURLMoniker)GetProcAddress(hURLMON, "CreateURLMoniker") : 0; HWND fgwin = GetForegroundWindow(); if (!Show || !CreateMoniker) { MessageBox(fgwin, "Install IE4.0 or higher to view help", 0, MB_ICONERROR); return; } char dst[0x200]; GetTempPath(sizeof dst, dst); strcat(dst, "us_help.htm"); FILE *ff = fopen(helpname, "rb"), *gg = fopen(dst, "wb"); if (!ff || !gg) return; for (;;) { int x = getc(ff); if (x == EOF) break; if (x == '{') { char tag[0x100]; int r = 0; while (r < sizeof(tag)-1 && !feof(ff) && (x = getc(ff)) != '}') tag[r++] = x; tag[r] = 0; if (tag[0] == '?') { RECT rc; GetWindowRect(fgwin, &rc); if (tag[1] == 'x') fprintf(gg, "%d", rc.right-rc.left-20); if (tag[1] == 'y') fprintf(gg, "%d", rc.bottom-rc.top-20); } else { char res[0x100]; GetPrivateProfileString("SYSTEM.KEYS", tag, "not defined", res, sizeof res, ininame); char *comment = strchr(res, ';'); if (comment) *comment = 0; int len; //Alone Coder 0.36.7 for (/*int*/ len = strlen(res); len && res[len-1] == ' '; res[--len] = 0); for (len = 0; res[len]; len++) if (res[len] == ' ') res[len] = '-'; fprintf(gg, "%s", res); } } else putc(x, gg); } fclose(ff), fclose(gg); char url[0x200]; sprintf(url, "file://%s%s%s", dst, anchor?"#":nil, anchor?anchor:nil); WCHAR urlw[0x200]; MultiByteToWideChar(AreFileApisANSI()? CP_ACP : CP_OEMCP, MB_USEGLYPHCHARS, url, -1, urlw, _countof(urlw)); IMoniker *pmk = 0; CreateMoniker(0, urlw, &pmk); if (pmk) { bool restore_video = false; if (!(temp.rflags & (RF_GDI | RF_CLIP | RF_OVR | RF_16 | RF_32))) { temp.rflags = temp.rflags | RF_GDI; set_video(); restore_video = true; } Show(fgwin, pmk, 0,0,0); pmk->Release(); #if 0 if (dbgbreak) { temp.rflags = RF_MONITOR; set_video(); } else #endif { if (restore_video) apply_video(); } } DeleteFile(dst); }
gchar * gst_dshow_getdevice_from_devicename (const GUID * device_category, gchar ** device_name) { gchar *ret = NULL; ICreateDevEnum *devices_enum = NULL; IEnumMoniker *enum_moniker = NULL; IMoniker *moniker = NULL; HRESULT hres = S_FALSE; ULONG fetched; gboolean bfound = FALSE; hres = CoCreateInstance (CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void **) &devices_enum); if (hres != S_OK) { /*error */ goto clean; } hres = devices_enum->CreateClassEnumerator (*device_category, &enum_moniker, 0); if (hres != S_OK || !enum_moniker) { /*error */ goto clean; } enum_moniker->Reset (); while (hres = enum_moniker->Next (1, &moniker, &fetched), hres == S_OK && !bfound) { IPropertyBag *property_bag = NULL; hres = moniker->BindToStorage (NULL, NULL, IID_IPropertyBag, (void **) &property_bag); if (SUCCEEDED (hres) && property_bag) { VARIANT varFriendlyName; VariantInit (&varFriendlyName); hres = property_bag->Read (L"FriendlyName", &varFriendlyName, NULL); if (hres == S_OK && varFriendlyName.bstrVal) { gchar *friendly_name = g_utf16_to_utf8 ((const gunichar2 *) varFriendlyName.bstrVal, wcslen (varFriendlyName.bstrVal), NULL, NULL, NULL); if (!*device_name) { *device_name = g_strdup (friendly_name); } if (_stricmp (*device_name, friendly_name) == 0) { WCHAR *wszDisplayName = NULL; hres = moniker->GetDisplayName (NULL, NULL, &wszDisplayName); if (hres == S_OK && wszDisplayName) { ret = g_utf16_to_utf8 ((const gunichar2 *) wszDisplayName, wcslen (wszDisplayName), NULL, NULL, NULL); CoTaskMemFree (wszDisplayName); } bfound = TRUE; } SysFreeString (varFriendlyName.bstrVal); } property_bag->Release (); } moniker->Release (); } clean: if (enum_moniker) { enum_moniker->Release (); } if (devices_enum) { devices_enum->Release (); } return ret; }
// enumerate all needed devices // HRESULT EnumerateDevices(char *videodriver[], int *vcount, IGraphBuilder *pIGB, ICaptureGraphBuilder *pICGB, int devicenumber , IBaseFilter **pCap, BOOL isVideo) { ICreateDevEnum *pCreateDevEnum=NULL; IEnumMoniker *pEm= NULL; IMoniker *pM = NULL; UINT uIndex = 0; HRESULT hr = NOERROR; int cc = 0; int oldv=0x7FFFFFFF; if (vcount!=NULL) { oldv=*vcount; *vcount=-1; } // for(cc=0;cc<2;cc++) for(cc=1;cc<2;cc++) { hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void**)&pCreateDevEnum); if (SUCCEEDED(hr)) { if (cc==0) hr = pCreateDevEnum->CreateClassEnumerator(AM_KSCATEGORY_CAPTURE/*AM_KSCATEGORY_VIDEO*/, &pEm, 0); else { if (isVideo) hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEm, 0); else hr = pCreateDevEnum->CreateClassEnumerator(CLSID_AudioInputDeviceCategory, &pEm, 0); } pCreateDevEnum->Release(); if (SUCCEEDED(hr)&&(pEm!=NULL)) { pEm->Reset(); ULONG cFetched=0; while( (hr = pEm->Next(1, &pM, &cFetched), hr==S_OK) ) { IPropertyBag *pBag=NULL; hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pBag); if(SUCCEEDED(hr)) { VARIANT var; var.vt = VT_BSTR; hr = pBag->Read(L"FriendlyName", &var, NULL); if ( (hr == NOERROR) && (videodriver!=NULL) && (vcount!=NULL) ) { char achName[80]; WideCharToMultiByte(CP_ACP, 0, var.bstrVal, -1, achName, 80, NULL, NULL); *vcount=(*vcount)+1; #if 0 if (cc==0) lstrcpy(videodriver[*vcount],"(WDM)"); else lstrcpy(videodriver[*vcount],"(VfW)"); #else lstrcpy(videodriver[*vcount],""); #endif lstrcat(videodriver[*vcount],achName); SysFreeString(var.bstrVal); } pBag->Release(); if (vcount!=NULL) { if ((pCap!=NULL)&&(devicenumber==*vcount)) hr = pM->BindToObject(0, 0, IID_IBaseFilter, (void**)pCap); } } pM->Release(); uIndex++; if (vcount!=NULL) { if (*vcount>=oldv) break; } } pEm->Release(); } } } return(NOERROR); }
static GValueArray * gst_dshowvideosrc_get_device_name_values (GstDshowVideoSrc * src) { GValueArray *array = g_value_array_new (0); ICreateDevEnum *devices_enum = NULL; IEnumMoniker *moniker_enum = NULL; IMoniker *moniker = NULL; HRESULT hres = S_FALSE; ULONG fetched; hres = CoCreateInstance (CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (LPVOID *) & devices_enum); if (hres != S_OK) { GST_ERROR ("Can't create system device enumerator (error=0x%x)", hres); array = NULL; goto clean; } hres = devices_enum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, &moniker_enum, 0); if (hres != S_OK || !moniker_enum) { GST_ERROR ("Can't get enumeration of video devices (error=0x%x)", hres); array = NULL; goto clean; } moniker_enum->Reset (); while (hres = moniker_enum->Next (1, &moniker, &fetched), hres == S_OK) { IPropertyBag *property_bag = NULL; hres = moniker->BindToStorage (NULL, NULL, IID_IPropertyBag, (LPVOID *) & property_bag); if (SUCCEEDED (hres) && property_bag) { VARIANT varFriendlyName; VariantInit (&varFriendlyName); hres = property_bag->Read (L"FriendlyName", &varFriendlyName, NULL); if (hres == S_OK && varFriendlyName.bstrVal) { gchar *friendly_name = g_utf16_to_utf8 ((const gunichar2 *) varFriendlyName.bstrVal, wcslen (varFriendlyName.bstrVal), NULL, NULL, NULL); GValue value = { 0 }; g_value_init (&value, G_TYPE_STRING); g_value_set_string (&value, friendly_name); g_value_array_append (array, &value); g_value_unset (&value); g_free (friendly_name); SysFreeString (varFriendlyName.bstrVal); } property_bag->Release (); } moniker->Release (); } clean: if (moniker_enum) moniker_enum->Release (); if (devices_enum) devices_enum->Release (); return array; }
HRESULT videoInputCamera::getDevice(IBaseFilter** gottaFilter, int deviceId, WCHAR * wDeviceName, char * nDeviceName) {
    BOOL done = false;
    int deviceCounter = 0;

    // Create the System Device Enumerator.
    ICreateDevEnum *pSysDevEnum = NULL;
    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                                  IID_ICreateDevEnum, (void **)&pSysDevEnum);
    if (FAILED(hr)) {
        return hr;
    }

    // Obtain a class enumerator for the video input category.
    IEnumMoniker *pEnumCat = NULL;
    hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0);

    if (hr == S_OK) {
        // Enumerate the monikers.
        IMoniker *pMoniker = NULL;
        ULONG cFetched;
        while ((pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) && (!done)) {
            if (deviceCounter == deviceId) {
                // Bind the first moniker to an object
                IPropertyBag *pPropBag;
                hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag);
                if (SUCCEEDED(hr)) {
                    // To retrieve the filter's friendly name, do the following:
                    VARIANT varName;
                    VariantInit(&varName);
                    hr = pPropBag->Read(L"FriendlyName", &varName, 0);
                    if (SUCCEEDED(hr)) {
                        // Copy the name to nDeviceName & wDeviceName; the caller's
                        // buffers are assumed to be large enough to hold it.
                        int count = 0;
                        while (varName.bstrVal[count] != 0x00) {
                            wDeviceName[count] = varName.bstrVal[count];
                            nDeviceName[count] = (char)varName.bstrVal[count];
                            count++;
                        }
                        wDeviceName[count] = 0;
                        nDeviceName[count] = 0;

                        // We found it, so send it back to the caller
                        hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)gottaFilter);
                        done = true;
                    }
                    VariantClear(&varName);
                    pPropBag->Release();
                    pPropBag = NULL;
                    pMoniker->Release();
                    pMoniker = NULL;
                }
            }
            deviceCounter++;
        }
        pEnumCat->Release();
        pEnumCat = NULL;
    }
    pSysDevEnum->Release();
    pSysDevEnum = NULL;

    if (done) {
        return hr; // found it, return native error
    } else {
        return VFW_E_NOT_FOUND; // didn't find it error
    }
}