void VideoCapture::EnumResolutions() { int iCount, iSize, iChosen=-1; IBaseFilter *pSource; CComPtr <ICaptureGraphBuilder2> pCaptB; VIDEO_STREAM_CONFIG_CAPS caps; HRESULT hr; bool response; IAMStreamConfig *pConfig; devices_resolutions = new DeviceResolutions[nDevices]; pCaptB.CoCreateInstance(CLSID_CaptureGraphBuilder2); for (unsigned int iDevice=0; iDevice<nDevices; iDevice++) { response = BindFilter(iDevice, &pSource); hr = pCaptB->FindInterface( &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, pSource, IID_IAMStreamConfig, (void**)&pConfig); if (!SUCCEEDED(hr)) { pSource->Release(); devices_resolutions[iDevice].nResolutions = 0; continue; } pConfig->GetNumberOfCapabilities(&iCount, &iSize); devices_resolutions[iDevice].SetNResolutions(iCount); for(int i=0; i < iCount; i++) { AM_MEDIA_TYPE *pmt; if( pConfig->GetStreamCaps(i, &pmt, reinterpret_cast<BYTE*>(&caps)) == S_OK ) { VIDEOINFOHEADER *pVih = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat); devices_resolutions[iDevice].x[i] = caps.InputSize.cx; devices_resolutions[iDevice].y[i] = caps.InputSize.cy; devices_resolutions[iDevice].color_space[i] = pmt->subtype; devices_resolutions[iDevice].compression[i] = pVih->bmiHeader.biCompression; DeleteMediaType(pmt); } } pSource->Release(); pConfig->Release(); pSource = 0; } }
bool MIPDirectShowCapture::listGUIDS(std::list<GUID> &guids) { guids.clear(); HRESULT hr; IAMStreamConfig *pConfig = 0; hr = m_pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, 0, m_pCaptDevice, IID_IAMStreamConfig, (void**)&pConfig); if (HR_FAILED(hr)) { setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECONFIG); return false; } int count = 0; int s = 0; hr = pConfig->GetNumberOfCapabilities(&count, &s); if (HR_FAILED(hr)) { pConfig->Release(); setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECAPS); return false; } if (s != sizeof(VIDEO_STREAM_CONFIG_CAPS)) { pConfig->Release(); setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_INVALIDCAPS); return false; } for (int i = 0; i < count; i++) { VIDEO_STREAM_CONFIG_CAPS caps; AM_MEDIA_TYPE *pMediaType; hr = pConfig->GetStreamCaps(i, &pMediaType, (BYTE*)&caps); if (HR_SUCCEEDED(hr)) { if (pMediaType->majortype == MEDIATYPE_Video) { GUID subType = pMediaType->subtype; guids.push_back(subType); // uint8_t *pSubType = (uint8_t *)&subType; // // printf("0x%02x%02x%02x%02x %c%c%c%c\n",(int)pSubType[0],(int)pSubType[1],(int)pSubType[2],(int)pSubType[3], // (char)pSubType[0],(char)pSubType[1],(char)pSubType[2],(char)pSubType[3]); } } } return true; }
void DSCaptureDevice::initSupportedFormats() { HRESULT ret; IAMStreamConfig* streamConfig = NULL; AM_MEDIA_TYPE* mediaType = NULL; ret = m_captureGraphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, m_srcFilter, IID_IAMStreamConfig, (void**)&streamConfig); /* get to find all supported formats */ if(!FAILED(ret)) { int nb = 0; int size = 0; BYTE* allocBytes = NULL; streamConfig->GetNumberOfCapabilities(&nb, &size); allocBytes = new BYTE[size]; for(int i = 0 ; i < nb ; i++) { if(streamConfig->GetStreamCaps(i, &mediaType, allocBytes) == S_OK) { struct DSFormat format; VIDEOINFOHEADER* hdr = (VIDEOINFOHEADER*)mediaType->pbFormat; if(hdr) { format.height = hdr->bmiHeader.biHeight; format.width = hdr->bmiHeader.biWidth; format.pixelFormat = mediaType->subtype.Data1; format.mediaType = mediaType->subtype; m_formats.push_back(format); } } } delete allocBytes; } }
// List the video modes (size + FPS) offered by the capture-category output
// pin of the named device. Returns an empty vector when the device or its
// pins cannot be queried.
QVector<VideoMode> DirectShow::getDeviceModes(QString devName)
{
    QVector<VideoMode> modes;

    IBaseFilter* devFilter = getDevFilter(devName);
    if (!devFilter)
        return modes;

    // The outer loop tries to find a valid output pin of the capture category
    GUID category;
    DWORD r2;
    IEnumPins *pins = nullptr;
    IPin *pin;
    if (devFilter->EnumPins(&pins) != S_OK)
        return modes;

    while (pins->Next(1, &pin, nullptr) == S_OK)
    {
        IKsPropertySet *p = nullptr;
        PIN_INFO info;

        pin->QueryPinInfo(&info);
        info.pFilter->Release();
        if (info.dir != PINDIR_OUTPUT)
            goto next;
        if (pin->QueryInterface(IID_IKsPropertySet, (void**)&p) != S_OK)
            goto next;
        if (p->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY,
                   nullptr, 0, &category, sizeof(GUID), &r2) != S_OK)
            goto next;
        if (!IsEqualGUID(category, PIN_CATEGORY_CAPTURE))
            goto next;

        // Now we can list the video modes for the current pin
        {
            IAMStreamConfig *config = nullptr;
            VIDEO_STREAM_CONFIG_CAPS *vcaps = nullptr;
            int size, n;

            if (pin->QueryInterface(IID_IAMStreamConfig, (void**)&config) != S_OK)
                goto next;
            if (config->GetNumberOfCapabilities(&n, &size) != S_OK)
                goto pinend;

            assert(size == sizeof(VIDEO_STREAM_CONFIG_CAPS));
            vcaps = new VIDEO_STREAM_CONFIG_CAPS;

            for (int i = 0; i < n; ++i)
            {
                AM_MEDIA_TYPE* type = nullptr;
                if (config->GetStreamCaps(i, &type, (BYTE*)vcaps) != S_OK)
                    goto nextformat;

                if (!IsEqualGUID(type->formattype, FORMAT_VideoInfo)
                        && !IsEqualGUID(type->formattype, FORMAT_VideoInfo2))
                    goto nextformat;

                VideoMode mode;
                mode.width = vcaps->MaxOutputSize.cx;
                mode.height = vcaps->MaxOutputSize.cy;
                mode.FPS = 1e7 / vcaps->MinFrameInterval;
                if (!modes.contains(mode))
                    modes.append(std::move(mode));

            nextformat:
                // 'type' is still null when GetStreamCaps failed above; the
                // old code dereferenced it unconditionally and could crash.
                if (type)
                {
                    if (type->pbFormat)
                        CoTaskMemFree(type->pbFormat);
                    CoTaskMemFree(type);
                }
            }
        pinend:
            config->Release();
            delete vcaps;
        }
    next:
        if (p)
            p->Release();
        pin->Release();
    }

    pins->Release();  // enumerator was leaked before
    return modes;
}
// Open the video capture device named in s->dev (an empty name selects the
// first device found), build the DirectShow graph
//     capture device -> CDXFilter (frame callback) -> null renderer,
// negotiate the pixel format and frame size, set the frame rate, and start
// the graph. Returns 0 on success or a negative code identifying the step
// that failed (-1 .. -18).
// NOTE(review): most error paths return without releasing the COM objects
// created earlier in this function — presumably the caller's close/teardown
// path releases the members of V4wState; verify before reusing this code.
static int v4w_open_videodevice(V4wState *s)
{
    // Initialize COM
    CoInitialize(NULL);

    // get a Graph
    HRESULT hr = CoCreateInstance(CLSID_FilterGraph,
        NULL,
        CLSCTX_INPROC_SERVER,
        IID_IGraphBuilder, //IID_IBaseFilter,
        (void **)&s->m_pGraph);
    if(FAILED(hr))
    {
        return -1;
    }

    // get a CaptureGraphBuilder2
    hr = CoCreateInstance(CLSID_CaptureGraphBuilder2,
        NULL,
        CLSCTX_INPROC_SERVER,
        IID_ICaptureGraphBuilder2, //IID_IBaseFilter,
        (void **)&s->m_pBuilder);
    if(FAILED(hr))
    {
        return -2;
    }

    // connect capture graph builder with the graph
    s->m_pBuilder->SetFiltergraph(s->m_pGraph);

    // get mediacontrol so we can start and stop the filter graph
    hr = s->m_pGraph->QueryInterface(IID_IMediaControl, (void **)&s->m_pControl);
    if(FAILED(hr))
    {
        return -3;
    }

    // Enumerate the video input device category and look for the device
    // whose friendly name matches s->dev.
    ICreateDevEnum *pCreateDevEnum = NULL;
    IEnumMoniker *pEnumMoniker = NULL;
    IMoniker *pMoniker = NULL;
    ULONG nFetched = 0;

    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
        IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
    if(FAILED(hr))
    {
        return -4;
    }

    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
        &pEnumMoniker, 0);
    if (FAILED(hr) || pEnumMoniker == NULL)
    {
        //printf("no device\n");
        return -5;
    }

    pEnumMoniker->Reset();

    int pos = 0;
    while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched))
    {
        IPropertyBag *pBag;
        hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)&pBag);
        if(hr != S_OK)
            continue;

        // Empty device name requested: take the first device that binds.
        if (s->dev[0]=='\0')
            break;

        VARIANT var;
        VariantInit(&var);
        hr = pBag->Read(L"FriendlyName", &var, NULL);
        if(hr != S_OK)
        {
            pMoniker->Release();
            continue;
        }
        //USES_CONVERSION;
        char szName[256];
        WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0);
        VariantClear(&var);

        // Found the requested device: keep pMoniker non-null and stop.
        if (strcmp(szName, s->dev)==0)
            break;

        pMoniker->Release();
        pBag->Release();
        pMoniker=NULL;
        pBag=NULL;
    }

    if(pMoniker==NULL)
    {
        return -6;   // no matching device
    }

    hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter);
    if(FAILED(hr))
    {
        return -7;
    }
    s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

    pMoniker->Release();
    pEnumMoniker->Release();
    pCreateDevEnum->Release();

    // Negotiate a pixel format: prefer the one already in s->pix_fmt, then
    // fall back through the formats the driver may support.
    GUID pPinCategory;

    if (try_format(s->m_pDeviceFilter, s->pix_fmt, &pPinCategory)==0)
        s->pix_fmt = s->pix_fmt;
    else if (try_format(s->m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0)
        s->pix_fmt = MS_YUV420P;
    else if (try_format(s->m_pDeviceFilter,MS_YUY2, &pPinCategory)==0)
        s->pix_fmt = MS_YUY2;
    else if (try_format(s->m_pDeviceFilter,MS_YUYV, &pPinCategory)==0)
        s->pix_fmt = MS_YUYV;
    else if (try_format(s->m_pDeviceFilter,MS_UYVY, &pPinCategory)==0)
        s->pix_fmt = MS_UYVY;
    else if (try_format(s->m_pDeviceFilter,MS_RGB24, &pPinCategory)==0)
        s->pix_fmt = MS_RGB24;
    else
    {
        ms_error("Unsupported video pixel format.");
        return -8;
    }

    if (s->pix_fmt == MS_YUV420P)
        ms_message("Driver supports YUV420P, using that format.");
    else if (s->pix_fmt == MS_YUY2)
        ms_message("Driver supports YUY2 (YUYV), using that format.");
    else if (s->pix_fmt == MS_YUYV)
        ms_message("Driver supports YUV422, using that format.");
    else if (s->pix_fmt == MS_UYVY)
        ms_message("Driver supports UYVY, using that format.");
    else if (s->pix_fmt == MS_RGB24)
        ms_message("Driver supports RGB24, using that format.");

    // Negotiate a frame size: try the requested size first, then fall back
    // through the standard sizes from QCIF up to HDTVP.
    if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H, &pPinCategory)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H);
    else
    {
        ms_error("No supported size found for format.");
        /* size not supported? */
        return -9;
    }

    // get DXFilter (receives decoded frames via Callback)
    s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
    if(s->m_pDXFilter==NULL)
    {
        return -10;
    }
    s->m_pDXFilter->AddRef();

    // Build the media type the DX filter will accept, matching the
    // negotiated pixel format and frame size.
    CMediaType mt;
    mt.SetType(&MEDIATYPE_Video);

    GUID m = MEDIASUBTYPE_RGB24;
    if (s->pix_fmt == MS_YUV420P)
        m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
    else if (s->pix_fmt == MS_YUY2)
        m = MEDIASUBTYPE_YUY2;
    else if (s->pix_fmt == MS_YUYV)
        m = MEDIASUBTYPE_YUYV;
    else if (s->pix_fmt == MS_UYVY)
        m = MEDIASUBTYPE_UYVY;
    else if (s->pix_fmt == MS_RGB24)
        m = MEDIASUBTYPE_RGB24;
    mt.SetSubtype(&m);

    mt.formattype = FORMAT_VideoInfo;
    mt.SetTemporalCompression(FALSE);

    VIDEOINFO *pvi = (VIDEOINFO *)mt.AllocFormatBuffer(sizeof(VIDEOINFO));
    if (NULL == pvi)
        return -11;
    ZeroMemory(pvi, sizeof(VIDEOINFO));

    // FOURCC compression code matching the chosen subtype
    if (s->pix_fmt == MS_YUV420P)
        pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
    else if (s->pix_fmt == MS_YUY2)
        pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
    else if (s->pix_fmt == MS_YUYV)
        pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','V');
    else if (s->pix_fmt == MS_UYVY)
        pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
    else if (s->pix_fmt == MS_RGB24)
        pvi->bmiHeader.biCompression = BI_RGB;

    // bits per pixel for each format
    if (s->pix_fmt == MS_YUV420P)
        pvi->bmiHeader.biBitCount = 12;
    else if (s->pix_fmt == MS_YUY2)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_YUYV)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_UYVY)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_RGB24)
        pvi->bmiHeader.biBitCount = 24;

    pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    pvi->bmiHeader.biWidth = s->vsize.width;
    pvi->bmiHeader.biHeight = s->vsize.height;
    pvi->bmiHeader.biPlanes = 1;
    pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
    pvi->bmiHeader.biClrImportant = 0;
    mt.SetSampleSize(pvi->bmiHeader.biSizeImage);
    mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

    hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
    if(FAILED(hr))
    {
        return -12;
    }

    hr = s->m_pDXFilter->SetCallback(Callback);
    if(FAILED(hr))
    {
        return -13;
    }

    hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
        (LPVOID *)&s->m_pIDXFilter);
    if(FAILED(hr))
    {
        return -14;
    }

    hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
    if(FAILED(hr))
    {
        return -15;
    }

    // get null renderer (terminates the graph without displaying anything)
    hr = CoCreateInstance(CLSID_NullRenderer,
        NULL,
        CLSCTX_INPROC_SERVER,
        IID_IBaseFilter,
        (void **)&s->m_pNullRenderer);
    if(FAILED(hr))
    {
        return -16;
    }
    if (s->m_pNullRenderer!=NULL)
    {
        s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
    }

    // connect device -> DX filter -> null renderer
    hr = s->m_pBuilder->RenderStream(&pPinCategory, &MEDIATYPE_Video,
        s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
    if (FAILED(hr))
    {
        return -17;
    }

    // Find the capability matching the negotiated format/size and set the
    // requested frame rate on it.
    IAMStreamConfig *pConfig = NULL;
    hr = s->m_pBuilder->FindInterface(
        &pPinCategory, // Preview pin.
        &MEDIATYPE_Video, // Any media type.
        s->m_pDeviceFilter, // Pointer to the capture filter.
        IID_IAMStreamConfig, (void**)&pConfig);
    if (pConfig!=NULL)
    {
        AM_MEDIA_TYPE *pType = NULL;
        int iCount, iSize;
        pConfig->GetNumberOfCapabilities(&iCount, &iSize);

        for (int i = 0; i < iCount; i++)
        {
            VIDEO_STREAM_CONFIG_CAPS scc;
            pType = NULL;
            pConfig->GetStreamCaps(i, &pType, (BYTE *)&scc);

            if (!((pType->formattype == FORMAT_VideoInfo) &&
                  (pType->cbFormat >= sizeof(VIDEOINFOHEADER)) &&
                  (pType->pbFormat != NULL)))
                continue;

            VIDEOINFOHEADER & videoInfo = *(VIDEOINFOHEADER *)pType->pbFormat;

            if (m != pType->subtype)
                continue;
            if (videoInfo.bmiHeader.biWidth != s->vsize.width)
                continue;
            if (videoInfo.bmiHeader.biHeight != s->vsize.height)
                continue;
            if (videoInfo.bmiHeader.biBitCount != pvi->bmiHeader.biBitCount)
                continue;
            if (videoInfo.bmiHeader.biCompression != pvi->bmiHeader.biCompression)
                continue;
            videoInfo.AvgTimePerFrame = UNITS / (LONGLONG)s->fps;
            pConfig->SetFormat(pType);
        }

        // Report the frame rate the driver actually granted.
        pConfig->GetFormat(&pType);
        if (pType!=NULL)
        {
            VIDEOINFO *pvi;
            pvi = (VIDEOINFO *)pType->pbFormat;
            ms_message("v4w: camera asked fps=%.2f // real fps=%.2f",
                s->fps, ((float)UNITS / (float)pvi->AvgTimePerFrame));
        }
        pConfig->Release();
    }

    //m_pDXFilter->SetBufferSamples(TRUE);

    s_callback = s;
    hr = s->m_pControl->Run();
    if(FAILED(hr))
    {
        return -18;
    }

    s->rotregvalue = 1;
    return 0;
}
/* Build a GstCaps listing every video capability the given pin exposes via
 * IAMStreamConfig. Recognized raw formats, DV (elementary and system
 * stream) and MJPEG are converted; each kept capability's pin_mediatype is
 * appended to src->pins_mediatypes for later format selection.
 * Returns NULL when the pin has no usable capabilities. */
static GstCaps *
gst_dshowvideosrc_getcaps_from_streamcaps (GstDshowVideoSrc * src, IPin * pin)
{
  GstCaps *caps = NULL;
  HRESULT hres = S_OK;
  int icount = 0;
  int isize = 0;
  VIDEO_STREAM_CONFIG_CAPS vscc;
  int i = 0;
  IAMStreamConfig *streamcaps = NULL;

  hres = pin->QueryInterface (IID_IAMStreamConfig, (LPVOID *) & streamcaps);
  if (FAILED (hres)) {
    GST_ERROR ("Failed to retrieve IAMStreamConfig (error=0x%x)", hres);
    return NULL;
  }

  streamcaps->GetNumberOfCapabilities (&icount, &isize);

  /* a size mismatch would mean these are not video capabilities */
  if (isize != sizeof (vscc)) {
    streamcaps->Release ();
    return NULL;
  }

  caps = gst_caps_new_empty ();

  for (i = 0; i < icount; i++) {
    GstCapturePinMediaType *pin_mediatype =
        gst_dshow_new_pin_mediatype_from_streamcaps (pin, i, streamcaps);

    if (pin_mediatype) {
      GstCaps *mediacaps = NULL;
      GstVideoFormat video_format =
          gst_dshow_guid_to_gst_video_format (pin_mediatype->mediatype);

      if (video_format != GST_VIDEO_FORMAT_UNKNOWN) {
        /* directly mappable raw video format */
        mediacaps = gst_dshow_new_video_caps (video_format, NULL,
            pin_mediatype);
      } else if (gst_dshow_check_mediatype (pin_mediatype->mediatype,
              MEDIASUBTYPE_dvsd, FORMAT_VideoInfo)) {
        /* DV elementary stream */
        mediacaps =
            gst_dshow_new_video_caps (GST_VIDEO_FORMAT_UNKNOWN,
            "video/x-dv, systemstream=FALSE", pin_mediatype);
      } else if (gst_dshow_check_mediatype (pin_mediatype->mediatype,
              MEDIASUBTYPE_dvsd, FORMAT_DvInfo)) {
        /* DV system stream: no size granularity applies */
        mediacaps =
            gst_dshow_new_video_caps (GST_VIDEO_FORMAT_UNKNOWN,
            "video/x-dv, systemstream=TRUE", pin_mediatype);
        pin_mediatype->granularityWidth = 0;
        pin_mediatype->granularityHeight = 0;
      } else if (gst_dshow_check_mediatype (pin_mediatype->mediatype,
              MEDIASUBTYPE_MJPG, FORMAT_VideoInfo)) {
        /* Motion JPEG */
        mediacaps = gst_dshow_new_video_caps (GST_VIDEO_FORMAT_UNKNOWN,
            "image/jpeg", pin_mediatype);
      }

      if (mediacaps) {
        /* keep the pin media type; ownership moves to the list */
        src->pins_mediatypes =
            g_list_append (src->pins_mediatypes, pin_mediatype);
        gst_caps_append (caps, mediacaps);
      } else {
        /* failed to convert dshow caps */
        gst_dshow_free_pin_mediatype (pin_mediatype);
      }
    }
  }

  streamcaps->Release ();

  if (caps && gst_caps_is_empty (caps)) {
    gst_caps_unref (caps);
    caps = NULL;
  }

  return caps;
}
void DirectShowGrabber::setCaptureOutputFormat() { IAMStreamConfig *pConfig; int iCount; int iSize; VIDEOINFOHEADER *pVih; VIDEO_STREAM_CONFIG_CAPS scc; AM_MEDIA_TYPE *pmtConfig; int formatSet; HRESULT hr; // Reference http://msdn.microsoft.com/library/default.asp?url=/library/en-us/directshow/htm/configurethevideooutputformat.asp debug_msg("DirectShowGrabber::setCaptureOutputFormat(): enter...\n"); formatSet = 0; pConfig = NULL; hr = pBuild_->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, pCaptureFilter_, IID_IAMStreamConfig, (void**)&pConfig); if (FAILED(hr)) { Grabber::status_=-1; return; } debug_msg("DirectShowGrabber::setCaptureOutputFormat(): IAMStreamConfig interface acquired\n"); iCount = iSize = 0; hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize); // Check the size to make sure we pass in the correct structure. // The alternative output of iSize is AUDIO_STREAM_CONFIG_CAPS, btw. if ( iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS) ) { for (int iFormat = 0; iFormat < iCount; iFormat++) { hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE *)&scc); //showErrorMessage(hr); if( SUCCEEDED(hr) ) { if ((pmtConfig->majortype == MEDIATYPE_Video) && (pmtConfig->subtype == MEDIASUBTYPE_RGB24) && (pmtConfig->formattype == FORMAT_VideoInfo) && (pmtConfig->cbFormat >= sizeof (VIDEOINFOHEADER)) && (pmtConfig->pbFormat != NULL)) { pVih = (VIDEOINFOHEADER *)pmtConfig->pbFormat; pVih->bmiHeader.biWidth = 320; pVih->bmiHeader.biHeight = 240; pVih->bmiHeader.biSizeImage = DIBSIZE(pVih->bmiHeader); debug_msg("Windows GDI BITMAPINFOHEADER follows:\n"); debug_msg("biWidth= %d\n", pVih->bmiHeader.biWidth); debug_msg("biHeight= %d\n", pVih->bmiHeader.biHeight); debug_msg("biSize= %d\n", pVih->bmiHeader.biSize); debug_msg("biPlanes= %d\n", pVih->bmiHeader.biPlanes); debug_msg("biBitCount= %d\n", pVih->bmiHeader.biBitCount); debug_msg("biCompression= %d\n", pVih->bmiHeader.biCompression); debug_msg("biSizeImage= %d\n", pVih->bmiHeader.biSizeImage); 
debug_msg("biXPelsPerMeter=%d\n", pVih->bmiHeader.biXPelsPerMeter); debug_msg("biYPelsPerMeter=%d\n", pVih->bmiHeader.biYPelsPerMeter); debug_msg("biClrUsed= %d\n", pVih->bmiHeader.biClrUsed); debug_msg("biClrImportant= %d\n", pVih->bmiHeader.biClrImportant); hr = pConfig->SetFormat(pmtConfig); //showErrorMessage(hr); // XXX: leak. need to deal with this - msp //DeleteMediaType(pmtConfig); formatSet = 1; break; } } } } pConfig->Release(); if( formatSet ) debug_msg("DirectShowGrabber::setCaptureOutputFormat: format set\n"); else debug_msg("DirectShowGrabber::setCaptureOutputFormat: format not set\n"); }
bool MIPDirectShowCapture::setFormat(int w, int h, real_t rate) { HRESULT hr; IAMStreamConfig *pConfig = 0; hr = m_pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, 0, m_pCaptDevice, IID_IAMStreamConfig, (void**)&pConfig); if (HR_FAILED(hr)) { setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECONFIG); return false; } int count = 0; int s = 0; hr = pConfig->GetNumberOfCapabilities(&count, &s); if (HR_FAILED(hr)) { pConfig->Release(); setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECAPS); return false; } if (s != sizeof(VIDEO_STREAM_CONFIG_CAPS)) { pConfig->Release(); setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_INVALIDCAPS); return false; } for (int i = 0; i < count; i++) { VIDEO_STREAM_CONFIG_CAPS caps; AM_MEDIA_TYPE *pMediaType; hr = pConfig->GetStreamCaps(i, &pMediaType, (BYTE*)&caps); if (HR_SUCCEEDED(hr)) { if ((pMediaType->majortype == MEDIATYPE_Video) && (pMediaType->subtype == m_selectedGuid) && (pMediaType->formattype == FORMAT_VideoInfo) && (pMediaType->cbFormat >= sizeof (VIDEOINFOHEADER)) && (pMediaType->pbFormat != 0)) { VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pMediaType->pbFormat; pVih->bmiHeader.biWidth = w; pVih->bmiHeader.biHeight = h; pVih->bmiHeader.biSizeImage = DIBSIZE(pVih->bmiHeader); pVih->AvgTimePerFrame = (REFERENCE_TIME)(10000000.0/rate); hr = pConfig->SetFormat(pMediaType); if (HR_SUCCEEDED(hr)) { CoTaskMemFree(pMediaType->pbFormat); pConfig->Release(); return true; } } if (pMediaType->pbFormat != 0) CoTaskMemFree(pMediaType->pbFormat); } } pConfig->Release(); setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTSETCAPS); return false; }
// Switch the capture device to the requested format (pixel format FOURCC,
// width and height) by locating the matching capability and calling
// SetFormat on it. Updates m_bitPerPixel and m_format on success.
// Returns the HRESULT of the failing step, or E_FAIL when no capability
// matches.
HRESULT DSCaptureDevice::setFormat(const DSFormat& format)
{
    HRESULT hr;
    IAMStreamConfig* streamConfig = NULL;

    /* get the right interface to change capture settings */
    hr = m_captureGraphBuilder->FindInterface(
            &PIN_CATEGORY_CAPTURE,
            &MEDIATYPE_Video,
            m_srcFilter,
            IID_IAMStreamConfig,
            (void**) &streamConfig);

    if(SUCCEEDED(hr))
    {
        int nb = 0;
        int size = 0;
        AM_MEDIA_TYPE* mediaType = NULL;   // capability we decide to keep
        size_t bitCount = 0;

        hr = streamConfig->GetNumberOfCapabilities(&nb, &size);
        if (SUCCEEDED(hr) && nb)
        {
            // scratch buffer for the (unused) stream-config caps struct
            BYTE* scc = new BYTE[size];

            if (scc)
            {
                DWORD pixfmt = format.pixelFormat;

                for (int i = 0 ; i < nb ; i++)
                {
                    AM_MEDIA_TYPE* mt;

                    if (streamConfig->GetStreamCaps(i, &mt, scc) == S_OK)
                    {
                        VIDEOINFOHEADER* hdr = (VIDEOINFOHEADER*) mt->pbFormat;

                        // match on FOURCC (subtype.Data1) and exact size
                        if (hdr
                                && (mt->subtype.Data1 == pixfmt)
                                && ((long) format.height == hdr->bmiHeader.biHeight)
                                && ((long) format.width == hdr->bmiHeader.biWidth))
                        {
                            // keep this media type; freed after SetFormat
                            mediaType = mt;
                            if ((pixfmt == MEDIASUBTYPE_ARGB32.Data1)
                                    || (pixfmt == MEDIASUBTYPE_RGB32.Data1))
                                bitCount = 32;
                            else if (pixfmt == MEDIASUBTYPE_RGB24.Data1)
                                bitCount = 24;
                            else
                                bitCount = hdr->bmiHeader.biBitCount;
                            break;
                        }
                        else
                            _DeleteMediaType(mt);   // not a match: free now
                    }
                }

                delete[] scc;
            }
            else
                hr = E_OUTOFMEMORY;
        }

        if (mediaType)
        {
            hr = streamConfig->SetFormat(mediaType);
            if (SUCCEEDED(hr))
            {
                m_bitPerPixel = bitCount;
                m_format = format;
                m_format.mediaType = mediaType->subtype;
            }
            _DeleteMediaType(mediaType);
        }
        else if (SUCCEEDED(hr))
            hr = E_FAIL;   // device reported caps but none matched

        streamConfig->Release();
    }

    return hr;
}
// Pick the device's most preferred media type in the order
// UYVY > YUY2 > RGB565 > RGB555 > RGB24 > RGB32, falling back to the
// current format when none of those subtypes is offered.
// Returns a media type the caller must free (DeleteMediaType), or NULL.
AM_MEDIA_TYPE * CCaptureDevice::SelectMediaType(void)
{
    // Preferred sequence: UYVY, YUY2, RGB565, RGB555, RGB24, RGB32
    VIDEO_STREAM_CONFIG_CAPS pSCC;
    AM_MEDIA_TYPE * pmt = NULL;
    HRESULT hr = S_OK;
    int nCounts = 0, nSize = 0;
    int preferredIndex = -1;

    // Enumerator values are ordered so a smaller value == more preferred.
    enum {
        UYVY = 0, YUY2, RGB565, RGB555, RGB24, RGB32, Unknown
    } currentPreferred, temp;
    currentPreferred = Unknown;

    IAMStreamConfig * pCfg = GetStreamConfig();
    pCfg->GetNumberOfCapabilities(&nCounts, &nSize);
    for (int i = 0; i < nCounts; i++)
    {
        if (pCfg->GetStreamCaps(i, &pmt, (BYTE *)&pSCC) == S_OK)
        {
            if (pmt->subtype == MEDIASUBTYPE_RGB32)
            {
                temp = RGB32;
            }
            else if (pmt->subtype == MEDIASUBTYPE_RGB24)
            {
                temp = RGB24;
            }
            else if (pmt->subtype == MEDIASUBTYPE_RGB565)
            {
                temp = RGB565;
            }
            else if (pmt->subtype == MEDIASUBTYPE_RGB555)
            {
                temp = RGB555;
            }
            else if (pmt->subtype == MEDIASUBTYPE_YUY2)
            {
                temp = YUY2;
            }
            else if (pmt->subtype == MEDIASUBTYPE_UYVY)
            {
                temp = UYVY;
            }
            else
            {
                temp = Unknown;
            }

            if (temp < currentPreferred)
            {
                currentPreferred = temp;
                preferredIndex = i;
            }
            DeleteMediaType(pmt);
            pmt = NULL;   // don't keep a pointer to freed memory
        }
    }

    // Re-fetch the preferred media type (or the current format as fallback)
    if (preferredIndex != -1)
    {
        hr = pCfg->GetStreamCaps(preferredIndex, &pmt, (BYTE *)&pSCC);
    }
    else
    {
        hr = pCfg->GetFormat(&pmt);
    }
    if (FAILED(hr))
    {
        // Previously a failure here could return a dangling pointer that
        // was freed inside the loop above.
        return NULL;
    }
    return pmt;
}
// Collect, for the audio compressor named m_sAudComp, every WAVEFORMATEX
// the filter's pins advertise. Each entry appended to listFormats is
// heap-allocated with new and owned by the caller.
// Returns E_POINTER when the compressor category cannot be enumerated,
// otherwise the HRESULT of the last COM call.
HRESULT CAudioCompressorFormats::GetSupportedFormats(std::vector<WAVEFORMATEX*>& listFormats)
{
    CStringW swDeviceName(m_sAudComp);
    HRESULT hr = m_pSysDevEnum->CreateClassEnumerator(CLSID_AudioCompressorCategory, &m_pEnumCat, 0);
    if(NULL == m_pEnumCat)
        return E_POINTER;

    if(S_OK == hr)
    {
        // Walk the compressor category and keep the moniker whose friendly
        // name matches the requested compressor.
        ULONG cFetched;
        while(m_pEnumCat->Next(1, &m_pMoniker, &cFetched) == S_OK)
        {
            IPropertyBag *pPropBag;
            hr = m_pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag);
            if (SUCCEEDED(hr))
            {
                VARIANT varName;
                VariantInit(&varName);
                BOOL found = FALSE;

                hr = pPropBag->Read(L"FriendlyName", &varName, 0);
                if (SUCCEEDED(hr))
                {
                    // A VT_BSTR value lives in bstrVal; the old code read
                    // pbstrVal (the VT_BYREF variant member) and compared
                    // garbage.
                    if(wcscmp(varName.bstrVal, swDeviceName.GetBuffer()) == 0)
                    {
                        m_pMoniker->AddRef();
                        found = TRUE;
                    }
                }
                // Always release the bag/variant (the old break path
                // leaked both).
                VariantClear(&varName);
                pPropBag->Release();

                if (found)
                    break;
            }
            m_pMoniker->Release();
            m_pMoniker = NULL;  // was left dangling -> use-after-release below
        }
    }

    if(m_pMoniker)
    {
        IBaseFilter *pFilter = 0;
        hr = m_pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pFilter);
        if(SUCCEEDED(hr))
        {
            IEnumPins *pEnum = NULL;
            hr = pFilter->EnumPins(&pEnum);
            if (SUCCEEDED(hr))
            {
                IPin *pPin = NULL;
                while(S_OK == pEnum->Next(1, &pPin, NULL))
                {
                    IAMStreamConfig *pConf;
                    hr = pPin->QueryInterface(IID_IAMStreamConfig, (void**)&pConf);
                    if (SUCCEEDED(hr))
                    {
                        int iCount, iSize;
                        hr = pConf->GetNumberOfCapabilities(&iCount, &iSize);

                        // Only audio capabilities are expected here.
                        if (SUCCEEDED(hr) && iSize == sizeof(AUDIO_STREAM_CONFIG_CAPS))
                        {
                            for (int iFormat = 0; iFormat < iCount; iFormat++)
                            {
                                AUDIO_STREAM_CONFIG_CAPS scc;
                                AM_MEDIA_TYPE *pmtConfig;
                                hr = pConf->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
                                if (SUCCEEDED(hr))
                                {
                                    if(pmtConfig->formattype == FORMAT_WaveFormatEx)
                                    {
                                        WAVEFORMATEX *pFormat = new WAVEFORMATEX(
                                            *(reinterpret_cast<WAVEFORMATEX*>(pmtConfig->pbFormat)));
                                        listFormats.push_back(pFormat);
                                    }
                                    // Free every media type, not only the
                                    // WaveFormatEx ones (old code leaked
                                    // the rest).
                                    FreeMediaType(*pmtConfig);
                                    CoTaskMemFree(pmtConfig);
                                }
                            }
                        }
                        pConf->Release();
                    }
                    pPin->Release();
                }
                pEnum->Release();
            }
            pFilter->Release();
        }
    }

    // The original fell off the end of a non-void function (UB).
    return hr;
}
// Console demo: enumerate the video capture devices, let the user pick one
// with 'y'/'n', force a 1280-pixel-wide RGB24 capture format, apply fixed
// camera-control and proc-amp settings, then render and run the capture
// graph until the blocking message box is dismissed.
int main()
{
    // for playing
    IGraphBuilder *pGraphBuilder;
    ICaptureGraphBuilder2 *pCaptureGraphBuilder2;
    IMediaControl *pMediaControl;
    IBaseFilter *pDeviceFilter = NULL;

    // to select a video input device
    ICreateDevEnum *pCreateDevEnum = NULL;
    IEnumMoniker *pEnumMoniker = NULL;
    IMoniker *pMoniker = NULL;
    ULONG nFetched = 0;

    // initialize COM
    CoInitialize(NULL);

    //
    // selecting a device
    //

    // Create CreateDevEnum to list device
    CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
        IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);

    // Create EnumMoniker to list VideoInputDevice
    pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
        &pEnumMoniker, 0);
    if (pEnumMoniker == NULL)
    {
        // this will be shown if there is no capture device
        printf("no device\n");
        return 0;
    }

    // reset EnumMoniker
    pEnumMoniker->Reset();

    // get each Moniker
    while (pEnumMoniker->Next(1, &pMoniker, &nFetched) == S_OK)
    {
        IPropertyBag *pPropertyBag;
        TCHAR devname[256];

        // bind to IPropertyBag
        pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
            (void **)&pPropertyBag);

        VARIANT var;

        // get FriendlyName
        var.vt = VT_BSTR;
        pPropertyBag->Read(L"FriendlyName", &var, 0);
        WideCharToMultiByte(CP_ACP, 0, var.bstrVal, -1, devname,
            sizeof(devname), 0, 0);
        VariantClear(&var);

        printf("%s\r\n", devname);
        printf(" select this device ? [y] or [n]\r\n");
        int ch = getchar();

        // you can start playing by 'y' + return key
        // if you press the other key, it will not be played.
        if (ch == 'y')
        {
            // Bind Moniker to Filter
            pMoniker->BindToObject(0, 0, IID_IBaseFilter,
                (void**)&pDeviceFilter);
        }
        else
        {
            getchar();   // consume the trailing newline
        }

        // release
        pMoniker->Release();
        pPropertyBag->Release();

        if (pDeviceFilter != NULL)
        {
            // go out of loop if getchar() returns 'y'
            break;
        }
    }

    if (pDeviceFilter != NULL)
    {
        //
        // PLAY
        //

        // create FilterGraph
        CoCreateInstance(CLSID_FilterGraph,
            NULL,
            CLSCTX_INPROC,
            IID_IGraphBuilder,
            (LPVOID *)&pGraphBuilder);

        // create CaptureGraphBuilder2
        CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
            IID_ICaptureGraphBuilder2,
            (LPVOID *)&pCaptureGraphBuilder2);

        //============================================================
        // format selection and camera tuning
        //============================================================
        HRESULT hr = CoInitialize(0);

        IAMStreamConfig *pConfig = NULL;
        hr = pCaptureGraphBuilder2->FindInterface(&PIN_CATEGORY_CAPTURE, 0,
            pDeviceFilter, IID_IAMStreamConfig, (void**)&pConfig);

        int iCount = 0, iSize = 0;
        hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);

        // Check the size to make sure we pass in the correct structure.
        if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
        {
            // Use the video capabilities structure.
            for (int iFormat = 0; iFormat < iCount; iFormat++)
            {
                VIDEO_STREAM_CONFIG_CAPS scc;
                AM_MEDIA_TYPE *pmtConfig;
                hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);

                if (SUCCEEDED(hr))
                {
                    /* Examine the format, and possibly use it. */
                    if ((pmtConfig->majortype == MEDIATYPE_Video) &&
                        (pmtConfig->subtype == MEDIASUBTYPE_RGB24) &&
                        (pmtConfig->formattype == FORMAT_VideoInfo) &&
                        (pmtConfig->cbFormat >= sizeof (VIDEOINFOHEADER)) &&
                        (pmtConfig->pbFormat != NULL))
                    {
                        VIDEOINFOHEADER *pVih =
                            (VIDEOINFOHEADER*)pmtConfig->pbFormat;
                        // pVih contains the detailed format information.
                        LONG lWidth = pVih->bmiHeader.biWidth;
                        LONG lHeight = pVih->bmiHeader.biHeight;
                        if( lWidth == 1280 )
                        // if (iFormat == 26)
                        { //2 = '1280x720YUV' YUV, 22 = '1280x800YUV', 26 = '1280x720RGB'
                            hr = pConfig->SetFormat(pmtConfig);
                        }
                    }
                    // Delete the media type when you are done.
                    DeleteMediaType(pmtConfig);
                }
            }
        }

        // Query the capture filter for the IAMCameraControl interface.
        IAMCameraControl *pCameraControl = 0;
        hr = pDeviceFilter->QueryInterface(IID_IAMCameraControl,
            (void**)&pCameraControl);
        if (FAILED(hr))
        {
            // The device does not support IAMCameraControl
        }
        else
        {
            long Min, Max, Step, Default, Flags, Val;

            // Get the range and default values
            hr = pCameraControl->GetRange(CameraControl_Exposure, &Min, &Max,
                &Step, &Default, &Flags);
            hr = pCameraControl->GetRange(CameraControl_Focus, &Min, &Max,
                &Step, &Default, &Flags);
            if (SUCCEEDED(hr))
            {
                hr = pCameraControl->Set(CameraControl_Exposure, -11,
                    CameraControl_Flags_Manual); // Min = -11, Max = 1, Step = 1
                hr = pCameraControl->Set(CameraControl_Focus, 12,
                    CameraControl_Flags_Manual);
            }
        }

        // Query the capture filter for the IAMVideoProcAmp interface.
        IAMVideoProcAmp *pProcAmp = 0;
        hr = pDeviceFilter->QueryInterface(IID_IAMVideoProcAmp,
            (void**)&pProcAmp);
        if (FAILED(hr))
        {
            // The device does not support IAMVideoProcAmp
        }
        else
        {
            long Min, Max, Step, Default, Flags, Val;

            // Get the range and default values
            hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max,
                &Step, &Default, &Flags);
            hr = pProcAmp->GetRange(VideoProcAmp_BacklightCompensation,
                &Min, &Max, &Step, &Default, &Flags);
            hr = pProcAmp->GetRange(VideoProcAmp_Contrast, &Min, &Max, &Step,
                &Default, &Flags);
            hr = pProcAmp->GetRange(VideoProcAmp_Saturation, &Min, &Max,
                &Step, &Default, &Flags);
            hr = pProcAmp->GetRange(VideoProcAmp_Sharpness, &Min, &Max,
                &Step, &Default, &Flags);
            hr = pProcAmp->GetRange(VideoProcAmp_WhiteBalance, &Min, &Max,
                &Step, &Default, &Flags);
            if (SUCCEEDED(hr))
            {
                hr = pProcAmp->Set(VideoProcAmp_Brightness, 142,
                    VideoProcAmp_Flags_Manual);
                hr = pProcAmp->Set(VideoProcAmp_BacklightCompensation, 0,
                    VideoProcAmp_Flags_Manual);
                hr = pProcAmp->Set(VideoProcAmp_Contrast, 4,
                    VideoProcAmp_Flags_Manual);
                hr = pProcAmp->Set(VideoProcAmp_Saturation, 100,
                    VideoProcAmp_Flags_Manual);
                hr = pProcAmp->Set(VideoProcAmp_Sharpness, 0,
                    VideoProcAmp_Flags_Manual);
                hr = pProcAmp->Set(VideoProcAmp_WhiteBalance, 2800,
                    VideoProcAmp_Flags_Manual);
            }
        }
        //============================================================
        // end of format selection and camera tuning
        //============================================================

        hr = S_OK;
        CTransformer* trans = new CTransformer("Dif trans", 0,
            CLSID_DIFFilter, &hr);
        IBaseFilter * ttt = 0;
        trans->QueryInterface(IID_IBaseFilter, (LPVOID *)&ttt);

        // set FilterGraph
        hr = pCaptureGraphBuilder2->SetFiltergraph(pGraphBuilder);

        // get MediaControl interface
        hr = pGraphBuilder->QueryInterface(IID_IMediaControl,
            (LPVOID *)&pMediaControl);

        // add device filter to FilterGraph
        hr = pGraphBuilder->AddFilter(ttt, L"Dif trans");
        hr = pGraphBuilder->AddFilter(pDeviceFilter, L"Device Filter");

        // create Graph
        hr = pCaptureGraphBuilder2->RenderStream(&PIN_CATEGORY_CAPTURE,
            NULL, pDeviceFilter, NULL, NULL);

        // start playing
        hr = pMediaControl->Run();

        // to block execution
        // without this messagebox, the graph will be stopped immediately
        MessageBox(NULL, "Block Execution", "Block", MB_OK);

        // release
        pMediaControl->Release();
        pCaptureGraphBuilder2->Release();
        pGraphBuilder->Release();
    }

    // release
    pEnumMoniker->Release();
    pCreateDevEnum->Release();

    // finalize COM
    CoUninitialize();

    return 0;
}
// use cameraID 1 for first and so on HRESULT VideoTexture::init(int cameraID) { if (cameraID <= 0) return S_FALSE; glEnable(GL_TEXTURE_2D); // Texture -> This will be put into the camera module glGenTextures(1, textures); // Create The Texture // Typical Texture Generation Using Data From The Bitmap for (int i = 0; i < 1; i++) { //glActiveTexture(GL_TEXTURE0 + i); glBindTexture(GL_TEXTURE_2D, textures[i]); // Generate The Texture (640x480... make changeable!) //glTexImage2D(GL_TEXTURE_2D, 0, 3, 640, 480, 0, GL_RGB, GL_UNSIGNED_BYTE, ...THe data111!!!); glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); // Linear Filtering glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); // Linear Filtering // Enable Texture Mapping glTexImage2D(GL_TEXTURE_2D, 0, 3, TEXTURE_WIDTH, TEXTURE_HEIGHT, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL); } // Video stuff: // Create captue graph builder: HRESULT hr = InitCaptureGraphBuilder(&pGraph, &pBuild); if (FAILED(hr)) return hr; IEnumMoniker *enumerator; hr = EnumerateDevices(CLSID_VideoInputDeviceCategory, &enumerator); //DisplayDeviceInformation(enumerator); // Take the first camera: IMoniker *pMoniker = NULL; for (int i = 0; i < cameraID; i++) { enumerator->Next(1, &pMoniker, NULL); } IBaseFilter *pCap = NULL; hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pCap); if (SUCCEEDED(hr)) { hr = pGraph->AddFilter(pCap, L"Capture Filter"); if (FAILED(hr)) return hr; } else return hr; // Create the Sample Grabber which we will use // To take each frame for texture generation hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER, IID_ISampleGrabber, (void **)&pGrabber); if (FAILED(hr)) return hr; hr = pGrabber->QueryInterface(IID_IBaseFilter, (void **)&pGrabberBase); // We have to set the 24-bit RGB desire here // So that the proper conversion filters // Are added automatically. 
AM_MEDIA_TYPE desiredType; memset(&desiredType, 0, sizeof(desiredType)); desiredType.majortype = MEDIATYPE_Video; desiredType.subtype = MEDIASUBTYPE_RGB24; desiredType.formattype = FORMAT_VideoInfo; pGrabber->SetMediaType(&desiredType); pGrabber->SetBufferSamples(TRUE); // add to Graph pGraph->AddFilter(pGrabberBase, L"Grabber"); /* Null render filter */ hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&pNullRender); if(FAILED(hr)) return hr; pGraph->AddFilter(pNullRender, L"Render"); // Connect the graph hr = ConnectFilters(pGraph, pCap, pGrabberBase); if(FAILED(hr)) return hr; hr = ConnectFilters(pGraph, pGrabberBase, pNullRender); // Set output format of capture: IAMStreamConfig *pConfig = NULL; hr = pBuild->FindInterface( &PIN_CATEGORY_CAPTURE, // Capture pin. 0, // Any media type. pCap, // Pointer to the capture filter. IID_IAMStreamConfig, (void**)&pConfig); if (FAILED(hr)) return hr; AM_MEDIA_TYPE *pmtConfig; hr = pConfig->GetFormat(&pmtConfig); if (FAILED(hr)) return hr; // Try and find a good video format int iCount = 0, iSize = 0; hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize); // Check the size to make sure we pass in the correct structure. if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS)) { // Use the video capabilities structure. for (int iFormat = 0; iFormat < iCount; iFormat++) { VIDEO_STREAM_CONFIG_CAPS scc; AM_MEDIA_TYPE *pmtConfig; hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc); if (SUCCEEDED(hr)) { VIDEOINFOHEADER *hdr = (VIDEOINFOHEADER *)pmtConfig->pbFormat; if (hdr->bmiHeader.biWidth == CAM_WIDTH && hdr->bmiHeader.biHeight == CAM_HEIGHT && hdr->bmiHeader.biBitCount == 24) { pConfig->SetFormat(pmtConfig); } } } } pConfig->Release(); // Set camera stuff IAMCameraControl *pCamControl = NULL; hr = pCap->QueryInterface(IID_IAMCameraControl, (void **)&pCamControl); if (FAILED(hr)) return hr; // Get the range and default value. 
long Min, Max, Step, Default, Flags; // For getting: long Val; hr = pCamControl->GetRange(CameraControl_Focus, &Min, &Max, &Step, &Default, &Flags); if (SUCCEEDED(hr)) pCamControl->Set(CameraControl_Focus, 0, CameraControl_Flags_Manual); #if 0 hr = pCamControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step, &Default, &Flags); if (SUCCEEDED(hr)) pCamControl->Set(CameraControl_Exposure, -4, CameraControl_Flags_Manual); #endif pCamControl->Release(); IAMVideoProcAmp *pProcAmp = 0; hr = pCap->QueryInterface(IID_IAMVideoProcAmp, (void**)&pProcAmp); if (FAILED(hr)) return hr; #if 0 hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step, &Default, &Flags); if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Brightness, 30, VideoProcAmp_Flags_Manual); hr = pProcAmp->GetRange(VideoProcAmp_Gain, &Min, &Max, &Step, &Default, &Flags); if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Gain, 30, VideoProcAmp_Flags_Manual); hr = pProcAmp->GetRange(VideoProcAmp_WhiteBalance, &Min, &Max, &Step, &Default, &Flags); if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_WhiteBalance, 4500, VideoProcAmp_Flags_Manual); hr = pProcAmp->GetRange(VideoProcAmp_Saturation, &Min, &Max, &Step, &Default, &Flags); if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Saturation, 100, VideoProcAmp_Flags_Manual); hr = pProcAmp->GetRange(VideoProcAmp_Contrast, &Min, &Max, &Step, &Default, &Flags); if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Contrast, 6, VideoProcAmp_Flags_Manual); #endif pProcAmp->Release(); hr = pMediaControl->Run(); return hr; }
// Builds and runs the preview graph for the first video capture device:
// finds the device, adds it to the graph, forces 1920x1080 @ 30fps on the
// preview pin when IAMStreamConfig is available, renders the preview stream,
// positions the video window, and starts the graph.
// Returns S_OK on success or the first failing HRESULT (Msg() reports errors).
HRESULT CaptureVideo()
{
    HRESULT hr;
    IBaseFilter *pSrcFilter = NULL;

    // Get DirectShow interfaces
    hr = GetInterfaces();
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to get video interfaces! hr=0x%x"), hr);
        return hr;
    }

    // Attach the filter graph to the capture graph
    hr = g_pCapture->SetFiltergraph(g_pGraph);
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to set capture filter graph! hr=0x%x"), hr);
        return hr;
    }

    // Use the system device enumerator and class enumerator to find
    // a video capture/preview device, such as a desktop USB video camera.
    hr = FindCaptureDevice(&pSrcFilter);
    if (FAILED(hr))
    {
        // Don't display a message because FindCaptureDevice will handle it
        return hr;
    }

    // Add Capture filter to our graph.
    hr = g_pGraph->AddFilter(pSrcFilter, L"Video Capture");
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't add the capture filter to the graph! hr=0x%x\r\n\r\n")
            TEXT("If you have a working video capture device, please make sure\r\n")
            TEXT("that it is connected and is not being used by another application.\r\n\r\n")
            TEXT("The sample will now close."), hr);
        pSrcFilter->Release();
        return hr;
    }

    // Try to force 1920x1080 @ 30fps on the preview pin.
    // FIX: pSC was previously uninitialized, so the !pSC test below read
    // garbage when both FindInterface calls failed.
    IAMStreamConfig *pSC = NULL;
    hr = g_pCapture->FindInterface(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Interleaved,
                                   pSrcFilter, IID_IAMStreamConfig, (void **)&pSC);
    if (FAILED(hr))
        hr = g_pCapture->FindInterface(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
                                       pSrcFilter, IID_IAMStreamConfig, (void **)&pSC);
    if (!pSC)
    {
        pSrcFilter->Release();  // FIX: filter was leaked on this early-out
        return hr;
    }

    int iCount = 0, iSize = 0;
    hr = pSC->GetNumberOfCapabilities(&iCount, &iSize);

    // Check the size to make sure we pass in the correct structure.
    if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS) && iCount > 0)
    {
        // FIX: the old loop repeated the identical GetFormat/SetFormat
        // sequence iCount times (never using the loop index) and leaked one
        // AM_MEDIA_TYPE per iteration; a single pass is equivalent.
        AM_MEDIA_TYPE *pmtConfig = NULL;
        hr = pSC->GetFormat(&pmtConfig);
        if (SUCCEEDED(hr))
        {
            VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)pmtConfig->pbFormat;
            const double fps = 30;
            pvi->AvgTimePerFrame = (LONGLONG)(10000000 / fps);  // 100ns units
            // FIX: set the target dimensions BEFORE computing biSizeImage;
            // the old code sized the image from the previous dimensions.
            pvi->bmiHeader.biWidth = 1920;
            pvi->bmiHeader.biHeight = 1080;
            pvi->bmiHeader.biSizeImage = DIBSIZE(pvi->bmiHeader);
            hr = pSC->SetFormat(pmtConfig);
            DeleteMediaType(pmtConfig);  // FIX: was leaked
        }
    }
    pSC->Release();  // FIX: was only released when hr succeeded

    // Render the preview pin on the video capture filter
    // Use this instead of g_pGraph->RenderFile
    hr = g_pCapture->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
                                  pSrcFilter, NULL, NULL);
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't render the video capture stream. hr=0x%x\r\n")
            TEXT("The capture device may already be in use by another application.\r\n\r\n")
            TEXT("The sample will now close."), hr);
        pSrcFilter->Release();
        return hr;
    }

    // Now that the filter has been added to the graph and we have
    // rendered its stream, we can release this reference to the filter.
    pSrcFilter->Release();

    // Set video window style and position
    hr = SetupVideoWindow();
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't initialize video window! hr=0x%x"), hr);
        return hr;
    }

#ifdef REGISTER_FILTERGRAPH
    // Add our graph to the running object table, which will allow
    // the GraphEdit application to "spy" on our graph
    hr = AddGraphToRot(g_pGraph, &g_dwGraphRegister);
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to register filter graph with ROT! hr=0x%x"), hr);
        g_dwGraphRegister = 0;
    }
#endif

    // Start previewing video data
    hr = g_pMC->Run();
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't run the graph! hr=0x%x"), hr);
        return hr;
    }

    // Remember current state
    g_psCurrent = Running;
    return S_OK;
}
std::vector<CameraConfig> videoInputCamera::getCameraConfigs(int dev_id) { std::vector<CameraConfig> cfg_list; int count = getDeviceCount(); if (count==0) return cfg_list; comInit(); HRESULT hr; ICaptureGraphBuilder2 *lpCaptureGraphBuilder; IGraphBuilder *lpGraphBuilder; IBaseFilter *lpInputFilter; IAMStreamConfig *lpStreamConfig; char nDeviceName[255]; WCHAR wDeviceName[255]; for (int cam_id=0;cam_id<count;cam_id++) { if ((dev_id>=0) && (dev_id!=cam_id)) continue; hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void **)&lpCaptureGraphBuilder); if (FAILED(hr)) // FAILED is a macro that tests the return value { printf("ERROR - Could not create the Filter Graph Manager\n"); comUnInit(); return cfg_list; } // Create the Filter Graph Manager. hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void**)&lpGraphBuilder); if (FAILED(hr)) { printf("ERROR - Could not add the graph builder!\n"); lpCaptureGraphBuilder->Release(); comUnInit(); return cfg_list; } hr = lpCaptureGraphBuilder->SetFiltergraph(lpGraphBuilder); if (FAILED(hr)) { printf("ERROR - Could not set filtergraph\n"); lpGraphBuilder->Release(); lpCaptureGraphBuilder->Release(); comUnInit(); return cfg_list; } memset(wDeviceName, 0, sizeof(WCHAR) * 255); memset(nDeviceName, 0, sizeof(char) * 255); hr = getDevice(&lpInputFilter, cam_id, wDeviceName, nDeviceName); if (SUCCEEDED(hr)){ hr = lpGraphBuilder->AddFilter(lpInputFilter, wDeviceName); }else{ printf("ERROR - Could not find specified video device\n"); lpGraphBuilder->Release(); lpCaptureGraphBuilder->Release(); comUnInit(); return cfg_list; } hr = lpCaptureGraphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, lpInputFilter, IID_IAMStreamConfig, (void **)&lpStreamConfig); if(FAILED(hr)){ printf("ERROR: Couldn't config the stream!\n"); lpInputFilter->Release(); lpGraphBuilder->Release(); lpCaptureGraphBuilder->Release(); comUnInit(); return cfg_list; } 
CameraConfig cam_cfg; CameraTool::initCameraConfig(&cam_cfg); cam_cfg.driver = DRIVER_DEFAULT; cam_cfg.device = cam_id; sprintf(cam_cfg.name, "%s", nDeviceName); int iCount = 0; int iSize = 0; hr = lpStreamConfig->GetNumberOfCapabilities(&iCount, &iSize); std::vector<CameraConfig> fmt_list; if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS)) { GUID lastFormat = MEDIASUBTYPE_None; for (int iFormat = 0; iFormat < iCount; iFormat+=2) { VIDEO_STREAM_CONFIG_CAPS scc; AM_MEDIA_TYPE *pmtConfig; hr = lpStreamConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc); if (SUCCEEDED(hr)){ if ( pmtConfig->subtype != lastFormat) { if (fmt_list.size()>0) { std::sort(fmt_list.begin(), fmt_list.end()); cfg_list.insert( cfg_list.end(), fmt_list.begin(), fmt_list.end() ); fmt_list.clear(); } cam_cfg.cam_format = getMediaSubtype(pmtConfig->subtype); lastFormat = pmtConfig->subtype; } int stepX = scc.OutputGranularityX; int stepY = scc.OutputGranularityY; if(stepX < 1 || stepY < 1) continue; else if ((stepX==1) && (stepY==1)) { cam_cfg.cam_width = scc.InputSize.cx; cam_cfg.cam_height = scc.InputSize.cy; int maxFrameInterval = scc.MaxFrameInterval; if (maxFrameInterval==0) maxFrameInterval = 10000000; float last_fps=-1; VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat; for (int iv=scc.MinFrameInterval;iv<=maxFrameInterval;iv=iv*2) { pVih->AvgTimePerFrame = iv; hr = lpStreamConfig->SetFormat(pmtConfig); if (hr==S_OK) { hr = lpStreamConfig->GetFormat(&pmtConfig); float fps = ((int)floor(100000000.0f/(float)pVih->AvgTimePerFrame + 0.5f))/10.0f; if (fps!=last_fps) { cam_cfg.cam_fps = fps; fmt_list.push_back(cam_cfg); last_fps=fps; } } } } else { int x,y; for (x=scc.MinOutputSize.cx,y=scc.MinOutputSize.cy;x<=scc.MaxOutputSize.cx,y<=scc.MaxOutputSize.cy;x+=stepX,y+=stepY) { cam_cfg.cam_width = x; cam_cfg.cam_height = y; int maxFrameInterval = scc.MaxFrameInterval; if (maxFrameInterval==0) maxFrameInterval = 10000000; float last_fps=-1; VIDEOINFOHEADER *pVih = 
(VIDEOINFOHEADER*)pmtConfig->pbFormat; for (int iv=scc.MinFrameInterval;iv<=maxFrameInterval;iv=iv*2) { pVih->AvgTimePerFrame = iv; hr = lpStreamConfig->SetFormat(pmtConfig); if (hr==S_OK) { hr = lpStreamConfig->GetFormat(&pmtConfig); float fps = ((int)floor(100000000.0f/(float)pVih->AvgTimePerFrame + 0.5f))/10.0f; if (fps!=last_fps) { cam_cfg.cam_fps = fps; fmt_list.push_back(cam_cfg); last_fps=fps; } } } } } deleteMediaType(pmtConfig); } } } if (fmt_list.size()>0) { std::sort(fmt_list.begin(), fmt_list.end()); cfg_list.insert( cfg_list.end(), fmt_list.begin(), fmt_list.end() ); fmt_list.clear(); } lpStreamConfig->Release(); lpInputFilter->Release(); lpGraphBuilder->Release(); lpCaptureGraphBuilder->Release(); } comUnInit(); return cfg_list; }