//将输入crossbar变成PhysConn_Video_Composite void CCameraDS::SetCrossBar() { int i; IAMCrossbar *pXBar1 = NULL; ICaptureGraphBuilder2 *pBuilder = NULL; HRESULT hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void **)&pBuilder); if (SUCCEEDED(hr)) { hr = pBuilder->SetFiltergraph(m_pGraph); } hr = pBuilder->FindInterface(&LOOK_UPSTREAM_ONLY, NULL, m_pDeviceFilter,IID_IAMCrossbar, (void**)&pXBar1); if (SUCCEEDED(hr)) { long OutputPinCount; long InputPinCount; long PinIndexRelated; long PhysicalType; long inPort = 0; long outPort = 0; pXBar1->get_PinCounts(&OutputPinCount,&InputPinCount); for( i =0;i<InputPinCount;i++) { pXBar1->get_CrossbarPinInfo(TRUE,i,&PinIndexRelated,&PhysicalType); if(PhysConn_Video_Composite==PhysicalType) { inPort = i; break; } } for( i =0;i<OutputPinCount;i++) { pXBar1->get_CrossbarPinInfo(FALSE,i,&PinIndexRelated,&PhysicalType); if(PhysConn_Video_VideoDecoder==PhysicalType) { outPort = i; break; } } if(S_OK==pXBar1->CanRoute(outPort,inPort)) { pXBar1->Route(outPort,inPort); } pXBar1->Release(); } pBuilder->Release(); }
/******************************Public*Routine******************************\
* BuildAndRender
*
* Builds a DirectShow graph for a capture-card source (pCap), renders it
* into a renderless-mode VMR9 attached to pWizard, and caches the graph's
* IMediaControl / IMediaSeeking interfaces.
*
* pCap             - capture source filter (caller retains ownership)
* VidType          - desired video subtype GUID
* Width/Height/nFPS- desired capture format (applied only to VIDEOINFO types)
* pWizard          - VMR9 wizard that receives the renderer
* bUsingColorSpace - insert a color-space converter before the VMR9
* ConnectProc      - optional caller-supplied connection routine
*
* Returns S_OK or the first failing HRESULT. On failure the VMR9 is
* detached from the wizard before returning.
*
* Fixes vs. original: GetFormat's HRESULT and the returned media type are
* now validated BEFORE mmt is dereferenced; SetRenderingMode is checked;
* the stale-hr check after the commented-out SetFormat is removed; a
* division by zero is avoided when nFPS == 0; unused local pPin removed.
\**************************************************************************/
HRESULT CVMR9Subgraph::BuildAndRender(IBaseFilter* pCap, GUID VidType, UINT Width, UINT Height, int nFPS,
                                      IMultiVMR9Wizard* pWizard, BOOL bUsingColorSpace, SourceConnectProc ConnectProc)
{
    HRESULT hr = S_OK;

    if (!pWizard) return E_FAIL;
    if (!pCap)    return E_FAIL;
    if (m_pGraph) return E_FAIL;        // graph already built

    IVMRFilterConfig9     *pConfig     = NULL;
    IGraphBuilder         *pGb         = NULL;
    IBaseFilter           *pColorSpace = NULL;
    IBaseFilter           *pVMR9       = NULL;
    ICaptureGraphBuilder2 *pBuild      = NULL;
    IAMStreamConfig       *pStrCfig    = NULL;
    AM_MEDIA_TYPE         *mmt         = NULL;

    m_GraphType   = Capture_Device;
    m_ConnectProc = ConnectProc;
    CMediaHelper Helper;

    try
    {
        // Create the filter graph and the capture-graph builder.
        hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
                              IID_IFilterGraph, (void**)&m_pGraph);
        if (FAILED(hr)) throw hr;
        if (!m_pGraph)  throw E_OUTOFMEMORY;

        hr = m_pGraph->QueryInterface(IID_IGraphBuilder, (void**)&pGb);
        if (FAILED(hr)) throw hr;
        if (!pGb)       throw E_OUTOFMEMORY;

        hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
                              IID_ICaptureGraphBuilder2, (void **)&pBuild);
        if (FAILED(hr)) throw hr;
        if (!pBuild)    throw E_OUTOFMEMORY;

        hr = pBuild->SetFiltergraph(pGb);
        if (FAILED(hr)) throw hr;

        // Create and add the VMR9, then switch it to renderless mode so the
        // wizard can own presentation.
        hr = CoCreateInstance(CLSID_VideoMixingRenderer9, NULL, CLSCTX_INPROC,
                              IID_IBaseFilter, (void**)&pVMR9);
        if (FAILED(hr)) throw hr;
        if (!pVMR9)     throw E_OUTOFMEMORY;

        hr = m_pGraph->AddFilter(pVMR9, L"VMR9");
        if (FAILED(hr)) throw hr;

        hr = pVMR9->QueryInterface(IID_IVMRFilterConfig9, (void**)&pConfig);
        if (FAILED(hr)) throw hr;

        hr = pConfig->SetRenderingMode(VMR9Mode_Renderless);
        if (FAILED(hr)) throw hr;       // was unchecked in the original

        hr = pWizard->Attach(pVMR9, D3DFMT_UNKNOWN, &m_dwID);
        if (FAILED(hr)) throw hr;

        // Optional color-space converter in front of the VMR9.
        if (bUsingColorSpace)
        {
            hr = CoCreateInstance(CLSID_Colour, NULL, CLSCTX_INPROC,
                                  IID_IBaseFilter, (void**)&pColorSpace);
            if (FAILED(hr))   throw hr;
            if (!pColorSpace) throw E_OUTOFMEMORY;

            hr = m_pGraph->AddFilter(pColorSpace, L"ColorSpace");
            if (FAILED(hr)) throw hr;
        }

        // Add the capture source to the graph.
        hr = m_pGraph->AddFilter(pCap, L"Capture");
        if (FAILED(hr)) throw hr;

        if (m_ConnectProc)
        {
            // Caller-supplied connection routine takes over.
            hr = m_ConnectProc(m_dwID, m_pGraph, pCap, pVMR9);
        }
        else // default connection path
        {
            // Find the stream-config interface: interleaved (DV) first,
            // then plain video.
            hr = pBuild->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Interleaved,
                                       pCap, IID_IAMStreamConfig, (void **)&pStrCfig);
            if (FAILED(hr))
            {
                hr = pBuild->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
                                           pCap, IID_IAMStreamConfig, (void **)&pStrCfig);
                if (FAILED(hr)) throw hr;
            }

            hr = pStrCfig->GetFormat(&mmt);
            if (FAILED(hr)) throw hr;       // validate BEFORE dereferencing mmt
            if (!mmt)       throw E_OUTOFMEMORY;

            if (mmt->formattype == FORMAT_VideoInfo)
            {
                VIDEOINFO *pvi = (VIDEOINFO *)mmt->pbFormat;
                if (nFPS > 0)               // guard division by zero
                    pvi->AvgTimePerFrame = UNITS / nFPS;
                pvi->bmiHeader.biWidth  = Width;
                pvi->bmiHeader.biHeight = Height;
                mmt->subtype            = VidType;
                // NOTE(review): SetFormat was commented out in the original,
                // so the edits above are never pushed to the device — confirm
                // whether this was intentional.
                // hr = pStrCfig->SetFormat(mmt);
            }

            // Render capture -> (optional color space) -> VMR9;
            // try interleaved first, then plain video.
            hr = pBuild->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Interleaved,
                                      pCap, pColorSpace, pVMR9);
            if (FAILED(hr))
            {
                hr = pBuild->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
                                          pCap, pColorSpace, pVMR9);
                if (FAILED(hr)) throw hr;
            }
        }
        if (FAILED(hr)) throw hr;

        // All rendered: cache MediaControl and MediaSeeking.
        hr = m_pGraph->QueryInterface(IID_IMediaControl, (void**)&m_pMc);
        if (FAILED(hr)) throw hr;

        hr = m_pGraph->QueryInterface(IID_IMediaSeeking, (void**)&m_pMs);
        if (FAILED(hr)) throw hr;
    }
    catch (HRESULT hr1)
    {
        pWizard->Detach(m_dwID);
        hr = hr1;
    }

    RELEASE( pConfig );
    RELEASE( pGb );
    RELEASE( pColorSpace );
    RELEASE( pBuild );
    RELEASE( pStrCfig );
    RELEASE( pVMR9 );
    Helper.DeleteMediaType(mmt);
    return hr;
}
int main(int argc, char* argv[]) { ICaptureGraphBuilder2 *pCaptureGraphBuilder = NULL; IGraphBuilder *pGraphBuilder = NULL; IBaseFilter *pSource = NULL; IBaseFilter *pMux = NULL; IBaseFilter *pVideoCompressor = NULL; IBaseFilter *pAudioCompressor = NULL; IAMStreamConfig *pAMStreamConfig = NULL; IAMVideoCompression *pAMVideoCompression = NULL; IMediaControl *pControl = NULL; IMediaSeeking *pSeek = NULL; IMediaEvent *pEvent = NULL; HRESULT hr; DWORD pdwRegister=0; CoInitialize(NULL); // Create the capture graph builder. CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC, IID_ICaptureGraphBuilder2, (void **)&pCaptureGraphBuilder); // Make the rendering section of the graph. pCaptureGraphBuilder->SetOutputFileName( &MEDIASUBTYPE_Avi, // File type. L"C:\\STDIUE1.avi", // File name. &pMux, // pointer to the multiplexer. NULL); // pointer to the file writer. // Load the source file. pCaptureGraphBuilder->GetFiltergraph(&pGraphBuilder); pGraphBuilder->AddSourceFilter(L"C:\\Program Files\\Microsoft Money\\Media\\STDIUE1.avi", L"Source Filter", &pSource); // Add the compressor filter. CoCreateInstance(CLSID_AVICo, NULL, CLSCTX_INPROC, IID_IBaseFilter, (void **)&pVideoCompressor); pGraphBuilder->AddFilter(pVideoCompressor, L"Video Compressor"); // Render the video stream, through the compressor. pCaptureGraphBuilder->RenderStream( NULL, // Output pin category NULL, // Media type pSource, // Source filter pVideoCompressor, // Compressor filter pMux); // Sink filter (the AVI Mux) /* CoCreateInstance(CLSID_GSM, NULL, CLSCTX_INPROC, IID_IBaseFilter, (void **)&pAudioCompressor); pGraphBuilder->AddFilter(pAudioCompressor, L"Audio Compressor");*/ // Render the audio stream. pCaptureGraphBuilder->RenderStream( NULL, NULL, pSource, pAudioCompressor, pMux); // Compress at 100k/second data rate. 
AM_MEDIA_TYPE *pmt; pCaptureGraphBuilder->FindInterface(NULL, NULL, pVideoCompressor, IID_IAMStreamConfig, (void **)&pAMStreamConfig); pAMStreamConfig->GetFormat(&pmt); if (pmt->formattype == FORMAT_VideoInfo) { ((VIDEOINFOHEADER *)(pmt->pbFormat))->dwBitRate = 100000; pAMStreamConfig->SetFormat(pmt); } // Request key frames every four frames. pAMStreamConfig->QueryInterface(IID_IAMVideoCompression, (void **)&pAMVideoCompression); pAMVideoCompression->put_KeyFrameRate(4); pAMVideoCompression->Release(); pAMStreamConfig->Release(); // Run the graph. pGraphBuilder->QueryInterface(IID_IMediaControl, (void **)&pControl); pGraphBuilder->QueryInterface(IID_IMediaEvent, (void **)&pEvent); hr = pMux->QueryInterface(IID_IMediaSeeking, (void**)&pSeek); pControl->Run(); printf("Recompressing... \n"); long evCode; if (SUCCEEDED(hr)) { REFERENCE_TIME rtTotal, rtNow = 0; pSeek->GetDuration(&rtTotal); while ((pEvent->WaitForCompletion(1000, &evCode)) == E_ABORT) { pSeek->GetCurrentPosition(&rtNow); printf("%d%%\n", (rtNow * 100)/rtTotal); } pSeek->Release(); } else // Cannot update the progress. { pEvent->WaitForCompletion(INFINITE, &evCode); } pControl->Stop(); printf("All done\n"); pSource->Release(); pMux->Release(); pVideoCompressor->Release(); pAudioCompressor->Release (); pControl->Release(); pEvent->Release(); pCaptureGraphBuilder->Release(); pGraphBuilder->Release(); CoUninitialize(); return 0; }
std::vector<CameraConfig> videoInputCamera::getCameraConfigs(int dev_id) { std::vector<CameraConfig> cfg_list; int count = getDeviceCount(); if (count==0) return cfg_list; comInit(); HRESULT hr; ICaptureGraphBuilder2 *lpCaptureGraphBuilder; IGraphBuilder *lpGraphBuilder; IBaseFilter *lpInputFilter; IAMStreamConfig *lpStreamConfig; char nDeviceName[255]; WCHAR wDeviceName[255]; for (int cam_id=0;cam_id<count;cam_id++) { if ((dev_id>=0) && (dev_id!=cam_id)) continue; hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void **)&lpCaptureGraphBuilder); if (FAILED(hr)) // FAILED is a macro that tests the return value { printf("ERROR - Could not create the Filter Graph Manager\n"); comUnInit(); return cfg_list; } // Create the Filter Graph Manager. hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void**)&lpGraphBuilder); if (FAILED(hr)) { printf("ERROR - Could not add the graph builder!\n"); lpCaptureGraphBuilder->Release(); comUnInit(); return cfg_list; } hr = lpCaptureGraphBuilder->SetFiltergraph(lpGraphBuilder); if (FAILED(hr)) { printf("ERROR - Could not set filtergraph\n"); lpGraphBuilder->Release(); lpCaptureGraphBuilder->Release(); comUnInit(); return cfg_list; } memset(wDeviceName, 0, sizeof(WCHAR) * 255); memset(nDeviceName, 0, sizeof(char) * 255); hr = getDevice(&lpInputFilter, cam_id, wDeviceName, nDeviceName); if (SUCCEEDED(hr)){ hr = lpGraphBuilder->AddFilter(lpInputFilter, wDeviceName); }else{ printf("ERROR - Could not find specified video device\n"); lpGraphBuilder->Release(); lpCaptureGraphBuilder->Release(); comUnInit(); return cfg_list; } hr = lpCaptureGraphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, lpInputFilter, IID_IAMStreamConfig, (void **)&lpStreamConfig); if(FAILED(hr)){ printf("ERROR: Couldn't config the stream!\n"); lpInputFilter->Release(); lpGraphBuilder->Release(); lpCaptureGraphBuilder->Release(); comUnInit(); return cfg_list; } 
CameraConfig cam_cfg; CameraTool::initCameraConfig(&cam_cfg); cam_cfg.driver = DRIVER_DEFAULT; cam_cfg.device = cam_id; sprintf(cam_cfg.name, "%s", nDeviceName); int iCount = 0; int iSize = 0; hr = lpStreamConfig->GetNumberOfCapabilities(&iCount, &iSize); std::vector<CameraConfig> fmt_list; if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS)) { GUID lastFormat = MEDIASUBTYPE_None; for (int iFormat = 0; iFormat < iCount; iFormat+=2) { VIDEO_STREAM_CONFIG_CAPS scc; AM_MEDIA_TYPE *pmtConfig; hr = lpStreamConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc); if (SUCCEEDED(hr)){ if ( pmtConfig->subtype != lastFormat) { if (fmt_list.size()>0) { std::sort(fmt_list.begin(), fmt_list.end()); cfg_list.insert( cfg_list.end(), fmt_list.begin(), fmt_list.end() ); fmt_list.clear(); } cam_cfg.cam_format = getMediaSubtype(pmtConfig->subtype); lastFormat = pmtConfig->subtype; } int stepX = scc.OutputGranularityX; int stepY = scc.OutputGranularityY; if(stepX < 1 || stepY < 1) continue; else if ((stepX==1) && (stepY==1)) { cam_cfg.cam_width = scc.InputSize.cx; cam_cfg.cam_height = scc.InputSize.cy; int maxFrameInterval = scc.MaxFrameInterval; if (maxFrameInterval==0) maxFrameInterval = 10000000; float last_fps=-1; VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat; for (int iv=scc.MinFrameInterval;iv<=maxFrameInterval;iv=iv*2) { pVih->AvgTimePerFrame = iv; hr = lpStreamConfig->SetFormat(pmtConfig); if (hr==S_OK) { hr = lpStreamConfig->GetFormat(&pmtConfig); float fps = ((int)floor(100000000.0f/(float)pVih->AvgTimePerFrame + 0.5f))/10.0f; if (fps!=last_fps) { cam_cfg.cam_fps = fps; fmt_list.push_back(cam_cfg); last_fps=fps; } } } } else { int x,y; for (x=scc.MinOutputSize.cx,y=scc.MinOutputSize.cy;x<=scc.MaxOutputSize.cx,y<=scc.MaxOutputSize.cy;x+=stepX,y+=stepY) { cam_cfg.cam_width = x; cam_cfg.cam_height = y; int maxFrameInterval = scc.MaxFrameInterval; if (maxFrameInterval==0) maxFrameInterval = 10000000; float last_fps=-1; VIDEOINFOHEADER *pVih = 
(VIDEOINFOHEADER*)pmtConfig->pbFormat; for (int iv=scc.MinFrameInterval;iv<=maxFrameInterval;iv=iv*2) { pVih->AvgTimePerFrame = iv; hr = lpStreamConfig->SetFormat(pmtConfig); if (hr==S_OK) { hr = lpStreamConfig->GetFormat(&pmtConfig); float fps = ((int)floor(100000000.0f/(float)pVih->AvgTimePerFrame + 0.5f))/10.0f; if (fps!=last_fps) { cam_cfg.cam_fps = fps; fmt_list.push_back(cam_cfg); last_fps=fps; } } } } } deleteMediaType(pmtConfig); } } } if (fmt_list.size()>0) { std::sort(fmt_list.begin(), fmt_list.end()); cfg_list.insert( cfg_list.end(), fmt_list.begin(), fmt_list.end() ); fmt_list.clear(); } lpStreamConfig->Release(); lpInputFilter->Release(); lpGraphBuilder->Release(); lpCaptureGraphBuilder->Release(); } comUnInit(); return cfg_list; }