Example No. 1
bool MIPDirectShowCapture::initCaptureGraphBuilder()
{
    IGraphBuilder *pGraph = NULL;
    ICaptureGraphBuilder2 *pBuild = NULL;

    HRESULT hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void**)&pBuild);
    if (HR_FAILED(hr))
    {
		setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTCREATECAPTUREBUILDER);
		return false;
	}

    hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (void**)&pGraph);
	if (HR_FAILED(hr))
	{
		pBuild->Release();
		setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTCREATEMANAGER);
		return false;
	}
	
	pBuild->SetFiltergraph(pGraph);

	m_pBuilder = pBuild;
	m_pGraph = pGraph;

	return true;
}
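initCaptureGraphBuilder() stores both interfaces in member variables and never releases them itself, so the owning class needs a matching teardown. A minimal sketch, assuming the same member names and a hypothetical clearCaptureGraphBuilder() method that is not part of the original source:

// Hypothetical counterpart to initCaptureGraphBuilder(): release the
// interfaces the routine above stored (names assumed from the snippet).
void MIPDirectShowCapture::clearCaptureGraphBuilder()
{
	if (m_pBuilder)
	{
		m_pBuilder->Release();
		m_pBuilder = NULL;
	}
	if (m_pGraph)
	{
		m_pGraph->Release();
		m_pGraph = NULL;
	}
}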
Example No. 2
// Switch the crossbar input to PhysConn_Video_Composite
void CCameraDS::SetCrossBar()
{
	int i;
	IAMCrossbar *pXBar1 = NULL;
	ICaptureGraphBuilder2 *pBuilder = NULL;

 
	HRESULT hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL,
					CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, 
					(void **)&pBuilder);

	if (FAILED(hr) || !pBuilder)
		return;

	hr = pBuilder->SetFiltergraph(m_pGraph);

	// Look upstream from the capture filter for a crossbar filter.
	hr = pBuilder->FindInterface(&LOOK_UPSTREAM_ONLY, NULL,
								m_pDeviceFilter, IID_IAMCrossbar, (void**)&pXBar1);

	if (SUCCEEDED(hr)) 
	{
  		long OutputPinCount;
		long InputPinCount;
		long PinIndexRelated;
		long PhysicalType;
		long inPort = 0;
		long outPort = 0;

		pXBar1->get_PinCounts(&OutputPinCount,&InputPinCount);
		for( i =0;i<InputPinCount;i++)
		{
			pXBar1->get_CrossbarPinInfo(TRUE,i,&PinIndexRelated,&PhysicalType);
			if(PhysConn_Video_Composite==PhysicalType) 
			{
				inPort = i;
				break;
			}
		}
		for( i =0;i<OutputPinCount;i++)
		{
			pXBar1->get_CrossbarPinInfo(FALSE,i,&PinIndexRelated,&PhysicalType);
			if(PhysConn_Video_VideoDecoder==PhysicalType) 
			{
				outPort = i;
				break;
			}
		}
  
		if(S_OK==pXBar1->CanRoute(outPort,inPort))
		{
			pXBar1->Route(outPort,inPort);
		}
		pXBar1->Release();  
	}
	pBuilder->Release();
}
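SetCrossBar() hard-codes the Composite input and the Video Decoder output. When a card exposes different connectors, it helps to first list what the crossbar actually has; a small hypothetical helper (not part of CCameraDS), assuming a valid IAMCrossbar pointer and <stdio.h>:

// Hypothetical debugging helper: print the physical connector type of
// every input pin on a crossbar.
void DumpCrossbarInputs(IAMCrossbar *pXBar)
{
	long outputPinCount = 0, inputPinCount = 0;
	pXBar->get_PinCounts(&outputPinCount, &inputPinCount);
	for (long i = 0; i < inputPinCount; i++)
	{
		long pinIndexRelated = 0, physicalType = 0;
		pXBar->get_CrossbarPinInfo(TRUE, i, &pinIndexRelated, &physicalType);
		printf("Input pin %ld: physical type %ld\n", i, physicalType);
	}
}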
Example No. 3
HRESULT VideoTexture::InitCaptureGraphBuilder(
  IGraphBuilder **ppGraph,  // Receives the pointer.
  ICaptureGraphBuilder2 **ppBuild  // Receives the pointer.
)
{
    if (!ppGraph || !ppBuild)
    {
        return E_POINTER;
    }
    IGraphBuilder *pGraph = NULL;
    ICaptureGraphBuilder2 *pBuild = NULL;

    // Create the Capture Graph Builder.
    HRESULT hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, 
        CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void**)&pBuild );
    if (SUCCEEDED(hr))
    {
        // Create the Filter Graph Manager.
        hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,
            IID_IGraphBuilder, (void**)&pGraph);
        if (SUCCEEDED(hr))
        {
            // Initialize the Capture Graph Builder.
            pBuild->SetFiltergraph(pGraph);

            // Return both interface pointers to the caller.
            *ppBuild = pBuild;
            *ppGraph = pGraph; // The caller must release both interfaces.

            // Query the graph for the media control and event interfaces
            // (pMediaControl and pMediaEvent appear to be class members).
            hr = pGraph->QueryInterface(IID_IMediaControl, (void **)&pMediaControl);
            if (FAILED(hr)) return hr;
            hr = pGraph->QueryInterface(IID_IMediaEvent, (void **)&pMediaEvent);
            if (FAILED(hr)) return hr;

            return S_OK;
        }
        else
        {
            pBuild->Release();
        }
    }
    return hr; // Failed
}
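As the comments note, the caller owns both returned interfaces. A sketch of a typical call site (the videoTexture instance name is an assumption, not taken from the original project):

// Hypothetical usage of InitCaptureGraphBuilder(); the caller releases
// both interfaces when it is done with the graph.
IGraphBuilder *pGraph = NULL;
ICaptureGraphBuilder2 *pBuild = NULL;
HRESULT hr = videoTexture.InitCaptureGraphBuilder(&pGraph, &pBuild);
if (SUCCEEDED(hr))
{
    // ... add filters, render streams, run the graph ...
    pBuild->Release();
    pGraph->Release();
}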
Example No. 4
HRESULT CVMR9Subgraph::AddCaptureFilter(LPCWSTR lpPath ,LPCWSTR pszVidComName,LPCWSTR lpszAudDevice ,LPCWSTR pszAudComName ,IMultiVMR9Wizard* pWizard )
{
	HRESULT hr = S_OK;
    IGraphBuilder*  pGb = 0;
	ICaptureGraphBuilder2 * pBuild = 0;
	IBaseFilter* pStreamFt = 0;
	IBaseFilter* pAudFt = 0;
	IBaseFilter* pAudCom = 0; // audio compressor
	IBaseFilter* pVidCom = 0; // video compressor
	IBaseFilter* pAviMux = 0;
	IFileSinkFilter	*pSink  = 0;//file
	IConfigAviMux *pConfigMux = NULL;
	CMediaHelper Helper;
	if(m_pCapGraph)
		hr = DelCaptureFilter();
	if(FAILED(hr) || m_pCapGraph)
		return E_INVALIDARG;
	try
	{
  
		// create graph
		hr = CoCreateInstance( CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, 
			IID_IFilterGraph, (void**)&(m_pCapGraph) );
		if( FAILED(hr))throw hr;

		hr = m_pCapGraph->QueryInterface( IID_IGraphBuilder, (void**)&(pGb) );
		if( FAILED(hr))throw hr;

		hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC, IID_ICaptureGraphBuilder2, (void **)&pBuild);		
		if( FAILED(hr))throw hr;

		hr =pBuild->SetFiltergraph(pGb);
		if( FAILED(hr))throw hr;

		// Connect the output stream filter /////////////////////////////////
		//
		//  filter			--->	|-------|
		//							|AVI MUX|	---->	Avi File
		//	sound device	--->	|-------|
		//
		///////////////////////////////////////////////////////////
		hr = pWizard->InitializeOutStream(m_dwID);
		if( FAILED(hr)) throw hr;
		hr = pWizard->GetOutStreamFilter(m_dwID,&pStreamFt);
		if( FAILED(hr)) throw hr;
		if(pStreamFt == 0) throw E_INVALIDARG;
		hr = m_pCapGraph->AddFilter( pStreamFt, L"OutFilter");
		if( FAILED(hr))throw hr;

		//create AVI MUX Filter

		hr = pBuild->SetOutputFileName(&MEDIASUBTYPE_Avi,lpPath, &pAviMux, &pSink); 
		if( FAILED(hr) || !pAviMux) throw hr;
		if(pszVidComName){ // video compressor
			hr = Helper.GetVidCompression(pszVidComName,&pVidCom);
			if( SUCCEEDED(hr) && pVidCom){
				hr = m_pCapGraph->AddFilter(pVidCom,L"VidCom");
				if( FAILED(hr) ) throw hr;
			}
		}
		if( lpszAudDevice){ // audio capture
			hr = Helper.GetAudCapDevice(lpszAudDevice,&pAudFt);
			if( FAILED(hr) || !pAudFt) throw hr;
			hr = m_pCapGraph->AddFilter(pAudFt,L"AudFt");
			if( FAILED(hr) ) throw hr;
			if(pszAudComName){
				hr = Helper.GetAudCompression(pszAudComName,&pAudCom);
				if( FAILED(hr) || !pAudCom) throw hr;
				hr = m_pCapGraph->AddFilter(pAudCom,L"AudCom");
				if( FAILED(hr) ) throw hr;
			}
		}		
		
		// start connecting the streams
		hr = pBuild->RenderStream(0, &MEDIATYPE_Video,pStreamFt,pVidCom,pAviMux);

		if( FAILED(hr) ) throw hr;
		if(pAudFt){
			hr = pBuild->RenderStream(0, 0, pAudFt, pAudCom, pAviMux);
			if( FAILED(hr) ) throw hr;
			hr = pAviMux->QueryInterface(IID_IConfigAviMux, (void**)&pConfigMux);
			if( SUCCEEDED(hr)) {
				pConfigMux->SetMasterStream(1);
			}
		}
		/*IConfigInterleaving *pInterleave = NULL;
		hr = pAviMux->QueryInterface(IID_IConfigInterleaving, (void**)&pInterleave);
		if (SUCCEEDED(hr))
		{
			pInterleave->put_Mode(INTERLEAVE_CAPTURE);
			pInterleave->Release();
		}*/
		// connection complete

		hr = CheckConnection(pStreamFt);
		if( FAILED(hr))throw hr;

		////   Done   /////////////////////////////////////////////

		// ok, all is rendered, now get MediaControl
		hr = m_pCapGraph->QueryInterface( IID_IMediaControl, (void**)&(m_pCapMc) );
		if( FAILED(hr))throw hr;


	}
	catch(HRESULT hr1)
	{
		hr = hr1;
	}
	RELEASE(pStreamFt);
	RELEASE(pGb);
	RELEASE(pBuild);
	RELEASE(pConfigMux);
	RELEASE(pAudFt);
	RELEASE(pAudCom );
	RELEASE(pVidCom );
	RELEASE(pAviMux );
	RELEASE(pSink);
   

    return hr;

	
}
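Examples No. 4 through No. 6 use a RELEASE macro and a CMediaHelper class that are not shown. The macro is almost certainly the usual release-and-null pattern; a plausible definition, in case your project does not already have one:

// Assumed definition of the RELEASE macro used in Examples No. 4-6
// (the original project may define it differently).
#ifndef RELEASE
#define RELEASE(p) { if (p) { (p)->Release(); (p) = NULL; } }
#endif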
Example No. 5
/******************************Public*Routine******************************\
* BuildAndRender
*
* Capture card source
\**************************************************************************/
HRESULT CVMR9Subgraph::BuildAndRender(IBaseFilter* pCap , GUID VidType ,UINT Width,UINT Height,int nFPS,IMultiVMR9Wizard* pWizard ,BOOL bUsingColorSpace,SourceConnectProc ConnectProc)
{
    HRESULT hr = S_OK;
	if(!pWizard)return E_FAIL;
	if(!pCap)return E_FAIL;
	if(m_pGraph)			return E_FAIL;
    IVMRFilterConfig9	*	pConfig = NULL;
	IGraphBuilder		*	pGb= NULL;
	IBaseFilter			*	pColorSpace = 0;
	IBaseFilter				* pVMR9 = 0;

	ICaptureGraphBuilder2 * pBuild = NULL;
	//IBaseFilter* pCap = NULL;
	IPin * pPin = NULL;
	IAMStreamConfig *pStrCfig = 0;
	AM_MEDIA_TYPE * mmt = 0;
	//m_DeviceId = DeviceId;
	m_GraphType = Capture_Device;
		
	m_ConnectProc = ConnectProc;
	CMediaHelper Helper;
    // create graph
	try
	{
		hr = CoCreateInstance( CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, 
			IID_IFilterGraph, (void**)&(m_pGraph) );
		if( FAILED(hr))throw hr;if(!m_pGraph)throw  E_OUTOFMEMORY;
		//--
		hr = m_pGraph->QueryInterface( IID_IGraphBuilder, (void**)&(pGb) );
		if( FAILED(hr))throw hr;if(!pGb)throw  E_OUTOFMEMORY;
		//--
		hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC, IID_ICaptureGraphBuilder2, (void **)&pBuild);		
		if( FAILED(hr))throw hr;if(!pBuild)throw  E_OUTOFMEMORY;
	
		//--
		hr =pBuild->SetFiltergraph(pGb);
		if( FAILED(hr))throw hr;
		//--
		// create and add VMR9
		hr = CoCreateInstance( CLSID_VideoMixingRenderer9, NULL, CLSCTX_INPROC,
			IID_IBaseFilter, (void**)&(pVMR9) );
		if( FAILED(hr))throw hr;if(! pVMR9)throw  E_OUTOFMEMORY;
		//--
		hr = m_pGraph->AddFilter( pVMR9, L"VMR9");
		if( FAILED(hr))throw hr;

		//--
		hr = pVMR9->QueryInterface( IID_IVMRFilterConfig9, (void**)&(pConfig) );
		if( FAILED(hr))throw hr;
		//--
		// set VMR to the renderless mode
		hr = pConfig->SetRenderingMode(  VMR9Mode_Renderless );
				//--
		hr = pWizard->Attach( pVMR9,D3DFMT_UNKNOWN, &m_dwID );
		if( FAILED(hr))throw hr;

		if(bUsingColorSpace){
			hr = CoCreateInstance( CLSID_Colour, NULL, CLSCTX_INPROC,
				IID_IBaseFilter, (void**)&(pColorSpace) );
			if( FAILED(hr))throw hr;if( !pColorSpace)throw  E_OUTOFMEMORY;
			//--
			hr = m_pGraph->AddFilter( pColorSpace, L"ColorSpace");
			if( FAILED(hr))throw hr;
		}

		
		//// Connect the capture card
		//hr = Helper.GetVidCapDevice(DeviceId,&pCap);
		//if( FAILED(hr))throw hr;
		//if(!pCap)throw  E_OUTOFMEMORY;
		hr = m_pGraph->AddFilter(pCap,L"Capture");
		if( FAILED(hr))throw hr;
		if(m_ConnectProc) {
			hr = m_ConnectProc(m_dwID,m_pGraph,pCap,pVMR9);
		}
		else{ // use the default connection method
			hr = pBuild->FindInterface(&PIN_CATEGORY_CAPTURE,&MEDIATYPE_Interleaved,pCap,IID_IAMStreamConfig,(void **)&pStrCfig);
			if( FAILED( hr) ){
				hr = pBuild->FindInterface(&PIN_CATEGORY_CAPTURE,&MEDIATYPE_Video,pCap,IID_IAMStreamConfig,(void **)&pStrCfig);
				if (FAILED(hr ))throw hr;	
			}
			
			hr = pStrCfig->GetFormat(&mmt);
			if (FAILED(hr)) throw hr;
			if (!mmt) throw E_OUTOFMEMORY;
			if (mmt->formattype == FORMAT_VideoInfo) {
				VIDEOINFO *pvi = (VIDEOINFO *) mmt->pbFormat;
				pvi->AvgTimePerFrame = UNITS / nFPS;
				pvi->bmiHeader.biWidth = Width;
				pvi->bmiHeader.biHeight = Height;
				mmt->subtype = VidType;
		//		hr = pStrCfig->SetFormat(mmt);   // re-apply the adjusted format
		//		if (FAILED(hr)) throw hr;
			}
			/*hr = pBuild->FindPin( pCap, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE,&MEDIATYPE_Interleaved, TRUE, 0, &pPin);
			if( FAILED(hr)){
				hr = pBuild->FindPin( pCap, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE,&MEDIATYPE_Video, TRUE, 0, &pPin); 
				if (FAILED(hr ))throw hr;
			}
			if(!pPin) throw E_OUTOFMEMORY;*/
			hr = pBuild->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Interleaved,pCap,pColorSpace,pVMR9);
			if( FAILED(hr)){
				hr = pBuild->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,pCap,pColorSpace,pVMR9);
				if (FAILED(hr ))throw hr;
			}
		}
		if( FAILED(hr))throw hr;

		
		// ok, all is rendered, now get MediaControl, MediaSeeking and continue
		hr = m_pGraph->QueryInterface( IID_IMediaControl, (void**)&(m_pMc) );
		if( FAILED(hr))throw hr;

		hr = m_pGraph->QueryInterface( IID_IMediaSeeking, (void**)&(m_pMs) );
		if( FAILED(hr))throw hr;



	}
	catch(HRESULT hr1)
	{
		pWizard->Detach( m_dwID );
		hr = hr1;
	}

	RELEASE( pConfig );
	RELEASE( pGb );
	RELEASE( pColorSpace );
	RELEASE( pBuild  );
	RELEASE( pStrCfig );
	RELEASE( pVMR9);
	Helper.DeleteMediaType(mmt);
	return hr;
}
Example No. 6
/******************************Public*Routine******************************\
* BuildAndRender
*
* Video file source
\**************************************************************************/
HRESULT CVMR9Subgraph::BuildAndRender( LPCWSTR lpPath , IMultiVMR9Wizard* pWizard ,SourceConnectProc ConnectProc)
{
    HRESULT hr = S_OK;
	if(!pWizard)return E_FAIL;
	if(m_pGraph)			return E_FAIL;
	m_ConnectProc = ConnectProc;
    if( !lpPath )
    {
        return E_POINTER;
    }
	IBaseFilter				* pVMR9 = 0;
	IBaseFilter				* pFileScr  = 0;
	IVMRFilterConfig9		* pConfig = 0;
    IGraphBuilder			* pGb = 0;
	ICaptureGraphBuilder2	* pBuild = NULL;
	IBaseFilter *pAviSplit = NULL;
	IPin*pPin = 0;

    USES_CONVERSION;
	m_GraphType = Media_File;
	try
	{
    // first, check that file exists
		if( INVALID_FILE_ATTRIBUTES == GetFileAttributes( lpPath))throw VFW_E_NOT_FOUND;

		// create graph
		hr = CoCreateInstance( CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, 
			IID_IFilterGraph, (void**)&(m_pGraph) );
		if( FAILED(hr))throw hr;
		
		// create and add VMR9
		hr = CoCreateInstance( CLSID_VideoMixingRenderer9, NULL, CLSCTX_INPROC_SERVER,
			IID_IBaseFilter, (void**)&(pVMR9) );
		if( FAILED(hr))throw hr;


		hr = m_pGraph->AddFilter( pVMR9, L"VMR9");
		if( FAILED(hr))throw hr;

		// configure VMR9
		hr = pVMR9->QueryInterface( IID_IVMRFilterConfig9, (void**)&(pConfig) );
		if( FAILED(hr))throw hr;
		
		
	    
		// if a wizard is provided, set VMR to renderless mode and attach to the wizard
		if( pWizard )
		{
			// set VMR to the renderless mode
			hr = pConfig->SetRenderingMode( VMR9Mode_Renderless );
			if( FAILED(hr))throw hr;
			
			hr = pWizard->Attach( pVMR9, D3DFMT_UNKNOWN,&m_dwID );
			if( FAILED(hr))throw hr;

		}

		// try to render media source
		hr = m_pGraph->QueryInterface( IID_IGraphBuilder, (void**)&(pGb) );
		if( FAILED(hr))throw hr;

		hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC, IID_ICaptureGraphBuilder2, (void **)&pBuild);		
		if( FAILED(hr))throw hr;if(!pBuild)throw  E_OUTOFMEMORY;
	
		//--
		hr = pBuild->SetFiltergraph(pGb);
		if( FAILED(hr))throw hr;


		hr = pGb->AddSourceFilter( lpPath, L"Source", &pFileScr);
		if( FAILED(hr))throw hr;


		
		if(m_ConnectProc) {
			hr = m_ConnectProc(m_dwID,m_pGraph,pFileScr,pVMR9);
			if( FAILED(hr))throw hr;
		}else
		{
			hr = CoCreateInstance(CLSID_AviSplitter, 0, CLSCTX_INPROC_SERVER,IID_IBaseFilter, reinterpret_cast<void**>(&pAviSplit));
			if( FAILED(hr))throw hr;
			hr = m_pGraph->AddFilter(pAviSplit, L"AVI Splitter");
			if( FAILED(hr))throw hr;

			hr =	pBuild->RenderStream(0, 0,pFileScr,pAviSplit,pVMR9);
			if( FAILED(hr))throw hr;

		}
	

		// ok, all is rendered, now get MediaControl, MediaSeeking and continue
		hr = m_pGraph->QueryInterface( IID_IMediaControl, (void**)&(m_pMc) );
		if( FAILED(hr))throw hr;

		hr = m_pGraph->QueryInterface( IID_IMediaSeeking, (void**)&(m_pMs) );
		if( FAILED(hr))throw hr;
	}
	catch(HRESULT hr1)
	{
		pWizard->Detach( m_dwID );
		hr = hr1;
	}
	RELEASE(pConfig);
	RELEASE(pGb);
	RELEASE(pVMR9);
	RELEASE(pFileScr);
	RELEASE(pAviSplit);
	RELEASE(pBuild);
	RELEASE(pPin);

	//AddCaptureFilter(pWizard);
    return hr;
}
Example No. 7
int main(int argc, char* argv[])
{
    ICaptureGraphBuilder2   *pCaptureGraphBuilder = NULL;
    IGraphBuilder           *pGraphBuilder = NULL;
    IBaseFilter             *pSource = NULL;
    IBaseFilter             *pMux = NULL;
    IBaseFilter             *pVideoCompressor = NULL;
    IBaseFilter             *pAudioCompressor = NULL;

    IAMStreamConfig         *pAMStreamConfig = NULL;
    IAMVideoCompression     *pAMVideoCompression = NULL;

    IMediaControl           *pControl = NULL;
    IMediaSeeking           *pSeek = NULL;
    IMediaEvent             *pEvent = NULL;

    HRESULT hr;

    DWORD pdwRegister=0;
    CoInitialize(NULL);

    // Create the capture graph builder.
    CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
                     IID_ICaptureGraphBuilder2, (void **)&pCaptureGraphBuilder);

    // Make the rendering section of the graph.
    pCaptureGraphBuilder->SetOutputFileName(
        &MEDIASUBTYPE_Avi,  // File type.
        L"C:\\STDIUE1.avi",  // File name.
        &pMux,              // pointer to the multiplexer.
        NULL);              // pointer to the file writer.

    // Load the source file.
    pCaptureGraphBuilder->GetFiltergraph(&pGraphBuilder);
    pGraphBuilder->AddSourceFilter(L"C:\\Program Files\\Microsoft Money\\Media\\STDIUE1.avi", L"Source Filter", &pSource);

    // Add the compressor filter.
    CoCreateInstance(CLSID_AVICo, NULL, CLSCTX_INPROC,
                     IID_IBaseFilter, (void **)&pVideoCompressor);
    pGraphBuilder->AddFilter(pVideoCompressor, L"Video Compressor");

    // Render the video stream, through the compressor.
    pCaptureGraphBuilder->RenderStream(
        NULL,       // Output pin category
        NULL,       // Media type
        pSource,       // Source filter
        pVideoCompressor,     // Compressor filter
        pMux);      // Sink filter (the AVI Mux)

    /* CoCreateInstance(CLSID_GSM, NULL, CLSCTX_INPROC,
             IID_IBaseFilter, (void **)&pAudioCompressor);
     pGraphBuilder->AddFilter(pAudioCompressor, L"Audio Compressor");*/

    // Render the audio stream.
    pCaptureGraphBuilder->RenderStream(
        NULL,
        NULL,
        pSource,
        pAudioCompressor,
        pMux);

    // Compress at 100k/second data rate.
    AM_MEDIA_TYPE *pmt;
    pCaptureGraphBuilder->FindInterface(NULL, NULL, pVideoCompressor, IID_IAMStreamConfig, (void **)&pAMStreamConfig);

    pAMStreamConfig->GetFormat(&pmt);

    if (pmt->formattype == FORMAT_VideoInfo)
    {

        ((VIDEOINFOHEADER *)(pmt->pbFormat))->dwBitRate = 100000;

        pAMStreamConfig->SetFormat(pmt);
    }


    // Request key frames every four frames.
    pAMStreamConfig->QueryInterface(IID_IAMVideoCompression, (void **)&pAMVideoCompression);
    pAMVideoCompression->put_KeyFrameRate(4);
    pAMVideoCompression->Release();
    pAMStreamConfig->Release();

    // Run the graph.

    pGraphBuilder->QueryInterface(IID_IMediaControl, (void **)&pControl);
    pGraphBuilder->QueryInterface(IID_IMediaEvent, (void **)&pEvent);

    hr = pMux->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);


    pControl->Run();
    printf("Recompressing... \n");

    long evCode;
    if (SUCCEEDED(hr))
    {
        REFERENCE_TIME rtTotal, rtNow = 0;
        pSeek->GetDuration(&rtTotal);
        while ((pEvent->WaitForCompletion(1000, &evCode)) == E_ABORT)
        {
            pSeek->GetCurrentPosition(&rtNow);
            printf("%d%%\n", (rtNow * 100)/rtTotal);
        }
        pSeek->Release();
    }
    else  // Cannot update the progress.
    {
        pEvent->WaitForCompletion(INFINITE, &evCode);
    }
    pControl->Stop();
    printf("All done\n");

    pSource->Release();
    pMux->Release();
    pVideoCompressor->Release();
    if (pAudioCompressor) pAudioCompressor->Release(); // never created above; its CoCreateInstance is commented out
    pControl->Release();
    pEvent->Release();
    pCaptureGraphBuilder->Release();
    pGraphBuilder->Release();
    CoUninitialize();

    return 0;
}
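The cleanup at the end of main() assumes every CoCreateInstance and QueryInterface succeeded. A small null-safe helper in the style of the SDK's SafeRelease pattern makes that less fragile; this is an addition for illustration, not part of the original sample:

// Null-safe release helper; sets the pointer to NULL after releasing it.
template <class T> void SafeRelease(T **ppT)
{
    if (*ppT)
    {
        (*ppT)->Release();
        *ppT = NULL;
    }
}

// Usage: SafeRelease(&pVideoCompressor); SafeRelease(&pAudioCompressor); ...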
Example No. 8
int main(int argc, char* argv[])
{
	
	
	IGraphBuilder         *pGraph = NULL;
	ICaptureGraphBuilder2 *pBuilder = NULL;
	IBaseFilter           *pSrc = NULL;
	IBaseFilter           *ppf = NULL;
	IFileSinkFilter       *pSink = NULL;
	IMediaControl         *pMC   = NULL;
	HRESULT hr;
	
	CoInitialize (NULL);
	// Create the filter graph.
	CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC,
	      IID_IGraphBuilder, (void **)&pGraph);

	// Create the capture graph builder.
	CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC, 
	      IID_ICaptureGraphBuilder2, (void **)&pBuilder);

	pBuilder->SetFiltergraph(pGraph);    
	
	pSrc=GetAudioDevice ();
	// add the first audio filter in the list
	pGraph->AddFilter(pSrc, L"Audio Capture");

/*	pBuilder->SetOutputFileName(
		&MEDIASUBTYPE_Avi,
		L"C:\\Example.avi", 
		&ppf, 
		&pSink);*/
//	pBuilder->AllocCapFile (L"C:\\temp.avi", _MAX_PATH);

	pBuilder->RenderStream(
        &PIN_CATEGORY_CAPTURE,  // Pin category
        &MEDIATYPE_Audio,       // Media type
        pSrc,                   // Capture filter
        NULL,                   // Compression filter (optional)
        ppf                     // Multiplexer or renderer filter
    );


 
	REFERENCE_TIME  rtStart = 20000000, 
                rtStop = 50000000;

/*	pBuilder->ControlStream(
        &PIN_CATEGORY_CAPTURE, 
        &MEDIATYPE_Audio, 
        pSrc,       // Source filter
        &rtStart,   // Start time
        &rtStop,    // Stop time
        0,          // Start cookie
        0           // Stop cookie
	 );*/

	pGraph->QueryInterface (IID_IMediaControl, (void **) &pMC);
	pMC->Run ();

	MessageBox (NULL, "Stop Recording", NULL, NULL);
	pMC->Stop ();

/*	CProgress *pProg = new CProgress(TEXT(""), NULL, &hr);
        IAMCopyCaptureFileProgress *pIProg = NULL;
        
            hr = pProg->QueryInterface(IID_IAMCopyCaptureFileProgress,
                                            (void **)&pIProg);
	//pBuilder->CopyCaptureFile (L"C:\\temp.avi", L"C:\\final.avi", TRUE, pIProg);*/
   
	// Release the interfaces before shutting down COM.
	if (pMC) pMC->Release ();
	if (pSrc) pSrc->Release ();
	if (pBuilder) pBuilder->Release ();
	if (pGraph) pGraph->Release ();

	CoUninitialize ();
	
	return 0;
}
Example No. 9
// A very simple program to capture a webcam & audio to a file using DirectShow
//
int main(int argc, char* argv[])
{
    ICaptureGraphBuilder2 *pCaptureGraph = NULL;	// Capture graph builder object
	IGraphBuilder *pGraph = NULL;	// Graph builder object
    IMediaControl *pControl = NULL;	// Media control object
	IFileSinkFilter *pSink = NULL;	// File sink object
	IBaseFilter *pAudioInputFilter = NULL; // Audio Capture filter
	IBaseFilter *pVideoInputFilter = NULL; // Video Capture filter
	IBaseFilter *pASFWriter = NULL;	// WM ASF File config interface

    // Initialize the COM library.
    HRESULT hr = CoInitialize(NULL);
    if (FAILED(hr))
    {
	     // We’ll send our error messages to the console.
        printf("ERROR - Could not initialize COM library");
        return hr;
    }

    // Create the capture graph builder and query for interfaces.
    hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER,
                          IID_ICaptureGraphBuilder2, (void **)&pCaptureGraph);
    if (FAILED(hr))	// FAILED is a macro that tests the return value
    {
        printf("ERROR - Could not create the Filter Graph Manager.");
        return hr;
    }

	// Use a method of the capture graph builder
	// To create an output path for the stream 
	hr = pCaptureGraph->SetOutputFileName(&MEDIASUBTYPE_Asf, 
		L"C:\\MyWebcam.ASF", &pASFWriter, &pSink);

	// Now configure the ASF Writer
	// Present the property pages for this filter
	hr = ShowFilterPropertyPages(pASFWriter);

	// Now get the filter graph manager
	// That's part of the capture graph builder
	hr = pCaptureGraph->GetFiltergraph(&pGraph);

	 // Using QueryInterface on the graph builder, 
    // Get the Media Control object.
    hr = pGraph->QueryInterface(IID_IMediaControl, (void **)&pControl);
    if (FAILED(hr))
    {
        printf("ERROR - Could not create the Media Control object.");
        pGraph->Release();	// Clean up after ourselves.
		CoUninitialize();  // And uninitalize COM
        return hr;
    }

	// Get an AudioCapture filter.
	// But there are several to choose from
	// So we need to enumerate them, and pick one.
	// Then add the audio capture filter to the filter graph. 
	hr = GetAudioInputFilter(&pAudioInputFilter, L"Logitech");
	if (SUCCEEDED(hr)) {
		hr = pGraph->AddFilter(pAudioInputFilter, L"Webcam Audio Capture");
	}

	// Now create the video input filter from the webcam
	hr = GetVideoInputFilter(&pVideoInputFilter, L"Logitech");
	if (SUCCEEDED(hr)) {
		hr = pGraph->AddFilter(pVideoInputFilter, L"Webcam Video Capture");
	}

	// Add a video renderer
	//IBaseFilter *pVideoRenderer = NULL;
	//hr = AddFilterByCLSID(pGraph, CLSID_VideoRenderer, L"Video Renderer", &pVideoRenderer);

	// Use another method of the capture graph builder
	// To provide a render path for video preview
	IBaseFilter *pIntermediate = NULL;
	hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
		pVideoInputFilter, NULL, NULL);

	// Now add the video capture to the output file
	hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
		pVideoInputFilter, NULL, pASFWriter);
	
	// And do the same for the audio
	hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio,
		pAudioInputFilter, NULL, pASFWriter);

    if (SUCCEEDED(hr))
    {
        // Run the graph.
        hr = pControl->Run();
        if (SUCCEEDED(hr))
        {
			// Wait patiently for completion of the recording
			wprintf(L"Started recording...press Enter to stop recording.\n");

            // Wait for completion.
			char ch;
			ch = getchar();		// We wait for keyboard input
        }

		// And let's stop the filter graph
		hr = pControl->Stop();

		wprintf(L"Stopped recording.\n");	// To the console

		// Before we finish up, save the filter graph to a file.
		SaveGraphFile(pGraph, L"C:\\MyGraph.GRF");
    }

	// Now release everything, and clean up.
	pSink->Release();
	pASFWriter->Release();
	pVideoInputFilter->Release();
	pAudioInputFilter->Release();
    pControl->Release();
    pGraph->Release();
	pCaptureGraph->Release();
    CoUninitialize();

	return 0;
}
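SaveGraphFile() is called above but not defined in the snippet. The DirectShow SDK documents a standard implementation that persists the graph to a structured-storage .GRF file; a sketch along those lines, in case the helper is missing from your project:

// SDK-style helper: save a filter graph to a .GRF file so it can be
// inspected later in GraphEdit.
HRESULT SaveGraphFile(IGraphBuilder *pGraph, WCHAR *wszPath)
{
    const WCHAR wszStreamName[] = L"ActiveMovieGraph";
    IStorage *pStorage = NULL;

    HRESULT hr = StgCreateDocfile(wszPath,
        STGM_CREATE | STGM_TRANSACTED | STGM_READWRITE | STGM_SHARE_EXCLUSIVE,
        0, &pStorage);
    if (FAILED(hr)) return hr;

    IStream *pStream = NULL;
    hr = pStorage->CreateStream(wszStreamName,
        STGM_WRITE | STGM_CREATE | STGM_SHARE_EXCLUSIVE, 0, 0, &pStream);
    if (FAILED(hr))
    {
        pStorage->Release();
        return hr;
    }

    IPersistStream *pPersist = NULL;
    hr = pGraph->QueryInterface(IID_IPersistStream, (void**)&pPersist);
    if (SUCCEEDED(hr))
    {
        hr = pPersist->Save(pStream, TRUE);
        pPersist->Release();
    }
    pStream->Release();
    if (SUCCEEDED(hr))
    {
        hr = pStorage->Commit(STGM_DEFAULT);
    }
    pStorage->Release();
    return hr;
}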
Example No. 10
std::vector<CameraConfig> videoInputCamera::getCameraConfigs(int dev_id) {

	std::vector<CameraConfig> cfg_list;

	int count = getDeviceCount();
	if (count==0) return cfg_list;

	comInit();

	HRESULT hr;
	ICaptureGraphBuilder2 *lpCaptureGraphBuilder;
	IGraphBuilder *lpGraphBuilder;
	IBaseFilter *lpInputFilter;
	IAMStreamConfig *lpStreamConfig;

	char 	nDeviceName[255];
	WCHAR 	wDeviceName[255];

	for (int cam_id=0;cam_id<count;cam_id++) {
		if ((dev_id>=0) && (dev_id!=cam_id)) continue;
		hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void **)&lpCaptureGraphBuilder);
		if (FAILED(hr))	// FAILED is a macro that tests the return value
		{
			printf("ERROR - Could not create the Filter Graph Manager\n");
			comUnInit();
			return cfg_list;
		}

		// Create the Filter Graph Manager.
		hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void**)&lpGraphBuilder);
		if (FAILED(hr))
		{
			printf("ERROR - Could not add the graph builder!\n");
			lpCaptureGraphBuilder->Release();
			comUnInit();
			return cfg_list;
		}

		hr = lpCaptureGraphBuilder->SetFiltergraph(lpGraphBuilder);
		if (FAILED(hr))
		{
			printf("ERROR - Could not set filtergraph\n");
			lpGraphBuilder->Release();
			lpCaptureGraphBuilder->Release();
			comUnInit();
			return cfg_list;
		}

		memset(wDeviceName, 0, sizeof(WCHAR) * 255);
		memset(nDeviceName, 0, sizeof(char) * 255);
		hr = getDevice(&lpInputFilter, cam_id, wDeviceName, nDeviceName);

		if (SUCCEEDED(hr)){
			hr = lpGraphBuilder->AddFilter(lpInputFilter, wDeviceName);
		}else{
			printf("ERROR - Could not find specified video device\n");
			lpGraphBuilder->Release();
			lpCaptureGraphBuilder->Release();
			comUnInit();
			return cfg_list;
		}

		hr = lpCaptureGraphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, lpInputFilter, IID_IAMStreamConfig, (void **)&lpStreamConfig);
		if(FAILED(hr)){
			printf("ERROR: Couldn't config the stream!\n");
			lpInputFilter->Release();
			lpGraphBuilder->Release();
			lpCaptureGraphBuilder->Release();
			comUnInit();
			return cfg_list;
		}

		CameraConfig cam_cfg;
		CameraTool::initCameraConfig(&cam_cfg);

		cam_cfg.driver = DRIVER_DEFAULT;
		cam_cfg.device = cam_id;
		sprintf(cam_cfg.name, "%s", nDeviceName);

		int iCount = 0;
		int iSize = 0;
		hr = lpStreamConfig->GetNumberOfCapabilities(&iCount, &iSize);
		std::vector<CameraConfig> fmt_list;

		if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
		{
			GUID lastFormat = MEDIASUBTYPE_None;
			for (int iFormat = 0; iFormat < iCount; iFormat+=2)
			{
				VIDEO_STREAM_CONFIG_CAPS scc;
				AM_MEDIA_TYPE *pmtConfig;
				hr =  lpStreamConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
				if (SUCCEEDED(hr)){

					if ( pmtConfig->subtype != lastFormat) {

						if (fmt_list.size()>0) {
							std::sort(fmt_list.begin(), fmt_list.end());
							cfg_list.insert( cfg_list.end(), fmt_list.begin(), fmt_list.end() );
							fmt_list.clear();
						}
						cam_cfg.cam_format = getMediaSubtype(pmtConfig->subtype);
						lastFormat = pmtConfig->subtype;
					}

					int stepX = scc.OutputGranularityX;
					int stepY = scc.OutputGranularityY;
					if(stepX < 1 || stepY < 1) continue;

					else if ((stepX==1) && (stepY==1)) {

						cam_cfg.cam_width = scc.InputSize.cx;
						cam_cfg.cam_height = scc.InputSize.cy;

						int maxFrameInterval = scc.MaxFrameInterval;
						if (maxFrameInterval==0) maxFrameInterval = 10000000;
						float last_fps=-1;
						VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
						for (int iv=scc.MinFrameInterval;iv<=maxFrameInterval;iv=iv*2) {
							pVih->AvgTimePerFrame = iv;
							hr = lpStreamConfig->SetFormat(pmtConfig);
							if (hr==S_OK) { hr = lpStreamConfig->GetFormat(&pmtConfig);
							float fps = ((int)floor(100000000.0f/(float)pVih->AvgTimePerFrame + 0.5f))/10.0f;
							if (fps!=last_fps) {
								cam_cfg.cam_fps = fps;
								fmt_list.push_back(cam_cfg);
								last_fps=fps;
							} }
						}

					} else {
						int x,y;
						for (x=scc.MinOutputSize.cx,y=scc.MinOutputSize.cy; x<=scc.MaxOutputSize.cx && y<=scc.MaxOutputSize.cy; x+=stepX,y+=stepY) {

							cam_cfg.cam_width = x;
							cam_cfg.cam_height = y;

							int maxFrameInterval = scc.MaxFrameInterval;
							if (maxFrameInterval==0) maxFrameInterval = 10000000;
							float last_fps=-1;
							VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
							for (int iv=scc.MinFrameInterval;iv<=maxFrameInterval;iv=iv*2) {
								pVih->AvgTimePerFrame = iv;
								hr = lpStreamConfig->SetFormat(pmtConfig);
								if (hr==S_OK) { hr = lpStreamConfig->GetFormat(&pmtConfig);
								float fps = ((int)floor(100000000.0f/(float)pVih->AvgTimePerFrame + 0.5f))/10.0f;
								if (fps!=last_fps) {
									cam_cfg.cam_fps = fps;
									fmt_list.push_back(cam_cfg);
									last_fps=fps;
								} }
							}

						}
					}

					deleteMediaType(pmtConfig);
				}
			}
		}

		if (fmt_list.size()>0) {
			std::sort(fmt_list.begin(), fmt_list.end());
			cfg_list.insert( cfg_list.end(), fmt_list.begin(), fmt_list.end() );
			fmt_list.clear();
		}

		lpStreamConfig->Release();
		lpInputFilter->Release();
		lpGraphBuilder->Release();
		lpCaptureGraphBuilder->Release();
	}

	comUnInit();
	return cfg_list;
}
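getDeviceCount(), getDevice(), comInit()/comUnInit() and deleteMediaType() belong to the surrounding videoInputCamera class and are not shown. deleteMediaType() in particular is normally just the SDK's DeleteMediaType pattern; the conventional form, in case you need to supply it yourself:

// Conventional AM_MEDIA_TYPE cleanup, equivalent to the SDK's
// DeleteMediaType helper; frees the format block and the structure itself.
void deleteMediaType(AM_MEDIA_TYPE *pmt)
{
    if (pmt == NULL) return;
    if (pmt->cbFormat != 0)
    {
        CoTaskMemFree((PVOID)pmt->pbFormat);
        pmt->cbFormat = 0;
        pmt->pbFormat = NULL;
    }
    if (pmt->pUnk != NULL)
    {
        pmt->pUnk->Release();
        pmt->pUnk = NULL;
    }
    CoTaskMemFree(pmt);
}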