Example #1
void VideoCapture::EnumResolutions()
{
	int iCount, iSize, iChosen=-1;
	IBaseFilter *pSource;
   	CComPtr <ICaptureGraphBuilder2> pCaptB;
	VIDEO_STREAM_CONFIG_CAPS caps;
	HRESULT hr;
	bool response;

	IAMStreamConfig *pConfig;

	devices_resolutions = new DeviceResolutions[nDevices];

	pCaptB.CoCreateInstance(CLSID_CaptureGraphBuilder2);

	for (unsigned int iDevice=0; iDevice<nDevices; iDevice++)
	{
		response = BindFilter(iDevice, &pSource);
		if (!response)
		{
			devices_resolutions[iDevice].nResolutions = 0;
			continue;
		}

		hr = pCaptB->FindInterface(
        &PIN_CATEGORY_CAPTURE,
        &MEDIATYPE_Video,
        pSource,
        IID_IAMStreamConfig,
        (void**)&pConfig);

		if (FAILED(hr))
		{
			pSource->Release();
			devices_resolutions[iDevice].nResolutions = 0;
			continue;
		}

		pConfig->GetNumberOfCapabilities(&iCount, &iSize);

		devices_resolutions[iDevice].SetNResolutions(iCount);


		for(int i=0; i < iCount; i++) {
			AM_MEDIA_TYPE *pmt;
			if( pConfig->GetStreamCaps(i, &pmt, reinterpret_cast<BYTE*>(&caps)) == S_OK ) {

				VIDEOINFOHEADER *pVih = 
					reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
				
				devices_resolutions[iDevice].x[i] = caps.InputSize.cx;
				devices_resolutions[iDevice].y[i] = caps.InputSize.cy;
				devices_resolutions[iDevice].color_space[i] = pmt->subtype;
				devices_resolutions[iDevice].compression[i] = pVih->bmiHeader.biCompression;
				DeleteMediaType(pmt);
			}
		}

		pSource->Release();
		pConfig->Release();

		pSource = 0;
	}
}
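The DeviceResolutions record filled in above is not part of this listing. As a rough sketch, inferred purely from how EnumResolutions() uses it (the real project definition may well differ), it could look like this:

struct DeviceResolutions
{
	int    nResolutions;
	long  *x;            // frame widths  (VIDEO_STREAM_CONFIG_CAPS::InputSize.cx)
	long  *y;            // frame heights (VIDEO_STREAM_CONFIG_CAPS::InputSize.cy)
	GUID  *color_space;  // AM_MEDIA_TYPE::subtype of each capability
	DWORD *compression;  // VIDEOINFOHEADER::bmiHeader.biCompression of each capability

	DeviceResolutions() : nResolutions(0), x(NULL), y(NULL), color_space(NULL), compression(NULL) {}

	void SetNResolutions(int n)
	{
		nResolutions = n;
		x           = new long[n];
		y           = new long[n];
		color_space = new GUID[n];
		compression = new DWORD[n];
	}

	~DeviceResolutions()
	{
		delete[] x; delete[] y; delete[] color_space; delete[] compression;
	}
};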
Example #2
/* Build the filter graph: add each filter, connect them, and run the graph */
HRESULT CVMR_Capture::Init(int iDeviceID,HWND hWnd, int iWidth, int iHeight)
{
	HRESULT hr;
	
	// Release any graph that has already been built
	CloseInterfaces();

	// Create the IGraphBuilder (filter graph manager)
    hr = CoCreateInstance(CLSID_FilterGraph, NULL, 
					CLSCTX_INPROC_SERVER, 
					IID_IGraphBuilder, (void **)&m_pGB);

    if (SUCCEEDED(hr))
    {
        // Create the VMR and add it to the graph
        InitializeWindowlessVMR(hWnd);        
		
		// Bind the specified device to a capture filter
		if(!BindFilter(iDeviceID, &m_pDF))
			return S_FALSE;
		// Add the capture device filter to the graph
		hr = m_pGB->AddFilter(m_pDF, L"Video Capture");
		if (FAILED(hr)) return hr;
		
		// Get the first output pin of the capture filter
		IEnumPins  *pEnum;
		m_pDF->EnumPins(&pEnum);
		hr |= pEnum->Reset();
		hr |= pEnum->Next(1, &m_pCamOutPin, NULL);
		pEnum->Release();
			
        // Query the media control and event interfaces
        hr |= m_pGB->QueryInterface(IID_IMediaControl, (void **)&m_pMC);
        hr |= m_pGB->QueryInterface(IID_IMediaEventEx, (void **)&m_pME);     

		// Set up window notification message handling
        //hr = pME->SetNotifyWindow((OAHWND)hWnd, WM_GRAPHNOTIFY, 0);
		
		// Match the video resolution and configure the video display window
		hr |= InitVideoWindow(hWnd,iWidth, iHeight);

		// Allocate memory for captured video frames
		m_nFramelen=iWidth*iHeight*3;
		m_pFrame=(BYTE*) new BYTE[m_nFramelen];		

		// Run the graph and start capturing video
		m_psCurrent = STOPPED;
		hr |= m_pGB->Render(m_pCamOutPin);
		hr |= m_pMC->Run();

		if (FAILED(hr)) return hr;

		m_psCurrent = RUNNING;
	}

	return hr;
}
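Init() relies on a helper InitializeWindowlessVMR(hWnd) that is not listed. A minimal sketch of such a helper, following the standard VMR-7 windowless setup and assuming the class keeps the windowless control in a member (m_pWC is an assumed name), might be:

HRESULT CVMR_Capture::InitializeWindowlessVMR(HWND hWnd)
{
	// Hypothetical sketch; member names other than m_pGB are assumptions.
	IBaseFilter *pVmr = NULL;

	// Create the Video Mixing Renderer and add it to the graph
	HRESULT hr = CoCreateInstance(CLSID_VideoMixingRenderer, NULL,
			CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&pVmr);
	if (FAILED(hr))
		return hr;

	hr = m_pGB->AddFilter(pVmr, L"Video Mixing Renderer");
	if (SUCCEEDED(hr))
	{
		// Put the VMR into windowless mode before it is connected
		IVMRFilterConfig *pConfig = NULL;
		hr = pVmr->QueryInterface(IID_IVMRFilterConfig, (void**)&pConfig);
		if (SUCCEEDED(hr))
		{
			hr = pConfig->SetRenderingMode(VMRMode_Windowless);
			pConfig->Release();
		}

		// Get the windowless control and clip the video to the host window
		if (SUCCEEDED(hr))
		{
			hr = pVmr->QueryInterface(IID_IVMRWindowlessControl, (void**)&m_pWC);
			if (SUCCEEDED(hr))
				hr = m_pWC->SetVideoClippingWindow(hWnd);
		}
	}

	pVmr->Release();
	return hr;
}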
Example #3
HRESULT CVMR_Capture::Init(int iDeviceID, HWND hWnd, int iWidth, int iHeight)
{
	HRESULT hr;
	// Get the interface for DirectShow's GraphBuilder
    hr=CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, 
                         IID_IGraphBuilder, (void **)&m_pGB);

    if(SUCCEEDED(hr))
    {
        // Create the Video Mixing Renderer and add it to the graph
        InitializeWindowlessVMR(hWnd);        
		// Bind Device Filter.  We know the device because the id was passed in
		if(!BindFilter(iDeviceID, &m_pDF))
			return S_FALSE;

		hr=m_pGB->AddFilter(m_pDF, L"Video Capture");
		if (FAILED(hr))
			return hr;

		CComPtr<IEnumPins> pEnum;
		m_pDF->EnumPins(&pEnum);

		hr = pEnum->Reset();
		hr = pEnum->Next(1, &m_pCamOutPin, NULL);

        // QueryInterface for DirectShow interfaces
        hr = m_pGB->QueryInterface(IID_IMediaControl, (void **)&m_pMC);

        hr = m_pGB->QueryInterface(IID_IMediaEventEx, (void **)&m_pME);     

		// Have the graph signal event via window callbacks for performance
        //hr = pME->SetNotifyWindow((OAHWND)hWnd, WM_GRAPHNOTIFY, 0);


		hr = InitVideoWindow(hWnd,iWidth, iHeight);

		m_nFramelen=iWidth*iHeight*3;
	m_pFrame=(BYTE*) new BYTE[m_nFramelen];

		// Run the graph and start capturing video
		m_psCurrent=Stopped;

		hr = m_pGB->Render(m_pCamOutPin);
		hr = m_pMC->Run();
		if (SUCCEEDED(hr))
			m_psCurrent=Running;
	}
	return hr;

}
Example #4
HRESULT ARLayoutCameraDS::AddCamera(int nCamID, bool bDisplayProperties)
{
	int idx = -1;
	for (int i =0; i < NUMCAM; i++)
	{
		if (m_pCamFilter[i] == NULL)
		{
			idx = i;
			break;
		}
	}
	if (idx < 0 )
		return E_FAIL;
	OAFilterState fstate;
	m_pMediaControl->GetState(1000, &fstate);
	if (fstate != State_Stopped)
	{
		return E_FAIL;
	}
	HRESULT hr = S_OK;

	BindFilter(nCamID, &m_pCamFilter[idx]);
	CComPtr<IEnumPins> pEnum;
	m_pCamFilter[idx]->EnumPins(&pEnum);
	hr = pEnum->Reset();
	hr = pEnum->Next(1, &m_pCamOutputPin[idx], NULL);
	
	if (m_pCamWarpFilter[idx] != NULL)
	{
		m_pGraph->RemoveFilter(m_pCamWarpFilter[idx]);
		m_pCamWarpFilter[idx] = NULL;
		m_pCamWarpInputPin[idx] = NULL;
		m_pCamWarpOutputPin[idx] = NULL;
	}

	hr = CoCreateInstance(CLSID_HomoWarpFilter, NULL, CLSCTX_INPROC_SERVER, 
		IID_IBaseFilter, (LPVOID *)&m_pCamWarpFilter[idx]);
	
	hr = m_pCamWarpFilter[idx]->QueryInterface(IID_IHomoWarpFilter, (LPVOID *)&m_pICamWarpFilter[idx]);
	hr = m_pCamWarpFilter[idx]->FindPin(L"input", &m_pCamWarpInputPin[idx]);
	hr = m_pCamWarpFilter[idx]->FindPin(L"d3dsurf", &m_pCamWarpOutputPin[idx]);
	WCHAR str[MAX_PATH] = {0};
	swprintf_s(str, MAX_PATH, L"Camera%d", idx);
	hr = m_pGraph->AddFilter(m_pCamFilter[idx], str);
	swprintf_s(str, MAX_PATH, L"Cam%d HomoWarp", idx);
	hr = m_pGraph->AddFilter(m_pCamWarpFilter[idx], str);

	if (bDisplayProperties)
		ShowFilterProp(m_pCamOutputPin[idx]);
	m_pGraph->Connect(m_pCamOutputPin[idx], m_pCamWarpInputPin[idx]);
	m_pGraph->Connect(m_pCamWarpOutputPin[idx], m_pDXBlendInputPin[idx]);

	return S_OK;
}
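AddCamera() calls a ShowFilterProp() helper that is not listed. Example #5 below performs the same property-page display inline; a standalone helper along those lines (the name and signature here are assumptions) could look like:

void ShowFilterProp(IUnknown *pUnk)
{
	// Hypothetical sketch: show the property pages exposed by a pin or filter.
	ISpecifyPropertyPages *pPages = NULL;
	if (SUCCEEDED(pUnk->QueryInterface(IID_ISpecifyPropertyPages, (void**)&pPages)))
	{
		CAUUID caGUID = {0};
		if (SUCCEEDED(pPages->GetPages(&caGUID)))
		{
			OleCreatePropertyFrame(NULL, 0, 0, L"Property Sheet", 1,
					&pUnk, caGUID.cElems, caGUID.pElems, 0, 0, NULL);
			CoTaskMemFree(caGUID.pElems);
		}
		pPages->Release();
	}
}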
Example #5
bool CCameraDS::OpenCamera(int nCamID, bool bDisplayProperties, int nWidth, int nHeight)
{
	
	HRESULT hr = S_OK;

	CoInitialize(NULL);
	// Create the Filter Graph Manager.
	hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC,
							IID_IGraphBuilder, (void **)&m_pGraph);

	hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER, 
							IID_IBaseFilter, (LPVOID *)&m_pSampleGrabberFilter);

	hr = m_pGraph->QueryInterface(IID_IMediaControl, (void **) &m_pMediaControl);
	hr = m_pGraph->QueryInterface(IID_IMediaEvent, (void **) &m_pMediaEvent);

	hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER,
							IID_IBaseFilter, (LPVOID*) &m_pNullFilter);


	hr = m_pGraph->AddFilter(m_pNullFilter, L"NullRenderer");
	
	hr = m_pSampleGrabberFilter->QueryInterface(IID_ISampleGrabber, (void**)&m_pSampleGrabber);

	AM_MEDIA_TYPE   mt;
	ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
	mt.majortype = MEDIATYPE_Video;
	mt.subtype = MEDIASUBTYPE_RGB24;
	mt.formattype = FORMAT_VideoInfo; 
	hr = m_pSampleGrabber->SetMediaType(&mt);
	MYFREEMEDIATYPE(mt);

	m_pGraph->AddFilter(m_pSampleGrabberFilter, L"Grabber");
 
	// Bind Device Filter.  We know the device because the id was passed in
	BindFilter(nCamID, &m_pDeviceFilter);
	m_pGraph->AddFilter(m_pDeviceFilter, NULL);

	CComPtr<IEnumPins> pEnum;
	m_pDeviceFilter->EnumPins(&pEnum);
 
	hr = pEnum->Reset();
	hr = pEnum->Next(1, &m_pCameraOutput, NULL); 

	pEnum = NULL; 
	m_pSampleGrabberFilter->EnumPins(&pEnum);
	pEnum->Reset();
	hr = pEnum->Next(1, &m_pGrabberInput, NULL); 

	pEnum = NULL;
	m_pSampleGrabberFilter->EnumPins(&pEnum);
	pEnum->Reset();
	pEnum->Skip(1);
	hr = pEnum->Next(1, &m_pGrabberOutput, NULL); 

	pEnum = NULL;
	m_pNullFilter->EnumPins(&pEnum);
	pEnum->Reset();
	hr = pEnum->Next(1, &m_pNullInputPin, NULL);

	//SetCrossBar();

	if (bDisplayProperties) 
	{
		CComPtr<ISpecifyPropertyPages> pPages;

		HRESULT hr = m_pCameraOutput->QueryInterface(IID_ISpecifyPropertyPages, (void**)&pPages);
		if (SUCCEEDED(hr))
		{
			PIN_INFO PinInfo;
			m_pCameraOutput->QueryPinInfo(&PinInfo);

			CAUUID caGUID;
			pPages->GetPages(&caGUID);

			OleCreatePropertyFrame(NULL, 0, 0,
						L"Property Sheet", 1,
						(IUnknown **)&(m_pCameraOutput.p),
						caGUID.cElems,
						caGUID.pElems,
						0, 0, NULL);
			CoTaskMemFree(caGUID.pElems);
			PinInfo.pFilter->Release();
		}
		pPages = NULL;
	}
	else 
	{
		//////////////////////////////////////////////////////////////////////////////
		// Apply the camera width and height given by nWidth and nHeight (default 320*240)
		// by flymanbox @2009-01-24
		//////////////////////////////////////////////////////////////////////////////
	   int _Width = nWidth, _Height = nHeight;
	   IAMStreamConfig* iconfig = NULL;
	   hr = m_pCameraOutput->QueryInterface(IID_IAMStreamConfig, (void**)&iconfig);
	   if (FAILED(hr) || iconfig == NULL)
		  return false;

	   AM_MEDIA_TYPE* pmt;
	   if (iconfig->GetFormat(&pmt) != S_OK)
	   {
		  //printf("GetFormat Failed ! \n");
		  iconfig->Release();
		  return false;
	   }
      
	   VIDEOINFOHEADER*   phead;
	   if ( pmt->formattype == FORMAT_VideoInfo)   
	   {   
			phead=( VIDEOINFOHEADER*)pmt->pbFormat;   
			phead->bmiHeader.biWidth = _Width;   
			phead->bmiHeader.biHeight = _Height;   
			if ((hr = iconfig->SetFormat(pmt)) != S_OK)
			{
				iconfig->Release();
				MYFREEMEDIATYPE(*pmt);
				return false;
			}

		}
		iconfig->Release();
		iconfig = NULL;
		MYFREEMEDIATYPE(*pmt);
	}

	hr = m_pGraph->Connect(m_pCameraOutput, m_pGrabberInput);
	hr = m_pGraph->Connect(m_pGrabberOutput, m_pNullInputPin);

	if (FAILED(hr))
	{
		switch(hr)
		{
			case VFW_S_NOPREVIEWPIN :
				break;
			case E_FAIL :
				break;
			case E_INVALIDARG :
				break;
			case E_POINTER :
				break;
		}
	}

	m_pSampleGrabber->SetBufferSamples(TRUE);
	m_pSampleGrabber->SetOneShot(TRUE);
    
	hr = m_pSampleGrabber->GetConnectedMediaType(&mt);
	if(FAILED(hr))
		return false;

	VIDEOINFOHEADER *videoHeader;
	videoHeader = reinterpret_cast<VIDEOINFOHEADER*>(mt.pbFormat);
	m_nWidth = videoHeader->bmiHeader.biWidth;
	m_nHeight = videoHeader->bmiHeader.biHeight;
	m_bConnected = true;
	MYFREEMEDIATYPE(mt);

	pEnum = NULL;
	return true;
}
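OpenCamera() configures the grabber with SetOneShot(TRUE) and SetBufferSamples(TRUE) but does not start the graph, so a caller still has to run m_pMediaControl and then read the buffered frame through ISampleGrabber::GetCurrentBuffer(). The member names below come from the example; the helper itself is only an illustrative sketch, not part of the original class:

bool CCameraDS::GrabFrame(BYTE *pDest, long lDestSize)
{
	// Hypothetical sketch of pulling one RGB24 frame after OpenCamera() succeeded.
	if (!m_bConnected || pDest == NULL)
		return false;

	long evCode = 0;
	m_pMediaControl->Run();
	// One-shot mode: the graph signals completion after the first sample is buffered
	m_pMediaEvent->WaitForCompletion(INFINITE, &evCode);

	long lSize = lDestSize;
	HRESULT hr = m_pSampleGrabber->GetCurrentBuffer(&lSize, (long*)pDest);
	return SUCCEEDED(hr);
}

The destination buffer should hold at least m_nWidth * m_nHeight * 3 bytes, and the rows arrive bottom-up as in any RGB24 DIB.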
Example #6
bool VideoCapture::InitCamera(int iDevice, int iResolution)
{
	if (cameraInitialized) return false;

	HRESULT hr;
	bool ans;
	int iWidth = devices_resolutions[iDevice].x[iResolution];
	int iHeight = devices_resolutions[iDevice].y[iResolution];

    USES_CONVERSION;

	CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
		  IID_ISampleGrabber, (void**)&pGrabber);
	if( !pGrabber ) return false;
    
	hr = pNull.CoCreateInstance(CLSID_NullRenderer);
	
    CComQIPtr< IBaseFilter, &IID_IBaseFilter > pGrabberBase( pGrabber );

	ans = BindFilter(iDevice, &pSource);
	if( !pSource ) return false;
	
	hr = pGraph.CoCreateInstance( CLSID_FilterGraph );
	if (hr!=S_OK) return false;

	hr = pGraph->AddFilter( pSource, L"Source" );
	if (hr!=S_OK) return false;
	hr = pGraph->AddFilter( pGrabberBase, L"Grabber" );
    if (hr!=S_OK) return false;
	hr = pGraph->AddFilter( pNull, L"NullRenderer" );
	if (hr!=S_OK) return false;
	
	// Tell the grabber to grab 24-bit video. Must do this
    // before connecting it
    CMediaType GrabType;

	GrabType.SetType( &MEDIATYPE_Video );
	GrabType.SetSubtype( &MEDIASUBTYPE_RGB24 );

	hr = pGrabber->SetMediaType( &GrabType );
	if (hr!=S_OK) return false;

    CComPtr< IPin > pGrabPinIn;
    CComPtr< IPin > pGrabPinOut;
    CComPtr< IPin > pNullPinIn;

    pSourcePin = GetOutPin( pSource, 0 );
    pGrabPinIn   = GetInPin( pGrabberBase, 0 );
    pGrabPinOut  = GetOutPin( pGrabberBase, 0 );
    pNullPinIn  = GetInPin( pNull, 0 );

	CComPtr <ICaptureGraphBuilder2> pCaptB;
	pCaptB.CoCreateInstance(CLSID_CaptureGraphBuilder2);
	pCaptB->SetFiltergraph(pGraph);

	hr = pCaptB->FindInterface(&PIN_CATEGORY_CAPTURE,
		&MEDIATYPE_Video,pSource, 
		IID_IAMStreamConfig, (void**)&pConfig);
	if (hr!=S_OK) return false;

	SetResolution(iDevice, iResolution);
    // ... and connect them
    //
    hr = pGraph->Connect( pSourcePin, pGrabPinIn);
    if (hr!=S_OK) return false;
	
	hr = pGraph->Connect( pGrabPinOut, pNullPinIn );
	if (hr!=S_OK) return false;
	
	// Ask for the connection media type so we know its size
    //
    AM_MEDIA_TYPE mt;
	hr = pGrabber->GetConnectedMediaType( &mt );
	if (hr!=S_OK) return false;

    VIDEOINFOHEADER * vih = (VIDEOINFOHEADER*) mt.pbFormat;
    callback.im_width  = iWidth;
    callback.im_height = iHeight;
    FreeMediaType( mt );

	// Write the bitmap format; biSize holds the header size and biSizeImage the frame size
    //
    memset( &(callback.cbInfo.bih), 0, sizeof( callback.cbInfo.bih ) );
    callback.cbInfo.bih.biSize = sizeof( BITMAPINFOHEADER );
    callback.cbInfo.bih.biWidth = iWidth;
    callback.cbInfo.bih.biHeight = iHeight;
    callback.cbInfo.bih.biPlanes = 1;
    callback.cbInfo.bih.biBitCount = 24;
    callback.cbInfo.bih.biSizeImage = iWidth*iHeight*3;

	callback.cbInfo.pBuffer = new byte[callback.cbInfo.bih.biSizeImage];
	    
	hr = pGrabber->SetBufferSamples( FALSE );
	hr = pGrabber->SetOneShot( FALSE );
    hr = pGrabber->SetCallback( &callback, 1);

	hr = pGraph->QueryInterface(IID_IMediaControl,(LPVOID *) &pControl);

	hr = pControl->Run( );

	cameraInitialized = true;
	return true;	
}
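InitCamera() registers `callback` with pGrabber->SetCallback(&callback, 1); the second argument selects the BufferCB() path of ISampleGrabberCB. The callback object itself is not listed. A minimal sketch of such a class, matching the cbInfo/im_width fields used above (the real project layout may differ), could be:

class FrameCallback : public ISampleGrabberCB
{
public:
	int im_width, im_height;
	struct { BITMAPINFOHEADER bih; byte *pBuffer; } cbInfo;

	// IUnknown: lifetime is managed by the owning VideoCapture object, not by COM
	STDMETHODIMP_(ULONG) AddRef()  { return 1; }
	STDMETHODIMP_(ULONG) Release() { return 1; }
	STDMETHODIMP QueryInterface(REFIID riid, void **ppv)
	{
		if (riid == IID_ISampleGrabberCB || riid == IID_IUnknown)
		{
			*ppv = static_cast<ISampleGrabberCB*>(this);
			return S_OK;
		}
		return E_NOINTERFACE;
	}

	// Not used when mode 1 (BufferCB) is selected
	STDMETHODIMP SampleCB(double /*SampleTime*/, IMediaSample* /*pSample*/) { return E_NOTIMPL; }

	// The grabber hands us a copy of each RGB24 frame
	STDMETHODIMP BufferCB(double /*SampleTime*/, BYTE *pData, long lDataLen)
	{
		long lCopy = lDataLen < (long)cbInfo.bih.biSizeImage ? lDataLen : (long)cbInfo.bih.biSizeImage;
		memcpy(cbInfo.pBuffer, pData, lCopy);
		return S_OK;
	}
};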
Example #7
HRESULT CCaptureVideo::Init(int iDeviceID, HWND hWnd)
{
	HRESULT hr;

	hr = InitCaptureGraphBuilder();

	if (FAILED(hr))
	{
		IBA_LOG0(_T("Failed to get video interfaces!"));
		return hr;
	}

	// Bind Device Filter. We know the device because the id was passed in
	if(!BindFilter(iDeviceID, &m_pBF))
		return S_FALSE;
	
	hr = m_pGB->AddFilter(m_pBF, L"Capture Filter");

	m_pGrabber = NULL;

	hr = m_pGrabber.CoCreateInstance( CLSID_SampleGrabber );

	if( !m_pGrabber )
	{
		IBA_LOG0(_T("Failed to create SampleGrabber; maybe qedit.dll is not registered?"));
		return hr;
	}

	CComQIPtr< IBaseFilter, &IID_IBaseFilter > pGrabBase( m_pGrabber );
	
	// Set the video format
	AM_MEDIA_TYPE mt;
	
	ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
	
	mt.majortype = MEDIATYPE_Video;
	mt.subtype = MEDIASUBTYPE_RGB24;
	hr = m_pGrabber->SetMediaType(&mt);

	if( FAILED( hr ) )
	{
		IBA_LOG0(_T("Failed to set the media type!"));
		return hr;
	}
	hr = m_pGB->AddFilter( pGrabBase, L"Grabber" );
	
	if( FAILED( hr ) )
	{
		IBA_LOG0(_T("Failed to put the sample grabber in the graph"));
		return hr;
	}

	// try to render preview/capture pin
	hr = m_pCapture->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,m_pBF,pGrabBase,NULL);
	
	if( FAILED( hr ) )
		hr = m_pCapture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,m_pBF,pGrabBase,NULL);

	if( FAILED( hr ) )
	{
//		OutputEx(ToString("Can't build the graph"));
		return hr;
	}

	hr = m_pGrabber->GetConnectedMediaType( &mt );

	if ( FAILED( hr) )
	{
		IBA_LOG0(_T("Failed to read the connected media type"));
		return hr;
	}

	VIDEOINFOHEADER * vih = (VIDEOINFOHEADER*) mt.pbFormat;

	m_CB.m_lWidth = vih->bmiHeader.biWidth;

	m_CB.m_lHeight = vih->bmiHeader.biHeight;

	FreeMediaType(mt);

	hr = m_pGrabber->SetBufferSamples( FALSE );

	hr = m_pGrabber->SetOneShot( FALSE );

	hr = m_pGrabber->SetCallback( &m_CB, 1 );

	// Set up the video capture window
	m_hWnd = hWnd ;

	SetupVideoWindow();

	hr = m_pMC->Run(); // Start video capture

	if(FAILED(hr))
	{
		IBA_LOG0(_T("Couldn't run the graph!"));
		return hr;
	}

	m_bOpened = true;

	return S_OK;
}
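CCaptureVideo::Init() starts with an InitCaptureGraphBuilder() call that is not listed. A common shape for that helper, using only the members Init() itself touches (m_pGB, m_pCapture, m_pMC), might be the following; the body is a sketch rather than the project's actual code:

HRESULT CCaptureVideo::InitCaptureGraphBuilder()
{
	// Hypothetical sketch: create the graph and the capture graph builder,
	// tie them together, and cache the media-control interface used by Init().
	HRESULT hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER,
			IID_ICaptureGraphBuilder2, (void**)&m_pCapture);
	if (FAILED(hr))
		return hr;

	hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
			IID_IGraphBuilder, (void**)&m_pGB);
	if (FAILED(hr))
		return hr;

	// Tell the capture builder which filter graph to build into
	hr = m_pCapture->SetFiltergraph(m_pGB);
	if (FAILED(hr))
		return hr;

	// Used later by Init() to run the graph
	return m_pGB->QueryInterface(IID_IMediaControl, (void**)&m_pMC);
}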