Example #1
HRESULT SetCaptureSize(IPin* capPreviewOutputPin, int width, int height, int avgTimePerFrame)
{
	HRESULT hr = S_OK;
	IAMStreamConfig *streamConfig;
	hr = capPreviewOutputPin->QueryInterface(IID_IAMStreamConfig, (void**)&streamConfig);
	if(FAILED(hr))
	{
		ErrorPrint("Get stream config interface error", hr);
		return hr;
	}
	AM_MEDIA_TYPE *mediaType;
	VIDEO_STREAM_CONFIG_CAPS configCaps;
	hr = streamConfig->GetStreamCaps(0, &mediaType, (BYTE*)&configCaps);
	if (FAILED(hr))
	{
		ErrorPrint("Get stream caps error", hr);
		streamConfig->Release();
		return hr;
	}
	VIDEOINFOHEADER* videoHeader = (VIDEOINFOHEADER*)mediaType->pbFormat;
	videoHeader->bmiHeader.biWidth = width;
	videoHeader->bmiHeader.biHeight = height;
	videoHeader->bmiHeader.biSizeImage = DIBSIZE(videoHeader->bmiHeader);
	videoHeader->AvgTimePerFrame = avgTimePerFrame;
	hr = streamConfig->SetFormat(mediaType);
	if (FAILED(hr))
		ErrorPrint("Set format error", hr);
	DeleteMediaType(mediaType);
	streamConfig->Release();
	return hr;
}
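A note on the DeleteMediaType helper used throughout these examples: it comes from the DirectShow base classes (mtype.cpp). If you are not linking the base-class library, a minimal equivalent looks like the sketch below; media types returned by GetFormat or GetStreamCaps are CoTaskMemAlloc'd, so both the format block and the struct itself must be freed.

// Minimal equivalents of the DirectShow base-class helpers.
void FreeMediaType(AM_MEDIA_TYPE& mt)
{
	if (mt.cbFormat != 0)
	{
		CoTaskMemFree((PVOID)mt.pbFormat);
		mt.cbFormat = 0;
		mt.pbFormat = NULL;
	}
	if (mt.pUnk != NULL)
	{
		mt.pUnk->Release();  // rarely set, but release it if present
		mt.pUnk = NULL;
	}
}

void DeleteMediaType(AM_MEDIA_TYPE *pmt)
{
	if (pmt != NULL)
	{
		FreeMediaType(*pmt);
		CoTaskMemFree(pmt);  // the struct itself is CoTaskMemAlloc'd
	}
}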
Example #2
void VideoCapture::EnumResolutions()
{
	int iCount, iSize, iChosen=-1;
	IBaseFilter *pSource;
   	CComPtr <ICaptureGraphBuilder2> pCaptB;
	VIDEO_STREAM_CONFIG_CAPS caps;
	HRESULT hr;
	bool response;

	IAMStreamConfig *pConfig;

	devices_resolutions = new DeviceResolutions[nDevices];

	pCaptB.CoCreateInstance(CLSID_CaptureGraphBuilder2);

	for (unsigned int iDevice=0; iDevice<nDevices; iDevice++)
	{
		response = BindFilter(iDevice, &pSource);
		if (!response)
		{
			devices_resolutions[iDevice].nResolutions = 0;
			continue;
		}

		hr = pCaptB->FindInterface(
        &PIN_CATEGORY_CAPTURE,
        &MEDIATYPE_Video,
        pSource,
        IID_IAMStreamConfig,
        (void**)&pConfig);

		if (FAILED(hr))
		{
			pSource->Release();
			devices_resolutions[iDevice].nResolutions = 0;
			continue;
		}

		pConfig->GetNumberOfCapabilities(&iCount, &iSize);

		devices_resolutions[iDevice].SetNResolutions(iCount);


		for(int i=0; i < iCount; i++) {
			AM_MEDIA_TYPE *pmt;
			if( pConfig->GetStreamCaps(i, &pmt, reinterpret_cast<BYTE*>(&caps)) == S_OK ) {

				VIDEOINFOHEADER *pVih = 
					reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
				
				devices_resolutions[iDevice].x[i] = caps.InputSize.cx;
				devices_resolutions[iDevice].y[i] = caps.InputSize.cy;
				devices_resolutions[iDevice].color_space[i] = pmt->subtype;
				devices_resolutions[iDevice].compression[i] = pVih->bmiHeader.biCompression;
				DeleteMediaType(pmt);
			}
		}

		pSource->Release();
		pConfig->Release();

		pSource = 0;
	}
}
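A hedged aside: Example #2 records only caps.InputSize, but VIDEO_STREAM_CONFIG_CAPS also describes an output size range for devices that can scale. A small sketch that dumps the extra fields (the PrintCapRange name is ours, and printf assumes <stdio.h>):

// Sketch: dump the size range carried by one VIDEO_STREAM_CONFIG_CAPS entry.
// Devices that scale continuously report Min/MaxOutputSize plus granularity.
void PrintCapRange(const VIDEO_STREAM_CONFIG_CAPS& caps)
{
	printf("input %ldx%ld, output %ldx%ld .. %ldx%ld (step %ldx%ld)\n",
	       caps.InputSize.cx, caps.InputSize.cy,
	       caps.MinOutputSize.cx, caps.MinOutputSize.cy,
	       caps.MaxOutputSize.cx, caps.MaxOutputSize.cy,
	       caps.OutputGranularityX, caps.OutputGranularityY);
}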
Example #3
HRESULT
recChannel_t::set_rate(float FR)
{
    __CONTEXT("recChannel_t::set_rate");
    if (FR < 1)
    {
        return S_OK;
    }
    float factorRate = FR / 30;
    HRESULT hr = S_OK;

    if (factorRate < 0.1f) factorRate = 0.1f;
    frameRate = factorRate;

    IAMStreamConfig *pConfig = NULL;

    if ((camInfo->getKind() == SHARED ||
         camInfo->getKind() == CAM)   &&
        actualFormat.pbFormat != NULL)
    {
        VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*) actualFormat.pbFormat;
        // AvgTimePerFrame is in 100-ns units: 10,000,000 / fps
        pVih->AvgTimePerFrame = (REFERENCE_TIME)(10000000.0 / FR);
        camInfo->setRate(pVih->AvgTimePerFrame);
        if (camInfo->getKind() == CAM)
        {
            IPin * pInput = NULL;
            get_camInfo()->output->ConnectedTo(&pInput);
            if (mapping)
            {
                pControl->Stop();
            }
            if (pInput)
            {
                get_camInfo()->output->Disconnect();
                pInput->Disconnect();
            }
            hr = get_camInfo()->output->QueryInterface(IID_IAMStreamConfig, (void**)&pConfig);
            if (pConfig)
            {
                hr = pConfig->SetFormat(&actualFormat);
                errorCheck(hr);
                pConfig->Release();
            }
            if (pInput)
            {
                hr = pGraph->Connect(get_camInfo()->output, pInput);
                errorCheck(hr);
                pInput->Release();  // ConnectedTo AddRef'ed the pin
            }
            errorCheck(hr);
            if (mapping)
            {
                pControl->Run();
            }
        }
    }
    return hr;
}
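Examples #1 and #3 both set AvgTimePerFrame, a REFERENCE_TIME measured in 100-nanosecond units, so a frame rate converts as 10,000,000 / fps. A pair of tiny helpers (the names are ours) makes the conversion explicit:

// AvgTimePerFrame is a REFERENCE_TIME in 100-ns units: 10,000,000 ticks/second.
inline REFERENCE_TIME FpsToAvgTimePerFrame(double fps)
{
	return (REFERENCE_TIME)(10000000.0 / fps + 0.5);  // round to nearest tick
}

inline double AvgTimePerFrameToFps(REFERENCE_TIME avgTimePerFrame)
{
	return avgTimePerFrame ? 10000000.0 / (double)avgTimePerFrame : 0.0;
}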
Example #4
bool MIPDirectShowCapture::listGUIDS(std::list<GUID> &guids)
{
	guids.clear();

	HRESULT hr;

	IAMStreamConfig *pConfig = 0;

	hr = m_pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, 0, m_pCaptDevice, IID_IAMStreamConfig, (void**)&pConfig);
	if (HR_FAILED(hr))
	{
		setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECONFIG);
		return false;
	}

	int count = 0;
	int s = 0;
	
	hr = pConfig->GetNumberOfCapabilities(&count, &s);
	if (HR_FAILED(hr))
	{
		pConfig->Release();
		setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECAPS);
		return false;
	}

	if (s != sizeof(VIDEO_STREAM_CONFIG_CAPS))
	{
		pConfig->Release();
		setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_INVALIDCAPS);
		return false;
	}

	for (int i = 0; i < count; i++)
	{
        VIDEO_STREAM_CONFIG_CAPS caps;
        AM_MEDIA_TYPE *pMediaType;

        hr = pConfig->GetStreamCaps(i, &pMediaType, (BYTE*)&caps);
        if (HR_SUCCEEDED(hr))
        {
			if (pMediaType->majortype == MEDIATYPE_Video)
			{
				GUID subType = pMediaType->subtype;
				
				guids.push_back(subType);

//				uint8_t *pSubType = (uint8_t *)&subType;
//
//				printf("0x%02x%02x%02x%02x %c%c%c%c\n",(int)pSubType[0],(int)pSubType[1],(int)pSubType[2],(int)pSubType[3],
//					                                   (char)pSubType[0],(char)pSubType[1],(char)pSubType[2],(char)pSubType[3]);
			}

			if (pMediaType->pbFormat)
				CoTaskMemFree(pMediaType->pbFormat);
			CoTaskMemFree(pMediaType);
		}
	}

	pConfig->Release();
	return true;
}
Example #5
void CCaptureDevice::SetCaptureBufferSize(void)
{
	IPin * pCapturePin = GetPin();
	if (pCapturePin)
	{
		DWORD  dwBytesPerSec = 0;
		AM_MEDIA_TYPE * pmt = NULL;
		IAMStreamConfig * pCfg = NULL;
		HRESULT hr = pCapturePin->QueryInterface(IID_IAMStreamConfig, (void **)&pCfg);
		if ( hr==S_OK )
		{
            hr = pCfg->GetFormat(&pmt);
			if ( hr==S_OK )
			{
				WAVEFORMATEX *pWF = (WAVEFORMATEX *) pmt->pbFormat;
				dwBytesPerSec = pWF->nAvgBytesPerSec;
				pWF->nChannels = 1;
				pWF->wBitsPerSample = 8;
				pWF->nSamplesPerSec = 11025;
				pWF->nAvgBytesPerSec = pWF->nSamplesPerSec * pWF->nChannels * pWF->wBitsPerSample / 8;
				pWF->nBlockAlign = 1;
/*
	info.cbSize = sizeof(WAVEFORMATEX);
	info.wFormatTag = 1;
	info.nChannels = 2;
	info.nSamplesPerSec = 44100;
	//info.nSamplesPerSec = 22050;
	//info.nSamplesPerSec = 11025;
	info.wBitsPerSample = 16;
	info.nAvgBytesPerSec = info.nSamplesPerSec * info.nChannels * info.wBitsPerSample / 8;
	info.nBlockAlign = 4;
	*/
				pCfg->SetFormat( pmt );
				DeleteMediaType(pmt);
			}
			pCfg->Release();
		}
/*		if (dwBytesPerSec)
		{
			IAMBufferNegotiation * pNeg = NULL;
			hr = pCapturePin->QueryInterface(IID_IAMBufferNegotiation, 
				(void **)&pNeg);
			if (SUCCEEDED(hr))
			{
				ALLOCATOR_PROPERTIES AllocProp;
				AllocProp.cbAlign  = -1;  // -1 means no preference.
				AllocProp.cbBuffer = dwBytesPerSec *  dwLatencyInMilliseconds / 1000;
				AllocProp.cbPrefix = -1;
				AllocProp.cBuffers = -1;
				hr = pNeg->SuggestAllocatorProperties(&AllocProp);
				pNeg->Release();
			}
		}*/
	}
}
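The commented-out block above sketches buffer negotiation but references an undefined dwLatencyInMilliseconds. A live sketch under that assumption (the latency target is a value you pick, dwBytesPerSec comes from the pin's current WAVEFORMATEX as in the example, and allocator suggestions must be made before the pin is connected):

// Sketch: suggest allocator properties sized for a chosen latency.
const DWORD dwLatencyInMilliseconds = 50;  // assumed latency target
IAMBufferNegotiation * pNeg = NULL;
HRESULT hr = pCapturePin->QueryInterface(IID_IAMBufferNegotiation, (void **)&pNeg);
if (SUCCEEDED(hr))
{
	ALLOCATOR_PROPERTIES AllocProp;
	AllocProp.cbAlign  = -1;  // -1 means no preference
	AllocProp.cbBuffer = dwBytesPerSec * dwLatencyInMilliseconds / 1000;
	AllocProp.cbPrefix = -1;
	AllocProp.cBuffers = -1;
	pNeg->SuggestAllocatorProperties(&AllocProp);
	pNeg->Release();
}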
Example #6
HRESULT Captura::IniciarVentanaVideo(HWND hWnd,int width, int height)
{

	HRESULT hr;
	RECT rcDest;
	
//    CComPtr<IAMStreamConfig> pConfig;
    IAMStreamConfig * pConfig;
    IEnumMediaTypes *pMedia;
    AM_MEDIA_TYPE *pmt = NULL, *pfnt = NULL;

    hr = m_pCamOutPin->EnumMediaTypes( &pMedia );
    if(SUCCEEDED(hr))
    {

        while(pMedia->Next(1, &pmt, 0) == S_OK)
        {
            if( pmt->formattype == FORMAT_VideoInfo )
            {
                VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)pmt->pbFormat;

                if( vih->bmiHeader.biWidth == width && vih->bmiHeader.biHeight == height )
                {
                    pfnt = pmt;
                    break;
                }
            }
            BorrarTipoMedio( pmt );  // free every media type we do not keep
        }
        pMedia->Release();
    }
    hr = m_pCamOutPin->QueryInterface( IID_IAMStreamConfig, (void **) &pConfig );
    if(SUCCEEDED(hr))
    {
        if( pfnt != NULL )
        {
            hr=pConfig->SetFormat( pfnt );

            BorrarTipoMedio( pfnt );
        }
        hr = pConfig->GetFormat( &pfnt );
        if(SUCCEEDED(hr))
        {
            m_nAncho = ((VIDEOINFOHEADER *)pfnt->pbFormat)->bmiHeader.biWidth;
            m_nAlto = ((VIDEOINFOHEADER *)pfnt->pbFormat)->bmiHeader.biHeight;
            BorrarTipoMedio( pfnt );
        }
        pConfig->Release();
    }
	::GetClientRect (hWnd,&rcDest);
    hr = m_pWC->SetVideoPosition(NULL, &rcDest);
    return hr;
}
Example #7
/* Set the capture frame format: walk every supported format looking for the requested one; if none matches, capture with the default format. */
HRESULT CVMR_Capture::InitVideoWindow(HWND hWnd,int width, int height)
{
	HRESULT hr;
	RECT rcDest;
	
    IAMStreamConfig *pConfig;
    IEnumMediaTypes *pMedia;
    AM_MEDIA_TYPE *pmt = NULL, *pfnt = NULL;

    hr = m_pCamOutPin->EnumMediaTypes( &pMedia );
    if(SUCCEEDED(hr))
    {
		// Walk every supported video format to see whether the requested one is available
        while(pMedia->Next(1, &pmt, 0) == S_OK)
        {
            if( pmt->formattype == FORMAT_VideoInfo )
            {
                VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)pmt->pbFormat;
				// Does this format match the requested width and height?
                if( vih->bmiHeader.biWidth == width && vih->bmiHeader.biHeight == height )
                {
                    pfnt = pmt;
                    break;
                }
            }
            DeleteMediaType( pmt );  // free every media type we do not keep
        }
        pMedia->Release();
    }
	
    hr = m_pCamOutPin->QueryInterface( IID_IAMStreamConfig, (void **) &pConfig );
    if(SUCCEEDED(hr))
    {
		// The requested format was found
        if( pfnt != NULL )
        {
            hr=pConfig->SetFormat( pfnt );
            DeleteMediaType( pfnt );
        }
		// Read back the media format actually in effect (the default if none was set)
        hr = pConfig->GetFormat( &pfnt );
        if(SUCCEEDED(hr))
        {
            m_nWidth = ((VIDEOINFOHEADER *)pfnt->pbFormat)->bmiHeader.biWidth;   // read width
            m_nHeight = ((VIDEOINFOHEADER *)pfnt->pbFormat)->bmiHeader.biHeight; // read height
            DeleteMediaType( pfnt );
        }
        pConfig->Release();
    }
	// Get the client area of the target window to position the video display
	::GetClientRect (hWnd,&rcDest);
    hr = m_pWC->SetVideoPosition(NULL, &rcDest);
    return hr;
}
Example #8
IAMStreamConfig * CCaptureDevice::GetStreamConfig(void)
{
	IAMStreamConfig * pConfig = NULL;
	if (m_pBaseFilter)
	{
		// Get the capture output pin first
		IPin * pCapture = GetPin();
		if (pCapture)
		{
			pCapture->QueryInterface(IID_IAMStreamConfig, (void **)&pConfig);
		}
	}
	// The caller owns the returned reference and must Release() it;
	// releasing it here, as the original did, would return a dangling pointer.
	return pConfig;
}
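With the premature Release fixed, GetStreamConfig hands ownership to the caller. A hypothetical usage sketch (device stands in for a CCaptureDevice instance):

// Hypothetical caller: the returned interface was AddRef'ed by
// QueryInterface, so the caller must Release() it when done.
IAMStreamConfig * pCfg = device.GetStreamConfig();
if (pCfg)
{
	AM_MEDIA_TYPE * pmt = NULL;
	if (SUCCEEDED(pCfg->GetFormat(&pmt)))
	{
		// ... inspect or modify the format here ...
		DeleteMediaType(pmt);
	}
	pCfg->Release();
}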
Example #9
void DSCaptureDevice::initSupportedFormats()
{
    HRESULT ret;
    IAMStreamConfig* streamConfig = NULL;
    AM_MEDIA_TYPE* mediaType = NULL;

    ret = m_captureGraphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
        m_srcFilter, IID_IAMStreamConfig, (void**)&streamConfig);

    /* enumerate all supported formats */
    if(SUCCEEDED(ret))
    {
        int nb = 0;
        int size = 0;
        BYTE* allocBytes = NULL;

        streamConfig->GetNumberOfCapabilities(&nb, &size);
        allocBytes = new BYTE[size];

        for(int i = 0 ; i < nb ; i++)
        {
            if(streamConfig->GetStreamCaps(i, &mediaType, allocBytes) == S_OK)
            {
                struct DSFormat format;
                VIDEOINFOHEADER* hdr = (VIDEOINFOHEADER*)mediaType->pbFormat;

                if(hdr)
                {
                    format.height = hdr->bmiHeader.biHeight;
                    format.width = hdr->bmiHeader.biWidth;
                    format.pixelFormat = mediaType->subtype.Data1;
                    format.mediaType = mediaType->subtype;

                    m_formats.push_back(format);
                }
                DeleteMediaType(mediaType);  // GetStreamCaps allocates a new type each call
            }
        }

        delete[] allocBytes;
        streamConfig->Release();
    }
}
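Example #9 stores mediaType->subtype.Data1 as the pixel format; that works because FOURCC-mapped subtype GUIDs (YUY2, UYVY, MJPG, ...) place the FOURCC in the first DWORD of the GUID. RGB subtypes use reserved GUIDs instead, so Data1 is not a printable FOURCC for them. A small decoder for logging (the function name is ours):

// Decode a FOURCC DWORD (e.g. mediaType->subtype.Data1) into a C string.
void FourCCToString(DWORD fcc, char out[5])
{
	out[0] = (char)( fcc        & 0xFF);
	out[1] = (char)((fcc >>  8) & 0xFF);
	out[2] = (char)((fcc >> 16) & 0xFF);
	out[3] = (char)((fcc >> 24) & 0xFF);
	out[4] = '\0';
}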
Example #10
void CCaptureDevice::AdjustOutput(void)
{
	HRESULT  hr         = S_OK;
	AM_MEDIA_TYPE * pmt = NULL;
	LONGLONG avgTimePerFrame = 2000000;  // 20fps

	pmt = SelectMediaType();
	if (pmt)
	{
		if (pmt->formattype == FORMAT_VideoInfo) 
		{
			VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *) pmt->pbFormat;
			pvi->AvgTimePerFrame       = avgTimePerFrame;
			pvi->bmiHeader.biWidth     = Preferred_Width;
			pvi->bmiHeader.biHeight    = Preferred_Height;
			pvi->bmiHeader.biSizeImage = Preferred_Width * Preferred_Height * pvi->bmiHeader.biBitCount / 8;
			
			IAMStreamConfig * pCfg = GetStreamConfig();
			if (pCfg)
			{
				hr = pCfg->SetFormat(pmt);
				pCfg->Release();
			}
		}
		DeleteMediaType(pmt);
	}
}
Example #11
bool VideoCaptureDirectShow2::setDeviceFilterMediaType(ICaptureGraphBuilder2* captureBuilder, IBaseFilter* deviceFilter, AVCapability cap) {

  if(!captureBuilder) {
    RX_ERROR("Cannot set device filter media type because the given ICaptureGraphBuilder* is invalid");
    return false;
  }

  if(!deviceFilter) {
    RX_ERROR("Cannot set the media type for the device filter because the device filter is invalid");
    return false;
  }
  
  if(cap.index < 0) {
    RX_ERROR("Cannot set the media type for the device filter because the given AVCapability has no index. Iterate over the stream caps to retrieve the caps index that we need");
    return false;
  }

  IAMStreamConfig* conf = NULL;
  HRESULT hr = captureBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, deviceFilter, IID_IAMStreamConfig, (void**)&conf);
  if(FAILED(hr)) {
    RX_ERROR("Failed to retrieve a IAMStreamConfig to set the device filter media type");
    return false;
  }

  bool result = true;
  AM_MEDIA_TYPE* mt = NULL;
  VIDEO_STREAM_CONFIG_CAPS caps;
  hr = conf->GetStreamCaps(cap.index, &mt, (BYTE*)&caps);
  if(FAILED(hr)) {
    RX_ERROR("Failed to retrieve the AM_MEDIA_TYPE for the AVCapabiltiy with stream caps index: %d", cap.index);
    result = false;
    goto done;
  }
  
  if(mt->majortype != MEDIATYPE_Video) {
    RX_ERROR("The AM_MEDIA_TYPE we found is not a Video type, so we cannot use it to set the media format of the device filter");
    result = false;
    goto done;
  }
  if(mt->formattype != FORMAT_VideoInfo) {
    RX_ERROR("The AM_MEDIA_TYPE we found is not a Format_VideoInfo, so cannot set media type of device filter");
    result = false;
    goto done;
  }
  if(mt->cbFormat < sizeof(VIDEOINFOHEADER)) {
    RX_ERROR("The AMD_MEDIA_TYPE has an invalid cbFormat size");
    result = false;
    goto done;
  }
  if(mt->pbFormat == NULL) {
    RX_ERROR("The AM_MEDIA_TYPE.pbFormat is NULL; cannot set type of device filter");
    result = false;
    goto done;
  }

  GUID guid_pixfmt = libavPixelFormatToMediaSubTypeGUID(cap.pixel_format);
  if(mt->subtype != guid_pixfmt) {
    RX_ERROR("The AM_MEDIA_TYPE.subtype is not the same as the one we want..");
    result = false;
    goto done;
  }

  hr = conf->SetFormat(mt);
  if(FAILED(hr)) {
    RX_ERROR("Failed to set the AM_MEDIA_TYPE for the device filter");
    result = false;
    goto done;
  }

 done:
  if(mt != NULL) {
    deleteMediaType(mt);  // mt stays NULL when GetStreamCaps never succeeded
  }
  safeReleaseDirectShow(&conf);
  return result;
}
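The error text in Example #11 says to iterate over the stream caps to find the index for an AVCapability. A hypothetical sketch of that lookup (findCapIndex is our name; it reuses the example's deleteMediaType helper):

// Sketch: find the stream-caps index whose VIDEOINFOHEADER matches the
// requested width/height; returns -1 when nothing matches.
int findCapIndex(IAMStreamConfig* conf, int width, int height) {
  int count = 0, size = 0;
  if(FAILED(conf->GetNumberOfCapabilities(&count, &size))) {
    return -1;
  }
  for(int i = 0; i < count; ++i) {
    AM_MEDIA_TYPE* mt = NULL;
    VIDEO_STREAM_CONFIG_CAPS caps;
    if(conf->GetStreamCaps(i, &mt, (BYTE*)&caps) != S_OK) {
      continue;
    }
    bool match = false;
    if(mt->formattype == FORMAT_VideoInfo && mt->pbFormat != NULL) {
      VIDEOINFOHEADER* vih = (VIDEOINFOHEADER*)mt->pbFormat;
      match = (vih->bmiHeader.biWidth == width && vih->bmiHeader.biHeight == height);
    }
    deleteMediaType(mt);
    if(match) {
      return i;
    }
  }
  return -1;
}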
Example #12
/// Set the audio format
BOOL CAudioCapture::SetAudioFormat(ENUM_FREQUENCY_TYPE enFrequency,
	ENUM_CHANNEL_TYPE enChannel, ENUM_SAMPLE_TYPE enSample)
{
	if(NULL != m_pCaptureFilter)
	{
		BOOL bResult = FALSE;
		do
		{
			IPin* pOutPin = GetOutputPin(m_pCaptureFilter, (uint16_t)0);
			if(NULL != pOutPin)
			{
				IAMBufferNegotiation *pNeg = NULL;
				IAMStreamConfig *pCfg = NULL;

				// Get buffer negotiation interface
				HRESULT hr = pOutPin->QueryInterface(IID_IAMBufferNegotiation, (void **)&pNeg);
				if (FAILED(hr))
				{
					pOutPin->Release();
					break;
				}

				// Find number of bytes in one second
				long lBytesPerSecond = (long) (enSample * enFrequency * enChannel);

				// Buffer size adjusted for the FAAC encoder (1024 samples per frame)
				long lBufferSize =  1024 * enSample * enChannel;

				// Set the buffer size based on selected settings
				ALLOCATOR_PROPERTIES prop={0};
				prop.cbBuffer = lBufferSize;
				prop.cBuffers = 6;
				prop.cbAlign = enSample * enChannel;
				hr = pNeg->SuggestAllocatorProperties(&prop);
				pNeg->Release();

				// Now set the actual format of the audio data
				hr = pOutPin->QueryInterface(IID_IAMStreamConfig, (void **)&pCfg);
				if (FAILED(hr))
				{
					pOutPin->Release();
					break;
				}            

				// Read current media type/format
				AM_MEDIA_TYPE *pmt = NULL;
				hr = pCfg->GetFormat(&pmt);

				if (SUCCEEDED(hr))
				{
					// Fill in values for the new format
					WAVEFORMATEX *pWF = (WAVEFORMATEX *) pmt->pbFormat;
					pWF->nChannels = (WORD) enChannel;
					pWF->nSamplesPerSec = enFrequency;
					pWF->nAvgBytesPerSec = lBytesPerSecond;
					pWF->wBitsPerSample = (WORD) (enSample * 8);
					pWF->nBlockAlign = (WORD) (enSample * enChannel);

					// Set the new formattype for the output pin
					hr = pCfg->SetFormat(pmt);
					UtilDeleteMediaType(pmt);
				}

				// Release interfaces
				pCfg->Release();
				pOutPin->Release();

				bResult = SUCCEEDED(hr);
			}
		}while(FALSE);

		return bResult;
	}
	else
	{
		m_enFrequency = enFrequency;
		m_enChannel = enChannel;
		m_enSample = enSample;
		return TRUE;
	}
}
Example #13
bool CaptureDShow::init()
{
    // Create the pipeline.
    if (FAILED(CoCreateInstance(CLSID_FilterGraph,
                                NULL,
                                CLSCTX_INPROC_SERVER,
                                IID_IGraphBuilder,
                                reinterpret_cast<void **>(&this->m_graph))))
        return false;

    // Create the webcam filter.
    IBaseFilter *webcamFilter = this->findFilterP(this->m_device);

    if (!webcamFilter) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    if (FAILED(this->m_graph->AddFilter(webcamFilter, SOURCE_FILTER_NAME))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    // Create the Sample Grabber filter.
    IBaseFilter *grabberFilter = NULL;

    if (FAILED(CoCreateInstance(CLSID_SampleGrabber,
                                NULL,
                                CLSCTX_INPROC_SERVER,
                                IID_IBaseFilter,
                                reinterpret_cast<void **>(&grabberFilter)))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    if (FAILED(this->m_graph->AddFilter(grabberFilter, L"Grabber"))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    ISampleGrabber *grabberPtr = NULL;

    if (FAILED(grabberFilter->QueryInterface(IID_ISampleGrabber,
                                             reinterpret_cast<void **>(&grabberPtr)))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    if (FAILED(grabberPtr->SetOneShot(FALSE))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    HRESULT hr = grabberPtr->SetBufferSamples(TRUE);

    if (FAILED(hr)) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    if (this->m_ioMethod != IoMethodDirectRead) {
        int type = this->m_ioMethod == IoMethodGrabSample? 0: 1;
        hr = grabberPtr->SetCallback(&this->m_frameGrabber, type);
    }

    this->m_grabber = SampleGrabberPtr(grabberPtr, this->deleteUnknown);

    if (!this->connectFilters(this->m_graph, webcamFilter, grabberFilter)) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    // Create null filter.
    IBaseFilter *nullFilter = NULL;

    if (FAILED(CoCreateInstance(CLSID_NullRenderer,
                                NULL,
                                CLSCTX_INPROC_SERVER,
                                IID_IBaseFilter,
                                reinterpret_cast<void **>(&nullFilter)))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    if (FAILED(this->m_graph->AddFilter(nullFilter, L"NullFilter"))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    if (!this->connectFilters(this->m_graph, grabberFilter, nullFilter)) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    // Set capture format
    QList<int> streams = this->streams();

    if (streams.isEmpty()) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    MediaTypesList mediaTypes = this->listMediaTypes(webcamFilter);

    if (mediaTypes.isEmpty()) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    MediaTypePtr mediaType = streams[0] < mediaTypes.size()?
                                mediaTypes[streams[0]]:
                                mediaTypes.first();

    if (FAILED(grabberPtr->SetMediaType(mediaType.data()))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    PinList pins = this->enumPins(webcamFilter, PINDIR_OUTPUT);

    for (const PinPtr &pin: pins) {
        IAMStreamConfig *pStreamConfig = NULL;
        HRESULT hr =
                pin->QueryInterface(IID_IAMStreamConfig,
                                    reinterpret_cast<void **>(&pStreamConfig));

        if (SUCCEEDED(hr))
            pStreamConfig->SetFormat(mediaType.data());

        if (pStreamConfig)
            pStreamConfig->Release();
    }

    // Run the pipeline
    IMediaControl *control = NULL;

    if (FAILED(this->m_graph->QueryInterface(IID_IMediaControl,
                                             reinterpret_cast<void **>(&control)))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    this->m_id = Ak::id();
    AkCaps caps = this->capsFromMediaType(mediaType);
    this->m_timeBase = AkFrac(caps.property("fps").toString()).invert();

    if (FAILED(control->Run())) {
        control->Release();
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    control->Release();

    this->m_localImageControls.clear();

    return true;
}
Example #14
AM_MEDIA_TYPE * CCaptureDevice::SelectMediaType(void)
{
	// Preferred sequence: UYVY, YUY2, RGB565, RGB555, RGB24, RGB32  
	VIDEO_STREAM_CONFIG_CAPS  pSCC;
	AM_MEDIA_TYPE * pmt = NULL;
	HRESULT hr = S_OK;
	int nCounts=0, nSize=0;
	int preferredIndex = -1;
	enum {
		UYVY = 0, YUY2, RGB565, RGB555, RGB24, RGB32, Unknown
	} currentPreferred, temp;
	currentPreferred = Unknown;

	IAMStreamConfig * pCfg = GetStreamConfig();
	if (!pCfg)
	{
		return NULL;
	}
	pCfg->GetNumberOfCapabilities(&nCounts, &nSize);
	for (int i = 0; i < nCounts; i++)
	{
		if (pCfg->GetStreamCaps(i, &pmt, (BYTE *)&pSCC) == S_OK)
		{
			if (pmt->subtype == MEDIASUBTYPE_RGB32)
			{
				temp = RGB32;
			}
			else if (pmt->subtype == MEDIASUBTYPE_RGB24)
			{
				temp = RGB24;
			}
			else if (pmt->subtype == MEDIASUBTYPE_RGB565)
			{
				temp = RGB565;
			}
			else if (pmt->subtype == MEDIASUBTYPE_RGB555)
			{
				temp = RGB555;
			}
			else if (pmt->subtype == MEDIASUBTYPE_YUY2)
			{
				temp = YUY2;
			}
			else if (pmt->subtype == MEDIASUBTYPE_UYVY)
			{
				temp = UYVY;
			}
			else
			{
				temp = Unknown;
			}

			if (temp < currentPreferred)
			{
				currentPreferred = temp;
				preferredIndex   = i;
			}
			DeleteMediaType(pmt);
		}
	}

	// Get the preferred media type
	pmt = NULL;
	if (preferredIndex != -1)
	{
		hr = pCfg->GetStreamCaps(preferredIndex, &pmt, (BYTE *)&pSCC);
	}
	else
	{
		hr = pCfg->GetFormat(&pmt);
	}
	pCfg->Release();

	return pmt;
}
Example #15
bool CCameraDS::OpenCamera(int nCamID, bool bDisplayProperties, int nWidth, int nHeight)
{
	
	HRESULT hr = S_OK;

	CoInitialize(NULL);
	// Create the Filter Graph Manager.
	hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC,
							IID_IGraphBuilder, (void **)&m_pGraph);

	hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER, 
							IID_IBaseFilter, (LPVOID *)&m_pSampleGrabberFilter);

	hr = m_pGraph->QueryInterface(IID_IMediaControl, (void **) &m_pMediaControl);
	hr = m_pGraph->QueryInterface(IID_IMediaEvent, (void **) &m_pMediaEvent);

	hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER,
							IID_IBaseFilter, (LPVOID*) &m_pNullFilter);


	hr = m_pGraph->AddFilter(m_pNullFilter, L"NullRenderer");
	
	hr = m_pSampleGrabberFilter->QueryInterface(IID_ISampleGrabber, (void**)&m_pSampleGrabber);

	AM_MEDIA_TYPE   mt;
	ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
	mt.majortype = MEDIATYPE_Video;
	mt.subtype = MEDIASUBTYPE_RGB24;
	mt.formattype = FORMAT_VideoInfo; 
	hr = m_pSampleGrabber->SetMediaType(&mt);
	MYFREEMEDIATYPE(mt);

	m_pGraph->AddFilter(m_pSampleGrabberFilter, L"Grabber");
 
	// Bind Device Filter.  We know the device because the id was passed in
	BindFilter(nCamID, &m_pDeviceFilter);
	m_pGraph->AddFilter(m_pDeviceFilter, NULL);

	CComPtr<IEnumPins> pEnum;
	m_pDeviceFilter->EnumPins(&pEnum);
 
	hr = pEnum->Reset();
	hr = pEnum->Next(1, &m_pCameraOutput, NULL); 

	pEnum = NULL; 
	m_pSampleGrabberFilter->EnumPins(&pEnum);
	pEnum->Reset();
	hr = pEnum->Next(1, &m_pGrabberInput, NULL); 

	pEnum = NULL;
	m_pSampleGrabberFilter->EnumPins(&pEnum);
	pEnum->Reset();
	pEnum->Skip(1);
	hr = pEnum->Next(1, &m_pGrabberOutput, NULL); 

	pEnum = NULL;
	m_pNullFilter->EnumPins(&pEnum);
	pEnum->Reset();
	hr = pEnum->Next(1, &m_pNullInputPin, NULL);

	//SetCrossBar();

	if (bDisplayProperties) 
	{
		CComPtr<ISpecifyPropertyPages> pPages;

		HRESULT hr = m_pCameraOutput->QueryInterface(IID_ISpecifyPropertyPages, (void**)&pPages);
		if (SUCCEEDED(hr))
		{
			PIN_INFO PinInfo;
			m_pCameraOutput->QueryPinInfo(&PinInfo);

			CAUUID caGUID;
			pPages->GetPages(&caGUID);

			OleCreatePropertyFrame(NULL, 0, 0,
						L"Property Sheet", 1,
						(IUnknown **)&(m_pCameraOutput.p),
						caGUID.cElems,
						caGUID.pElems,
						0, 0, NULL);
			CoTaskMemFree(caGUID.pElems);
			PinInfo.pFilter->Release();
		}
		pPages = NULL;
	}
	else 
	{
		//////////////////////////////////////////////////////////////////////////////
		// Set the camera width and height from nWidth/nHeight (default 320*240)
		// by flymanbox @2009-01-24
		//////////////////////////////////////////////////////////////////////////////
	   int _Width = nWidth, _Height = nHeight;
	   IAMStreamConfig* iconfig = NULL;
	   hr = m_pCameraOutput->QueryInterface(IID_IAMStreamConfig, (void**)&iconfig);
	   if (FAILED(hr))
	   {
		  return false;
	   }

	   AM_MEDIA_TYPE* pmt = NULL;
	   if (iconfig->GetFormat(&pmt) != S_OK)
	   {
		  //printf("GetFormat Failed ! \n");
		  iconfig->Release();
		  return false;
	   }

	   VIDEOINFOHEADER* phead;
	   if (pmt->formattype == FORMAT_VideoInfo)
	   {
			phead = (VIDEOINFOHEADER*)pmt->pbFormat;
			phead->bmiHeader.biWidth = _Width;
			phead->bmiHeader.biHeight = _Height;
			if ((hr = iconfig->SetFormat(pmt)) != S_OK)
			{
				iconfig->Release();
				MYFREEMEDIATYPE(*pmt);
				return false;
			}
		}
		iconfig->Release();
		iconfig = NULL;
		MYFREEMEDIATYPE(*pmt);
	}

	hr = m_pGraph->Connect(m_pCameraOutput, m_pGrabberInput);
	hr = m_pGraph->Connect(m_pGrabberOutput, m_pNullInputPin);

	if (FAILED(hr))
	{
		switch(hr)
		{
			case VFW_S_NOPREVIEWPIN :
				break;
			case E_FAIL :
				break;
			case E_INVALIDARG :
				break;
			case E_POINTER :
				break;
		}
	}

	m_pSampleGrabber->SetBufferSamples(TRUE);
	m_pSampleGrabber->SetOneShot(TRUE);
    
	hr = m_pSampleGrabber->GetConnectedMediaType(&mt);
	if(FAILED(hr))
		return false;

	VIDEOINFOHEADER *videoHeader;
	videoHeader = reinterpret_cast<VIDEOINFOHEADER*>(mt.pbFormat);
	m_nWidth = videoHeader->bmiHeader.biWidth;
	m_nHeight = videoHeader->bmiHeader.biHeight;
	m_bConnected = true;

	pEnum = NULL;
	return true;
}
Example #16
bool DeviceSource::LoadFilters()
{
    if(bCapturing || bFiltersLoaded)
        return false;

    bool bSucceeded = false;

    List<MediaOutputInfo> outputList;
    IAMStreamConfig *config = NULL;
    bool bAddedVideoCapture = false, bAddedAudioCapture = false, bAddedDevice = false;
    GUID expectedMediaType;
    IPin *devicePin = NULL, *audioPin = NULL;
    HRESULT err;
    String strShader;

    bUseThreadedConversion = API->UseMultithreadedOptimizations() && (OSGetTotalCores() > 1);

    //------------------------------------------------
    // basic initialization vars

    bool bCheckForceAudio = data->GetInt(TEXT("forceCustomAudioDevice")) != 0;

    bUseCustomResolution = data->GetInt(TEXT("customResolution"));
    strDevice = data->GetString(TEXT("device"));
    strDeviceName = data->GetString(TEXT("deviceName"));
    strDeviceID = data->GetString(TEXT("deviceID"));
    strAudioDevice = data->GetString(TEXT("audioDevice"));
    strAudioName = data->GetString(TEXT("audioDeviceName"));
    strAudioID = data->GetString(TEXT("audioDeviceID"));
    strAudioGUID = data->GetString(TEXT("audioDeviceCLSID"));

    if(strAudioGUID.Compare(TEXT("CLSID_AudioInputDeviceCategory"))) matchGUID = CLSID_AudioInputDeviceCategory;
    if(strAudioGUID.Compare(TEXT("CLSID_VideoInputDeviceCategory"))) matchGUID = CLSID_VideoInputDeviceCategory;
    if(strAudioGUID.Compare(TEXT("CLSID_AudioRendererCategory"))) {
        //Log(TEXT("Dese are spekers.\n"));
        matchGUID = CLSID_AudioRendererCategory;
    }

    bFlipVertical = data->GetInt(TEXT("flipImage")) != 0;
    bFlipHorizontal = data->GetInt(TEXT("flipImageHorizontal")) != 0;

    opacity = data->GetInt(TEXT("opacity"), 100);

    float volume = data->GetFloat(TEXT("volume"), 1.0f);

    //------------------------------------------------
    // chroma key stuff

    bUseChromaKey = data->GetInt(TEXT("useChromaKey")) != 0;
    keyColor = data->GetInt(TEXT("keyColor"), 0xFFFFFFFF);
    keySimilarity = data->GetInt(TEXT("keySimilarity"));
    keyBlend = data->GetInt(TEXT("keyBlend"), 80);
    keySpillReduction = data->GetInt(TEXT("keySpillReduction"), 50);

    if(keyBaseColor.x < keyBaseColor.y && keyBaseColor.x < keyBaseColor.z)
        keyBaseColor -= keyBaseColor.x;
    else if(keyBaseColor.y < keyBaseColor.x && keyBaseColor.y < keyBaseColor.z)
        keyBaseColor -= keyBaseColor.y;
    else if(keyBaseColor.z < keyBaseColor.x && keyBaseColor.z < keyBaseColor.y)
        keyBaseColor -= keyBaseColor.z;

    //------------------------------------------------
    // get the device filter and pins

    if(strAudioDevice.IsValid())
    {
        audioDeviceFilter = GetDeviceByValue(matchGUID, L"FriendlyName", strAudioName, L"DevicePath", strAudioID);
        if(!audioDeviceFilter) {
            AppWarning(TEXT("DShowAudioPlugin: Invalid audio device: name '%s', path '%s'"), strAudioName.Array(), strAudioID.Array());
        }
    }

    soundOutputType = data->GetInt(TEXT("soundOutputType"));

    if(soundOutputType != 0)
    {
        err = capture->FindPin(audioDeviceFilter, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, FALSE, 0, &audioPin);
        if(FAILED(err))
        {
            Log(TEXT("DShowAudioPlugin: No audio pin, result = %lX"), err);
            soundOutputType = 0;
        }
    }

    int soundTimeOffset = data->GetInt(TEXT("soundTimeOffset"));

    //GetOutputList(devicePin, outputList);

    //------------------------------------------------
    // initialize the basic video variables and data


    //------------------------------------------------
    // log audio info
    {
        String strTest = FormattedString(TEXT("    audio device: %s,\r\n    audio device id %s,\r\n"), strAudioDevice.Array(), strAudioID.Array());

        Log(TEXT("------------------------------------------"));
        Log(strTest.Array());
    }

    //------------------------------------------------
    // get audio pin configuration, optionally configure audio pin to 44100

    GUID expectedAudioType;

    if(soundOutputType == 1)
    {
        IAMStreamConfig *audioConfig;
        if(SUCCEEDED(audioPin->QueryInterface(IID_IAMStreamConfig, (void**)&audioConfig)))
        {
            AM_MEDIA_TYPE *audioMediaType;
            if(SUCCEEDED(err = audioConfig->GetFormat(&audioMediaType)))
            {
                SetAudioInfo(audioMediaType, expectedAudioType);
                //Log(TEXT("Fixed size samples: %s\r\n"), (audioMediaType->bFixedSizeSamples) ? "Yes" : "No");
                //Log(TEXT("Temporal Compression: %s\r\n"), (audioMediaType->bTemporalCompression) ? "Yes" : "No");
                //Log(TEXT("cbFormat: %.16X\r\n"), audioMediaType->cbFormat);
                //Log(TEXT("Sample size: %u\r\n"), audioMediaType->lSampleSize);
            }
            else if(err == E_NOTIMPL) //elgato probably
            {
                IEnumMediaTypes *audioMediaTypes;
                if(SUCCEEDED(err = audioPin->EnumMediaTypes(&audioMediaTypes)))
                {
                    ULONG i = 0;
                    if((err = audioMediaTypes->Next(1, &audioMediaType, &i)) == S_OK)
                        SetAudioInfo(audioMediaType, expectedAudioType);
                    else
                    {
                        AppWarning(TEXT("DShowAudioPlugin: audioMediaTypes->Next failed, result = %08lX"), err);
                        soundOutputType = 0;
                    }

                    audioMediaTypes->Release();
                }
                else
                {
                    AppWarning(TEXT("DShowAudioPlugin: audioMediaTypes->Next failed, result = %08lX"), err);
                    soundOutputType = 0;
                }
            }
            else
            {
                AppWarning(TEXT("DShowAudioPlugin: Could not get audio format, result = %08lX"), err);
                soundOutputType = 0;
            }

            audioConfig->Release();
        }
        else {
            soundOutputType = 0;
        }
    }

    //------------------------------------------------
    // add video capture filter if any

    captureFilter = new CaptureFilter(this, MEDIATYPE_Video, expectedMediaType);

    if(FAILED(err = graph->AddFilter(captureFilter, NULL)))
    {
        AppWarning(TEXT("DShowAudioPlugin: Failed to add video capture filter to graph, result = %08lX"), err);
        goto cleanFinish;
    }

    bAddedVideoCapture = true;

    //------------------------------------------------
    // add audio capture filter if any

    if(soundOutputType == 1)
    {
        audioFilter = new CaptureFilter(this, MEDIATYPE_Audio, expectedAudioType);
        if(!audioFilter)
        {
            AppWarning(TEXT("Failed to create audio capture filter"));
            soundOutputType = 0;
        }
    }
    else if(soundOutputType == 2)
    {
        if(FAILED(err = CoCreateInstance(CLSID_DSoundRender, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&audioFilter)))
        {
            AppWarning(TEXT("DShowPlugin: failed to create audio renderer, result = %08lX"), err);
            soundOutputType = 0;
        }

        IBasicAudio *basicAudio;
        if(SUCCEEDED(audioFilter->QueryInterface(IID_IBasicAudio, (void**)&basicAudio)))
        {
            long lVol = long((double(volume)*NEAR_SILENTf)-NEAR_SILENTf);
            if(lVol <= -NEAR_SILENT)
                lVol = -10000;
            basicAudio->put_Volume(lVol);
            basicAudio->Release();
        }
    }

    if(soundOutputType != 0)
    {
        if(FAILED(err = graph->AddFilter(audioFilter, NULL)))
        {
            AppWarning(TEXT("DShowAudioPlugin: Failed to add audio capture filter to graph, result = %08lX"), err);
            goto cleanFinish;
        }

        bAddedAudioCapture = true;
    }

    //------------------------------------------------
    // add primary device filter

    if(FAILED(err = graph->AddFilter(audioDeviceFilter, NULL)))
    {
        AppWarning(TEXT("DShowAudioPlugin: Failed to add audio device filter to graph, result = %08lX"), err);
        goto cleanFinish;
    }

    bAddedDevice = true;

    //------------------------------------------------
    // connect all pins and set up the whole capture thing

    bool bConnected;

    if(soundOutputType != 0)
    {
        bConnected = SUCCEEDED(err = capture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, audioDeviceFilter, NULL, audioFilter));
        if(!bConnected)
        {
            AppWarning(TEXT("DShowAudioPlugin: Failed to connect the audio device pin to the audio capture pin, result = %08lX"), err);
            soundOutputType = 0;
        }
    }

    if(FAILED(err = graph->QueryInterface(IID_IMediaControl, (void**)&control)))
    {
        AppWarning(TEXT("DShowAudioPlugin: Failed to get IMediaControl, result = %08lX"), err);
        goto cleanFinish;
    }

    if(soundOutputType == 1)
    {
        audioOut = new DeviceAudioSource;
        audioOut->Initialize(this);
        API->AddAudioSource(audioOut);

        audioOut->SetAudioOffset(soundTimeOffset);
        audioOut->SetVolume(volume);
    }

    bSucceeded = true;

cleanFinish:
    SafeRelease(config);
    SafeRelease(devicePin);
    SafeRelease(audioPin);

    for(UINT i=0; i<outputList.Num(); i++)
        outputList[i].FreeData();

    if(!bSucceeded)
    {
        bCapturing = false;

        if(bAddedVideoCapture)
            graph->RemoveFilter(captureFilter);
        if(bAddedAudioCapture)
            graph->RemoveFilter(audioFilter);
        if(bAddedDevice) {
            graph->RemoveFilter(audioDeviceFilter);
            graph->RemoveFilter(deviceFilter);
        }

        SafeRelease(audioDeviceFilter);
        SafeRelease(deviceFilter);
        SafeRelease(captureFilter);
        SafeRelease(audioFilter);
        SafeRelease(control);

        if(colorConvertShader)
        {
            delete colorConvertShader;
            colorConvertShader = NULL;
        }

        if(audioOut)
        {
            delete audioOut;
            audioOut = NULL;
        }

        if(lpImageBuffer)
        {
            Free(lpImageBuffer);
            lpImageBuffer = NULL;
        }

        bReadyToDraw = true;
    }
    else
        bReadyToDraw = false;

    if(!renderCX) renderCX = 32;
    if(!renderCY) renderCY = 32;

    //-----------------------------------------------------
    // create the texture regardless, will just show up as red to indicate failure
    BYTE *textureData = (BYTE*)Allocate(renderCX*renderCY*4);

    if(colorType == DeviceOutputType_RGB) //you may be confused, but when directshow outputs RGB, it's actually outputting BGR
    {
        msetd(textureData, 0xFFFF0000, renderCX*renderCY*4);
        texture = CreateTexture(renderCX, renderCY, GS_BGR, textureData, FALSE, FALSE);
    }
    else //if we're working with planar YUV, we can just use regular RGB textures instead
    {
        msetd(textureData, 0xFF0000FF, renderCX*renderCY*4);
        texture = CreateTexture(renderCX, renderCY, GS_RGB, textureData, FALSE, FALSE);
    }

    if(bSucceeded && bUseThreadedConversion)
    {
        if(colorType == DeviceOutputType_I420 || colorType == DeviceOutputType_YV12)
        {
            LPBYTE lpData;
            if(texture->Map(lpData, texturePitch))
                texture->Unmap();
            else
                texturePitch = renderCX*4;

            lpImageBuffer = (LPBYTE)Allocate(texturePitch*renderCY);
        }
    }

    Free(textureData);

    bFiltersLoaded = bSucceeded;
    return bSucceeded;
}
Example #17
static int v4w_open_videodevice(V4wState *s)
{
	// Initialize COM
	CoInitialize(NULL);

	// get a Graph
	HRESULT hr= CoCreateInstance (CLSID_FilterGraph,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IGraphBuilder, //IID_IBaseFilter,
		(void **)&s->m_pGraph);
	if(FAILED(hr))
	{
		return -1;
	}

	// get a CaptureGraphBuilder2
	hr= CoCreateInstance (CLSID_CaptureGraphBuilder2,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_ICaptureGraphBuilder2, //IID_IBaseFilter,
		(void **)&s->m_pBuilder);
	if(FAILED(hr))
	{
		return -2;
	}

	// connect capture graph builder with the graph
	s->m_pBuilder->SetFiltergraph(s->m_pGraph);

	// get mediacontrol so we can start and stop the filter graph
	hr=s->m_pGraph->QueryInterface (IID_IMediaControl, (void **)&s->m_pControl);
	if(FAILED(hr))
	{
		return -3;
	}


	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;

	ULONG nFetched = 0;

	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
	if(FAILED(hr))
	{
		return -4;
	}

	hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (FAILED(hr) || pEnumMoniker == NULL) {
		//printf("no device\n");
		return -5;
	}

	pEnumMoniker->Reset();

	int pos=0;
	while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
	{
		IPropertyBag *pBag;
		hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag );
		if( hr != S_OK )
		{
			pMoniker->Release();
			continue;
		}

		if (s->dev[0]=='\0')
			break;

		VARIANT var;
		VariantInit(&var);
		hr = pBag->Read( L"FriendlyName", &var, NULL ); 
		if( hr != S_OK )
		{
			pMoniker->Release();
			continue;
		}
		//USES_CONVERSION;
		char szName[256];

		WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0);
		VariantClear(&var); 

		if (strcmp(szName, s->dev)==0)
			break;

		pMoniker->Release();
		pBag->Release();
		pMoniker=NULL;
		pBag=NULL;
	}

	if(pMoniker==NULL)
	{
		return -6;
	}

	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
	if(FAILED(hr))
	{
		return -7;
	}

	s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

	pMoniker->Release();
	pEnumMoniker->Release();
	pCreateDevEnum->Release();


	GUID pPinCategory;

	if (try_format(s->m_pDeviceFilter, s->pix_fmt, &pPinCategory)==0)
	{
		/* the requested format is already supported; keep it */
	}
	else if (try_format(s->m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0)
		s->pix_fmt = MS_YUV420P;
	else if (try_format(s->m_pDeviceFilter,MS_YUY2, &pPinCategory)==0)
		s->pix_fmt = MS_YUY2;
	else if (try_format(s->m_pDeviceFilter,MS_YUYV, &pPinCategory)==0)
		s->pix_fmt = MS_YUYV;
	else if (try_format(s->m_pDeviceFilter,MS_UYVY, &pPinCategory)==0)
		s->pix_fmt = MS_UYVY;
	else if (try_format(s->m_pDeviceFilter,MS_RGB24, &pPinCategory)==0)
		s->pix_fmt = MS_RGB24;
	else
	{
		ms_error("Unsupported video pixel format.");
		return -8;
	}

	if (s->pix_fmt == MS_YUV420P)
		ms_message("Driver supports YUV420P, using that format.");
	else if (s->pix_fmt == MS_YUY2)
		ms_message("Driver supports YUY2 (YUYV), using that format.");
	else if (s->pix_fmt == MS_YUYV)
		ms_message("Driver supports YUV422, using that format.");
	else if (s->pix_fmt == MS_UYVY)
		ms_message("Driver supports UYVY, using that format.");
	else if (s->pix_fmt == MS_RGB24)
		ms_message("Driver supports RGB24, using that format.");

	if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H);
	else
	{
		ms_error("No supported size found for format.");
		/* size not supported? */
		return -9;
	}

	// get DXFilter
	s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
	if(s->m_pDXFilter==NULL)
	{
		return -10;
	}
	s->m_pDXFilter->AddRef();

	CMediaType mt;
	mt.SetType(&MEDIATYPE_Video);

	GUID m = MEDIASUBTYPE_RGB24;
	if (s->pix_fmt == MS_YUV420P)
		m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
	else if (s->pix_fmt == MS_YUY2)
		m = MEDIASUBTYPE_YUY2;
	else if (s->pix_fmt == MS_YUYV)
		m = MEDIASUBTYPE_YUYV;
	else if (s->pix_fmt == MS_UYVY)
		m = MEDIASUBTYPE_UYVY;
	else if (s->pix_fmt == MS_RGB24)
		m = MEDIASUBTYPE_RGB24;
	mt.SetSubtype(&m);

	mt.formattype = FORMAT_VideoInfo;
	mt.SetTemporalCompression(FALSE);

	VIDEOINFO *pvi = (VIDEOINFO *)
		mt.AllocFormatBuffer(sizeof(VIDEOINFO));
	if (NULL == pvi)
		return -11;
	ZeroMemory(pvi, sizeof(VIDEOINFO));

	if (s->pix_fmt == MS_YUV420P)
		pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
	else if (s->pix_fmt == MS_YUY2)
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
	else if (s->pix_fmt == MS_YUYV)
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','V');
	else if (s->pix_fmt == MS_UYVY)
		pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
	else if (s->pix_fmt == MS_RGB24)
		pvi->bmiHeader.biCompression = BI_RGB;

	if (s->pix_fmt == MS_YUV420P)
		pvi->bmiHeader.biBitCount = 12;
	else if (s->pix_fmt == MS_YUY2)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_YUYV)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_UYVY)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_RGB24)
		pvi->bmiHeader.biBitCount = 24;

	pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	pvi->bmiHeader.biWidth = s->vsize.width;
	pvi->bmiHeader.biHeight = s->vsize.height;
	pvi->bmiHeader.biPlanes = 1;
	pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
	pvi->bmiHeader.biClrImportant = 0;
	mt.SetSampleSize(pvi->bmiHeader.biSizeImage);

	mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

	hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
	if(FAILED(hr))
	{
		return -12;
	}

	hr = s->m_pDXFilter->SetCallback(Callback); 
	if(FAILED(hr))
	{
		return -13;
	}

	hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
		(LPVOID *)&s->m_pIDXFilter);
	if(FAILED(hr))
	{
		return -14;
	}

	hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
	if(FAILED(hr))
	{
		return -15;
	}


	// get null renderer
	hr=CoCreateInstance (CLSID_NullRenderer,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IBaseFilter,
		(void **)&s->m_pNullRenderer);
	if(FAILED(hr))
	{
		return -16;
	}
	if (s->m_pNullRenderer!=NULL)
	{
		s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
	}

	hr = s->m_pBuilder->RenderStream(&pPinCategory,
		&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
	if (FAILED(hr))
	{
		return -17;
	}

	IAMStreamConfig *pConfig = NULL;
	hr = s->m_pBuilder->FindInterface(
		&pPinCategory, // Preview pin.
		&MEDIATYPE_Video,    // Any media type.
		s->m_pDeviceFilter, // Pointer to the capture filter.
		IID_IAMStreamConfig, (void**)&pConfig); 
	if (pConfig!=NULL)
	{
		AM_MEDIA_TYPE *pType = NULL;
		int iCount, iSize;
		pConfig->GetNumberOfCapabilities(&iCount, &iSize);

		for (int i = 0; i < iCount; i++) {
			VIDEO_STREAM_CONFIG_CAPS scc;
			pType = NULL;
			pConfig->GetStreamCaps(i, &pType, (BYTE *)&scc);

			if (!((pType->formattype == FORMAT_VideoInfo) &&
				(pType->cbFormat >= sizeof(VIDEOINFOHEADER)) &&
				(pType->pbFormat != NULL)))
				continue;

			VIDEOINFOHEADER & videoInfo = *(VIDEOINFOHEADER *)pType->pbFormat;

			if (m != pType->subtype)
				continue;

			if (videoInfo.bmiHeader.biWidth != s->vsize.width)
				continue;

			if (videoInfo.bmiHeader.biHeight != s->vsize.height)
				continue;

			if (videoInfo.bmiHeader.biBitCount != pvi->bmiHeader.biBitCount)
				continue;

			if (videoInfo.bmiHeader.biCompression != pvi->bmiHeader.biCompression)
				continue;

			videoInfo.AvgTimePerFrame = (REFERENCE_TIME)(UNITS / s->fps);
			pConfig->SetFormat(pType);    
		}

		pConfig->GetFormat(&pType);
		if (pType!=NULL)
		{
			VIDEOINFO *pvi;
			pvi = (VIDEOINFO *)pType->pbFormat;
			ms_message("v4w: camera asked fps=%.2f // real fps=%.2f", s->fps, ((float)UNITS / (float)pvi->AvgTimePerFrame));
		}

		pConfig->Release();
	}

	//m_pDXFilter->SetBufferSamples(TRUE);

	s_callback = s;
	hr = s->m_pControl->Run();
	if(FAILED(hr))
	{
		return -18;
	}


	s->rotregvalue=1;
	return 0;
}
Example #18
bool MIPDirectShowCapture::setFormat(int w, int h, real_t rate)
{
	HRESULT hr;

	IAMStreamConfig *pConfig = 0;

	hr = m_pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, 0, m_pCaptDevice, IID_IAMStreamConfig, (void**)&pConfig);
	if (HR_FAILED(hr))
	{
		setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECONFIG);
		return false;
	}

	int count = 0;
	int s = 0;
	
	hr = pConfig->GetNumberOfCapabilities(&count, &s);
	if (HR_FAILED(hr))
	{
		pConfig->Release();
		setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECAPS);
		return false;
	}

	if (s != sizeof(VIDEO_STREAM_CONFIG_CAPS))
	{
		pConfig->Release();
		setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_INVALIDCAPS);
		return false;
	}

	for (int i = 0; i < count; i++)
	{
        VIDEO_STREAM_CONFIG_CAPS caps;
        AM_MEDIA_TYPE *pMediaType;

        hr = pConfig->GetStreamCaps(i, &pMediaType, (BYTE*)&caps);
        if (HR_SUCCEEDED(hr))
        {
			if ((pMediaType->majortype == MEDIATYPE_Video) &&
				(pMediaType->subtype == m_selectedGuid) &&
				(pMediaType->formattype == FORMAT_VideoInfo) &&
				(pMediaType->cbFormat >= sizeof (VIDEOINFOHEADER)) &&
				(pMediaType->pbFormat != 0))
			{
				VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pMediaType->pbFormat;
				
				pVih->bmiHeader.biWidth = w;
				pVih->bmiHeader.biHeight = h;
				pVih->bmiHeader.biSizeImage = DIBSIZE(pVih->bmiHeader);
				pVih->AvgTimePerFrame = (REFERENCE_TIME)(10000000.0/rate);

				hr = pConfig->SetFormat(pMediaType);
				if (HR_SUCCEEDED(hr))
				{
					CoTaskMemFree(pMediaType->pbFormat);
					CoTaskMemFree(pMediaType);  // the struct itself is also CoTaskMemAlloc'd
					pConfig->Release();
					return true;
				}
			}

			if (pMediaType->pbFormat != 0)
				CoTaskMemFree(pMediaType->pbFormat);
			CoTaskMemFree(pMediaType);  // the struct itself is also CoTaskMemAlloc'd
		}
	}

	pConfig->Release();
	setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTSETCAPS);
	return false;
}
Example #19
bool DeviceSource::LoadFilters()
{
    if(bCapturing || bFiltersLoaded)
        return false;

    bool bSucceeded = false;

    List<MediaOutputInfo> outputList;
    IAMStreamConfig *config = NULL;
    bool bAddedVideoCapture = false, bAddedAudioCapture = false, bAddedDevice = false;
    GUID expectedMediaType;
    IPin *devicePin = NULL, *audioPin = NULL;
    HRESULT err;
    String strShader;

    bUseThreadedConversion = API->UseMultithreadedOptimizations() && (OSGetTotalCores() > 1);

    //------------------------------------------------
    // basic initialization vars

    bool bForceCustomAudio = data->GetInt(TEXT("forceCustomAudioDevice")) != 0;

    bUseCustomResolution = data->GetInt(TEXT("customResolution"));
    strDevice = data->GetString(TEXT("device"));
    strDeviceName = data->GetString(TEXT("deviceName"));
    strDeviceID = data->GetString(TEXT("deviceID"));
    strAudioDevice = data->GetString(TEXT("audioDevice"));
    strAudioName = data->GetString(TEXT("audioDeviceName"));
    strAudioID = data->GetString(TEXT("audioDeviceID"));

    bFlipVertical = data->GetInt(TEXT("flipImage")) != 0;
    bFlipHorizontal = data->GetInt(TEXT("flipImageHorizontal")) != 0;
    bUsePointFiltering = data->GetInt(TEXT("usePointFiltering")) != 0;

    opacity = data->GetInt(TEXT("opacity"), 100);

    float volume = data->GetFloat(TEXT("volume"), 1.0f);

    bUseBuffering = data->GetInt(TEXT("useBuffering")) != 0;
    bufferTime = data->GetInt(TEXT("bufferTime"))*10000;

    //------------------------------------------------
    // chroma key stuff

    bUseChromaKey = data->GetInt(TEXT("useChromaKey")) != 0;
    keyColor = data->GetInt(TEXT("keyColor"), 0xFFFFFFFF);
    keySimilarity = data->GetInt(TEXT("keySimilarity"));
    keyBlend = data->GetInt(TEXT("keyBlend"), 80);
    keySpillReduction = data->GetInt(TEXT("keySpillReduction"), 50);

    if(keyBaseColor.x < keyBaseColor.y && keyBaseColor.x < keyBaseColor.z)
        keyBaseColor -= keyBaseColor.x;
    else if(keyBaseColor.y < keyBaseColor.x && keyBaseColor.y < keyBaseColor.z)
        keyBaseColor -= keyBaseColor.y;
    else if(keyBaseColor.z < keyBaseColor.x && keyBaseColor.z < keyBaseColor.y)
        keyBaseColor -= keyBaseColor.z;

    //------------------------------------------------
    // get the device filter and pins

    if(strDeviceName.IsValid())
        deviceFilter = GetDeviceByValue(CLSID_VideoInputDeviceCategory, L"FriendlyName", strDeviceName, L"DevicePath", strDeviceID);
    else
    {
        if(!strDevice.IsValid())
        {
            AppWarning(TEXT("DShowPlugin: Invalid device specified"));
            goto cleanFinish;
        }

        deviceFilter = GetDeviceByValue(CLSID_VideoInputDeviceCategory, L"FriendlyName", strDevice);
    }
    
    if(!deviceFilter)
    {
        AppWarning(TEXT("DShowPlugin: Could not create device filter"));
        goto cleanFinish;
    }

    devicePin = GetOutputPin(deviceFilter, &MEDIATYPE_Video);
    if(!devicePin)
    {
        AppWarning(TEXT("DShowPlugin: Could not get device video pin"));
        goto cleanFinish;
    }

    soundOutputType = data->GetInt(TEXT("soundOutputType")); //0 is for backward-compatibility
    if (strAudioID.CompareI(TEXT("Disabled")))
        soundOutputType = 0;

    if(soundOutputType != 0)
    {
        if(!bForceCustomAudio)
        {
            err = capture->FindPin(deviceFilter, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, FALSE, 0, &audioPin);
            bDeviceHasAudio = SUCCEEDED(err);
        }
        else
            bDeviceHasAudio = false;

        if(!bDeviceHasAudio)
        {
            if(strDeviceName.IsValid())
            {
                audioDeviceFilter = GetDeviceByValue(CLSID_AudioInputDeviceCategory, L"FriendlyName", strAudioName, L"DevicePath", strAudioID);
                if(!audioDeviceFilter)
                    AppWarning(TEXT("DShowPlugin: Invalid audio device: name '%s', path '%s'"), strAudioName.Array(), strAudioID.Array());
            }
            else if(strAudioDevice.IsValid())
            {
                audioDeviceFilter = GetDeviceByValue(CLSID_AudioInputDeviceCategory, L"FriendlyName", strAudioDevice);
                if(!audioDeviceFilter)
                    AppWarning(TEXT("DShowPlugin: Could not create audio device filter"));
            }

            if(audioDeviceFilter)
                err = capture->FindPin(audioDeviceFilter, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, FALSE, 0, &audioPin);
            else
                err = E_FAIL;
        }

        if(FAILED(err) || !audioPin)
        {
            Log(TEXT("DShowPlugin: No audio pin, result = %lX"), err);
            soundOutputType = 0;
        }
    }
    else
        bDeviceHasAudio = bForceCustomAudio = false;

    int soundTimeOffset = data->GetInt(TEXT("soundTimeOffset"));

    GetOutputList(devicePin, outputList);

    //------------------------------------------------
    // initialize the basic video variables and data

    renderCX = renderCY = 0;
    frameInterval = 0;

    if(bUseCustomResolution)
    {
        renderCX = data->GetInt(TEXT("resolutionWidth"));
        renderCY = data->GetInt(TEXT("resolutionHeight"));
        frameInterval = data->GetInt(TEXT("frameInterval"));
    }
    else
    {
        SIZE size;
        if (!GetClosestResolution(outputList, size, frameInterval))
        {
            AppWarning(TEXT("DShowPlugin: Unable to find appropriate resolution"));
            renderCX = renderCY = 64;
            goto cleanFinish;
        }

        renderCX = size.cx;
        renderCY = size.cy;
    }

    if(!renderCX || !renderCY || !frameInterval)
    {
        AppWarning(TEXT("DShowPlugin: Invalid size/fps specified"));
        goto cleanFinish;
    }

    preferredOutputType = (data->GetInt(TEXT("usePreferredType")) != 0) ? data->GetInt(TEXT("preferredType")) : -1;

    int numThreads = MAX(OSGetTotalCores()-2, 1);
    for(int i=0; i<numThreads; i++)
    {
        convertData[i].width  = renderCX;
        convertData[i].height = renderCY;
        convertData[i].sample = NULL;
        convertData[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertData[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);

        if(i == 0)
            convertData[i].startY = 0;
        else
            convertData[i].startY = convertData[i-1].endY;

        if(i == (numThreads-1))
            convertData[i].endY = renderCY;
        else
            convertData[i].endY = ((renderCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }

    bFirstFrame = true;

    //------------------------------------------------
    // get the closest media output for the settings used

    MediaOutputInfo *bestOutput = GetBestMediaOutput(outputList, renderCX, renderCY, preferredOutputType, frameInterval);
    if(!bestOutput)
    {
        AppWarning(TEXT("DShowPlugin: Could not find appropriate resolution to create device image source"));
        goto cleanFinish;
    }

    //------------------------------------------------
    // log video info

    {
        String strTest = FormattedString(TEXT("    device: %s,\r\n    device id %s,\r\n    chosen type: %s, usingFourCC: %s, res: %ux%u - %ux%u, frameIntervals: %llu-%llu"),
            strDevice.Array(), strDeviceID.Array(),
            EnumToName[(int)bestOutput->videoType],
            bestOutput->bUsingFourCC ? TEXT("true") : TEXT("false"),
            bestOutput->minCX, bestOutput->minCY, bestOutput->maxCX, bestOutput->maxCY,
            bestOutput->minFrameInterval, bestOutput->maxFrameInterval);

        BITMAPINFOHEADER *bmiHeader = GetVideoBMIHeader(bestOutput->mediaType);

        char fourcc[5];
        mcpy(fourcc, &bmiHeader->biCompression, 4);
        fourcc[4] = 0;

        if(bmiHeader->biCompression > 1000)
            strTest << FormattedString(TEXT(", fourCC: '%S'\r\n"), fourcc);
        else
            strTest << FormattedString(TEXT(", fourCC: %08lX\r\n"), bmiHeader->biCompression);

        if(!bDeviceHasAudio) strTest << FormattedString(TEXT("    audio device: %s,\r\n    audio device id %s,\r\n"), strAudioDevice.Array(), strAudioID.Array());

        Log(TEXT("------------------------------------------"));
        Log(strTest.Array());
    }

    //------------------------------------------------
    // set up shaders and video output data

    expectedMediaType = bestOutput->mediaType->subtype;

    colorType = DeviceOutputType_RGB;
    if(bestOutput->videoType == VideoOutputType_I420)
        colorType = DeviceOutputType_I420;
    else if(bestOutput->videoType == VideoOutputType_YV12)
        colorType = DeviceOutputType_YV12;
    else if(bestOutput->videoType == VideoOutputType_YVYU)
        colorType = DeviceOutputType_YVYU;
    else if(bestOutput->videoType == VideoOutputType_YUY2)
        colorType = DeviceOutputType_YUY2;
    else if(bestOutput->videoType == VideoOutputType_UYVY)
        colorType = DeviceOutputType_UYVY;
    else if(bestOutput->videoType == VideoOutputType_HDYC)
        colorType = DeviceOutputType_HDYC;
    else
    {
        colorType = DeviceOutputType_RGB;
        expectedMediaType = MEDIASUBTYPE_RGB32;
    }

    strShader = ChooseShader();
    if(strShader.IsValid())
        colorConvertShader = CreatePixelShaderFromFile(strShader);

    if(colorType != DeviceOutputType_RGB && !colorConvertShader)
    {
        AppWarning(TEXT("DShowPlugin: Could not create color space conversion pixel shader"));
        goto cleanFinish;
    }

    if(colorType == DeviceOutputType_YV12 || colorType == DeviceOutputType_I420)
    {
        for(int i=0; i<numThreads; i++)
            hConvertThreads[i] = OSCreateThread((XTHREAD)PackPlanarThread, convertData+i);
    }

    //------------------------------------------------
    // set chroma details

    keyBaseColor = Color4().MakeFromRGBA(keyColor);
    Matrix4x4TransformVect(keyChroma, (colorType == DeviceOutputType_HDYC) ? (float*)yuv709Mat : (float*)yuvMat, keyBaseColor);
    keyChroma *= 2.0f;

    //------------------------------------------------
    // configure video pin

    if(FAILED(err = devicePin->QueryInterface(IID_IAMStreamConfig, (void**)&config)))
    {
        AppWarning(TEXT("DShowPlugin: Could not get IAMStreamConfig for device pin, result = %08lX"), err);
        goto cleanFinish;
    }

    AM_MEDIA_TYPE outputMediaType;
    CopyMediaType(&outputMediaType, bestOutput->mediaType);

    VIDEOINFOHEADER *vih  = reinterpret_cast<VIDEOINFOHEADER*>(outputMediaType.pbFormat);
    BITMAPINFOHEADER *bmi = GetVideoBMIHeader(&outputMediaType);
    vih->AvgTimePerFrame  = frameInterval;
    bmi->biWidth          = renderCX;
    bmi->biHeight         = renderCY;
    bmi->biSizeImage      = renderCX*renderCY*(bmi->biBitCount>>3);

    if(FAILED(err = config->SetFormat(&outputMediaType)))
    {
        if(err != E_NOTIMPL)
        {
            AppWarning(TEXT("DShowPlugin: SetFormat on device pin failed, result = %08lX"), err);
            goto cleanFinish;
        }
    }

    FreeMediaType(outputMediaType);

    //------------------------------------------------
    // get audio pin configuration, optionally configure audio pin to 44100

    GUID expectedAudioType;

    if(soundOutputType == 1)
    {
        IAMStreamConfig *audioConfig;
        if(SUCCEEDED(audioPin->QueryInterface(IID_IAMStreamConfig, (void**)&audioConfig)))
        {
            AM_MEDIA_TYPE *audioMediaType;
            if(SUCCEEDED(err = audioConfig->GetFormat(&audioMediaType)))
            {
                SetAudioInfo(audioMediaType, expectedAudioType);
            }
            else if(err == E_NOTIMPL) //elgato probably
            {
                IEnumMediaTypes *audioMediaTypes;
                if(SUCCEEDED(err = audioPin->EnumMediaTypes(&audioMediaTypes)))
                {
                    ULONG i = 0;
                    if((err = audioMediaTypes->Next(1, &audioMediaType, &i)) == S_OK)
                        SetAudioInfo(audioMediaType, expectedAudioType);
                    else
                    {
                        AppWarning(TEXT("DShowPlugin: audioMediaTypes->Next failed, result = %08lX"), err);
                        soundOutputType = 0;
                    }

                    audioMediaTypes->Release();
                }
                else
                {
                    AppWarning(TEXT("DShowPlugin: audioMediaTypes->Next failed, result = %08lX"), err);
                    soundOutputType = 0;
                }
            }
            else
            {
                AppWarning(TEXT("DShowPlugin: Could not get audio format, result = %08lX"), err);
                soundOutputType = 0;
            }

            audioConfig->Release();
        }
        else {
            soundOutputType = 0;
        }
    }

    //------------------------------------------------
    // add video capture filter if any

    captureFilter = new CaptureFilter(this, MEDIATYPE_Video, expectedMediaType);

    if(FAILED(err = graph->AddFilter(captureFilter, NULL)))
    {
        AppWarning(TEXT("DShowPlugin: Failed to add video capture filter to graph, result = %08lX"), err);
        goto cleanFinish;
    }

    bAddedVideoCapture = true;

    //------------------------------------------------
    // add audio capture filter if any

    if(soundOutputType == 1)
    {
        audioFilter = new CaptureFilter(this, MEDIATYPE_Audio, expectedAudioType);
        if(!audioFilter)
        {
            AppWarning(TEXT("Failed to create audio capture filter"));
            soundOutputType = 0;
        }
    }
    else if(soundOutputType == 2)
    {
        if(FAILED(err = CoCreateInstance(CLSID_AudioRender, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&audioFilter)))
        {
            AppWarning(TEXT("DShowPlugin: failed to create audio renderer, result = %08lX"), err);
            soundOutputType = 0;
        }

        IBasicAudio *basicAudio;
        if(audioFilter && SUCCEEDED(audioFilter->QueryInterface(IID_IBasicAudio, (void**)&basicAudio)))
        {
            long lVol = long((double(volume)*NEAR_SILENTf)-NEAR_SILENTf);
            if(lVol <= -NEAR_SILENT)
                lVol = -10000;
            basicAudio->put_Volume(lVol);
            basicAudio->Release();
        }
    }

    if(soundOutputType != 0)
    {
        if(FAILED(err = graph->AddFilter(audioFilter, NULL)))
            AppWarning(TEXT("DShowPlugin: Failed to add audio capture filter to graph, result = %08lX"), err);

        bAddedAudioCapture = true;
    }

    //------------------------------------------------
    // add primary device filter

    if(FAILED(err = graph->AddFilter(deviceFilter, NULL)))
    {
        AppWarning(TEXT("DShowPlugin: Failed to add device filter to graph, result = %08lX"), err);
        goto cleanFinish;
    }

    if(soundOutputType != 0 && !bDeviceHasAudio)
    {
        if(FAILED(err = graph->AddFilter(audioDeviceFilter, NULL)))
            AppWarning(TEXT("DShowPlugin: Failed to add audio device filter to graph, result = %08lX"), err);
    }

    bAddedDevice = true;

    //------------------------------------------------
    // connect all pins and set up the whole capture thing

    //THANK THE NINE DIVINES I FINALLY GOT IT WORKING
    bool bConnected = SUCCEEDED(err = capture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, deviceFilter, NULL, captureFilter));
    if(!bConnected)
    {
        if(FAILED(err = graph->Connect(devicePin, captureFilter->GetCapturePin())))
        {
            AppWarning(TEXT("DShowPlugin: Failed to connect the video device pin to the video capture pin, result = %08lX"), err);
            goto cleanFinish;
        }
    }

    if(soundOutputType != 0)
    {
        if(!bDeviceHasAudio)
            bConnected = SUCCEEDED(err = capture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, audioDeviceFilter, NULL, audioFilter));
        else
            bConnected = SUCCEEDED(err = capture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, deviceFilter, NULL, audioFilter));

        if(!bConnected)
        {
            AppWarning(TEXT("DShowPlugin: Failed to connect the audio device pin to the audio capture pin, result = %08lX"), err);
            soundOutputType = 0;
        }
    }

    if(FAILED(err = graph->QueryInterface(IID_IMediaControl, (void**)&control)))
    {
        AppWarning(TEXT("DShowPlugin: Failed to get IMediaControl, result = %08lX"), err);
        goto cleanFinish;
    }

    if (bUseBuffering) {
        if (!(hStopSampleEvent = CreateEvent(NULL, FALSE, FALSE, NULL))) {
            AppWarning(TEXT("DShowPlugin: Failed to create stop event"), err);
            goto cleanFinish;
        }

        if (!(hSampleThread = OSCreateThread((XTHREAD)SampleThread, this))) {
            AppWarning(TEXT("DShowPlugin: Failed to create sample thread"), err);
            goto cleanFinish;
        }
    }

    if(soundOutputType == 1)
    {
        audioOut = new DeviceAudioSource;
        audioOut->Initialize(this);
        API->AddAudioSource(audioOut);

        audioOut->SetAudioOffset(soundTimeOffset);
        audioOut->SetVolume(volume);
    }

    bSucceeded = true;

cleanFinish:
    SafeRelease(config);
    SafeRelease(devicePin);
    SafeRelease(audioPin);

    for(UINT i=0; i<outputList.Num(); i++)
        outputList[i].FreeData();

    if(!bSucceeded)
    {
        bCapturing = false;

        if(bAddedVideoCapture)
            graph->RemoveFilter(captureFilter);
        if(bAddedAudioCapture)
            graph->RemoveFilter(audioFilter);

        if(bAddedDevice)
        {
            if(!bDeviceHasAudio && audioDeviceFilter)
                graph->RemoveFilter(audioDeviceFilter);
            graph->RemoveFilter(deviceFilter);
        }

        SafeRelease(audioDeviceFilter);
        SafeRelease(deviceFilter);
        SafeRelease(captureFilter);
        SafeRelease(audioFilter);
        SafeRelease(control);

        if (hSampleThread) {
            SetEvent(hStopSampleEvent);
            WaitForSingleObject(hSampleThread, INFINITE);
            CloseHandle(hSampleThread);
            hSampleThread = NULL;
        }

        if (hStopSampleEvent) {
            CloseHandle(hStopSampleEvent);
            hStopSampleEvent = NULL;
        }

        if(colorConvertShader)
        {
            delete colorConvertShader;
            colorConvertShader = NULL;
        }

        if(audioOut)
        {
            delete audioOut;
            audioOut = NULL;
        }

        if(lpImageBuffer)
        {
            Free(lpImageBuffer);
            lpImageBuffer = NULL;
        }

        bReadyToDraw = true;
    }
    else
        bReadyToDraw = false;

    if(!renderCX) renderCX = 32;
    if(!renderCY) renderCY = 32;

    //-----------------------------------------------------
    // create the texture regardless, will just show up as red to indicate failure
    BYTE *textureData = (BYTE*)Allocate(renderCX*renderCY*4);

    if(colorType == DeviceOutputType_RGB) //you may be confused, but when directshow outputs RGB, it's actually outputting BGR
    {
        msetd(textureData, 0xFFFF0000, renderCX*renderCY*4);
        texture = CreateTexture(renderCX, renderCY, GS_BGR, textureData, FALSE, FALSE);
    }
    else //if we're working with planar YUV, we can just use regular RGB textures instead
    {
        msetd(textureData, 0xFF0000FF, renderCX*renderCY*4);
        texture = CreateTexture(renderCX, renderCY, GS_RGB, textureData, FALSE, FALSE);
    }

    if(bSucceeded && bUseThreadedConversion)
    {
        if(colorType == DeviceOutputType_I420 || colorType == DeviceOutputType_YV12)
        {
            LPBYTE lpData;
            if(texture->Map(lpData, texturePitch))
                texture->Unmap();
            else
                texturePitch = renderCX*4;

            lpImageBuffer = (LPBYTE)Allocate(texturePitch*renderCY);
        }
    }

    Free(textureData);

    bFiltersLoaded = bSucceeded;
    return bSucceeded;
}
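A note on the time units used in the example above: DirectShow expresses frame rate as a frame interval in 100-nanosecond REFERENCE_TIME ticks, which is why frameInterval is written straight into VIDEOINFOHEADER::AvgTimePerFrame. A minimal conversion sketch (editor's addition, not part of the original source):

#include <cstdint>

// 100 ns ticks per second (the REFERENCE_TIME scale).
static const int64_t DSHOW_UNITS = 10000000LL;

// 30 fps -> 333333 ticks, 60 fps -> 166666 ticks (truncated).
static int64_t IntervalFromFPS(double fps)       { return (int64_t)(DSHOW_UNITS / fps); }
static double  FPSFromInterval(int64_t interval) { return (double)DSHOW_UNITS / (double)interval; }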
예제 #20
0
HRESULT DSCaptureDevice::setFormat(const DSFormat& format)
{
    HRESULT hr;
    IAMStreamConfig* streamConfig = NULL;

    /* get the right interface to change capture settings */
    hr
        = m_captureGraphBuilder->FindInterface(
                &PIN_CATEGORY_CAPTURE,
                &MEDIATYPE_Video,
                m_srcFilter,
                IID_IAMStreamConfig,
                (void**) &streamConfig);
    if(SUCCEEDED(hr))
    {
        int nb = 0;
        int size = 0;
        AM_MEDIA_TYPE* mediaType = NULL;
        size_t bitCount = 0;

        hr = streamConfig->GetNumberOfCapabilities(&nb, &size);
        if (SUCCEEDED(hr) && nb)
        {
            BYTE* scc = new BYTE[size];

            if (scc)
            {
                DWORD pixfmt = format.pixelFormat;

                for (int i = 0 ; i < nb ; i++)
                {
                    AM_MEDIA_TYPE* mt;

                    if (streamConfig->GetStreamCaps(i, &mt, scc) == S_OK)
                    {
                        VIDEOINFOHEADER* hdr = (VIDEOINFOHEADER*) mt->pbFormat;

                        if (hdr
                                && (mt->subtype.Data1 == pixfmt)
                                && ((long) format.height
                                        == hdr->bmiHeader.biHeight)
                                && ((long) format.width
                                        == hdr->bmiHeader.biWidth))
                        {
                            mediaType = mt;
                            if ((pixfmt == MEDIASUBTYPE_ARGB32.Data1)
                                    || (pixfmt == MEDIASUBTYPE_RGB32.Data1))
                                bitCount = 32;
                            else if (pixfmt == MEDIASUBTYPE_RGB24.Data1)
                                bitCount = 24;
                            else
                                bitCount = hdr->bmiHeader.biBitCount;
                            break;
                        }
                        else
                            _DeleteMediaType(mt);
                    }
                }

                delete[] scc;
            }
            else
                hr = E_OUTOFMEMORY;
        }

        if (mediaType)
        {
            hr = streamConfig->SetFormat(mediaType);
            if (SUCCEEDED(hr))
            {
                m_bitPerPixel = bitCount;
                m_format = format;
                m_format.mediaType = mediaType->subtype;
            }
            _DeleteMediaType(mediaType);
        }
        else if (SUCCEEDED(hr))
            hr = E_FAIL;

        streamConfig->Release();
    }

    return hr;
}
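The _DeleteMediaType helper called above is not defined in this example; a minimal sketch, assuming it mirrors the DirectShow SDK's DeleteMediaType/FreeMediaType pair:

// Sketch of the helper assumed above: free the format block and any attached
// IUnknown, then free the AM_MEDIA_TYPE itself, which GetStreamCaps allocated
// with CoTaskMemAlloc.
static void _DeleteMediaType(AM_MEDIA_TYPE* pmt)
{
    if (pmt == NULL)
        return;
    if (pmt->cbFormat != 0 && pmt->pbFormat != NULL)
    {
        CoTaskMemFree(pmt->pbFormat);
        pmt->cbFormat = 0;
        pmt->pbFormat = NULL;
    }
    if (pmt->pUnk != NULL)
    {
        pmt->pUnk->Release();
        pmt->pUnk = NULL;
    }
    CoTaskMemFree(pmt);
}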
예제 #21
0
int 
recChannel_t::source_format(char* newFormat)
{
     __CONTEXT("recChannel_t::source_format");

	int hr = 0;
    bool formatFound = false;

	IAMStreamConfig *pConfig = NULL;
	AM_MEDIA_TYPE * format = NULL;
	
	pControl->StopWhenReady();

    ql_t<AM_MEDIA_TYPE *> auxFormats = camInfo->getFormatList();
	
    for(int i = 0; i<auxFormats.len() ; i++)
    {
		AM_MEDIA_TYPE format = *(auxFormats.nth(i));
		IAMStreamConfig *pConfig = NULL;
		IVideoWindow * pWindow = NULL;
		
		char subtypeName [100];
		memset(subtypeName,0,100);
		GetGUIDString(subtypeName,&format.subtype);	
	
		VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*) format.pbFormat;
		
		if((pVih==NULL && strcmp(newFormat,sourceFormat)==0 )||
		   (pVih!=NULL &&
		    pVih->bmiHeader.biHeight == capInfo.heigth &&
		    pVih->bmiHeader.biWidth == capInfo.width &&
		    strcmp(subtypeName,newFormat)==0) || 
			camInfo->getKind() == SHARED
			)
		{
		
			if (strcmp(sourceFormat,newFormat))
			{
				memset(sourceFormat,0,100);
				strcpy(sourceFormat,newFormat);
			}
			
			if (!hr && (camInfo->getKind() == CAM || camInfo->getKind() == SHARED)){
				camInfo->output->Disconnect();
				hr = camInfo->output->QueryInterface(IID_IAMStreamConfig, (void**)&pConfig);
			    //pVih->AvgTimePerFrame = 666666;//
				if (pVih)
					pVih->AvgTimePerFrame = 333333/(frameRate);
				if (pConfig)
				{
					hr = pConfig->SetFormat(&format);
					actualFormat = format;
					pConfig->Release();
				}
			}
	        formatFound = true;
			break;
		}
	    
    }
	
    if (!formatFound)
    {
        IAMStreamConfig *pConfig = NULL;
		if (camInfo->getKind() == CAM || 
            camInfo->getKind() == SHARED)
        {
            VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*) actualFormat.pbFormat;
            camInfo->output->Disconnect();
            hr = camInfo->output->QueryInterface(IID_IAMStreamConfig, (void**)&pConfig);
            //pVih->AvgTimePerFrame = 666666;
            if (pConfig)
            {
                int hr = pConfig->SetFormat(&actualFormat);
                pConfig->Release();
            }
        }
    }
        
	    NOTIFY("reChannel_t"
			   "\r\n=========================================\r\n"
               "Channel %d : Source Description...\r\n"
               "- sourceName: %s\r\n"
               "- capture Size: %dx%d\r\n"
               "- supported Formats: %s\r\n"
               "- Window Info: (%d,%d,%d,%d)\r\n"
               "- Title: %s\r\n"
               "=========================================\r\n",
               getId(),
               camInfo->getCamName(),
               capInfo.width,
               capInfo.heigth,
               camInfo->getSupportedFormats(),
               windowInfo.top,
               windowInfo.left,
               windowInfo.width,
               windowInfo.heigth,
               title);

	remap();

	if (mapping){
		map();
	}
	return 0;
		
}
예제 #22
0
BOOL CBoxView::APlaying()
{
	if (m_pGraph == NULL)
	{
		return FALSE;
	}

	CComPtr<IBaseFilter> pAudioInputFilter;

	HRESULT hr = S_OK;

	//find the audio input filter and add it to the graph
	hr = FindInputFilters((void**)&pAudioInputFilter, CLSID_AudioInputDeviceCategory);
	if (NULL == pAudioInputFilter)
	{
		TRACE(L"[SVC]  CBoxView:: Could not create the Filter AudioInputFilter");
		return FALSE;
	}

	CComPtr<IPin> pinIn;
	CComPtr<IAMAudioInputMixer> pPinMixer;
	GetUnconnectedPin(pAudioInputFilter, PINDIR_INPUT, &pinIn);
	hr = pinIn->QueryInterface(IID_IAMAudioInputMixer, (void **)&pPinMixer);
	if (SUCCEEDED(hr))
	{
		hr = pPinMixer->put_Enable(TRUE);
	}

	hr = m_pGraph->AddFilter(pAudioInputFilter, L"ACapture");
	//create the audio renderer filter and add it to the graph
	CComPtr <IBaseFilter> pAudioRenderer = NULL;
	hr = CoCreateInstance(CLSID_AudioRender, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void **)&pAudioRenderer);
	if (pAudioRenderer == NULL)
	{
		TRACE(L"[SVC]  CBoxView:: Could not create the Filter AudioRenderer");
		return FALSE;
	}
	hr = m_pGraph->AddFilter(pAudioRenderer, L"Audio Renderer");

	//get the microphone output pin
	CComPtr<IPin> pAudioOutput;
	CComPtr<IEnumPins> pEnum;
	pAudioInputFilter->EnumPins(&pEnum);
	hr = pEnum->Reset();
	hr = pEnum->Next(1, &pAudioOutput, NULL);

	//configure the microphone output pin
	IAMStreamConfig *pCfg = NULL;
	hr = pAudioOutput->QueryInterface(IID_IAMStreamConfig, (void **)&pCfg);
	// Read current media type/format   
	AM_MEDIA_TYPE *pmt = NULL;
	hr = pCfg->GetFormat(&pmt);
	WAVEFORMATEX *pWF = (WAVEFORMATEX *)pmt->pbFormat;
	// Release interfaces   
	pCfg->Release();

	//set up buffering on the microphone output pin
	IAMBufferNegotiation *pNeg;
	pAudioOutput->QueryInterface(IID_IAMBufferNegotiation, (void **)&pNeg);
	ALLOCATOR_PROPERTIES prop = { 0 };
	prop.cbBuffer = pWF->nAvgBytesPerSec * 50 / 1000;
	prop.cBuffers = -1;
	prop.cbAlign = -1;
	prop.cbPrefix = -1;
	hr = pNeg->SuggestAllocatorProperties(&prop);
	pNeg->Release();

#if 0
	//method 1:Render RUN
	hr = m_pCGB->RenderStream(&PIN_CATEGORY_PREVIEW,
		&MEDIATYPE_Audio,
		pAudioInputFilter,
		NULL,
		NULL);
	
#else

	//method 2:Connect RUN
	hr = ConnectFilters(m_pGraph, pAudioOutput, pAudioRenderer);

#endif

	return SUCCEEDED(hr);
}
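GetUnconnectedPin and ConnectFilters used above are the usual MSDN graph helpers and are not defined in this example; a sketch of GetUnconnectedPin, following the documented version:

// Find the first pin on pFilter that matches the requested direction and is
// not yet connected. The caller receives a referenced IPin in *ppPin.
static HRESULT GetUnconnectedPin(IBaseFilter* pFilter, PIN_DIRECTION PinDir, IPin** ppPin)
{
	*ppPin = NULL;
	IEnumPins* pEnum = NULL;
	IPin* pPin = NULL;
	HRESULT hr = pFilter->EnumPins(&pEnum);
	if (FAILED(hr))
		return hr;
	while (pEnum->Next(1, &pPin, NULL) == S_OK)
	{
		PIN_DIRECTION ThisPinDir;
		pPin->QueryDirection(&ThisPinDir);
		if (ThisPinDir == PinDir)
		{
			IPin* pTmp = NULL;
			if (SUCCEEDED(pPin->ConnectedTo(&pTmp)))
			{
				pTmp->Release();   // already connected; keep looking
			}
			else                   // unconnected: this is the pin we want
			{
				pEnum->Release();
				*ppPin = pPin;
				return S_OK;
			}
		}
		pPin->Release();
	}
	pEnum->Release();
	return E_FAIL;                 // no matching pin found
}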
예제 #23
0
static GstCaps *
gst_dshowvideosrc_getcaps_from_streamcaps (GstDshowVideoSrc * src, IPin * pin)
{
    GstCaps *caps = NULL;
    HRESULT hres = S_OK;
    int icount = 0;
    int isize = 0;
    VIDEO_STREAM_CONFIG_CAPS vscc;
    int i = 0;
    IAMStreamConfig *streamcaps = NULL;

    hres = pin->QueryInterface (IID_IAMStreamConfig, (LPVOID *) & streamcaps);
    if (FAILED (hres)) {
        GST_ERROR ("Failed to retrieve IAMStreamConfig (error=0x%x)", hres);
        return NULL;
    }

    streamcaps->GetNumberOfCapabilities (&icount, &isize);

    if (isize != sizeof (vscc)) {
        streamcaps->Release ();
        return NULL;
    }

    caps = gst_caps_new_empty ();

    for (i = 0; i < icount; i++) {

        GstCapturePinMediaType *pin_mediatype =
            gst_dshow_new_pin_mediatype_from_streamcaps (pin, i, streamcaps);

        if (pin_mediatype) {

            GstCaps *mediacaps = NULL;
            GstVideoFormat video_format =
                gst_dshow_guid_to_gst_video_format (pin_mediatype->mediatype);

            if (video_format != GST_VIDEO_FORMAT_UNKNOWN) {
                mediacaps = gst_dshow_new_video_caps (video_format, NULL,
                                                      pin_mediatype);

            } else if (gst_dshow_check_mediatype (pin_mediatype->mediatype,
                                                  MEDIASUBTYPE_dvsd, FORMAT_VideoInfo)) {
                mediacaps =
                    gst_dshow_new_video_caps (GST_VIDEO_FORMAT_UNKNOWN,
                                              "video/x-dv, systemstream=FALSE", pin_mediatype);

            } else if (gst_dshow_check_mediatype (pin_mediatype->mediatype,
                                                  MEDIASUBTYPE_dvsd, FORMAT_DvInfo)) {
                mediacaps =
                    gst_dshow_new_video_caps (GST_VIDEO_FORMAT_UNKNOWN,
                                              "video/x-dv, systemstream=TRUE", pin_mediatype);

                pin_mediatype->granularityWidth = 0;
                pin_mediatype->granularityHeight = 0;
            } else if(gst_dshow_check_mediatype(pin_mediatype->mediatype,
                                                MEDIASUBTYPE_MJPG, FORMAT_VideoInfo)) {
                mediacaps = gst_dshow_new_video_caps(GST_VIDEO_FORMAT_UNKNOWN,
                                                     "image/jpeg", pin_mediatype);
            }


            if (mediacaps) {
                src->pins_mediatypes =
                    g_list_append (src->pins_mediatypes, pin_mediatype);
                gst_caps_append (caps, mediacaps);
            } else {
                /* failed to convert dshow caps */
                gst_dshow_free_pin_mediatype (pin_mediatype);
            }
        }
    }

    streamcaps->Release ();

    if (caps && gst_caps_is_empty (caps)) {
        gst_caps_unref (caps);
        caps = NULL;
    }

    return caps;
}
예제 #24
0
int	main()
{
	// for playing
	IGraphBuilder *pGraphBuilder;
	ICaptureGraphBuilder2 *pCaptureGraphBuilder2;
	IMediaControl *pMediaControl;
	IBaseFilter *pDeviceFilter = NULL;

	// to select a video input device
	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;
	ULONG nFetched = 0;

	// initialize COM
	CoInitialize(NULL);

	//
	// selecting a device
	//

	// Create CreateDevEnum to list device
	CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);

	// Create EnumMoniker to list VideoInputDevice 
	pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (pEnumMoniker == NULL) {
		// this will be shown if there is no capture device
		printf("no device\n");
		return 0;
	}

	// reset EnumMoniker
	pEnumMoniker->Reset();

	// get each Moniker
	while (pEnumMoniker->Next(1, &pMoniker, &nFetched) == S_OK)
	{
		IPropertyBag *pPropertyBag;
		TCHAR devname[256];

		// bind to IPropertyBag
		pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
			(void **)&pPropertyBag);

		VARIANT var;

		// get FriendlyName
		var.vt = VT_BSTR;
		pPropertyBag->Read(L"FriendlyName", &var, 0);
		WideCharToMultiByte(CP_ACP, 0,
			var.bstrVal, -1, devname, sizeof(devname), 0, 0);
		VariantClear(&var);

		printf("%s\r\n", devname);
		printf("  select this device ? [y] or [n]\r\n");
		int ch = getchar();

		// press 'y' then Enter to start playback with this device;
		// any other key skips it and moves on to the next device.
		if (ch == 'y')
		{
			// Bind Moniker to Filter
			pMoniker->BindToObject(0, 0, IID_IBaseFilter,
				(void**)&pDeviceFilter );
		}
		else
		{
			getchar();
		}

		// release
		pMoniker->Release();
		pPropertyBag->Release();

		if (pDeviceFilter != NULL)
		{
			// leave the loop once a device filter has been bound
			break;
		}
	}

	if (pDeviceFilter != NULL) {
		//
		// PLAY
		//

		// create FilterGraph
		CoCreateInstance(CLSID_FilterGraph,
			NULL,
			CLSCTX_INPROC,
			IID_IGraphBuilder,
			(LPVOID *)&pGraphBuilder);

		// create CaptureGraphBuilder2
		CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC, 
			IID_ICaptureGraphBuilder2, 
			(LPVOID *)&pCaptureGraphBuilder2);

		//============================================================
		//===========  MY CODE  ======================================
		//=============================================================
		HRESULT hr = CoInitialize(0);
		IAMStreamConfig *pConfig = NULL;
		hr = pCaptureGraphBuilder2->FindInterface(&PIN_CATEGORY_CAPTURE, 0, pDeviceFilter, IID_IAMStreamConfig, (void**)&pConfig);

		int iCount = 0, iSize = 0;
		hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);

		// Check the size to make sure we pass in the correct structure.
		if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
		{
			// Use the video capabilities structure.

			for (int iFormat = 0; iFormat < iCount; iFormat++)
			{
				VIDEO_STREAM_CONFIG_CAPS scc;
				AM_MEDIA_TYPE *pmtConfig;
				hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
				if (SUCCEEDED(hr))
				{
					/* Examine the format, and possibly use it. */
					if ((pmtConfig->majortype == MEDIATYPE_Video) &&
						(pmtConfig->subtype == MEDIASUBTYPE_RGB24) &&
						(pmtConfig->formattype == FORMAT_VideoInfo) &&
						(pmtConfig->cbFormat >= sizeof (VIDEOINFOHEADER)) &&
						(pmtConfig->pbFormat != NULL))
					{
						VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
						// pVih contains the detailed format information.
						LONG lWidth = pVih->bmiHeader.biWidth;
						LONG lHeight = pVih->bmiHeader.biHeight;
						if( lWidth == 1280 )
							//					if (iFormat == 26)
						{ //2 = '1280x720YUV' YUV, 22 = '1280x800YUV', 26 = '1280x720RGB'
							hr = pConfig->SetFormat(pmtConfig);
						}
					}
					// Delete the media type when you are done.
					DeleteMediaType(pmtConfig);
				}
			}
		}
		if (pConfig)
			pConfig->Release();


		// Query the capture filter for the IAMCameraControl interface.
		IAMCameraControl *pCameraControl = 0;
		hr = pDeviceFilter->QueryInterface(IID_IAMCameraControl, (void**)&pCameraControl);
		if (FAILED(hr))
		{
			// The device does not support IAMCameraControl
		}
		else
		{
			long Min, Max, Step, Default, Flags, Val;

			// Get the range and default values 
			hr = pCameraControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step, &Default, &Flags);
			hr = pCameraControl->GetRange(CameraControl_Focus, &Min, &Max, &Step, &Default, &Flags);
			if (SUCCEEDED(hr))
			{
				hr = pCameraControl->Set(CameraControl_Exposure, -11, CameraControl_Flags_Manual ); // Min = -11, Max = 1, Step = 1
				hr = pCameraControl->Set(CameraControl_Focus, 12, CameraControl_Flags_Manual );
			}
		}


		// Query the capture filter for the IAMVideoProcAmp interface.
		IAMVideoProcAmp *pProcAmp = 0;
		hr = pDeviceFilter->QueryInterface(IID_IAMVideoProcAmp, (void**)&pProcAmp);
		if (FAILED(hr))
		{
			// The device does not support IAMVideoProcAmp
		}
		else
		{
			long Min, Max, Step, Default, Flags, Val;

			// Get the range and default values 
			hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step, &Default, &Flags);
			hr = pProcAmp->GetRange(VideoProcAmp_BacklightCompensation, &Min, &Max, &Step, &Default, &Flags);
			hr = pProcAmp->GetRange(VideoProcAmp_Contrast, &Min, &Max, &Step, &Default, &Flags);
			hr = pProcAmp->GetRange(VideoProcAmp_Saturation, &Min, &Max, &Step, &Default, &Flags);
			hr = pProcAmp->GetRange(VideoProcAmp_Sharpness, &Min, &Max, &Step, &Default, &Flags);
			hr = pProcAmp->GetRange(VideoProcAmp_WhiteBalance, &Min, &Max, &Step, &Default, &Flags);
			if (SUCCEEDED(hr))
			{
				hr = pProcAmp->Set(VideoProcAmp_Brightness, 142, VideoProcAmp_Flags_Manual);
				hr = pProcAmp->Set(VideoProcAmp_BacklightCompensation, 0, VideoProcAmp_Flags_Manual);
				hr = pProcAmp->Set(VideoProcAmp_Contrast, 4, VideoProcAmp_Flags_Manual);
				hr = pProcAmp->Set(VideoProcAmp_Saturation, 100, VideoProcAmp_Flags_Manual);
				hr = pProcAmp->Set(VideoProcAmp_Sharpness, 0, VideoProcAmp_Flags_Manual);
				hr = pProcAmp->Set(VideoProcAmp_WhiteBalance, 2800, VideoProcAmp_Flags_Manual);
			}
		}


		//============================================================
		//=========== END MY CODE  ======================================
		//=============================================================

		hr = S_OK;
		CTransformer* trans = new CTransformer( "Dif trans", 0, CLSID_DIFFilter, &hr );
		IBaseFilter * ttt = 0;
		trans->QueryInterface(IID_IBaseFilter, (LPVOID *)&ttt);
		// set FilterGraph
		hr = pCaptureGraphBuilder2->SetFiltergraph(pGraphBuilder);

		// get MediaControl interface
		hr = pGraphBuilder->QueryInterface(IID_IMediaControl,
			(LPVOID *)&pMediaControl);

		// add device filter to FilterGraph
		hr = pGraphBuilder->AddFilter(ttt, L"Dif trans");
		hr = pGraphBuilder->AddFilter(pDeviceFilter, L"Device Filter");

		// create Graph
		hr = pCaptureGraphBuilder2->RenderStream(&PIN_CATEGORY_CAPTURE,
			NULL, pDeviceFilter, NULL, NULL);

		// start playing
		hr = pMediaControl->Run();

		// to block execution
		// without this messagebox, the graph will be stopped immediately
		MessageBox(NULL,
			"Block Execution",
			"Block",
			MB_OK);

		// release
		pMediaControl->Release();
		pCaptureGraphBuilder2->Release();
		pGraphBuilder->Release();
	}

	// release
	pEnumMoniker->Release();
	pCreateDevEnum->Release();

	// finalize COM
	CoUninitialize();

	return 0;
}
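Per the IAMCameraControl documentation, CameraControl_Exposure takes log base 2 of the exposure time in seconds, so the Set(CameraControl_Exposure, -11, ...) call above requests roughly a 1/2048 s shutter. A small helper sketch for that mapping (editor's addition):

#include <cmath>

// Map a desired shutter time to the nearest supported log2(seconds) value,
// clamped to the [Min, Max] range reported by GetRange. log2(1/2048) = -11.
static long ExposureFromSeconds(double seconds, long Min, long Max)
{
	long v = (long)std::floor(std::log(seconds) / std::log(2.0) + 0.5);
	if (v < Min) v = Min;
	if (v > Max) v = Max;
	return v;
}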
예제 #25
0
static GstCaps *
gst_dshowaudiosrc_get_caps (GstBaseSrc * basesrc)
{
  HRESULT hres = S_OK;
  IBindCtx *lpbc = NULL;
  IMoniker *audiom = NULL;
  DWORD dwEaten;
  GstDshowAudioSrc *src = GST_DSHOWAUDIOSRC (basesrc);
  gunichar2 *unidevice = NULL;

  if (src->device) {
    g_free (src->device);
    src->device = NULL;
  }

  src->device =
      gst_dshow_getdevice_from_devicename (&CLSID_AudioInputDeviceCategory,
      &src->device_name);
  if (!src->device) {
    GST_ERROR ("No audio device found.");
    return NULL;
  }
  unidevice =
      g_utf8_to_utf16 (src->device, strlen (src->device), NULL, NULL, NULL);

  if (!src->audio_cap_filter) {
    hres = CreateBindCtx (0, &lpbc);
    if (SUCCEEDED (hres)) {
      hres =
          MkParseDisplayName (lpbc, (LPCOLESTR) unidevice, &dwEaten, &audiom);
      if (SUCCEEDED (hres)) {
        hres = audiom->BindToObject (lpbc, NULL, IID_IBaseFilter,
            (LPVOID *) & src->audio_cap_filter);
        audiom->Release ();
      }
      lpbc->Release ();
    }
  }

  if (src->audio_cap_filter && !src->caps) {
    /* get the capture pins supported types */
    IPin *capture_pin = NULL;
    IEnumPins *enumpins = NULL;
    HRESULT hres;

    hres = src->audio_cap_filter->EnumPins (&enumpins);
    if (SUCCEEDED (hres)) {
      while (enumpins->Next (1, &capture_pin, NULL) == S_OK) {
        IKsPropertySet *pKs = NULL;

        hres =
            capture_pin->QueryInterface (IID_IKsPropertySet, (LPVOID *) & pKs);
        if (SUCCEEDED (hres) && pKs) {
          DWORD cbReturned;
          GUID pin_category;
          RPC_STATUS rpcstatus;

          hres =
              pKs->Get (AMPROPSETID_Pin,
              AMPROPERTY_PIN_CATEGORY, NULL, 0, &pin_category, sizeof (GUID),
              &cbReturned);

          /* we only want capture pins */
          if (UuidCompare (&pin_category, (UUID *) & PIN_CATEGORY_CAPTURE,
                  &rpcstatus) == 0) {
            IAMStreamConfig *streamcaps = NULL;

            if (SUCCEEDED (capture_pin->QueryInterface (IID_IAMStreamConfig,
                        (LPVOID *) & streamcaps))) {
              src->caps =
                  gst_dshowaudiosrc_getcaps_from_streamcaps (src, capture_pin,
                  streamcaps);
              streamcaps->Release ();
            }
          }
          pKs->Release ();
        }
        capture_pin->Release ();
      }
      enumpins->Release ();
    }
  }

  if (unidevice) {
    g_free (unidevice);
  }

  if (src->caps) {
    return gst_caps_ref (src->caps);
  }

  return NULL;
}
예제 #26
0
HRESULT CAudioCompressorFormats::GetSupportedFormats(std::vector<WAVEFORMATEX*>& listFormats)
{
	CStringW swDeviceName(m_sAudComp);

	HRESULT hr = m_pSysDevEnum->CreateClassEnumerator(CLSID_AudioCompressorCategory, &m_pEnumCat, 0);
	if(NULL == m_pEnumCat)
		return E_POINTER;
	if(S_OK == hr)
	{
		ULONG cFetched;
		while(m_pEnumCat->Next(1, &m_pMoniker, &cFetched) == S_OK)
		{
			IPropertyBag *pPropBag;
			hr = m_pMoniker->BindToStorage(0, 0, IID_IPropertyBag, 
				(void **)&pPropBag);
			if (SUCCEEDED(hr))
			{
				VARIANT varName;
				VariantInit(&varName);
				hr = pPropBag->Read(L"FriendlyName", &varName, 0);
				if (SUCCEEDED(hr))
				{
					if(wcscmp(varName.bstrVal, swDeviceName.GetBuffer()) == 0)
					{
						m_pMoniker->AddRef();
						break;
					}
				}
				VariantClear(&varName);
				pPropBag->Release();
			}
			m_pMoniker->Release();
		}
	}
	if(m_pMoniker)
	{
		IBaseFilter *pFilter = 0;
		hr = m_pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pFilter);
		if(SUCCEEDED(hr))
		{
			IEnumPins *pEnum = NULL;
			hr = pFilter->EnumPins(&pEnum);
			if (SUCCEEDED(hr))
			{
				IPin *pPin = NULL;
				while(S_OK == pEnum->Next(1, &pPin, NULL))
				{
					IAMStreamConfig *pConf;
					hr = pPin->QueryInterface(IID_IAMStreamConfig, (void**)&pConf);
					if (SUCCEEDED(hr))
					{
						CString sFormat;
						int iCount, iSize;
						BYTE *pSCC = NULL;
						AM_MEDIA_TYPE *pmt;
						float fSample;
						hr = pConf->GetNumberOfCapabilities(&iCount, &iSize);
						pSCC = new BYTE[iSize];
						if (pSCC == NULL)
						{
							return E_POINTER;
						}
						if (iSize == sizeof(AUDIO_STREAM_CONFIG_CAPS))
						{
							// Use the audio capabilities structure.
							for (int iFormat = 0; iFormat < iCount; iFormat++)
							{
								AUDIO_STREAM_CONFIG_CAPS scc;
								AM_MEDIA_TYPE *pmtConfig;
								hr = pConf->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
								if (SUCCEEDED(hr))
								{
									if(pmtConfig->formattype == FORMAT_WaveFormatEx)
									{
										WAVEFORMATEX *pFormat = new WAVEFORMATEX(*(reinterpret_cast<WAVEFORMATEX*>(pmtConfig->pbFormat)));
										if(pFormat)
										{
											listFormats.push_back(pFormat);
										}
									}
									FreeMediaType(*pmtConfig);
									CoTaskMemFree(pmtConfig);
								}
							}
							delete[] pSCC;
						}
						pConf->Release();
					}
					pPin->Release();
				}
				pEnum->Release();
			}
			pFilter->Release();
		}
	}
	return hr;
}
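Callers of GetSupportedFormats own the WAVEFORMATEX copies pushed into listFormats. A hypothetical usage sketch; the comp instance and its construction are assumed, since the class's constructor is not shown here:

	// Hypothetical usage: `comp` is an already-constructed CAudioCompressorFormats.
	std::vector<WAVEFORMATEX*> formats;
	if (SUCCEEDED(comp.GetSupportedFormats(formats)))
	{
		for (size_t i = 0; i < formats.size(); ++i)
			printf("%lu Hz, %u ch, %u bit\n",
			       (unsigned long)formats[i]->nSamplesPerSec,
			       (unsigned)formats[i]->nChannels,
			       (unsigned)formats[i]->wBitsPerSample);

		// Each entry was allocated with new in GetSupportedFormats; the caller frees it.
		for (size_t i = 0; i < formats.size(); ++i)
			delete formats[i];
	}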
예제 #27
0
QVector<VideoMode> DirectShow::getDeviceModes(QString devName)
{
    QVector<VideoMode> modes;

    IBaseFilter* devFilter = getDevFilter(devName);
    if (!devFilter)
        return modes;

    // The outer loop tries to find a valid output pin
    GUID category;
    DWORD r2;
    IEnumPins *pins = nullptr;
    IPin *pin;
    if (devFilter->EnumPins(&pins) != S_OK)
        return modes;
    while (pins->Next(1, &pin, nullptr) == S_OK)
    {
        IKsPropertySet *p = nullptr;
        PIN_INFO info;

        pin->QueryPinInfo(&info);
        info.pFilter->Release();
        if (info.dir != PINDIR_OUTPUT)
            goto next;
        if (pin->QueryInterface(IID_IKsPropertySet, (void**)&p) != S_OK)
            goto next;
        if (p->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY,
                nullptr, 0, &category, sizeof(GUID), &r2) != S_OK)
            goto next;
        if (!IsEqualGUID(category, PIN_CATEGORY_CAPTURE))
            goto next;

        // Now we can list the video modes for the current pin
        // Prepare for another wall of spaghetti DIRECT SHOW QUALITY code
        {
            IAMStreamConfig *config = nullptr;
            VIDEO_STREAM_CONFIG_CAPS *vcaps = nullptr;
            int size, n;
            if (pin->QueryInterface(IID_IAMStreamConfig, (void**)&config) != S_OK)
                goto next;
            if (config->GetNumberOfCapabilities(&n, &size) != S_OK)
                goto pinend;
            assert(size == sizeof(VIDEO_STREAM_CONFIG_CAPS));
            vcaps = new VIDEO_STREAM_CONFIG_CAPS;

            for (int i=0; i<n; ++i)
            {
                AM_MEDIA_TYPE* type = nullptr;
                if (config->GetStreamCaps(i, &type, (BYTE*)vcaps) != S_OK)
                    goto nextformat;

                if (!IsEqualGUID(type->formattype, FORMAT_VideoInfo)
                    && !IsEqualGUID(type->formattype, FORMAT_VideoInfo2))
                    goto nextformat;

                VideoMode mode;
                mode.width = vcaps->MaxOutputSize.cx;
                mode.height = vcaps->MaxOutputSize.cy;
                mode.FPS = 1e7 / vcaps->MinFrameInterval;
                if (!modes.contains(mode))
                    modes.append(std::move(mode));

nextformat:
                if (type)
                {
                    if (type->pbFormat)
                        CoTaskMemFree(type->pbFormat);
                    CoTaskMemFree(type);
                }
            }
pinend:
            config->Release();
            delete vcaps;
        }
next:
        if (p)
            p->Release();
        pin->Release();
    }

    return modes;
}
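The VideoMode type above belongs to the surrounding project and is not defined in this example; a minimal stand-in sufficient for this function (the real struct may carry more fields):

// Minimal stand-in for VideoMode; operator== is what QVector::contains()
// relies on when de-duplicating modes in getDeviceModes.
struct VideoMode
{
    int   width  = 0;
    int   height = 0;
    float FPS    = 0.0f;

    bool operator==(const VideoMode& other) const
    {
        return width == other.width
            && height == other.height
            && FPS == other.FPS;
    }
};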
예제 #28
0
int main(int argc, char* argv[])
{
    ICaptureGraphBuilder2   *pCaptureGraphBuilder = NULL;
    IGraphBuilder           *pGraphBuilder = NULL;
    IBaseFilter             *pSource = NULL;
    IBaseFilter             *pMux = NULL;
    IBaseFilter             *pVideoCompressor = NULL;
    IBaseFilter             *pAudioCompressor = NULL;

    IAMStreamConfig         *pAMStreamConfig = NULL;
    IAMVideoCompression     *pAMVideoCompression = NULL;

    IMediaControl           *pControl = NULL;
    IMediaSeeking           *pSeek = NULL;
    IMediaEvent             *pEvent = NULL;

    HRESULT hr;

    DWORD pdwRegister=0;
    CoInitialize(NULL);

    // Create the capture graph builder.
    CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
                     IID_ICaptureGraphBuilder2, (void **)&pCaptureGraphBuilder);

    // Make the rendering section of the graph.
    pCaptureGraphBuilder->SetOutputFileName(
        &MEDIASUBTYPE_Avi,  // File type.
        L"C:\\STDIUE1.avi",  // File name.
        &pMux,              // pointer to the multiplexer.
        NULL);              // pointer to the file writer.

    // Load the source file.
    pCaptureGraphBuilder->GetFiltergraph(&pGraphBuilder);
    pGraphBuilder->AddSourceFilter(L"C:\\Program Files\\Microsoft Money\\Media\\STDIUE1.avi", L"Source Filter", &pSource);

    // Add the compressor filter.
    CoCreateInstance(CLSID_AVICo, NULL, CLSCTX_INPROC,
                     IID_IBaseFilter, (void **)&pVideoCompressor);
    pGraphBuilder->AddFilter(pVideoCompressor, L"Video Compressor");

    // Render the video stream, through the compressor.
    pCaptureGraphBuilder->RenderStream(
        NULL,       // Output pin category
        NULL,       // Media type
        pSource,       // Source filter
        pVideoCompressor,     // Compressor filter
        pMux);      // Sink filter (the AVI Mux)

    /* CoCreateInstance(CLSID_GSM, NULL, CLSCTX_INPROC,
             IID_IBaseFilter, (void **)&pAudioCompressor);
     pGraphBuilder->AddFilter(pAudioCompressor, L"Audio Compressor");*/

    // Render the audio stream.
    pCaptureGraphBuilder->RenderStream(
        NULL,
        NULL,
        pSource,
        pAudioCompressor,
        pMux);

    // Compress at 100k/second data rate.
    AM_MEDIA_TYPE *pmt;
    pCaptureGraphBuilder->FindInterface(NULL, NULL, pVideoCompressor, IID_IAMStreamConfig, (void **)&pAMStreamConfig);

    pAMStreamConfig->GetFormat(&pmt);

    if (pmt->formattype == FORMAT_VideoInfo)
    {

        ((VIDEOINFOHEADER *)(pmt->pbFormat))->dwBitRate = 100000;

        pAMStreamConfig->SetFormat(pmt);
    }


    // Request key frames every four frames.
    pAMStreamConfig->QueryInterface(IID_IAMVideoCompression, (void **)&pAMVideoCompression);
    pAMVideoCompression->put_KeyFrameRate(4);
    pAMVideoCompression->Release();
    pAMStreamConfig->Release();

    // Run the graph.

    pGraphBuilder->QueryInterface(IID_IMediaControl, (void **)&pControl);
    pGraphBuilder->QueryInterface(IID_IMediaEvent, (void **)&pEvent);

    hr = pMux->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);


    pControl->Run();
    printf("Recompressing... \n");

    long evCode;
    if (SUCCEEDED(hr))
    {
        REFERENCE_TIME rtTotal, rtNow = 0;
        pSeek->GetDuration(&rtTotal);
        while ((pEvent->WaitForCompletion(1000, &evCode)) == E_ABORT)
        {
            pSeek->GetCurrentPosition(&rtNow);
            printf("%d%%\n", (rtNow * 100)/rtTotal);
        }
        pSeek->Release();
    }
    else  // Cannot update the progress.
    {
        pEvent->WaitForCompletion(INFINITE, &evCode);
    }
    pControl->Stop();
    printf("All done\n");

    pSource->Release();
    pMux->Release();
    pVideoCompressor->Release();
    if (pAudioCompressor)               // never created above; creation is commented out
        pAudioCompressor->Release();
    pControl->Release();
    pEvent->Release();
    pCaptureGraphBuilder->Release();
    pGraphBuilder->Release();
    CoUninitialize();

    return 0;
}
예제 #29
0
/******************************Public*Routine******************************\
* BuildAndRender
*
* Capture card source
\**************************************************************************/
HRESULT CVMR9Subgraph::BuildAndRender(IBaseFilter* pCap , GUID VidType ,UINT Width,UINT Height,int nFPS,IMultiVMR9Wizard* pWizard ,BOOL bUsingColorSpace,SourceConnectProc ConnectProc)
{
    HRESULT hr = S_OK;
	if(!pWizard)return E_FAIL;
	if(!pCap)return E_FAIL;
	if(m_pGraph)			return E_FAIL;
    IVMRFilterConfig9	*	pConfig = NULL;
	IGraphBuilder		*	pGb= NULL;
	IBaseFilter			*	pColorSpace = 0;
	IBaseFilter				* pVMR9 = 0;

	ICaptureGraphBuilder2 * pBuild = NULL;
	//IBaseFilter* pCap = NULL;
	IPin * pPin = NULL;
	IAMStreamConfig *pStrCfig = 0;
	AM_MEDIA_TYPE * mmt = 0;
	//m_DeviceId = DeviceId;
	m_GraphType = Capture_Device;
		
	m_ConnectProc = ConnectProc;
	CMediaHelper Helper;
    // create graph
	try
	{
		hr = CoCreateInstance( CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, 
			IID_IFilterGraph, (void**)&(m_pGraph) );
		if( FAILED(hr))throw hr;if(!m_pGraph)throw  E_OUTOFMEMORY;
		//--
		hr = m_pGraph->QueryInterface( IID_IGraphBuilder, (void**)&(pGb) );
		if( FAILED(hr))throw hr;if(!pGb)throw  E_OUTOFMEMORY;
		//--
		hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC, IID_ICaptureGraphBuilder2, (void **)&pBuild);		
		if( FAILED(hr))throw hr;if(!pBuild)throw  E_OUTOFMEMORY;
	
		//--
		hr =pBuild->SetFiltergraph(pGb);
		if( FAILED(hr))throw hr;
		//--
		// create and add VMR9
		hr = CoCreateInstance( CLSID_VideoMixingRenderer9, NULL, CLSCTX_INPROC,
			IID_IBaseFilter, (void**)&(pVMR9) );
		if( FAILED(hr))throw hr;if(! pVMR9)throw  E_OUTOFMEMORY;
		//--
		hr = m_pGraph->AddFilter( pVMR9, L"VMR9");
		if( FAILED(hr))throw hr;

		//--
		hr = pVMR9->QueryInterface( IID_IVMRFilterConfig9, (void**)&(pConfig) );
		if( FAILED(hr))throw hr;
		//--
		// set VMR to the renderless mode
		hr = pConfig->SetRenderingMode(  VMR9Mode_Renderless );
				//--
		hr = pWizard->Attach( pVMR9,D3DFMT_UNKNOWN, &m_dwID );
		if( FAILED(hr))throw hr;

		if(bUsingColorSpace){
			hr = CoCreateInstance( CLSID_Colour, NULL, CLSCTX_INPROC,
				IID_IBaseFilter, (void**)&(pColorSpace) );
			if( FAILED(hr))throw hr;if( !pColorSpace)throw  E_OUTOFMEMORY;
			//--
			hr = m_pGraph->AddFilter( pColorSpace, L"ColorSpace");
			if( FAILED(hr))throw hr;
		}

		
		////connect the capture card
		//hr = Helper.GetVidCapDevice(DeviceId,&pCap);
		//if( FAILED(hr))throw hr;
		//if(!pCap)throw  E_OUTOFMEMORY;
		hr = m_pGraph->AddFilter(pCap,L"Capture");
		if( FAILED(hr))throw hr;
		if(m_ConnectProc) {
			hr = m_ConnectProc(m_dwID,m_pGraph,pCap,pVMR9);
		}
		else{//use the default connection method
			hr = pBuild->FindInterface(&PIN_CATEGORY_CAPTURE,&MEDIATYPE_Interleaved,pCap,IID_IAMStreamConfig,(void **)&pStrCfig);
			if( FAILED( hr) ){
				hr = pBuild->FindInterface(&PIN_CATEGORY_CAPTURE,&MEDIATYPE_Video,pCap,IID_IAMStreamConfig,(void **)&pStrCfig);
				if (FAILED(hr ))throw hr;	
			}
			
			hr = pStrCfig->GetFormat(&mmt);
			if (FAILED(hr))throw hr;
			if(!mmt) throw E_OUTOFMEMORY;
			if(mmt->formattype  == FORMAT_VideoInfo){
				VIDEOINFO *pvi = (VIDEOINFO *) mmt->pbFormat;
				pvi->AvgTimePerFrame = UNITS/nFPS;
				pvi->bmiHeader.biWidth = Width;
				pvi->bmiHeader.biHeight = Height;
				mmt->subtype = VidType;
		//		hr=pStrCfig->SetFormat(mmt);   //re-apply the modified format
				if( FAILED(hr))throw hr;
			}
			/*hr = pBuild->FindPin( pCap, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE,&MEDIATYPE_Interleaved, TRUE, 0, &pPin);
			if( FAILED(hr)){
				hr = pBuild->FindPin( pCap, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE,&MEDIATYPE_Video, TRUE, 0, &pPin); 
				if (FAILED(hr ))throw hr;
			}
			if(!pPin) throw E_OUTOFMEMORY;*/
			hr = pBuild->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Interleaved,pCap,pColorSpace,pVMR9);
			if( FAILED(hr)){
				hr = pBuild->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,pCap,pColorSpace,pVMR9);
				if (FAILED(hr ))throw hr;
			}
		}
		if( FAILED(hr))throw hr;

		
		// ok, all is rendered, now get MediaControl, MediaSeeking and continue
		hr = m_pGraph->QueryInterface( IID_IMediaControl, (void**)&(m_pMc) );
		if( FAILED(hr))throw hr;

		hr = m_pGraph->QueryInterface( IID_IMediaSeeking, (void**)&(m_pMs) );
		if( FAILED(hr))throw hr;



	}
	catch(HRESULT hr1)
	{
		pWizard->Detach( m_dwID );
		hr = hr1;
	}

	RELEASE( pConfig );
	RELEASE( pGb );
	RELEASE( pColorSpace );
	RELEASE( pBuild  );
	RELEASE( pStrCfig );
	RELEASE( pVMR9);
	Helper.DeleteMediaType(mmt);
	return hr;
}
예제 #30
0
void DirectShowGrabber::setCaptureOutputFormat() {
   IAMStreamConfig          *pConfig;
   int                      iCount;
   int                      iSize;
   VIDEOINFOHEADER          *pVih;
   VIDEO_STREAM_CONFIG_CAPS scc;
   AM_MEDIA_TYPE            *pmtConfig;
   int                      formatSet;
   HRESULT                  hr;

   // Reference http://msdn.microsoft.com/library/default.asp?url=/library/en-us/directshow/htm/configurethevideooutputformat.asp

   debug_msg("DirectShowGrabber::setCaptureOutputFormat(): enter...\n");

   formatSet = 0;
   pConfig   = NULL;
   hr        = pBuild_->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
                                     pCaptureFilter_, IID_IAMStreamConfig, (void**)&pConfig);
   if (FAILED(hr)) {
      Grabber::status_ = -1;
      return;
   }

   debug_msg("DirectShowGrabber::setCaptureOutputFormat(): IAMStreamConfig interface acquired\n");

   iCount = iSize = 0;
   hr     = pConfig->GetNumberOfCapabilities(&iCount, &iSize);
   // Check the size to make sure we pass in the correct structure.
   // The alternative output of iSize is AUDIO_STREAM_CONFIG_CAPS, btw.
   if ( iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS) ) {

      for (int iFormat = 0; iFormat < iCount; iFormat++) {
         hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE *)&scc);
         //showErrorMessage(hr);

         if( SUCCEEDED(hr) ) {
            if ((pmtConfig->majortype  == MEDIATYPE_Video)            &&
                  (pmtConfig->subtype    == MEDIASUBTYPE_RGB24)       &&
                  (pmtConfig->formattype == FORMAT_VideoInfo)         &&
                  (pmtConfig->cbFormat   >= sizeof (VIDEOINFOHEADER)) &&
                  (pmtConfig->pbFormat   != NULL)) {

               pVih                        = (VIDEOINFOHEADER *)pmtConfig->pbFormat;
               pVih->bmiHeader.biWidth     = 320;
               pVih->bmiHeader.biHeight    = 240;
               pVih->bmiHeader.biSizeImage = DIBSIZE(pVih->bmiHeader);

               debug_msg("Windows GDI BITMAPINFOHEADER follows:\n");
               debug_msg("biWidth=        %d\n", pVih->bmiHeader.biWidth);
               debug_msg("biHeight=       %d\n", pVih->bmiHeader.biHeight);
               debug_msg("biSize=         %d\n", pVih->bmiHeader.biSize);
               debug_msg("biPlanes=       %d\n", pVih->bmiHeader.biPlanes);
               debug_msg("biBitCount=     %d\n", pVih->bmiHeader.biBitCount);
               debug_msg("biCompression=  %d\n", pVih->bmiHeader.biCompression);
               debug_msg("biSizeImage=    %d\n", pVih->bmiHeader.biSizeImage);
               debug_msg("biXPelsPerMeter=%d\n", pVih->bmiHeader.biXPelsPerMeter);
               debug_msg("biYPelsPerMeter=%d\n", pVih->bmiHeader.biYPelsPerMeter);
               debug_msg("biClrUsed=      %d\n", pVih->bmiHeader.biClrUsed);
               debug_msg("biClrImportant= %d\n", pVih->bmiHeader.biClrImportant);

               hr = pConfig->SetFormat(pmtConfig);
               //showErrorMessage(hr);

               // XXX:  leak.  need to deal with this - msp
               //DeleteMediaType(pmtConfig);

               formatSet = 1;
               break;

            }
         }
      }
   }
   pConfig->Release();

   if( formatSet )
      debug_msg("DirectShowGrabber::setCaptureOutputFormat:  format set\n");
   else
      debug_msg("DirectShowGrabber::setCaptureOutputFormat:  format not set\n");
}
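The leak flagged in the XXX comment above can be closed as soon as SetFormat returns, since SetFormat copies the media type (the non-matching types returned by GetStreamCaps would need the same treatment). A sketch of the matching branch, assuming the SDK's DeleteMediaType helper:

               // inside the matching-format branch, after configuring pVih:
               hr = pConfig->SetFormat(pmtConfig);
               DeleteMediaType(pmtConfig);   // SetFormat keeps its own copy
               pmtConfig = NULL;
               formatSet = 1;
               break;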