Example #1
bool MIPDirectShowCapture::listGUIDS(std::list<GUID> &guids)
{
	guids.clear();

	HRESULT hr;

	IAMStreamConfig *pConfig = 0;

	hr = m_pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, 0, m_pCaptDevice, IID_IAMStreamConfig, (void**)&pConfig);
	if (HR_FAILED(hr))
	{
		setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECONFIG);
		return false;
	}

	int count = 0;
	int s = 0;
	
	hr = pConfig->GetNumberOfCapabilities(&count, &s);
	if (HR_FAILED(hr))
	{
		pConfig->Release();
		setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECAPS);
		return false;
	}
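	// GetNumberOfCapabilities also reports the size of the capability
	// structure; for a video pin it must be VIDEO_STREAM_CONFIG_CAPS.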

	if (s != sizeof(VIDEO_STREAM_CONFIG_CAPS))
	{
		pConfig->Release();
		setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_INVALIDCAPS);
		return false;
	}

	for (int i = 0; i < count; i++)
	{
        VIDEO_STREAM_CONFIG_CAPS caps;
        AM_MEDIA_TYPE *pMediaType;

        hr = pConfig->GetStreamCaps(i, &pMediaType, (BYTE*)&caps);
        if (HR_SUCCEEDED(hr))
        {
			if (pMediaType->majortype == MEDIATYPE_Video)
			{
				GUID subType = pMediaType->subtype;
				
				guids.push_back(subType);

//				uint8_t *pSubType = (uint8_t *)&subType;
//
//				printf("0x%02x%02x%02x%02x %c%c%c%c\n",(int)pSubType[0],(int)pSubType[1],(int)pSubType[2],(int)pSubType[3],
//					                                   (char)pSubType[0],(char)pSubType[1],(char)pSubType[2],(char)pSubType[3]);
			}

			// GetStreamCaps allocates the returned AM_MEDIA_TYPE; free it so
			// the enumeration loop does not leak one copy per capability.
			if (pMediaType->pbFormat != 0)
				CoTaskMemFree(pMediaType->pbFormat);
			CoTaskMemFree(pMediaType);
		}
	}

	return true;
}
Example #2
HRESULT 
recChannel_t::set_rate(float FR)
{
     __CONTEXT("recChannel_t::set_rate");
	if (FR<1)
    {
        return S_OK;
    }
	float factorRate = FR/30;
	int hr = 0;

	if (factorRate<0.1) factorRate = 0.1;
	frameRate = factorRate;

	IAMStreamConfig *pConfig = NULL;		
				
	if ((camInfo->getKind() == SHARED ||
         camInfo->getKind() == CAM)   && 
		actualFormat.pbFormat != NULL)
	{
			VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*) actualFormat.pbFormat;
            double newFR = 10000000.0/FR;
			pVih->AvgTimePerFrame = (REFERENCE_TIME)newFR;
            camInfo->setRate(pVih->AvgTimePerFrame);
            if (camInfo->getKind() == CAM)
            {
                IPin * pInput = NULL;
                get_camInfo()->output->ConnectedTo(&pInput);
                if (mapping)
                {
                    pControl->Stop();
                }
                if (pInput)
                {
                    get_camInfo()->output->Disconnect();
                    pInput->Disconnect();
                }
             	hr = get_camInfo()->output->QueryInterface(IID_IAMStreamConfig, (void**)&pConfig);
                if (pConfig)
                {
                    hr = pConfig->SetFormat(&actualFormat);
                    errorCheck(hr);
                    pConfig->Release();
                }
                if (pInput)
                {
                    hr = pGraph->Connect(get_camInfo()->output,pInput);
                    errorCheck(hr);
                }
                errorCheck(hr);
                if (mapping)
                {
                    pControl->Run();
                }
			}
    }
    return hr;
	
}
Example #3
void VideoCapture::EnumResolutions()
{
	int iCount, iSize, iChosen=-1;
	IBaseFilter *pSource;
   	CComPtr <ICaptureGraphBuilder2> pCaptB;
	VIDEO_STREAM_CONFIG_CAPS caps;
	HRESULT hr;
	bool response;

	IAMStreamConfig *pConfig;

	devices_resolutions = new DeviceResolutions[nDevices];

	pCaptB.CoCreateInstance(CLSID_CaptureGraphBuilder2);

	for (unsigned int iDevice=0; iDevice<nDevices; iDevice++)
	{
		response = BindFilter(iDevice, &pSource);
		if (!response)
		{
			devices_resolutions[iDevice].nResolutions = 0;
			continue;
		}

		hr = pCaptB->FindInterface(
        &PIN_CATEGORY_CAPTURE,
        &MEDIATYPE_Video,
        pSource,
        IID_IAMStreamConfig,
        (void**)&pConfig);

		if (!SUCCEEDED(hr))
		{
			pSource->Release();
			devices_resolutions[iDevice].nResolutions = 0;
			continue;
		}

		pConfig->GetNumberOfCapabilities(&iCount, &iSize);

		devices_resolutions[iDevice].SetNResolutions(iCount);


		for(int i=0; i < iCount; i++) {
			AM_MEDIA_TYPE *pmt;
			if( pConfig->GetStreamCaps(i, &pmt, reinterpret_cast<BYTE*>(&caps)) == S_OK ) {

				VIDEOINFOHEADER *pVih = 
					reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
				
				devices_resolutions[iDevice].x[i] = caps.InputSize.cx;
				devices_resolutions[iDevice].y[i] = caps.InputSize.cy;
				devices_resolutions[iDevice].color_space[i] = pmt->subtype;
				devices_resolutions[iDevice].compression[i] = pVih->bmiHeader.biCompression;
				DeleteMediaType(pmt);
			}
		}

		pSource->Release();
		pConfig->Release();

		pSource = 0;
	}
}
Example #4
void CCaptureDevice::SetCaptureBufferSize(void)
{
	IPin * pCapturePin = GetPin();
	if (pCapturePin)
	{
		DWORD  dwBytesPerSec = 0;
		AM_MEDIA_TYPE * pmt = NULL;
		IAMStreamConfig * pCfg = NULL;
		HRESULT hr = pCapturePin->QueryInterface(IID_IAMStreamConfig, (void **)&pCfg);
		if ( hr==S_OK )
		{
            hr = pCfg->GetFormat(&pmt);
			if ( hr==S_OK )
			{
				WAVEFORMATEX *pWF = (WAVEFORMATEX *) pmt->pbFormat;
				dwBytesPerSec = pWF->nAvgBytesPerSec;
				pWF->nChannels = 1;
				pWF->wBitsPerSample = 8;
				pWF->nSamplesPerSec = 11025;
				pWF->nAvgBytesPerSec = pWF->nSamplesPerSec * pWF->nChannels * pWF->wBitsPerSample / 8;
				pWF->nBlockAlign = 1;
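				// nBlockAlign should equal nChannels * wBitsPerSample / 8,
				// which is 1 for the mono 8-bit format chosen above.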
/*
	info.cbSize = sizeof(WAVEFORMATEX);
	info.wFormatTag = 1;
	info.nChannels = 2;
	info.nSamplesPerSec = 44100;
	//info.nSamplesPerSec = 22050;
	11025
	info.wBitsPerSample = 16;
	info.nAvgBytesPerSec = info.nSamplesPerSec * info.nChannels * info.wBitsPerSample / 8;
	info.nBlockAlign = 4;
	*/
				pCfg->SetFormat( pmt );
				DeleteMediaType(pmt);
			}
			pCfg->Release();
		}
/*		if (dwBytesPerSec)
		{
			IAMBufferNegotiation * pNeg = NULL;
			hr = pCapturePin->QueryInterface(IID_IAMBufferNegotiation, 
				(void **)&pNeg);
			if (SUCCEEDED(hr))
			{
				ALLOCATOR_PROPERTIES AllocProp;
				AllocProp.cbAlign  = -1;  // -1 means no preference.
				AllocProp.cbBuffer = dwBytesPerSec *  dwLatencyInMilliseconds / 1000;
				AllocProp.cbPrefix = -1;
				AllocProp.cBuffers = -1;
				hr = pNeg->SuggestAllocatorProperties(&AllocProp);
				pNeg->Release();
			}
		}*/
	}
}
Example #5
IAMStreamConfig * CCaptureDevice::GetStreamConfig(void)
{
	IAMStreamConfig * pConfig = NULL;
	if (m_pBaseFilter)
	{
		// Get the capture output pin first
		IPin * pCapture = GetPin();
		if (pCapture)
		{
			pCapture->QueryInterface(IID_IAMStreamConfig, (void **)&pConfig);
		}

		// Do not Release() here: the returned interface is the caller's
		// reference, and releasing it now would hand back a dangling pointer.
	}
	return pConfig;
}
Example #6
/// Set the audio format
BOOL CAudioCapture::SetAudioFormat(ENUM_FREQUENCY_TYPE enFrequency,
	ENUM_CHANNEL_TYPE enChannel, ENUM_SAMPLE_TYPE enSample)
{
	if(NULL != m_pCaptureFilter)
	{
		BOOL bResult = FALSE;
		do
		{
			IPin* pOutPin = GetOutputPin(m_pCaptureFilter, (uint16_t)0);
			if(NULL != pOutPin)
			{
				IAMBufferNegotiation *pNeg = NULL;
				IAMStreamConfig *pCfg = NULL;

				// Get buffer negotiation interface
				HRESULT hr = pOutPin->QueryInterface(IID_IAMBufferNegotiation, (void **)&pNeg);
				if (FAILED(hr))
				{
					pOutPin->Release();
					break;
				}

				// Find number of bytes in one second
				long lBytesPerSecond = (long) (enSample * enFrequency * enChannel);

				// Buffer size adjusted for the FAAC encoder
				long lBufferSize =  1024 * enSample * enChannel;

				// Set the buffer size based on selected settings
				ALLOCATOR_PROPERTIES prop={0};
				prop.cbBuffer = lBufferSize;
				prop.cBuffers = 6;
				prop.cbAlign = enSample * enChannel;
				hr = pNeg->SuggestAllocatorProperties(&prop);
				pNeg->Release();
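				// (SuggestAllocatorProperties takes effect when the pin is
				// subsequently connected, so it must be called before the
				// graph is connected.)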

				// Now set the actual format of the audio data
				hr = pOutPin->QueryInterface(IID_IAMStreamConfig, (void **)&pCfg);
				if (FAILED(hr))
				{
					pOutPin->Release();
					break;
				}            

				// Read current media type/format
				AM_MEDIA_TYPE *pmt = NULL;
				hr = pCfg->GetFormat(&pmt);

				if (SUCCEEDED(hr))
				{
					// Fill in values for the new format
					WAVEFORMATEX *pWF = (WAVEFORMATEX *) pmt->pbFormat;
					pWF->nChannels = (WORD) enChannel;
					pWF->nSamplesPerSec = enFrequency;
					pWF->nAvgBytesPerSec = lBytesPerSecond;
					pWF->wBitsPerSample = (WORD) (enSample * 8);
					pWF->nBlockAlign = (WORD) (enSample * enChannel);

					// Set the new formattype for the output pin
					hr = pCfg->SetFormat(pmt);
					UtilDeleteMediaType(pmt);
				}

				// Release interfaces
				pCfg->Release();
				pOutPin->Release();

				// Report success only if the format was actually applied
				bResult = SUCCEEDED(hr);
			}
		}while(FALSE);

		return bResult;
	}
	else
	{
		m_enFrequency = enFrequency;
		m_enChannel = enChannel;
		m_enSample = enSample;
		return TRUE;
	}
}
Example #7
void DirectShowGrabber::setCaptureOutputFormat() {
   IAMStreamConfig          *pConfig;
   int                      iCount;
   int                      iSize;
   VIDEOINFOHEADER          *pVih;
   VIDEO_STREAM_CONFIG_CAPS scc;
   AM_MEDIA_TYPE            *pmtConfig;
   int                      formatSet;
   HRESULT                  hr;

   // Reference http://msdn.microsoft.com/library/default.asp?url=/library/en-us/directshow/htm/configurethevideooutputformat.asp

   debug_msg("DirectShowGrabber::setCaptureOutputFormat(): enter...\n");

   formatSet = 0;
   pConfig   = NULL;
   hr        = pBuild_->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
                                     pCaptureFilter_, IID_IAMStreamConfig, (void**)&pConfig);
   if (FAILED(hr)) {
      Grabber::status_ = -1;
      return;
   }

   debug_msg("DirectShowGrabber::setCaptureOutputFormat(): IAMStreamConfig interface acquired\n");

   iCount = iSize = 0;
   hr     = pConfig->GetNumberOfCapabilities(&iCount, &iSize);
   // Check the size to make sure we pass in the correct structure.
   // The alternative output of iSize is AUDIO_STREAM_CONFIG_CAPS, btw.
   if ( iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS) ) {

      for (int iFormat = 0; iFormat < iCount; iFormat++) {
         hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE *)&scc);
         //showErrorMessage(hr);

         if( SUCCEEDED(hr) ) {
            if ((pmtConfig->majortype  == MEDIATYPE_Video)            &&
                  (pmtConfig->subtype    == MEDIASUBTYPE_RGB24)       &&
                  (pmtConfig->formattype == FORMAT_VideoInfo)         &&
                  (pmtConfig->cbFormat   >= sizeof (VIDEOINFOHEADER)) &&
                  (pmtConfig->pbFormat   != NULL)) {

               pVih                        = (VIDEOINFOHEADER *)pmtConfig->pbFormat;
               pVih->bmiHeader.biWidth     = 320;
               pVih->bmiHeader.biHeight    = 240;
               pVih->bmiHeader.biSizeImage = DIBSIZE(pVih->bmiHeader);
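               // DIBSIZE recomputes biSizeImage from the header fields just
               // set above, including DWORD alignment of each scan line.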

               debug_msg("Windows GDI BITMAPINFOHEADER follows:\n");
               debug_msg("biWidth=        %d\n", pVih->bmiHeader.biWidth);
               debug_msg("biHeight=       %d\n", pVih->bmiHeader.biHeight);
               debug_msg("biSize=         %d\n", pVih->bmiHeader.biSize);
               debug_msg("biPlanes=       %d\n", pVih->bmiHeader.biPlanes);
               debug_msg("biBitCount=     %d\n", pVih->bmiHeader.biBitCount);
               debug_msg("biCompression=  %d\n", pVih->bmiHeader.biCompression);
               debug_msg("biSizeImage=    %d\n", pVih->bmiHeader.biSizeImage);
               debug_msg("biXPelsPerMeter=%d\n", pVih->bmiHeader.biXPelsPerMeter);
               debug_msg("biYPelsPerMeter=%d\n", pVih->bmiHeader.biYPelsPerMeter);
               debug_msg("biClrUsed=      %d\n", pVih->bmiHeader.biClrUsed);
               debug_msg("biClrImportant= %d\n", pVih->bmiHeader.biClrImportant);

               hr = pConfig->SetFormat(pmtConfig);
               //showErrorMessage(hr);

               // SetFormat copies the media type, so free our copy here
               // (addresses the leak flagged in the original XXX note).
               DeleteMediaType(pmtConfig);

               formatSet = 1;
               break;

            }
         }
      }
   }
   pConfig->Release();

   if( formatSet )
      debug_msg("DirectShowGrabber::setCaptureOutputFormat:  format set\n");
   else
      debug_msg("DirectShowGrabber::setCaptureOutputFormat:  format not set\n");
}
Example #8
bool CaptureDShow::init()
{
    // Create the pipeline.
    if (FAILED(CoCreateInstance(CLSID_FilterGraph,
                                NULL,
                                CLSCTX_INPROC_SERVER,
                                IID_IGraphBuilder,
                                reinterpret_cast<void **>(&this->m_graph))))
        return false;

    // Create the webcam filter.
    IBaseFilter *webcamFilter = this->findFilterP(this->m_device);

    if (!webcamFilter) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    if (FAILED(this->m_graph->AddFilter(webcamFilter, SOURCE_FILTER_NAME))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    // Create the Sample Grabber filter.
    IBaseFilter *grabberFilter = NULL;

    if (FAILED(CoCreateInstance(CLSID_SampleGrabber,
                                NULL,
                                CLSCTX_INPROC_SERVER,
                                IID_IBaseFilter,
                                reinterpret_cast<void **>(&grabberFilter)))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    if (FAILED(this->m_graph->AddFilter(grabberFilter, L"Grabber"))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    ISampleGrabber *grabberPtr = NULL;

    if (FAILED(grabberFilter->QueryInterface(IID_ISampleGrabber,
                                             reinterpret_cast<void **>(&grabberPtr)))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    if (FAILED(grabberPtr->SetOneShot(FALSE))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    HRESULT hr = grabberPtr->SetBufferSamples(TRUE);

    if (FAILED(hr)) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    if (this->m_ioMethod != IoMethodDirectRead) {
        int type = this->m_ioMethod == IoMethodGrabSample? 0: 1;
        hr = grabberPtr->SetCallback(&this->m_frameGrabber, type);
    }

    this->m_grabber = SampleGrabberPtr(grabberPtr, this->deleteUnknown);

    if (!this->connectFilters(this->m_graph, webcamFilter, grabberFilter)) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    // Create null filter.
    IBaseFilter *nullFilter = NULL;

    if (FAILED(CoCreateInstance(CLSID_NullRenderer,
                                NULL,
                                CLSCTX_INPROC_SERVER,
                                IID_IBaseFilter,
                                reinterpret_cast<void **>(&nullFilter)))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    if (FAILED(this->m_graph->AddFilter(nullFilter, L"NullFilter"))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    if (!this->connectFilters(this->m_graph, grabberFilter, nullFilter)) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    // Set capture format
    QList<int> streams = this->streams();

    if (streams.isEmpty()) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    MediaTypesList mediaTypes = this->listMediaTypes(webcamFilter);

    if (mediaTypes.isEmpty()) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    MediaTypePtr mediaType = streams[0] < mediaTypes.size()?
                                mediaTypes[streams[0]]:
                                mediaTypes.first();

    if (FAILED(grabberPtr->SetMediaType(mediaType.data()))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    PinList pins = this->enumPins(webcamFilter, PINDIR_OUTPUT);

    for (const PinPtr &pin: pins) {
        IAMStreamConfig *pStreamConfig = NULL;
        HRESULT hr =
                pin->QueryInterface(IID_IAMStreamConfig,
                                    reinterpret_cast<void **>(&pStreamConfig));

        if (SUCCEEDED(hr))
            pStreamConfig->SetFormat(mediaType.data());

        if (pStreamConfig)
            pStreamConfig->Release();
    }

    // Run the pipeline
    IMediaControl *control = NULL;

    if (FAILED(this->m_graph->QueryInterface(IID_IMediaControl,
                                             reinterpret_cast<void **>(&control)))) {
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    this->m_id = Ak::id();
    AkCaps caps = this->capsFromMediaType(mediaType);
    this->m_timeBase = AkFrac(caps.property("fps").toString()).invert();

    if (FAILED(control->Run())) {
        control->Release();
        this->m_graph->Release();
        this->m_graph = NULL;

        return false;
    }

    control->Release();

    this->m_localImageControls.clear();

    return true;
}
Example #9
HRESULT CaptureVideo()
{
    HRESULT hr;
    IBaseFilter *pSrcFilter=NULL;

    // Get DirectShow interfaces
    hr = GetInterfaces();
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to get video interfaces!  hr=0x%x"), hr);
        return hr;
    }

    // Attach the filter graph to the capture graph
    hr = g_pCapture->SetFiltergraph(g_pGraph);
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to set capture filter graph!  hr=0x%x"), hr);
        return hr;
    }

    // Use the system device enumerator and class enumerator to find
    // a video capture/preview device, such as a desktop USB video camera.
    hr = FindCaptureDevice(&pSrcFilter);
    if (FAILED(hr))
    {
        // Don't display a message because FindCaptureDevice will handle it
        return hr;
    }
   
    // Add Capture filter to our graph.
    hr = g_pGraph->AddFilter(pSrcFilter, L"Video Capture");
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't add the capture filter to the graph!  hr=0x%x\r\n\r\n") 
            TEXT("If you have a working video capture device, please make sure\r\n")
            TEXT("that it is connected and is not being used by another application.\r\n\r\n")
            TEXT("The sample will now close."), hr);
        pSrcFilter->Release();
        return hr;
    }


    // Copied code
    //========================================
    IAMStreamConfig *pSC;

    hr = g_pCapture->FindInterface(&PIN_CATEGORY_PREVIEW,
                                      &MEDIATYPE_Interleaved,
                                      pSrcFilter, IID_IAMStreamConfig, (void **)&pSC);

    if(FAILED(hr))
        hr = g_pCapture->FindInterface(&PIN_CATEGORY_PREVIEW,
                                      &MEDIATYPE_Video, pSrcFilter,
                                      IID_IAMStreamConfig, (void **)&pSC);

    if (!pSC) {
        return hr;
    }

    int iCount = 0, iSize = 0;
    hr = pSC->GetNumberOfCapabilities(&iCount, &iSize);

    // Check the size to make sure we pass in the correct structure.
    if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
    {
        // Use the video capabilities structure.

        int i = 0;

        for (int iFormat = 0; iFormat < iCount; iFormat++)
        {
            VIDEO_STREAM_CONFIG_CAPS scc;
            AM_MEDIA_TYPE *pmtConfig;

            hr = pSC->GetFormat(&pmtConfig);

            VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)pmtConfig->pbFormat;

            double fps = 30;
            pvi->AvgTimePerFrame = (LONGLONG)(10000000/fps);

            // Set the new resolution first, then recompute biSizeImage from
            // the updated header (the original computed DIBSIZE before
            // changing the width and height, leaving a stale image size).
            pvi->bmiHeader.biWidth = 1920;
            pvi->bmiHeader.biHeight = 1080;
            pvi->bmiHeader.biSizeImage = DIBSIZE(pvi->bmiHeader);

            hr = pSC->SetFormat(pmtConfig);

            

            //hr = pSC->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
            //if (SUCCEEDED(hr))
            //{
            //    /* Examine the format, and possibly use it. */
            //    if (pmtConfig->formattype == FORMAT_VideoInfo) {
            //        long width = HEADER(pmtConfig->pbFormat)->biWidth;
            //        long height = HEADER(pmtConfig->pbFormat)->biHeight;

            //        

            //        if (width == 1920 && height == 1080) {
            //            VIDEOINFOHEADER *info = (VIDEOINFOHEADER *)pmtConfig->pbFormat;

            //            if (i == 0) {
            //                pSC->SetFormat(pmtConfig);
            //                DeleteMediaType(pmtConfig);
            //                break;
            //            }
            //            i++;
            //        }
            //    }

            //    // Delete the media type when you are done.
            //    DeleteMediaType(pmtConfig);
            //}
        }
    }

    if(SUCCEEDED(hr)) {
        pSC->Release();
    }

    

    //========================================

    // Render the preview pin on the video capture filter
    // Use this instead of g_pGraph->RenderFile
    hr = g_pCapture->RenderStream (&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
                                   pSrcFilter, NULL, NULL);
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't render the video capture stream.  hr=0x%x\r\n")
            TEXT("The capture device may already be in use by another application.\r\n\r\n")
            TEXT("The sample will now close."), hr);
        pSrcFilter->Release();
        return hr;
    }

    // Now that the filter has been added to the graph and we have
    // rendered its stream, we can release this reference to the filter.
    pSrcFilter->Release();

    // Set video window style and position
    hr = SetupVideoWindow();
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't initialize video window!  hr=0x%x"), hr);
        return hr;
    }

#ifdef REGISTER_FILTERGRAPH
    // Add our graph to the running object table, which will allow
    // the GraphEdit application to "spy" on our graph
    hr = AddGraphToRot(g_pGraph, &g_dwGraphRegister);
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to register filter graph with ROT!  hr=0x%x"), hr);
        g_dwGraphRegister = 0;
    }
#endif

    // Start previewing video data
    hr = g_pMC->Run();
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't run the graph!  hr=0x%x"), hr);
        return hr;
    }

    // Remember current state
    g_psCurrent = Running;
        
    return S_OK;
}
Example #10
int main(int argc, char* argv[])
{
    ICaptureGraphBuilder2   *pCaptureGraphBuilder = NULL;
    IGraphBuilder           *pGraphBuilder = NULL;
    IBaseFilter             *pSource = NULL;
    IBaseFilter             *pMux = NULL;
    IBaseFilter             *pVideoCompressor = NULL;
    IBaseFilter             *pAudioCompressor = NULL;

    IAMStreamConfig         *pAMStreamConfig = NULL;
    IAMVideoCompression     *pAMVideoCompression = NULL;

    IMediaControl           *pControl = NULL;
    IMediaSeeking           *pSeek = NULL;
    IMediaEvent             *pEvent = NULL;

    HRESULT hr;

    DWORD pdwRegister=0;
    CoInitialize(NULL);

    // Create the capture graph builder.
    CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
                     IID_ICaptureGraphBuilder2, (void **)&pCaptureGraphBuilder);

    // Make the rendering section of the graph.
    pCaptureGraphBuilder->SetOutputFileName(
        &MEDIASUBTYPE_Avi,  // File type.
        L"C:\\STDIUE1.avi",  // File name.
        &pMux,              // pointer to the multiplexer.
        NULL);              // pointer to the file writer.

    // Load the source file.
    pCaptureGraphBuilder->GetFiltergraph(&pGraphBuilder);
    pGraphBuilder->AddSourceFilter(L"C:\\Program Files\\Microsoft Money\\Media\\STDIUE1.avi", L"Source Filter", &pSource);

    // Add the compressor filter.
    CoCreateInstance(CLSID_AVICo, NULL, CLSCTX_INPROC,
                     IID_IBaseFilter, (void **)&pVideoCompressor);
    pGraphBuilder->AddFilter(pVideoCompressor, L"Video Compressor");

    // Render the video stream, through the compressor.
    pCaptureGraphBuilder->RenderStream(
        NULL,       // Output pin category
        NULL,       // Media type
        pSource,       // Source filter
        pVideoCompressor,     // Compressor filter
        pMux);      // Sink filter (the AVI Mux)

    /* CoCreateInstance(CLSID_GSM, NULL, CLSCTX_INPROC,
             IID_IBaseFilter, (void **)&pAudioCompressor);
     pGraphBuilder->AddFilter(pAudioCompressor, L"Audio Compressor");*/

    // Render the audio stream.
    pCaptureGraphBuilder->RenderStream(
        NULL,
        NULL,
        pSource,
        pAudioCompressor,
        pMux);

    // Compress at 100k/second data rate.
    AM_MEDIA_TYPE *pmt;
    pCaptureGraphBuilder->FindInterface(NULL, NULL, pVideoCompressor, IID_IAMStreamConfig, (void **)&pAMStreamConfig);

    pAMStreamConfig->GetFormat(&pmt);

    if (pmt->formattype == FORMAT_VideoInfo)
    {

        ((VIDEOINFOHEADER *)(pmt->pbFormat))->dwBitRate = 100000;

        pAMStreamConfig->SetFormat(pmt);
    }


    // Request key frames every four frames.
    pAMStreamConfig->QueryInterface(IID_IAMVideoCompression, (void **)&pAMVideoCompression);
    pAMVideoCompression->put_KeyFrameRate(4);
    pAMVideoCompression->Release();
    pAMStreamConfig->Release();

    // Run the graph.

    pGraphBuilder->QueryInterface(IID_IMediaControl, (void **)&pControl);
    pGraphBuilder->QueryInterface(IID_IMediaEvent, (void **)&pEvent);

    hr = pMux->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);


    pControl->Run();
    printf("Recompressing... \n");

    long evCode;
    if (SUCCEEDED(hr))
    {
        REFERENCE_TIME rtTotal, rtNow = 0;
        pSeek->GetDuration(&rtTotal);
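        // WaitForCompletion returns E_ABORT each time the 1-second timeout
        // expires before playback finishes, so poll progress until done.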
        while ((pEvent->WaitForCompletion(1000, &evCode)) == E_ABORT)
        {
            pSeek->GetCurrentPosition(&rtNow);
            printf("%d%%\n", (rtNow * 100)/rtTotal);
        }
        pSeek->Release();
    }
    else  // Cannot update the progress.
    {
        pEvent->WaitForCompletion(INFINITE, &evCode);
    }
    pControl->Stop();
    printf("All done\n");

    pSource->Release();
    pMux->Release();
    pVideoCompressor->Release();
    if (pAudioCompressor)   // its creation above is commented out, so guard against NULL
        pAudioCompressor->Release();
    pControl->Release();
    pEvent->Release();
    pCaptureGraphBuilder->Release();
    pGraphBuilder->Release();
    CoUninitialize();

    return 0;
}
Example #11
std::vector<CameraConfig> videoInputCamera::getCameraConfigs(int dev_id) {

	std::vector<CameraConfig> cfg_list;

	int count = getDeviceCount();
	if (count==0) return cfg_list;

	comInit();

	HRESULT hr;
	ICaptureGraphBuilder2 *lpCaptureGraphBuilder;
	IGraphBuilder *lpGraphBuilder;
	IBaseFilter *lpInputFilter;
	IAMStreamConfig *lpStreamConfig;

	char 	nDeviceName[255];
	WCHAR 	wDeviceName[255];

	for (int cam_id=0;cam_id<count;cam_id++) {
		if ((dev_id>=0) && (dev_id!=cam_id)) continue;
		hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void **)&lpCaptureGraphBuilder);
		if (FAILED(hr))	// FAILED is a macro that tests the return value
		{
			printf("ERROR - Could not create the Filter Graph Manager\n");
			comUnInit();
			return cfg_list;
		}

		// Create the Filter Graph Manager.
		hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void**)&lpGraphBuilder);
		if (FAILED(hr))
		{
			printf("ERROR - Could not add the graph builder!\n");
			lpCaptureGraphBuilder->Release();
			comUnInit();
			return cfg_list;
		}

		hr = lpCaptureGraphBuilder->SetFiltergraph(lpGraphBuilder);
		if (FAILED(hr))
		{
			printf("ERROR - Could not set filtergraph\n");
			lpGraphBuilder->Release();
			lpCaptureGraphBuilder->Release();
			comUnInit();
			return cfg_list;
		}

		memset(wDeviceName, 0, sizeof(WCHAR) * 255);
		memset(nDeviceName, 0, sizeof(char) * 255);
		hr = getDevice(&lpInputFilter, cam_id, wDeviceName, nDeviceName);

		if (SUCCEEDED(hr)){
			hr = lpGraphBuilder->AddFilter(lpInputFilter, wDeviceName);
		}else{
			printf("ERROR - Could not find specified video device\n");
			lpGraphBuilder->Release();
			lpCaptureGraphBuilder->Release();
			comUnInit();
			return cfg_list;
		}

		hr = lpCaptureGraphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, lpInputFilter, IID_IAMStreamConfig, (void **)&lpStreamConfig);
		if(FAILED(hr)){
			printf("ERROR: Couldn't config the stream!\n");
			lpInputFilter->Release();
			lpGraphBuilder->Release();
			lpCaptureGraphBuilder->Release();
			comUnInit();
			return cfg_list;
		}

		CameraConfig cam_cfg;
		CameraTool::initCameraConfig(&cam_cfg);

		cam_cfg.driver = DRIVER_DEFAULT;
		cam_cfg.device = cam_id;
		sprintf(cam_cfg.name, "%s", nDeviceName);

		int iCount = 0;
		int iSize = 0;
		hr = lpStreamConfig->GetNumberOfCapabilities(&iCount, &iSize);
		std::vector<CameraConfig> fmt_list;

		if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
		{
			GUID lastFormat = MEDIASUBTYPE_None;
			for (int iFormat = 0; iFormat < iCount; iFormat+=2)
			{
				VIDEO_STREAM_CONFIG_CAPS scc;
				AM_MEDIA_TYPE *pmtConfig;
				hr =  lpStreamConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
				if (SUCCEEDED(hr)){

					if ( pmtConfig->subtype != lastFormat) {

						if (fmt_list.size()>0) {
							std::sort(fmt_list.begin(), fmt_list.end());
							cfg_list.insert( cfg_list.end(), fmt_list.begin(), fmt_list.end() );
							fmt_list.clear();
						}
						cam_cfg.cam_format = getMediaSubtype(pmtConfig->subtype);
						lastFormat = pmtConfig->subtype;
					}

					int stepX = scc.OutputGranularityX;
					int stepY = scc.OutputGranularityY;
					if(stepX < 1 || stepY < 1) continue;

					else if ((stepX==1) && (stepY==1)) {

						cam_cfg.cam_width = scc.InputSize.cx;
						cam_cfg.cam_height = scc.InputSize.cy;

						int maxFrameInterval = scc.MaxFrameInterval;
						if (maxFrameInterval==0) maxFrameInterval = 10000000;
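						// A MaxFrameInterval of 0 means the driver reported no
						// upper bound; fall back to 1 s (100 ns units).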
						float last_fps=-1;
						VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
						for (int iv=scc.MinFrameInterval;iv<=maxFrameInterval;iv=iv*2) {
							pVih->AvgTimePerFrame = iv;
							hr = lpStreamConfig->SetFormat(pmtConfig);
							if (hr==S_OK) {
								hr = lpStreamConfig->GetFormat(&pmtConfig);
								float fps = ((int)floor(100000000.0f/(float)pVih->AvgTimePerFrame + 0.5f))/10.0f;
								if (fps!=last_fps) {
									cam_cfg.cam_fps = fps;
									fmt_list.push_back(cam_cfg);
									last_fps=fps;
								}
							}
						}

					} else {
						int x,y;
						for (x=scc.MinOutputSize.cx,y=scc.MinOutputSize.cy;x<=scc.MaxOutputSize.cx && y<=scc.MaxOutputSize.cy;x+=stepX,y+=stepY) {

							cam_cfg.cam_width = x;
							cam_cfg.cam_height = y;

							int maxFrameInterval = scc.MaxFrameInterval;
							if (maxFrameInterval==0) maxFrameInterval = 10000000;
							float last_fps=-1;
							VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
							for (int iv=scc.MinFrameInterval;iv<=maxFrameInterval;iv=iv*2) {
								pVih->AvgTimePerFrame = iv;
								hr = lpStreamConfig->SetFormat(pmtConfig);
								if (hr==S_OK) {
									hr = lpStreamConfig->GetFormat(&pmtConfig);
									float fps = ((int)floor(100000000.0f/(float)pVih->AvgTimePerFrame + 0.5f))/10.0f;
									if (fps!=last_fps) {
										cam_cfg.cam_fps = fps;
										fmt_list.push_back(cam_cfg);
										last_fps=fps;
									}
								}
							}

						}
					}

					deleteMediaType(pmtConfig);
				}
			}
		}

		if (fmt_list.size()>0) {
			std::sort(fmt_list.begin(), fmt_list.end());
			cfg_list.insert( cfg_list.end(), fmt_list.begin(), fmt_list.end() );
			fmt_list.clear();
		}

		lpStreamConfig->Release();
		lpInputFilter->Release();
		lpGraphBuilder->Release();
		lpCaptureGraphBuilder->Release();
	}

	comUnInit();
	return cfg_list;
}
Example #12
bool MIPDirectShowCapture::setFormat(int w, int h, real_t rate)
{
	HRESULT hr;

	IAMStreamConfig *pConfig = 0;

	hr = m_pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, 0, m_pCaptDevice, IID_IAMStreamConfig, (void**)&pConfig);
	if (HR_FAILED(hr))
	{
		setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECONFIG);
		return false;
	}

	int count = 0;
	int s = 0;
	
	hr = pConfig->GetNumberOfCapabilities(&count, &s);
	if (HR_FAILED(hr))
	{
		pConfig->Release();
		setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECAPS);
		return false;
	}

	if (s != sizeof(VIDEO_STREAM_CONFIG_CAPS))
	{
		pConfig->Release();
		setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_INVALIDCAPS);
		return false;
	}

	for (int i = 0; i < count; i++)
	{
        VIDEO_STREAM_CONFIG_CAPS caps;
        AM_MEDIA_TYPE *pMediaType;

        hr = pConfig->GetStreamCaps(i, &pMediaType, (BYTE*)&caps);
        if (HR_SUCCEEDED(hr))
        {
			if ((pMediaType->majortype == MEDIATYPE_Video) &&
				(pMediaType->subtype == m_selectedGuid) &&
				(pMediaType->formattype == FORMAT_VideoInfo) &&
				(pMediaType->cbFormat >= sizeof (VIDEOINFOHEADER)) &&
				(pMediaType->pbFormat != 0))
			{
				VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pMediaType->pbFormat;
				
				pVih->bmiHeader.biWidth = w;
				pVih->bmiHeader.biHeight = h;
				pVih->bmiHeader.biSizeImage = DIBSIZE(pVih->bmiHeader);
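				// AvgTimePerFrame is in 100 ns units, so 10^7 / rate gives
				// the requested frame rate.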
				pVih->AvgTimePerFrame = (REFERENCE_TIME)(10000000.0/rate);

				hr = pConfig->SetFormat(pMediaType);
				if (HR_SUCCEEDED(hr))
				{
					CoTaskMemFree(pMediaType->pbFormat);
					CoTaskMemFree(pMediaType); // free the AM_MEDIA_TYPE struct too
					pConfig->Release();
					return true;
				}
			}

			if (pMediaType->pbFormat != 0)
				CoTaskMemFree(pMediaType->pbFormat);
			CoTaskMemFree(pMediaType);
		}
	}

	pConfig->Release();
	setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTSETCAPS);
	return false;
}
Example #13
static GstCaps *
gst_dshowaudiosrc_get_caps (GstBaseSrc * basesrc)
{
  HRESULT hres = S_OK;
  IBindCtx *lpbc = NULL;
  IMoniker *audiom = NULL;
  DWORD dwEaten;
  GstDshowAudioSrc *src = GST_DSHOWAUDIOSRC (basesrc);
  gunichar2 *unidevice = NULL;

  if (src->device) {
    g_free (src->device);
    src->device = NULL;
  }

  src->device =
      gst_dshow_getdevice_from_devicename (&CLSID_AudioInputDeviceCategory,
      &src->device_name);
  if (!src->device) {
    GST_ERROR ("No audio device found.");
    return NULL;
  }
  unidevice =
      g_utf8_to_utf16 (src->device, strlen (src->device), NULL, NULL, NULL);

  if (!src->audio_cap_filter) {
    hres = CreateBindCtx (0, &lpbc);
    if (SUCCEEDED (hres)) {
      hres =
          MkParseDisplayName (lpbc, (LPCOLESTR) unidevice, &dwEaten, &audiom);
      if (SUCCEEDED (hres)) {
        hres = audiom->BindToObject (lpbc, NULL, IID_IBaseFilter,
            (LPVOID *) & src->audio_cap_filter);
        audiom->Release ();
      }
      lpbc->Release ();
    }
  }

  if (src->audio_cap_filter && !src->caps) {
    /* get the capture pins supported types */
    IPin *capture_pin = NULL;
    IEnumPins *enumpins = NULL;
    HRESULT hres;

    hres = src->audio_cap_filter->EnumPins (&enumpins);
    if (SUCCEEDED (hres)) {
      while (enumpins->Next (1, &capture_pin, NULL) == S_OK) {
        IKsPropertySet *pKs = NULL;

        hres =
            capture_pin->QueryInterface (IID_IKsPropertySet, (LPVOID *) & pKs);
        if (SUCCEEDED (hres) && pKs) {
          DWORD cbReturned;
          GUID pin_category;
          RPC_STATUS rpcstatus;

          hres =
              pKs->Get (AMPROPSETID_Pin,
              AMPROPERTY_PIN_CATEGORY, NULL, 0, &pin_category, sizeof (GUID),
              &cbReturned);

          /* we only want capture pins */
          if (UuidCompare (&pin_category, (UUID *) & PIN_CATEGORY_CAPTURE,
                  &rpcstatus) == 0) {
            IAMStreamConfig *streamcaps = NULL;

            if (SUCCEEDED (capture_pin->QueryInterface (IID_IAMStreamConfig,
                        (LPVOID *) & streamcaps))) {
              src->caps =
                  gst_dshowaudiosrc_getcaps_from_streamcaps (src, capture_pin,
                  streamcaps);
              streamcaps->Release ();
            }
          }
          pKs->Release ();
        }
        capture_pin->Release ();
      }
      enumpins->Release ();
    }
  }

  if (unidevice) {
    g_free (unidevice);
  }

  if (src->caps) {
    return gst_caps_ref (src->caps);
  }

  return NULL;
}
Example #14
QVector<VideoMode> DirectShow::getDeviceModes(QString devName)
{
    QVector<VideoMode> modes;

    IBaseFilter* devFilter = getDevFilter(devName);
    if (!devFilter)
        return modes;

    // The outer loop tries to find a valid output pin
    GUID category;
    DWORD r2;
    IEnumPins *pins = nullptr;
    IPin *pin;
    if (devFilter->EnumPins(&pins) != S_OK)
        return modes;
    while (pins->Next(1, &pin, nullptr) == S_OK)
    {
        IKsPropertySet *p = nullptr;
        PIN_INFO info;

        pin->QueryPinInfo(&info);
        info.pFilter->Release();
        if (info.dir != PINDIR_OUTPUT)
            goto next;
        if (pin->QueryInterface(IID_IKsPropertySet, (void**)&p) != S_OK)
            goto next;
        if (p->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY,
                nullptr, 0, &category, sizeof(GUID), &r2) != S_OK)
            goto next;
        if (!IsEqualGUID(category, PIN_CATEGORY_CAPTURE))
            goto next;

        // Now we can list the video modes for the current pin
        // Prepare for another wall of spaghetti DIRECT SHOW QUALITY code
        {
            IAMStreamConfig *config = nullptr;
            VIDEO_STREAM_CONFIG_CAPS *vcaps = nullptr;
            int size, n;
            if (pin->QueryInterface(IID_IAMStreamConfig, (void**)&config) != S_OK)
                goto next;
            if (config->GetNumberOfCapabilities(&n, &size) != S_OK)
                goto pinend;
            assert(size == sizeof(VIDEO_STREAM_CONFIG_CAPS));
            vcaps = new VIDEO_STREAM_CONFIG_CAPS;

            for (int i=0; i<n; ++i)
            {
                AM_MEDIA_TYPE* type = nullptr;
                if (config->GetStreamCaps(i, &type, (BYTE*)vcaps) != S_OK)
                    goto nextformat;

                if (!IsEqualGUID(type->formattype, FORMAT_VideoInfo)
                    && !IsEqualGUID(type->formattype, FORMAT_VideoInfo2))
                    goto nextformat;

                VideoMode mode;
                mode.width = vcaps->MaxOutputSize.cx;
                mode.height = vcaps->MaxOutputSize.cy;
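                // MinFrameInterval is in 100 ns units, so 1e7 / interval is
                // the pin's maximum frame rate.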
                mode.FPS = 1e7 / vcaps->MinFrameInterval;
                if (!modes.contains(mode))
                    modes.append(std::move(mode));

nextformat:
                // type is nullptr when GetStreamCaps failed, so guard the frees
                if (type)
                {
                    if (type->pbFormat)
                        CoTaskMemFree(type->pbFormat);
                    CoTaskMemFree(type);
                }
            }
pinend:
            config->Release();
            delete vcaps;
        }
next:
        if (p)
            p->Release();
        pin->Release();
    }

    return modes;
}
Example #15
static GstCaps *
gst_dshowvideosrc_getcaps_from_streamcaps (GstDshowVideoSrc * src, IPin * pin)
{
    GstCaps *caps = NULL;
    HRESULT hres = S_OK;
    int icount = 0;
    int isize = 0;
    VIDEO_STREAM_CONFIG_CAPS vscc;
    int i = 0;
    IAMStreamConfig *streamcaps = NULL;

    hres = pin->QueryInterface (IID_IAMStreamConfig, (LPVOID *) & streamcaps);
    if (FAILED (hres)) {
        GST_ERROR ("Failed to retrieve IAMStreamConfig (error=0x%x)", hres);
        return NULL;
    }

    streamcaps->GetNumberOfCapabilities (&icount, &isize);

    if (isize != sizeof (vscc)) {
        streamcaps->Release ();
        return NULL;
    }

    caps = gst_caps_new_empty ();

    for (i = 0; i < icount; i++) {

        GstCapturePinMediaType *pin_mediatype =
            gst_dshow_new_pin_mediatype_from_streamcaps (pin, i, streamcaps);

        if (pin_mediatype) {

            GstCaps *mediacaps = NULL;
            GstVideoFormat video_format =
                gst_dshow_guid_to_gst_video_format (pin_mediatype->mediatype);

            if (video_format != GST_VIDEO_FORMAT_UNKNOWN) {
                mediacaps = gst_dshow_new_video_caps (video_format, NULL,
                                                      pin_mediatype);

            } else if (gst_dshow_check_mediatype (pin_mediatype->mediatype,
                                                  MEDIASUBTYPE_dvsd, FORMAT_VideoInfo)) {
                mediacaps =
                    gst_dshow_new_video_caps (GST_VIDEO_FORMAT_UNKNOWN,
                                              "video/x-dv, systemstream=FALSE", pin_mediatype);

            } else if (gst_dshow_check_mediatype (pin_mediatype->mediatype,
                                                  MEDIASUBTYPE_dvsd, FORMAT_DvInfo)) {
                mediacaps =
                    gst_dshow_new_video_caps (GST_VIDEO_FORMAT_UNKNOWN,
                                              "video/x-dv, systemstream=TRUE", pin_mediatype);

                pin_mediatype->granularityWidth = 0;
                pin_mediatype->granularityHeight = 0;
            } else if(gst_dshow_check_mediatype(pin_mediatype->mediatype,
                                                MEDIASUBTYPE_MJPG, FORMAT_VideoInfo)) {
                mediacaps = gst_dshow_new_video_caps(GST_VIDEO_FORMAT_UNKNOWN,
                                                     "image/jpeg", pin_mediatype);
            }


            if (mediacaps) {
                src->pins_mediatypes =
                    g_list_append (src->pins_mediatypes, pin_mediatype);
                gst_caps_append (caps, mediacaps);
            } else {
                /* failed to convert dshow caps */
                gst_dshow_free_pin_mediatype (pin_mediatype);
            }
        }
    }

    streamcaps->Release ();

    if (caps && gst_caps_is_empty (caps)) {
        gst_caps_unref (caps);
        caps = NULL;
    }

    return caps;
}
Example #16
// use cameraID 1 for the first camera, and so on
HRESULT VideoTexture::init(int cameraID)
{
	if (cameraID <= 0) return S_FALSE;

	glEnable(GL_TEXTURE_2D);

	// Texture -> This will be put into the camera module	
	glGenTextures(1, textures);					// Create The Texture
	// Typical Texture Generation Using Data From The Bitmap
	for (int i = 0; i < 1; i++)
	{
		//glActiveTexture(GL_TEXTURE0 + i);
		glBindTexture(GL_TEXTURE_2D, textures[i]);
		// Generate The Texture (640x480... make changeable!)
		//glTexImage2D(GL_TEXTURE_2D, 0, 3, 640, 480, 0, GL_RGB, GL_UNSIGNED_BYTE, ...THe data111!!!);
		glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);	// Linear Filtering
		glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);	// Linear Filtering
		// Enable Texture Mapping
		glTexImage2D(GL_TEXTURE_2D, 0, 3, TEXTURE_WIDTH, TEXTURE_HEIGHT, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
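		// Passing NULL allocates texture storage only; captured frames are
		// uploaded into it later.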
	}

	// Video stuff:
	// Create capture graph builder:
	HRESULT hr = InitCaptureGraphBuilder(&pGraph, &pBuild);
	if (FAILED(hr)) return hr;
	IEnumMoniker *enumerator;
	hr = EnumerateDevices(CLSID_VideoInputDeviceCategory, &enumerator);
	//DisplayDeviceInformation(enumerator);
	// Take the first camera:
	IMoniker *pMoniker = NULL;
	for (int i = 0; i < cameraID; i++)
	{
		enumerator->Next(1, &pMoniker, NULL);
	}
	IBaseFilter *pCap = NULL;
	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pCap);
	if (SUCCEEDED(hr))
	{
		hr = pGraph->AddFilter(pCap, L"Capture Filter");
		if (FAILED(hr)) return hr;
	}
	else return hr;

	// Create the Sample Grabber which we will use
	// To take each frame for texture generation
	hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
							IID_ISampleGrabber, (void **)&pGrabber);
	if (FAILED(hr)) return hr;
	hr = pGrabber->QueryInterface(IID_IBaseFilter, (void **)&pGrabberBase);
		// We have to set the 24-bit RGB desire here
	// So that the proper conversion filters
	// Are added automatically.
	AM_MEDIA_TYPE desiredType;
	memset(&desiredType, 0, sizeof(desiredType));
	desiredType.majortype = MEDIATYPE_Video;
	desiredType.subtype = MEDIASUBTYPE_RGB24;
	desiredType.formattype = FORMAT_VideoInfo;
	pGrabber->SetMediaType(&desiredType);
	pGrabber->SetBufferSamples(TRUE);
	// add to Graph
	pGraph->AddFilter(pGrabberBase, L"Grabber");

    /* Null render filter */
    hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&pNullRender);
    if(FAILED(hr)) return hr;
	pGraph->AddFilter(pNullRender, L"Render");

	// Connect the graph
    hr = ConnectFilters(pGraph, pCap, pGrabberBase); 
    if(FAILED(hr)) return hr;
	hr = ConnectFilters(pGraph, pGrabberBase, pNullRender);

	// Set output format of capture:
	IAMStreamConfig *pConfig = NULL;
    hr = pBuild->FindInterface(
                &PIN_CATEGORY_CAPTURE, // Capture pin.
                0,    // Any media type.
                pCap, // Pointer to the capture filter.
                IID_IAMStreamConfig, (void**)&pConfig);
	if (FAILED(hr)) return hr;
	AM_MEDIA_TYPE *pmtConfig;
	hr = pConfig->GetFormat(&pmtConfig);
	if (FAILED(hr)) return hr;
		
	// Try and find a good video format
    int iCount = 0, iSize = 0;
    hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);               
    // Check the size to make sure we pass in the correct structure.
    if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
    {
		// Use the video capabilities structure.               
        for (int iFormat = 0; iFormat < iCount; iFormat++)
        {
			VIDEO_STREAM_CONFIG_CAPS scc;
			AM_MEDIA_TYPE *pmtConfig;
			hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
			if (SUCCEEDED(hr))
			{
				VIDEOINFOHEADER *hdr = (VIDEOINFOHEADER *)pmtConfig->pbFormat;
				if (hdr->bmiHeader.biWidth == CAM_WIDTH &&
					hdr->bmiHeader.biHeight == CAM_HEIGHT &&
					hdr->bmiHeader.biBitCount == 24)
				{
					pConfig->SetFormat(pmtConfig);
				}
				// Free the media type returned by GetStreamCaps
				if (pmtConfig->pbFormat)
					CoTaskMemFree(pmtConfig->pbFormat);
				CoTaskMemFree(pmtConfig);
			}
		}
	}
	pConfig->Release();

	// Set camera stuff
	IAMCameraControl *pCamControl = NULL;
	hr = pCap->QueryInterface(IID_IAMCameraControl, (void **)&pCamControl);
	if (FAILED(hr)) return hr;
	// Get the range and default value. 
	long Min, Max, Step, Default, Flags;
	// For getting: long Val;
	hr = pCamControl->GetRange(CameraControl_Focus, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pCamControl->Set(CameraControl_Focus, 0, CameraControl_Flags_Manual);
#if 0
	hr = pCamControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pCamControl->Set(CameraControl_Exposure, -4, CameraControl_Flags_Manual);
#endif
	pCamControl->Release();
	IAMVideoProcAmp *pProcAmp = 0;
	hr = pCap->QueryInterface(IID_IAMVideoProcAmp, (void**)&pProcAmp);
	if (FAILED(hr)) return hr;
#if 0
	hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Brightness, 30, VideoProcAmp_Flags_Manual);
	hr = pProcAmp->GetRange(VideoProcAmp_Gain, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Gain, 30, VideoProcAmp_Flags_Manual);
	hr = pProcAmp->GetRange(VideoProcAmp_WhiteBalance, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_WhiteBalance, 4500, VideoProcAmp_Flags_Manual);
	hr = pProcAmp->GetRange(VideoProcAmp_Saturation, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Saturation, 100, VideoProcAmp_Flags_Manual);		
	hr = pProcAmp->GetRange(VideoProcAmp_Contrast, &Min, &Max, &Step, &Default, &Flags);
	if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Contrast, 6, VideoProcAmp_Flags_Manual);		
#endif
	pProcAmp->Release();

	hr = pMediaControl->Run();
	return hr;
}
Example #17
int 
recChannel_t::source_format(char* newFormat)
{
     __CONTEXT("recChannel_t::source_format");

	int hr = 0;
    bool formatFound = false;

	IAMStreamConfig *pConfig = NULL;
	AM_MEDIA_TYPE * format = NULL;
	
	pControl->StopWhenReady();

    ql_t<AM_MEDIA_TYPE *> auxFormats = camInfo->getFormatList();
	
    for(int i = 0; i<auxFormats.len() ; i++)
    {
		AM_MEDIA_TYPE format = *(auxFormats.nth(i));
		IAMStreamConfig *pConfig = NULL;
		IVideoWindow * pWindow = NULL;
		
		char subtypeName [100];
		memset(subtypeName,0,100);
		GetGUIDString(subtypeName,&format.subtype);	
	
		VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*) format.pbFormat;
		
		if((pVih==NULL && strcmp(newFormat,sourceFormat)==0 )||
		   (pVih!=NULL &&
		    pVih->bmiHeader.biHeight == capInfo.heigth &&
		    pVih->bmiHeader.biWidth == capInfo.width &&
		    strcmp(subtypeName,newFormat)==0) ||
			camInfo->getKind() == SHARED
			)
		{
		
			if (strcmp(sourceFormat,newFormat))
			{
				memset(sourceFormat,0,100);
				strcpy(sourceFormat,newFormat);
			}
			
			if (!hr && (camInfo->getKind() == CAM || camInfo->getKind() == SHARED)){
				camInfo->output->Disconnect();
				hr = camInfo->output->QueryInterface(IID_IAMStreamConfig, (void**)&pConfig);
			    //pVih->AvgTimePerFrame = 666666;//
				if (pVih)
					pVih->AvgTimePerFrame = 333333/(frameRate);
				if (pConfig)
				{
					hr = pConfig->SetFormat(&format);
					actualFormat = format;
					pConfig->Release();
				}
			}
	        formatFound = true;
			break;
		}
	    
    }
	
    if (!formatFound)
    {
        IAMStreamConfig *pConfig = NULL;
		if (camInfo->getKind() == CAM || 
            camInfo->getKind() == SHARED)
        {
            VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*) actualFormat.pbFormat;
            camInfo->output->Disconnect();
            hr = camInfo->output->QueryInterface(IID_IAMStreamConfig, (void**)&pConfig);
            //pVih->AvgTimePerFrame = 666666;
            if (pConfig)
            {
                int hr = pConfig->SetFormat(&actualFormat);
                pConfig->Release();
            }
        }
    }
        
	    NOTIFY("reChannel_t"
			   "\r\n=========================================\r\n"
               "Channel %d : Source Description...\r\n"
               "- sourceName: %s\r\n"
               "- capture Size: %dx%d\r\n"
               "- supported Formats: %s\r\n"
               "- Window Info: (%d,%d,%d,%d)\r\n"
               "- Title: %s\r\n"
               "=========================================\r\n",
               getId(),
               camInfo->getCamName(),
               capInfo.width,
               capInfo.heigth,
               camInfo->getSupportedFormats(),
               windowInfo.top,
               windowInfo.left,
               windowInfo.width,
               windowInfo.heigth,
               title);

	remap();

	if (mapping){
		map();
	}
	return 0;
		
}
Example #18
HRESULT CAudioCompressorFormats::GetSupportedFormats(std::vector<WAVEFORMATEX*>& listFormats)
{
	CStringW swDeviceName(m_sAudComp);

	HRESULT hr = m_pSysDevEnum->CreateClassEnumerator(CLSID_AudioCompressorCategory, &m_pEnumCat, 0);
	if(NULL == m_pEnumCat)
		return E_POINTER;
	if(S_OK == hr)
	{
		ULONG cFetched;
		while(m_pEnumCat->Next(1, &m_pMoniker, &cFetched) == S_OK)
		{
			IPropertyBag *pPropBag;
			hr = m_pMoniker->BindToStorage(0, 0, IID_IPropertyBag, 
				(void **)&pPropBag);
			if (SUCCEEDED(hr))
			{
				VARIANT varName;
				VariantInit(&varName);
				hr = pPropBag->Read(L"FriendlyName", &varName, 0);
				if (SUCCEEDED(hr))
				{
					if(wcscmp(varName.bstrVal, swDeviceName.GetBuffer()) == 0)
					{
						m_pMoniker->AddRef();
						break;
					}
				}
				VariantClear(&varName);
				pPropBag->Release();
			}
			m_pMoniker->Release();
		}
	}
	if(m_pMoniker)
	{
		IBaseFilter *pFilter = 0;
		hr = m_pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pFilter);
		if(SUCCEEDED(hr))
		{
			IEnumPins *pEnum = NULL;
			hr = pFilter->EnumPins(&pEnum);
			if (SUCCEEDED(hr))
			{
				IPin *pPin = NULL;
				while(S_OK == pEnum->Next(1, &pPin, NULL))
				{
					IAMStreamConfig *pConf;
					hr = pPin->QueryInterface(IID_IAMStreamConfig, (void**)&pConf);
					if (SUCCEEDED(hr))
					{
						CString sFormat;
						int iCount, iSize;
						BYTE *pSCC = NULL;
						AM_MEDIA_TYPE *pmt;
						float fSample;
						hr = pConf->GetNumberOfCapabilities(&iCount, &iSize);
						pSCC = new BYTE[iSize];
						if (pSCC == NULL)
						{
							return E_POINTER;
						}
						if (iSize == sizeof(AUDIO_STREAM_CONFIG_CAPS))
						{
							// Use the audio capabilities structure.
							for (int iFormat = 0; iFormat < iCount; iFormat++)
							{
								AUDIO_STREAM_CONFIG_CAPS scc;
								AM_MEDIA_TYPE *pmtConfig;
								hr = pConf->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
								if (SUCCEEDED(hr))
								{
									if(pmtConfig->formattype == FORMAT_WaveFormatEx)
									{
										WAVEFORMATEX *pFormat = new WAVEFORMATEX(*(reinterpret_cast<WAVEFORMATEX*>(pmtConfig->pbFormat)));
										if(pFormat)
										{
											listFormats.push_back(pFormat);
										}
										FreeMediaType(*pmtConfig); 
										CoTaskMemFree(pmtConfig);
									}
								}
							}
							delete [] pSCC; // allocated with new[], so use delete[]
						}
						pConf->Release();
					}
					pPin->Release();
				}
				pEnum->Release();
			}
			pFilter->Release();
		}
	}
	return hr;
}
Example #19
BOOL CBoxView::APlaying()
{
	if (m_pGraph == NULL)
	{
		return FALSE;
	}

	CComPtr<IBaseFilter> pAudioInputFilter;

	HRESULT hr = S_OK;

	// Find the audio input filter and add it to the graph
	hr = FindInputFilters((void**)&pAudioInputFilter, CLSID_AudioInputDeviceCategory);
	if (NULL == pAudioInputFilter)
	{
		TRACE(L"[SVC]  CBoxView:: Could not create the Filter AudioInputFilter");
		return FALSE;
	}

	CComPtr<IPin> pinIn;
	CComPtr<IAMAudioInputMixer> pPinMixer;
	GetUnconnectedPin(pAudioInputFilter, PINDIR_INPUT, &pinIn);
	hr = pinIn->QueryInterface(IID_IAMAudioInputMixer, (void **)&pPinMixer);
	if (SUCCEEDED(hr))
	{
		hr = pPinMixer->put_Enable(TRUE);
	}

	hr = m_pGraph->AddFilter(pAudioInputFilter, L"ACapture");
	// Create the render filter and add it to the graph
	CComPtr <IBaseFilter> pAudioRenderer = NULL;
	hr = CoCreateInstance(CLSID_AudioRender, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void **)&pAudioRenderer);
	hr = m_pGraph->AddFilter(pAudioRenderer, L"Audio Renderer");
	if (pAudioRenderer == NULL)
	{
		TRACE(L"[SVC]  CBoxView:: Could not create the Filter AudioRenderer");
		return FALSE;
	}

	// Get the microphone's output pin
	CComPtr<IPin> pAudioOutput;
	CComPtr<IEnumPins> pEnum;
	pAudioInputFilter->EnumPins(&pEnum);
	hr = pEnum->Reset();
	hr = pEnum->Next(1, &pAudioOutput, NULL);

	// Configure the microphone output pin
	IAMStreamConfig *pCfg = NULL;
	hr = pAudioOutput->QueryInterface(IID_IAMStreamConfig, (void **)&pCfg);
	// Read current media type/format   
	AM_MEDIA_TYPE *pmt = NULL;
	hr = pCfg->GetFormat(&pmt);
	WAVEFORMATEX *pWF = (WAVEFORMATEX *)pmt->pbFormat;
	// Release interfaces   
	pCfg->Release();

	// Configure buffering on the microphone output pin
	IAMBufferNegotiation *pNeg;
	pAudioOutput->QueryInterface(IID_IAMBufferNegotiation, (void **)&pNeg);
	ALLOCATOR_PROPERTIES prop = { 0 };
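	// Request buffers holding roughly 50 ms of audio each; the -1 values
	// leave the remaining allocator properties at their defaults.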
	prop.cbBuffer = pWF->nAvgBytesPerSec * 50 / 1000;
	prop.cBuffers = -1;
	prop.cbAlign = -1;
	prop.cbPrefix = -1;
	hr = pNeg->SuggestAllocatorProperties(&prop);
	pNeg->Release();

#if 0
	//method 1:Render RUN
	hr = m_pCGB->RenderStream(&PIN_CATEGORY_PREVIEW,
		&MEDIATYPE_Audio,
		pAudioInputFilter,
		NULL,
		NULL);
	
#else

	//method 2:Connect RUN
	hr = ConnectFilters(m_pGraph, pAudioOutput, pAudioRenderer);

#endif

	return SUCCEEDED(hr); // TRUE when the audio graph was connected successfully
}
Example #20
static int v4w_open_videodevice(V4wState *s)
{
	// Initialize COM
	CoInitialize(NULL);

	// get a Graph
	HRESULT hr= CoCreateInstance (CLSID_FilterGraph,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IGraphBuilder, //IID_IBaseFilter,
		(void **)&s->m_pGraph);
	if(FAILED(hr))
	{
		return -1;
	}

	// get a CaptureGraphBuilder2
	hr= CoCreateInstance (CLSID_CaptureGraphBuilder2,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_ICaptureGraphBuilder2, //IID_IBaseFilter,
		(void **)&s->m_pBuilder);
	if(FAILED(hr))
	{
		return -2;
	}

	// connect capture graph builder with the graph
	s->m_pBuilder->SetFiltergraph(s->m_pGraph);

	// get mediacontrol so we can start and stop the filter graph
	hr=s->m_pGraph->QueryInterface (IID_IMediaControl, (void **)&s->m_pControl);
	if(FAILED(hr))
	{
		return -3;
	}


	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;

	ULONG nFetched = 0;

	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
	if(FAILED(hr))
	{
		return -4;
	}

	hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (FAILED(hr) || pEnumMoniker == NULL) {
		//printf("no device\n");
		return -5;
	}

	pEnumMoniker->Reset();
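
	// Walk the video-input monikers, matching on the FriendlyName property;
	// an empty s->dev selects the first device found.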

	int pos=0;
	while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
	{
		IPropertyBag *pBag;
		hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag );
		if( hr != S_OK )
		{
			pMoniker->Release();
			continue;
		}

		if (s->dev[0]=='\0')
			break;

		VARIANT var;
		VariantInit(&var);
		hr = pBag->Read( L"FriendlyName", &var, NULL ); 
		if( hr != S_OK )
		{
			pBag->Release();
			pMoniker->Release();
			continue;
		}
		//USES_CONVERSION;
		char szName[256];

		WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0);
		VariantClear(&var); 

		if (strcmp(szName, s->dev)==0)
			break;

		pMoniker->Release();
		pBag->Release();
		pMoniker=NULL;
		pBag=NULL;
	}

	if(pMoniker==NULL)
	{
		return -6;
	}

	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
	if(FAILED(hr))
	{
		return -7;
	}

	s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

	pMoniker->Release();
	pEnumMoniker->Release();
	pCreateDevEnum->Release();


	GUID pPinCategory;

	if (try_format(s->m_pDeviceFilter, s->pix_fmt, &pPinCategory)==0)
		; // the requested pixel format is supported; keep it
	else if (try_format(s->m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0)
		s->pix_fmt = MS_YUV420P;
	else if (try_format(s->m_pDeviceFilter,MS_YUY2, &pPinCategory)==0)
		s->pix_fmt = MS_YUY2;
	else if (try_format(s->m_pDeviceFilter,MS_YUYV, &pPinCategory)==0)
		s->pix_fmt = MS_YUYV;
	else if (try_format(s->m_pDeviceFilter,MS_UYVY, &pPinCategory)==0)
		s->pix_fmt = MS_UYVY;
	else if (try_format(s->m_pDeviceFilter,MS_RGB24, &pPinCategory)==0)
		s->pix_fmt = MS_RGB24;
	else
	{
		ms_error("Unsupported video pixel format.");
		return -8;
	}

	if (s->pix_fmt == MS_YUV420P)
		ms_message("Driver supports YUV420P, using that format.");
	else if (s->pix_fmt == MS_YUY2)
		ms_message("Driver supports YUY2 (YUYV), using that format.");
	else if (s->pix_fmt == MS_YUYV)
		ms_message("Driver supports YUV422, using that format.");
	else if (s->pix_fmt == MS_UYVY)
		ms_message("Driver supports UYVY, using that format.");
	else if (s->pix_fmt == MS_RGB24)
		ms_message("Driver supports RGB24, using that format.");

	if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H);
	else
	{
		ms_error("No supported size found for format.");
		/* size not supported? */
		return -9;
	}

	// get DXFilter
	s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
	if(s->m_pDXFilter==NULL)
	{
		return -10;
	}
	s->m_pDXFilter->AddRef();

	CMediaType mt;
	mt.SetType(&MEDIATYPE_Video);

	GUID m = MEDIASUBTYPE_RGB24;
	if (s->pix_fmt == MS_YUV420P)
		m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
	else if (s->pix_fmt == MS_YUY2)
		m = MEDIASUBTYPE_YUY2;
	else if (s->pix_fmt == MS_YUYV)
		m = MEDIASUBTYPE_YUYV;
	else if (s->pix_fmt == MS_UYVY)
		m = MEDIASUBTYPE_UYVY;
	else if (s->pix_fmt == MS_RGB24)
		m = MEDIASUBTYPE_RGB24;
	mt.SetSubtype(&m);

	mt.formattype = FORMAT_VideoInfo;
	mt.SetTemporalCompression(FALSE);

	VIDEOINFO *pvi = (VIDEOINFO *)
		mt.AllocFormatBuffer(sizeof(VIDEOINFO));
	if (NULL == pvi)
		return -11;
	ZeroMemory(pvi, sizeof(VIDEOINFO));

	if (s->pix_fmt == MS_YUV420P)
		pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
	else if (s->pix_fmt == MS_YUY2)
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
	else if (s->pix_fmt == MS_YUYV)
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','V');
	else if (s->pix_fmt == MS_UYVY)
		pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
	else if (s->pix_fmt == MS_RGB24)
		pvi->bmiHeader.biCompression = BI_RGB;

	if (s->pix_fmt == MS_YUV420P)
		pvi->bmiHeader.biBitCount = 12;
	else if (s->pix_fmt == MS_YUY2)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_YUYV)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_UYVY)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_RGB24)
		pvi->bmiHeader.biBitCount = 24;

	pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	pvi->bmiHeader.biWidth = s->vsize.width;
	pvi->bmiHeader.biHeight = s->vsize.height;
	pvi->bmiHeader.biPlanes = 1;
	pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
	pvi->bmiHeader.biClrImportant = 0;
	mt.SetSampleSize(pvi->bmiHeader.biSizeImage);

	mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

	hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
	if(FAILED(hr))
	{
		return -12;
	}

	hr = s->m_pDXFilter->SetCallback(Callback); 
	if(FAILED(hr))
	{
		return -13;
	}

	hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
		(LPVOID *)&s->m_pIDXFilter);
	if(FAILED(hr))
	{
		return -14;
	}

	hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
	if(FAILED(hr))
	{
		return -15;
	}


	// get null renderer
	hr=CoCreateInstance (CLSID_NullRenderer,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IBaseFilter,
		(void **)&s->m_pNullRenderer);
	if(FAILED(hr))
	{
		return -16;
	}
	if (s->m_pNullRenderer!=NULL)
	{
		s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
	}

	hr = s->m_pBuilder->RenderStream(&pPinCategory,
		&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
	if (FAILED(hr))
	{
		return -17;
	}

	IAMStreamConfig *pConfig = NULL;
	hr = s->m_pBuilder->FindInterface(
		&pPinCategory, // Preview pin.
		&MEDIATYPE_Video,    // Any media type.
		s->m_pDeviceFilter, // Pointer to the capture filter.
		IID_IAMStreamConfig, (void**)&pConfig); 
	if (pConfig!=NULL)
	{
		AM_MEDIA_TYPE *pType = NULL;
		int iCount, iSize;
		pConfig->GetNumberOfCapabilities(&iCount, &iSize);

		for (int i = 0; i < iCount; i++) {
			VIDEO_STREAM_CONFIG_CAPS scc;
			pType = NULL;
			pConfig->GetStreamCaps(i, &pType, (BYTE *)&scc);

			if (!((pType->formattype == FORMAT_VideoInfo) &&
				(pType->cbFormat >= sizeof(VIDEOINFOHEADER)) &&
				(pType->pbFormat != NULL)))
				continue;

			VIDEOINFOHEADER & videoInfo = *(VIDEOINFOHEADER *)pType->pbFormat;

			if (m != pType->subtype)
				continue;

			if (videoInfo.bmiHeader.biWidth != s->vsize.width)
				continue;

			if (videoInfo.bmiHeader.biHeight != s->vsize.height)
				continue;

			if (videoInfo.bmiHeader.biBitCount != pvi->bmiHeader.biBitCount)
				continue;

			if (videoInfo.bmiHeader.biCompression != pvi->bmiHeader.biCompression)
				continue;

			// AvgTimePerFrame is in 100-ns units (UNITS == 10,000,000);
			// divide in floating point so fractional frame rates survive.
			videoInfo.AvgTimePerFrame = (LONGLONG)((double)UNITS / s->fps);
			pConfig->SetFormat(pType);
		}

		pConfig->GetFormat(&pType);
		if (pType!=NULL)
		{
			VIDEOINFO *pvi;
			pvi = (VIDEOINFO *)pType->pbFormat;
			ms_message("v4w: camera asked fps=%.2f // real fps=%.2f", s->fps, ((float)UNITS / (float)pvi->AvgTimePerFrame));
		}

		pConfig->Release();
	}

	//m_pDXFilter->SetBufferSamples(TRUE);

	s_callback = s;
	hr = s->m_pControl->Run();
	if(FAILED(hr))
	{
		return -18;
	}


	s->rotregvalue=1;
	return 0;
}
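
try_format() and try_format_size() used above are not shown. Below is a hedged sketch of the format probe, under the assumption that it walks each candidate pin category's stream capabilities through IAMStreamConfig and reports the category that offered the subtype; the sketch takes the subtype GUID and a graph builder directly, whereas the real helper maps its MSPixFmt argument to a GUID first and may enumerate pins differently:

static int try_format_sketch(ICaptureGraphBuilder2 *pBuilder, IBaseFilter *pFilter,
                             const GUID &subtype, GUID *pPinCategory)
{
	// Probe the capture pin first, then the preview pin.
	static const GUID *categories[2] = { &PIN_CATEGORY_CAPTURE, &PIN_CATEGORY_PREVIEW };

	for (int c = 0; c < 2; c++)
	{
		IAMStreamConfig *pConfig = NULL;
		if (FAILED(pBuilder->FindInterface(categories[c], &MEDIATYPE_Video,
				pFilter, IID_IAMStreamConfig, (void**)&pConfig)))
			continue;

		int count = 0, size = 0;
		pConfig->GetNumberOfCapabilities(&count, &size);

		BYTE *caps = new BYTE[size];
		for (int i = 0; i < count; i++)
		{
			AM_MEDIA_TYPE *pmt = NULL;
			if (pConfig->GetStreamCaps(i, &pmt, caps) == S_OK)
			{
				bool match = (pmt->subtype == subtype);
				DeleteMediaType(pmt); // base-classes helper, assumed available
				if (match)
				{
					*pPinCategory = *categories[c];
					delete[] caps;
					pConfig->Release();
					return 0; // this pin offers the requested subtype
				}
			}
		}
		delete[] caps;
		pConfig->Release();
	}
	return -1; // no pin offers the requested subtype
}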
Пример #21
0
HRESULT DSCaptureDevice::setFormat(const DSFormat& format)
{
    HRESULT hr;
    IAMStreamConfig* streamConfig = NULL;

    /* get the right interface to change capture settings */
    hr
        = m_captureGraphBuilder->FindInterface(
                &PIN_CATEGORY_CAPTURE,
                &MEDIATYPE_Video,
                m_srcFilter,
                IID_IAMStreamConfig,
                (void**) &streamConfig);
    if(SUCCEEDED(hr))
    {
        int nb = 0;
        int size = 0;
        AM_MEDIA_TYPE* mediaType = NULL;
        size_t bitCount = 0;

        hr = streamConfig->GetNumberOfCapabilities(&nb, &size);
        if (SUCCEEDED(hr) && nb)
        {
            BYTE* scc = new BYTE[size];

            if (scc)
            {
                DWORD pixfmt = format.pixelFormat;

                for (int i = 0 ; i < nb ; i++)
                {
                    AM_MEDIA_TYPE* mt;

                    if (streamConfig->GetStreamCaps(i, &mt, scc) == S_OK)
                    {
                        VIDEOINFOHEADER* hdr = (VIDEOINFOHEADER*) mt->pbFormat;

                        if (hdr
                                && (mt->subtype.Data1 == pixfmt)
                                && ((long) format.height
                                        == hdr->bmiHeader.biHeight)
                                && ((long) format.width
                                        == hdr->bmiHeader.biWidth))
                        {
                            mediaType = mt;
                            if ((pixfmt == MEDIASUBTYPE_ARGB32.Data1)
                                    || (pixfmt == MEDIASUBTYPE_RGB32.Data1))
                                bitCount = 32;
                            else if (pixfmt == MEDIASUBTYPE_RGB24.Data1)
                                bitCount = 24;
                            else
                                bitCount = hdr->bmiHeader.biBitCount;
                            break;
                        }
                        else
                            _DeleteMediaType(mt);
                    }
                }

                delete[] scc;
            }
            else
                hr = E_OUTOFMEMORY;
        }

        if (mediaType)
        {
            hr = streamConfig->SetFormat(mediaType);
            if (SUCCEEDED(hr))
            {
                m_bitPerPixel = bitCount;
                m_format = format;
                m_format.mediaType = mediaType->subtype;
            }
            _DeleteMediaType(mediaType);
        }
        else if (SUCCEEDED(hr))
            hr = E_FAIL;

        streamConfig->Release();
    }

    return hr;
}
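
A short usage sketch for setFormat() above, assuming a DSFormat aggregate with the width/height/pixelFormat fields the method reads (device is a hypothetical DSCaptureDevice instance):

DSFormat fmt;
fmt.width = 640;
fmt.height = 480;
fmt.pixelFormat = MEDIASUBTYPE_RGB24.Data1; // the loop above compares subtype.Data1

HRESULT hr = device->setFormat(fmt);
if (FAILED(hr))
	printf("no capability matched 640x480 RGB24, or SetFormat was rejected\n");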
Пример #22
0
bool DeviceSource::LoadFilters()
{
    if(bCapturing || bFiltersLoaded)
        return false;

    bool bSucceeded = false;

    List<MediaOutputInfo> outputList;
    IAMStreamConfig *config = NULL;
    bool bAddedVideoCapture = false, bAddedAudioCapture = false, bAddedDevice = false;
    GUID expectedMediaType;
    IPin *devicePin = NULL, *audioPin = NULL;
    HRESULT err;
    String strShader;

    bUseThreadedConversion = API->UseMultithreadedOptimizations() && (OSGetTotalCores() > 1);

    //------------------------------------------------
    // basic initialization vars

    bool bCheckForceAudio = data->GetInt(TEXT("forceCustomAudioDevice")) != 0;

    bUseCustomResolution = data->GetInt(TEXT("customResolution"));
    strDevice = data->GetString(TEXT("device"));
    strDeviceName = data->GetString(TEXT("deviceName"));
    strDeviceID = data->GetString(TEXT("deviceID"));
    strAudioDevice = data->GetString(TEXT("audioDevice"));
    strAudioName = data->GetString(TEXT("audioDeviceName"));
    strAudioID = data->GetString(TEXT("audioDeviceID"));
    strAudioGUID = data->GetString(TEXT("audioDeviceCLSID"));

    if(strAudioGUID.Compare(TEXT("CLSID_AudioInputDeviceCategory"))) matchGUID = CLSID_AudioInputDeviceCategory;
    if(strAudioGUID.Compare(TEXT("CLSID_VideoInputDeviceCategory"))) matchGUID = CLSID_VideoInputDeviceCategory;
    if(strAudioGUID.Compare(TEXT("CLSID_AudioRendererCategory"))) {
        //Log(TEXT("These are speakers.\n"));
        matchGUID = CLSID_AudioRendererCategory;
    }

    bFlipVertical = data->GetInt(TEXT("flipImage")) != 0;
    bFlipHorizontal = data->GetInt(TEXT("flipImageHorizontal")) != 0;

    opacity = data->GetInt(TEXT("opacity"), 100);

    float volume = data->GetFloat(TEXT("volume"), 1.0f);

    //------------------------------------------------
    // chrom key stuff

    bUseChromaKey = data->GetInt(TEXT("useChromaKey")) != 0;
    keyColor = data->GetInt(TEXT("keyColor"), 0xFFFFFFFF);
    keySimilarity = data->GetInt(TEXT("keySimilarity"));
    keyBlend = data->GetInt(TEXT("keyBlend"), 80);
    keySpillReduction = data->GetInt(TEXT("keySpillReduction"), 50);

    if(keyBaseColor.x < keyBaseColor.y && keyBaseColor.x < keyBaseColor.z)
        keyBaseColor -= keyBaseColor.x;
    else if(keyBaseColor.y < keyBaseColor.x && keyBaseColor.y < keyBaseColor.z)
        keyBaseColor -= keyBaseColor.y;
    else if(keyBaseColor.z < keyBaseColor.x && keyBaseColor.z < keyBaseColor.y)
        keyBaseColor -= keyBaseColor.z;

    //------------------------------------------------
    // get the device filter and pins

    if(strAudioDevice.IsValid())
    {
        audioDeviceFilter = GetDeviceByValue(matchGUID, L"FriendlyName", strAudioName, L"DevicePath", strAudioID);
        if(!audioDeviceFilter) {
            AppWarning(TEXT("DShowAudioPlugin: Invalid audio device: name '%s', path '%s'"), strAudioName.Array(), strAudioID.Array());
        }
    }

    soundOutputType = data->GetInt(TEXT("soundOutputType"));

    if(soundOutputType != 0)
    {
        // The same capture-pin lookup works for either device category.
        err = capture->FindPin(audioDeviceFilter, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, FALSE, 0, &audioPin);
        if(FAILED(err))
        {
            Log(TEXT("DShowAudioPlugin: No audio pin, result = %lX"), err);
            soundOutputType = 0;
        }
    }

    int soundTimeOffset = data->GetInt(TEXT("soundTimeOffset"));

    //GetOutputList(devicePin, outputList);

    //------------------------------------------------
    // initialize the basic video variables and data


    //------------------------------------------------
    // log audio info
    {
        String strTest = FormattedString(TEXT("    audio device: %s,\r\n    audio device id %s,\r\n"), strAudioDevice.Array(), strAudioID.Array());

        Log(TEXT("------------------------------------------"));
        Log(strTest.Array());
    }

    //------------------------------------------------
    // get audio pin configuration, optionally configure audio pin to 44100

    GUID expectedAudioType;

    if(soundOutputType == 1)
    {
        IAMStreamConfig *audioConfig;
        if(SUCCEEDED(audioPin->QueryInterface(IID_IAMStreamConfig, (void**)&audioConfig)))
        {
            AM_MEDIA_TYPE *audioMediaType;
            if(SUCCEEDED(err = audioConfig->GetFormat(&audioMediaType)))
            {
                SetAudioInfo(audioMediaType, expectedAudioType);
                //Log(TEXT("Fixed size samples: %s\r\n"), (audioMediaType->bFixedSizeSamples) ? "Yes" : "No");
                //Log(TEXT("Temporal Compression: %s\r\n"), (audioMediaType->bTemporalCompression) ? "Yes" : "No");
                //Log(TEXT("cbFormat: %.16X\r\n"), audioMediaType->cbFormat);
                //Log(TEXT("Sample size: %u\r\n"), audioMediaType->lSampleSize);
            }
            else if(err == E_NOTIMPL) //elgato probably
            {
                IEnumMediaTypes *audioMediaTypes;
                if(SUCCEEDED(err = audioPin->EnumMediaTypes(&audioMediaTypes)))
                {
                    ULONG i = 0;
                    if((err = audioMediaTypes->Next(1, &audioMediaType, &i)) == S_OK)
                        SetAudioInfo(audioMediaType, expectedAudioType);
                    else
                    {
                        AppWarning(TEXT("DShowAudioPlugin: audioMediaTypes->Next failed, result = %08lX"), err);
                        soundOutputType = 0;
                    }

                    audioMediaTypes->Release();
                }
                else
                {
                    AppWarning(TEXT("DShowAudioPlugin: audioMediaTypes->Next failed, result = %08lX"), err);
                    soundOutputType = 0;
                }
            }
            else
            {
                AppWarning(TEXT("DShowAudioPlugin: Could not get audio format, result = %08lX"), err);
                soundOutputType = 0;
            }

            audioConfig->Release();
        }
        else {
            soundOutputType = 0;
        }
    }

    //------------------------------------------------
    // add video capture filter if any

    captureFilter = new CaptureFilter(this, MEDIATYPE_Video, expectedMediaType);

    if(FAILED(err = graph->AddFilter(captureFilter, NULL)))
    {
        AppWarning(TEXT("DShowAudioPlugin: Failed to add video capture filter to graph, result = %08lX"), err);
        goto cleanFinish;
    }

    bAddedVideoCapture = true;

    //------------------------------------------------
    // add audio capture filter if any

    if(soundOutputType == 1)
    {
        audioFilter = new CaptureFilter(this, MEDIATYPE_Audio, expectedAudioType);
        if(!audioFilter)
        {
            AppWarning(TEXT("Failed to create audio capture filter"));
            soundOutputType = 0;
        }
    }
    else if(soundOutputType == 2)
    {
        if(FAILED(err = CoCreateInstance(CLSID_DSoundRender, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&audioFilter)))
        {
            AppWarning(TEXT("DShowPlugin: failed to create audio renderer, result = %08lX"), err);
            soundOutputType = 0;
        }

        IBasicAudio *basicAudio;
        if(SUCCEEDED(audioFilter->QueryInterface(IID_IBasicAudio, (void**)&basicAudio)))
        {
            long lVol = long((double(volume)*NEAR_SILENTf)-NEAR_SILENTf);
            if(lVol <= -NEAR_SILENT)
                lVol = -10000;
            basicAudio->put_Volume(lVol);
            basicAudio->Release();
        }
    }

    if(soundOutputType != 0)
    {
        if(FAILED(err = graph->AddFilter(audioFilter, NULL)))
        {
            AppWarning(TEXT("DShowAudioPlugin: Failed to add audio capture filter to graph, result = %08lX"), err);
            goto cleanFinish;
        }

        bAddedAudioCapture = true;
    }

    //------------------------------------------------
    // add primary device filter

    if(FAILED(err = graph->AddFilter(audioDeviceFilter, NULL)))
    {
        AppWarning(TEXT("DShowAudioPlugin: Failed to add audio device filter to graph, result = %08lX"), err);
        goto cleanFinish;
    }

    bAddedDevice = true;

    //------------------------------------------------
    // connect all pins and set up the whole capture thing

    bool bConnected;

    if(soundOutputType != 0)
    {
        bConnected = SUCCEEDED(err = capture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, audioDeviceFilter, NULL, audioFilter));
        if(!bConnected)
        {
            AppWarning(TEXT("DShowAudioPlugin: Failed to connect the audio device pin to the audio capture pin, result = %08lX"), err);
            soundOutputType = 0;
        }
    }

    if(FAILED(err = graph->QueryInterface(IID_IMediaControl, (void**)&control)))
    {
        AppWarning(TEXT("DShowAudioPlugin: Failed to get IMediaControl, result = %08lX"), err);
        goto cleanFinish;
    }

    if(soundOutputType == 1)
    {
        audioOut = new DeviceAudioSource;
        audioOut->Initialize(this);
        API->AddAudioSource(audioOut);

        audioOut->SetAudioOffset(soundTimeOffset);
        audioOut->SetVolume(volume);
    }

    bSucceeded = true;

cleanFinish:
    SafeRelease(config);
    SafeRelease(devicePin);
    SafeRelease(audioPin);

    for(UINT i=0; i<outputList.Num(); i++)
        outputList[i].FreeData();

    if(!bSucceeded)
    {
        bCapturing = false;

        if(bAddedVideoCapture)
            graph->RemoveFilter(captureFilter);
        if(bAddedAudioCapture)
            graph->RemoveFilter(audioFilter);
        if(bAddedDevice) {
            graph->RemoveFilter(audioDeviceFilter);
            graph->RemoveFilter(deviceFilter);
        }

        SafeRelease(audioDeviceFilter);
        SafeRelease(deviceFilter);
        SafeRelease(captureFilter);
        SafeRelease(audioFilter);
        SafeRelease(control);

        if(colorConvertShader)
        {
            delete colorConvertShader;
            colorConvertShader = NULL;
        }

        if(audioOut)
        {
            delete audioOut;
            audioOut = NULL;
        }

        if(lpImageBuffer)
        {
            Free(lpImageBuffer);
            lpImageBuffer = NULL;
        }

        bReadyToDraw = true;
    }
    else
        bReadyToDraw = false;

    if(!renderCX) renderCX = 32;
    if(!renderCY) renderCY = 32;

    //-----------------------------------------------------
    // create the texture regardless, will just show up as red to indicate failure
    BYTE *textureData = (BYTE*)Allocate(renderCX*renderCY*4);

    if(colorType == DeviceOutputType_RGB) //you may be confused, but when directshow outputs RGB, it's actually outputting BGR
    {
        msetd(textureData, 0xFFFF0000, renderCX*renderCY*4);
        texture = CreateTexture(renderCX, renderCY, GS_BGR, textureData, FALSE, FALSE);
    }
    else //if we're working with planar YUV, we can just use regular RGB textures instead
    {
        msetd(textureData, 0xFF0000FF, renderCX*renderCY*4);
        texture = CreateTexture(renderCX, renderCY, GS_RGB, textureData, FALSE, FALSE);
    }

    if(bSucceeded && bUseThreadedConversion)
    {
        if(colorType == DeviceOutputType_I420 || colorType == DeviceOutputType_YV12)
        {
            LPBYTE lpData;
            if(texture->Map(lpData, texturePitch))
                texture->Unmap();
            else
                texturePitch = renderCX*4;

            lpImageBuffer = (LPBYTE)Allocate(texturePitch*renderCY);
        }
    }

    Free(textureData);

    bFiltersLoaded = bSucceeded;
    return bSucceeded;
}
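
Both LoadFilters variants lean on a SafeRelease helper for cleanup. It is not shown here; a sketch, assuming the codebase's version matches the conventional pattern:

template <class T> static inline void SafeRelease(T *&p)
{
	if (p)
	{
		p->Release(); // drop the COM reference
		p = NULL;     // and guard against a double release
	}
}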
Пример #23
0
bool DeviceSource::LoadFilters()
{
    if(bCapturing || bFiltersLoaded)
        return false;

    bool bSucceeded = false;

    List<MediaOutputInfo> outputList;
    IAMStreamConfig *config = NULL;
    bool bAddedVideoCapture = false, bAddedAudioCapture = false, bAddedDevice = false;
    GUID expectedMediaType;
    IPin *devicePin = NULL, *audioPin = NULL;
    HRESULT err;
    String strShader;

    bUseThreadedConversion = API->UseMultithreadedOptimizations() && (OSGetTotalCores() > 1);

    //------------------------------------------------
    // basic initialization vars

    bool bForceCustomAudio = data->GetInt(TEXT("forceCustomAudioDevice")) != 0;

    bUseCustomResolution = data->GetInt(TEXT("customResolution"));
    strDevice = data->GetString(TEXT("device"));
    strDeviceName = data->GetString(TEXT("deviceName"));
    strDeviceID = data->GetString(TEXT("deviceID"));
    strAudioDevice = data->GetString(TEXT("audioDevice"));
    strAudioName = data->GetString(TEXT("audioDeviceName"));
    strAudioID = data->GetString(TEXT("audioDeviceID"));

    bFlipVertical = data->GetInt(TEXT("flipImage")) != 0;
    bFlipHorizontal = data->GetInt(TEXT("flipImageHorizontal")) != 0;
    bUsePointFiltering = data->GetInt(TEXT("usePointFiltering")) != 0;

    opacity = data->GetInt(TEXT("opacity"), 100);

    float volume = data->GetFloat(TEXT("volume"), 1.0f);

    bUseBuffering = data->GetInt(TEXT("useBuffering")) != 0;
    bufferTime = data->GetInt(TEXT("bufferTime"))*10000;

    //------------------------------------------------
    // chrom key stuff

    bUseChromaKey = data->GetInt(TEXT("useChromaKey")) != 0;
    keyColor = data->GetInt(TEXT("keyColor"), 0xFFFFFFFF);
    keySimilarity = data->GetInt(TEXT("keySimilarity"));
    keyBlend = data->GetInt(TEXT("keyBlend"), 80);
    keySpillReduction = data->GetInt(TEXT("keySpillReduction"), 50);

    if(keyBaseColor.x < keyBaseColor.y && keyBaseColor.x < keyBaseColor.z)
        keyBaseColor -= keyBaseColor.x;
    else if(keyBaseColor.y < keyBaseColor.x && keyBaseColor.y < keyBaseColor.z)
        keyBaseColor -= keyBaseColor.y;
    else if(keyBaseColor.z < keyBaseColor.x && keyBaseColor.z < keyBaseColor.y)
        keyBaseColor -= keyBaseColor.z;

    //------------------------------------------------
    // get the device filter and pins

    if(strDeviceName.IsValid())
        deviceFilter = GetDeviceByValue(CLSID_VideoInputDeviceCategory, L"FriendlyName", strDeviceName, L"DevicePath", strDeviceID);
    else
    {
        if(!strDevice.IsValid())
        {
            AppWarning(TEXT("DShowPlugin: Invalid device specified"));
            goto cleanFinish;
        }

        deviceFilter = GetDeviceByValue(CLSID_VideoInputDeviceCategory, L"FriendlyName", strDevice);
    }
    
    if(!deviceFilter)
    {
        AppWarning(TEXT("DShowPlugin: Could not create device filter"));
        goto cleanFinish;
    }

    devicePin = GetOutputPin(deviceFilter, &MEDIATYPE_Video);
    if(!devicePin)
    {
        AppWarning(TEXT("DShowPlugin: Could not get device video pin"));
        goto cleanFinish;
    }

    soundOutputType = data->GetInt(TEXT("soundOutputType")); //0 is for backward-compatibility
    if (strAudioID.CompareI(TEXT("Disabled")))
        soundOutputType = 0;

    if(soundOutputType != 0)
    {
        if(!bForceCustomAudio)
        {
            err = capture->FindPin(deviceFilter, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, FALSE, 0, &audioPin);
            bDeviceHasAudio = SUCCEEDED(err);
        }
        else
            bDeviceHasAudio = false;

        if(!bDeviceHasAudio)
        {
            if(strDeviceName.IsValid())
            {
                audioDeviceFilter = GetDeviceByValue(CLSID_AudioInputDeviceCategory, L"FriendlyName", strAudioName, L"DevicePath", strAudioID);
                if(!audioDeviceFilter)
                    AppWarning(TEXT("DShowPlugin: Invalid audio device: name '%s', path '%s'"), strAudioName.Array(), strAudioID.Array());
            }
            else if(strAudioDevice.IsValid())
            {
                audioDeviceFilter = GetDeviceByValue(CLSID_AudioInputDeviceCategory, L"FriendlyName", strAudioDevice);
                if(!audioDeviceFilter)
                    AppWarning(TEXT("DShowPlugin: Could not create audio device filter"));
            }

            if(audioDeviceFilter)
                err = capture->FindPin(audioDeviceFilter, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, FALSE, 0, &audioPin);
            else
                err = E_FAIL;
        }

        if(FAILED(err) || !audioPin)
        {
            Log(TEXT("DShowPlugin: No audio pin, result = %lX"), err);
            soundOutputType = 0;
        }
    }
    else
        bDeviceHasAudio = bForceCustomAudio = false;

    int soundTimeOffset = data->GetInt(TEXT("soundTimeOffset"));

    GetOutputList(devicePin, outputList);

    //------------------------------------------------
    // initialize the basic video variables and data

    renderCX = renderCY = 0;
    frameInterval = 0;

    if(bUseCustomResolution)
    {
        renderCX = data->GetInt(TEXT("resolutionWidth"));
        renderCY = data->GetInt(TEXT("resolutionHeight"));
        frameInterval = data->GetInt(TEXT("frameInterval"));
    }
    else
    {
        SIZE size;
        if (!GetClosestResolution(outputList, size, frameInterval))
        {
            AppWarning(TEXT("DShowPlugin: Unable to find appropriate resolution"));
            renderCX = renderCY = 64;
            goto cleanFinish;
        }

        renderCX = size.cx;
        renderCY = size.cy;
    }

    if(!renderCX || !renderCY || !frameInterval)
    {
        AppWarning(TEXT("DShowPlugin: Invalid size/fps specified"));
        goto cleanFinish;
    }

    preferredOutputType = (data->GetInt(TEXT("usePreferredType")) != 0) ? data->GetInt(TEXT("preferredType")) : -1;

    int numThreads = MAX(OSGetTotalCores()-2, 1);
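    // Split the frame into one horizontal band per conversion thread; endY is
    // masked to an even scanline so 4:2:0 chroma row pairs are never split.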
    for(int i=0; i<numThreads; i++)
    {
        convertData[i].width  = renderCX;
        convertData[i].height = renderCY;
        convertData[i].sample = NULL;
        convertData[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertData[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);

        if(i == 0)
            convertData[i].startY = 0;
        else
            convertData[i].startY = convertData[i-1].endY;

        if(i == (numThreads-1))
            convertData[i].endY = renderCY;
        else
            convertData[i].endY = ((renderCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }

    bFirstFrame = true;

    //------------------------------------------------
    // get the closest media output for the settings used

    MediaOutputInfo *bestOutput = GetBestMediaOutput(outputList, renderCX, renderCY, preferredOutputType, frameInterval);
    if(!bestOutput)
    {
        AppWarning(TEXT("DShowPlugin: Could not find appropriate resolution to create device image source"));
        goto cleanFinish;
    }

    //------------------------------------------------
    // log video info

    {
        String strTest = FormattedString(TEXT("    device: %s,\r\n    device id %s,\r\n    chosen type: %s, usingFourCC: %s, res: %ux%u - %ux%u, frameIntervals: %llu-%llu"),
            strDevice.Array(), strDeviceID.Array(),
            EnumToName[(int)bestOutput->videoType],
            bestOutput->bUsingFourCC ? TEXT("true") : TEXT("false"),
            bestOutput->minCX, bestOutput->minCY, bestOutput->maxCX, bestOutput->maxCY,
            bestOutput->minFrameInterval, bestOutput->maxFrameInterval);

        BITMAPINFOHEADER *bmiHeader = GetVideoBMIHeader(bestOutput->mediaType);

        char fourcc[5];
        mcpy(fourcc, &bmiHeader->biCompression, 4);
        fourcc[4] = 0;

        if(bmiHeader->biCompression > 1000)
            strTest << FormattedString(TEXT(", fourCC: '%S'\r\n"), fourcc);
        else
            strTest << FormattedString(TEXT(", fourCC: %08lX\r\n"), bmiHeader->biCompression);

        if(!bDeviceHasAudio) strTest << FormattedString(TEXT("    audio device: %s,\r\n    audio device id %s,\r\n"), strAudioDevice.Array(), strAudioID.Array());

        Log(TEXT("------------------------------------------"));
        Log(strTest.Array());
    }

    //------------------------------------------------
    // set up shaders and video output data

    expectedMediaType = bestOutput->mediaType->subtype;

    colorType = DeviceOutputType_RGB;
    if(bestOutput->videoType == VideoOutputType_I420)
        colorType = DeviceOutputType_I420;
    else if(bestOutput->videoType == VideoOutputType_YV12)
        colorType = DeviceOutputType_YV12;
    else if(bestOutput->videoType == VideoOutputType_YVYU)
        colorType = DeviceOutputType_YVYU;
    else if(bestOutput->videoType == VideoOutputType_YUY2)
        colorType = DeviceOutputType_YUY2;
    else if(bestOutput->videoType == VideoOutputType_UYVY)
        colorType = DeviceOutputType_UYVY;
    else if(bestOutput->videoType == VideoOutputType_HDYC)
        colorType = DeviceOutputType_HDYC;
    else
    {
        colorType = DeviceOutputType_RGB;
        expectedMediaType = MEDIASUBTYPE_RGB32;
    }

    strShader = ChooseShader();
    if(strShader.IsValid())
        colorConvertShader = CreatePixelShaderFromFile(strShader);

    if(colorType != DeviceOutputType_RGB && !colorConvertShader)
    {
        AppWarning(TEXT("DShowPlugin: Could not create color space conversion pixel shader"));
        goto cleanFinish;
    }

    if(colorType == DeviceOutputType_YV12 || colorType == DeviceOutputType_I420)
    {
        for(int i=0; i<numThreads; i++)
            hConvertThreads[i] = OSCreateThread((XTHREAD)PackPlanarThread, convertData+i);
    }

    //------------------------------------------------
    // set chroma details

    keyBaseColor = Color4().MakeFromRGBA(keyColor);
    Matrix4x4TransformVect(keyChroma, (colorType == DeviceOutputType_HDYC) ? (float*)yuv709Mat : (float*)yuvMat, keyBaseColor);
    keyChroma *= 2.0f;

    //------------------------------------------------
    // configure video pin

    if(FAILED(err = devicePin->QueryInterface(IID_IAMStreamConfig, (void**)&config)))
    {
        AppWarning(TEXT("DShowPlugin: Could not get IAMStreamConfig for device pin, result = %08lX"), err);
        goto cleanFinish;
    }

    AM_MEDIA_TYPE outputMediaType;
    CopyMediaType(&outputMediaType, bestOutput->mediaType);

    VIDEOINFOHEADER *vih  = reinterpret_cast<VIDEOINFOHEADER*>(outputMediaType.pbFormat);
    BITMAPINFOHEADER *bmi = GetVideoBMIHeader(&outputMediaType);
    vih->AvgTimePerFrame  = frameInterval;
    bmi->biWidth          = renderCX;
    bmi->biHeight         = renderCY;
    bmi->biSizeImage      = renderCX*renderCY*(bmi->biBitCount>>3);

    if(FAILED(err = config->SetFormat(&outputMediaType)))
    {
        if(err != E_NOTIMPL)
        {
            AppWarning(TEXT("DShowPlugin: SetFormat on device pin failed, result = %08lX"), err);
            goto cleanFinish;
        }
    }

    FreeMediaType(outputMediaType);

    //------------------------------------------------
    // get audio pin configuration, optionally configure audio pin to 44100

    GUID expectedAudioType;

    if(soundOutputType == 1)
    {
        IAMStreamConfig *audioConfig;
        if(SUCCEEDED(audioPin->QueryInterface(IID_IAMStreamConfig, (void**)&audioConfig)))
        {
            AM_MEDIA_TYPE *audioMediaType;
            if(SUCCEEDED(err = audioConfig->GetFormat(&audioMediaType)))
            {
                SetAudioInfo(audioMediaType, expectedAudioType);
            }
            else if(err == E_NOTIMPL) //elgato probably
            {
                IEnumMediaTypes *audioMediaTypes;
                if(SUCCEEDED(err = audioPin->EnumMediaTypes(&audioMediaTypes)))
                {
                    ULONG i = 0;
                    if((err = audioMediaTypes->Next(1, &audioMediaType, &i)) == S_OK)
                        SetAudioInfo(audioMediaType, expectedAudioType);
                    else
                    {
                        AppWarning(TEXT("DShowPlugin: audioMediaTypes->Next failed, result = %08lX"), err);
                        soundOutputType = 0;
                    }

                    audioMediaTypes->Release();
                }
                else
                {
                    AppWarning(TEXT("DShowPlugin: audioMediaTypes->Next failed, result = %08lX"), err);
                    soundOutputType = 0;
                }
            }
            else
            {
                AppWarning(TEXT("DShowPlugin: Could not get audio format, result = %08lX"), err);
                soundOutputType = 0;
            }

            audioConfig->Release();
        }
        else {
            soundOutputType = 0;
        }
    }

    //------------------------------------------------
    // add video capture filter if any

    captureFilter = new CaptureFilter(this, MEDIATYPE_Video, expectedMediaType);

    if(FAILED(err = graph->AddFilter(captureFilter, NULL)))
    {
        AppWarning(TEXT("DShowPlugin: Failed to add video capture filter to graph, result = %08lX"), err);
        goto cleanFinish;
    }

    bAddedVideoCapture = true;

    //------------------------------------------------
    // add audio capture filter if any

    if(soundOutputType == 1)
    {
        audioFilter = new CaptureFilter(this, MEDIATYPE_Audio, expectedAudioType);
        if(!audioFilter)
        {
            AppWarning(TEXT("Failed to create audio capture filter"));
            soundOutputType = 0;
        }
    }
    else if(soundOutputType == 2)
    {
        if(FAILED(err = CoCreateInstance(CLSID_AudioRender, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&audioFilter)))
        {
            AppWarning(TEXT("DShowPlugin: failed to create audio renderer, result = %08lX"), err);
            soundOutputType = 0;
        }

        IBasicAudio *basicAudio;
        if(SUCCEEDED(audioFilter->QueryInterface(IID_IBasicAudio, (void**)&basicAudio)))
        {
            long lVol = long((double(volume)*NEAR_SILENTf)-NEAR_SILENTf);
            if(lVol <= -NEAR_SILENT)
                lVol = -10000;
            basicAudio->put_Volume(lVol);
            basicAudio->Release();
        }
    }

    if(soundOutputType != 0)
    {
        if(FAILED(err = graph->AddFilter(audioFilter, NULL)))
            AppWarning(TEXT("DShowPlugin: Failed to add audio capture filter to graph, result = %08lX"), err);

        bAddedAudioCapture = true;
    }

    //------------------------------------------------
    // add primary device filter

    if(FAILED(err = graph->AddFilter(deviceFilter, NULL)))
    {
        AppWarning(TEXT("DShowPlugin: Failed to add device filter to graph, result = %08lX"), err);
        goto cleanFinish;
    }

    if(soundOutputType != 0 && !bDeviceHasAudio)
    {
        if(FAILED(err = graph->AddFilter(audioDeviceFilter, NULL)))
            AppWarning(TEXT("DShowPlugin: Failed to add audio device filter to graph, result = %08lX"), err);
    }

    bAddedDevice = true;

    //------------------------------------------------
    // connect all pins and set up the whole capture thing

    //THANK THE NINE DIVINES I FINALLY GOT IT WORKING
    bool bConnected = SUCCEEDED(err = capture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, deviceFilter, NULL, captureFilter));
    if(!bConnected)
    {
        if(FAILED(err = graph->Connect(devicePin, captureFilter->GetCapturePin())))
        {
            AppWarning(TEXT("DShowPlugin: Failed to connect the video device pin to the video capture pin, result = %08lX"), err);
            goto cleanFinish;
        }
    }

    if(soundOutputType != 0)
    {
        if(!bDeviceHasAudio)
            bConnected = SUCCEEDED(err = capture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, audioDeviceFilter, NULL, audioFilter));
        else
            bConnected = SUCCEEDED(err = capture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, deviceFilter, NULL, audioFilter));

        if(!bConnected)
        {
            AppWarning(TEXT("DShowPlugin: Failed to connect the audio device pin to the audio capture pin, result = %08lX"), err);
            soundOutputType = 0;
        }
    }

    if(FAILED(err = graph->QueryInterface(IID_IMediaControl, (void**)&control)))
    {
        AppWarning(TEXT("DShowPlugin: Failed to get IMediaControl, result = %08lX"), err);
        goto cleanFinish;
    }

    if (bUseBuffering) {
        if (!(hStopSampleEvent = CreateEvent(NULL, FALSE, FALSE, NULL))) {
            AppWarning(TEXT("DShowPlugin: Failed to create stop event"), err);
            goto cleanFinish;
        }

        if (!(hSampleThread = OSCreateThread((XTHREAD)SampleThread, this))) {
            AppWarning(TEXT("DShowPlugin: Failed to create sample thread"), err);
            goto cleanFinish;
        }
    }

    if(soundOutputType == 1)
    {
        audioOut = new DeviceAudioSource;
        audioOut->Initialize(this);
        API->AddAudioSource(audioOut);

        audioOut->SetAudioOffset(soundTimeOffset);
        audioOut->SetVolume(volume);
    }

    bSucceeded = true;

cleanFinish:
    SafeRelease(config);
    SafeRelease(devicePin);
    SafeRelease(audioPin);

    for(UINT i=0; i<outputList.Num(); i++)
        outputList[i].FreeData();

    if(!bSucceeded)
    {
        bCapturing = false;

        if(bAddedVideoCapture)
            graph->RemoveFilter(captureFilter);
        if(bAddedAudioCapture)
            graph->RemoveFilter(audioFilter);

        if(bAddedDevice)
        {
            if(!bDeviceHasAudio && audioDeviceFilter)
                graph->RemoveFilter(audioDeviceFilter);
            graph->RemoveFilter(deviceFilter);
        }

        SafeRelease(audioDeviceFilter);
        SafeRelease(deviceFilter);
        SafeRelease(captureFilter);
        SafeRelease(audioFilter);
        SafeRelease(control);

        if (hSampleThread) {
            SetEvent(hStopSampleEvent);
            WaitForSingleObject(hSampleThread, INFINITE);
            CloseHandle(hSampleThread);
            hSampleThread = NULL;
        }

        if (hStopSampleEvent) {
            CloseHandle(hStopSampleEvent);
            hStopSampleEvent = NULL;
        }

        if(colorConvertShader)
        {
            delete colorConvertShader;
            colorConvertShader = NULL;
        }

        if(audioOut)
        {
            delete audioOut;
            audioOut = NULL;
        }

        if(lpImageBuffer)
        {
            Free(lpImageBuffer);
            lpImageBuffer = NULL;
        }

        bReadyToDraw = true;
    }
    else
        bReadyToDraw = false;

    if(!renderCX) renderCX = 32;
    if(!renderCY) renderCY = 32;

    //-----------------------------------------------------
    // create the texture regardless, will just show up as red to indicate failure
    BYTE *textureData = (BYTE*)Allocate(renderCX*renderCY*4);

    if(colorType == DeviceOutputType_RGB) //you may be confused, but when directshow outputs RGB, it's actually outputting BGR
    {
        msetd(textureData, 0xFFFF0000, renderCX*renderCY*4);
        texture = CreateTexture(renderCX, renderCY, GS_BGR, textureData, FALSE, FALSE);
    }
    else //if we're working with planar YUV, we can just use regular RGB textures instead
    {
        msetd(textureData, 0xFF0000FF, renderCX*renderCY*4);
        texture = CreateTexture(renderCX, renderCY, GS_RGB, textureData, FALSE, FALSE);
    }

    if(bSucceeded && bUseThreadedConversion)
    {
        if(colorType == DeviceOutputType_I420 || colorType == DeviceOutputType_YV12)
        {
            LPBYTE lpData;
            if(texture->Map(lpData, texturePitch))
                texture->Unmap();
            else
                texturePitch = renderCX*4;

            lpImageBuffer = (LPBYTE)Allocate(texturePitch*renderCY);
        }
    }

    Free(textureData);

    bFiltersLoaded = bSucceeded;
    return bSucceeded;
}
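
The put_Volume() math in both LoadFilters examples maps a linear [0,1] volume onto the scale IBasicAudio expects: hundredths of a decibel in [-10000, 0]. A standalone sketch of that mapping, assuming NEAR_SILENT/NEAR_SILENTf is the plugin's silence threshold:

static long LinearVolumeToDirectShow(float volume, double nearSilent)
{
	// Scale [0,1] onto [-nearSilent, 0] hundredths of a decibel.
	long lVol = (long)((double)volume * nearSilent - nearSilent);
	if (lVol <= -(long)nearSilent)
		lVol = -10000; // anything at or below the threshold becomes full mute
	return lVol;
}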
Пример #24
0
bool CCameraDS::OpenCamera(int nCamID, bool bDisplayProperties, int nWidth, int nHeight)
{
	
	HRESULT hr = S_OK;

	CoInitialize(NULL);
	// Create the Filter Graph Manager.
	hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC,
							IID_IGraphBuilder, (void **)&m_pGraph);

	hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER, 
							IID_IBaseFilter, (LPVOID *)&m_pSampleGrabberFilter);

	hr = m_pGraph->QueryInterface(IID_IMediaControl, (void **) &m_pMediaControl);
	hr = m_pGraph->QueryInterface(IID_IMediaEvent, (void **) &m_pMediaEvent);

	hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER,
							IID_IBaseFilter, (LPVOID*) &m_pNullFilter);


	hr = m_pGraph->AddFilter(m_pNullFilter, L"NullRenderer");
	
	hr = m_pSampleGrabberFilter->QueryInterface(IID_ISampleGrabber, (void**)&m_pSampleGrabber);

	AM_MEDIA_TYPE   mt;
	ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
	mt.majortype = MEDIATYPE_Video;
	mt.subtype = MEDIASUBTYPE_RGB24;
	mt.formattype = FORMAT_VideoInfo; 
	hr = m_pSampleGrabber->SetMediaType(&mt);
	MYFREEMEDIATYPE(mt);

	m_pGraph->AddFilter(m_pSampleGrabberFilter, L"Grabber");
 
	// Bind Device Filter.  We know the device because the id was passed in
	BindFilter(nCamID, &m_pDeviceFilter);
	m_pGraph->AddFilter(m_pDeviceFilter, NULL);

	CComPtr<IEnumPins> pEnum;
	m_pDeviceFilter->EnumPins(&pEnum);
 
	hr = pEnum->Reset();
	hr = pEnum->Next(1, &m_pCameraOutput, NULL); 

	pEnum = NULL; 
	m_pSampleGrabberFilter->EnumPins(&pEnum);
	pEnum->Reset();
	hr = pEnum->Next(1, &m_pGrabberInput, NULL); 

	pEnum = NULL;
	m_pSampleGrabberFilter->EnumPins(&pEnum);
	pEnum->Reset();
	pEnum->Skip(1);
	hr = pEnum->Next(1, &m_pGrabberOutput, NULL); 

	pEnum = NULL;
	m_pNullFilter->EnumPins(&pEnum);
	pEnum->Reset();
	hr = pEnum->Next(1, &m_pNullInputPin, NULL);

	//SetCrossBar();

	if (bDisplayProperties) 
	{
		CComPtr<ISpecifyPropertyPages> pPages;

		HRESULT hr = m_pCameraOutput->QueryInterface(IID_ISpecifyPropertyPages, (void**)&pPages);
		if (SUCCEEDED(hr))
		{
			PIN_INFO PinInfo;
			m_pCameraOutput->QueryPinInfo(&PinInfo);

			CAUUID caGUID;
			pPages->GetPages(&caGUID);

			OleCreatePropertyFrame(NULL, 0, 0,
						L"Property Sheet", 1,
						(IUnknown **)&(m_pCameraOutput.p),
						caGUID.cElems,
						caGUID.pElems,
						0, 0, NULL);
			CoTaskMemFree(caGUID.pElems);
			PinInfo.pFilter->Release();
		}
		pPages = NULL;
	}
	else 
	{
		//////////////////////////////////////////////////////////////////////////////
		// Set the camera width and height from lWidth and lHeight; default 320*240
		// by flymanbox @2009-01-24
		//////////////////////////////////////////////////////////////////////////////
	   int _Width = nWidth, _Height = nHeight;
	   IAMStreamConfig* iconfig = NULL;
	   hr = m_pCameraOutput->QueryInterface(IID_IAMStreamConfig, (void**)&iconfig);
	   if (FAILED(hr) || iconfig == NULL)
		  return false;

	   AM_MEDIA_TYPE* pmt = NULL;
	   if (iconfig->GetFormat(&pmt) != S_OK)
	   {
		  //printf("GetFormat Failed ! \n");
		  iconfig->Release();
		  return false;
	   }

	   VIDEOINFOHEADER* phead;
	   if (pmt->formattype == FORMAT_VideoInfo)
	   {
			phead = (VIDEOINFOHEADER*)pmt->pbFormat;
			phead->bmiHeader.biWidth = _Width;
			phead->bmiHeader.biHeight = _Height;
			if ((hr = iconfig->SetFormat(pmt)) != S_OK)
			{
				iconfig->Release();
				MYFREEMEDIATYPE(*pmt);
				return false;
			}
		}
		iconfig->Release();
		iconfig = NULL;
		MYFREEMEDIATYPE(*pmt);
	}

	hr = m_pGraph->Connect(m_pCameraOutput, m_pGrabberInput);
	hr = m_pGraph->Connect(m_pGrabberOutput, m_pNullInputPin);

	if (FAILED(hr))
	{
		switch(hr)
		{
			case VFW_S_NOPREVIEWPIN :
				break;
			case E_FAIL :
				break;
			case E_INVALIDARG :
				break;
			case E_POINTER :
				break;
		}
	}

	m_pSampleGrabber->SetBufferSamples(TRUE);
	m_pSampleGrabber->SetOneShot(TRUE);
    
	hr = m_pSampleGrabber->GetConnectedMediaType(&mt);
	if(FAILED(hr))
		return false;

	VIDEOINFOHEADER *videoHeader = reinterpret_cast<VIDEOINFOHEADER*>(mt.pbFormat);
	m_nWidth = videoHeader->bmiHeader.biWidth;
	m_nHeight = videoHeader->bmiHeader.biHeight;
	m_bConnected = true;
	MYFREEMEDIATYPE(mt);	// free the format block filled in by GetConnectedMediaType

	pEnum = NULL;
	return true;
}
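
BindFilter() above (and in the earlier VideoCapture example) resolves a device index to a filter. A minimal sketch, assuming it binds the nCamID-th moniker from the video input device category:

static bool BindFilter(int nCamID, IBaseFilter **ppFilter)
{
	ICreateDevEnum *pDevEnum = NULL;
	IEnumMoniker *pEnum = NULL;
	if (FAILED(CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
				IID_ICreateDevEnum, (void**)&pDevEnum)))
		return false;

	// CreateClassEnumerator returns S_FALSE (with a NULL enumerator) if the
	// category is empty, so compare against S_OK explicitly.
	if (pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0) != S_OK)
	{
		pDevEnum->Release();
		return false;
	}

	IMoniker *pMoniker = NULL;
	bool bound = false;
	for (int index = 0; pEnum->Next(1, &pMoniker, NULL) == S_OK; index++)
	{
		if (index == nCamID)
		{
			bound = SUCCEEDED(pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)ppFilter));
			pMoniker->Release();
			break;
		}
		pMoniker->Release();
	}
	pEnum->Release();
	pDevEnum->Release();
	return bound;
}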
Пример #25
0
HRESULT videoInputCamera::setupDevice() {

	comInit();
	GUID CAPTURE_MODE   = PIN_CATEGORY_CAPTURE; //Don't worry - it ends up being preview (which is faster)
	//printf("SETUP: Setting up device %i\n",deviceID);

	// CREATE THE GRAPH BUILDER //
	// Create the filter graph manager and query for interfaces.
	HRESULT hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void **)&pCaptureGraphBuilder);
	if (FAILED(hr))	// FAILED is a macro that tests the return value
	{
		printf("ERROR - Could not create the Filter Graph Manager\n");
		return hr;
	}

	//FILTER GRAPH MANAGER//
	// Create the Filter Graph Manager.
	hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void**)&pGraphBuilder);
	if (FAILED(hr))
	{
		printf("ERROR - Could not add the graph builder!\n");
		stopDevice();
		return hr;
	}

	//SET THE FILTERGRAPH//
	hr = pCaptureGraphBuilder->SetFiltergraph(pGraphBuilder);
	if (FAILED(hr))
	{
		printf("ERROR - Could not set filtergraph\n");
		stopDevice();
		return hr;
	}

	//MEDIA CONTROL (START/STOPS STREAM)//
	// Using QueryInterface on the graph builder,
	// Get the Media Control object.
	hr = pGraphBuilder->QueryInterface(IID_IMediaControl, (void **)&pMediaControl);
	if (FAILED(hr))
	{
		printf("ERROR - Could not create the Media Control object\n");
		stopDevice();
		return hr;
	}

	char 	nDeviceName[255];
	WCHAR 	wDeviceName[255];
	memset(wDeviceName, 0, sizeof(WCHAR) * 255);
	memset(nDeviceName, 0, sizeof(char) * 255);

	//FIND VIDEO DEVICE AND ADD TO GRAPH//
	//gets the device specified by the second argument.
	hr = getDevice(&pInputFilter, cfg->device, wDeviceName, nDeviceName);

	if (SUCCEEDED(hr)){
		sprintf(cfg->name, "%s", nDeviceName);	// avoid treating the device name as a format string
		//printf("SETUP: %s\n", nDeviceName);
		hr = pGraphBuilder->AddFilter(pInputFilter, wDeviceName);
	}else{
		printf("ERROR - Could not find specified video device\n");
		stopDevice();
		return hr;
	}

	//LOOK FOR PREVIEW PIN IF THERE IS NONE THEN WE USE CAPTURE PIN AND THEN SMART TEE TO PREVIEW
	IAMStreamConfig *streamConfTest = NULL;
	hr = pCaptureGraphBuilder->FindInterface(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, pInputFilter, IID_IAMStreamConfig, (void **)&streamConfTest);
	if(FAILED(hr)){
		//printf("SETUP: Couldn't find preview pin using SmartTee\n");
	}else{
		CAPTURE_MODE = PIN_CATEGORY_PREVIEW;
		streamConfTest->Release();
		streamConfTest = NULL;
	}

	//CROSSBAR (SELECT PHYSICAL INPUT TYPE)//
	//my own function that checks to see if the device can support a crossbar and if so it routes it.
	//webcams tend not to have a crossbar so this function will also detect a webcams and not apply the crossbar
	/*if(useCrossbar)
	{
	//printf("SETUP: Checking crossbar\n");
	routeCrossbar(pCaptureGraphBuilder, pInputFilter, connection, CAPTURE_MODE);
	}*/

	//we do this because webcams don't have a preview mode
	hr = pCaptureGraphBuilder->FindInterface(&CAPTURE_MODE, &MEDIATYPE_Video, pInputFilter, IID_IAMStreamConfig, (void **)&pStreamConfig);
	if(FAILED(hr)){
		printf("ERROR: Couldn't config the stream!\n");
		stopDevice();
		return hr;
	}

	//NOW LETS DEAL WITH GETTING THE RIGHT SIZE
	hr = pStreamConfig->GetFormat(&pAmMediaType);
	if(FAILED(hr)){
		printf("ERROR: Couldn't getFormat for pAmMediaType!\n");
		stopDevice();
		return hr;
	}

	if (!setSizeAndSubtype()) {
		stopDevice();
		return E_FAIL;	// returning false here would read as S_OK
	}

	VIDEOINFOHEADER *pVih =  reinterpret_cast<VIDEOINFOHEADER*>(pAmMediaType->pbFormat);
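	// HEADER() is the amvideo.h macro that yields &pVih->bmiHeader.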
	cfg->cam_width	=  HEADER(pVih)->biWidth;
	cfg->cam_height	=  HEADER(pVih)->biHeight;
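	// AvgTimePerFrame is in 100-ns units; this rounds 1e7/AvgTimePerFrame to one decimal place.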
	cfg->cam_fps = ((int)floor(100000000.0f/(float)pVih->AvgTimePerFrame + 0.5f))/10.0f;

	long bufferSize = cfg->cam_width*cfg->cam_height*3;
	sgCallback->setupBuffer(bufferSize);

	// Create the Sample Grabber.
	hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,IID_IBaseFilter, (void**)&pGrabberFilter);
	if (FAILED(hr)){
		printf("Could not Create Sample Grabber - CoCreateInstance()\n");
		stopDevice();
		return hr;
	}

	hr = pGraphBuilder->AddFilter(pGrabberFilter, L"Sample Grabber");
	if (FAILED(hr)){
		printf("Could not add Sample Grabber - AddFilter()\n");
		stopDevice();
		return hr;
	}

	hr = pGrabberFilter->QueryInterface(IID_ISampleGrabber, (void**)&pSampleGrabber);
	if (FAILED(hr)){
		printf("ERROR: Could not query SampleGrabber\n");
		stopDevice();
		return hr;
	}

	//Get video properties from the stream's mediatype and apply to the grabber (otherwise we don't get an RGB image)
	AM_MEDIA_TYPE mt;
	ZeroMemory(&mt,sizeof(AM_MEDIA_TYPE));
	mt.majortype 	= MEDIATYPE_Video;
	mt.subtype 		= MEDIASUBTYPE_RGB24;
	//mt.subtype 		= MEDIASUBTYPE_YUY2;

	mt.formattype 	= FORMAT_VideoInfo;

	//Set Params - One Shot should be false unless you want to capture just one buffer
	hr = pSampleGrabber->SetMediaType(&mt);
	hr = pSampleGrabber->SetOneShot(FALSE);
	hr = pSampleGrabber->SetBufferSamples(FALSE);

	//Tell the grabber to use our callback function - 0 is for SampleCB and 1 for BufferCB
	//We use SampleCB
	hr = pSampleGrabber->SetCallback(sgCallback, 0);
	if (FAILED(hr)) {
		printf("ERROR: problem setting callback\n");
		stopDevice();
		return hr;
	} /*else {
		printf("SETUP: Capture callback set\n");
	  }*/


	//lets try freeing our stream conf here too
	//this will fail if the device is already running
	/* if(pStreamConfig) {
		pStreamConfig->Release();
		pStreamConfig = NULL;
	} else {
		printf("ERROR: connecting device - prehaps it is already being used?\n");
		stopDevice();
		return S_FALSE;
	}*/

	//used to give the video stream somewhere to go to.
	hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)(&pDestFilter));
	if (FAILED(hr)){
		printf("ERROR: Could not create filter - NullRenderer\n");
		stopDevice();
		return hr;
	}

	hr = pGraphBuilder->AddFilter(pDestFilter, L"NullRenderer");
	if (FAILED(hr)){
		printf("ERROR: Could not add filter - NullRenderer\n");
		stopDevice();
		return hr;
	}

	//This is where the stream gets put together.
	hr = pCaptureGraphBuilder->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, pInputFilter, pGrabberFilter, pDestFilter);

	if (FAILED(hr)){
		printf("ERROR: Could not connect pins - RenderStream()\n");
		stopDevice();
		return hr;
	}


	//EXP - lets try setting the sync source to null - and make it run as fast as possible
	{
		IMediaFilter *pMediaFilter = 0;
		hr = pGraphBuilder->QueryInterface(IID_IMediaFilter, (void**)&pMediaFilter);
		if (FAILED(hr)){
			printf("ERROR: Could not get IID_IMediaFilter interface\n");
		}else{
			pMediaFilter->SetSyncSource(NULL);
			pMediaFilter->Release();
		}
	}

	//printf("SETUP: Device is setup and ready to capture.\n\n");

	//if we release this then we don't have access to the settings
	//we release our video input filter but then reconnect with it
	//each time we need to use it
	//pInputFilter->Release();
	//pInputFilter = NULL;

	pGrabberFilter->Release();
	pGrabberFilter = NULL;

	pDestFilter->Release();
	pDestFilter = NULL;

	return S_OK;
}