Example #1
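// Builds a DirectShow media type for the i-th representation of a Speakers
// configuration: SPDIF formats expose two media types (AC3-over-SPDIF and
// plain PCM), while PCM formats delegate the index to spk2wfe().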
bool spk2mt(Speakers spk, CMediaType &mt, int i)
{
  if (spk.format == FORMAT_SPDIF)
  {
    // SPDIF media types
    if (i < 0 || i >= 2)
      return false;

    // std::auto_ptr is deprecated; std::unique_ptr gives the same
    // scoped-delete semantics here
    std::unique_ptr<WAVEFORMATEX> wfe(spk2wfe(spk, 0));
    if (!wfe.get())
      return false;

    mt.SetType(&MEDIATYPE_Audio);
    mt.SetSubtype(i == 0 ? &MEDIASUBTYPE_DOLBY_AC3_SPDIF : &MEDIASUBTYPE_PCM);
    mt.SetFormatType(&FORMAT_WaveFormatEx);
    mt.SetFormat((BYTE*)wfe.get(), sizeof(WAVEFORMATEX) + wfe->cbSize);
    return true;
  }
  else if (FORMAT_MASK(spk.format) & FORMAT_CLASS_PCM)
  {
    // PCM media types
    std::unique_ptr<WAVEFORMATEX> wfe(spk2wfe(spk, i));
    if (!wfe.get())
      return false;

    mt.SetType(&MEDIATYPE_Audio);
    mt.SetSubtype(&MEDIASUBTYPE_PCM);
    mt.SetFormatType(&FORMAT_WaveFormatEx);
    mt.SetFormat((BYTE*)wfe.get(), sizeof(WAVEFORMATEX) + wfe->cbSize);
    return true;
  }
  else
    return false;
}
Example #2
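// Called while the output pin is connecting: selects MPEG-2 program or
// transport stream as the default media type, depending on the configured
// output format.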
void CTSParserFilter::OutpinConnecting( IPin * pReceivePin, CBasePin* pFilterPin, const AM_MEDIA_TYPE *pmt )
{
	REMUXER* pRemuxer;
	HRESULT hr = m_pInputPin->GetParser( &pRemuxer );
	if ( hr != NOERROR )
		return;

	if ( pFilterPin == m_pOutputPin )
	{
		CMediaType mt;
		mt.SetTemporalCompression(FALSE);
		mt.SetType(&MEDIATYPE_Stream);
		if ( IS_PS_TYPE( m_wOutputFormat ) )
			mt.SetSubtype(&MEDIASUBTYPE_MPEG2_PROGRAM);
		else
			mt.SetSubtype(&MEDIASUBTYPE_MPEG2_TRANSPORT);
		m_pOutputPin->DefaultMediaType( &mt );
	}
}
Example #3
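// Returns the media type of the current audio stream, or a fake AC3 media
// type when no real audio stream is present.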
void CDeMultiplexer::GetAudioStreamPMT(CMediaType& pmt)
{
  // Fake audio in use
  if (m_AudioStreamType == NO_STREAM)
  {
    pmt.InitMediaType();
    pmt.SetType(&MEDIATYPE_Audio);
    pmt.SetSubtype(&MEDIASUBTYPE_DOLBY_AC3);
    pmt.SetSampleSize(1);
    pmt.SetTemporalCompression(FALSE);
    pmt.SetVariableSize();
    pmt.SetFormatType(&FORMAT_WaveFormatEx);
    pmt.SetFormat(AC3AudioFormat, sizeof(AC3AudioFormat));
  }
  else
    pmt = m_audioParser->pmt;
}
Example #4
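// Emits a silent AC3 frame so the audio pin keeps flowing when the clip has
// no real audio; the first fake packet also carries the media type
// (6 channels, 48 kHz, Dolby AC3).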
Packet* CClip::GenerateFakeAudio(REFERENCE_TIME rtStart)
{
  if (rtStart + FAKE_AUDIO_DURATION - 1 > playlistFirstPacketTime + clipDuration) 
    superceeded |= SUPERCEEDED_AUDIO_RETURN;
  
  if (superceeded & SUPERCEEDED_AUDIO_RETURN)
    return NULL;
  
  if (!FakeAudioAvailable()) 
    return NULL;

  Packet* packet = new Packet();
  packet->nClipNumber = nClip;
    
  packet->SetCount(AC3_FRAME_LENGTH);
  packet->SetData(ac3_sample, AC3_FRAME_LENGTH);
  packet->rtStart = rtStart;
  packet->rtStop = packet->rtStart + 1;

  if (firstAudio)
  {
    CMediaType pmt;
    pmt.InitMediaType();
    pmt.SetType(&MEDIATYPE_Audio);
    pmt.SetSubtype(&MEDIASUBTYPE_DOLBY_AC3);
    pmt.SetSampleSize(1);
    pmt.SetTemporalCompression(FALSE);
    pmt.SetVariableSize();
    pmt.SetFormatType(&FORMAT_WaveFormatEx);
    pmt.SetFormat(AC3AudioFormat, sizeof(AC3AudioFormat));
    WAVEFORMATEXTENSIBLE* wfe = (WAVEFORMATEXTENSIBLE*)pmt.pbFormat;
    wfe->Format.nChannels = 6;
    wfe->Format.nSamplesPerSec = 48000;
    wfe->Format.wFormatTag = WAVE_FORMAT_DOLBY_AC3;

    packet->pmt = CreateMediaType(&pmt);
  }
  
  audioPlaybackPosition += FAKE_AUDIO_DURATION;
  lastAudioPosition += FAKE_AUDIO_DURATION;

  return packet;
}
Example #5
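// Accepts only Audio/PCM output types; the input pin is given a dummy
// Stream/None type because GUID_NULL would trigger a debug assertion.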
HRESULT CAudioDecFilter::CheckTransform(const CMediaType* mtIn, const CMediaType* mtOut)
{
	CheckPointer(mtIn, E_POINTER);
	CheckPointer(mtOut, E_POINTER);

	if (*mtOut->Type() == MEDIATYPE_Audio) {
		if (*mtOut->Subtype() == MEDIASUBTYPE_PCM) {

			// GUID_NULL triggers a debug assertion, so set a dummy type to avoid it
			CMediaType MediaType;
			MediaType.InitMediaType();
			MediaType.SetType(&MEDIATYPE_Stream);
			MediaType.SetSubtype(&MEDIASUBTYPE_None);

			m_pInput->SetMediaType(&MediaType);

			return S_OK;
		}
	}

	return VFW_E_TYPE_NOT_ACCEPTED;
}
Example #6
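// The subtitle pin accepts exactly one media type: the Stream /
// MPEG2_TRANSPORT type built below, and only after the PAT has been parsed.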
HRESULT CSubtitlePin::CheckMediaType(const CMediaType* pmt)
{
  CheckPointer(pmt, E_POINTER);

  CDeMultiplexer& demux=m_pTsReaderFilter->GetDemultiplexer();
  
  if (!m_pTsReaderFilter->CheckCallback())
  {
    //LogDebug("subPin: Not running in MP - CheckMediaType() fail");
    return E_FAIL;
  }

  if (!demux.PatParsed())
  {
    return E_FAIL;
  }

  CMediaType pmti;
  pmti.InitMediaType();
  pmti.SetType(&MEDIATYPE_Stream);
  pmti.SetSubtype(&MEDIASUBTYPE_MPEG2_TRANSPORT);
  pmti.SetSampleSize(1);
  pmti.SetTemporalCompression(FALSE);
  pmti.SetVariableSize();

  if (*pmt == pmti)
  {
    //LogDebug("subPin:CheckMediaType() ok");  
    return S_OK;
  }

  //LogDebug("subPin:CheckMediaType() fail");  
  return E_FAIL;
}
Example #7
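// Opens a video capture device with DirectShow: builds the filter graph,
// negotiates a supported pixel format and frame size, inserts a grabber
// filter (CDXFilter) followed by a null renderer, and runs the graph.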
static int v4w_open_videodevice(V4wState *s)
{
	// Initialize COM
	CoInitialize(NULL);

	// get a Graph
	HRESULT hr= CoCreateInstance (CLSID_FilterGraph,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IGraphBuilder, //IID_IBaseFilter,
		(void **)&s->m_pGraph);
	if(FAILED(hr))
	{
		return -1;
	}

	// get a CaptureGraphBuilder2
	hr= CoCreateInstance (CLSID_CaptureGraphBuilder2,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_ICaptureGraphBuilder2, //IID_IBaseFilter,
		(void **)&s->m_pBuilder);
	if(FAILED(hr))
	{
		return -2;
	}

	// connect capture graph builder with the graph
	s->m_pBuilder->SetFiltergraph(s->m_pGraph);

	// get mediacontrol so we can start and stop the filter graph
	hr=s->m_pGraph->QueryInterface (IID_IMediaControl, (void **)&s->m_pControl);
	if(FAILED(hr))
	{
		return -3;
	}


	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;

	ULONG nFetched = 0;

	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
	if(FAILED(hr))
	{
		return -4;
	}

	hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (FAILED(hr) || pEnumMoniker == NULL) {
		//printf("no device\n");
		return -5;
	}

	pEnumMoniker->Reset();

	while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
	{
		IPropertyBag *pBag = NULL;
		hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag );
		if( hr != S_OK )
		{
			pMoniker->Release();
			pMoniker = NULL;
			continue;
		}

		// no device name requested: take the first device found
		if (s->dev[0]=='\0')
		{
			pBag->Release();
			break;
		}

		VARIANT var;
		VariantInit(&var);
		hr = pBag->Read( L"FriendlyName", &var, NULL );
		if( hr != S_OK )
		{
			pBag->Release();
			pMoniker->Release();
			pMoniker = NULL;
			continue;
		}

		char szName[256];
		WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0);
		VariantClear(&var);

		if (strcmp(szName, s->dev)==0)
		{
			pBag->Release();
			break;
		}

		pMoniker->Release();
		pBag->Release();
		pMoniker=NULL;
	}

	if(pMoniker==NULL)
	{
		return -6;
	}

	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
	if(FAILED(hr))
	{
		return -7;
	}

	s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

	pMoniker->Release();
	pEnumMoniker->Release();
	pCreateDevEnum->Release();


	GUID pPinCategory;

	if (try_format(s->m_pDeviceFilter, s->pix_fmt, &pPinCategory)==0)
		s->pix_fmt = s->pix_fmt; // requested format is supported; keep it
	else if (try_format(s->m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0)
		s->pix_fmt = MS_YUV420P;
	else if (try_format(s->m_pDeviceFilter,MS_YUY2, &pPinCategory)==0)
		s->pix_fmt = MS_YUY2;
	else if (try_format(s->m_pDeviceFilter,MS_YUYV, &pPinCategory)==0)
		s->pix_fmt = MS_YUYV;
	else if (try_format(s->m_pDeviceFilter,MS_UYVY, &pPinCategory)==0)
		s->pix_fmt = MS_UYVY;
	else if (try_format(s->m_pDeviceFilter,MS_RGB24, &pPinCategory)==0)
		s->pix_fmt = MS_RGB24;
	else
	{
		ms_error("Unsupported video pixel format.");
		return -8;
	}

	if (s->pix_fmt == MS_YUV420P)
		ms_message("Driver supports YUV420P, using that format.");
	else if (s->pix_fmt == MS_YUY2)
		ms_message("Driver supports YUY2 (YUYV), using that format.");
	else if (s->pix_fmt == MS_YUYV)
		ms_message("Driver supports YUV422, using that format.");
	else if (s->pix_fmt == MS_UYVY)
		ms_message("Driver supports UYVY, using that format.");
	else if (s->pix_fmt == MS_RGB24)
		ms_message("Driver supports RGB24, using that format.");

	if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H);
	else
	{
		ms_error("No supported size found for format.");
		/* size not supported? */
		return -9;
	}

	// get DXFilter
	s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
	if(s->m_pDXFilter==NULL)
	{
		return -10;
	}
	s->m_pDXFilter->AddRef();

	CMediaType mt;
	mt.SetType(&MEDIATYPE_Video);

	GUID m = MEDIASUBTYPE_RGB24;
	if (s->pix_fmt == MS_YUV420P)
		m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
	else if (s->pix_fmt == MS_YUY2)
		m = MEDIASUBTYPE_YUY2;
	else if (s->pix_fmt == MS_YUYV)
		m = MEDIASUBTYPE_YUYV;
	else if (s->pix_fmt == MS_UYVY)
		m = MEDIASUBTYPE_UYVY;
	else if (s->pix_fmt == MS_RGB24)
		m = MEDIASUBTYPE_RGB24;
	mt.SetSubtype(&m);

	mt.formattype = FORMAT_VideoInfo;
	mt.SetTemporalCompression(FALSE);

	VIDEOINFO *pvi = (VIDEOINFO *)
		mt.AllocFormatBuffer(sizeof(VIDEOINFO));
	if (NULL == pvi)
		return -11;
	ZeroMemory(pvi, sizeof(VIDEOINFO));

	if (s->pix_fmt == MS_YUV420P)
		pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
	else if (s->pix_fmt == MS_YUY2)
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
	else if (s->pix_fmt == MS_YUYV)
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','V');
	else if (s->pix_fmt == MS_UYVY)
		pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
	else if (s->pix_fmt == MS_RGB24)
		pvi->bmiHeader.biCompression = BI_RGB;

	if (s->pix_fmt == MS_YUV420P)
		pvi->bmiHeader.biBitCount = 12;
	else if (s->pix_fmt == MS_YUY2)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_YUYV)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_UYVY)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_RGB24)
		pvi->bmiHeader.biBitCount = 24;

	pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	pvi->bmiHeader.biWidth = s->vsize.width;
	pvi->bmiHeader.biHeight = s->vsize.height;
	pvi->bmiHeader.biPlanes = 1;
	pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
	pvi->bmiHeader.biClrImportant = 0;
	mt.SetSampleSize(pvi->bmiHeader.biSizeImage);

	mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

	hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
	if(FAILED(hr))
	{
		return -12;
	}

	hr = s->m_pDXFilter->SetCallback(Callback); 
	if(FAILED(hr))
	{
		return -13;
	}

	hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
		(LPVOID *)&s->m_pIDXFilter);
	if(FAILED(hr))
	{
		return -14;
	}

	hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
	if(FAILED(hr))
	{
		return -15;
	}


	// get null renderer
	hr=CoCreateInstance (CLSID_NullRenderer,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IBaseFilter,
		(void **)&s->m_pNullRenderer);
	if(FAILED(hr))
	{
		return -16;
	}
	if (s->m_pNullRenderer!=NULL)
	{
		s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
	}

	hr = s->m_pBuilder->RenderStream(&pPinCategory,
		&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
	if (FAILED(hr))
	{
		return -17;
	}

	IAMStreamConfig *pConfig = NULL;
	hr = s->m_pBuilder->FindInterface(
		&pPinCategory, // Preview pin.
		&MEDIATYPE_Video,    // Any media type.
		s->m_pDeviceFilter, // Pointer to the capture filter.
		IID_IAMStreamConfig, (void**)&pConfig); 
	if (pConfig!=NULL)
	{
		AM_MEDIA_TYPE *pType = NULL;
		int iCount, iSize;
		pConfig->GetNumberOfCapabilities(&iCount, &iSize);

		for (int i = 0; i < iCount; i++) {
			VIDEO_STREAM_CONFIG_CAPS scc;
			pType = NULL;
			hr = pConfig->GetStreamCaps(i, &pType, (BYTE *)&scc);
			if (FAILED(hr) || pType == NULL)
				continue;

			if ((pType->formattype == FORMAT_VideoInfo) &&
				(pType->cbFormat >= sizeof(VIDEOINFOHEADER)) &&
				(pType->pbFormat != NULL))
			{
				VIDEOINFOHEADER & videoInfo = *(VIDEOINFOHEADER *)pType->pbFormat;

				if (m == pType->subtype &&
					videoInfo.bmiHeader.biWidth == s->vsize.width &&
					videoInfo.bmiHeader.biHeight == s->vsize.height &&
					videoInfo.bmiHeader.biBitCount == pvi->bmiHeader.biBitCount &&
					videoInfo.bmiHeader.biCompression == pvi->bmiHeader.biCompression)
				{
					videoInfo.AvgTimePerFrame = UNITS / (LONGLONG)s->fps;
					pConfig->SetFormat(pType);
				}
			}

			DeleteMediaType(pType);	// GetStreamCaps allocates the media type
		}

		pType = NULL;
		pConfig->GetFormat(&pType);
		if (pType!=NULL)
		{
			VIDEOINFO *pSelected = (VIDEOINFO *)pType->pbFormat;
			ms_message("v4w: camera asked fps=%.2f // real fps=%.2f", s->fps, ((float)UNITS / (float)pSelected->AvgTimePerFrame));
			DeleteMediaType(pType);
		}

		pConfig->Release();
	}

	//m_pDXFilter->SetBufferSamples(TRUE);

	s_callback = s;
	hr = s->m_pControl->Run();
	if(FAILED(hr))
	{
		return -18;
	}


	s->rotregvalue=1;
	return 0;
}
Example #8
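// Plays a video file through a SampleGrabber forced to 24-bit RGB, so the
// callback object (m_pSGCB) receives uncompressed frames during playback.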
BOOL CDXVideoGrabber::PlayFile(const char* inFile, HWND hWnd)
{
	RemoveAllFilters();

	HRESULT hr = m_pGrabber.CoCreateInstance( CLSID_SampleGrabber );
	if( !m_pGrabber )
		return FALSE;

	CComQIPtr< IBaseFilter, &IID_IBaseFilter > pGrabBase( m_pGrabber );
	// force it to connect to video, 24 bit
	//
	CMediaType VideoType;
	VideoType.SetType( &MEDIATYPE_Video );
	VideoType.SetSubtype( &MEDIASUBTYPE_RGB24 );
	hr = m_pGrabber->SetMediaType( &VideoType ); // shouldn't fail
	if( FAILED( hr ) )
		return FALSE;

	// add the grabber to the graph
	//
	hr = m_pIGraphBuilder->AddFilter( pGrabBase, L"Grabber" );
	if( FAILED( hr ) )
		return FALSE;

	RenderFile(inFile);

	AM_MEDIA_TYPE mt;
	hr = m_pGrabber->GetConnectedMediaType( &mt );
	if ( FAILED( hr) )
	{
		return FALSE;
	}

	VIDEOINFOHEADER * vih = (VIDEOINFOHEADER*) mt.pbFormat;
	if(m_pSGCB)
	{
		// 		m_pSGCB->SetOwner(GetParent(hWnd));
		m_pSGCB->SetWidth(vih->bmiHeader.biWidth);
		m_pSGCB->SetHeight(vih->bmiHeader.biHeight);
	}
	FreeMediaType( mt );

	// don't buffer the samples as they pass through
	//
	hr = m_pGrabber->SetBufferSamples( FALSE );

	// keep the stream running; SetOneShot(TRUE) would stop
	// the graph after the first sample
	//
	hr = m_pGrabber->SetOneShot( FALSE );

	// set the callback; the second argument (1) selects BufferCB(),
	// which receives a copy of each sample buffer
	//
	hr = m_pGrabber->SetCallback( m_pSGCB, 1 );

	if(!SetDisplayWindow(hWnd))
	{
		return FALSE;
	}

	return Run();
}
Example #9
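// Builds a live capture graph: default capture device -> SampleGrabber
// forced to 24-bit RGB -> renderer, trying the interleaved (DV), preview
// and capture pins in turn; a null renderer is used when a VP pin exists.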
BOOL CDXCamara::PlayFile(const char* inFile, HWND hWnd)
{
	HRESULT hr;

	RemoveAllFilters();
	// get whatever capture device exists
	//
	CComPtr< IBaseFilter > pCap;
	GetDefaultCapDevice( &pCap );
	if( !pCap )
		return FALSE;

	// add the capture filter to the graph
	//
	hr = m_pIGraphBuilder->AddFilter( pCap, L"Cap" );
	if( FAILED( hr ) )
		return FALSE;

	hr = m_pGrabber.CoCreateInstance( CLSID_SampleGrabber );
	if( !m_pGrabber )
		return FALSE;

	CComQIPtr< IBaseFilter, &IID_IBaseFilter > pGrabBase( m_pGrabber );
	// force it to connect to video, 24 bit
	//
	CMediaType VideoType;
	VideoType.SetType( &MEDIATYPE_Video );
	VideoType.SetSubtype( &MEDIASUBTYPE_RGB24 );
	hr = m_pGrabber->SetMediaType( &VideoType ); // shouldn't fail
	if( FAILED( hr ) )
		return FALSE;

	// add the grabber to the graph
	//
	hr = m_pIGraphBuilder->AddFilter( pGrabBase, L"Grabber" );
	if( FAILED( hr ) )
		return FALSE;

	// make a capture builder graph (for connecting help)
	//
	CComPtr< ICaptureGraphBuilder2 > pBuilder;
	hr = pBuilder.CoCreateInstance( CLSID_CaptureGraphBuilder2 );
	if( !pBuilder )
		return FALSE;

	hr = pBuilder->SetFiltergraph( m_pIGraphBuilder );

	if( FAILED( hr ) )
		return FALSE;


	// If there is a VP pin present on the video device, then put the 
	// renderer on CLSID_NullRenderer
	CComPtr<IPin> pVPPin;
	hr = pBuilder->FindPin(
		pCap, 
		PINDIR_OUTPUT, 
		&PIN_CATEGORY_VIDEOPORT, 
		NULL, 
		FALSE, 
		0, 
		&pVPPin);


	// If there is a VP pin, put the renderer on NULL Renderer
	CComPtr<IBaseFilter> pRenderer;
	if (S_OK == hr)
	{   
		hr = pRenderer.CoCreateInstance(CLSID_NullRenderer);    
		if (FAILED (hr))
		{
			return FALSE;
		}
		hr = m_pIGraphBuilder->AddFilter(pRenderer, L"NULL renderer");
		if (FAILED (hr))
		{
			return FALSE;
		}
	}

	hr = pBuilder->RenderStream(
		&PIN_CATEGORY_PREVIEW,
		&MEDIATYPE_Interleaved, 
		pCap, 
		pGrabBase, 
		pRenderer);
	if (FAILED (hr))
	{
		// try to render preview pin
		hr = pBuilder->RenderStream( 
			&PIN_CATEGORY_PREVIEW, 
			&MEDIATYPE_Video,
			pCap, 
			pGrabBase, 
			pRenderer);

		// try to render capture pin
		if( FAILED( hr ) )
		{
			hr = pBuilder->RenderStream( 
				&PIN_CATEGORY_CAPTURE, 
				&MEDIATYPE_Video,
				pCap, 
				pGrabBase, 
				pRenderer);
		}
	}

	if( FAILED( hr ) )
	{
		return FALSE;
	}

	// ask for the connection media type so we know how big
	// it is, so we can write out bitmaps
	//
	AM_MEDIA_TYPE mt;
	hr = m_pGrabber->GetConnectedMediaType( &mt );
	if ( FAILED( hr) )
	{
		return FALSE;
	}

	VIDEOINFOHEADER * vih = (VIDEOINFOHEADER*) mt.pbFormat;
	if(m_pSGCB)
	{
// 		m_pSGCB->SetOwner(GetParent(hWnd));
		m_pSGCB->SetWidth(vih->bmiHeader.biWidth);
		m_pSGCB->SetHeight(vih->bmiHeader.biHeight);
	}
	FreeMediaType( mt );

	// don't buffer the samples as they pass through
	//
	hr = m_pGrabber->SetBufferSamples( FALSE );

	// keep the stream running; SetOneShot(TRUE) would stop
	// the graph after the first sample
	//
	hr = m_pGrabber->SetOneShot( FALSE );

	// set the callback; the second argument (1) selects BufferCB(),
	// which receives a copy of each sample buffer
	//
	hr = m_pGrabber->SetCallback( m_pSGCB, 1 );


// 	CComPtr< IBaseFilter > pMux;
// 	CComPtr< IFileSinkFilter > pSink;
// 	USES_CONVERSION;
// 
// 	hr = pBuilder->SetOutputFileName( &MEDIASUBTYPE_Avi,  T2W( inFile ), 
// 		&pMux, &pSink );
// 	if( FAILED( hr ) )
// 		return FALSE;
// 
// 	hr = pBuilder->RenderStream( &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
// 		pCap, NULL,pMux );
// 	if( FAILED( hr ) )
// 		return FALSE;

// 	hr = pBuilder->RenderStream( &PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
// 		pCap, NULL, NULL );
// 	if( FAILED( hr ) )
// 		return FALSE;

	if(!SetDisplayWindow(hWnd))
	{
		return FALSE;
	}

	return Run();
}
Example #10
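// Parses the 2TSF file header and frame table, then creates a video output
// pin (FST format) and, when a soundtrack is present, a PCM audio output pin.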
HRESULT CFSTSplitterFilter::Initialize(IPin *pPin, IAsyncReader *pReader)
{
	// Check and validate the pointer
	CheckPointer(pReader, E_POINTER);
	ValidateReadPtr(pReader, sizeof(IAsyncReader));

	// Read file header
	FST_HEADER header;
	HRESULT hr = pReader->SyncRead(0, sizeof(header), (BYTE*)&header);
	if (hr != S_OK)
		return hr;

	// Verify file header
	if (header.dwID != FST_ID_2TSF)
		return VFW_E_INVALID_FILE_FORMAT;

	// Protect the filter data
	CAutoLock datalock(&m_csData);

	// Set video stream info
	m_nVideoFrames		= header.nVideoFrames;
	m_nFramesPerSecond	= header.nFramesPerSecond;

	// Allocate frame table
	m_pFrameTable = (FST_FRAME_ENTRY*)CoTaskMemAlloc(m_nVideoFrames * sizeof(FST_FRAME_ENTRY));
	if (m_pFrameTable == NULL) {
		Shutdown();
		return E_OUTOFMEMORY;
	}

	// Read in the frame table
	hr = pReader->SyncRead(sizeof(header), m_nVideoFrames * sizeof(FST_FRAME_ENTRY), (BYTE*)m_pFrameTable);
	if (hr != S_OK) {
		Shutdown();
		return hr;
	}

	// Walk the frame table and determine the maximum frame data sizes
	DWORD cbMaxImage = 0, cbMaxSound = 0;
	for (DWORD iFrame = 0; iFrame < m_nVideoFrames; iFrame++) {
		if (m_pFrameTable[iFrame].cbImage > cbMaxImage)
			cbMaxImage = m_pFrameTable[iFrame].cbImage;
		if (m_pFrameTable[iFrame].cbSound > cbMaxSound)
			cbMaxSound = m_pFrameTable[iFrame].cbSound;
	}

	// Set file positions
	m_llDefaultStart	= (LONGLONG)sizeof(FST_HEADER) + m_nVideoFrames * sizeof(FST_FRAME_ENTRY);	// Right after the header and frame table
	m_llDefaultStop		= MAXLONGLONG; // Defaults to file end

	// Decide on the input pin properties
	m_cbInputAlign	= 1;
	m_cbInputBuffer	= cbMaxImage + cbMaxSound;

	// Protect the output pins state
	CAutoLock pinlock(&m_csPins);

	// Decide on the output pins count
	m_nOutputPins = 1;	// Video is always present

	// Check if we have soundtrack
	if (
		(header.dwAudioSampleRate	!= 0) &&
		(header.nAudioBits			!= 0) &&
		(m_pFrameTable[0].cbSound	!= 0)
	)
		m_nOutputPins++;

	// Create output pins array
	ASSERT(m_ppOutputPin == NULL);
	m_ppOutputPin = new CParserOutputPin*[m_nOutputPins];
	if (m_ppOutputPin == NULL) {
		m_nOutputPins = 0;
		Shutdown();
		return E_OUTOFMEMORY;
	}

	// Reset the output pin array elements to NULLs
	for (int i = 0; i < m_nOutputPins; i++)
		m_ppOutputPin[i] = NULL;

	// Allocate video media type
	CMediaType *pmtVideo = new CMediaType();
	if (pmtVideo == NULL) {
		Shutdown();
		return E_OUTOFMEMORY;
	}

	// Initialize the video media type
	pmtVideo->InitMediaType();
	pmtVideo->SetType(&MEDIATYPE_Video);
	pmtVideo->SetSubtype(&MEDIASUBTYPE_FSTVideo);
	pmtVideo->SetSampleSize(0);
	pmtVideo->SetTemporalCompression(TRUE);
	pmtVideo->SetFormatType(&FORMAT_FSTVideo);
	if (!pmtVideo->SetFormat((BYTE*)&header, sizeof(header))) {
		delete pmtVideo;
		Shutdown();
		return E_FAIL;
	}

	// Allocate the video allocator properties
	ALLOCATOR_PROPERTIES *papVideo = (ALLOCATOR_PROPERTIES*)CoTaskMemAlloc(sizeof(ALLOCATOR_PROPERTIES));
	if (papVideo == NULL) {
		delete pmtVideo;
		Shutdown();
		return E_OUTOFMEMORY;
	}

	// Set the video allocator properties
	papVideo->cbAlign	= 0;	// No matter
	papVideo->cbPrefix	= 0;	// No matter
	papVideo->cBuffers	= 4;	// TEMP: No need to set larger value?
	papVideo->cbBuffer	= cbMaxImage;

	// Allocate time formats array. If we fail here, it's not an error, 
	// we'll just set zero seeker parameters and may proceed
	DWORD dwVideoCapabilities = 0;
	int nVideoTimeFormats = 0;
	GUID *pVideoTimeFormats = (GUID*)CoTaskMemAlloc(3 * sizeof(GUID));
	if (pVideoTimeFormats) {

		nVideoTimeFormats = 3;

		// Fill in the time formats array
		pVideoTimeFormats[0] = TIME_FORMAT_MEDIA_TIME;
		pVideoTimeFormats[1] = TIME_FORMAT_FRAME;
		pVideoTimeFormats[2] = TIME_FORMAT_SAMPLE;

		dwVideoCapabilities =	AM_SEEKING_CanGetCurrentPos	|
								AM_SEEKING_CanGetStopPos	|
								AM_SEEKING_CanGetDuration;
	}

	// Create video output pin (always the first one!)
	hr = NOERROR;
	m_ppOutputPin[0] = new CParserOutputPin(
		NAME("FST Splitter Video Output Pin"),
		this,
		&m_csFilter,
		pmtVideo,
		papVideo,
		dwVideoCapabilities,
		nVideoTimeFormats,
		pVideoTimeFormats,
		&hr,
		wszFSTVideoOutputName
	);
	if (
		(FAILED(hr)) ||
		(m_ppOutputPin[0] == NULL)
	) {
		if (m_ppOutputPin[0]) {
			delete m_ppOutputPin[0];
			m_ppOutputPin[0] = NULL;
		} else {
			delete pmtVideo;
			CoTaskMemFree(papVideo);
			if (pVideoTimeFormats)
				CoTaskMemFree(pVideoTimeFormats);
		}
		Shutdown();

		if (FAILED(hr))
			return hr;
		else
			return E_OUTOFMEMORY;
	}
	// Hold a reference on the video output pin
	m_ppOutputPin[0]->AddRef();

	// We've created a new pin -- so increment pin version
	IncrementPinVersion();

	if (m_nOutputPins > 1) {

		// Allocate audio media type
		CMediaType *pmtAudio = new CMediaType();
		if (pmtAudio == NULL) {
			Shutdown();
			return E_OUTOFMEMORY;
		}

		// Initialize the audio media type
		pmtAudio->InitMediaType();
		pmtAudio->SetType(&MEDIATYPE_Audio);
		pmtAudio->SetSubtype(&MEDIASUBTYPE_PCM);
		pmtAudio->SetSampleSize(header.nAudioBits * (header.wAudioChannels + 1) / 8);
		pmtAudio->SetTemporalCompression(FALSE);
		pmtAudio->SetFormatType(&FORMAT_WaveFormatEx);
		WAVEFORMATEX *pFormat = (WAVEFORMATEX*)pmtAudio->AllocFormatBuffer(sizeof(WAVEFORMATEX));
		if (pFormat == NULL) {
			delete pmtAudio;
			Shutdown();
			return E_OUTOFMEMORY;
		}

		// Fill in the audio format block
		pFormat->wFormatTag			= WAVE_FORMAT_PCM;
		pFormat->nChannels			= header.wAudioChannels + 1; // TEMP: Is it really so?
		pFormat->nSamplesPerSec		= header.dwAudioSampleRate;
		pFormat->nAvgBytesPerSec	= pFormat->nChannels * header.nAudioBits * header.dwAudioSampleRate / 8;
		pFormat->nBlockAlign		= pFormat->nChannels * header.nAudioBits / 8;
		pFormat->wBitsPerSample		= header.nAudioBits;
		pFormat->cbSize				= 0;

		// Allocate the audio allocator properties
		ALLOCATOR_PROPERTIES *papAudio = (ALLOCATOR_PROPERTIES*)CoTaskMemAlloc(sizeof(ALLOCATOR_PROPERTIES));
		if (papAudio == NULL) {
			delete pmtAudio;
			Shutdown();
			return E_OUTOFMEMORY;
		}

		// Set the audio allocator properties
		papAudio->cbAlign	= 0;	// No matter
		papAudio->cbPrefix	= 0;	// No matter
		papAudio->cBuffers	= 4;	// No use to set different from video value
		papAudio->cbBuffer	= cbMaxSound;

		// Set the wave format parameters needed for the calculation
		// of sample stream and media duration
		m_nSampleSize		= pFormat->nBlockAlign;
		m_nAvgBytesPerSec	= pFormat->nAvgBytesPerSec;

		// Allocate time formats array. If we fail here, it's not an error, 
		// we'll just set zero seeker parameters and may proceed
		DWORD dwAudioCapabilities = 0;
		int nAudioTimeFormats = 0;
		GUID *pAudioTimeFormats = (GUID*)CoTaskMemAlloc(3 * sizeof(GUID));
		if (pAudioTimeFormats) {

			nAudioTimeFormats = 3;

			// Fill in the time formats array
			pAudioTimeFormats[0] = TIME_FORMAT_MEDIA_TIME;
			pAudioTimeFormats[1] = TIME_FORMAT_SAMPLE;
			pAudioTimeFormats[2] = TIME_FORMAT_BYTE;

			dwAudioCapabilities	=	AM_SEEKING_CanGetCurrentPos	|
									AM_SEEKING_CanGetStopPos	|
									AM_SEEKING_CanGetDuration;
		}

		// Create audio output pin
		hr = NOERROR;
		m_ppOutputPin[1] = new CParserOutputPin(
			NAME("FST Splitter Audio Output Pin"),
			this,
			&m_csFilter,
			pmtAudio,
			papAudio,
			dwAudioCapabilities,
			nAudioTimeFormats,
			pAudioTimeFormats,
			&hr,
			wszFSTAudioOutputName
		);
		if (
			(FAILED(hr)) ||
			(m_ppOutputPin[1] == NULL)
		) {
			if (m_ppOutputPin[1]) {
				delete m_ppOutputPin[1];
				m_ppOutputPin[1] = NULL;
			} else {
				delete pmtAudio;
				CoTaskMemFree(papAudio);
				if (pAudioTimeFormats)
					CoTaskMemFree(pAudioTimeFormats);
			}
			Shutdown();

			if (FAILED(hr))
				return hr;
			else
				return E_OUTOFMEMORY;
		}
		// Hold a reference on the audio output pin
		m_ppOutputPin[1]->AddRef();

		// We've created a new pin -- so increment pin version
		IncrementPinVersion();
	}

	// Scope for the locking
	{
		// Protect media content information
		CAutoLock infolock(&m_csInfo);

		// Set the media content strings
		m_wszAuthorName = (OLECHAR*)CoTaskMemAlloc(sizeof(OLECHAR) * (lstrlenW(wszFSTAuthorName) + 1));
		if (m_wszAuthorName)
			lstrcpyW(m_wszAuthorName, wszFSTAuthorName);
		m_wszDescription = (OLECHAR*)CoTaskMemAlloc(sizeof(OLECHAR) * (lstrlenW(wszFSTDescription) + 1));
		if (m_wszDescription)
			lstrcpyW(m_wszDescription, wszFSTDescription);
	}

	return NOERROR;
}
Example #11
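// Constructs the TS parser filter: one input pin plus output pins for the
// remuxed stream (TSPin), elementary video/audio, pass-through and dump,
// each of which is assigned a default media type below.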
CTSParserFilter::CTSParserFilter(LPUNKNOWN pUnk,
                           HRESULT *phr) 
: CBaseFilter(NAME("CTSParser"), pUnk, &m_Lock, CLSID_TSParser)

{
    ASSERT(phr);
	m_wOutputFormat = MPEG_PS;
    // Create the single input pin

	m_pInputPin = new CTSParserInputPin(   
		                                this,
                                        GetOwner(),
                                        &m_Lock,
                                        &m_ReceiveLock,
                                        phr);
    if(m_pInputPin == NULL)
    {
        if (phr)
            *phr = E_OUTOFMEMORY;
        return; // bail out: the code below dereferences the pins
    }

    m_pOutputPin = new CTSParserOutputPin(
										L"TSPin",
		                                this,
                                        GetOwner(),
                                        &m_Lock,
                                        phr);
    if( m_pOutputPin == NULL )
    {
        if (phr)
            *phr = E_OUTOFMEMORY;
        delete m_pInputPin;
        m_pInputPin = NULL;
        return;
    }

    m_pVideoPin = new CTSParserOutputPin(
										L"VideoPin",
		                                this,
                                        GetOwner(),
                                        &m_Lock,
                                        phr);
    if( m_pVideoPin == NULL )
    {
        if (phr)
            *phr = E_OUTOFMEMORY;
        delete m_pInputPin;
        delete m_pOutputPin;
        m_pInputPin = NULL;
        m_pOutputPin = NULL;
        return;
    }

    m_pAudioPin = new CTSParserOutputPin(
										L"AudioPin",
		                                this,
                                        GetOwner(),
                                        &m_Lock,
                                        phr);
    if( m_pAudioPin == NULL )
    {
        if (phr)
            *phr = E_OUTOFMEMORY;
        delete m_pInputPin;
        delete m_pOutputPin;
        delete m_pVideoPin;
        m_pInputPin = NULL;
        m_pOutputPin = NULL;
        m_pVideoPin = NULL;
        return;
    }

    m_pPassThrusPin = new CTSParserOutputPin(
										L"PassThrusPin",
		                                this,
                                        GetOwner(),
                                        &m_Lock,
                                        phr);
    if( m_pPassThrusPin == NULL )
    {
        if (phr)
            *phr = E_OUTOFMEMORY;
        delete m_pInputPin;
        delete m_pOutputPin;
        delete m_pVideoPin;
        delete m_pAudioPin;
        m_pInputPin = NULL;
        m_pOutputPin = NULL;
        m_pVideoPin = NULL;
        m_pAudioPin = NULL;
        return;
    }

    m_pDumpPin = new CTSParserOutputPin(
										L"DumpPin",
		                                this,
                                        GetOwner(),
                                        &m_Lock,
                                        phr);
    if( m_pDumpPin == NULL )
    {
        if (phr)
            *phr = E_OUTOFMEMORY;
        delete m_pInputPin;
        delete m_pOutputPin;
        delete m_pVideoPin;
        delete m_pAudioPin;
        delete m_pPassThrusPin;
        m_pInputPin = NULL;
        m_pOutputPin = NULL;
        m_pVideoPin = NULL;
        m_pAudioPin = NULL;
        m_pPassThrusPin = NULL;
        return;
    }

	CMediaType mt;
	//mt.AllocFormatBuffer( TS_PACKET_LENGTH * TS_BUFFER_PACKETS );
	//mt.SetSampleSize( TS_PACKET_LENGTH * TS_BUFFER_PACKETS );
    //mt.SetTemporalCompression(FALSE);

	mt.SetType(&MEDIATYPE_Stream);
	mt.SetSubtype(&MEDIASUBTYPE_MPEG2_TRANSPORT);
	m_pPassThrusPin->DefaultMediaType( &mt );
	m_pDumpPin->DefaultMediaType( &mt );

	mt.SetType(&MEDIATYPE_Video);
	mt.SetSubtype(&MEDIASUBTYPE_MPEG2_VIDEO);
    mt.SetFormatType(&FORMAT_VideoInfo);
	m_pVideoPin->DefaultMediaType( &mt );

	mt.SetType(&MEDIATYPE_Audio);
	mt.SetSubtype(&MEDIASUBTYPE_DOLBY_AC3);
    mt.SetFormatType(&FORMAT_DolbyAC3);
	m_pAudioPin->DefaultMediaType( &mt );

	m_dwDumpedBytes = 0;
	m_dwDumpSize = 0;
	m_dwInputBytes = 0;
	m_dwOutputBytes = 0;
	m_bRebultPMT = FALSE;
	m_bRebuildTSPMTEnabled = FALSE;
	memset( m_szDebugFileSource, 0, sizeof(m_szDebugFileSource) );
	m_hDebugFileSource = NULL;
	m_bPassThroughEnabled = FALSE;
	m_bParserEnabled = TRUE;
	m_bFilterRunning = FALSE;
	m_bDebugFlag = FALSE;
	m_dwSelectProgram = -1;
	m_dwSelectChannel = -1;
	m_dwSelectTSID = 0;
	m_dwPIDDumpDisable = 0;
	m_bDisableClockTimeout = TRUE;
	m_bSubtitleEnable = TRUE;

	m_wFormat = 0;
	m_wSubFormat = 0;

} 
Example #12
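// Adds a 24-bit RGB video group, sized to the source dimensions, to a
// DirectShow Editing Services timeline.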
HRESULT CConvert::AddVideoGroup(double dSourceFramerate, long nSourceWidth, long nSourceHeight)
{

    // make the root group/composition
	HRESULT hr = S_OK;
    hr = m_pTimeline->CreateEmptyNode(&m_pVideoGroupObj, TIMELINE_MAJOR_TYPE_GROUP);
    if(FAILED( hr )) 
    {
        return hr;
    }

	CComQIPtr<IAMTimelineGroup, &IID_IAMTimelineGroup> pVideoGroup(m_pVideoGroupObj);
 
	//// Set Media Type
	CMediaType VideoGroupType;
	VideoGroupType.SetType(&MEDIATYPE_Video);
	VideoGroupType.SetSubtype(&MEDIASUBTYPE_RGB24);
	VideoGroupType.SetFormatType(&FORMAT_VideoInfo);
	
	VIDEOINFOHEADER *pVideoHeader = (VIDEOINFOHEADER*)VideoGroupType.AllocFormatBuffer(sizeof(VIDEOINFOHEADER));
	if (pVideoHeader == NULL)
		return E_OUTOFMEMORY;
	ZeroMemory(pVideoHeader, sizeof(VIDEOINFOHEADER));
	
	pVideoHeader->bmiHeader.biBitCount = 24;
	pVideoHeader->bmiHeader.biWidth = nSourceWidth;
	pVideoHeader->bmiHeader.biHeight = nSourceHeight;
	pVideoHeader->bmiHeader.biPlanes = 1;
	pVideoHeader->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	pVideoHeader->bmiHeader.biSizeImage = DIBSIZE(pVideoHeader->bmiHeader);

	VideoGroupType.SetSampleSize(DIBSIZE(pVideoHeader->bmiHeader));
	
	hr = pVideoGroup->SetMediaType(&VideoGroupType);
    if(FAILED( hr )) 
    {
        return hr;
    }

	
//	double dRequiredInputFramerate = 0;
//	m_pMPEGWriterProps->SetSourceFramerate(dSourceFramerate, &dRequiredInputFramerate);

// 	hr = pVideoGroup->SetOutputFPS(15);


	/*
	if (GetOutputFPS() != 0)
	{
		// the user set a framerate
		hr = pVideoGroup->SetOutputFPS(GetOutputFPS());
		GetMPEGWriterProps()->OverrideSourceFPS(GetOutputFPS());
	}
	else if (IsFrameRateSupported((float)dSourceFramerate))
	{
		// the user did not set a framerate. If the source
		// framerate is supported, we use it.
		hr = pVideoGroup->SetOutputFPS(dSourceFramerate);
		GetMPEGWriterProps()->OverrideSourceFPS((float)dSourceFramerate);
	}
	else
	{
		// the user did not want a framerate, and the framerate
		// of the file is not supported. We use 25fps
		hr = pVideoGroup->SetOutputFPS(25);
		GetMPEGWriterProps()->OverrideSourceFPS(25);
	}
	*/

	hr = m_pTimeline->AddGroup(m_pVideoGroupObj);

	return hr;
}
Example #13
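// Builds the subtitle stream media type. Note that the source passes the
// MEDIATYPE_Subtitle GUID to SetSubtype(), i.e. a major type used as a
// subtype.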
void CDeMultiplexer::GetSubtitleStreamPMT(CMediaType& pmt)
{
  pmt.InitMediaType();
  pmt.SetType(&MEDIATYPE_Text);
  pmt.SetSubtype(&MEDIATYPE_Subtitle);
}
Example #14
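// Builds a camera capture graph (source -> SampleGrabber -> null renderer),
// forces 24-bit RGB on the grabber, and registers a buffer callback that
// receives every frame.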
bool VideoCapture::InitCamera(int iDevice, int iResolution)
{
	if (cameraInitialized) return false;

	HRESULT hr;
	bool ans;
	int iWidth = devices_resolutions[iDevice].x[iResolution];
	int iHeight = devices_resolutions[iDevice].y[iResolution];

    USES_CONVERSION;

	CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
		  IID_ISampleGrabber, (void**)&pGrabber);
	if( !pGrabber ) return false;
    
	hr = pNull.CoCreateInstance(CLSID_NullRenderer);
	
    CComQIPtr< IBaseFilter, &IID_IBaseFilter > pGrabberBase( pGrabber );

	ans = BindFilter(iDevice, &pSource);
	if( !ans || !pSource ) return false;
	
	hr = pGraph.CoCreateInstance( CLSID_FilterGraph );
	if (hr!=S_OK) return false;

	hr = pGraph->AddFilter( pSource, L"Source" );
	if (hr!=S_OK) return false;
	hr = pGraph->AddFilter( pGrabberBase, L"Grabber" );
    if (hr!=S_OK) return false;
	hr = pGraph->AddFilter( pNull, L"NullRenderer" );
	if (hr!=S_OK) return false;
	
	// Tell the grabber to grab 24-bit video. Must do this
    // before connecting it
    CMediaType GrabType;

	GrabType.SetType( &MEDIATYPE_Video );
	GrabType.SetSubtype( &MEDIASUBTYPE_RGB24 );

	hr = pGrabber->SetMediaType( &GrabType );
	if (hr!=S_OK) return false;

    CComPtr< IPin > pGrabPinIn;
    CComPtr< IPin > pGrabPinOut;
    CComPtr< IPin > pNullPinIn;

    pSourcePin = GetOutPin( pSource, 0 );
    pGrabPinIn   = GetInPin( pGrabberBase, 0 );
    pGrabPinOut  = GetOutPin( pGrabberBase, 0 );
    pNullPinIn  = GetInPin( pNull, 0 );

	CComPtr <ICaptureGraphBuilder2> pCaptB;
	pCaptB.CoCreateInstance(CLSID_CaptureGraphBuilder2);
	pCaptB->SetFiltergraph(pGraph);

	hr = pCaptB->FindInterface(&PIN_CATEGORY_CAPTURE,
		&MEDIATYPE_Video,pSource, 
		IID_IAMStreamConfig, (void**)&pConfig);
	if (hr!=S_OK) return false;

	SetResolution(iDevice, iResolution);
    // ... and connect them
    //
    hr = pGraph->Connect( pSourcePin, pGrabPinIn);
    if (hr!=S_OK) return false;
	
	hr = pGraph->Connect( pGrabPinOut, pNullPinIn );
	if (hr!=S_OK) return false;
	
	// Ask for the connection media type so we know its size
    //
    AM_MEDIA_TYPE mt;
	hr = pGrabber->GetConnectedMediaType( &mt );
	if (hr!=S_OK) return false;

    // the negotiated format is described by (VIDEOINFOHEADER*)mt.pbFormat;
    // the requested resolution is assumed to have been accepted here
    callback.im_width  = iWidth;
    callback.im_height = iHeight;
    FreeMediaType( mt );

	// Write the bitmap format
    //
    memset( &(callback.cbInfo.bih), 0, sizeof( callback.cbInfo.bih ) );
    // note: biSize is used here as the image buffer size in bytes
    // (width*height*3 for 24-bit RGB), not sizeof(BITMAPINFOHEADER)
    callback.cbInfo.bih.biSize = iWidth*iHeight*3;
    callback.cbInfo.bih.biWidth = iWidth;
    callback.cbInfo.bih.biHeight = iHeight;
    callback.cbInfo.bih.biPlanes = 1;
    callback.cbInfo.bih.biBitCount = 24;

	callback.cbInfo.pBuffer = new byte[callback.cbInfo.bih.biSize];
	    
	hr = pGrabber->SetBufferSamples( FALSE );
	hr = pGrabber->SetOneShot( FALSE );
    hr = pGrabber->SetCallback( &callback, 1);

	hr = pGraph->QueryInterface(IID_IMediaControl,(LPVOID *) &pControl);

	hr = pControl->Run( );

	cameraInitialized = true;
	return true;	
}
Example #15
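// Variant of v4w_open_videodevice() using CComPtr smart pointers: same
// graph construction as above, but the capture device is selected by
// index (devidx) and rendered from the preview pin.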
static int v4w_open_videodevice(V4wState *s)
{
    // Initialize COM
    CoInitialize(NULL);

    // get a Graph
    HRESULT hr=s->m_pGraph.CoCreateInstance(CLSID_FilterGraph);
    if(FAILED(hr))
    {
        return -1;
    }

    // get a CaptureGraphBuilder2
    hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder2);
    if(FAILED(hr))
    {
        return -2;
    }

    // connect capture graph builder with the graph
    s->m_pBuilder->SetFiltergraph(s->m_pGraph);

    // get mediacontrol so we can start and stop the filter graph
    hr=s->m_pGraph.QueryInterface(&(s->m_pControl));
    if(FAILED(hr))
    {
        return -3;
    }


#ifdef _DEBUG
    HANDLE m_hLogFile=CreateFile(L"DShowGraphLog.txt",GENERIC_READ|GENERIC_WRITE,FILE_SHARE_READ,NULL,OPEN_ALWAYS,FILE_ATTRIBUTE_NORMAL,NULL);
    if(m_hLogFile!=INVALID_HANDLE_VALUE)
    {
        hr=s->m_pGraph->SetLogFile((DWORD_PTR)m_hLogFile);
        /* ASSERT(SUCCEEDED(hr)); */
    }

    //AddGraphToRot(s->m_pGraph, &s->rotregvalue);
#endif

    ICreateDevEnum *pCreateDevEnum = NULL;
    IEnumMoniker *pEnumMoniker = NULL;
    IMoniker *pMoniker = NULL;

    ULONG nFetched = 0;

    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                          IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
    if(FAILED(hr))
    {
        return -4;
    }

    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
            &pEnumMoniker, 0);
    if (FAILED(hr) || pEnumMoniker == NULL) {
        //printf("no device\n");
        return -5;
    }

    pEnumMoniker->Reset();

    int pos=0;
    while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
    {
        if (pos>=s->devidx)
            break;
        pos++;
        pMoniker->Release();
        pMoniker=NULL;
    }
    if(pMoniker==NULL)
    {
        return -6;
    }

    hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
    if(FAILED(hr))
    {
        return -7;
    }

    s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

    pMoniker->Release();
    pEnumMoniker->Release();
    pCreateDevEnum->Release();

    if (try_format(s, s->pix_fmt)==0)
        s->pix_fmt = s->pix_fmt; // requested format is supported; keep it
    else if (try_format(s,MS_YUV420P)==0)
        s->pix_fmt = MS_YUV420P;
    else if (try_format(s,MS_YUY2)==0)
        s->pix_fmt = MS_YUY2;
    else if (try_format(s,MS_YUYV)==0)
        s->pix_fmt = MS_YUYV;
    else if (try_format(s,MS_UYVY)==0)
        s->pix_fmt = MS_UYVY;
    else if (try_format(s,MS_RGB24)==0)
        s->pix_fmt = MS_RGB24;
    else
    {
        ms_error("Unsupported video pixel format.");
        return -8;
    }

    if (s->pix_fmt == MS_YUV420P)
        ms_message("Driver supports YUV420P, using that format.");
    else if (s->pix_fmt == MS_YUY2)
        ms_message("Driver supports YUY2 (UYVY), using that format.");
    else if (s->pix_fmt == MS_YUYV)
        ms_message("Driver supports YUV422, using that format.");
    else if (s->pix_fmt == MS_UYVY)
        ms_message("Driver supports UYVY, using that format.");
    else if (s->pix_fmt == MS_RGB24)
        ms_message("Driver supports RGB24, using that format.");

    if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height)==0)
        ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H);
    else
    {
        ms_error("No supported size found for format.");
        /* size not supported? */
        return -9;
    }

    // get DXFilter
    s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
    if(s->m_pDXFilter==NULL)
    {
        return -10;
    }
    s->m_pDXFilter->AddRef();

    CMediaType mt;
    mt.SetType(&MEDIATYPE_Video);

    GUID m = MEDIASUBTYPE_RGB24;
    if (s->pix_fmt == MS_YUV420P)
        m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
    else if (s->pix_fmt == MS_YUY2)
        m = MEDIASUBTYPE_YUY2;
    else if (s->pix_fmt == MS_YUYV)
        m = MEDIASUBTYPE_YUYV;
    else if (s->pix_fmt == MS_UYVY)
        m = MEDIASUBTYPE_UYVY;
    else if (s->pix_fmt == MS_RGB24)
        m = MEDIASUBTYPE_RGB24;
    mt.SetSubtype(&m);

    mt.formattype = FORMAT_VideoInfo;
    mt.SetTemporalCompression(FALSE);

    VIDEOINFO *pvi = (VIDEOINFO *)
                     mt.AllocFormatBuffer(sizeof(VIDEOINFO));
    if (NULL == pvi)
        return -11;
    ZeroMemory(pvi, sizeof(VIDEOINFO));

    if (s->pix_fmt == MS_YUV420P)
        pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
    else if (s->pix_fmt == MS_YUY2)
        pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
    else if (s->pix_fmt == MS_YUYV)
        pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','V');
    else if (s->pix_fmt == MS_UYVY)
        pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
    else if (s->pix_fmt == MS_RGB24)
        pvi->bmiHeader.biCompression = BI_RGB;

    if (s->pix_fmt == MS_YUV420P)
        pvi->bmiHeader.biBitCount = 12;
    else if (s->pix_fmt == MS_YUY2)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_YUYV)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_UYVY)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_RGB24)
        pvi->bmiHeader.biBitCount = 24;

    pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    pvi->bmiHeader.biWidth = s->vsize.width;
    pvi->bmiHeader.biHeight = s->vsize.height;
    pvi->bmiHeader.biPlanes = 1;
    pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
    pvi->bmiHeader.biClrImportant = 0;
    mt.SetSampleSize(pvi->bmiHeader.biSizeImage);
    mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

    hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
    if(FAILED(hr))
    {
        return -12;
    }

    hr = s->m_pDXFilter->SetCallback(Callback);
    if(FAILED(hr))
    {
        return -13;
    }

    hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
                                        (LPVOID *)&s->m_pIDXFilter);
    if(FAILED(hr))
    {
        return -14;
    }

    hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
    if(FAILED(hr))
    {
        return -15;
    }


    // get null renderer
    hr=s->m_pNullRenderer.CoCreateInstance(CLSID_NullRenderer);
    if(FAILED(hr))
    {
        return -16;
    }
    if (s->m_pNullRenderer!=NULL)
    {
        s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
    }

    hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_PREVIEW,
                                     &MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
    if (FAILED(hr))
    {
        //hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_CAPTURE,
        //	&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
        return -17;
    }

    //m_pDXFilter->SetBufferSamples(TRUE);

    s_callback = s;
    hr = s->m_pControl->Run();
    if(FAILED(hr))
    {
        return -18;
    }

    s->rotregvalue=1;
    return 0;
}
Example #16
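// Builds the playback filter graph: BonSrcFilter -> MPEG-2 Demultiplexer ->
// MPEG-2/H.264 parser -> video decoder -> renderer, plus an audio decoder
// and audio renderer; errors are reported through CBonException.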
const bool CMediaViewer::OpenViewer(HWND hOwnerHwnd, HWND hMessageDrainHwnd,
			CVideoRenderer::RendererType RendererType,
			LPCWSTR pszVideoDecoder, LPCWSTR pszAudioDevice)
{
	CTryBlockLock Lock(&m_DecoderLock);
	if (!Lock.TryLock(LOCK_TIMEOUT)) {
		SetError(TEXT("タイムアウトエラーです。"));
		return false;
	}

	if (m_bInit) {
		SetError(TEXT("既にフィルタグラフが構築されています。"));
		return false;
	}

	TRACE(TEXT("CMediaViewer::OpenViewer() フィルタグラフ作成開始\n"));

	HRESULT hr=S_OK;

	IPin *pOutput=NULL;
	IPin *pOutputVideo=NULL;
	IPin *pOutputAudio=NULL;

	try {
		// Build the filter graph manager
		hr=::CoCreateInstance(CLSID_FilterGraph,NULL,CLSCTX_INPROC_SERVER,
				IID_IGraphBuilder,pointer_cast<LPVOID*>(&m_pFilterGraph));
		if (FAILED(hr)) {
			throw CBonException(hr,TEXT("フィルタグラフマネージャを作成できません。"));
		}
#ifdef _DEBUG
		AddToRot(m_pFilterGraph, &m_dwRegister);
#endif

		// Query the IMediaControl interface
		hr=m_pFilterGraph->QueryInterface(IID_IMediaControl, pointer_cast<void**>(&m_pMediaControl));
		if (FAILED(hr)) {
			throw CBonException(hr,TEXT("メディアコントロールを取得できません。"));
		}

		Trace(TEXT("ソースフィルタの接続中..."));

		/* CBonSrcFilter */
		{
			// Create the instance
			m_pSrcFilter = static_cast<CBonSrcFilter*>(CBonSrcFilter::CreateInstance(NULL, &hr));
			if (m_pSrcFilter == NULL || FAILED(hr))
				throw CBonException(hr, TEXT("ソースフィルタを作成できません。"));
			m_pSrcFilter->SetOutputWhenPaused(RendererType == CVideoRenderer::RENDERER_DEFAULT);
			// Add the filter to the filter graph
			hr = m_pFilterGraph->AddFilter(m_pSrcFilter, L"BonSrcFilter");
			if (FAILED(hr))
				throw CBonException(hr, TEXT("ソースフィルタをフィルタグラフに追加できません。"));
			// Get the output pin
			pOutput = DirectShowUtil::GetFilterPin(m_pSrcFilter, PINDIR_OUTPUT);
			if (pOutput==NULL)
				throw CBonException(TEXT("ソースフィルタの出力ピンを取得できません。"));
			m_pSrcFilter->EnableSync(m_bEnablePTSSync);
		}

		Trace(TEXT("MPEG-2 Demultiplexerフィルタの接続中..."));

		/* MPEG-2 Demultiplexer */
		{
			CMediaType MediaTypeVideo;
			CMediaType MediaTypeAudio;
			IMpeg2Demultiplexer *pMpeg2Demuxer;

			hr=::CoCreateInstance(CLSID_MPEG2Demultiplexer,NULL,
					CLSCTX_INPROC_SERVER,IID_IBaseFilter,
					pointer_cast<LPVOID*>(&m_pMp2DemuxFilter));
			if (FAILED(hr))
				throw CBonException(hr,TEXT("MPEG-2 Demultiplexerフィルタを作成できません。"),
									TEXT("MPEG-2 Demultiplexerフィルタがインストールされているか確認してください。"));
			hr=DirectShowUtil::AppendFilterAndConnect(m_pFilterGraph,
								m_pMp2DemuxFilter,L"Mpeg2Demuxer",&pOutput);
			if (FAILED(hr))
				throw CBonException(hr,TEXT("MPEG-2 Demultiplexerをフィルタグラフに追加できません。"));
			// pOutput should already be NULL at this point, but release it just in case
			SAFE_RELEASE(pOutput);

			// Query the IMpeg2Demultiplexer interface
			hr=m_pMp2DemuxFilter->QueryInterface(IID_IMpeg2Demultiplexer,
												 pointer_cast<void**>(&pMpeg2Demuxer));
			if (FAILED(hr))
				throw CBonException(hr,TEXT("MPEG-2 Demultiplexerインターフェースを取得できません。"),
									TEXT("互換性のないスプリッタの優先度がMPEG-2 Demultiplexerより高くなっている可能性があります。"));

			// Set the video media format
			hr = SetVideoMediaType(&MediaTypeVideo, 1920, 1080);
			if (FAILED(hr))
				throw CBonException(TEXT("メモリが確保できません。"));
			// Create the video output pin
			hr = pMpeg2Demuxer->CreateOutputPin(&MediaTypeVideo, L"Video", &pOutputVideo);
			if (FAILED(hr)) {
				pMpeg2Demuxer->Release();
				throw CBonException(hr, TEXT("MPEG-2 Demultiplexerの映像出力ピンを作成できません。"));
			}
			// Set the audio media format
			MediaTypeAudio.InitMediaType();
			MediaTypeAudio.SetType(&MEDIATYPE_Audio);
			MediaTypeAudio.SetSubtype(&MEDIASUBTYPE_NULL);
			MediaTypeAudio.SetVariableSize();
			MediaTypeAudio.SetTemporalCompression(TRUE);
			MediaTypeAudio.SetSampleSize(0);
			MediaTypeAudio.SetFormatType(&FORMAT_None);
			// Create the audio output pin
			hr=pMpeg2Demuxer->CreateOutputPin(&MediaTypeAudio,L"Audio",&pOutputAudio);
			pMpeg2Demuxer->Release();
			if (FAILED(hr))
				throw CBonException(hr,TEXT("MPEG-2 Demultiplexerの音声出力ピンを作成できません。"));
			// Query the IMPEG2PIDMap interface on the video output pin
			hr=pOutputVideo->QueryInterface(__uuidof(IMPEG2PIDMap),pointer_cast<void**>(&m_pMp2DemuxVideoMap));
			if (FAILED(hr))
				throw CBonException(hr,TEXT("映像出力ピンのIMPEG2PIDMapを取得できません。"));
			// Query the IMPEG2PIDMap interface on the audio output pin
			hr=pOutputAudio->QueryInterface(__uuidof(IMPEG2PIDMap),pointer_cast<void**>(&m_pMp2DemuxAudioMap));
			if (FAILED(hr))
				throw CBonException(hr,TEXT("音声出力ピンのIMPEG2PIDMapを取得できません。"));
		}

#ifndef BONTSENGINE_H264_SUPPORT
		Trace(TEXT("MPEG-2パーサフィルタの接続中..."));

		/* CMpeg2ParserFilter */
		{
			// Create the instance
			m_pMpeg2Parser = static_cast<CMpeg2ParserFilter*>(CMpeg2ParserFilter::CreateInstance(NULL, &hr));
			if ((!m_pMpeg2Parser) || FAILED(hr))
				throw CBonException(hr,TEXT("MPEG-2パーサフィルタを作成できません。"));
			m_pMpeg2Parser->SetVideoInfoCallback(OnVideoInfo,this);
			// madVR does not apply a new video size unless the MediaType is set when the size changes
			m_pMpeg2Parser->SetAttachMediaType(RendererType==CVideoRenderer::RENDERER_madVR);
			// Add and connect the filter
			hr=DirectShowUtil::AppendFilterAndConnect(
				m_pFilterGraph,m_pMpeg2Parser,L"Mpeg2ParserFilter",&pOutputVideo);
			if (FAILED(hr))
				throw CBonException(hr,TEXT("MPEG-2パーサフィルタをフィルタグラフに追加できません。"));
		}
#else
		Trace(TEXT("H.264パーサフィルタの接続中..."));

		/* CH264ParserFilter */
		{
			// Create the instance
			m_pH264Parser = static_cast<CH264ParserFilter*>(CH264ParserFilter::CreateInstance(NULL, &hr));
			if ((!m_pH264Parser) || FAILED(hr))
				throw CBonException(TEXT("H.264パーサフィルタを作成できません。"));
			m_pH264Parser->SetVideoInfoCallback(OnVideoInfo,this);
			m_pH264Parser->SetAdjustTime(m_bAdjustVideoSampleTime);
			m_pH264Parser->SetAdjustFrameRate(m_bAdjustFrameRate);
			// madVR does not apply a new video size unless the MediaType is set when the size changes
			m_pH264Parser->SetAttachMediaType(RendererType==CVideoRenderer::RENDERER_madVR);
			// Add and connect the filter
			hr=DirectShowUtil::AppendFilterAndConnect(
				m_pFilterGraph,m_pH264Parser,L"H264ParserFilter",&pOutputVideo);
			if (FAILED(hr))
				throw CBonException(hr,TEXT("H.264パーサフィルタをフィルタグラフに追加できません。"));
		}
#endif	// BONTSENGINE_H264_SUPPORT

		Trace(TEXT("音声デコーダの接続中..."));

#if 1
		/* CAudioDecFilter */
		{
			// Create the CAudioDecFilter instance
			m_pAudioDecoder = static_cast<CAudioDecFilter*>(CAudioDecFilter::CreateInstance(NULL, &hr));
			if (!m_pAudioDecoder || FAILED(hr))
				throw CBonException(hr,TEXT("音声デコーダフィルタを作成できません。"));
			// Add and connect the filter
			hr=DirectShowUtil::AppendFilterAndConnect(
				m_pFilterGraph,m_pAudioDecoder,L"AudioDecFilter",&pOutputAudio);
			if (FAILED(hr))
				throw CBonException(hr,TEXT("音声デコーダフィルタをフィルタグラフに追加できません。"));

			m_pAudioDecoder->SetJitterCorrection(m_bAdjustAudioStreamTime);
			if (m_pAudioStreamCallback)
				m_pAudioDecoder->SetStreamCallback(m_pAudioStreamCallback,
												   m_pAudioStreamCallbackParam);
		}
#else
		/*
			With an external AAC decoder, problems appear such as losing
			audio when the channel count switches, and dual mono being
			played back as stereo.
		*/

		/* CAacParserFilter */
		{
			CAacParserFilter *m_pAacParser;
			// Create the CAacParserFilter instance
			m_pAacParser=static_cast<CAacParserFilter*>(CAacParserFilter::CreateInstance(NULL, &hr));
			if (!m_pAacParser || FAILED(hr))
				throw CBonException(hr,TEXT("AACパーサフィルタを作成できません。"));
			// Add and connect the filter
			hr=DirectShowUtil::AppendFilterAndConnect(
				m_pFilterGraph,m_pAacParser,L"AacParserFilter",&pOutputAudio);
			if (FAILED(hr))
				throw CBonException(TEXT("AACパーサフィルタをフィルタグラフに追加できません。"));
			m_pAacParser->Release();
		}

		/* AAC decoder */
		{
			CDirectShowFilterFinder FilterFinder;

			// Search for the decoder
			if(!FilterFinder.FindFilter(&MEDIATYPE_Audio,&MEDIASUBTYPE_AAC))
				throw CBonException(TEXT("AACデコーダが見付かりません。"),
									TEXT("AACデコーダがインストールされているか確認してください。"));

			WCHAR szAacDecoder[128];
			CLSID idAac;
			bool bConnectSuccess=false;
			IBaseFilter *pAacDecFilter=NULL;

			for (int i=0;i<FilterFinder.GetFilterCount();i++){
				if (FilterFinder.GetFilterInfo(i,&idAac,szAacDecoder,128)) {
					if (pszAudioDecoder!=NULL && pszAudioDecoder[0]!='\0'
							&& ::lstrcmpi(szAacDecoder,pszAudioDecoder)!=0)
						continue;
					hr=DirectShowUtil::AppendFilterAndConnect(m_pFilterGraph,
							idAac,szAacDecoder,&pAacDecFilter,
							&pOutputAudio);
					if (SUCCEEDED(hr)) {
						TRACE(TEXT("AAC decoder connected : %s\n"),szAacDecoder);
						bConnectSuccess=true;
						break;
					}
				}
			}
			// Did any of the filters connect?
			if (bConnectSuccess) {
				SAFE_RELEASE(pAacDecFilter);
				//m_pszAacDecoderName=StdUtil::strdup(szAacDecoder);
			} else {
				throw CBonException(TEXT("AACデコーダフィルタをフィルタグラフに追加できません。"),
									TEXT("設定で有効なAACデコーダが選択されているか確認してください。"));
			}
		}
#endif

		/* Connect the user-specified audio filter */
		if (m_pszAudioFilterName) {
			Trace(TEXT("音声フィルタの接続中..."));

			// Search for the filter
			bool bConnectSuccess=false;
			CDirectShowFilterFinder FilterFinder;
			if (FilterFinder.FindFilter(&MEDIATYPE_Audio,&MEDIASUBTYPE_PCM)) {
				WCHAR szAudioFilter[128];
				CLSID idAudioFilter;

				for (int i=0;i<FilterFinder.GetFilterCount();i++) {
					if (FilterFinder.GetFilterInfo(i,&idAudioFilter,szAudioFilter,128)) {
						if (::lstrcmpi(m_pszAudioFilterName,szAudioFilter)!=0)
							continue;
						hr=DirectShowUtil::AppendFilterAndConnect(m_pFilterGraph,
								idAudioFilter,szAudioFilter,&m_pAudioFilter,
								&pOutputAudio,NULL,true);
						if (SUCCEEDED(hr)) {
							TRACE(TEXT("音声フィルタ接続 : %s\n"),szAudioFilter);
							bConnectSuccess=true;
						}
						break;
					}
				}
			}
			if (!bConnectSuccess) {
				throw CBonException(hr,
					TEXT("音声フィルタをフィルタグラフに追加できません。"),
					TEXT("音声フィルタが利用できないか、音声デバイスに対応していない可能性があります。"));
			}
		}

#ifndef BONTSENGINE_H264_SUPPORT
		Trace(TEXT("MPEG-2デコーダの接続中..."));

		/* MPEG-2 decoder */
		{
			CDirectShowFilterFinder FilterFinder;

			// Search for the decoder
			if(!FilterFinder.FindFilter(&MEDIATYPE_Video,&MEDIASUBTYPE_MPEG2_VIDEO))
				throw CBonException(TEXT("MPEG-2デコーダが見付かりません。"),
									TEXT("MPEG-2デコーダがインストールされているか確認してください。"));

			WCHAR szMpeg2Decoder[128];
			CLSID idMpeg2Vid;
			bool bConnectSuccess=false;

			for (int i=0;i<FilterFinder.GetFilterCount();i++){
				if (FilterFinder.GetFilterInfo(i,&idMpeg2Vid,szMpeg2Decoder,128)) {
					if (pszVideoDecoder!=NULL && pszVideoDecoder[0]!='\0'
							&& ::lstrcmpi(szMpeg2Decoder,pszVideoDecoder)!=0)
						continue;
					hr=DirectShowUtil::AppendFilterAndConnect(m_pFilterGraph,
							idMpeg2Vid,szMpeg2Decoder,&m_pVideoDecoderFilter,
							&pOutputVideo,NULL,true);
					if (SUCCEEDED(hr)) {
						bConnectSuccess=true;
						break;
					}
				}
			}
			// Did any of the filters connect?
			if (bConnectSuccess) {
				m_pszVideoDecoderName=StdUtil::strdup(szMpeg2Decoder);
			} else {
				throw CBonException(hr,TEXT("MPEG-2デコーダフィルタをフィルタグラフに追加できません。"),
					TEXT("設定で有効なMPEG-2デコーダが選択されているか確認してください。\nまた、レンダラを変えてみてください。"));
			}
		}

#ifndef MPEG2PARSERFILTER_INPLACE
		/*
			Intended to work around 1080x1080 (4:3) video being displayed as a
			square when the CyberLink decoder is combined with the default
			renderer; the attempt produced odd results, so it is on hold.
		*/
		if (::StrStrI(m_pszVideoDecoderName, TEXT("CyberLink")) != NULL)
			m_pMpeg2Parser->SetFixSquareDisplay(true);
#endif
#else	// ndef BONTSENGINE_H264_SUPPORT
		Trace(TEXT("H.264デコーダの接続中..."));

		/* H.264 decoder */
		{
			CDirectShowFilterFinder FilterFinder;

			// Search for an H.264 decoder filter
			if(!FilterFinder.FindFilter(&MEDIATYPE_Video,&MEDIASUBTYPE_H264))
				throw CBonException(TEXT("H.264デコーダが見付かりません。"),
									TEXT("H.264デコーダがインストールされているか確認してください。"));

			WCHAR szH264Decoder[128];
			CLSID idH264Decoder;
			bool bConnectSuccess=false;

			for (int i=0;i<FilterFinder.GetFilterCount();i++){
				if (FilterFinder.GetFilterInfo(i,&idH264Decoder,szH264Decoder,128)) {
					if (pszVideoDecoder!=NULL && pszVideoDecoder[0]!='\0'
							&& ::lstrcmpi(szH264Decoder,pszVideoDecoder)!=0)
						continue;
					hr=DirectShowUtil::AppendFilterAndConnect(m_pFilterGraph,
							idH264Decoder,szH264Decoder,&m_pVideoDecoderFilter,
							&pOutputVideo,NULL,true);
					if (SUCCEEDED(hr)) {
						bConnectSuccess=true;
						break;
					}
				}
			}
			// Did any of the filters connect?
			if (bConnectSuccess) {
				m_pszVideoDecoderName=StdUtil::strdup(szH264Decoder);
			} else {
				throw CBonException(hr,TEXT("H.264デコーダフィルタをフィルタグラフに追加できません。"),
					TEXT("設定で有効なH.264デコーダが選択されているか確認してください。\nまた、レンダラを変えてみてください。"));
			}
		}
#endif	// BONTSENGINE_H264_SUPPORT

		Trace(TEXT("映像レンダラの構築中..."));

		if (!CVideoRenderer::CreateRenderer(RendererType,&m_pVideoRenderer)) {
			throw CBonException(TEXT("映像レンダラを作成できません。"),
								TEXT("設定で有効なレンダラが選択されているか確認してください。"));
		}
		if (!m_pVideoRenderer->Initialize(m_pFilterGraph,pOutputVideo,
										  hOwnerHwnd,hMessageDrainHwnd)) {
			throw CBonException(m_pVideoRenderer->GetLastErrorException());
		}
		m_VideoRendererType=RendererType;

		Trace(TEXT("音声レンダラの構築中..."));

		// Build the audio renderer
		{
			bool fOK = false;

			if (pszAudioDevice != NULL && pszAudioDevice[0] != '\0') {
				CDirectShowDeviceEnumerator DevEnum;

				if (DevEnum.CreateFilter(CLSID_AudioRendererCategory,
										 pszAudioDevice, &m_pAudioRenderer)) {
					m_pszAudioRendererName=StdUtil::strdup(pszAudioDevice);
					fOK = true;
				}
			}
			if (!fOK) {
				hr = ::CoCreateInstance(CLSID_DSoundRender, NULL,
										CLSCTX_INPROC_SERVER, IID_IBaseFilter,
										pointer_cast<LPVOID*>(&m_pAudioRenderer));
				if (SUCCEEDED(hr)) {
					m_pszAudioRendererName=StdUtil::strdup(TEXT("DirectSound Renderer"));
					fOK = true;
				}
			}
			if (fOK) {
				hr = DirectShowUtil::AppendFilterAndConnect(m_pFilterGraph,
						m_pAudioRenderer, L"Audio Renderer", &pOutputAudio);
				if (SUCCEEDED(hr)) {
#ifdef _DEBUG
					if (pszAudioDevice != NULL && pszAudioDevice[0] != '\0')
						TRACE(TEXT("音声デバイス %s を接続\n"), pszAudioDevice);
#endif
					if (m_bUseAudioRendererClock) {
						IMediaFilter *pMediaFilter;

						if (SUCCEEDED(m_pFilterGraph->QueryInterface(IID_IMediaFilter,
								pointer_cast<void**>(&pMediaFilter)))) {
							IReferenceClock *pReferenceClock;

							if (SUCCEEDED(m_pAudioRenderer->QueryInterface(IID_IReferenceClock,
									pointer_cast<void**>(&pReferenceClock)))) {
								pMediaFilter->SetSyncSource(pReferenceClock);
								pReferenceClock->Release();
								TRACE(TEXT("グラフのクロックに音声レンダラを選択\n"));
							}
							pMediaFilter->Release();
						}
					}
				} else {
					// Connecting the renderer directly failed;
					// let the graph render the output pin itself
					hr = m_pFilterGraph->Render(pOutputAudio);
					if (FAILED(hr))
						throw CBonException(hr, TEXT("音声レンダラを接続できません。"),
							TEXT("設定で有効な音声デバイスが選択されているか確認してください。"));
				}
			} else {
				// No audio device available?
				// Connect a null renderer so the graph can still run
				hr = ::CoCreateInstance(CLSID_NullRenderer, NULL,
										CLSCTX_INPROC_SERVER, IID_IBaseFilter,
										pointer_cast<LPVOID*>(&m_pAudioRenderer));
				if (SUCCEEDED(hr)) {
					hr = DirectShowUtil::AppendFilterAndConnect(m_pFilterGraph,
						m_pAudioRenderer, L"Null Audio Renderer", &pOutputAudio);
					if (FAILED(hr)) {
						throw CBonException(hr, TEXT("Null音声レンダラを接続できません。"));
					}
					m_pszAudioRendererName=StdUtil::strdup(TEXT("Null Renderer"));
					TRACE(TEXT("Nullレンダラを接続\n"));
				}
			}
		}

		/*
			The MPEG-2 Demultiplexer is apparently set as the graph clock by
			default, but set it explicitly just in case
		*/
		if (!m_bUseAudioRendererClock) {
			IMediaFilter *pMediaFilter;

			if (SUCCEEDED(m_pFilterGraph->QueryInterface(
					IID_IMediaFilter,pointer_cast<void**>(&pMediaFilter)))) {
				IReferenceClock *pReferenceClock;

				if (SUCCEEDED(m_pMp2DemuxFilter->QueryInterface(
						IID_IReferenceClock,pointer_cast<void**>(&pReferenceClock)))) {
					pMediaFilter->SetSyncSource(pReferenceClock);
					pReferenceClock->Release();
					TRACE(TEXT("グラフのクロックにMPEG-2 Demultiplexerを選択\n"));
				}
				pMediaFilter->Release();
			}
		}

		// Set the owner window
		m_hOwnerWnd = hOwnerHwnd;
		RECT rc;
		::GetClientRect(hOwnerHwnd, &rc);
		m_wVideoWindowX = (WORD)rc.right;
		m_wVideoWindowY = (WORD)rc.bottom;

		m_bInit=true;

		ULONG PID;
		if (m_wVideoEsPID != PID_INVALID) {
			PID = m_wVideoEsPID;
			if (FAILED(m_pMp2DemuxVideoMap->MapPID(1, &PID, MEDIA_ELEMENTARY_STREAM)))
				m_wVideoEsPID = PID_INVALID;
		}
		if (m_wAudioEsPID != PID_INVALID) {
			PID = m_wAudioEsPID;
			if (FAILED(m_pMp2DemuxAudioMap->MapPID(1, &PID, MEDIA_ELEMENTARY_STREAM)))
				m_wAudioEsPID = PID_INVALID;
		}
	} catch (CBonException &Exception) {
		SetError(Exception);
		if (Exception.GetErrorCode()!=0) {
			TCHAR szText[MAX_ERROR_TEXT_LEN+32];
			int Length;

			Length=::AMGetErrorText(Exception.GetErrorCode(),szText,MAX_ERROR_TEXT_LEN);
			::wsprintf(szText+Length,TEXT("\nエラーコード(HRESULT) 0x%08X"),Exception.GetErrorCode());
			SetErrorSystemMessage(szText);
		}

		SAFE_RELEASE(pOutput);
		SAFE_RELEASE(pOutputVideo);
		SAFE_RELEASE(pOutputAudio);
		CloseViewer();

		TRACE(TEXT("フィルタグラフ構築失敗 : %s\n"), GetLastErrorText());
		return false;
	}

	SAFE_RELEASE(pOutputVideo);
	SAFE_RELEASE(pOutputAudio);

	ClearError();

	TRACE(TEXT("フィルタグラフ構築成功\n"));
	return true;
}
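
A note on the example above: the graph-clock selection appears twice with identical QueryInterface boilerplate, once for the audio renderer and once for the MPEG-2 Demultiplexer. As a rough sketch only, that pattern could be factored into a small helper; the function name and parameters below are illustrative and are not part of the original source:

#include <dshow.h>

HRESULT SetGraphClockFromFilter(IGraphBuilder *pGraph, IBaseFilter *pClockSource)
{
	// The filter graph exposes the clock through its IMediaFilter interface
	IMediaFilter *pMediaFilter = NULL;
	HRESULT hr = pGraph->QueryInterface(IID_IMediaFilter,
										reinterpret_cast<void**>(&pMediaFilter));
	if (FAILED(hr))
		return hr;

	// Filters that can drive the graph (audio renderers, the demultiplexer)
	// implement IReferenceClock
	IReferenceClock *pClock = NULL;
	hr = pClockSource->QueryInterface(IID_IReferenceClock,
									  reinterpret_cast<void**>(&pClock));
	if (SUCCEEDED(hr)) {
		// SetSyncSource keeps its own reference, so ours can be released
		hr = pMediaFilter->SetSyncSource(pClock);
		pClock->Release();
	}
	pMediaFilter->Release();
	return hr;
}

With such a helper, both call sites would reduce to a single call, for example SetGraphClockFromFilter(m_pFilterGraph, m_pAudioRenderer).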
Ejemplo n.º 17
0
static int v4w_open_videodevice(V4wState *s, int format, MSVideoSize *vsize)
{
	// Initialize COM
	CoInitialize(NULL);

	// get a Graph
	HRESULT hr=s->m_pGraph.CoCreateInstance(CLSID_FilterGraph);
	if(FAILED(hr))
	{
		return -1;
	}

	// get a CaptureGraphBuilder2
#if !defined(_WIN32_WCE)
	hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder2);
#else
	hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder);
#endif
	if(FAILED(hr))
	{
		return -2;
	}

	// connect capture graph builder with the graph
	s->m_pBuilder->SetFiltergraph(s->m_pGraph);

	// get mediacontrol so we can start and stop the filter graph
	hr=s->m_pGraph.QueryInterface(&(s->m_pControl));
	if(FAILED(hr))
	{
		return -3;
	}

	// get DXFilter
	s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
	if(s->m_pDXFilter==NULL)
	{
		return -4;
	}
	s->m_pDXFilter->AddRef();
	if(FAILED(hr))
	{
		return -4;
	}

	CMediaType mt;
	mt.SetType(&MEDIATYPE_Video);

	if (format==MS_YUV420P)
	{
		// Request planar YUV 4:2:0 as YV12
		mt.SetSubtype(&MEDIASUBTYPE_YV12);
	}
	else //if (format==MS_RGB24)
	{
		mt.SetSubtype(&MEDIASUBTYPE_RGB24);
	}

	mt.SetFormatType(&FORMAT_VideoInfo);
	mt.SetTemporalCompression(FALSE);

	VIDEOINFO *pvi = (VIDEOINFO*)mt.AllocFormatBuffer(sizeof(VIDEOINFO));
	if (pvi == NULL)
		return E_OUTOFMEMORY;
	ZeroMemory(pvi, sizeof(VIDEOINFO));
	if (format==MS_YUV420P)
	{
		// Must match the YV12 subtype selected above
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','V','1','2');
		pvi->bmiHeader.biBitCount = 12;
	}
	else
	{
		pvi->bmiHeader.biCompression = BI_RGB;
		pvi->bmiHeader.biBitCount = 24;
	}
	pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	pvi->bmiHeader.biWidth = vsize->width;
	pvi->bmiHeader.biHeight = vsize->height;
	pvi->bmiHeader.biPlanes = 1;
	pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
	pvi->bmiHeader.biClrImportant = 0;
	mt.SetSampleSize(pvi->bmiHeader.biSizeImage);
	mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

	hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
	if(FAILED(hr))
	{
		return -5;
	}

	hr = s->m_pDXFilter->SetCallback(Callback); 
	if(FAILED(hr))
	{
		return -6;
	}

	hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
		(LPVOID *)&s->m_pIDXFilter);
	if(FAILED(hr))
	{
		return -7;
	}

	hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
	if(FAILED(hr))
	{
		return -8;
	}

#ifdef WM6
	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;

	ULONG nFetched = 0;

	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
	if(FAILED(hr))
	{
		return -9;
	}

	hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (FAILED(hr) || pEnumMoniker == NULL) {
		// No video input device available
		pCreateDevEnum->Release();
		return -10;
	}

	pEnumMoniker->Reset();

	hr = pEnumMoniker->Next(1, &pMoniker, &nFetched);
	if(FAILED(hr) || pMoniker==NULL)
	{
		pEnumMoniker->Release();
		pCreateDevEnum->Release();
		return -11;
	}

	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
	if(FAILED(hr))
	{
		pMoniker->Release();
		pEnumMoniker->Release();
		pCreateDevEnum->Release();
		return -12;
	}

	hr = s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

	pMoniker->Release();
	pEnumMoniker->Release();
	pCreateDevEnum->Release();
#else
	WCHAR wzDeviceName[ MAX_PATH + 1 ];
	CComVariant varCamName;
	CPropertyBag PropBag;
	CComPtr<IPersistPropertyBag> pPropertyBag;
	GetFirstCameraDriver(wzDeviceName);

	hr = s->m_pDeviceFilter.CoCreateInstance( CLSID_VideoCapture ); 
	if (FAILED(hr))
	{
		return -8;
	}

	s->m_pDeviceFilter.QueryInterface( &pPropertyBag );
	varCamName = wzDeviceName;
	if ( varCamName.vt != VT_BSTR ) {
		// BSTR allocation for the device name failed
		return E_OUTOFMEMORY;
	}
	PropBag.Write( L"VCapName", &varCamName );
	pPropertyBag->Load( &PropBag, NULL );
	pPropertyBag.Release();

	hr = s->m_pGraph->AddFilter( s->m_pDeviceFilter, L"Video capture source" );
#endif

	if (FAILED(hr))
	{
		return -8;
	}

	// get null renderer
	s->m_pNullRenderer = NULL;
#if 0
	hr=s->m_pNullRenderer.CoCreateInstance(CLSID_NullRenderer);
	if(FAILED(hr))
	{
		return -13;
	}
#endif
	if (s->m_pNullRenderer!=NULL)
	{
		s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
	}

	hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_PREVIEW,
		&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
	if (FAILED(hr))
	{
		//hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_CAPTURE,
		//	&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
		if (FAILED(hr))
		{
			return -14;
		}
	}

	//m_pDXFilter->SetBufferSamples(TRUE);

	// Leftover experiment: enumerate registered filters through the legacy
	// IFilterMapper interface. The loop body is disabled, so nothing in this
	// block affects the capture graph.
	IFilterMapper *pMapper = NULL;
	IEnumRegFilters *pEnum = NULL;

	hr = CoCreateInstance(CLSID_FilterMapper,
		NULL, CLSCTX_INPROC, IID_IFilterMapper,
		(void **) &pMapper);
	if (SUCCEEDED(hr))
	{
		hr = pMapper->EnumMatchingFilters(
				&pEnum,
				MERIT_HW_COMPRESSOR, // Minimum merit.
				FALSE,               // At least one input pin?
				MEDIATYPE_NULL,
				MEDIASUBTYPE_NULL,
				FALSE,               // Must be a renderer?
				FALSE,               // At least one output pin?
				MEDIATYPE_NULL,
				MEDIASUBTYPE_NULL);
		if (SUCCEEDED(hr))
		{
			// Enumerate the matching filters.
			REGFILTER *pRegFilter;
			ULONG cFetched;
			while (pEnum->Next(1, &pRegFilter, &cFetched) == S_OK)
			{
#if 0
				// Disabled remnant of the IFilterMapper2/IMoniker version of
				// this sample; it does not apply to REGFILTER and is kept
				// for reference only.
				IPropertyBag *pPropBag = NULL;
				hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
					(void **)&pPropBag);
				if (SUCCEEDED(hr))
				{
					// Retrieve the friendly name of the filter
					VARIANT varName;
					VariantInit(&varName);
					hr = pPropBag->Read(L"FriendlyName", &varName, 0);
					if (SUCCEEDED(hr))
					{
						// Display the name in your UI somehow.
					}
					VariantClear(&varName);

					// To create an instance of the filter:
					IBaseFilter *pFilter;
					hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&pFilter);
					// Now add the filter to the graph. Remember to release pFilter later.

					// Clean up.
					pPropBag->Release();
				}
				pMoniker->Release();
#endif
				// REGFILTER structures returned by IEnumRegFilters must be
				// freed by the caller
				CoTaskMemFree(pRegFilter);
			}
			pEnum->Release();
		}
		pMapper->Release();
	}

	s_callback = s;
	hr = s->m_pControl->Run();
	if(FAILED(hr))
	{
		return -15;
	}

	s->rotregvalue=1;
	s->pix_fmt = format;
	s->vsize.height = vsize->height;
	s->vsize.width = vsize->width;
	return 0;
}
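
For comparison with the WM6 branch above, which binds the first video input device by moniker: a minimal self-contained version of that lookup might look like the sketch below, assuming COM is already initialized. GetFirstVideoCaptureFilter is an illustrative name, not part of the original code, and error handling is reduced to the essentials.

#include <dshow.h>

HRESULT GetFirstVideoCaptureFilter(IBaseFilter **ppFilter)
{
	ICreateDevEnum *pDevEnum = NULL;
	HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
								  CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
								  reinterpret_cast<void**>(&pDevEnum));
	if (FAILED(hr))
		return hr;

	IEnumMoniker *pEnum = NULL;
	hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0);
	pDevEnum->Release();
	if (hr != S_OK) // S_FALSE means the category is empty
		return E_FAIL;

	IMoniker *pMoniker = NULL;
	ULONG cFetched = 0;
	hr = pEnum->Next(1, &pMoniker, &cFetched);
	pEnum->Release();
	if (hr != S_OK || pMoniker == NULL)
		return E_FAIL;

	// Bind the device moniker to a filter that can be added to the graph
	hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter,
								reinterpret_cast<void**>(ppFilter));
	pMoniker->Release();
	return hr;
}

The returned filter would then be added to the graph with AddFilter, exactly as the example does with s->m_pDeviceFilter.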
Ejemplo n.º 18
0
HRESULT XnVideoStream::GetStreamCapability(int iIndex, CMediaType& mediaType, VIDEO_STREAM_CONFIG_CAPS& vscc)
{
	// check bounds
	if(iIndex < 0 || iIndex >= int(m_aSupportedModes.GetSize()))
	{
		xnLogVerbose(XN_MASK_FILTER, "GetStreamCapability() - Index %d is out of bounds!", iIndex);
		return S_FALSE;
	}

	VIDEOINFO *pvi = (VIDEOINFO*)mediaType.AllocFormatBuffer(sizeof(VIDEOINFO));
	if (pvi == NULL)
		return E_OUTOFMEMORY;

	ZeroMemory(pvi, sizeof(VIDEOINFO));

	int xRes = m_aSupportedModes[iIndex].OutputMode.nXRes;
	int yRes = m_aSupportedModes[iIndex].OutputMode.nYRes;

	XnUInt64 nFrameTime = 10000000 / m_aSupportedModes[iIndex].OutputMode.nFPS;

	if (m_aSupportedModes[iIndex].Format == XN_PIXEL_FORMAT_RGB24)
	{
		pvi->bmiHeader.biCompression = BI_RGB;
	}
	else if (m_aSupportedModes[iIndex].Format == XN_PIXEL_FORMAT_MJPEG)
	{
		pvi->bmiHeader.biCompression = MAKEFOURCC('M','J','P','G');
	}
	else
	{
		xnLogError(XN_MASK_FILTER, "Unknown format type!");
		return E_UNEXPECTED;
	}

	pvi->bmiHeader.biBitCount	= 24;
	pvi->bmiHeader.biSize       = sizeof(BITMAPINFOHEADER);
	pvi->bmiHeader.biWidth      = xRes;
	pvi->bmiHeader.biHeight     = yRes;
	pvi->bmiHeader.biPlanes     = 1;
	pvi->bmiHeader.biSizeImage  = GetBitmapSize(&pvi->bmiHeader);
	pvi->bmiHeader.biClrImportant = 0;

	SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
	SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle

	pvi->dwBitRate = 
		GetBitmapSize(&pvi->bmiHeader) * // bytes per frame
		m_aSupportedModes[iIndex].OutputMode.nFPS * // frames per second
		8; // bits per byte

	pvi->dwBitErrorRate = 0; // assume no errors
	pvi->AvgTimePerFrame = nFrameTime;

	mediaType.SetType(&MEDIATYPE_Video);
	mediaType.SetFormatType(&FORMAT_VideoInfo);
	mediaType.SetTemporalCompression(FALSE);

	// Work out the GUID for the subtype from the header info.
	const GUID SubTypeGUID = GetBitmapSubtype(&pvi->bmiHeader);
	mediaType.SetSubtype(&SubTypeGUID);
	mediaType.SetSampleSize(pvi->bmiHeader.biSizeImage);

	vscc.guid = FORMAT_VideoInfo;
	vscc.VideoStandard = AnalogVideo_None;
	vscc.InputSize.cx = xRes;
	vscc.InputSize.cy = yRes;
	vscc.MinCroppingSize.cx = xRes;
	vscc.MinCroppingSize.cy = yRes;
	vscc.MaxCroppingSize.cx = xRes;
	vscc.MaxCroppingSize.cy = yRes;
	vscc.CropGranularityX = 1;
	vscc.CropGranularityY = 1;
	vscc.CropAlignX = 1;
	vscc.CropAlignY = 1;

	vscc.MinOutputSize.cx = xRes;
	vscc.MinOutputSize.cy = yRes;
	vscc.MaxOutputSize.cx = xRes;
	vscc.MaxOutputSize.cy = yRes;
	vscc.OutputGranularityX = 1;
	vscc.OutputGranularityY = 1;
	vscc.StretchTapsX = 0;
	vscc.StretchTapsY = 0;
	vscc.ShrinkTapsX = 0;
	vscc.ShrinkTapsY = 0;
	// Frame interval is in 100 nanosecond units
	vscc.MinFrameInterval = nFrameTime;
	vscc.MaxFrameInterval = nFrameTime;
	vscc.MinBitsPerSecond = 
		mediaType.GetSampleSize() * // bytes in frame
		m_aSupportedModes[iIndex].OutputMode.nFPS * // frames per second
		8; // bits per byte
	vscc.MaxBitsPerSecond = vscc.MinBitsPerSecond;

	return S_OK;
}
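
On the consumer side, capabilities filled in by a method like GetStreamCapability() are normally surfaced through IAMStreamConfig::GetStreamCaps on the output pin. The sketch below shows how a caller typically walks them; DumpStreamCaps is an illustrative name, and the loop body only demonstrates the contract.

#include <streams.h> // DirectShow base classes, for DeleteMediaType()

HRESULT DumpStreamCaps(IAMStreamConfig *pConfig)
{
	int nCaps = 0, nSize = 0;
	HRESULT hr = pConfig->GetNumberOfCapabilities(&nCaps, &nSize);
	if (FAILED(hr) || nSize != sizeof(VIDEO_STREAM_CONFIG_CAPS))
		return E_FAIL;

	for (int i = 0; i < nCaps; i++) {
		VIDEO_STREAM_CONFIG_CAPS vscc;
		AM_MEDIA_TYPE *pmt = NULL;
		hr = pConfig->GetStreamCaps(i, &pmt, reinterpret_cast<BYTE*>(&vscc));
		if (FAILED(hr))
			continue;
		// MinOutputSize/MaxOutputSize and the frame-interval range describe
		// what SetFormat() would accept for this capability entry
		DeleteMediaType(pmt); // frees both the format block and the structure
	}
	return S_OK;
}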