Example #1
CMediaType CLAVFAudioHelper::initAudioType(AVCodecID codecId, unsigned int &codecTag, std::string container)
{
    CMediaType mediaType;
    mediaType.InitMediaType();
    mediaType.majortype = MEDIATYPE_Audio;
    mediaType.subtype = FOURCCMap(codecTag);
    mediaType.formattype = FORMAT_WaveFormatEx; //default value
    mediaType.SetSampleSize(256000);

    // Check against values from the map above
    for(unsigned i = 0; i < countof(audio_map); ++i) {
        if (audio_map[i].codec == codecId) {
            if (audio_map[i].subtype)
                mediaType.subtype = *audio_map[i].subtype;
            if (audio_map[i].codecTag)
                codecTag = audio_map[i].codecTag;
            if (audio_map[i].format)
                mediaType.formattype = *audio_map[i].format;
            break;
        }
    }

    // special cases
    switch(codecId)
    {
    case AV_CODEC_ID_PCM_F64LE:
        // Qt PCM
        if (codecTag == MKTAG('f', 'l', '6', '4')) mediaType.subtype = MEDIASUBTYPE_PCM_FL64_le;
        break;
    }
    return mediaType;
}
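
For reference, the audio_map table consumed by the loop above pairs FFmpeg codec IDs with optional DirectShow overrides. Below is a minimal sketch of such an entry, with field names inferred from the accesses in initAudioType(); the real table lives elsewhere in the source, and the sample entry is hypothetical:

struct AudioFormatMap {
    AVCodecID   codec;    // FFmpeg codec id to match against
    const GUID *subtype;  // optional subtype override (NULL = keep FOURCCMap(codecTag))
    unsigned    codecTag; // optional codec tag override (0 = keep the caller's tag)
    const GUID *format;   // optional format block override (NULL = keep FORMAT_WaveFormatEx)
};

static const AudioFormatMap audio_map[] = {
    // Hypothetical entry: route AC-3 to its well-known subtype and format block.
    { AV_CODEC_ID_AC3, &MEDIASUBTYPE_DOLBY_AC3, 0x2000 /* common AC-3 codec tag */, &FORMAT_WaveFormatEx },
};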
Example #2
bool CWAVFile::SetMediaType(CMediaType& mt)
{
	if (!m_fmtdata || !m_fmtsize) {
		return false;
	}

	mt.majortype	= MEDIATYPE_Audio;
	mt.formattype	= FORMAT_WaveFormatEx;
	mt.subtype		= m_subtype;
	mt.SetSampleSize(m_blocksize);

	memcpy(mt.AllocFormatBuffer(m_fmtsize), m_fmtdata, m_fmtsize);

	return true;
}
Example #3
void CDeMultiplexer::GetAudioStreamPMT(CMediaType& pmt)
{
  // Fake audio in use
  if (m_AudioStreamType == NO_STREAM)
  {
    pmt.InitMediaType();
    pmt.SetType(&MEDIATYPE_Audio);
    pmt.SetSubtype(&MEDIASUBTYPE_DOLBY_AC3);
    pmt.SetSampleSize(1);
    pmt.SetTemporalCompression(FALSE);
    pmt.SetVariableSize();
    pmt.SetFormatType(&FORMAT_WaveFormatEx);
    pmt.SetFormat(AC3AudioFormat, sizeof(AC3AudioFormat));
  }
  else
    pmt = m_audioParser->pmt;
}
Example #4
Packet* CClip::GenerateFakeAudio(REFERENCE_TIME rtStart)
{
  if (rtStart + FAKE_AUDIO_DURATION - 1 > playlistFirstPacketTime + clipDuration) 
    superceeded |= SUPERCEEDED_AUDIO_RETURN;
  
  if (superceeded&SUPERCEEDED_AUDIO_RETURN) 
    return NULL;
  
  if (!FakeAudioAvailable()) 
    return NULL;

  Packet* packet = new Packet();
  packet->nClipNumber = nClip;
    
  packet->SetCount(AC3_FRAME_LENGTH);
  packet->SetData(ac3_sample, AC3_FRAME_LENGTH);
  packet->rtStart = rtStart;
  packet->rtStop = packet->rtStart + 1;

  if (firstAudio)
  {
    CMediaType pmt;
    pmt.InitMediaType();
    pmt.SetType(&MEDIATYPE_Audio);
    pmt.SetSubtype(&MEDIASUBTYPE_DOLBY_AC3);
    pmt.SetSampleSize(1);
    pmt.SetTemporalCompression(FALSE);
    pmt.SetVariableSize();
    pmt.SetFormatType(&FORMAT_WaveFormatEx);
    pmt.SetFormat(AC3AudioFormat, sizeof(AC3AudioFormat));
    WAVEFORMATEXTENSIBLE* wfe = (WAVEFORMATEXTENSIBLE*)pmt.pbFormat;
    wfe->Format.nChannels = 6;
    wfe->Format.nSamplesPerSec = 48000;
    wfe->Format.wFormatTag = WAVE_FORMAT_DOLBY_AC3;

    packet->pmt = CreateMediaType(&pmt);
  }
  
  audioPlaybackPosition += FAKE_AUDIO_DURATION;
  lastAudioPosition += FAKE_AUDIO_DURATION;

  return packet;
}
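
The AC3AudioFormat blob used by GetAudioStreamPMT() and GenerateFakeAudio() above only needs to be a zero-filled format block large enough for the WAVEFORMATEXTENSIBLE cast; GenerateFakeAudio() then patches the channel count, sample rate and format tag in place. A hypothetical definition consistent with that usage (the real initializer lives elsewhere):

// Zero-initialized format block; the WAVEFORMATEXTENSIBLE* cast in
// GenerateFakeAudio() requires at least sizeof(WAVEFORMATEXTENSIBLE) bytes.
static BYTE AC3AudioFormat[sizeof(WAVEFORMATEXTENSIBLE)] = { 0 };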
Example #5
HRESULT CSubtitlePin::CheckMediaType(const CMediaType* pmt)
{
  CheckPointer(pmt, E_POINTER);

  CDeMultiplexer& demux=m_pTsReaderFilter->GetDemultiplexer();
  
  if (!m_pTsReaderFilter->CheckCallback())
  {
    //LogDebug("subPin: Not running in MP - CheckMediaType() fail");
    return E_FAIL;
  }

  if (!demux.PatParsed())
  {
    return E_FAIL;
  }

  CMediaType pmti;
  CMediaType* ppmti = &pmti;
  
  ppmti->InitMediaType();
  ppmti->SetType      (& MEDIATYPE_Stream);
  ppmti->SetSubtype   (& MEDIASUBTYPE_MPEG2_TRANSPORT);
  ppmti->SetSampleSize(1);
  ppmti->SetTemporalCompression(FALSE);
  ppmti->SetVariableSize();    

  if(*pmt == *ppmti)
  {
    //LogDebug("subPin:CheckMediaType() ok");  
    return S_OK;
  }

  //LogDebug("subPin:CheckMediaType() fail");  
  return E_FAIL;
}
Example #6
CMediaType CLAVAudio::CreateBitstreamMediaType(AVCodecID codec, DWORD dwSampleRate, BOOL bDTSHDOverride)
{
   CMediaType mt;

   mt.majortype  = MEDIATYPE_Audio;
   mt.subtype    = MEDIASUBTYPE_PCM;
   mt.formattype = FORMAT_WaveFormatEx;

   WAVEFORMATEXTENSIBLE wfex;
   memset(&wfex, 0, sizeof(wfex));

   WAVEFORMATEX* wfe = &wfex.Format;

   wfe->nChannels = 2;
   wfe->wBitsPerSample = 16;

   GUID subtype = GUID_NULL;

   switch(codec) {
   case AV_CODEC_ID_AC3:
     wfe->wFormatTag     = WAVE_FORMAT_DOLBY_AC3_SPDIF;
     wfe->nSamplesPerSec = min(dwSampleRate, 48000);
     break;
   case AV_CODEC_ID_EAC3:
     wfe->nSamplesPerSec = 192000;
     wfe->nChannels      = 2;
     subtype = KSDATAFORMAT_SUBTYPE_IEC61937_DOLBY_DIGITAL_PLUS;
     break;
   case AV_CODEC_ID_TRUEHD:
     wfe->nSamplesPerSec = 192000;
     wfe->nChannels      = 8;
     subtype = KSDATAFORMAT_SUBTYPE_IEC61937_DOLBY_MLP;
     break;
   case AV_CODEC_ID_DTS:
     if (m_settings.bBitstream[Bitstream_DTSHD] && m_bDTSHD && !bDTSHDOverride) {
       wfe->nSamplesPerSec = 192000;
       wfe->nChannels      = 8;
       subtype = KSDATAFORMAT_SUBTYPE_IEC61937_DTS_HD;
     } else {
       wfe->wFormatTag     = WAVE_FORMAT_DOLBY_AC3_SPDIF; // huh? but it works.
       wfe->nSamplesPerSec = min(dwSampleRate, 48000);
     }
     break;
   default:
     ASSERT(0);
     break;
   }

   wfe->nBlockAlign = wfe->nChannels * wfe->wBitsPerSample / 8;
   wfe->nAvgBytesPerSec = wfe->nSamplesPerSec * wfe->nBlockAlign;

   if (subtype != GUID_NULL) {
      wfex.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
      wfex.Format.cbSize = sizeof(wfex) - sizeof(wfex.Format);
      wfex.dwChannelMask = get_channel_mask(wfe->nChannels);
      wfex.Samples.wValidBitsPerSample = wfex.Format.wBitsPerSample;
      wfex.SubFormat = subtype;
   }

   mt.SetSampleSize(1);
   mt.SetFormat((BYTE*)&wfex, sizeof(wfex.Format) + wfex.Format.cbSize);

   return mt;
}
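
A hypothetical call site for the helper above: when switching to bitstreaming, the filter could build the media type and probe whether the connected downstream pin accepts it (m_pOutput stands in for the filter's output pin and is an assumption here):

CMediaType mt = CreateBitstreamMediaType(AV_CODEC_ID_AC3, 48000, FALSE);
if (m_pOutput->IsConnected() && m_pOutput->GetConnected()->QueryAccept(&mt) == S_OK) {
    // The renderer accepts the S/PDIF type; safe to reconnect with it.
}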
Example #7
static int v4w_open_videodevice(V4wState *s, int format, MSVideoSize *vsize)
{
	// Initialize COM
	CoInitialize(NULL);

	// get a Graph
	HRESULT hr=s->m_pGraph.CoCreateInstance(CLSID_FilterGraph);
	if(FAILED(hr))
	{
		return -1;
	}

	// get a CaptureGraphBuilder2
#if !defined(_WIN32_WCE)
	hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder2);
#else
	hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder);
#endif
	if(FAILED(hr))
	{
		return -2;
	}

	// connect capture graph builder with the graph
	s->m_pBuilder->SetFiltergraph(s->m_pGraph);

	// get mediacontrol so we can start and stop the filter graph
	hr=s->m_pGraph.QueryInterface(&(s->m_pControl));
	if(FAILED(hr))
	{
		return -3;
	}

	// get DXFilter
	s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
	if(s->m_pDXFilter==NULL)
	{
		return -4;
	}
	s->m_pDXFilter->AddRef();
	if(FAILED(hr))
	{
		return -4;
	}

	CMediaType mt;
	mt.SetType(&MEDIATYPE_Video);

	if (format==MS_YUV420P)
	{
		GUID m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
		mt.SetSubtype(&m);
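		// note: the next call overrides the I420 subtype just set, leaving YV12 in effect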
		mt.SetSubtype(&MEDIASUBTYPE_YV12);
	}
	else //if (format==MS_RGB24)
	{
		mt.SetSubtype(&MEDIASUBTYPE_RGB24);
	}

	//mt.SetSubtype(&MEDIASUBTYPE_IYUV);
	//mt.SetSubtype(&MEDIASUBTYPE_YUYV);
	//mt.SetSubtype(&MEDIASUBTYPE_RGB24);
	//mt.SetSampleSize();
	mt.formattype = FORMAT_VideoInfo;
	mt.SetTemporalCompression(FALSE);

	VIDEOINFO *pvi = (VIDEOINFO *)mt.AllocFormatBuffer(sizeof(VIDEOINFO));
	if (NULL == pvi)
		return E_OUTOFMEMORY;
	ZeroMemory(pvi, sizeof(VIDEOINFO));
	if (format==MS_YUV420P)
	{
		pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
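		// note: the next line overwrites this with the YV12 FOURCC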
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','V','1','2');
		pvi->bmiHeader.biBitCount = 12;
	}
	else
	{
		pvi->bmiHeader.biCompression = BI_RGB;
		pvi->bmiHeader.biBitCount = 24;
	}
	pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	pvi->bmiHeader.biWidth = vsize->width;
	pvi->bmiHeader.biHeight = vsize->height;
	pvi->bmiHeader.biPlanes = 1;
	pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
	pvi->bmiHeader.biClrImportant = 0;
	mt.SetSampleSize(pvi->bmiHeader.biSizeImage);
	mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

	hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
	if(FAILED(hr))
	{
		return -5;
	}

	hr = s->m_pDXFilter->SetCallback(Callback); 
	if(FAILED(hr))
	{
		return -6;
	}

	hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
	 (LPVOID *)&s->m_pIDXFilter);
	if(FAILED(hr))
	{
		return -7;
	}

	hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
	if(FAILED(hr))
	{
		return -8;
	}

#ifdef WM6
	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;

	ULONG nFetched = 0;

	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
	if(FAILED(hr))
	{
		return -9;
	}

	hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (FAILED(hr) || pEnumMoniker == NULL) {
		//printf("no device\n");
		return -10;
	}

	pEnumMoniker->Reset();

	hr = pEnumMoniker->Next(1, &pMoniker, &nFetched);
	if(FAILED(hr) || pMoniker==NULL)
	{
		return -11;
	}

	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
	if(FAILED(hr))
	{
		return -12;
	}

	s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

	pMoniker->Release();
	pEnumMoniker->Release();
	pCreateDevEnum->Release();
#else
	WCHAR wzDeviceName[ MAX_PATH + 1 ];
	CComVariant   varCamName;
	CPropertyBag PropBag;
    CComPtr<IPersistPropertyBag>    pPropertyBag;
	GetFirstCameraDriver(wzDeviceName);

	hr = s->m_pDeviceFilter.CoCreateInstance( CLSID_VideoCapture ); 
	if (FAILED(hr))
	{
		return -8;
	}

	s->m_pDeviceFilter.QueryInterface( &pPropertyBag );
	varCamName = wzDeviceName;
	if (varCamName.vt != VT_BSTR) {
	  return E_OUTOFMEMORY;
	}
	PropBag.Write( L"VCapName", &varCamName );   
	pPropertyBag->Load( &PropBag, NULL );
	pPropertyBag.Release();

	hr = s->m_pGraph->AddFilter( s->m_pDeviceFilter, L"Video capture source" );
#endif

	if (FAILED(hr))
	{
		return -8;
	}

	// get null renderer
	s->m_pNullRenderer = NULL;
#if 0
	hr=s->m_pNullRenderer.CoCreateInstance(CLSID_NullRenderer);
	if(FAILED(hr))
	{
		return -13;
	}
#endif
	if (s->m_pNullRenderer!=NULL)
	{
		s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
	}

	hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_PREVIEW,
		&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
	if (FAILED(hr))
	{
		//hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_CAPTURE,
		//	&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
		if (FAILED(hr))
		{
			return -14;
		}
	}
	
	//m_pDXFilter->SetBufferSamples(TRUE);


		// Create the System Device Enumerator.
IFilterMapper *pMapper = NULL;
//IEnumMoniker *pEnum = NULL;
IEnumRegFilters *pEnum = NULL;

hr = CoCreateInstance(CLSID_FilterMapper, 
    NULL, CLSCTX_INPROC, IID_IFilterMapper, 
    (void **) &pMapper);

if (FAILED(hr))
{
    // Error handling omitted for clarity.
}

GUID arrayInTypes[2];
arrayInTypes[0] = MEDIATYPE_Video;
arrayInTypes[1] = MEDIASUBTYPE_dvsd;

hr = pMapper->EnumMatchingFilters(
        &pEnum,
        MERIT_HW_COMPRESSOR, // Minimum merit.
        FALSE,               // At least one input pin?
        MEDIATYPE_NULL,
        MEDIASUBTYPE_NULL,
        FALSE,              // Must be a renderer?
        FALSE,               // At least one output pin?
        MEDIATYPE_NULL,                  
        MEDIASUBTYPE_NULL);              

// Enumerate the monikers.
//IMoniker *pMoniker;
REGFILTER *pMoniker;
ULONG cFetched;
while (pEnum->Next(1, &pMoniker, &cFetched) == S_OK)
{
    IPropertyBag *pPropBag = NULL;
#if 0
	hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, 
       (void **)&pPropBag);

    if (SUCCEEDED(hr))
    {
        // To retrieve the friendly name of the filter, do the following:
        VARIANT varName;
        VariantInit(&varName);
        hr = pPropBag->Read(L"FriendlyName", &varName, 0);
        if (SUCCEEDED(hr))
        {
            // Display the name in your UI somehow.
        }
        VariantClear(&varName);

        // To create an instance of the filter, do the following:
        IBaseFilter *pFilter;
        hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&pFilter);
        // Now add the filter to the graph. Remember to release pFilter later.
    
        // Clean up.
        pPropBag->Release();
    }
    pMoniker->Release();
#endif

}

// Clean up.
pMapper->Release();
pEnum->Release();




	s_callback = s;
	hr = s->m_pControl->Run();
	if(FAILED(hr))
	{
		return -15;
	}

	s->rotregvalue=1;
	s->pix_fmt = format;
	s->vsize.height = vsize->height;
	s->vsize.width = vsize->width;
	return 0;
}
Example #8
static int v4w_open_videodevice(V4wState *s)
{
	// Initialize COM
	CoInitialize(NULL);

	// get a Graph
	HRESULT hr= CoCreateInstance (CLSID_FilterGraph,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IGraphBuilder, //IID_IBaseFilter,
		(void **)&s->m_pGraph);
	if(FAILED(hr))
	{
		return -1;
	}

	// get a CaptureGraphBuilder2
	hr= CoCreateInstance (CLSID_CaptureGraphBuilder2,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_ICaptureGraphBuilder2, //IID_IBaseFilter,
		(void **)&s->m_pBuilder);
	if(FAILED(hr))
	{
		return -2;
	}

	// connect capture graph builder with the graph
	s->m_pBuilder->SetFiltergraph(s->m_pGraph);

	// get mediacontrol so we can start and stop the filter graph
	hr=s->m_pGraph->QueryInterface (IID_IMediaControl, (void **)&s->m_pControl);
	if(FAILED(hr))
	{
		return -3;
	}


	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;

	ULONG nFetched = 0;

	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
	if(FAILED(hr))
	{
		return -4;
	}

	hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (FAILED(hr) || pEnumMoniker == NULL) {
		//printf("no device\n");
		return -5;
	}

	pEnumMoniker->Reset();

	int pos=0;
	while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
	{
		IPropertyBag *pBag;
		hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag );
		if( hr != S_OK )
			continue; 

		if (s->dev[0]=='\0')
			break;

		VARIANT var;
		VariantInit(&var);
		hr = pBag->Read( L"FriendlyName", &var, NULL ); 
		if( hr != S_OK )
		{
			pMoniker->Release();
			continue;
		}
		//USES_CONVERSION;
		char szName[256];

		WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0);
		VariantClear(&var); 

		if (strcmp(szName, s->dev)==0)
			break;

		pMoniker->Release();
		pBag->Release();
		pMoniker=NULL;
		pBag=NULL;
	}

	if(pMoniker==NULL)
	{
		return -6;
	}

	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
	if(FAILED(hr))
	{
		return -7;
	}

	s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

	pMoniker->Release();
	pEnumMoniker->Release();
	pCreateDevEnum->Release();


	GUID pPinCategory;

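	// Probe pixel formats in order of preference; the first branch keeps the requested format unchanged.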
	if (try_format(s->m_pDeviceFilter, s->pix_fmt, &pPinCategory)==0)
		s->pix_fmt = s->pix_fmt;
	else if (try_format(s->m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0)
		s->pix_fmt = MS_YUV420P;
	else if (try_format(s->m_pDeviceFilter,MS_YUY2, &pPinCategory)==0)
		s->pix_fmt = MS_YUY2;
	else if (try_format(s->m_pDeviceFilter,MS_YUYV, &pPinCategory)==0)
		s->pix_fmt = MS_YUYV;
	else if (try_format(s->m_pDeviceFilter,MS_UYVY, &pPinCategory)==0)
		s->pix_fmt = MS_UYVY;
	else if (try_format(s->m_pDeviceFilter,MS_RGB24, &pPinCategory)==0)
		s->pix_fmt = MS_RGB24;
	else
	{
		ms_error("Unsupported video pixel format.");
		return -8;
	}

	if (s->pix_fmt == MS_YUV420P)
		ms_message("Driver supports YUV420P, using that format.");
	else if (s->pix_fmt == MS_YUY2)
		ms_message("Driver supports YUY2 (YUYV), using that format.");
	else if (s->pix_fmt == MS_YUYV)
		ms_message("Driver supports YUV422, using that format.");
	else if (s->pix_fmt == MS_UYVY)
		ms_message("Driver supports UYVY, using that format.");
	else if (s->pix_fmt == MS_RGB24)
		ms_message("Driver supports RGB24, using that format.");

	if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H);
	else
	{
		ms_error("No supported size found for format.");
		/* size not supported? */
		return -9;
	}

	// get DXFilter
	s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
	if(s->m_pDXFilter==NULL)
	{
		return -10;
	}
	s->m_pDXFilter->AddRef();

	CMediaType mt;
	mt.SetType(&MEDIATYPE_Video);

	GUID m = MEDIASUBTYPE_RGB24;
	if (s->pix_fmt == MS_YUV420P)
		m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
	else if (s->pix_fmt == MS_YUY2)
		m = MEDIASUBTYPE_YUY2;
	else if (s->pix_fmt == MS_YUYV)
		m = MEDIASUBTYPE_YUYV;
	else if (s->pix_fmt == MS_UYVY)
		m = MEDIASUBTYPE_UYVY;
	else if (s->pix_fmt == MS_RGB24)
		m = MEDIASUBTYPE_RGB24;
	mt.SetSubtype(&m);

	mt.formattype = FORMAT_VideoInfo;
	mt.SetTemporalCompression(FALSE);

	VIDEOINFO *pvi = (VIDEOINFO *)
		mt.AllocFormatBuffer(sizeof(VIDEOINFO));
	if (NULL == pvi)
		return -11;
	ZeroMemory(pvi, sizeof(VIDEOINFO));

	if (s->pix_fmt == MS_YUV420P)
		pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
	else if (s->pix_fmt == MS_YUY2)
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
	else if (s->pix_fmt == MS_YUYV)
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','V');
	else if (s->pix_fmt == MS_UYVY)
		pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
	else if (s->pix_fmt == MS_RGB24)
		pvi->bmiHeader.biCompression = BI_RGB;

	if (s->pix_fmt == MS_YUV420P)
		pvi->bmiHeader.biBitCount = 12;
	else if (s->pix_fmt == MS_YUY2)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_YUYV)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_UYVY)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_RGB24)
		pvi->bmiHeader.biBitCount = 24;

	pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	pvi->bmiHeader.biWidth = s->vsize.width;
	pvi->bmiHeader.biHeight = s->vsize.height;
	pvi->bmiHeader.biPlanes = 1;
	pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
	pvi->bmiHeader.biClrImportant = 0;
	mt.SetSampleSize(pvi->bmiHeader.biSizeImage);

	mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

	hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
	if(FAILED(hr))
	{
		return -12;
	}

	hr = s->m_pDXFilter->SetCallback(Callback); 
	if(FAILED(hr))
	{
		return -13;
	}

	hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
		(LPVOID *)&s->m_pIDXFilter);
	if(FAILED(hr))
	{
		return -14;
	}

	hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
	if(FAILED(hr))
	{
		return -15;
	}


	// get null renderer
	hr=CoCreateInstance (CLSID_NullRenderer,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IBaseFilter,
		(void **)&s->m_pNullRenderer);
	if(FAILED(hr))
	{
		return -16;
	}
	if (s->m_pNullRenderer!=NULL)
	{
		s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
	}

	hr = s->m_pBuilder->RenderStream(&pPinCategory,
		&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
	if (FAILED(hr))
	{
		return -17;
	}

	IAMStreamConfig *pConfig = NULL;
	hr = s->m_pBuilder->FindInterface(
		&pPinCategory, // Preview pin.
		&MEDIATYPE_Video,    // Any media type.
		s->m_pDeviceFilter, // Pointer to the capture filter.
		IID_IAMStreamConfig, (void**)&pConfig); 
	if (pConfig!=NULL)
	{
		AM_MEDIA_TYPE *pType = NULL;
		int iCount, iSize;
		pConfig->GetNumberOfCapabilities(&iCount, &iSize);

		for (int i = 0; i < iCount; i++) {
			VIDEO_STREAM_CONFIG_CAPS scc;
			pType = NULL;
			pConfig->GetStreamCaps(i, &pType, (BYTE *)&scc);

			if (!((pType->formattype == FORMAT_VideoInfo) &&
				(pType->cbFormat >= sizeof(VIDEOINFOHEADER)) &&
				(pType->pbFormat != NULL)))
				continue;

			VIDEOINFOHEADER & videoInfo = *(VIDEOINFOHEADER *)pType->pbFormat;

			if (m != pType->subtype)
				continue;

			if (videoInfo.bmiHeader.biWidth != s->vsize.width)
				continue;

			if (videoInfo.bmiHeader.biHeight != s->vsize.height)
				continue;

			if (videoInfo.bmiHeader.biBitCount != pvi->bmiHeader.biBitCount)
				continue;

			if (videoInfo.bmiHeader.biCompression != pvi->bmiHeader.biCompression)
				continue;

			videoInfo.AvgTimePerFrame = UNITS / (LONGLONG)s->fps;
			pConfig->SetFormat(pType);    
		}

		pConfig->GetFormat(&pType);
		if (pType!=NULL)
		{
			VIDEOINFO *pvi;
			pvi = (VIDEOINFO *)pType->pbFormat;
			ms_message("v4w: camera asked fps=%.2f // real fps=%.2f", s->fps, ((float)UNITS / (float)pvi->AvgTimePerFrame));
		}

		pConfig->Release();
	}

	//m_pDXFilter->SetBufferSamples(TRUE);

	s_callback = s;
	hr = s->m_pControl->Run();
	if(FAILED(hr))
	{
		return -18;
	}


	s->rotregvalue=1;
	return 0;
}
Example #9
HRESULT CFSTSplitterFilter::Initialize(IPin *pPin, IAsyncReader *pReader)
{
	// Check and validate the pointer
	CheckPointer(pReader, E_POINTER);
	ValidateReadPtr(pReader, sizeof(IAsyncReader));

	// Read file header
	FST_HEADER header;
	HRESULT hr = pReader->SyncRead(0, sizeof(header), (BYTE*)&header);
	if (hr != S_OK)
		return hr;

	// Verify file header
	if (header.dwID != FST_ID_2TSF)
		return VFW_E_INVALID_FILE_FORMAT;

	// Protect the filter data
	CAutoLock datalock(&m_csData);

	// Set video stream info
	m_nVideoFrames		= header.nVideoFrames;
	m_nFramesPerSecond	= header.nFramesPerSecond;

	// Allocate frame table
	m_pFrameTable = (FST_FRAME_ENTRY*)CoTaskMemAlloc(m_nVideoFrames * sizeof(FST_FRAME_ENTRY));
	if (m_pFrameTable == NULL) {
		Shutdown();
		return E_OUTOFMEMORY;
	}

	// Read in the frame table
	hr = pReader->SyncRead(sizeof(header), m_nVideoFrames * sizeof(FST_FRAME_ENTRY), (BYTE*)m_pFrameTable);
	if (hr != S_OK) {
		Shutdown();
		return hr;
	}

	// Walk the frame table and determine the maximum frame data sizes
	DWORD cbMaxImage = 0, cbMaxSound = 0;
	for (DWORD iFrame = 0; iFrame < m_nVideoFrames; iFrame++) {
		if (m_pFrameTable[iFrame].cbImage > cbMaxImage)
			cbMaxImage = m_pFrameTable[iFrame].cbImage;
		if (m_pFrameTable[iFrame].cbSound > cbMaxSound)
			cbMaxSound = m_pFrameTable[iFrame].cbSound;
	}

	// Set file positions
	m_llDefaultStart	= (LONGLONG)sizeof(FST_HEADER) + m_nVideoFrames * sizeof(FST_FRAME_ENTRY);	// Right after the header and frame table
	m_llDefaultStop		= MAXLONGLONG; // Defaults to file end

	// Decide on the input pin properties
	m_cbInputAlign	= 1;
	m_cbInputBuffer	= cbMaxImage + cbMaxSound;

	// Protect the output pins state
	CAutoLock pinlock(&m_csPins);

	// Decide on the output pins count
	m_nOutputPins = 1;	// Video is always present

	// Check if we have soundtrack
	if (
		(header.dwAudioSampleRate	!= 0) &&
		(header.nAudioBits			!= 0) &&
		(m_pFrameTable[0].cbSound	!= 0)
	)
		m_nOutputPins++;

	// Create output pins array
	ASSERT(m_ppOutputPin == NULL);
	m_ppOutputPin = new CParserOutputPin*[m_nOutputPins];
	if (m_ppOutputPin == NULL) {
		m_nOutputPins = 0;
		Shutdown();
		return E_OUTOFMEMORY;
	}

	// Reset the output pin array elements to NULLs
	for (int i = 0; i < m_nOutputPins; i++)
		m_ppOutputPin[i] = NULL;

	// Allocate video media type
	CMediaType *pmtVideo = new CMediaType();
	if (pmtVideo == NULL) {
		Shutdown();
		return E_OUTOFMEMORY;
	}

	// Initialize the video media type
	pmtVideo->InitMediaType();
	pmtVideo->SetType(&MEDIATYPE_Video);
	pmtVideo->SetSubtype(&MEDIASUBTYPE_FSTVideo);
	pmtVideo->SetSampleSize(0);
	pmtVideo->SetTemporalCompression(TRUE);
	pmtVideo->SetFormatType(&FORMAT_FSTVideo);
	if (!pmtVideo->SetFormat((BYTE*)&header, sizeof(header))) {
		delete pmtVideo;
		Shutdown();
		return E_FAIL;
	}

	// Allocate the video allocator properties
	ALLOCATOR_PROPERTIES *papVideo = (ALLOCATOR_PROPERTIES*)CoTaskMemAlloc(sizeof(ALLOCATOR_PROPERTIES));
	if (papVideo == NULL) {
		delete pmtVideo;
		Shutdown();
		return E_OUTOFMEMORY;
	}

	// Set the video allocator properties
	papVideo->cbAlign	= 0;	// No matter
	papVideo->cbPrefix	= 0;	// No matter
	papVideo->cBuffers	= 4;	// TEMP: No need to set larger value?
	papVideo->cbBuffer	= cbMaxImage;

	// Allocate time formats array. If we fail here, it's not an error, 
	// we'll just set zero seeker parameters and may proceed
	DWORD dwVideoCapabilities = 0;
	int nVideoTimeFormats = 0;
	GUID *pVideoTimeFormats = (GUID*)CoTaskMemAlloc(3 * sizeof(GUID));
	if (pVideoTimeFormats) {

		nVideoTimeFormats = 3;

		// Fill in the time formats array
		pVideoTimeFormats[0] = TIME_FORMAT_MEDIA_TIME;
		pVideoTimeFormats[1] = TIME_FORMAT_FRAME;
		pVideoTimeFormats[2] = TIME_FORMAT_SAMPLE;

		dwVideoCapabilities =	AM_SEEKING_CanGetCurrentPos	|
								AM_SEEKING_CanGetStopPos	|
								AM_SEEKING_CanGetDuration;
	}

	// Create video output pin (always the first one!)
	hr = NOERROR;
	m_ppOutputPin[0] = new CParserOutputPin(
		NAME("FST Splitter Video Output Pin"),
		this,
		&m_csFilter,
		pmtVideo,
		papVideo,
		dwVideoCapabilities,
		nVideoTimeFormats,
		pVideoTimeFormats,
		&hr,
		wszFSTVideoOutputName
	);
	if (
		(FAILED(hr)) ||
		(m_ppOutputPin[0] == NULL)
	) {
		if (m_ppOutputPin[0]) {
			delete m_ppOutputPin[0];
			m_ppOutputPin[0] = NULL;
		} else {
			delete pmtVideo;
			CoTaskMemFree(papVideo);
			if (pVideoTimeFormats)
				CoTaskMemFree(pVideoTimeFormats);
		}
		Shutdown();

		if (FAILED(hr))
			return hr;
		else
			return E_OUTOFMEMORY;
	}
	// Hold a reference on the video output pin
	m_ppOutputPin[0]->AddRef();

	// We've created a new pin -- so increment pin version
	IncrementPinVersion();

	if (m_nOutputPins > 1) {

		// Allocate audio media type
		CMediaType *pmtAudio = new CMediaType();
		if (pmtAudio == NULL) {
			Shutdown();
			return E_OUTOFMEMORY;
		}

		// Initialize the audio media type
		pmtAudio->InitMediaType();
		pmtAudio->SetType(&MEDIATYPE_Audio);
		pmtAudio->SetSubtype(&MEDIASUBTYPE_PCM);
		pmtAudio->SetSampleSize(header.nAudioBits * (header.wAudioChannels + 1) / 8);
		pmtAudio->SetTemporalCompression(FALSE);
		pmtAudio->SetFormatType(&FORMAT_WaveFormatEx);
		WAVEFORMATEX *pFormat = (WAVEFORMATEX*)pmtAudio->AllocFormatBuffer(sizeof(WAVEFORMATEX));
		if (pFormat == NULL) {
			delete pmtAudio;
			Shutdown();
			return E_OUTOFMEMORY;
		}

		// Fill in the audio format block
		pFormat->wFormatTag			= WAVE_FORMAT_PCM;
		pFormat->nChannels			= header.wAudioChannels + 1; // TEMP: Is it really so?
		pFormat->nSamplesPerSec		= header.dwAudioSampleRate;
		pFormat->nAvgBytesPerSec	= pFormat->nChannels * header.nAudioBits * header.dwAudioSampleRate / 8;
		pFormat->nBlockAlign		= pFormat->nChannels * header.nAudioBits / 8;
		pFormat->wBitsPerSample		= header.nAudioBits;
		pFormat->cbSize				= 0;

		// Allocate the audio allocator properties
		ALLOCATOR_PROPERTIES *papAudio = (ALLOCATOR_PROPERTIES*)CoTaskMemAlloc(sizeof(ALLOCATOR_PROPERTIES));
		if (papAudio == NULL) {
			delete pmtAudio;
			Shutdown();
			return E_OUTOFMEMORY;
		}

		// Set the audio allocator properties
		papAudio->cbAlign	= 0;	// No matter
		papAudio->cbPrefix	= 0;	// No matter
		papAudio->cBuffers	= 4;	// No use to set different from video value
		papAudio->cbBuffer	= cbMaxSound;

		// Set the wave format parameters needed for the calculation
		// of sample stream and media duration
		m_nSampleSize		= pFormat->nBlockAlign;
		m_nAvgBytesPerSec	= pFormat->nAvgBytesPerSec;

		// Allocate time formats array. If we fail here, it's not an error, 
		// we'll just set zero seeker parameters and may proceed
		DWORD dwAudioCapabilities = 0;
		int nAudioTimeFormats = 0;
		GUID *pAudioTimeFormats = (GUID*)CoTaskMemAlloc(3 * sizeof(GUID));
		if (pAudioTimeFormats) {

			nAudioTimeFormats = 3;

			// Fill in the time formats array
			pAudioTimeFormats[0] = TIME_FORMAT_MEDIA_TIME;
			pAudioTimeFormats[1] = TIME_FORMAT_SAMPLE;
			pAudioTimeFormats[2] = TIME_FORMAT_BYTE;

			dwAudioCapabilities	=	AM_SEEKING_CanGetCurrentPos	|
									AM_SEEKING_CanGetStopPos	|
									AM_SEEKING_CanGetDuration;
		}

		// Create audio output pin
		hr = NOERROR;
		m_ppOutputPin[1] = new CParserOutputPin(
			NAME("FST Splitter Audio Output Pin"),
			this,
			&m_csFilter,
			pmtAudio,
			papAudio,
			dwAudioCapabilities,
			nAudioTimeFormats,
			pAudioTimeFormats,
			&hr,
			wszFSTAudioOutputName
		);
		if (
			(FAILED(hr)) ||
			(m_ppOutputPin[1] == NULL)
		) {
			if (m_ppOutputPin[1]) {
				delete m_ppOutputPin[1];
				m_ppOutputPin[1] = NULL;
			} else {
				delete pmtAudio;
				CoTaskMemFree(papAudio);
				if (pAudioTimeFormats)
					CoTaskMemFree(pAudioTimeFormats);
			}
			Shutdown();

			if (FAILED(hr))
				return hr;
			else
				return E_OUTOFMEMORY;
		}
		// Hold a reference on the audio output pin
		m_ppOutputPin[1]->AddRef();

		// We've created a new pin -- so increment pin version
		IncrementPinVersion();
	}

	// Scope for the locking
	{
		// Protect media content information
		CAutoLock infolock(&m_csInfo);

		// Set the media content strings
		m_wszAuthorName = (OLECHAR*)CoTaskMemAlloc(sizeof(OLECHAR) * (lstrlenW(wszFSTAuthorName) + 1));
		if (m_wszAuthorName)
			lstrcpyW(m_wszAuthorName, wszFSTAuthorName);
		m_wszDescription = (OLECHAR*)CoTaskMemAlloc(sizeof(OLECHAR) * (lstrlenW(wszFSTDescription) + 1));
		if (m_wszDescription)
			lstrcpyW(m_wszDescription, wszFSTDescription);
	}

	return NOERROR;
}
Example #10
HRESULT CMatroskaSplitterFilter::CreateOutputs(IAsyncReader* pAsyncReader)
{
	CheckPointer(pAsyncReader, E_POINTER);

	HRESULT hr = E_FAIL;

	m_pFile.Free();
	m_pTrackEntryMap.RemoveAll();
	m_pOrderedTrackArray.RemoveAll();

	CAtlArray<CMatroskaSplitterOutputPin*> pinOut;
	CAtlArray<TrackEntry*> pinOutTE;

	m_pFile.Attach(DNew CMatroskaFile(pAsyncReader, hr));
	if(!m_pFile) return E_OUTOFMEMORY;
	if(FAILED(hr)) {m_pFile.Free(); return hr;}

	m_rtNewStart = m_rtCurrent = 0;
	m_rtNewStop = m_rtStop = m_rtDuration = 0;

	int iVideo = 1, iAudio = 1, iSubtitle = 1;

	POSITION pos = m_pFile->m_segment.Tracks.GetHeadPosition();
	while(pos)
	{
		Track* pT = m_pFile->m_segment.Tracks.GetNext(pos);

		POSITION pos2 = pT->TrackEntries.GetHeadPosition();
		while(pos2)
		{
			TrackEntry* pTE = pT->TrackEntries.GetNext(pos2);

			bool isSub = false;

			if(!pTE->Expand(pTE->CodecPrivate, ContentEncoding::TracksPrivateData))
				continue;

			CStringA CodecID = pTE->CodecID.ToString();

			CStringW Name;
			Name.Format(L"Output %I64d", (UINT64)pTE->TrackNumber);

			CMediaType mt;
			CAtlArray<CMediaType> mts;

			mt.SetSampleSize(1);

			if(pTE->TrackType == TrackEntry::TypeVideo)
			{
				Name.Format(L"Video %d", iVideo++);

				mt.majortype = MEDIATYPE_Video;

				if(CodecID == "V_MS/VFW/FOURCC")
				{
					mt.formattype = FORMAT_VideoInfo;
					VIDEOINFOHEADER* pvih = (VIDEOINFOHEADER*)mt.AllocFormatBuffer(sizeof(VIDEOINFOHEADER) + pTE->CodecPrivate.GetCount() - sizeof(BITMAPINFOHEADER));
					memset(mt.Format(), 0, mt.FormatLength());
					memcpy(&pvih->bmiHeader, pTE->CodecPrivate.GetData(), pTE->CodecPrivate.GetCount());
					mt.subtype = FOURCCMap(pvih->bmiHeader.biCompression);
					switch(pvih->bmiHeader.biCompression)
					{
					case BI_RGB: case BI_BITFIELDS: mt.subtype = 
						pvih->bmiHeader.biBitCount == 1 ? MEDIASUBTYPE_RGB1 :
						pvih->bmiHeader.biBitCount == 4 ? MEDIASUBTYPE_RGB4 :
						pvih->bmiHeader.biBitCount == 8 ? MEDIASUBTYPE_RGB8 :
						pvih->bmiHeader.biBitCount == 16 ? MEDIASUBTYPE_RGB565 :
						pvih->bmiHeader.biBitCount == 24 ? MEDIASUBTYPE_RGB24 :
						pvih->bmiHeader.biBitCount == 32 ? MEDIASUBTYPE_ARGB32 :
						MEDIASUBTYPE_NULL;
						break;
//					case BI_RLE8: mt.subtype = MEDIASUBTYPE_RGB8; break;
//					case BI_RLE4: mt.subtype = MEDIASUBTYPE_RGB4; break;
					}
					mts.Add(mt);
				}
				else if(CodecID == "V_UNCOMPRESSED")
				{
				}
				else if(CodecID.Find("V_MPEG4/ISO/AVC") == 0 && pTE->CodecPrivate.GetCount() >= 6)
				{
					BYTE sps = pTE->CodecPrivate[5] & 0x1f;

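	// The loop below walks the avcC extradata: each SPS/PPS entry is a 16-bit
	// big-endian length followed by the NAL unit, and the units are concatenated
	// into sh to become the MPEG2VIDEOINFO sequence header.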
	std::vector<BYTE> avcC;
	for(int i = 0, j = pTE->CodecPrivate.GetCount(); i < j; i++)
		avcC.push_back(pTE->CodecPrivate[i]);

	std::vector<BYTE> sh;

	unsigned jj = 6;

	while (sps--) {
	  if (jj + 2 > avcC.size())
	    goto avcfail;
	  unsigned spslen = ((unsigned)avcC[jj] << 8) | avcC[jj+1];
	  if (jj + 2 + spslen > avcC.size())
	    goto avcfail;
	  unsigned cur = sh.size();
	  sh.resize(cur + spslen + 2, 0);
	  std::copy(avcC.begin() + jj, avcC.begin() + jj + 2 + spslen,sh.begin() + cur);
	  jj += 2 + spslen;
	}

	if (jj + 1 > avcC.size())
	  continue;

	unsigned pps = avcC[jj++];

	while (pps--) {
	  if (jj + 2 > avcC.size())
	    goto avcfail;
	  unsigned ppslen = ((unsigned)avcC[jj] << 8) | avcC[jj+1];
	  if (jj + 2 + ppslen > avcC.size())
	    goto avcfail;
	  unsigned cur = sh.size();
	  sh.resize(cur + ppslen + 2, 0);
	  std::copy(avcC.begin() + jj, avcC.begin() + jj + 2 + ppslen, sh.begin() + cur);
	  jj += 2 + ppslen;
	}

	goto avcsuccess;
avcfail:
	continue;
avcsuccess:

					CAtlArray<BYTE> data;
					data.SetCount(sh.size());
					std::copy(sh.begin(), sh.end(), data.GetData());

					mt.subtype = FOURCCMap('1CVA');
					mt.formattype = FORMAT_MPEG2Video;
					MPEG2VIDEOINFO* pm2vi = (MPEG2VIDEOINFO*)mt.AllocFormatBuffer(FIELD_OFFSET(MPEG2VIDEOINFO, dwSequenceHeader) + data.GetCount());
					memset(mt.Format(), 0, mt.FormatLength());
					pm2vi->hdr.bmiHeader.biSize = sizeof(pm2vi->hdr.bmiHeader);
					pm2vi->hdr.bmiHeader.biWidth = (LONG)pTE->v.PixelWidth;
					pm2vi->hdr.bmiHeader.biHeight = (LONG)pTE->v.PixelHeight;
					pm2vi->hdr.bmiHeader.biCompression = '1CVA';
					pm2vi->hdr.bmiHeader.biPlanes = 1;
					pm2vi->hdr.bmiHeader.biBitCount = 24;
					pm2vi->dwProfile = pTE->CodecPrivate[1];
					pm2vi->dwLevel = pTE->CodecPrivate[3];
					pm2vi->dwFlags = (pTE->CodecPrivate[4] & 3) + 1;
					BYTE* pSequenceHeader = (BYTE*)pm2vi->dwSequenceHeader;
					memcpy(pSequenceHeader, data.GetData(), data.GetCount());
					pm2vi->cbSequenceHeader = data.GetCount();
					mts.Add(mt);
				}
				else if(CodecID.Find("V_MPEG4/") == 0)
				{
					mt.subtype = FOURCCMap('V4PM');
					mt.formattype = FORMAT_MPEG2Video;
					MPEG2VIDEOINFO* pm2vi = (MPEG2VIDEOINFO*)mt.AllocFormatBuffer(FIELD_OFFSET(MPEG2VIDEOINFO, dwSequenceHeader) + pTE->CodecPrivate.GetCount());
					memset(mt.Format(), 0, mt.FormatLength());
					pm2vi->hdr.bmiHeader.biSize = sizeof(pm2vi->hdr.bmiHeader);
					pm2vi->hdr.bmiHeader.biWidth = (LONG)pTE->v.PixelWidth;
					pm2vi->hdr.bmiHeader.biHeight = (LONG)pTE->v.PixelHeight;
					pm2vi->hdr.bmiHeader.biCompression = 'V4PM';
					pm2vi->hdr.bmiHeader.biPlanes = 1;
					pm2vi->hdr.bmiHeader.biBitCount = 24;
					BYTE* pSequenceHeader = (BYTE*)pm2vi->dwSequenceHeader;
					memcpy(pSequenceHeader, pTE->CodecPrivate.GetData(), pTE->CodecPrivate.GetCount());
					pm2vi->cbSequenceHeader = pTE->CodecPrivate.GetCount();
					mts.Add(mt);
				}
				else if(CodecID.Find("V_REAL/RV") == 0)
				{
					mt.subtype = FOURCCMap('00VR' + ((CodecID[9]-0x30)<<16));
					mt.formattype = FORMAT_VideoInfo;
					VIDEOINFOHEADER* pvih = (VIDEOINFOHEADER*)mt.AllocFormatBuffer(sizeof(VIDEOINFOHEADER) + pTE->CodecPrivate.GetCount());
					memset(mt.Format(), 0, mt.FormatLength());
					memcpy(mt.Format() + sizeof(VIDEOINFOHEADER), pTE->CodecPrivate.GetData(), pTE->CodecPrivate.GetCount());
					pvih->bmiHeader.biSize = sizeof(pvih->bmiHeader);
					pvih->bmiHeader.biWidth = (LONG)pTE->v.PixelWidth;
					pvih->bmiHeader.biHeight = (LONG)pTE->v.PixelHeight;
					pvih->bmiHeader.biCompression = mt.subtype.Data1;
					mts.Add(mt);
				}
				else if(CodecID == "V_DIRAC")
				{
					mt.subtype = MEDIASUBTYPE_DiracVideo;
					mt.formattype = FORMAT_DiracVideoInfo;
					DIRACINFOHEADER* dvih = (DIRACINFOHEADER*)mt.AllocFormatBuffer(FIELD_OFFSET(DIRACINFOHEADER, dwSequenceHeader) + pTE->CodecPrivate.GetCount());
					memset(mt.Format(), 0, mt.FormatLength());
					dvih->hdr.bmiHeader.biSize = sizeof(dvih->hdr.bmiHeader);
					dvih->hdr.bmiHeader.biWidth = (LONG)pTE->v.PixelWidth;
					dvih->hdr.bmiHeader.biHeight = (LONG)pTE->v.PixelHeight;
					dvih->hdr.dwPictAspectRatioX = dvih->hdr.bmiHeader.biWidth;
					dvih->hdr.dwPictAspectRatioY = dvih->hdr.bmiHeader.biHeight;

					BYTE* pSequenceHeader = (BYTE*)dvih->dwSequenceHeader;
					memcpy(pSequenceHeader, pTE->CodecPrivate.GetData(), pTE->CodecPrivate.GetCount());
					dvih->cbSequenceHeader = pTE->CodecPrivate.GetCount();

					mts.Add(mt);
				}
				else if(CodecID == "V_MPEG2")
				{
					BYTE* seqhdr = pTE->CodecPrivate.GetData();
					DWORD len = pTE->CodecPrivate.GetCount();
					int w = pTE->v.PixelWidth;
					int h = pTE->v.PixelHeight;

					if(MakeMPEG2MediaType(mt, seqhdr, len, w, h))
						mts.Add(mt);
				}
				else if(CodecID == "V_THEORA")
				{
					BYTE* thdr = pTE->CodecPrivate.GetData() + 3;

					mt.majortype		= MEDIATYPE_Video;
					mt.subtype			= FOURCCMap('OEHT');
					mt.formattype		= FORMAT_MPEG2_VIDEO;
					MPEG2VIDEOINFO* vih = (MPEG2VIDEOINFO*)mt.AllocFormatBuffer(sizeof(MPEG2VIDEOINFO) + pTE->CodecPrivate.GetCount());
					memset(mt.Format(), 0, mt.FormatLength());
					vih->hdr.bmiHeader.biSize		 = sizeof(vih->hdr.bmiHeader);
					vih->hdr.bmiHeader.biWidth		 = *(WORD*)&thdr[10] >> 4;
					vih->hdr.bmiHeader.biHeight		 = *(WORD*)&thdr[12] >> 4;
					vih->hdr.bmiHeader.biCompression = 'OEHT';
					vih->hdr.bmiHeader.biPlanes		 = 1;
					vih->hdr.bmiHeader.biBitCount	 = 24;
					int nFpsNum	= (thdr[22]<<24)|(thdr[23]<<16)|(thdr[24]<<8)|thdr[25];
					int nFpsDenum	= (thdr[26]<<24)|(thdr[27]<<16)|(thdr[28]<<8)|thdr[29];
					if(nFpsNum) vih->hdr.AvgTimePerFrame = (REFERENCE_TIME)(10000000.0 * nFpsDenum / nFpsNum);
					vih->hdr.dwPictAspectRatioX = (thdr[14]<<16)|(thdr[15]<<8)|thdr[16];
					vih->hdr.dwPictAspectRatioY = (thdr[17]<<16)|(thdr[18]<<8)|thdr[19];
					mt.bFixedSizeSamples = 0;

					vih->cbSequenceHeader = pTE->CodecPrivate.GetCount();
					memcpy (&vih->dwSequenceHeader, pTE->CodecPrivate.GetData(), vih->cbSequenceHeader);

					mts.Add(mt);
				}
				else if(CodecID.Find("V_VP8") == 0) 
				{ 
					mt.subtype = FOURCCMap('08PV'); 
					mt.formattype = FORMAT_VideoInfo; 
					VIDEOINFOHEADER* pvih = (VIDEOINFOHEADER*)mt.AllocFormatBuffer(sizeof(VIDEOINFOHEADER) + pTE->CodecPrivate.GetCount()); 
					memset(mt.Format(), 0, mt.FormatLength()); 
					memcpy(mt.Format() + sizeof(VIDEOINFOHEADER), pTE->CodecPrivate.GetData(), pTE->CodecPrivate.GetCount()); 
					pvih->bmiHeader.biSize = sizeof(pvih->bmiHeader); 
					pvih->bmiHeader.biWidth = (LONG)pTE->v.PixelWidth; 
					pvih->bmiHeader.biHeight = (LONG)pTE->v.PixelHeight; 
					pvih->bmiHeader.biCompression = mt.subtype.Data1; 
					mts.Add(mt); 
				} 
/*
				else if(CodecID == "V_DSHOW/MPEG1VIDEO") // V_MPEG1
				{
					mt.majortype = MEDIATYPE_Video;
					mt.subtype = MEDIASUBTYPE_MPEG1Payload;
					mt.formattype = FORMAT_MPEGVideo;
					MPEG1VIDEOINFO* pm1vi = (MPEG1VIDEOINFO*)mt.AllocFormatBuffer(pTE->CodecPrivate.GetCount());
					memcpy(pm1vi, pTE->CodecPrivate.GetData(), pTE->CodecPrivate.GetCount());
					mt.SetSampleSize(pm1vi->hdr.bmiHeader.biWidth*pm1vi->hdr.bmiHeader.biHeight*4);
					mts.Add(mt);
				}
*/
				REFERENCE_TIME AvgTimePerFrame = 0;

				if(pTE->v.FramePerSec > 0)
					AvgTimePerFrame = (REFERENCE_TIME)(10000000i64 / pTE->v.FramePerSec);
				else if(pTE->DefaultDuration > 0)
					AvgTimePerFrame = (REFERENCE_TIME)pTE->DefaultDuration / 100;

				if(AvgTimePerFrame)
				{
					for(int i = 0; i < mts.GetCount(); i++)
					{
						if(mts[i].formattype == FORMAT_VideoInfo
						|| mts[i].formattype == FORMAT_VideoInfo2
						|| mts[i].formattype == FORMAT_MPEG2Video)
						{
							((VIDEOINFOHEADER*)mts[i].Format())->AvgTimePerFrame = AvgTimePerFrame;
						}
					}
				}

				if(pTE->v.DisplayWidth != 0 && pTE->v.DisplayHeight != 0)
				{
					for(int i = 0; i < mts.GetCount(); i++)
					{
						if(mts[i].formattype == FORMAT_VideoInfo)
						{
							DWORD vih1 = FIELD_OFFSET(VIDEOINFOHEADER, bmiHeader);
							DWORD vih2 = FIELD_OFFSET(VIDEOINFOHEADER2, bmiHeader);
							DWORD bmi = mts[i].FormatLength() - FIELD_OFFSET(VIDEOINFOHEADER, bmiHeader);
							mt.formattype = FORMAT_VideoInfo2;
							mt.AllocFormatBuffer(vih2 + bmi);
							memcpy(mt.Format(), mts[i].Format(), vih1);
							memset(mt.Format() + vih1, 0, vih2 - vih1);
							memcpy(mt.Format() + vih2, mts[i].Format() + vih1, bmi);
							((VIDEOINFOHEADER2*)mt.Format())->dwPictAspectRatioX = (DWORD)pTE->v.DisplayWidth;
							((VIDEOINFOHEADER2*)mt.Format())->dwPictAspectRatioY = (DWORD)pTE->v.DisplayHeight;
							mts.InsertAt(i++, mt);
						}
						else if(mts[i].formattype == FORMAT_MPEG2Video)
						{
							((MPEG2VIDEOINFO*)mts[i].Format())->hdr.dwPictAspectRatioX = (DWORD)pTE->v.DisplayWidth;
							((MPEG2VIDEOINFO*)mts[i].Format())->hdr.dwPictAspectRatioY = (DWORD)pTE->v.DisplayHeight;
						}
					}
				}
			}
Example #11
HRESULT XnVideoStream::GetStreamCapability(int iIndex, CMediaType& mediaType, VIDEO_STREAM_CONFIG_CAPS& vscc)
{
	// check bounds
	if(iIndex < 0 || iIndex >= int(m_aSupportedModes.GetSize()))
	{
		xnLogVerbose(XN_MASK_FILTER, "GetStreamCapability() - Index %d is out of bounds!", iIndex);
		return S_FALSE;
	}

	VIDEOINFO *pvi = (VIDEOINFO*)mediaType.AllocFormatBuffer(sizeof(VIDEOINFO));
	if(NULL == pvi)
		return(E_OUTOFMEMORY);

	ZeroMemory(pvi, sizeof(VIDEOINFO));

	int xRes = m_aSupportedModes[iIndex].OutputMode.nXRes;
	int yRes = m_aSupportedModes[iIndex].OutputMode.nYRes;

	XnUInt64 nFrameTime = 10000000 / m_aSupportedModes[iIndex].OutputMode.nFPS;

	if (m_aSupportedModes[iIndex].Format == XN_PIXEL_FORMAT_RGB24)
	{
		pvi->bmiHeader.biCompression = BI_RGB;
	}
	else if (m_aSupportedModes[iIndex].Format == XN_PIXEL_FORMAT_MJPEG)
	{
		pvi->bmiHeader.biCompression = 'GPJM';
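		// multi-character constant: stores the FOURCC "MJPG" in little-endian memory order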
	}
	else
	{
		xnLogError(XN_MASK_FILTER, "Unknown format type!");
		return E_UNEXPECTED;
	}

	pvi->bmiHeader.biBitCount	= 24;
	pvi->bmiHeader.biSize       = sizeof(BITMAPINFOHEADER);
	pvi->bmiHeader.biWidth      = xRes;
	pvi->bmiHeader.biHeight     = yRes;
	pvi->bmiHeader.biPlanes     = 1;
	pvi->bmiHeader.biSizeImage  = GetBitmapSize(&pvi->bmiHeader);
	pvi->bmiHeader.biClrImportant = 0;

	SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
	SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle

	pvi->dwBitRate = 
		GetBitmapSize(&pvi->bmiHeader) * // bytes per frame
		m_aSupportedModes[iIndex].OutputMode.nFPS * // frames per second
		8; // bits per byte

	pvi->dwBitErrorRate = 0; // assume no errors
	pvi->AvgTimePerFrame = nFrameTime;

	mediaType.SetType(&MEDIATYPE_Video);
	mediaType.SetFormatType(&FORMAT_VideoInfo);
	mediaType.SetTemporalCompression(FALSE);

	// Work out the GUID for the subtype from the header info.
	const GUID SubTypeGUID = GetBitmapSubtype(&pvi->bmiHeader);
	mediaType.SetSubtype(&SubTypeGUID);
	mediaType.SetSampleSize(pvi->bmiHeader.biSizeImage);

	vscc.guid = FORMAT_VideoInfo;
	vscc.VideoStandard = AnalogVideo_None;
	vscc.InputSize.cx = xRes;
	vscc.InputSize.cy = yRes;
	vscc.MinCroppingSize.cx = xRes;
	vscc.MinCroppingSize.cy = yRes;
	vscc.MaxCroppingSize.cx = xRes;
	vscc.MaxCroppingSize.cy = yRes;
	vscc.CropGranularityX = 1;
	vscc.CropGranularityY = 1;
	vscc.CropAlignX = 1;
	vscc.CropAlignY = 1;

	vscc.MinOutputSize.cx = xRes;
	vscc.MinOutputSize.cy = yRes;
	vscc.MaxOutputSize.cx = xRes;
	vscc.MaxOutputSize.cy = yRes;
	vscc.OutputGranularityX = 1;
	vscc.OutputGranularityY = 1;
	vscc.StretchTapsX = 0;
	vscc.StretchTapsY = 0;
	vscc.ShrinkTapsX = 0;
	vscc.ShrinkTapsY = 0;
	// Frame interval is in 100 nanosecond units
	vscc.MinFrameInterval = nFrameTime;
	vscc.MaxFrameInterval = nFrameTime;
	vscc.MinBitsPerSecond = 
		mediaType.GetSampleSize() * // bytes in frame
		m_aSupportedModes[iIndex].OutputMode.nFPS * // frames per second
		8; // bits per byte
	vscc.MaxBitsPerSecond = vscc.MinBitsPerSecond;

	return S_OK;
}
Example #12
HRESULT CConvert::AddVideoGroup(double dSourceFramerate, long nSourceWidth, long nSourceHeight)
{

    // make the root group/composition
	HRESULT hr = S_OK;
    hr = m_pTimeline->CreateEmptyNode(&m_pVideoGroupObj, TIMELINE_MAJOR_TYPE_GROUP);
    if(FAILED( hr )) 
    {
        return hr;
    }

	CComQIPtr<IAMTimelineGroup, &IID_IAMTimelineGroup> pVideoGroup(m_pVideoGroupObj);
 
	//// Set Media Type
	CMediaType VideoGroupType;
	VideoGroupType.SetType(&MEDIATYPE_Video);
	VideoGroupType.SetSubtype(&MEDIASUBTYPE_RGB24);
	VideoGroupType.SetFormatType(&FORMAT_VideoInfo);
	
	VIDEOINFOHEADER *pVideoHeader = (VIDEOINFOHEADER*)VideoGroupType.AllocFormatBuffer(sizeof(VIDEOINFOHEADER));
	ZeroMemory(pVideoHeader, sizeof(VIDEOINFOHEADER));
	
	pVideoHeader->bmiHeader.biBitCount = 24;
	pVideoHeader->bmiHeader.biWidth = nSourceWidth;
	pVideoHeader->bmiHeader.biHeight = nSourceHeight;
	pVideoHeader->bmiHeader.biPlanes = 1;
	pVideoHeader->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	pVideoHeader->bmiHeader.biSizeImage = DIBSIZE(pVideoHeader->bmiHeader);

	VideoGroupType.SetSampleSize(DIBSIZE(pVideoHeader->bmiHeader));
	
	hr = pVideoGroup->SetMediaType(&VideoGroupType);
    if(FAILED( hr )) 
    {
        return hr;
    }

	
//	double dRequiredInputFramerate = 0;
//	m_pMPEGWriterProps->SetSourceFramerate(dSourceFramerate, &dRequiredInputFramerate);

// 	hr = pVideoGroup->SetOutputFPS(15);


	/*
	if (GetOutputFPS() != 0)
	{
		// the user set a framerate
		hr = pVideoGroup->SetOutputFPS(GetOutputFPS());
		GetMPEGWriterProps()->OverrideSourceFPS(GetOutputFPS());
	}
	else if (IsFrameRateSupported((float)dSourceFramerate))
	{
		// the user did not set a framerate. If the source
		// framerate is supported, we use it.
		hr = pVideoGroup->SetOutputFPS(dSourceFramerate);
		GetMPEGWriterProps()->OverrideSourceFPS((float)dSourceFramerate);
	}
	else
	{
		// the user did not want a framerate, and the framerate
		// of the file is not supported. We use 25fps
		hr = pVideoGroup->SetOutputFPS(25);
		GetMPEGWriterProps()->OverrideSourceFPS(25);
	}
	*/

	hr = m_pTimeline->AddGroup(m_pVideoGroupObj);

	return hr;
}
Example #13
HRESULT CEASpliterFilter::CreateOutputs(IAsyncReader* pAsyncReader)
{
	SVP_LogMsg5(L" CEASpliterFilter::CreateOutputs");
	CheckPointer(pAsyncReader, E_POINTER);

	HRESULT hr = E_FAIL;

	m_pFile.Free();

	m_pFile.Attach(new CEAFile(pAsyncReader, hr));
	if(!m_pFile) return E_OUTOFMEMORY;
	if(FAILED(hr)) {m_pFile.Free(); return hr;}

	m_rtNewStart = m_rtCurrent = 0;
	m_rtNewStop = m_rtStop = m_rtDuration = 0;

	m_pFile->Seek(0);
	DWORD EAFileTag = bswap_32((DWORD)m_pFile->BitRead(32));

	switch(EAFileTag)
	{
		case ISNh_TAG:
		case SCHl_TAG:
		case SEAD_TAG:
		case SHEN_TAG:
		case kVGT_TAG:
		case MADk_TAG:
		case MPCh_TAG:
		case MVhd_TAG:
		case MVIh_TAG:
			SVP_LogMsg5(L"GotEA Tag");
			break;
		default:
			return hr;
	}

	m_pFile->Seek(0);
	EaDemuxContext *ea = (EaDemuxContext *)m_pea;
	SVP_LogMsg5(L"EABefore Info %x %x", ea->audio_codec, ea->video_codec);
	process_header();
	SVP_LogMsg5(L"GotEA Info  %d %d", ea->audio_codec, ea->video_codec);
	int idPin = 0;
	while(ea->video_codec){
		CMediaType mt;
		mt.SetSampleSize(1);
		mt.subtype = GUID_NULL;
		switch(ea->video_codec){

			case CODEC_ID_VP6:
				{
					mt.majortype = MEDIATYPE_Video;
					mt.formattype = FORMAT_VideoInfo;
					VIDEOINFOHEADER* vih = (VIDEOINFOHEADER*)mt.AllocFormatBuffer(sizeof(VIDEOINFOHEADER));
					memset(vih, 0, sizeof(VIDEOINFOHEADER));
					BITMAPINFOHEADER* bih = &vih->bmiHeader;
					vih->bmiHeader.biWidth = ea->width;
					vih->bmiHeader.biHeight = ea->height;
					bih->biCompression = '26PV';
					mt.subtype = MEDIASUBTYPE_VP62;
				}
				break;
			case CODEC_ID_MPEG2VIDEO:
				//TODO: handle MPEG2 in vp6
				/*mt.formattype = FORMAT_MPEG2Video;
				MPEG2VIDEOINFO* vih = (MPEG2VIDEOINFO*)mt.AllocFormatBuffer(FIELD_OFFSET(MPEG2VIDEOINFO, dwSequenceHeader) + headerSize);
				memset(vih, 0, mt.FormatLength());
				vih->hdr.bmiHeader.biSize = sizeof(vih->hdr.bmiHeader);
				vih->hdr.bmiHeader.biPlanes = 1;
				vih->hdr.bmiHeader.biBitCount = 24;
				vih->hdr.bmiHeader.biWidth =  ea->width;
				vih->hdr.bmiHeader.biHeight = ea->height;
				*/
			case CODEC_ID_MDEC:
			case CODEC_ID_CMV:
			case CODEC_ID_TGV:
			case CODEC_ID_TGQ:
			case CODEC_ID_TQI:
			case CODEC_ID_MAD:
				SVP_LogMsg5(L"sorry we cant handle this now");
				break;
		}
		
		if(mt.subtype == GUID_NULL)
			break;
		CAtlArray<CMediaType> mts;
		mts.Add(mt);
		CAutoPtr<CBaseSplitterOutputPin> pPinOut(new CBaseSplitterOutputPin(mts, L"Video", this, this, &hr));
		EXECUTE_ASSERT(SUCCEEDED(AddOutputPin(idPin, pPinOut)));
		
		idPin++;
		break;
	}
	if(ea->audio_codec){
		CMediaType mt;
		mt.SetSampleSize(1);
		

		mt.majortype = MEDIATYPE_Audio;
		mt.formattype = FORMAT_WaveFormatEx;
		
		switch(ea->audio_codec){
			case CODEC_ID_MP3:
				mt.subtype = MEDIASUBTYPE_MP3;
				{
					WAVEFORMATEX* wfe = (WAVEFORMATEX*)mt.AllocFormatBuffer(sizeof(WAVEFORMATEX));
					memset(wfe, 0, sizeof(WAVEFORMATEX));
					wfe->nSamplesPerSec = ea->sample_rate;
					wfe->wBitsPerSample = ea->bytes * 8;
					wfe->nChannels = ea->num_channels;
				}
				break;
			case CODEC_ID_ADPCM_EA:
			case CODEC_ID_ADPCM_EA_R1:
			case CODEC_ID_ADPCM_EA_R2:
			case CODEC_ID_ADPCM_EA_R3:
			case CODEC_ID_PCM_S16LE_PLANAR:
			case CODEC_ID_PCM_S8:
			case CODEC_ID_PCM_S16LE:
			case CODEC_ID_PCM_MULAW:
			case CODEC_ID_ADPCM_IMA_EA_EACS:
			case CODEC_ID_ADPCM_IMA_EA_SEAD:
				break;
		}
		
		if(mt.subtype != GUID_NULL){

			CAtlArray<CMediaType> mts;
			mts.Add(mt);
			CAutoPtr<CBaseSplitterOutputPin> pPinOut(new CBaseSplitterOutputPin(mts, L"Audio", this, this, &hr));
			EXECUTE_ASSERT(SUCCEEDED(AddOutputPin(idPin, pPinOut)));
		}

	}
	
	SVP_LogMsg5(L"EA Out %d", m_pOutputs.GetCount());
	m_rtNewStop = m_rtStop = m_rtDuration ;

	return m_pOutputs.GetCount() > 0 ? S_OK : E_FAIL;
}
Example #14
static int v4w_open_videodevice(V4wState *s)
{
    // Initialize COM
    CoInitialize(NULL);

    // get a Graph
    HRESULT hr=s->m_pGraph.CoCreateInstance(CLSID_FilterGraph);
    if(FAILED(hr))
    {
        return -1;
    }

    // get a CaptureGraphBuilder2
    hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder2);
    if(FAILED(hr))
    {
        return -2;
    }

    // connect capture graph builder with the graph
    s->m_pBuilder->SetFiltergraph(s->m_pGraph);

    // get mediacontrol so we can start and stop the filter graph
    hr=s->m_pGraph.QueryInterface(&(s->m_pControl));
    if(FAILED(hr))
    {
        return -3;
    }


#ifdef _DEBUG
    HANDLE m_hLogFile=CreateFile(L"DShowGraphLog.txt",GENERIC_READ|GENERIC_WRITE,FILE_SHARE_READ,NULL,OPEN_ALWAYS,FILE_ATTRIBUTE_NORMAL,NULL);
    if(m_hLogFile!=INVALID_HANDLE_VALUE)
    {
        hr=s->m_pGraph->SetLogFile((DWORD_PTR)m_hLogFile);
        /* ASSERT(SUCCEEDED(hr)); */
    }

    //AddGraphToRot(s->m_pGraph, &s->rotregvalue);
#endif

    ICreateDevEnum *pCreateDevEnum = NULL;
    IEnumMoniker *pEnumMoniker = NULL;
    IMoniker *pMoniker = NULL;

    ULONG nFetched = 0;

    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                          IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
    if(FAILED(hr))
    {
        return -4;
    }

    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
            &pEnumMoniker, 0);
    if (FAILED(hr) || pEnumMoniker == NULL) {
        //printf("no device\n");
        return -5;
    }

    pEnumMoniker->Reset();

    int pos=0;
    while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
    {
        if (pos>=s->devidx)
            break;
        pos++;
        pMoniker->Release();
        pMoniker=NULL;
    }
    if(pMoniker==NULL)
    {
        return -6;
    }

    hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
    if(FAILED(hr))
    {
        return -7;
    }

    s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

    pMoniker->Release();
    pEnumMoniker->Release();
    pCreateDevEnum->Release();

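    // Probe pixel formats in order of preference; the first branch keeps the requested format unchanged.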
    if (try_format(s, s->pix_fmt)==0)
        s->pix_fmt = s->pix_fmt;
    else if (try_format(s,MS_YUV420P)==0)
        s->pix_fmt = MS_YUV420P;
    else if (try_format(s,MS_YUY2)==0)
        s->pix_fmt = MS_YUY2;
    else if (try_format(s,MS_YUYV)==0)
        s->pix_fmt = MS_YUYV;
    else if (try_format(s,MS_UYVY)==0)
        s->pix_fmt = MS_UYVY;
    else if (try_format(s,MS_RGB24)==0)
        s->pix_fmt = MS_RGB24;
    else
    {
        ms_error("Unsupported video pixel format.");
        return -8;
    }

    if (s->pix_fmt == MS_YUV420P)
        ms_message("Driver supports YUV420P, using that format.");
    else if (s->pix_fmt == MS_YUY2)
        ms_message("Driver supports YUY2 (UYVY), using that format.");
    else if (s->pix_fmt == MS_YUYV)
        ms_message("Driver supports YUV422, using that format.");
    else if (s->pix_fmt == MS_UYVY)
        ms_message("Driver supports UYVY, using that format.");
    else if (s->pix_fmt == MS_RGB24)
        ms_message("Driver supports RGB24, using that format.");

    if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height)==0)
        ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H);
    else
    {
        ms_error("No supported size found for format.");
        /* size not supported? */
        return -9;
    }

    // get DXFilter
    s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
    if(s->m_pDXFilter==NULL)
    {
        return -10;
    }
    s->m_pDXFilter->AddRef();

    // describe the chosen pixel format as a media type for the DXFilter
    CMediaType mt;
    mt.SetType(&MEDIATYPE_Video);

    GUID m = MEDIASUBTYPE_RGB24;
    if (s->pix_fmt == MS_YUV420P)
        m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
    else if (s->pix_fmt == MS_YUY2)
        m = MEDIASUBTYPE_YUY2;
    else if (s->pix_fmt == MS_YUYV)
        m = MEDIASUBTYPE_YUYV;
    else if (s->pix_fmt == MS_UYVY)
        m = MEDIASUBTYPE_UYVY;
    else if (s->pix_fmt == MS_RGB24)
        m = MEDIASUBTYPE_RGB24;
    mt.SetSubtype(&m);

    mt.formattype = FORMAT_VideoInfo;
    mt.SetTemporalCompression(FALSE);

    VIDEOINFO *pvi = (VIDEOINFO *)
                     mt.AllocFormatBuffer(sizeof(VIDEOINFO));
    if (NULL == pvi)
        return -11;
    ZeroMemory(pvi, sizeof(VIDEOINFO));

    if (s->pix_fmt == MS_YUV420P)
        pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
    else if (s->pix_fmt == MS_YUY2)
        pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
    else if (s->pix_fmt == MS_YUYV)
        pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','V');
    else if (s->pix_fmt == MS_UYVY)
        pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
    else if (s->pix_fmt == MS_RGB24)
        pvi->bmiHeader.biCompression = BI_RGB;

    if (s->pix_fmt == MS_YUV420P)
        pvi->bmiHeader.biBitCount = 12;
    else if (s->pix_fmt == MS_YUY2)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_YUYV)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_UYVY)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_RGB24)
        pvi->bmiHeader.biBitCount = 24;

    pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    pvi->bmiHeader.biWidth = s->vsize.width;
    pvi->bmiHeader.biHeight = s->vsize.height;
    pvi->bmiHeader.biPlanes = 1;
    pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
    pvi->bmiHeader.biClrImportant = 0;
    mt.SetSampleSize(pvi->bmiHeader.biSizeImage);
    mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

    hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
    if(FAILED(hr))
    {
        return -12;
    }

    hr = s->m_pDXFilter->SetCallback(Callback);
    if(FAILED(hr))
    {
        return -13;
    }

    hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
                                        (LPVOID *)&s->m_pIDXFilter);
    if(FAILED(hr))
    {
        return -14;
    }

    hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
    if(FAILED(hr))
    {
        return -15;
    }


    // get null renderer
    hr=s->m_pNullRenderer.CoCreateInstance(CLSID_NullRenderer);
    if(FAILED(hr))
    {
        return -16;
    }
    if (s->m_pNullRenderer!=NULL)
    {
        s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
    }

    hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_PREVIEW,
                                     &MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
    if (FAILED(hr))
    {
        // the capture-pin fallback below is disabled, so hr still holds the
        // preview-pin failure and the inner check always returns -17
        //hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_CAPTURE,
        //	&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
        if (FAILED(hr))
        {
            return -17;
        }
    }

    //m_pDXFilter->SetBufferSamples(TRUE);

    s_callback = s;   // register this capture state for the frame callback
    hr = s->m_pControl->Run();
    if(FAILED(hr))
    {
        return -18;
    }

    s->rotregvalue=1;
    return 0;
}
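
The helpers try_format() and try_format_size(), used above to negotiate a pixel format with the capture device, are not part of this listing. For orientation, here is a minimal sketch of how such a probe can be written against the standard IAMStreamConfig interface; the function name, the builder/filter parameters, and the MSPixFmt-to-GUID mapping are illustrative assumptions, not the example's actual code.

// Sketch only: returns 0 if the device advertises the requested pixel
// format on its capture pin, -1 otherwise (matching the convention above).
static int try_format_sketch(ICaptureGraphBuilder2 *pBuilder,
                             IBaseFilter *pDeviceFilter, MSPixFmt pix_fmt)
{
    // map the mediastreamer2 pixel format to a DirectShow subtype GUID
    GUID subtype = MEDIASUBTYPE_RGB24;
    if (pix_fmt == MS_YUV420P)
        subtype = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
    else if (pix_fmt == MS_YUY2)
        subtype = MEDIASUBTYPE_YUY2;
    else if (pix_fmt == MS_YUYV)
        subtype = MEDIASUBTYPE_YUYV;
    else if (pix_fmt == MS_UYVY)
        subtype = MEDIASUBTYPE_UYVY;

    IAMStreamConfig *pConfig = NULL;
    HRESULT hr = pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE,
                     &MEDIATYPE_Video, pDeviceFilter,
                     IID_IAMStreamConfig, (void**)&pConfig);
    if (FAILED(hr))
        return -1;

    int found = -1;
    int count = 0, size = 0;
    pConfig->GetNumberOfCapabilities(&count, &size);
    for (int i = 0; i < count && found != 0; i++)
    {
        AM_MEDIA_TYPE *pmt = NULL;
        VIDEO_STREAM_CONFIG_CAPS caps;
        if (size == sizeof(caps) &&
            SUCCEEDED(pConfig->GetStreamCaps(i, &pmt, (BYTE*)&caps)))
        {
            if (IsEqualGUID(pmt->subtype, subtype))
                found = 0;   // the device can deliver this format
            DeleteMediaType(pmt);   // base-classes helper frees the type
        }
    }
    pConfig->Release();
    return found;
}
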
Example #15
const bool CMediaViewer::OpenViewer(HWND hOwnerHwnd, HWND hMessageDrainHwnd,
			CVideoRenderer::RendererType RendererType,
			LPCWSTR pszVideoDecoder, LPCWSTR pszAudioDevice)
{
	CTryBlockLock Lock(&m_DecoderLock);
	if (!Lock.TryLock(LOCK_TIMEOUT)) {
		SetError(TEXT("タイムアウトエラーです。"));
		return false;
	}

	if (m_bInit) {
		SetError(TEXT("既にフィルタグラフが構築されています。"));
		return false;
	}

	TRACE(TEXT("CMediaViewer::OpenViewer() フィルタグラフ作成開始\n"));

	HRESULT hr=S_OK;

	IPin *pOutput=NULL;
	IPin *pOutputVideo=NULL;
	IPin *pOutputAudio=NULL;

	try {
		// Build the filter graph manager
		hr=::CoCreateInstance(CLSID_FilterGraph,NULL,CLSCTX_INPROC_SERVER,
				IID_IGraphBuilder,pointer_cast<LPVOID*>(&m_pFilterGraph));
		if (FAILED(hr)) {
			throw CBonException(hr,TEXT("フィルタグラフマネージャを作成できません。"));
		}
#ifdef _DEBUG
		AddToRot(m_pFilterGraph, &m_dwRegister);
#endif

		// Query the IMediaControl interface
		hr=m_pFilterGraph->QueryInterface(IID_IMediaControl, pointer_cast<void**>(&m_pMediaControl));
		if (FAILED(hr)) {
			throw CBonException(hr,TEXT("メディアコントロールを取得できません。"));
		}

		Trace(TEXT("ソースフィルタの接続中..."));

		/* CBonSrcFilter */
		{
			// Create the instance
			m_pSrcFilter = static_cast<CBonSrcFilter*>(CBonSrcFilter::CreateInstance(NULL, &hr));
			if (m_pSrcFilter == NULL || FAILED(hr))
				throw CBonException(hr, TEXT("ソースフィルタを作成できません。"));
			m_pSrcFilter->SetOutputWhenPaused(RendererType == CVideoRenderer::RENDERER_DEFAULT);
			// Add it to the filter graph
			hr = m_pFilterGraph->AddFilter(m_pSrcFilter, L"BonSrcFilter");
			if (FAILED(hr))
				throw CBonException(hr, TEXT("ソースフィルタをフィルタグラフに追加できません。"));
			// Get the output pin
			pOutput = DirectShowUtil::GetFilterPin(m_pSrcFilter, PINDIR_OUTPUT);
			if (pOutput==NULL)
				throw CBonException(TEXT("ソースフィルタの出力ピンを取得できません。"));
			m_pSrcFilter->EnableSync(m_bEnablePTSSync);
		}

		Trace(TEXT("MPEG-2 Demultiplexerフィルタの接続中..."));

		/* MPEG-2 Demultiplexer */
		{
			CMediaType MediaTypeVideo;
			CMediaType MediaTypeAudio;
			IMpeg2Demultiplexer *pMpeg2Demuxer;

			hr=::CoCreateInstance(CLSID_MPEG2Demultiplexer,NULL,
					CLSCTX_INPROC_SERVER,IID_IBaseFilter,
					pointer_cast<LPVOID*>(&m_pMp2DemuxFilter));
			if (FAILED(hr))
				throw CBonException(hr,TEXT("MPEG-2 Demultiplexerフィルタを作成できません。"),
									TEXT("MPEG-2 Demultiplexerフィルタがインストールされているか確認してください。"));
			hr=DirectShowUtil::AppendFilterAndConnect(m_pFilterGraph,
								m_pMp2DemuxFilter,L"Mpeg2Demuxer",&pOutput);
			if (FAILED(hr))
				throw CBonException(hr,TEXT("MPEG-2 Demultiplexerをフィルタグラフに追加できません。"));
			// pOutput should already be NULL at this point, but release it just in case
			SAFE_RELEASE(pOutput);

			// Query the IMpeg2Demultiplexer interface
			hr=m_pMp2DemuxFilter->QueryInterface(IID_IMpeg2Demultiplexer,
												 pointer_cast<void**>(&pMpeg2Demuxer));
			if (FAILED(hr))
				throw CBonException(hr,TEXT("MPEG-2 Demultiplexerインターフェースを取得できません。"),
									TEXT("互換性のないスプリッタの優先度がMPEG-2 Demultiplexerより高くなっている可能性があります。"));

			// Set up the video media format
			hr = SetVideoMediaType(&MediaTypeVideo, 1920, 1080);
			if (FAILED(hr))
				throw CBonException(TEXT("メモリが確保できません。"));
			// Create the video output pin
			hr = pMpeg2Demuxer->CreateOutputPin(&MediaTypeVideo, L"Video", &pOutputVideo);
			if (FAILED(hr)) {
				pMpeg2Demuxer->Release();
				throw CBonException(hr, TEXT("MPEG-2 Demultiplexerの映像出力ピンを作成できません。"));
			}
			// Set up the audio media format
			MediaTypeAudio.InitMediaType();
			MediaTypeAudio.SetType(&MEDIATYPE_Audio);
			MediaTypeAudio.SetSubtype(&MEDIASUBTYPE_NULL);
			MediaTypeAudio.SetVariableSize();
			MediaTypeAudio.SetTemporalCompression(TRUE);
			MediaTypeAudio.SetSampleSize(0);
			MediaTypeAudio.SetFormatType(&FORMAT_None);
			// Create the audio output pin
			hr=pMpeg2Demuxer->CreateOutputPin(&MediaTypeAudio,L"Audio",&pOutputAudio);
			pMpeg2Demuxer->Release();
			if (FAILED(hr))
				throw CBonException(hr,TEXT("MPEG-2 Demultiplexerの音声出力ピンを作成できません。"));
			// Query the video output pin's IMPEG2PIDMap interface
			hr=pOutputVideo->QueryInterface(__uuidof(IMPEG2PIDMap),pointer_cast<void**>(&m_pMp2DemuxVideoMap));
			if (FAILED(hr))
				throw CBonException(hr,TEXT("映像出力ピンのIMPEG2PIDMapを取得できません。"));
			// Query the audio output pin's IMPEG2PIDMap interface
			hr=pOutputAudio->QueryInterface(__uuidof(IMPEG2PIDMap),pointer_cast<void**>(&m_pMp2DemuxAudioMap));
			if (FAILED(hr))
				throw CBonException(hr,TEXT("音声出力ピンのIMPEG2PIDMapを取得できません。"));
		}

#ifndef BONTSENGINE_H264_SUPPORT
		Trace(TEXT("MPEG-2パーサフィルタの接続中..."));

		/* CMpeg2ParserFilter */
		{
			// Create the instance
			m_pMpeg2Parser = static_cast<CMpeg2ParserFilter*>(CMpeg2ParserFilter::CreateInstance(NULL, &hr));
			if ((!m_pMpeg2Parser) || FAILED(hr))
				throw CBonException(hr,TEXT("MPEG-2パーサフィルタを作成できません。"));
			m_pMpeg2Parser->SetVideoInfoCallback(OnVideoInfo,this);
			// madVR does not pick up a new video size unless the MediaType is set when the size changes
			m_pMpeg2Parser->SetAttachMediaType(RendererType==CVideoRenderer::RENDERER_madVR);
			// Add and connect the filter
			hr=DirectShowUtil::AppendFilterAndConnect(
				m_pFilterGraph,m_pMpeg2Parser,L"Mpeg2ParserFilter",&pOutputVideo);
			if (FAILED(hr))
				throw CBonException(hr,TEXT("MPEG-2パーサフィルタをフィルタグラフに追加できません。"));
		}
#else
		Trace(TEXT("H.264パーサフィルタの接続中..."));

		/* CH264ParserFilter */
		{
			// Create the instance
			m_pH264Parser = static_cast<CH264ParserFilter*>(CH264ParserFilter::CreateInstance(NULL, &hr));
			if ((!m_pH264Parser) || FAILED(hr))
				throw CBonException(TEXT("H.264パーサフィルタを作成できません。"));
			m_pH264Parser->SetVideoInfoCallback(OnVideoInfo,this);
			m_pH264Parser->SetAdjustTime(m_bAdjustVideoSampleTime);
			m_pH264Parser->SetAdjustFrameRate(m_bAdjustFrameRate);
			// madVR does not pick up a new video size unless the MediaType is set when the size changes
			m_pH264Parser->SetAttachMediaType(RendererType==CVideoRenderer::RENDERER_madVR);
			// Add and connect the filter
			hr=DirectShowUtil::AppendFilterAndConnect(
				m_pFilterGraph,m_pH264Parser,L"H264ParserFilter",&pOutputVideo);
			if (FAILED(hr))
				throw CBonException(hr,TEXT("H.264パーサフィルタをフィルタグラフに追加できません。"));
		}
#endif	// BONTSENGINE_H264_SUPPORT

		Trace(TEXT("音声デコーダの接続中..."));

#if 1
		/* CAudioDecFilter */
		{
			// Create the CAudioDecFilter instance
			m_pAudioDecoder = static_cast<CAudioDecFilter*>(CAudioDecFilter::CreateInstance(NULL, &hr));
			if (!m_pAudioDecoder || FAILED(hr))
				throw CBonException(hr,TEXT("音声デコーダフィルタを作成できません。"));
			// Add and connect the filter
			hr=DirectShowUtil::AppendFilterAndConnect(
				m_pFilterGraph,m_pAudioDecoder,L"AudioDecFilter",&pOutputAudio);
			if (FAILED(hr))
				throw CBonException(hr,TEXT("音声デコーダフィルタをフィルタグラフに追加できません。"));

			m_pAudioDecoder->SetJitterCorrection(m_bAdjustAudioStreamTime);
			if (m_pAudioStreamCallback)
				m_pAudioDecoder->SetStreamCallback(m_pAudioStreamCallback,
												   m_pAudioStreamCallbackParam);
		}
#else
		/*
			Using an external AAC decoder causes problems such as the sound
			cutting out when the channel count changes, and dual mono being
			played back as stereo.
		*/

		/* CAacParserFilter */
		{
			CAacParserFilter *m_pAacParser;
			// Create the CAacParserFilter instance
			m_pAacParser=static_cast<CAacParserFilter*>(CAacParserFilter::CreateInstance(NULL, &hr));
			if (!m_pAacParser || FAILED(hr))
				throw CBonException(hr,TEXT("AACパーサフィルタを作成できません。"));
			// Add and connect the filter
			hr=DirectShowUtil::AppendFilterAndConnect(
				m_pFilterGraph,m_pAacParser,L"AacParserFilter",&pOutputAudio);
			if (FAILED(hr))
				throw CBonException(TEXT("AACパーサフィルタをフィルタグラフに追加できません。"));
			m_pAacParser->Release();
		}

		/* AAC decoder */
		{
			CDirectShowFilterFinder FilterFinder;

			// Search for candidate filters
			if(!FilterFinder.FindFilter(&MEDIATYPE_Audio,&MEDIASUBTYPE_AAC))
				throw CBonException(TEXT("AACデコーダが見付かりません。"),
									TEXT("AACデコーダがインストールされているか確認してください。"));

			WCHAR szAacDecoder[128];
			CLSID idAac;
			bool bConnectSuccess=false;
			IBaseFilter *pAacDecFilter=NULL;

			for (int i=0;i<FilterFinder.GetFilterCount();i++){
				if (FilterFinder.GetFilterInfo(i,&idAac,szAacDecoder,128)) {
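					// NOTE: pszAudioDecoder is not declared in this function;
					// this disabled branch would need fixing before it could compile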
					if (pszAudioDecoder!=NULL && pszAudioDecoder[0]!='\0'
							&& ::lstrcmpi(szAacDecoder,pszAudioDecoder)!=0)
						continue;
					hr=DirectShowUtil::AppendFilterAndConnect(m_pFilterGraph,
							idAac,szAacDecoder,&pAacDecFilter,
							&pOutputAudio);
					if (SUCCEEDED(hr)) {
						TRACE(TEXT("AAC decoder connected : %s\n"),szAacDecoder);
						bConnectSuccess=true;
						break;
					}
				}
			}
			// Did any of the filters connect?
			if (bConnectSuccess) {
				SAFE_RELEASE(pAacDecFilter);
				//m_pszAacDecoderName=StdUtil::strdup(szAacDecoder);
			} else {
				throw CBonException(TEXT("AACデコーダフィルタをフィルタグラフに追加できません。"),
									TEXT("設定で有効なAACデコーダが選択されているか確認してください。"));
			}
		}
#endif

		/* Connect the user-specified audio filter */
		if (m_pszAudioFilterName) {
			Trace(TEXT("音声フィルタの接続中..."));

			// Search for candidate filters
			bool bConnectSuccess=false;
			CDirectShowFilterFinder FilterFinder;
			if (FilterFinder.FindFilter(&MEDIATYPE_Audio,&MEDIASUBTYPE_PCM)) {
				WCHAR szAudioFilter[128];
				CLSID idAudioFilter;

				for (int i=0;i<FilterFinder.GetFilterCount();i++) {
					if (FilterFinder.GetFilterInfo(i,&idAudioFilter,szAudioFilter,128)) {
						if (::lstrcmpi(m_pszAudioFilterName,szAudioFilter)!=0)
							continue;
						hr=DirectShowUtil::AppendFilterAndConnect(m_pFilterGraph,
								idAudioFilter,szAudioFilter,&m_pAudioFilter,
								&pOutputAudio,NULL,true);
						if (SUCCEEDED(hr)) {
							TRACE(TEXT("音声フィルタ接続 : %s\n"),szAudioFilter);
							bConnectSuccess=true;
						}
						break;
					}
				}
			}
			if (!bConnectSuccess) {
				throw CBonException(hr,
					TEXT("音声フィルタをフィルタグラフに追加できません。"),
					TEXT("音声フィルタが利用できないか、音声デバイスに対応していない可能性があります。"));
			}
		}

#ifndef BONTSENGINE_H264_SUPPORT
		Trace(TEXT("MPEG-2デコーダの接続中..."));

		/* MPEG-2 decoder */
		{
			CDirectShowFilterFinder FilterFinder;

			// Search for candidate filters
			if(!FilterFinder.FindFilter(&MEDIATYPE_Video,&MEDIASUBTYPE_MPEG2_VIDEO))
				throw CBonException(TEXT("MPEG-2デコーダが見付かりません。"),
									TEXT("MPEG-2デコーダがインストールされているか確認してください。"));

			WCHAR szMpeg2Decoder[128];
			CLSID idMpeg2Vid;
			bool bConnectSuccess=false;

			for (int i=0;i<FilterFinder.GetFilterCount();i++){
				if (FilterFinder.GetFilterInfo(i,&idMpeg2Vid,szMpeg2Decoder,128)) {
					if (pszVideoDecoder!=NULL && pszVideoDecoder[0]!='\0'
							&& ::lstrcmpi(szMpeg2Decoder,pszVideoDecoder)!=0)
						continue;
					hr=DirectShowUtil::AppendFilterAndConnect(m_pFilterGraph,
							idMpeg2Vid,szMpeg2Decoder,&m_pVideoDecoderFilter,
							&pOutputVideo,NULL,true);
					if (SUCCEEDED(hr)) {
						bConnectSuccess=true;
						break;
					}
				}
			}
			// Did any of the filters connect?
			if (bConnectSuccess) {
				m_pszVideoDecoderName=StdUtil::strdup(szMpeg2Decoder);
			} else {
				throw CBonException(hr,TEXT("MPEG-2デコーダフィルタをフィルタグラフに追加できません。"),
					TEXT("設定で有効なMPEG-2デコーダが選択されているか確認してください。\nまた、レンダラを変えてみてください。"));
			}
		}

#ifndef MPEG2PARSERFILTER_INPLACE
		/*
			Intended to work around 1080x1080 (4:3) video being displayed as a
			square with the combination of the CyberLink decoder and the
			default renderer... but it misbehaved, so this is on hold.
		*/
		if (::StrStrI(m_pszVideoDecoderName, TEXT("CyberLink")) != NULL)
			m_pMpeg2Parser->SetFixSquareDisplay(true);
#endif
#else	// ndef BONTSENGINE_H264_SUPPORT
		Trace(TEXT("H.264デコーダの接続中..."));

		/* H.264 decoder */
		{
			CDirectShowFilterFinder FilterFinder;

			// Search for candidate filters
			if(!FilterFinder.FindFilter(&MEDIATYPE_Video,&MEDIASUBTYPE_H264))
				throw CBonException(TEXT("H.264デコーダが見付かりません。"),
									TEXT("H.264デコーダがインストールされているか確認してください。"));

			WCHAR szH264Decoder[128];
			CLSID idH264Decoder;
			bool bConnectSuccess=false;

			for (int i=0;i<FilterFinder.GetFilterCount();i++){
				if (FilterFinder.GetFilterInfo(i,&idH264Decoder,szH264Decoder,128)) {
					if (pszVideoDecoder!=NULL && pszVideoDecoder[0]!='\0'
							&& ::lstrcmpi(szH264Decoder,pszVideoDecoder)!=0)
						continue;
					hr=DirectShowUtil::AppendFilterAndConnect(m_pFilterGraph,
							idH264Decoder,szH264Decoder,&m_pVideoDecoderFilter,
							&pOutputVideo,NULL,true);
					if (SUCCEEDED(hr)) {
						bConnectSuccess=true;
						break;
					}
				}
			}
			// Did any of the filters connect?
			if (bConnectSuccess) {
				m_pszVideoDecoderName=StdUtil::strdup(szH264Decoder);
			} else {
				throw CBonException(hr,TEXT("H.264デコーダフィルタをフィルタグラフに追加できません。"),
					TEXT("設定で有効なH.264デコーダが選択されているか確認してください。\nまた、レンダラを変えてみてください。"));
			}
		}
#endif	// BONTSENGINE_H264_SUPPORT

		Trace(TEXT("映像レンダラの構築中..."));

		if (!CVideoRenderer::CreateRenderer(RendererType,&m_pVideoRenderer)) {
			throw CBonException(TEXT("映像レンダラを作成できません。"),
								TEXT("設定で有効なレンダラが選択されているか確認してください。"));
		}
		if (!m_pVideoRenderer->Initialize(m_pFilterGraph,pOutputVideo,
										  hOwnerHwnd,hMessageDrainHwnd)) {
			throw CBonException(m_pVideoRenderer->GetLastErrorException());
		}
		m_VideoRendererType=RendererType;

		Trace(TEXT("音声レンダラの構築中..."));

		// Build the audio renderer
		{
			bool fOK = false;

			if (pszAudioDevice != NULL && pszAudioDevice[0] != '\0') {
				CDirectShowDeviceEnumerator DevEnum;

				if (DevEnum.CreateFilter(CLSID_AudioRendererCategory,
										 pszAudioDevice, &m_pAudioRenderer)) {
					m_pszAudioRendererName=StdUtil::strdup(pszAudioDevice);
					fOK = true;
				}
			}
			if (!fOK) {
				hr = ::CoCreateInstance(CLSID_DSoundRender, NULL,
										CLSCTX_INPROC_SERVER, IID_IBaseFilter,
										pointer_cast<LPVOID*>(&m_pAudioRenderer));
				if (SUCCEEDED(hr)) {
					m_pszAudioRendererName=StdUtil::strdup(TEXT("DirectSound Renderer"));
					fOK = true;
				}
			}
			if (fOK) {
				hr = DirectShowUtil::AppendFilterAndConnect(m_pFilterGraph,
						m_pAudioRenderer, L"Audio Renderer", &pOutputAudio);
				if (SUCCEEDED(hr)) {
#ifdef _DEBUG
					if (pszAudioDevice != NULL && pszAudioDevice[0] != '\0')
						TRACE(TEXT("音声デバイス %s を接続\n"), pszAudioDevice);
#endif
					if (m_bUseAudioRendererClock) {
						IMediaFilter *pMediaFilter;

						if (SUCCEEDED(m_pFilterGraph->QueryInterface(IID_IMediaFilter,
								pointer_cast<void**>(&pMediaFilter)))) {
							IReferenceClock *pReferenceClock;

							if (SUCCEEDED(m_pAudioRenderer->QueryInterface(IID_IReferenceClock,
									pointer_cast<void**>(&pReferenceClock)))) {
								pMediaFilter->SetSyncSource(pReferenceClock);
								pReferenceClock->Release();
								TRACE(TEXT("グラフのクロックに音声レンダラを選択\n"));
							}
							pMediaFilter->Release();
						}
					}
					fOK = true;
				} else {
					fOK = false;
				}
				if (!fOK) {
					hr = m_pFilterGraph->Render(pOutputAudio);
					if (FAILED(hr))
						throw CBonException(hr, TEXT("音声レンダラを接続できません。"),
							TEXT("設定で有効な音声デバイスが選択されているか確認してください。"));
				}
			} else {
				// No audio device?
				// Attach a Null renderer so the graph can still be built
				hr = ::CoCreateInstance(CLSID_NullRenderer, NULL,
										CLSCTX_INPROC_SERVER, IID_IBaseFilter,
										pointer_cast<LPVOID*>(&m_pAudioRenderer));
				if (SUCCEEDED(hr)) {
					hr = DirectShowUtil::AppendFilterAndConnect(m_pFilterGraph,
						m_pAudioRenderer, L"Null Audio Renderer", &pOutputAudio);
					if (FAILED(hr)) {
						throw CBonException(hr, TEXT("Null音声レンダラを接続できません。"));
					}
					m_pszAudioRendererName=StdUtil::strdup(TEXT("Null Renderer"));
					TRACE(TEXT("Nullレンダラを接続\n"));
				}
			}
		}

		/*
			The MPEG-2 Demultiplexer is supposedly set as the graph clock by
			default, but set it explicitly just in case.
		*/
		if (!m_bUseAudioRendererClock) {
			IMediaFilter *pMediaFilter;

			if (SUCCEEDED(m_pFilterGraph->QueryInterface(
					IID_IMediaFilter,pointer_cast<void**>(&pMediaFilter)))) {
				IReferenceClock *pReferenceClock;

				if (SUCCEEDED(m_pMp2DemuxFilter->QueryInterface(
						IID_IReferenceClock,pointer_cast<void**>(&pReferenceClock)))) {
					pMediaFilter->SetSyncSource(pReferenceClock);
					pReferenceClock->Release();
					TRACE(TEXT("グラフのクロックにMPEG-2 Demultiplexerを選択\n"));
				}
				pMediaFilter->Release();
			}
		}

		// Set the owner window
		m_hOwnerWnd = hOwnerHwnd;
		RECT rc;
		::GetClientRect(hOwnerHwnd, &rc);
		m_wVideoWindowX = (WORD)rc.right;
		m_wVideoWindowY = (WORD)rc.bottom;

		m_bInit=true;

		// Map the current video/audio elementary-stream PIDs onto the demuxer output pins
		ULONG PID;
		if (m_wVideoEsPID != PID_INVALID) {
			PID = m_wVideoEsPID;
			if (FAILED(m_pMp2DemuxVideoMap->MapPID(1, &PID, MEDIA_ELEMENTARY_STREAM)))
				m_wVideoEsPID = PID_INVALID;
		}
		if (m_wAudioEsPID != PID_INVALID) {
			PID = m_wAudioEsPID;
			if (FAILED(m_pMp2DemuxAudioMap->MapPID(1, &PID, MEDIA_ELEMENTARY_STREAM)))
				m_wAudioEsPID = PID_INVALID;
		}
	} catch (CBonException &Exception) {
		SetError(Exception);
		if (Exception.GetErrorCode()!=0) {
			TCHAR szText[MAX_ERROR_TEXT_LEN+32];
			int Length;

			Length=::AMGetErrorText(Exception.GetErrorCode(),szText,MAX_ERROR_TEXT_LEN);
			::wsprintf(szText+Length,TEXT("\nエラーコード(HRESULT) 0x%08X"),Exception.GetErrorCode());
			SetErrorSystemMessage(szText);
		}

		SAFE_RELEASE(pOutput);
		SAFE_RELEASE(pOutputVideo);
		SAFE_RELEASE(pOutputAudio);
		CloseViewer();

		TRACE(TEXT("フィルタグラフ構築失敗 : %s\n"), GetLastErrorText());
		return false;
	}

	SAFE_RELEASE(pOutputVideo);
	SAFE_RELEASE(pOutputAudio);

	ClearError();

	TRACE(TEXT("フィルタグラフ構築成功\n"));
	return true;
}
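
Two helpers referenced in Example #15 are not part of the listing. SetVideoMediaType() configures the media type for the demultiplexer's video output pin; below is a minimal sketch, assuming an MPEG-2 elementary stream described by a FORMAT_MPEG2Video block. The field choices are illustrative assumptions; the real helper may fill in more (frame rate, aspect ratio, sequence header).

// Sketch only: one plausible shape for a SetVideoMediaType-style helper.
static HRESULT SetVideoMediaTypeSketch(CMediaType *pMediaType, LONG Width, LONG Height)
{
	pMediaType->InitMediaType();
	pMediaType->SetType(&MEDIATYPE_Video);
	pMediaType->SetSubtype(&MEDIASUBTYPE_MPEG2_VIDEO);
	pMediaType->SetFormatType(&FORMAT_MPEG2Video);

	MPEG2VIDEOINFO *pVideoInfo =
		(MPEG2VIDEOINFO*)pMediaType->AllocFormatBuffer(sizeof(MPEG2VIDEOINFO));
	if (pVideoInfo == NULL)
		return E_OUTOFMEMORY;
	::ZeroMemory(pVideoInfo, sizeof(MPEG2VIDEOINFO));

	// the VIDEOINFOHEADER2 part carries the nominal picture geometry
	pVideoInfo->hdr.rcSource.right = Width;
	pVideoInfo->hdr.rcSource.bottom = Height;
	pVideoInfo->hdr.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	pVideoInfo->hdr.bmiHeader.biWidth = Width;
	pVideoInfo->hdr.bmiHeader.biHeight = Height;

	return S_OK;
}

AddToRot(), called in the _DEBUG build right after the graph is created, follows the standard DirectShow SDK recipe for publishing a graph in the Running Object Table so that GraphEdit can attach to the running process; a matching RemoveFromRot(m_dwRegister) would undo the registration. For reference, the canonical SDK helper looks like this (StringCchPrintfW comes from <strsafe.h>):

// Standard DirectShow SDK helper: register the filter graph in the ROT.
HRESULT AddToRot(IUnknown *pUnkGraph, DWORD *pdwRegister)
{
	IMoniker *pMoniker = NULL;
	IRunningObjectTable *pROT = NULL;

	if (FAILED(::GetRunningObjectTable(0, &pROT)))
		return E_FAIL;

	// the "FilterGraph %08x pid %08x" name is what GraphEdit looks for
	WCHAR wsz[256];
	::StringCchPrintfW(wsz, 256, L"FilterGraph %08x pid %08x",
					   (DWORD)(DWORD_PTR)pUnkGraph, ::GetCurrentProcessId());

	HRESULT hr = ::CreateItemMoniker(L"!", wsz, &pMoniker);
	if (SUCCEEDED(hr)) {
		// keep-alive registration so the entry survives while the graph runs
		hr = pROT->Register(ROTFLAGS_REGISTRATIONKEEPSALIVE, pUnkGraph,
							pMoniker, pdwRegister);
		pMoniker->Release();
	}
	pROT->Release();
	return hr;
}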