Example #1
CMediaType CLAVFVideoHelper::initVideoType(AVCodecID codecId, unsigned int &codecTag, std::string container)
{
  CMediaType mediaType;
  mediaType.InitMediaType();
  mediaType.majortype = MEDIATYPE_Video;
  mediaType.subtype = FOURCCMap(codecTag);
  mediaType.formattype = FORMAT_VideoInfo; //default value

  // Check against values from the map above
  for(unsigned i = 0; i < countof(video_map); ++i) {
    if (video_map[i].codec == codecId) {
      if (video_map[i].subtype)
        mediaType.subtype = *video_map[i].subtype;
      if (video_map[i].codecTag)
        codecTag = video_map[i].codecTag;
      if (video_map[i].format)
        mediaType.formattype = *video_map[i].format;
      break;
    }
  }

  switch(codecId)
  {
  // All these codecs should use VideoInfo2
  case AV_CODEC_ID_ASV1:
  case AV_CODEC_ID_ASV2:
  case AV_CODEC_ID_FLV1:
  case AV_CODEC_ID_HUFFYUV:
  case AV_CODEC_ID_WMV3:
    mediaType.formattype = FORMAT_VideoInfo2;
    break;
  case AV_CODEC_ID_MPEG4:
    if (container == "mp4") {
      mediaType.formattype = FORMAT_MPEG2Video;
    } else if (container == "mpegts") {
      mediaType.formattype = FORMAT_VideoInfo2;
      mediaType.subtype = MEDIASUBTYPE_MP4V;
    } else {
      mediaType.formattype = FORMAT_VideoInfo2;
    }
    break;
  case AV_CODEC_ID_VC1:
    if (codecTag != MKTAG('W','M','V','A'))
      codecTag = MKTAG('W','V','C','1');
    mediaType.formattype = FORMAT_VideoInfo2;
    mediaType.subtype = FOURCCMap(codecTag);
    break;
  case AV_CODEC_ID_DVVIDEO:
    if (codecTag == 0)
      mediaType.subtype = MEDIASUBTYPE_DVCP;
    break;
  }

  return mediaType;
}
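
All of these examples build media subtypes with FOURCCMap, the DirectShow helper that embeds a 32-bit FourCC into a GUID. A minimal sketch of the mapping, assuming the stock fourcc.h declaration (FOURCCMap derives from GUID and places the FourCC in Data1 over the base subtype XXXXXXXX-0000-0010-8000-00AA00389B71):

#include <streams.h> // DirectShow base classes; declares FOURCCMap
#include <uuids.h>   // well-known media subtypes

void FourccMapSketch()
{
    FOURCCMap yv12(MAKEFOURCC('Y','V','1','2'));
    GUID g = (GUID)yv12;
    ASSERT(g.Data1 == MAKEFOURCC('Y','V','1','2')); // the FourCC lands in Data1
    ASSERT(g == MEDIASUBTYPE_YV12);                 // registered FourCC subtypes use the same scheme
}

This is why initVideoType can start from FOURCCMap(codecTag) and only override the subtype for codecs that need a dedicated GUID.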
HRESULT CBaseMuxerInputPin::CheckMediaType(const CMediaType* pmt)
{
	if(pmt->formattype == FORMAT_WaveFormatEx)
	{
		WORD wFormatTag = ((WAVEFORMATEX*)pmt->pbFormat)->wFormatTag;
		if((wFormatTag == WAVE_FORMAT_PCM 
		|| wFormatTag == WAVE_FORMAT_EXTENSIBLE
		|| wFormatTag == WAVE_FORMAT_IEEE_FLOAT)
		&& pmt->subtype != FOURCCMap(wFormatTag)
		&& !(pmt->subtype == MEDIASUBTYPE_PCM && wFormatTag == WAVE_FORMAT_EXTENSIBLE)
		&& !(pmt->subtype == MEDIASUBTYPE_PCM && wFormatTag == WAVE_FORMAT_IEEE_FLOAT)
		&& pmt->subtype != MEDIASUBTYPE_DVD_LPCM_AUDIO
		&& pmt->subtype != MEDIASUBTYPE_DOLBY_AC3
		&& pmt->subtype != MEDIASUBTYPE_DTS)
		{
			return E_INVALIDARG;
		}
	}

	return pmt->majortype == MEDIATYPE_Video
		|| pmt->majortype == MEDIATYPE_Audio && pmt->formattype != FORMAT_VorbisFormat
		|| pmt->majortype == MEDIATYPE_Text && pmt->subtype == MEDIASUBTYPE_NULL && pmt->formattype == FORMAT_None
		|| pmt->majortype == MEDIATYPE_Subtitle
		? S_OK
		: E_INVALIDARG;
}
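
The wFormatTag guard above works because, for the simple tags, the canonical audio subtype is exactly the tag run through FOURCCMap. A hedged sketch of the equivalence (standard uuids.h / mmreg.h definitions assumed):

#include <streams.h>
#include <mmreg.h>

void WaveTagSketch()
{
    // WAVE_FORMAT_PCM == 1, WAVE_FORMAT_IEEE_FLOAT == 3
    ASSERT(FOURCCMap(WAVE_FORMAT_PCM) == MEDIASUBTYPE_PCM);
    ASSERT(FOURCCMap(WAVE_FORMAT_IEEE_FLOAT) == MEDIASUBTYPE_IEEE_FLOAT);
}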
CMediaType CLAVFAudioHelper::initAudioType(AVCodecID codecId, unsigned int &codecTag, std::string container)
{
    CMediaType mediaType;
    mediaType.InitMediaType();
    mediaType.majortype = MEDIATYPE_Audio;
    mediaType.subtype = FOURCCMap(codecTag);
    mediaType.formattype = FORMAT_WaveFormatEx; //default value
    mediaType.SetSampleSize(256000);

    // Check against values from the map above
    for(unsigned i = 0; i < countof(audio_map); ++i) {
        if (audio_map[i].codec == codecId) {
            if (audio_map[i].subtype)
                mediaType.subtype = *audio_map[i].subtype;
            if (audio_map[i].codecTag)
                codecTag = audio_map[i].codecTag;
            if (audio_map[i].format)
                mediaType.formattype = *audio_map[i].format;
            break;
        }
    }

    // special cases
    switch(codecId)
    {
    case AV_CODEC_ID_PCM_F64LE:
        // Qt PCM
        if (codecTag == MKTAG('f', 'l', '6', '4')) mediaType.subtype = MEDIASUBTYPE_PCM_FL64_le;
        break;
    }
    return mediaType;
}
Example #4
bool CWAVFile::ProcessWAVEFORMATEX()
{
	if (!m_fmtdata || m_fmtsize < sizeof(WAVEFORMATEX)) {
		return false;
	}

	WAVEFORMATEX* wfe = (WAVEFORMATEX*)m_fmtdata;
	if (wfe->wFormatTag == WAVE_FORMAT_EXTENSIBLE && m_fmtsize >= sizeof(WAVEFORMATEXTENSIBLE)) {
		WAVEFORMATEXTENSIBLE* wfex = (WAVEFORMATEXTENSIBLE*)m_fmtdata;
		m_subtype = wfex->SubFormat;
		if (CountBits(wfex->dwChannelMask) != wfex->Format.nChannels) {
			// fix incorrect dwChannelMask
			wfex->dwChannelMask = GetDefChannelMask(wfex->Format.nChannels);
		}
	} else {
		m_subtype = FOURCCMap(wfe->wFormatTag);
	}

	if (wfe->wFormatTag == WAVE_FORMAT_PCM && (wfe->nChannels > 2 || wfe->wBitsPerSample != 8 && wfe->wBitsPerSample != 16)
			|| wfe->wFormatTag == WAVE_FORMAT_IEEE_FLOAT && wfe->nChannels > 2) {
		// convert incorrect WAVEFORMATEX to WAVEFORMATEXTENSIBLE

		WAVEFORMATEXTENSIBLE* wfex = DNew WAVEFORMATEXTENSIBLE;
		wfex->Format.wFormatTag				= WAVE_FORMAT_EXTENSIBLE;
		wfex->Format.nChannels				= wfe->nChannels;
		wfex->Format.nSamplesPerSec			= wfe->nSamplesPerSec;
		wfex->Format.wBitsPerSample			= (wfe->wBitsPerSample + 7) & ~7;
		wfex->Format.nBlockAlign			= wfex->Format.nChannels * wfex->Format.wBitsPerSample / 8;
		wfex->Format.nAvgBytesPerSec		= wfex->Format.nSamplesPerSec * wfex->Format.nBlockAlign;
		wfex->Format.cbSize					= 22;
		wfex->Samples.wValidBitsPerSample	= wfe->wBitsPerSample;
		wfex->dwChannelMask					= GetDefChannelMask(wfe->nChannels);
		wfex->SubFormat						= m_subtype;

		delete[] m_fmtdata;

		m_fmtdata = (BYTE*)wfex;
		m_fmtsize = sizeof(WAVEFORMATEXTENSIBLE);
		wfe = (WAVEFORMATEX*)m_fmtdata;
	}

	m_samplerate		= wfe->nSamplesPerSec;
	m_bitdepth			= wfe->wBitsPerSample;
	m_channels			= wfe->nChannels;
	if (wfe->wFormatTag == WAVE_FORMAT_EXTENSIBLE && m_fmtsize >= sizeof(WAVEFORMATEXTENSIBLE)) {
		m_layout = ((WAVEFORMATEXTENSIBLE*)m_fmtdata)->dwChannelMask;
	} else {
		m_layout = GetDefChannelMask(wfe->nChannels);
	}
	m_nAvgBytesPerSec	= wfe->nAvgBytesPerSec;
	m_nBlockAlign		= wfe->nBlockAlign;

	m_blocksize			= max(m_nBlockAlign, m_nAvgBytesPerSec / 16); // 62.5 ms
	m_blocksize			-= m_blocksize % m_nBlockAlign;

	return true;
}
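
CountBits and GetDefChannelMask are project helpers, not SDK calls; hypothetical stand-ins with the behavior the code relies on (a population count, and a default speaker mask for a given channel count) could look like this:

#include <windows.h>
#include <ksmedia.h> // SPEAKER_* channel mask bits

// Hypothetical stand-ins, illustration only.
static DWORD CountBits(DWORD v)
{
    DWORD n = 0;
    for (; v; v &= v - 1) // clear the lowest set bit each pass
        n++;
    return n;
}

static DWORD GetDefChannelMask(WORD nChannels)
{
    switch (nChannels) {
    case 1: return SPEAKER_FRONT_CENTER;                     // mono
    case 2: return SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT; // stereo
    case 6: return SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER |
                   SPEAKER_LOW_FREQUENCY | SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT; // 5.1
    default: return (1UL << nChannels) - 1;                  // first N speaker positions
    }
}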
Example #5
HRESULT CVideoPin::GetMediaType(CMediaType* pmt)
{
  GetMediaTypeInternal(pmt);

  if (pmt->subtype == FOURCCMap('1CVW') && m_VC1Override != GUID_NULL)
  {
    pmt->subtype = m_VC1Override;
    LogDebug("vid: GetMediaType - force VC-1 GUID");
  }

  return S_OK;
}
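
FOURCCMap('1CVW') leans on MSVC multi-character literals: '1CVW' packs its characters high byte first, producing the same DWORD as the FourCC "WVC1" written low byte first. A sketch of the equivalence (MSVC packing assumed; multi-char literal values are implementation-defined):

// MKTAG as used by ffmpeg/LAV: first character in the low byte
#define MKTAG(a,b,c,d) ((a) | ((b) << 8) | ((c) << 16) | ((unsigned)(d) << 24))

static_assert('1CVW' == MKTAG('W','V','C','1'), "'1CVW' is the FourCC WVC1");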
Example #6
STDAPI CreateAudioMediaType(
    const WAVEFORMATEX *pwfx,
    AM_MEDIA_TYPE *pmt,
    BOOL bSetFormat
)
{
    pmt->majortype            = MEDIATYPE_Audio;
    if (pwfx->wFormatTag == WAVE_FORMAT_EXTENSIBLE) {
        pmt->subtype = ((PWAVEFORMATEXTENSIBLE)pwfx)->SubFormat;
    } else {
        pmt->subtype              = FOURCCMap(pwfx->wFormatTag);
    }
    pmt->formattype           = FORMAT_WaveFormatEx;
    pmt->bFixedSizeSamples    = TRUE;
    pmt->bTemporalCompression = FALSE;
    pmt->lSampleSize          = pwfx->nBlockAlign;
    pmt->pUnk                 = NULL;
    if (bSetFormat) {
        if (pwfx->wFormatTag == WAVE_FORMAT_PCM) {
            pmt->cbFormat         = sizeof(WAVEFORMATEX);
        } else {
            pmt->cbFormat         = sizeof(WAVEFORMATEX) + pwfx->cbSize;
        }
        pmt->pbFormat             = (PBYTE)CoTaskMemAlloc(pmt->cbFormat);
        if (pmt->pbFormat == NULL) {
            return E_OUTOFMEMORY;
        }
        if (pwfx->wFormatTag == WAVE_FORMAT_PCM) {
            CopyMemory(pmt->pbFormat, pwfx, sizeof(PCMWAVEFORMAT));
            ((WAVEFORMATEX *)pmt->pbFormat)->cbSize = 0;
        } else {
            CopyMemory(pmt->pbFormat, pwfx, pmt->cbFormat);
        }
    }
    return S_OK;
}
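
A sketch of driving this helper for plain 16-bit stereo PCM (hypothetical usage; FreeMediaType from the base classes releases the format block the helper allocates):

WAVEFORMATEX wfx = {};
wfx.wFormatTag      = WAVE_FORMAT_PCM;
wfx.nChannels       = 2;
wfx.nSamplesPerSec  = 48000;
wfx.wBitsPerSample  = 16;
wfx.nBlockAlign     = wfx.nChannels * wfx.wBitsPerSample / 8;
wfx.nAvgBytesPerSec = wfx.nSamplesPerSec * wfx.nBlockAlign;

AM_MEDIA_TYPE mt = {};
if (SUCCEEDED(CreateAudioMediaType(&wfx, &mt, TRUE))) {
    // mt.subtype is now FOURCCMap(WAVE_FORMAT_PCM), i.e. MEDIASUBTYPE_PCM
    FreeMediaType(mt); // frees mt.pbFormat allocated by the helper
}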
Example #7
HRESULT CSubtitleInputPin::CheckMediaType(const CMediaType* pmt)
{
    return pmt->majortype == MEDIATYPE_Text && (pmt->subtype == MEDIASUBTYPE_NULL || pmt->subtype == FOURCCMap((DWORD)0))
           || pmt->majortype == MEDIATYPE_Subtitle && pmt->subtype == MEDIASUBTYPE_UTF8
           || pmt->majortype == MEDIATYPE_Subtitle && (pmt->subtype == MEDIASUBTYPE_SSA || pmt->subtype == MEDIASUBTYPE_ASS || pmt->subtype == MEDIASUBTYPE_ASS2)
           || pmt->majortype == MEDIATYPE_Subtitle && pmt->subtype == MEDIASUBTYPE_SSF
           || pmt->majortype == MEDIATYPE_Subtitle && (pmt->subtype == MEDIASUBTYPE_VOBSUB)
           || IsHdmvSub(pmt)
           ? S_OK
           : E_FAIL;
}
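
Note that MEDIASUBTYPE_NULL and FOURCCMap((DWORD)0) are two different GUIDs, which is why the text branch checks both: the FourCC map keeps the ...-0000-0010-8000-00AA00389B71 tail, while MEDIASUBTYPE_NULL is GUID_NULL, all zeroes (assuming the stock uuids.h definitions):

// MEDIASUBTYPE_NULL: 00000000-0000-0000-0000-000000000000
// FOURCCMap(0):      00000000-0000-0010-8000-00AA00389B71
ASSERT(MEDIASUBTYPE_NULL != FOURCCMap((DWORD)0));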
Example #8
static int v4w_open_videodevice(V4wState *s, int format, MSVideoSize *vsize)
{
	// Initialize COM
	CoInitialize(NULL);

	// get a Graph
	HRESULT hr=s->m_pGraph.CoCreateInstance(CLSID_FilterGraph);
	if(FAILED(hr))
	{
		return -1;
	}

	// get a CaptureGraphBuilder2
#if !defined(_WIN32_WCE)
	hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder2);
#else
	hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder);
#endif
	if(FAILED(hr))
	{
		return -2;
	}

	// connect capture graph builder with the graph
	s->m_pBuilder->SetFiltergraph(s->m_pGraph);

	// get mediacontrol so we can start and stop the filter graph
	hr=s->m_pGraph.QueryInterface(&(s->m_pControl));
	if(FAILED(hr))
	{
		return -3;
	}

	// get DXFilter
	s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
	if(s->m_pDXFilter==NULL)
	{
		return -4;
	}
	s->m_pDXFilter->AddRef();
	if(FAILED(hr))
	{
		return -4;
	}

	CMediaType mt;
	mt.SetType(&MEDIATYPE_Video);

	if (format==MS_YUV420P)
	{
		GUID m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
		mt.SetSubtype(&m);
		mt.SetSubtype(&MEDIASUBTYPE_YV12); // second call wins: the subtype ends up YV12, not I420
	}
	else //if (format==MS_RGB24)
	{
		mt.SetSubtype(&MEDIASUBTYPE_RGB24);
	}

	//mt.SetSubtype(&MEDIASUBTYPE_IYUV);
	//mt.SetSubtype(&MEDIASUBTYPE_YUYV);
	//mt.SetSubtype(&MEDIASUBTYPE_RGB24);
	//mt.SetSampleSize();
	mt.formattype = FORMAT_VideoInfo;
	mt.SetTemporalCompression(FALSE);

	VIDEOINFO *pvi = (VIDEOINFO *)
	mt.AllocFormatBuffer(sizeof(VIDEOINFO));
	if (NULL == pvi)
		return E_OUTOFMEMORY;
	ZeroMemory(pvi, sizeof(VIDEOINFO));
	if (format==MS_YUV420P)
	{
		pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','V','1','2'); // overwrites I420: YV12 wins
		pvi->bmiHeader.biBitCount = 12;
	}
	else
	{
		pvi->bmiHeader.biCompression = BI_RGB;
		pvi->bmiHeader.biBitCount = 24;
	}
	pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	pvi->bmiHeader.biWidth = vsize->width;
	pvi->bmiHeader.biHeight = vsize->height;
	pvi->bmiHeader.biPlanes = 1;
	pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
	pvi->bmiHeader.biClrImportant = 0;
	mt.SetSampleSize(pvi->bmiHeader.biSizeImage);
	mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

	hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
	if(FAILED(hr))
	{
		return -5;
	}

	hr = s->m_pDXFilter->SetCallback(Callback); 
	if(FAILED(hr))
	{
		return -6;
	}

	hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
	 (LPVOID *)&s->m_pIDXFilter);
	if(FAILED(hr))
	{
		return -7;
	}

	hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
	if(FAILED(hr))
	{
		return -8;
	}

#ifdef WM6
	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;

	ULONG nFetched = 0;

	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
	if(FAILED(hr))
	{
		return -9;
	}

	hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (FAILED(hr) || pEnumMoniker == NULL) {
		//printf("no device\n");
		return -10;
	}

	pEnumMoniker->Reset();

	hr = pEnumMoniker->Next(1, &pMoniker, &nFetched);
	if(FAILED(hr) || pMoniker==NULL)
	{
		return -11;
	}

	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
	if(FAILED(hr))
	{
		return -12;
	}

	s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

	pMoniker->Release();
	pEnumMoniker->Release();
	pCreateDevEnum->Release();
#else
	WCHAR wzDeviceName[ MAX_PATH + 1 ];
	CComVariant   varCamName;
	CPropertyBag PropBag;
    CComPtr<IPersistPropertyBag>    pPropertyBag;
	GetFirstCameraDriver(wzDeviceName);

	hr = s->m_pDeviceFilter.CoCreateInstance( CLSID_VideoCapture ); 
	if (FAILED(hr))
	{
		return -8;
	}

	s->m_pDeviceFilter.QueryInterface( &pPropertyBag );
	varCamName = wzDeviceName;
	if( varCamName.vt != VT_BSTR ) {
	  return E_OUTOFMEMORY;
	}
	PropBag.Write( L"VCapName", &varCamName );   
	pPropertyBag->Load( &PropBag, NULL );
	pPropertyBag.Release();

	hr = s->m_pGraph->AddFilter( s->m_pDeviceFilter, L"Video capture source" );
#endif

	if (FAILED(hr))
	{
		return -8;
	}

	// get null renderer
	s->m_pNullRenderer = NULL;
#if 0
	hr=s->m_pNullRenderer.CoCreateInstance(CLSID_NullRenderer);
	if(FAILED(hr))
	{
		return -13;
	}
#endif
	if (s->m_pNullRenderer!=NULL)
	{
		s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
	}

	hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_PREVIEW,
		&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
	if (FAILED(hr))
	{
		//hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_CAPTURE,
		//	&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
		if (FAILED(hr))
		{
			return -14;
		}
	}
	
	//m_pDXFilter->SetBufferSamples(TRUE);


	// Create the System Device Enumerator.
	IFilterMapper *pMapper = NULL;
	//IEnumMoniker *pEnum = NULL;
	IEnumRegFilters *pEnum = NULL;

	hr = CoCreateInstance(CLSID_FilterMapper,
		NULL, CLSCTX_INPROC, IID_IFilterMapper,
		(void **) &pMapper);

	if (FAILED(hr))
	{
		// Error handling omitted for clarity.
	}

	GUID arrayInTypes[2];
	arrayInTypes[0] = MEDIATYPE_Video;
	arrayInTypes[1] = MEDIASUBTYPE_dvsd;

	hr = pMapper->EnumMatchingFilters(
		&pEnum,
		MERIT_HW_COMPRESSOR, // Minimum merit.
		FALSE,               // At least one input pin?
		MEDIATYPE_NULL,
		MEDIASUBTYPE_NULL,
		FALSE,               // Must be a renderer?
		FALSE,               // At least one output pin?
		MEDIATYPE_NULL,
		MEDIASUBTYPE_NULL);

	// Enumerate the monikers.
	//IMoniker *pMoniker;
	REGFILTER *pMoniker;
	ULONG cFetched;
	while (pEnum->Next(1, &pMoniker, &cFetched) == S_OK)
	{
		IPropertyBag *pPropBag = NULL;
#if 0
		hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
			(void **)&pPropBag);

		if (SUCCEEDED(hr))
		{
			// To retrieve the friendly name of the filter, do the following:
			VARIANT varName;
			VariantInit(&varName);
			hr = pPropBag->Read(L"FriendlyName", &varName, 0);
			if (SUCCEEDED(hr))
			{
				// Display the name in your UI somehow.
			}
			VariantClear(&varName);

			// To create an instance of the filter, do the following:
			IBaseFilter *pFilter;
			hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&pFilter);
			// Now add the filter to the graph. Remember to release pFilter later.

			// Clean up.
			pPropBag->Release();
		}
		pMoniker->Release();
#endif
	}

	// Clean up.
	pMapper->Release();
	pEnum->Release();

	s_callback = s;
	hr = s->m_pControl->Run();
	if(FAILED(hr))
	{
		return -15;
	}

	s->rotregvalue=1;
	s->pix_fmt = format;
	s->vsize.height = vsize->height;
	s->vsize.width = vsize->width;
	return 0;
}
Example #9
// {228A3434-08CB-4602-8922-FF404B5FA126}
static const GUID CLSID_CorePNGEncoder = 
{ 0x228a3434, 0x8cb, 0x4602, { 0x89, 0x22, 0xff, 0x40, 0x4b, 0x5f, 0xa1, 0x26 } };

// {F5B73A39-DD6B-48c1-BBA5-9F95DE9A1029}
static const GUID CLSID_CorePNGDecoder =
{ 0xf5b73a39, 0xdd6b, 0x48c1, { 0xbb, 0xa5, 0x9f, 0x95, 0xde, 0x9a, 0x10, 0x29 } };

// {05B84A39-DD6B-48c1-BBA5-9F95DE9A1029}
static const GUID CLSID_CorePNGSubtitler =
{ 0x05b84a39, 0xdd6b, 0x48c1, { 0xbb, 0xa5, 0x9f, 0x95, 0xde, 0x9a, 0x10, 0x29 } };

#define FOURCC_PNG MAKEFOURCC('P', 'N', 'G', '1')
#define FOURCC_PNG_OLD MAKEFOURCC('P', 'N', 'G', ' ')

const static GUID MEDIASUBTYPE_PNG = (GUID)FOURCCMap(FOURCC_PNG);
const static GUID MEDIASUBTYPE_PNG_OLD = (GUID)FOURCCMap(FOURCC_PNG_OLD);

const AMOVIESETUP_FILTER sudCorePNGEncoder =
{
    &CLSID_CorePNGEncoder,   // clsID
    g_swCorePNGEncoderName,    // strName
    MERIT_DO_NOT_USE,         // dwMerit
    0,                        // nPins
    0                         // lpPin
};

AMOVIESETUP_MEDIATYPE sudInputType[] =
{
	{ &MEDIATYPE_Video, &MEDIASUBTYPE_PNG},
	{ &MEDIATYPE_Video, &MEDIASUBTYPE_PNG_OLD}
};

static int v4w_open_videodevice(V4wState *s)
{
	// Initialize COM
	CoInitialize(NULL);

	// get a Graph
	HRESULT hr= CoCreateInstance (CLSID_FilterGraph,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IGraphBuilder, //IID_IBaseFilter,
		(void **)&s->m_pGraph);
	if(FAILED(hr))
	{
		return -1;
	}

	// get a CaptureGraphBuilder2
	hr= CoCreateInstance (CLSID_CaptureGraphBuilder2,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_ICaptureGraphBuilder2, //IID_IBaseFilter,
		(void **)&s->m_pBuilder);
	if(FAILED(hr))
	{
		return -2;
	}

	// connect capture graph builder with the graph
	s->m_pBuilder->SetFiltergraph(s->m_pGraph);

	// get mediacontrol so we can start and stop the filter graph
	hr=s->m_pGraph->QueryInterface (IID_IMediaControl, (void **)&s->m_pControl);
	if(FAILED(hr))
	{
		return -3;
	}


	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;

	ULONG nFetched = 0;

	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
	if(FAILED(hr))
	{
		return -4;
	}

	hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (FAILED(hr) || pEnumMoniker == NULL) {
		//printf("no device\n");
		return -5;
	}

	pEnumMoniker->Reset();

	int pos=0;
	while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
	{
		IPropertyBag *pBag;
		hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag );
		if( hr != S_OK )
		{
			pMoniker->Release();
			pMoniker=NULL;
			continue;
		}

		if (s->dev[0]=='\0')
		{
			// no device name requested: take the first one
			pBag->Release();
			break;
		}

		VARIANT var;
		VariantInit(&var);
		hr = pBag->Read( L"FriendlyName", &var, NULL ); 
		if( hr != S_OK )
		{
			pBag->Release();
			pMoniker->Release();
			pMoniker=NULL;
			continue;
		}
		//USES_CONVERSION;
		char szName[256];

		WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0);
		VariantClear(&var); 

		if (strcmp(szName, s->dev)==0)
		{
			// found the requested device; pMoniker is bound to the filter below
			pBag->Release();
			break;
		}

		pMoniker->Release();
		pBag->Release();
		pMoniker=NULL;
		pBag=NULL;
	}

	if(pMoniker==NULL)
	{
		return -6;
	}

	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
	if(FAILED(hr))
	{
		return -7;
	}

	s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

	pMoniker->Release();
	pEnumMoniker->Release();
	pCreateDevEnum->Release();


	GUID pPinCategory;

	if (try_format(s->m_pDeviceFilter, s->pix_fmt, &pPinCategory)==0)
		s->pix_fmt = s->pix_fmt; /* requested format is supported; keep it */
	else if (try_format(s->m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0)
		s->pix_fmt = MS_YUV420P;
	else if (try_format(s->m_pDeviceFilter,MS_YUY2, &pPinCategory)==0)
		s->pix_fmt = MS_YUY2;
	else if (try_format(s->m_pDeviceFilter,MS_YUYV, &pPinCategory)==0)
		s->pix_fmt = MS_YUYV;
	else if (try_format(s->m_pDeviceFilter,MS_UYVY, &pPinCategory)==0)
		s->pix_fmt = MS_UYVY;
	else if (try_format(s->m_pDeviceFilter,MS_RGB24, &pPinCategory)==0)
		s->pix_fmt = MS_RGB24;
	else
	{
		ms_error("Unsupported video pixel format.");
		return -8;
	}

	if (s->pix_fmt == MS_YUV420P)
		ms_message("Driver supports YUV420P, using that format.");
	else if (s->pix_fmt == MS_YUY2)
		ms_message("Driver supports YUY2 (YUYV), using that format.");
	else if (s->pix_fmt == MS_YUYV)
		ms_message("Driver supports YUV422, using that format.");
	else if (s->pix_fmt == MS_UYVY)
		ms_message("Driver supports UYVY, using that format.");
	else if (s->pix_fmt == MS_RGB24)
		ms_message("Driver supports RGB24, using that format.");

	if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H);
	else
	{
		ms_error("No supported size found for format.");
		/* size not supported? */
		return -9;
	}

	// get DXFilter
	s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
	if(s->m_pDXFilter==NULL)
	{
		return -10;
	}
	s->m_pDXFilter->AddRef();

	CMediaType mt;
	mt.SetType(&MEDIATYPE_Video);

	GUID m = MEDIASUBTYPE_RGB24;
	if (s->pix_fmt == MS_YUV420P)
		m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
	else if (s->pix_fmt == MS_YUY2)
		m = MEDIASUBTYPE_YUY2;
	else if (s->pix_fmt == MS_YUYV)
		m = MEDIASUBTYPE_YUYV;
	else if (s->pix_fmt == MS_UYVY)
		m = MEDIASUBTYPE_UYVY;
	else if (s->pix_fmt == MS_RGB24)
		m = MEDIASUBTYPE_RGB24;
	mt.SetSubtype(&m);

	mt.formattype = FORMAT_VideoInfo;
	mt.SetTemporalCompression(FALSE);

	VIDEOINFO *pvi = (VIDEOINFO *)
		mt.AllocFormatBuffer(sizeof(VIDEOINFO));
	if (NULL == pvi)
		return -11;
	ZeroMemory(pvi, sizeof(VIDEOINFO));

	if (s->pix_fmt == MS_YUV420P)
		pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
	else if (s->pix_fmt == MS_YUY2)
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
	else if (s->pix_fmt == MS_YUYV)
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','V');
	else if (s->pix_fmt == MS_UYVY)
		pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
	else if (s->pix_fmt == MS_RGB24)
		pvi->bmiHeader.biCompression = BI_RGB;

	if (s->pix_fmt == MS_YUV420P)
		pvi->bmiHeader.biBitCount = 12;
	else if (s->pix_fmt == MS_YUY2)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_YUYV)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_UYVY)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_RGB24)
		pvi->bmiHeader.biBitCount = 24;

	pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	pvi->bmiHeader.biWidth = s->vsize.width;
	pvi->bmiHeader.biHeight = s->vsize.height;
	pvi->bmiHeader.biPlanes = 1;
	pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
	pvi->bmiHeader.biClrImportant = 0;
	mt.SetSampleSize(pvi->bmiHeader.biSizeImage);

	mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

	hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
	if(FAILED(hr))
	{
		return -12;
	}

	hr = s->m_pDXFilter->SetCallback(Callback); 
	if(FAILED(hr))
	{
		return -13;
	}

	hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
		(LPVOID *)&s->m_pIDXFilter);
	if(FAILED(hr))
	{
		return -14;
	}

	hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
	if(FAILED(hr))
	{
		return -15;
	}


	// get null renderer
	hr=CoCreateInstance (CLSID_NullRenderer,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IBaseFilter,
		(void **)&s->m_pNullRenderer);
	if(FAILED(hr))
	{
		return -16;
	}
	if (s->m_pNullRenderer!=NULL)
	{
		s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
	}

	hr = s->m_pBuilder->RenderStream(&pPinCategory,
		&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
	if (FAILED(hr))
	{
		return -17;
	}

	IAMStreamConfig *pConfig = NULL;
	hr = s->m_pBuilder->FindInterface(
		&pPinCategory, // Preview pin.
		&MEDIATYPE_Video,    // Any media type.
		s->m_pDeviceFilter, // Pointer to the capture filter.
		IID_IAMStreamConfig, (void**)&pConfig); 
	if (pConfig!=NULL)
	{
		AM_MEDIA_TYPE *pType = NULL;
		int iCount, iSize;
		pConfig->GetNumberOfCapabilities(&iCount, &iSize);

		for (int i = 0; i < iCount; i++) {
			VIDEO_STREAM_CONFIG_CAPS scc;
			pType = NULL;
			pConfig->GetStreamCaps(i, &pType, (BYTE *)&scc);

			if (!((pType->formattype == FORMAT_VideoInfo) &&
				(pType->cbFormat >= sizeof(VIDEOINFOHEADER)) &&
				(pType->pbFormat != NULL)))
				continue;

			VIDEOINFOHEADER & videoInfo = *(VIDEOINFOHEADER *)pType->pbFormat;

			if (m != pType->subtype)
				continue;

			if (videoInfo.bmiHeader.biWidth != s->vsize.width)
				continue;

			if (videoInfo.bmiHeader.biHeight != s->vsize.height)
				continue;

			if (videoInfo.bmiHeader.biBitCount != pvi->bmiHeader.biBitCount)
				continue;

			if (videoInfo.bmiHeader.biCompression != pvi->bmiHeader.biCompression)
				continue;

			videoInfo.AvgTimePerFrame = UNITS / (LONGLONG)s->fps;
			pConfig->SetFormat(pType);    
		}

		pConfig->GetFormat(&pType);
		if (pType!=NULL)
		{
			VIDEOINFO *pvi;
			pvi = (VIDEOINFO *)pType->pbFormat;
			ms_message("v4w: camera asked fps=%.2f // real fps=%.2f", s->fps, ((float)UNITS / (float)pvi->AvgTimePerFrame));
		}

		pConfig->Release();
	}

	//m_pDXFilter->SetBufferSamples(TRUE);

	s_callback = s;
	hr = s->m_pControl->Run();
	if(FAILED(hr))
	{
		return -18;
	}


	s->rotregvalue=1;
	return 0;
}
Example #11
static int v4w_open_videodevice(V4wState *s)
{
    // Initialize COM
    CoInitialize(NULL);

    // get a Graph
    HRESULT hr=s->m_pGraph.CoCreateInstance(CLSID_FilterGraph);
    if(FAILED(hr))
    {
        return -1;
    }

    // get a CaptureGraphBuilder2
    hr=s->m_pBuilder.CoCreateInstance(CLSID_CaptureGraphBuilder2);
    if(FAILED(hr))
    {
        return -2;
    }

    // connect capture graph builder with the graph
    s->m_pBuilder->SetFiltergraph(s->m_pGraph);

    // get mediacontrol so we can start and stop the filter graph
    hr=s->m_pGraph.QueryInterface(&(s->m_pControl));
    if(FAILED(hr))
    {
        return -3;
    }


#ifdef _DEBUG
    HANDLE m_hLogFile=CreateFile(L"DShowGraphLog.txt",GENERIC_READ|GENERIC_WRITE,FILE_SHARE_READ,NULL,OPEN_ALWAYS,FILE_ATTRIBUTE_NORMAL,NULL);
    if(m_hLogFile!=INVALID_HANDLE_VALUE)
    {
        hr=s->m_pGraph->SetLogFile((DWORD_PTR)m_hLogFile);
        /* ASSERT(SUCCEEDED(hr)); */
    }

    //AddGraphToRot(s->m_pGraph, &s->rotregvalue);
#endif

    ICreateDevEnum *pCreateDevEnum = NULL;
    IEnumMoniker *pEnumMoniker = NULL;
    IMoniker *pMoniker = NULL;

    ULONG nFetched = 0;

    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                          IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
    if(FAILED(hr))
    {
        return -4;
    }

    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
            &pEnumMoniker, 0);
    if (FAILED(hr) || pEnumMoniker == NULL) {
        //printf("no device\n");
        return -5;
    }

    pEnumMoniker->Reset();

    int pos=0;
    while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
    {
        if (pos>=s->devidx)
            break;
        pos++;
        pMoniker->Release();
        pMoniker=NULL;
    }
    if(pMoniker==NULL)
    {
        return -6;
    }

    hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
    if(FAILED(hr))
    {
        return -7;
    }

    s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

    pMoniker->Release();
    pEnumMoniker->Release();
    pCreateDevEnum->Release();

    if (try_format(s, s->pix_fmt)==0)
        s->pix_fmt = s->pix_fmt; /* requested format is supported; keep it */
    else if (try_format(s,MS_YUV420P)==0)
        s->pix_fmt = MS_YUV420P;
    else if (try_format(s,MS_YUY2)==0)
        s->pix_fmt = MS_YUY2;
    else if (try_format(s,MS_YUYV)==0)
        s->pix_fmt = MS_YUYV;
    else if (try_format(s,MS_UYVY)==0)
        s->pix_fmt = MS_UYVY;
    else if (try_format(s,MS_RGB24)==0)
        s->pix_fmt = MS_RGB24;
    else
    {
        ms_error("Unsupported video pixel format.");
        return -8;
    }

    if (s->pix_fmt == MS_YUV420P)
        ms_message("Driver supports YUV420P, using that format.");
    else if (s->pix_fmt == MS_YUY2)
        ms_message("Driver supports YUY2 (UYVY), using that format.");
    else if (s->pix_fmt == MS_YUYV)
        ms_message("Driver supports YUV422, using that format.");
    else if (s->pix_fmt == MS_UYVY)
        ms_message("Driver supports UYVY, using that format.");
    else if (s->pix_fmt == MS_RGB24)
        ms_message("Driver supports RGB24, using that format.");

    if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height)==0)
        ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H);
    else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H)==0)
        ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H);
    else
    {
        ms_error("No supported size found for format.");
        /* size not supported? */
        return -9;
    }

    // get DXFilter
    s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
    if(s->m_pDXFilter==NULL)
    {
        return -10;
    }
    s->m_pDXFilter->AddRef();

    CMediaType mt;
    mt.SetType(&MEDIATYPE_Video);

    GUID m = MEDIASUBTYPE_RGB24;
    if (s->pix_fmt == MS_YUV420P)
        m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
    else if (s->pix_fmt == MS_YUY2)
        m = MEDIASUBTYPE_YUY2;
    else if (s->pix_fmt == MS_YUYV)
        m = MEDIASUBTYPE_YUYV;
    else if (s->pix_fmt == MS_UYVY)
        m = MEDIASUBTYPE_UYVY;
    else if (s->pix_fmt == MS_RGB24)
        m = MEDIASUBTYPE_RGB24;
    mt.SetSubtype(&m);

    mt.formattype = FORMAT_VideoInfo;
    mt.SetTemporalCompression(FALSE);

    VIDEOINFO *pvi = (VIDEOINFO *)
                     mt.AllocFormatBuffer(sizeof(VIDEOINFO));
    if (NULL == pvi)
        return -11;
    ZeroMemory(pvi, sizeof(VIDEOINFO));

    if (s->pix_fmt == MS_YUV420P)
        pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
    else if (s->pix_fmt == MS_YUY2)
        pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
    else if (s->pix_fmt == MS_YUYV)
        pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','V');
    else if (s->pix_fmt == MS_UYVY)
        pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
    else if (s->pix_fmt == MS_RGB24)
        pvi->bmiHeader.biCompression = BI_RGB;

    if (s->pix_fmt == MS_YUV420P)
        pvi->bmiHeader.biBitCount = 12;
    else if (s->pix_fmt == MS_YUY2)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_YUYV)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_UYVY)
        pvi->bmiHeader.biBitCount = 16;
    else if (s->pix_fmt == MS_RGB24)
        pvi->bmiHeader.biBitCount = 24;

    pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    pvi->bmiHeader.biWidth = s->vsize.width;
    pvi->bmiHeader.biHeight = s->vsize.height;
    pvi->bmiHeader.biPlanes = 1;
    pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
    pvi->bmiHeader.biClrImportant = 0;
    mt.SetSampleSize(pvi->bmiHeader.biSizeImage);
    mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

    hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
    if(FAILED(hr))
    {
        return -12;
    }

    hr = s->m_pDXFilter->SetCallback(Callback);
    if(FAILED(hr))
    {
        return -13;
    }

    hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
                                        (LPVOID *)&s->m_pIDXFilter);
    if(FAILED(hr))
    {
        return -14;
    }

    hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
    if(FAILED(hr))
    {
        return -15;
    }


    // get null renderer
    hr=s->m_pNullRenderer.CoCreateInstance(CLSID_NullRenderer);
    if(FAILED(hr))
    {
        return -16;
    }
    if (s->m_pNullRenderer!=NULL)
    {
        s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
    }

    hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_PREVIEW,
                                     &MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
    if (FAILED(hr))
    {
        //hr = s->m_pBuilder->RenderStream(&PIN_CATEGORY_CAPTURE,
        //	&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
        if (FAILED(hr))
        {
            return -17;
        }
    }

    //m_pDXFilter->SetBufferSamples(TRUE);

    s_callback = s;
    hr = s->m_pControl->Run();
    if(FAILED(hr))
    {
        return -18;
    }

    s->rotregvalue=1;
    return 0;
}
Example #12
HRESULT CVideoPin::FillBuffer(IMediaSample* pSample)
{
  try
  {
    Packet* buffer = NULL;

    do
    {
      if (m_pFilter->IsStopping() || m_demux.IsMediaChanging() || m_bFlushing || !m_bSeekDone || m_demux.m_bRebuildOngoing)
      {
        Sleep(1);
        return ERROR_NO_DATA;
      }

      if (m_demux.EndOfFile())
      {
        LogDebug("vid: set EOF");
        return S_FALSE;
      }

      if (m_demux.m_bVideoClipSeen || m_demux.m_bAudioRequiresRebuild && !m_demux.m_bVideoClipSeen && !m_demux.m_eAudioClipSeen->Check())
      {
        CheckPlaybackState();
        return ERROR_NO_DATA;
      }

      if (m_pCachedBuffer)
      {
        LogDebug("vid: cached fetch %6.3f clip: %d playlist: %d", m_pCachedBuffer->rtStart / 10000000.0, m_pCachedBuffer->nClipNumber, m_pCachedBuffer->nPlaylist);
        buffer = m_pCachedBuffer;
        m_pCachedBuffer = NULL;
        buffer->bDiscontinuity = true;
        
        if (m_bProvidePMT)
        {
          CMediaType mt(*buffer->pmt);
          SetMediaType(&mt);
          pSample->SetMediaType(&mt);
          m_bProvidePMT = false;
        }
      }
      else
        buffer = m_demux.GetVideo();

      if (!buffer)
      {
        if (m_bFirstSample)
          Sleep(10);
        else 
        {
          if (!m_bClipEndingNotified)
          {
            // Deliver end of stream notification to flush the video decoder.
            // This should only happen when the stream enters into paused state
            LogDebug("vid: FillBuffer - DeliverEndOfStream");
            DeliverEndOfStream();
            m_bClipEndingNotified = true;
          }
          else
            Sleep(10);
		  
          return ERROR_NO_DATA;
        }
      }
      else
      {
        bool checkPlaybackState = false;

        {
          CAutoLock lock(m_section);

          if (buffer->nNewSegment > 0)
          {
            if ((buffer->nNewSegment & NS_NEW_CLIP) == NS_NEW_CLIP)
            {
              LogDebug("vid: Playlist changed to %d - nNewSegment: %d offset: %6.3f rtStart: %6.3f rtPlaylistTime: %6.3f", 
                buffer->nPlaylist, buffer->nNewSegment, buffer->rtOffset / 10000000.0, buffer->rtStart / 10000000.0, buffer->rtPlaylistTime / 10000000.0);
            
              m_demux.m_bVideoClipSeen = true;
 
              m_bInitDuration = true;
              checkPlaybackState = true;
              m_bClipEndingNotified = false;

              if (buffer->bResuming || buffer->nNewSegment & NS_INTERRUPTED)
              {
                m_bDoFakeSeek = true;
                m_rtStreamOffset = buffer->rtPlaylistTime;
                m_bZeroTimeStream = true;
                m_demux.m_bAudioResetStreamPosition = true;
              }
              else
                m_rtStreamOffset = 0;

              // LAV video decoder requires an end of stream notification to be able to provide complete video frames
              // to downstream filters in a case where we are waiting for the audio pin to see the clip boundary as
              // we cannot provide yet the next clip's PMT downstream since audio stream could require a rebuild
              if (m_currentDecoder == CLSID_LAVVideo && (buffer->nNewSegment & NS_NEW_PLAYLIST))
              {
                LogDebug("DeliverEndOFStream LAV Only for audio pin wait (%d,%d)", buffer->nPlaylist, buffer->nClipNumber);
                DeliverEndOfStream();
              }
            }
            if ((buffer->nNewSegment & NS_STREAM_RESET) == NS_STREAM_RESET)
              m_bInitDuration = true;
          }

          if (buffer->pmt)
          {
            GUID subtype = buffer->pmt->subtype;

            if (buffer->pmt->subtype == FOURCCMap('1CVW') && m_VC1Override != GUID_NULL)
            {
              buffer->pmt->subtype = m_VC1Override;
              LogDebug("vid: FillBuffer - force VC-1 GUID");
            }

            if (!CompareMediaTypes(buffer->pmt, &m_mt))
            {
              LogMediaType(buffer->pmt);
            
              HRESULT hrAccept = S_FALSE;
              m_bProvidePMT = true;

              if (m_pReceiver && CheckVideoFormat(&buffer->pmt->subtype, &m_currentDecoder))
              {
                // Currently no video decoder other than LAV seems to be compatible
                // with the dynamic format changes
                if (m_currentDecoder == CLSID_LAVVideo)
                  hrAccept = m_pReceiver->QueryAccept(buffer->pmt);
              }

              if (hrAccept != S_OK)
              {
                CMediaType mt(*buffer->pmt);
                SetMediaType(&mt);

                LogDebug("vid: graph rebuilding required");

                m_demux.m_bVideoRequiresRebuild = true;
                m_bZeroTimeStream = true;
                checkPlaybackState = true;

                //LogDebug("DeliverEndOFStream for rebuild (%d,%d)", buffer->nPlaylist, buffer->nClipNumber);
                //DeliverEndOfStream();
              }
              else
              {
                LogDebug("vid: format change accepted");
                CMediaType mt(*buffer->pmt);
                SetMediaType(&mt);
                pSample->SetMediaType(&mt);

                buffer->nNewSegment = 0;
                m_pCachedBuffer = buffer;
				
                //if (m_currentDecoder == CLSID_LAVVideo)
                //{
                //  LogDebug("DeliverEndOFStream LAV Only (%d,%d)", buffer->nPlaylist, buffer->nClipNumber);
                //  DeliverEndOfStream();
                //}

                return ERROR_NO_DATA;
              }
            } // comparemediatypes
          }
        } // lock ends

        m_rtTitleDuration = buffer->rtTitleDuration;

        if (checkPlaybackState)
        {
          buffer->nNewSegment = 0;
          m_pCachedBuffer = buffer;

          CheckPlaybackState();

          LogDebug("vid: cached push  %6.3f clip: %d playlist: %d", m_pCachedBuffer->rtStart / 10000000.0, m_pCachedBuffer->nClipNumber, m_pCachedBuffer->nPlaylist);

          return ERROR_NO_DATA;
        }

        bool hasTimestamp = buffer->rtStart != Packet::INVALID_TIME;
        REFERENCE_TIME rtCorrectedStartTime = 0;
        REFERENCE_TIME rtCorrectedStopTime = 0;

        if (hasTimestamp)
        {
          if (m_bZeroTimeStream)
          {
            m_rtStreamTimeOffset = buffer->rtStart - buffer->rtClipStartTime;
            m_bZeroTimeStream = false;
          }

          if (m_bDiscontinuity || buffer->bDiscontinuity)
          {
            LogDebug("vid: set discontinuity");
            pSample->SetDiscontinuity(true);
            pSample->SetMediaType(buffer->pmt);
            m_bDiscontinuity = false;
          }

          rtCorrectedStartTime = buffer->rtStart - m_rtStreamTimeOffset;
          rtCorrectedStopTime = buffer->rtStop - m_rtStreamTimeOffset;

          pSample->SetTime(&rtCorrectedStartTime, &rtCorrectedStopTime);

          if (m_bInitDuration)
          {
            m_pFilter->SetTitleDuration(m_rtTitleDuration);
            m_pFilter->ResetPlaybackOffset(buffer->rtPlaylistTime - rtCorrectedStartTime);
            m_bInitDuration = false;
          }

          m_pFilter->OnPlaybackPositionChange();
        }
        else // Buffer has no timestamp
          pSample->SetTime(NULL, NULL);

        pSample->SetSyncPoint(buffer->bSyncPoint);

        {
          CAutoLock lock(&m_csDeliver);

          if (!m_bFlushing)
          {
            BYTE* pSampleBuffer;
            pSample->SetActualDataLength(buffer->GetDataSize());
            pSample->GetPointer(&pSampleBuffer);
            memcpy(pSampleBuffer, buffer->GetData(), buffer->GetDataSize());

            m_bFirstSample = false;

#ifdef LOG_VIDEO_PIN_SAMPLES
            LogDebug("vid: %6.3f corr %6.3f playlist time %6.3f clip: %d playlist: %d size: %d", buffer->rtStart / 10000000.0, rtCorrectedStartTime / 10000000.0, 
              buffer->rtPlaylistTime / 10000000.0, buffer->nClipNumber, buffer->nPlaylist, buffer->GetCount());
#endif
          }
          else
          {
            LogDebug("vid: dropped sample as flush is active!");
            return ERROR_NO_DATA;
          }
        }

        //static int iFrameNumber = 0;
        //LogMediaSample(pSample, iFrameNumber++);

        delete buffer;
      }
    } while (!buffer);
    return NOERROR;
  }

  catch(...)
  {
    LogDebug("vid: FillBuffer exception");
  }

  return S_OK;
}
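
ERROR_NO_DATA here is a Win32 error code (232), not an HRESULT; returned raw it simply acts as a nonzero "not S_OK" value to the caller. If a well-formed HRESULT were wanted instead, the standard FACILITY_WIN32 mapping would apply (hypothetical alternative, not what the code above does):

// Wrap the Win32 code in a proper HRESULT instead of returning it raw
HRESULT hr = HRESULT_FROM_WIN32(ERROR_NO_DATA); // 0x800700E8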
void CLAVPixFmtConverter::GetMediaType(CMediaType *mt, int index, LONG biWidth, LONG biHeight, DWORD dwAspectX, DWORD dwAspectY, REFERENCE_TIME rtAvgTime, BOOL bInterlaced, BOOL bVIH1)
{
  if (index < 0 || index >= GetFilteredFormatCount())
    index = 0;

  LAVOutPixFmts pixFmt = GetFilteredFormat(index);

  GUID guid = lav_pixfmt_desc[pixFmt].subtype;

  mt->SetType(&MEDIATYPE_Video);
  mt->SetSubtype(&guid);

  BITMAPINFOHEADER *pBIH = nullptr;
  if (bVIH1) {
    mt->SetFormatType(&FORMAT_VideoInfo);

    VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)mt->ReallocFormatBuffer(sizeof(VIDEOINFOHEADER));
    memset(vih, 0, sizeof(VIDEOINFOHEADER));

    vih->rcSource.right = vih->rcTarget.right = biWidth;
    vih->rcSource.bottom = vih->rcTarget.bottom = biHeight;
    vih->AvgTimePerFrame = rtAvgTime;

    pBIH = &vih->bmiHeader;
  } else {
    mt->SetFormatType(&FORMAT_VideoInfo2);

    VIDEOINFOHEADER2 *vih2 = (VIDEOINFOHEADER2 *)mt->ReallocFormatBuffer(sizeof(VIDEOINFOHEADER2));
    memset(vih2, 0, sizeof(VIDEOINFOHEADER2));

    // Validate the Aspect Ratio - an AR of 0 crashes VMR-9
    if (dwAspectX == 0 || dwAspectY == 0) {
      dwAspectX = biWidth;
      dwAspectY = biHeight;
    }

    // Always reduce the AR to the smallest fraction
    int dwX = 0, dwY = 0;
    av_reduce(&dwX, &dwY, dwAspectX, dwAspectY, max(dwAspectX, dwAspectY));

    vih2->rcSource.right = vih2->rcTarget.right = biWidth;
    vih2->rcSource.bottom = vih2->rcTarget.bottom = biHeight;
    vih2->AvgTimePerFrame = rtAvgTime;
    vih2->dwPictAspectRatioX = dwX;
    vih2->dwPictAspectRatioY = dwY;

    if (bInterlaced)
      vih2->dwInterlaceFlags = AMINTERLACE_IsInterlaced | AMINTERLACE_DisplayModeBobOrWeave;

    pBIH = &vih2->bmiHeader;
  }

  pBIH->biSize = sizeof(BITMAPINFOHEADER);
  pBIH->biWidth = biWidth;
  pBIH->biHeight = biHeight;
  pBIH->biBitCount = lav_pixfmt_desc[pixFmt].bpp;
  pBIH->biPlanes = lav_pixfmt_desc[pixFmt].planes;
  pBIH->biSizeImage = GetImageSize(biWidth, abs(biHeight), pixFmt);
  pBIH->biCompression = guid.Data1;

  if (!pBIH->biPlanes) {
    pBIH->biPlanes = 1;
  }

  if (guid == MEDIASUBTYPE_RGB32 || guid == MEDIASUBTYPE_RGB24) {
    pBIH->biCompression = BI_RGB;
  }

  // Correct implied stride for v210
  if (guid == FOURCCMap('012v')) {
    pBIH->biWidth = FFALIGN(biWidth, 48);
  }

  mt->SetSampleSize(pBIH->biSizeImage);
  mt->SetTemporalCompression(0);
}
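
av_reduce divides out the common factor so the pin advertises the smallest equivalent aspect ratio. A quick sketch with libavutil (same signature as the call above):

#include <libavutil/rational.h>

int dwX = 0, dwY = 0;
// 1920:1080 reduces to 16:9; the last argument caps the magnitude of the result
av_reduce(&dwX, &dwY, 1920, 1080, 1920);
// now dwX == 16, dwY == 9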
  { LAVPixFmt_RGB32,  8,    { PIXOUT_RGB_8, PIXOUT_RGB_16, PIXOUT_444_16, PIXOUT_444_10, PIXOUT_444_8, PIXOUT_422_16, PIXOUT_422_10, PIXOUT_422_8, PIXOUT_420_16, PIXOUT_420_10, PIXOUT_420_8 } },
  { LAVPixFmt_ARGB32, 8,    { PIXOUT_RGB_8, PIXOUT_RGB_16, PIXOUT_444_16, PIXOUT_444_10, PIXOUT_444_8, PIXOUT_422_16, PIXOUT_422_10, PIXOUT_422_8, PIXOUT_420_16, PIXOUT_420_10, PIXOUT_420_8 } },
  { LAVPixFmt_RGB48,  8,    { PIXOUT_RGB_16, PIXOUT_RGB_8, PIXOUT_444_16, PIXOUT_444_10, PIXOUT_444_8, PIXOUT_422_16, PIXOUT_422_10, PIXOUT_422_8, PIXOUT_420_16, PIXOUT_420_10, PIXOUT_420_8 } },

  { LAVPixFmt_DXVA2, 8,     { PIXOUT_420_8, PIXOUT_420_10, PIXOUT_420_16, PIXOUT_422_16, PIXOUT_422_10, PIXOUT_422_8, PIXOUT_RGB_8, PIXOUT_444_16, PIXOUT_444_10, PIXOUT_444_8, PIXOUT_RGB_16 } },
};

LAVOutPixFmtDesc lav_pixfmt_desc[] = {
  { MEDIASUBTYPE_YV12,  12, 1, 3, { 1, 2, 2 }, { 1, 2, 2 } },        // YV12
  { MEDIASUBTYPE_NV12,  12, 1, 2, { 1, 2 }, { 1, 1 } },              // NV12
  { MEDIASUBTYPE_YUY2,  16, 2, 0, { 1 }, { 1 } },                    // YUY2 (packed)
  { MEDIASUBTYPE_UYVY,  16, 2, 0, { 1 }, { 1 } },                    // UYVY (packed)
  { MEDIASUBTYPE_AYUV,  32, 4, 0, { 1 }, { 1 } },                    // AYUV (packed)
  { MEDIASUBTYPE_P010,  24, 2, 2, { 1, 2 }, { 1, 1 } },              // P010
  { MEDIASUBTYPE_P210,  32, 2, 2, { 1, 1 }, { 1, 1 } },              // P210
  { FOURCCMap('014Y'),  32, 4, 0, { 1 }, { 1 }  },                   // Y410 (packed)
  { MEDIASUBTYPE_P016,  24, 2, 2, { 1, 2 }, { 1, 1 } },              // P016
  { MEDIASUBTYPE_P216,  32, 2, 2, { 1, 1 }, { 1, 1 } },              // P216
  { FOURCCMap('614Y'),  64, 8, 0, { 1 }, { 1 } },                    // Y416 (packed)
  { MEDIASUBTYPE_RGB32, 32, 4, 0, { 1 }, { 1 } },                    // RGB32
  { MEDIASUBTYPE_RGB24, 24, 3, 0, { 1 }, { 1 } },                    // RGB24
  { FOURCCMap('012v'),  24, 4, 0, { 1 }, { 1 } },                    // v210 (packed)
  { FOURCCMap('014v'),  32, 4, 0, { 1 }, { 1 }  },                   // v410 (packed)
  { MEDIASUBTYPE_YV16,  16, 1, 3, { 1, 1, 1 }, { 1, 2, 2 } },        // YV16
  { MEDIASUBTYPE_YV24,  24, 1, 3, { 1, 1, 1 }, { 1, 1, 1 } },        // YV24
  { FOURCCMap('0BGR'),  48, 6, 0, { 1 }, { 1 } },                    // RGB48
};

static LAV_INOUT_PIXFMT_MAP *lookupFormatMap(LAVPixelFormat informat, int bpp, BOOL bFallbackToDefault = TRUE)
{
  for (int i = 0; i < countof(lav_pixfmt_map); ++i) {
Example #15
STDMETHODIMP CDecAvcodec::InitDecoder(AVCodecID codec, const CMediaType *pmt)
{
  DestroyDecoder();
  DbgLog((LOG_TRACE, 10, L"Initializing ffmpeg for codec %S", avcodec_get_name(codec)));

  BITMAPINFOHEADER *pBMI = NULL;
  videoFormatTypeHandler((const BYTE *)pmt->Format(), pmt->FormatType(), &pBMI);

  m_pAVCodec = avcodec_find_decoder(codec);
  CheckPointer(m_pAVCodec, VFW_E_UNSUPPORTED_VIDEO);

  m_pAVCtx = avcodec_alloc_context3(m_pAVCodec);
  CheckPointer(m_pAVCtx, E_POINTER);

  if(codec == AV_CODEC_ID_MPEG1VIDEO || codec == AV_CODEC_ID_MPEG2VIDEO || pmt->subtype == FOURCCMap(MKTAG('H','2','6','4')) || pmt->subtype == FOURCCMap(MKTAG('h','2','6','4'))) {
    m_pParser = av_parser_init(codec);
  }

  DWORD dwDecFlags = m_pCallback->GetDecodeFlags();

  LONG biRealWidth = pBMI->biWidth, biRealHeight = pBMI->biHeight;
  if (pmt->formattype == FORMAT_VideoInfo || pmt->formattype == FORMAT_MPEGVideo) {
    VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)pmt->Format();
    if (vih->rcTarget.right != 0 && vih->rcTarget.bottom != 0) {
      biRealWidth  = vih->rcTarget.right;
      biRealHeight = vih->rcTarget.bottom;
    }
  } else if (pmt->formattype == FORMAT_VideoInfo2 || pmt->formattype == FORMAT_MPEG2Video) {
    VIDEOINFOHEADER2 *vih2 = (VIDEOINFOHEADER2 *)pmt->Format();
    if (vih2->rcTarget.right != 0 && vih2->rcTarget.bottom != 0) {
      biRealWidth  = vih2->rcTarget.right;
      biRealHeight = vih2->rcTarget.bottom;
    }
  }

  m_pAVCtx->codec_id              = codec;
  m_pAVCtx->codec_tag             = pBMI->biCompression;
  m_pAVCtx->coded_width           = pBMI->biWidth;
  m_pAVCtx->coded_height          = abs(pBMI->biHeight);
  m_pAVCtx->bits_per_coded_sample = pBMI->biBitCount;
  m_pAVCtx->error_concealment     = FF_EC_GUESS_MVS | FF_EC_DEBLOCK;
  m_pAVCtx->err_recognition       = AV_EF_CAREFUL;
  m_pAVCtx->workaround_bugs       = FF_BUG_AUTODETECT;
  m_pAVCtx->refcounted_frames     = 1;

  if (codec == AV_CODEC_ID_H264)
    m_pAVCtx->flags2             |= CODEC_FLAG2_SHOW_ALL;

  // Setup threading
  int thread_type = getThreadFlags(codec);
  if (thread_type) {
    // Thread Count. 0 = auto detect
    int thread_count = m_pSettings->GetNumThreads();
    if (thread_count == 0) {
      thread_count = av_cpu_count() * 3 / 2;
    }

    m_pAVCtx->thread_count = max(1, min(thread_count, AVCODEC_MAX_THREADS));
    m_pAVCtx->thread_type = thread_type;
  } else {
    m_pAVCtx->thread_count = 1;
  }

  if (dwDecFlags & LAV_VIDEO_DEC_FLAG_NO_MT) {
    m_pAVCtx->thread_count = 1;
  }

  m_pFrame = av_frame_alloc();
  CheckPointer(m_pFrame, E_POINTER);

  m_h264RandomAccess.SetAVCNALSize(0);

  // Process Extradata
  BYTE *extra = NULL;
  size_t extralen = 0;
  getExtraData(*pmt, NULL, &extralen);

  BOOL bH264avc = FALSE;
  if (extralen > 0) {
    DbgLog((LOG_TRACE, 10, L"-> Processing extradata of %d bytes", extralen));
    // Reconstruct AVC1 extradata format
    if (pmt->formattype == FORMAT_MPEG2Video && (m_pAVCtx->codec_tag == MAKEFOURCC('a','v','c','1') || m_pAVCtx->codec_tag == MAKEFOURCC('A','V','C','1') || m_pAVCtx->codec_tag == MAKEFOURCC('C','C','V','1'))) {
      MPEG2VIDEOINFO *mp2vi = (MPEG2VIDEOINFO *)pmt->Format();
      extralen += 7;
      extra = (uint8_t *)av_mallocz(extralen + FF_INPUT_BUFFER_PADDING_SIZE);
      extra[0] = 1;
      extra[1] = (BYTE)mp2vi->dwProfile;
      extra[2] = 0;
      extra[3] = (BYTE)mp2vi->dwLevel;
      extra[4] = (BYTE)(mp2vi->dwFlags ? mp2vi->dwFlags : 4) - 1;

      // Actually copy the metadata into our new buffer
      size_t actual_len;
      getExtraData(*pmt, extra+6, &actual_len);

      // Count the number of SPS/PPS in them and set the length
      // We'll put them all into one block and add a second block with 0 elements afterwards
      // The parsing logic does not care what type they are, it just expects 2 blocks.
      BYTE *p = extra+6, *end = extra+6+actual_len;
      BOOL bSPS = FALSE, bPPS = FALSE;
      int count = 0;
      while (p+1 < end) {
        unsigned len = (((unsigned)p[0] << 8) | p[1]) + 2;
        if (p + len > end) {
          break;
        }
        if ((p[2] & 0x1F) == 7)
          bSPS = TRUE;
        if ((p[2] & 0x1F) == 8)
          bPPS = TRUE;
        count++;
        p += len;
      }
      extra[5] = count;
      extra[extralen-1] = 0;

      bH264avc = TRUE;
      m_h264RandomAccess.SetAVCNALSize(mp2vi->dwFlags);
    } else if (pmt->subtype == MEDIASUBTYPE_LAV_RAWVIDEO) {
      if (extralen < sizeof(m_pAVCtx->pix_fmt)) {
        DbgLog((LOG_TRACE, 10, L"-> LAV RAW Video extradata is missing.."));
      } else {
        extra = (uint8_t *)av_mallocz(extralen + FF_INPUT_BUFFER_PADDING_SIZE);
        getExtraData(*pmt, extra, NULL);
        m_pAVCtx->pix_fmt = *(AVPixelFormat *)extra;
        extralen -= sizeof(AVPixelFormat);
        memmove(extra, extra+sizeof(AVPixelFormat), extralen);
      }
    } else {
      // Just copy extradata for other formats
      extra = (uint8_t *)av_mallocz(extralen + FF_INPUT_BUFFER_PADDING_SIZE);
      getExtraData(*pmt, extra, NULL);
    }
    // Hack to discard invalid MP4 metadata with AnnexB style video
    if (codec == AV_CODEC_ID_H264 && !bH264avc && extra[0] == 1) {
      av_freep(&extra);
      extralen = 0;
    }
    m_pAVCtx->extradata = extra;
    m_pAVCtx->extradata_size = (int)extralen;
  } else {
    if (codec == AV_CODEC_ID_VP6 || codec == AV_CODEC_ID_VP6A || codec == AV_CODEC_ID_VP6F) {
      int cropH = pBMI->biWidth - biRealWidth;
      int cropV = pBMI->biHeight - biRealHeight;
      if (cropH >= 0 && cropH <= 0x0f && cropV >= 0 && cropV <= 0x0f) {
        m_pAVCtx->extradata = (uint8_t *)av_mallocz(1 + FF_INPUT_BUFFER_PADDING_SIZE);
        m_pAVCtx->extradata_size = 1;
        m_pAVCtx->extradata[0] = (cropH << 4) | cropV;
      }
    }
  }

  m_h264RandomAccess.flush(m_pAVCtx->thread_count);
  m_CurrentThread = 0;
  m_rtStartCache = AV_NOPTS_VALUE;

  LAVPinInfo lavPinInfo = {0};
  BOOL bLAVInfoValid = SUCCEEDED(m_pCallback->GetLAVPinInfo(lavPinInfo));

  m_bInputPadded = dwDecFlags & LAV_VIDEO_DEC_FLAG_LAVSPLITTER;

  // Setup codec-specific timing logic
  BOOL bVC1IsPTS = (codec == AV_CODEC_ID_VC1 && !(dwDecFlags & LAV_VIDEO_DEC_FLAG_VC1_DTS));

  // Use ffmpegs logic to reorder timestamps
  // This is required for H264 content (except AVI), and generally all codecs that use frame threading
  // VC-1 is also a special case. Its required for splitters that deliver PTS timestamps (see bVC1IsPTS above)
  m_bFFReordering        =  ( codec == AV_CODEC_ID_H264 && !(dwDecFlags & LAV_VIDEO_DEC_FLAG_H264_AVI))
                           || codec == AV_CODEC_ID_VP8
                           || codec == AV_CODEC_ID_VP3
                           || codec == AV_CODEC_ID_THEORA
                           || codec == AV_CODEC_ID_HUFFYUV
                           || codec == AV_CODEC_ID_FFVHUFF
                           || codec == AV_CODEC_ID_MPEG2VIDEO
                           || codec == AV_CODEC_ID_MPEG1VIDEO
                           || codec == AV_CODEC_ID_DIRAC
                           || codec == AV_CODEC_ID_UTVIDEO
                           || codec == AV_CODEC_ID_DNXHD
                           || codec == AV_CODEC_ID_JPEG2000
                           || (codec == AV_CODEC_ID_MPEG4 && pmt->formattype == FORMAT_MPEG2Video)
                           || bVC1IsPTS;

  // Stop time is unreliable, drop it and calculate it
  m_bCalculateStopTime   = (codec == AV_CODEC_ID_H264 || codec == AV_CODEC_ID_DIRAC || (codec == AV_CODEC_ID_MPEG4 && pmt->formattype == FORMAT_MPEG2Video) || bVC1IsPTS);

  // Real Video content has some odd timestamps
  // LAV Splitter handles them correctly for RV30/RV40; everything else screws them up
  m_bRVDropBFrameTimings = (codec == AV_CODEC_ID_RV10 || codec == AV_CODEC_ID_RV20 || ((codec == AV_CODEC_ID_RV30 || codec == AV_CODEC_ID_RV40) && (!(dwDecFlags & LAV_VIDEO_DEC_FLAG_LAVSPLITTER) || (bLAVInfoValid && (lavPinInfo.flags & LAV_STREAM_FLAG_RV34_MKV)))));

  // Enable B-Frame delay handling
  m_bBFrameDelay = !m_bFFReordering && !m_bRVDropBFrameTimings;

  m_bWaitingForKeyFrame = TRUE;
  m_bResumeAtKeyFrame =    codec == AV_CODEC_ID_MPEG2VIDEO
                        || codec == AV_CODEC_ID_VC1
                        || codec == AV_CODEC_ID_RV30
                        || codec == AV_CODEC_ID_RV40
                        || codec == AV_CODEC_ID_VP3
                        || codec == AV_CODEC_ID_THEORA
                        || codec == AV_CODEC_ID_MPEG4;

  m_bNoBufferConsumption =    codec == AV_CODEC_ID_MJPEGB
                           || codec == AV_CODEC_ID_LOCO
                           || codec == AV_CODEC_ID_JPEG2000;

  m_bHasPalette = m_pAVCtx->bits_per_coded_sample <= 8 && m_pAVCtx->extradata_size && !(dwDecFlags & LAV_VIDEO_DEC_FLAG_LAVSPLITTER)
                  &&  (codec == AV_CODEC_ID_MSVIDEO1
                    || codec == AV_CODEC_ID_MSRLE
                    || codec == AV_CODEC_ID_CINEPAK
                    || codec == AV_CODEC_ID_8BPS
                    || codec == AV_CODEC_ID_QPEG
                    || codec == AV_CODEC_ID_QTRLE
                    || codec == AV_CODEC_ID_TSCC);

  if (FAILED(AdditionaDecoderInit())) {
    return E_FAIL;
  }

  if (bLAVInfoValid) {
    // Setting has_b_frames to a proper value will ensure smoother decoding of H264
    if (lavPinInfo.has_b_frames >= 0) {
      DbgLog((LOG_TRACE, 10, L"-> Setting has_b_frames to %d", lavPinInfo.has_b_frames));
      m_pAVCtx->has_b_frames = lavPinInfo.has_b_frames;
    }
  }

  // Open the decoder
  int ret = avcodec_open2(m_pAVCtx, m_pAVCodec, NULL);
  if (ret >= 0) {
    DbgLog((LOG_TRACE, 10, L"-> ffmpeg codec opened successfully (ret: %d)", ret));
    m_nCodecId = codec;
  } else {
    DbgLog((LOG_TRACE, 10, L"-> ffmpeg codec failed to open (ret: %d)", ret));
    DestroyDecoder();
    return VFW_E_UNSUPPORTED_VIDEO;
  }

  m_iInterlaced = 0;
  for (int i = 0; i < countof(ff_interlace_capable); i++) {
    if (codec == ff_interlace_capable[i]) {
      m_iInterlaced = -1;
      break;
    }
  }

  // Detect chroma format and interlacing from the codec headers
  if (m_pAVCtx->extradata && m_pAVCtx->extradata_size) {
    if (codec == AV_CODEC_ID_MPEG2VIDEO) {
      CMPEG2HeaderParser mpeg2Parser(extra, extralen);
      if (mpeg2Parser.hdr.valid) {
        if (mpeg2Parser.hdr.chroma < 2) {
          m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV420P;
        } else if (mpeg2Parser.hdr.chroma == 2) {
          m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV422P;
        }
        m_iInterlaced = mpeg2Parser.hdr.interlaced;
      }
    } else if (codec == AV_CODEC_ID_H264) {
      CH264SequenceParser h264parser;
      if (bH264avc)
        h264parser.ParseNALs(extra+6, extralen-6, 2);
      else
        h264parser.ParseNALs(extra, extralen, 0);
      if (h264parser.sps.valid)
        m_iInterlaced = h264parser.sps.interlaced;
    } else if (codec == AV_CODEC_ID_VC1) {
      CVC1HeaderParser vc1parser(extra, extralen);
      if (vc1parser.hdr.valid)
        m_iInterlaced = (vc1parser.hdr.interlaced ? -1 : 0);
    }
  }

  if (codec == AV_CODEC_ID_DNXHD)
    m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV422P10;
  else if (codec == AV_CODEC_ID_FRAPS)
    m_pAVCtx->pix_fmt = AV_PIX_FMT_BGR24;

  if (bLAVInfoValid && codec != AV_CODEC_ID_FRAPS && m_pAVCtx->pix_fmt != AV_PIX_FMT_DXVA2_VLD)
    m_pAVCtx->pix_fmt = lavPinInfo.pix_fmt;

  DbgLog((LOG_TRACE, 10, L"AVCodec init successfull. interlaced: %d", m_iInterlaced));

  return S_OK;
}
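The function above follows the standard libavcodec bring-up sequence: attach zero-padded extradata to the context, open it with avcodec_open2(), and tear the decoder down again if the open fails. As a minimal sketch of the same sequence outside DirectShow, assuming a current FFmpeg (where AV_INPUT_BUFFER_PADDING_SIZE has replaced the FF_INPUT_BUFFER_PADDING_SIZE constant used above) and with placeholder names throughout, it might look like this:

// Minimal libavcodec decoder bring-up mirroring the pattern above (sketch).
#include <libavcodec/avcodec.h>
#include <libavutil/mem.h>
#include <string.h>

static AVCodecContext *open_decoder(enum AVCodecID id, const uint8_t *side, int side_len)
{
  const AVCodec *codec = avcodec_find_decoder(id);
  if (!codec) return NULL;

  AVCodecContext *ctx = avcodec_alloc_context3(codec);
  if (!ctx) return NULL;

  if (side_len > 0) {
    // Extradata must be zero-padded; bitstream readers may read past the end.
    ctx->extradata = (uint8_t *)av_mallocz(side_len + AV_INPUT_BUFFER_PADDING_SIZE);
    if (!ctx->extradata) { avcodec_free_context(&ctx); return NULL; }
    memcpy(ctx->extradata, side, side_len);
    ctx->extradata_size = side_len;
  }

  if (avcodec_open2(ctx, codec, NULL) < 0) {
    avcodec_free_context(&ctx); // also frees the attached extradata
    return NULL;
  }
  return ctx;
}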
int try_format_size(V4wState *s, int format, int width, int height, GUID *pPinCategory)
{
	HRESULT hr=S_OK;
	IEnumPins *pEnum=0;
	ULONG ulFound;
	IPin *pPin;

	GUID guid_format;
	DWORD biCompression;
	DWORD biBitCount;

	// Verify input
	if (!s->m_pDeviceFilter)
		return -1;

	// Map the mediastreamer2 format to its DirectShow subtype, FOURCC and bit depth
	if (format == MS_YUV420P) {
		guid_format = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
		biCompression = MAKEFOURCC('I','4','2','0');
		biBitCount = 12;
	} else if (format == MS_YUYV) {
		guid_format = MEDIASUBTYPE_YUYV;
		biCompression = MAKEFOURCC('Y','U','Y','V');
		biBitCount = 16;
	} else if (format == MS_UYVY) {
		guid_format = MEDIASUBTYPE_UYVY;
		biCompression = MAKEFOURCC('U','Y','V','Y');
		biBitCount = 16;
	} else if (format == MS_RGB24) {
		guid_format = MEDIASUBTYPE_RGB24;
		biCompression = BI_RGB; // uncompressed RGB uses BI_RGB (0), not a FOURCC
		biBitCount = 24;
	} else if (format == MS_YUY2) {
		guid_format = MEDIASUBTYPE_YUY2;
		biCompression = MAKEFOURCC('Y','U','Y','2');
		biBitCount = 16;
	} else {
		return -1; // unknown format: the locals above would stay uninitialized
	}

	// Get pin enumerator
	hr = s->m_pDeviceFilter->EnumPins(&pEnum);
	if(FAILED(hr)) 
		return -1;

	pEnum->Reset();

	// Walk every pin on the filter
	while(S_OK == pEnum->Next(1, &pPin, &ulFound))
	{
		PIN_DIRECTION pindir = (PIN_DIRECTION) 3;	// invalid until QueryDirection fills it in

		hr = pPin->QueryDirection(&pindir);

		// Only output (capture) pins are of interest
		if(pindir != PINDIR_INPUT)
		{
			IEnumMediaTypes *ppEnum;
			ULONG ulFound2;
			hr = pPin->EnumMediaTypes(&ppEnum);
			if(FAILED(hr))
			{
				pPin->Release();
				continue;
			}

			GUID pCurrentPinCategory;
			GetPinCategory(pPin, &pCurrentPinCategory);
			if (*pPinCategory != pCurrentPinCategory)
			{
				ppEnum->Release();
				pPin->Release();
				continue;
			}

			AM_MEDIA_TYPE *ppMediaTypes;
			while(S_OK == ppEnum->Next(1, &ppMediaTypes, &ulFound2))
			{
				VIDEOINFO *pvi = (VIDEOINFO *)ppMediaTypes->pbFormat;
				if (ppMediaTypes->formattype == FORMAT_VideoInfo
					&& ppMediaTypes->majortype == MEDIATYPE_Video
					&& ppMediaTypes->subtype == guid_format
					&& pvi->bmiHeader.biCompression == biCompression
					&& pvi->bmiHeader.biBitCount == biBitCount
					&& pvi->bmiHeader.biHeight == height
					&& pvi->bmiHeader.biWidth == width)
				{
					s->vsize.width = width;
					s->vsize.height = height;

					// Release everything the enumerators handed out before returning
					DeleteMediaType(ppMediaTypes);
					ppEnum->Release();
					pPin->Release();
					pEnum->Release();
					return 0;
				}
				DeleteMediaType(ppMediaTypes);
			}
			ppEnum->Release();
		}

		pPin->Release();
	}

	pEnum->Release();
	return -1;
}
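The GetPinCategory() helper called above is not shown; the conventional DirectShow implementation queries the pin for IKsPropertySet and reads AMPROPERTY_PIN_CATEGORY. A sketch, assuming the pin exposes the standard property set (this is an illustration, not the project's actual helper):

// Likely shape of GetPinCategory: fetch the pin's category GUID
// (PIN_CATEGORY_CAPTURE, PIN_CATEGORY_PREVIEW, ...) via IKsPropertySet.
HRESULT GetPinCategory(IPin *pPin, GUID *pPinCategory)
{
	IKsPropertySet *pKs = NULL;
	HRESULT hr = pPin->QueryInterface(IID_IKsPropertySet, (void **)&pKs);
	if (FAILED(hr))
		return hr; // pin does not expose pin properties

	DWORD cbReturned = 0;
	hr = pKs->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY,
	              NULL, 0, pPinCategory, sizeof(GUID), &cbReturned);
	pKs->Release();
	return hr;
}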
Example #17
0
HRESULT CNutSplitterFilter::CreateOutputs(IAsyncReader* pAsyncReader)
{
	CheckPointer(pAsyncReader, E_POINTER);

	HRESULT hr = E_FAIL;

	m_pFile.Free();

	m_pFile.Attach(new CNutFile(pAsyncReader, hr));
	if(!m_pFile) return E_OUTOFMEMORY;
	if(FAILED(hr)) {m_pFile.Free(); return hr;}

	m_rtNewStart = m_rtCurrent = 0;
	m_rtNewStop = m_rtStop = m_rtDuration = 0;

	// pins

	POSITION pos = m_pFile->m_streams.GetHeadPosition();
	while(pos)
	{
		CNutFile::stream_header* sh = m_pFile->m_streams.GetNext(pos);

		CAtlArray<CMediaType> mts;
		CMediaType mt;

		if(sh->stream_class == CNutFile::SC_VIDEO)
		{
			CNutFile::video_stream_header& vsh = sh->vsh;

			mt.majortype = MEDIATYPE_Video;
			mt.subtype = FOURCCMap(*(DWORD*)sh->fourcc.GetData());
			mt.formattype = FORMAT_VideoInfo;

			VIDEOINFOHEADER vih;
			memset(&vih, 0, sizeof(vih));
			vih.dwBitRate = (DWORD)sh->average_bitrate;
			vih.AvgTimePerFrame = 10000000i64 * sh->time_base_nom / sh->time_base_denom;
			vih.bmiHeader.biSize = sizeof(vih.bmiHeader);
			vih.bmiHeader.biCompression = mt.subtype.Data1;
			vih.bmiHeader.biWidth = (LONG)vsh.width;
			vih.bmiHeader.biHeight = (LONG)vsh.height;
			mt.SetFormat((BYTE*)&vih, sizeof(vih));

			mts.Add(mt);

			if(vsh.sample_width && vsh.sample_height)
			{
				VIDEOINFOHEADER2 vih2;
				memset(&vih2, 0, sizeof(vih2));
				vih2.dwBitRate = vih.dwBitRate;
				vih2.AvgTimePerFrame = vih.AvgTimePerFrame;
				vih2.dwPictAspectRatioX = (DWORD)vsh.sample_width;
				vih2.dwPictAspectRatioY = (DWORD)vsh.sample_height;
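				// Note: VIDEOINFOHEADER2 carries the *picture* (display) aspect ratio in
				// dwPictAspectRatioX/Y; if NUT's sample_width/sample_height are a per-sample
				// aspect ratio, they would strictly need scaling by the frame dimensions first.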
				vih2.bmiHeader = vih.bmiHeader;
				mt.SetFormat((BYTE*)&vih2, sizeof(vih2));

				mts.InsertAt(0, mt);
			}
		}
		else if(sh->stream_class == CNutFile::SC_AUDIO)
		{
			CNutFile::audio_stream_header& ash = sh->ash;
			// TODO
		}
		else if(sh->stream_class == CNutFile::SC_SUBTITLE)
		{
			// TODO
		}

		if(mts.GetCount() > 0)
		{
			CAutoPtr<CBaseSplitterOutputPin> pPinOut(new CBaseSplitterOutputPin(mts, L"Video", this, this, &hr));
			AddOutputPin((DWORD)sh->stream_id, pPinOut);
		}
	}

	// TODO
	m_rtNewStop = m_rtStop = m_rtDuration = 0;

	return m_pOutputs.GetCount() > 0 ? S_OK : E_FAIL;
}
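AvgTimePerFrame in the VIDEOINFOHEADER above is a REFERENCE_TIME, i.e. it counts 100-nanosecond units, which is why a frame duration of time_base_nom/time_base_denom seconds becomes 10000000i64 * nom / den. A tiny sketch of that conversion (FrameDurationToRefTime is an illustrative name, not part of the source; the 64-bit multiply runs before the divide to preserve precision):

// Frame duration of num/den seconds -> REFERENCE_TIME (100 ns units).
// Example: 25 fps has num = 1, den = 25, giving 400000 (40 ms per frame).
static REFERENCE_TIME FrameDurationToRefTime(LONGLONG num, LONGLONG den)
{
	if (den <= 0)
		return 0; // guard against a malformed time base
	return 10000000i64 * num / den;
}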