Example #1
/// Implements IAMStreamSelect::Info
/// Returns information about the audio stream at the given index
STDMETHODIMP CBDReaderFilter::Info(long lIndex, AM_MEDIA_TYPE**ppmt, DWORD* pdwFlags, LCID* plcid, DWORD* pdwGroup, WCHAR** ppszName, IUnknown** ppObject, IUnknown** ppUnk)
{
  if (pdwFlags)
  {
    int audioIndex = 0;
    m_demultiplexer.GetAudioStream(audioIndex);

    //if (m_demultiplexer.GetAudioStream()==(int)lIndex)
    if (audioIndex == (int)lIndex)
      *pdwFlags = AMSTREAMSELECTINFO_EXCLUSIVE;
    else
      *pdwFlags = 0;
  }
  if (plcid) *plcid = 0;
  if (pdwGroup) *pdwGroup = m_demultiplexer.GetAudioStreamType((int)lIndex); //*pdwGroup = 1;
  if (ppObject) *ppObject = NULL;
  if (ppUnk) *ppUnk = NULL;
  if (ppszName)
  {
    char szName[40];
    m_demultiplexer.GetAudioStreamInfo((int)lIndex, szName);
    // allocate room for the widened name (40 WCHARs), not just 20 bytes
    *ppszName = (WCHAR *)CoTaskMemAlloc(sizeof(szName) * sizeof(WCHAR));
    if (!*ppszName) return E_OUTOFMEMORY;
    MultiByteToWideChar(CP_ACP, 0, szName, -1, *ppszName, sizeof(szName));
  }
  if (ppmt)
  {
    CMediaType mediaType;
    m_demultiplexer.GetAudioStreamPMT(mediaType);
    AM_MEDIA_TYPE* mType = (AM_MEDIA_TYPE*)(&mediaType);
    *ppmt = (AM_MEDIA_TYPE*)CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE));
    if (*ppmt)
    {
      memcpy(*ppmt, mType, sizeof(AM_MEDIA_TYPE));
      (*ppmt)->pbFormat = (BYTE*)CoTaskMemAlloc(mediaType.FormatLength());
      if (!(*ppmt)->pbFormat)
      {
        CoTaskMemFree(*ppmt);
        *ppmt = NULL;
        return E_OUTOFMEMORY;
      }
      memcpy((*ppmt)->pbFormat, mType->pbFormat, mediaType.FormatLength());
    }
    else
      return E_OUTOFMEMORY;   // allocation of the AM_MEDIA_TYPE itself failed
  }
  return S_OK;
}
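For reference, a minimal caller-side sketch of driving this interface (ListAudioStreams and pFilter are hypothetical; Count/Info/Enable and the CoTaskMemFree/DeleteMediaType cleanup are the standard IAMStreamSelect contract):

#include <streams.h>    // DirectShow base classes: DeleteMediaType
#include <atlcomcli.h>  // CComQIPtr

void ListAudioStreams(IBaseFilter* pFilter)
{
  CComQIPtr<IAMStreamSelect> pSS = pFilter;
  if (!pSS) return;

  DWORD count = 0;
  if (FAILED(pSS->Count(&count))) return;

  for (DWORD i = 0; i < count; i++)
  {
    AM_MEDIA_TYPE* pmt = NULL;
    DWORD dwFlags = 0, dwGroup = 0;
    WCHAR* pszName = NULL;
    if (SUCCEEDED(pSS->Info((long)i, &pmt, &dwFlags, NULL, &dwGroup, &pszName, NULL, NULL)))
    {
      // dwFlags == AMSTREAMSELECTINFO_EXCLUSIVE marks the currently selected stream;
      // switching would be pSS->Enable((long)i, AMSTREAMSELECTENABLE_ENABLE);
      if (pszName) CoTaskMemFree(pszName);  // Info() allocated the name with CoTaskMemAlloc
      if (pmt) DeleteMediaType(pmt);        // frees pbFormat and the AM_MEDIA_TYPE itself
    }
  }
}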
Example #2
bool CDSMSplitterFile::Read(__int64 len, BYTE& id, CMediaType& mt)
{
	id = (BYTE)BitRead(8);
	ByteRead((BYTE*)&mt.majortype, sizeof(mt.majortype));
	ByteRead((BYTE*)&mt.subtype, sizeof(mt.subtype));
	mt.bFixedSizeSamples = (BOOL)BitRead(1);
	mt.bTemporalCompression = (BOOL)BitRead(1);
	mt.lSampleSize = (ULONG)BitRead(30);
	ByteRead((BYTE*)&mt.formattype, sizeof(mt.formattype));
	len -= 5 + sizeof(GUID)*3;
	ASSERT(len >= 0);
	if(len > 0) {mt.AllocFormatBuffer((LONG)len); ByteRead(mt.Format(), mt.FormatLength());}
	else mt.ResetFormatBuffer();	
	return true;
}
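The fixed part of the record read above works out to 53 bytes; a layout sketch (the constant name is hypothetical, and BitRead() is assumed to pack bits MSB-first as in the MPC-HC splitter base classes):

// DSM media-type record layout, as consumed by Read():
//   BYTE  id                                                            1 byte
//   GUID  majortype                                                    16 bytes
//   GUID  subtype                                                      16 bytes
//   bits  bFixedSizeSamples(1) bTemporalCompression(1) lSampleSize(30)  4 bytes
//   GUID  formattype                                                   16 bytes
//   BYTE  format block                                           len - 53 bytes
static const __int64 DSM_MT_FIXED_SIZE = 5 + sizeof(GUID) * 3;   // = 53, matches "len -= ..." above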
Example #3
void CBaseDemuxer::CreatePGSForcedSubtitleStream()
{
  stream s;
  s.pid = FORCED_SUBTITLE_PID;
  s.streamInfo = new CStreamInfo();
  s.language = "und";
  // Create the media type
  CMediaType mtype;
  mtype.majortype = MEDIATYPE_Subtitle;
  mtype.subtype = MEDIASUBTYPE_HDMVSUB;
  mtype.formattype = FORMAT_SubtitleInfo;
  SUBTITLEINFO *subInfo = (SUBTITLEINFO *)mtype.AllocFormatBuffer(sizeof(SUBTITLEINFO));
  memset(subInfo, 0, mtype.FormatLength());
  wcscpy_s(subInfo->TrackName, FORCED_SUB_STRING);
  subInfo->dwOffset = sizeof(SUBTITLEINFO);
  s.streamInfo->mtypes.push_back(mtype);
  // Append it to the list
  m_streams[subpic].push_back(s);
}
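A consumer-side sketch of reading such a subtitle media type back (DumpSubtitleInfo is hypothetical; the SUBTITLEINFO layout with dwOffset, IsoLang and TrackName is the common splitter definition assumed here):

#include <cstdio>

void DumpSubtitleInfo(const CMediaType& mtype)
{
  if (mtype.formattype != FORMAT_SubtitleInfo || mtype.FormatLength() < sizeof(SUBTITLEINFO))
    return;

  const SUBTITLEINFO* si = (const SUBTITLEINFO*)mtype.Format();
  // dwOffset points to codec private data appended after the struct
  // (none is appended in CreatePGSForcedSubtitleStream above).
  ULONG extra = mtype.FormatLength() - si->dwOffset;
  wprintf(L"lang=%S  name=%s  extra=%lu bytes\n", si->IsoLang, si->TrackName, (unsigned long)extra);
}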
Example #4
bool mt2spk(CMediaType mt, Speakers &spk)
{
  const GUID type = *mt.Type();
  const GUID subtype = *mt.Subtype();
  const GUID formattype = *mt.FormatType();

  WAVEFORMAT *wf = 0;
  size_t wf_size = 0;
  int sample_rate = 0;

  if ((formattype == FORMAT_WaveFormatEx) &&
      (mt.FormatLength() > sizeof(WAVEFORMAT)))
  {
    wf = (WAVEFORMAT *)mt.Format();
    wf_size = mt.FormatLength();
    sample_rate = wf->nSamplesPerSec;
  }

  /////////////////////////////////////////////////////////
  // HD LPCM

  if (type == MEDIATYPE_Audio &&
      subtype == MEDIASUBTYPE_HDMV_LPCM_AUDIO &&
      wf && wf->wFormatTag == 1)
  {
    spk = wf2spk(wf, wf_size);
    switch (spk.format)
    {
      case FORMAT_PCM16: spk.format = FORMAT_PCM16_BE; return true;
      case FORMAT_PCM24: spk.format = FORMAT_PCM24_BE; return true;
      case FORMAT_PCM32: spk.format = FORMAT_PCM32_BE; return true;
      default: return false;
    }
  }

  /////////////////////////////////////////////////////////
  // Compressed formats

  if (type == MEDIATYPE_MPEG2_PES ||
      type == MEDIATYPE_MPEG2_PACK ||
      type == MEDIATYPE_DVD_ENCRYPTED_PACK)
    if (subtype == MEDIASUBTYPE_DOLBY_AC3 ||
        subtype == MEDIASUBTYPE_DTS ||
        subtype == MEDIASUBTYPE_MPEG1AudioPayload ||
        subtype == MEDIASUBTYPE_MPEG2_AUDIO ||
        subtype == MEDIASUBTYPE_DVD_LPCM_AUDIO)
    {
      spk = Speakers(FORMAT_PES, 0, sample_rate);
      return true;
    }

  if (subtype == MEDIASUBTYPE_DOLBY_AC3 || 
      subtype == MEDIASUBTYPE_AVI_AC3)
  {
    // It may be AC3 or EAC3
    spk = Speakers(FORMAT_DOLBY, 0, sample_rate);
    return true;
  }

  if (subtype == MEDIASUBTYPE_DOLBY_DDPLUS)
  {
    spk = Speakers(FORMAT_EAC3, 0, sample_rate);
    return true;
  }

  if (subtype == MEDIASUBTYPE_DOLBY_TRUEHD)
  {
    spk = Speakers(FORMAT_TRUEHD, 0, sample_rate);
    return true;
  }

  if (subtype == MEDIASUBTYPE_DTS || 
      subtype == MEDIASUBTYPE_DTS_HD ||
      subtype == MEDIASUBTYPE_AVI_DTS)
  {
    spk = Speakers(FORMAT_DTS, 0, sample_rate);
    return true;
  }

  if (subtype == MEDIASUBTYPE_MPEG1AudioPayload ||
      subtype == MEDIASUBTYPE_MPEG2_AUDIO)
  {
    spk = Speakers(FORMAT_MPA, 0, sample_rate);
    return true;
  }

  if (subtype == MEDIASUBTYPE_DOLBY_AC3_SPDIF)
  {
    spk = Speakers(FORMAT_SPDIF, 0, sample_rate);
    return true;
  }
/*
  if (subtype == MEDIASUBTYPE_Vorbis &&
      formattype == FORMAT_Vorbis && 
      mt.FormatLength() > sizeof(VORBISFORMAT))
  {
    VORBISFORMAT *format = (VORBISFORMAT *)mt.Format();
    spk = Speakers(FORMAT_VORBIS, 0, format->samplesPerSec);
    spk.set_format_data(mt.Format(), mt.FormatLength());
  }
*/
  if (subtype == MEDIASUBTYPE_Vorbis2 &&
      formattype == FORMAT_Vorbis2 &&
      mt.FormatLength() > sizeof(VORBISFORMAT2))
  {
    VORBISFORMAT2 *format = (VORBISFORMAT2 *)mt.Format();
    spk = Speakers(FORMAT_VORBIS, 0, format->samplesPerSec);
    spk.set_format_data(mt.Format(), mt.FormatLength());
    return true;
  }

  /////////////////////////////////////////////////////////
  // LPCM

  if (subtype == MEDIASUBTYPE_DVD_LPCM_AUDIO)
  {
    PCMWAVEFORMAT *pcmwf = wf_cast<PCMWAVEFORMAT>(wf, wf_size);
    if (!pcmwf) return false;

    int format, mode;
    switch (pcmwf->wBitsPerSample)
    {
      case 16: format = FORMAT_PCM16_BE; break;
      case 20: format = FORMAT_LPCM20; break;
      case 24: format = FORMAT_LPCM24; break;
      default: return false;
    }
    switch (pcmwf->wf.nChannels)
    {
      case 1: mode = MODE_MONO; break;
      case 2: mode = MODE_STEREO; break;
      default: return false;
    }
    spk = Speakers(format, mode, sample_rate);
    return true;
  }

  /////////////////////////////////////////////////////////
  // General WAVEFORMAT conversion

  spk = Speakers();
  if (wf)
    spk = wf2spk(wf, wf_size);
  return !spk.is_unknown();
}
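The LPCM branch above relies on a wf_cast<> helper that is not shown; presumably it only validates the buffer size before the cast, roughly along these lines (a sketch, not the actual implementation):

template <typename T>
T* wf_cast(WAVEFORMAT* wf, size_t wf_size)
{
  // Reinterpret the WAVEFORMAT block as T only when the format buffer is large enough.
  if (!wf || wf_size < sizeof(T))
    return 0;
  return reinterpret_cast<T*>(wf);
}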
Example #5
HRESULT CMatroskaSplitterFilter::CreateOutputs(IAsyncReader* pAsyncReader)
{
	CheckPointer(pAsyncReader, E_POINTER);

	HRESULT hr = E_FAIL;

	m_pFile.Free();
	m_pTrackEntryMap.RemoveAll();
	m_pOrderedTrackArray.RemoveAll();

	CAtlArray<CMatroskaSplitterOutputPin*> pinOut;
	CAtlArray<TrackEntry*> pinOutTE;

	m_pFile.Attach(DNew CMatroskaFile(pAsyncReader, hr));
	if(!m_pFile) return E_OUTOFMEMORY;
	if(FAILED(hr)) {m_pFile.Free(); return hr;}

	m_rtNewStart = m_rtCurrent = 0;
	m_rtNewStop = m_rtStop = m_rtDuration = 0;

	int iVideo = 1, iAudio = 1, iSubtitle = 1;

	POSITION pos = m_pFile->m_segment.Tracks.GetHeadPosition();
	while(pos)
	{
		Track* pT = m_pFile->m_segment.Tracks.GetNext(pos);

		POSITION pos2 = pT->TrackEntries.GetHeadPosition();
		while(pos2)
		{
			TrackEntry* pTE = pT->TrackEntries.GetNext(pos2);

			bool isSub = false;

			if(!pTE->Expand(pTE->CodecPrivate, ContentEncoding::TracksPrivateData))
				continue;

			CStringA CodecID = pTE->CodecID.ToString();

			CStringW Name;
			Name.Format(L"Output %I64d", (UINT64)pTE->TrackNumber);

			CMediaType mt;
			CAtlArray<CMediaType> mts;

			mt.SetSampleSize(1);

			if(pTE->TrackType == TrackEntry::TypeVideo)
			{
				Name.Format(L"Video %d", iVideo++);

				mt.majortype = MEDIATYPE_Video;

				if(CodecID == "V_MS/VFW/FOURCC")
				{
					mt.formattype = FORMAT_VideoInfo;
					VIDEOINFOHEADER* pvih = (VIDEOINFOHEADER*)mt.AllocFormatBuffer(sizeof(VIDEOINFOHEADER) + pTE->CodecPrivate.GetCount() - sizeof(BITMAPINFOHEADER));
					memset(mt.Format(), 0, mt.FormatLength());
					memcpy(&pvih->bmiHeader, pTE->CodecPrivate.GetData(), pTE->CodecPrivate.GetCount());
					mt.subtype = FOURCCMap(pvih->bmiHeader.biCompression);
					switch(pvih->bmiHeader.biCompression)
					{
					case BI_RGB: case BI_BITFIELDS: mt.subtype = 
						pvih->bmiHeader.biBitCount == 1 ? MEDIASUBTYPE_RGB1 :
						pvih->bmiHeader.biBitCount == 4 ? MEDIASUBTYPE_RGB4 :
						pvih->bmiHeader.biBitCount == 8 ? MEDIASUBTYPE_RGB8 :
						pvih->bmiHeader.biBitCount == 16 ? MEDIASUBTYPE_RGB565 :
						pvih->bmiHeader.biBitCount == 24 ? MEDIASUBTYPE_RGB24 :
						pvih->bmiHeader.biBitCount == 32 ? MEDIASUBTYPE_ARGB32 :
						MEDIASUBTYPE_NULL;
						break;
//					case BI_RLE8: mt.subtype = MEDIASUBTYPE_RGB8; break;
//					case BI_RLE4: mt.subtype = MEDIASUBTYPE_RGB4; break;
					}
					mts.Add(mt);
				}
				else if(CodecID == "V_UNCOMPRESSED")
				{
				}
				else if(CodecID.Find("V_MPEG4/ISO/AVC") == 0 && pTE->CodecPrivate.GetCount() >= 6)
				{
					BYTE sps = pTE->CodecPrivate[5] & 0x1f; // number of SPS NAL units in the avcC record

					// Flatten the length-prefixed SPS/PPS sets of the AVCDecoderConfigurationRecord
					// (CodecPrivate) into a single blob for MPEG2VIDEOINFO::dwSequenceHeader.
					std::vector<BYTE> avcC;
					for(int i = 0, j = pTE->CodecPrivate.GetCount(); i < j; i++)
						avcC.push_back(pTE->CodecPrivate[i]);

					std::vector<BYTE> sh;

					unsigned jj = 6;

					while (sps--) {
						if (jj + 2 > avcC.size())
							goto avcfail;
						unsigned spslen = ((unsigned)avcC[jj] << 8) | avcC[jj+1];
						if (jj + 2 + spslen > avcC.size())
							goto avcfail;
						unsigned cur = sh.size();
						sh.resize(cur + spslen + 2, 0);
						std::copy(avcC.begin() + jj, avcC.begin() + jj + 2 + spslen, sh.begin() + cur);
						jj += 2 + spslen;
					}

					if (jj + 1 > avcC.size())
						continue;

					unsigned pps = avcC[jj++];

					while (pps--) {
						if (jj + 2 > avcC.size())
							goto avcfail;
						unsigned ppslen = ((unsigned)avcC[jj] << 8) | avcC[jj+1];
						if (jj + 2 + ppslen > avcC.size())
							goto avcfail;
						unsigned cur = sh.size();
						sh.resize(cur + ppslen + 2, 0);
						std::copy(avcC.begin() + jj, avcC.begin() + jj + 2 + ppslen, sh.begin() + cur);
						jj += 2 + ppslen;
					}

					goto avcsuccess;
avcfail:
					continue;
avcsuccess:

					CAtlArray<BYTE> data;
					data.SetCount(sh.size());
					std::copy(sh.begin(), sh.end(), data.GetData());

					mt.subtype = FOURCCMap('1CVA');
					mt.formattype = FORMAT_MPEG2Video;
					MPEG2VIDEOINFO* pm2vi = (MPEG2VIDEOINFO*)mt.AllocFormatBuffer(FIELD_OFFSET(MPEG2VIDEOINFO, dwSequenceHeader) + data.GetCount());
					memset(mt.Format(), 0, mt.FormatLength());
					pm2vi->hdr.bmiHeader.biSize = sizeof(pm2vi->hdr.bmiHeader);
					pm2vi->hdr.bmiHeader.biWidth = (LONG)pTE->v.PixelWidth;
					pm2vi->hdr.bmiHeader.biHeight = (LONG)pTE->v.PixelHeight;
					pm2vi->hdr.bmiHeader.biCompression = '1CVA';
					pm2vi->hdr.bmiHeader.biPlanes = 1;
					pm2vi->hdr.bmiHeader.biBitCount = 24;
					pm2vi->dwProfile = pTE->CodecPrivate[1];
					pm2vi->dwLevel = pTE->CodecPrivate[3];
					pm2vi->dwFlags = (pTE->CodecPrivate[4] & 3) + 1;
					BYTE* pSequenceHeader = (BYTE*)pm2vi->dwSequenceHeader;
					memcpy(pSequenceHeader, data.GetData(), data.GetCount());
					pm2vi->cbSequenceHeader = data.GetCount();
					mts.Add(mt);
				}
				else if(CodecID.Find("V_MPEG4/") == 0)
				{
					mt.subtype = FOURCCMap('V4PM');
					mt.formattype = FORMAT_MPEG2Video;
					MPEG2VIDEOINFO* pm2vi = (MPEG2VIDEOINFO*)mt.AllocFormatBuffer(FIELD_OFFSET(MPEG2VIDEOINFO, dwSequenceHeader) + pTE->CodecPrivate.GetCount());
					memset(mt.Format(), 0, mt.FormatLength());
					pm2vi->hdr.bmiHeader.biSize = sizeof(pm2vi->hdr.bmiHeader);
					pm2vi->hdr.bmiHeader.biWidth = (LONG)pTE->v.PixelWidth;
					pm2vi->hdr.bmiHeader.biHeight = (LONG)pTE->v.PixelHeight;
					pm2vi->hdr.bmiHeader.biCompression = 'V4PM';
					pm2vi->hdr.bmiHeader.biPlanes = 1;
					pm2vi->hdr.bmiHeader.biBitCount = 24;
					BYTE* pSequenceHeader = (BYTE*)pm2vi->dwSequenceHeader;
					memcpy(pSequenceHeader, pTE->CodecPrivate.GetData(), pTE->CodecPrivate.GetCount());
					pm2vi->cbSequenceHeader = pTE->CodecPrivate.GetCount();
					mts.Add(mt);
				}
				else if(CodecID.Find("V_REAL/RV") == 0)
				{
					mt.subtype = FOURCCMap('00VR' + ((CodecID[9]-0x30)<<16));
					mt.formattype = FORMAT_VideoInfo;
					VIDEOINFOHEADER* pvih = (VIDEOINFOHEADER*)mt.AllocFormatBuffer(sizeof(VIDEOINFOHEADER) + pTE->CodecPrivate.GetCount());
					memset(mt.Format(), 0, mt.FormatLength());
					memcpy(mt.Format() + sizeof(VIDEOINFOHEADER), pTE->CodecPrivate.GetData(), pTE->CodecPrivate.GetCount());
					pvih->bmiHeader.biSize = sizeof(pvih->bmiHeader);
					pvih->bmiHeader.biWidth = (LONG)pTE->v.PixelWidth;
					pvih->bmiHeader.biHeight = (LONG)pTE->v.PixelHeight;
					pvih->bmiHeader.biCompression = mt.subtype.Data1;
					mts.Add(mt);
				}
				else if(CodecID == "V_DIRAC")
				{
					mt.subtype = MEDIASUBTYPE_DiracVideo;
					mt.formattype = FORMAT_DiracVideoInfo;
					DIRACINFOHEADER* dvih = (DIRACINFOHEADER*)mt.AllocFormatBuffer(FIELD_OFFSET(DIRACINFOHEADER, dwSequenceHeader) + pTE->CodecPrivate.GetCount());
					memset(mt.Format(), 0, mt.FormatLength());
					dvih->hdr.bmiHeader.biSize = sizeof(dvih->hdr.bmiHeader);
					dvih->hdr.bmiHeader.biWidth = (LONG)pTE->v.PixelWidth;
					dvih->hdr.bmiHeader.biHeight = (LONG)pTE->v.PixelHeight;
					dvih->hdr.dwPictAspectRatioX = dvih->hdr.bmiHeader.biWidth;
					dvih->hdr.dwPictAspectRatioY = dvih->hdr.bmiHeader.biHeight;

					BYTE* pSequenceHeader = (BYTE*)dvih->dwSequenceHeader;
					memcpy(pSequenceHeader, pTE->CodecPrivate.GetData(), pTE->CodecPrivate.GetCount());
					dvih->cbSequenceHeader = pTE->CodecPrivate.GetCount();

					mts.Add(mt);
				}
				else if(CodecID == "V_MPEG2")
				{
					BYTE* seqhdr = pTE->CodecPrivate.GetData();
					DWORD len = pTE->CodecPrivate.GetCount();
					int w = pTE->v.PixelWidth;
					int h = pTE->v.PixelHeight;

					if(MakeMPEG2MediaType(mt, seqhdr, len, w, h))
						mts.Add(mt);
				}
				else if(CodecID == "V_THEORA")
				{
					BYTE* thdr = pTE->CodecPrivate.GetData() + 3;

					mt.majortype		= MEDIATYPE_Video;
					mt.subtype			= FOURCCMap('OEHT');
					mt.formattype		= FORMAT_MPEG2_VIDEO;
					MPEG2VIDEOINFO* vih = (MPEG2VIDEOINFO*)mt.AllocFormatBuffer(sizeof(MPEG2VIDEOINFO) + pTE->CodecPrivate.GetCount());
					memset(mt.Format(), 0, mt.FormatLength());
					vih->hdr.bmiHeader.biSize		 = sizeof(vih->hdr.bmiHeader);
					vih->hdr.bmiHeader.biWidth		 = *(WORD*)&thdr[10] >> 4;
					vih->hdr.bmiHeader.biHeight		 = *(WORD*)&thdr[12] >> 4;
					vih->hdr.bmiHeader.biCompression = 'OEHT';
					vih->hdr.bmiHeader.biPlanes		 = 1;
					vih->hdr.bmiHeader.biBitCount	 = 24;
					int nFpsNum	= (thdr[22]<<24)|(thdr[23]<<16)|(thdr[24]<<8)|thdr[25];
					int nFpsDenum	= (thdr[26]<<24)|(thdr[27]<<16)|(thdr[28]<<8)|thdr[29];
					if(nFpsNum) vih->hdr.AvgTimePerFrame = (REFERENCE_TIME)(10000000.0 * nFpsDenum / nFpsNum);
					vih->hdr.dwPictAspectRatioX = (thdr[14]<<16)|(thdr[15]<<8)|thdr[16];
					vih->hdr.dwPictAspectRatioY = (thdr[17]<<16)|(thdr[18]<<8)|thdr[19];
					mt.bFixedSizeSamples = 0;

					vih->cbSequenceHeader = pTE->CodecPrivate.GetCount();
					memcpy (&vih->dwSequenceHeader, pTE->CodecPrivate.GetData(), vih->cbSequenceHeader);

					mts.Add(mt);
				}
				else if(CodecID.Find("V_VP8") == 0) 
				{ 
					mt.subtype = FOURCCMap('08PV'); 
					mt.formattype = FORMAT_VideoInfo; 
					VIDEOINFOHEADER* pvih = (VIDEOINFOHEADER*)mt.AllocFormatBuffer(sizeof(VIDEOINFOHEADER) + pTE->CodecPrivate.GetCount()); 
					memset(mt.Format(), 0, mt.FormatLength()); 
					memcpy(mt.Format() + sizeof(VIDEOINFOHEADER), pTE->CodecPrivate.GetData(), pTE->CodecPrivate.GetCount()); 
					pvih->bmiHeader.biSize = sizeof(pvih->bmiHeader); 
					pvih->bmiHeader.biWidth = (LONG)pTE->v.PixelWidth; 
					pvih->bmiHeader.biHeight = (LONG)pTE->v.PixelHeight; 
					pvih->bmiHeader.biCompression = mt.subtype.Data1; 
					mts.Add(mt); 
				} 
/*
				else if(CodecID == "V_DSHOW/MPEG1VIDEO") // V_MPEG1
				{
					mt.majortype = MEDIATYPE_Video;
					mt.subtype = MEDIASUBTYPE_MPEG1Payload;
					mt.formattype = FORMAT_MPEGVideo;
					MPEG1VIDEOINFO* pm1vi = (MPEG1VIDEOINFO*)mt.AllocFormatBuffer(pTE->CodecPrivate.GetCount());
					memcpy(pm1vi, pTE->CodecPrivate.GetData(), pTE->CodecPrivate.GetCount());
					mt.SetSampleSize(pm1vi->hdr.bmiHeader.biWidth*pm1vi->hdr.bmiHeader.biHeight*4);
					mts.Add(mt);
				}
*/
				REFERENCE_TIME AvgTimePerFrame = 0;

				if(pTE->v.FramePerSec > 0)
					AvgTimePerFrame = (REFERENCE_TIME)(10000000i64 / pTE->v.FramePerSec);
				else if(pTE->DefaultDuration > 0)
					AvgTimePerFrame = (REFERENCE_TIME)pTE->DefaultDuration / 100;

				if(AvgTimePerFrame)
				{
					for(int i = 0; i < mts.GetCount(); i++)
					{
						if(mts[i].formattype == FORMAT_VideoInfo
						|| mts[i].formattype == FORMAT_VideoInfo2
						|| mts[i].formattype == FORMAT_MPEG2Video)
						{
							((VIDEOINFOHEADER*)mts[i].Format())->AvgTimePerFrame = AvgTimePerFrame;
						}
					}
				}

				if(pTE->v.DisplayWidth != 0 && pTE->v.DisplayHeight != 0)
				{
					for(int i = 0; i < mts.GetCount(); i++)
					{
						if(mts[i].formattype == FORMAT_VideoInfo)
						{
							DWORD vih1 = FIELD_OFFSET(VIDEOINFOHEADER, bmiHeader);
							DWORD vih2 = FIELD_OFFSET(VIDEOINFOHEADER2, bmiHeader);
							DWORD bmi = mts[i].FormatLength() - FIELD_OFFSET(VIDEOINFOHEADER, bmiHeader);
							mt.formattype = FORMAT_VideoInfo2;
							mt.AllocFormatBuffer(vih2 + bmi);
							memcpy(mt.Format(), mts[i].Format(), vih1);
							memset(mt.Format() + vih1, 0, vih2 - vih1);
							memcpy(mt.Format() + vih2, mts[i].Format() + vih1, bmi);
							((VIDEOINFOHEADER2*)mt.Format())->dwPictAspectRatioX = (DWORD)pTE->v.DisplayWidth;
							((VIDEOINFOHEADER2*)mt.Format())->dwPictAspectRatioY = (DWORD)pTE->v.DisplayHeight;
							mts.InsertAt(i++, mt);
						}
						else if(mts[i].formattype == FORMAT_MPEG2Video)
						{
							((MPEG2VIDEOINFO*)mts[i].Format())->hdr.dwPictAspectRatioX = (DWORD)pTE->v.DisplayWidth;
							((MPEG2VIDEOINFO*)mts[i].Format())->hdr.dwPictAspectRatioY = (DWORD)pTE->v.DisplayHeight;
						}
					}
				}
			}