Example #1
0
// Stores the application-selected decoder GUID for the given BluRay video
// stream type (H264 / VC1 / MPEG2) so later media type negotiation can use it.
// Invalid formats are logged and ignored.
void CVideoPin::SetVideoDecoder(int format, GUID* decoder)
{
  if (!decoder)
  {
    LogDebug("vid: SetVideoDecoder - NULL decoder GUID for format %d", format);
    return;
  }

  // Zero-initialize the whole media type: the previous code only cleared
  // cbFormat, leaving majortype/pbFormat/pUnk etc. uninitialized before the
  // struct was handed to LogMediaType().
  AM_MEDIA_TYPE tmp = {0};

  if (format == BLURAY_STREAM_TYPE_VIDEO_H264)
  {
    m_H264decoder = tmp.subtype = *decoder;
    LogDebug("vid: SetVideoDecoder for H264");
    LogMediaType(&tmp);
  }
  else if (format == BLURAY_STREAM_TYPE_VIDEO_VC1)
  {
    m_VC1decoder = tmp.subtype = *decoder;
    LogDebug("vid: SetVideoDecoder for VC1");
    LogMediaType(&tmp);
  }
  else if (format == BLURAY_STREAM_TYPE_VIDEO_MPEG2)
  {
    m_MPEG2decoder = tmp.subtype = *decoder;
    LogDebug("vid: SetVideoDecoder for MPEG2");
    LogMediaType(&tmp);
  }
  else
  {
    LogDebug("vid: SetVideoDecoder - trying to set a decoder for invalid format %d", format);
    return;
  }
}
Example #2
0
// Enumerates the native media types of the capture source's first stream and
// activates the first one accepted by selectMediaType(). Returns E_FAIL when
// no type matches; otherwise propagates the first failing/last HRESULT.
HRESULT CMFCamCapture::chooseCaptureFormats()
{
    IMFPresentationDescriptor *pPresentation = NULL;
    IMFStreamDescriptor *pStream = NULL;
    IMFMediaTypeHandler *pTypeHandler = NULL;
    IMFMediaType *pMediaType = NULL;
    DWORD idx;

    HRESULT hr = m_spSource->CreatePresentationDescriptor(&pPresentation);
    if (FAILED(hr)) goto done;

    BOOL fStreamSelected;
    hr = pPresentation->GetStreamDescriptorByIndex(0, &fStreamSelected, &pStream);
    if (FAILED(hr)) goto done;

    hr = pStream->GetMediaTypeHandler(&pTypeHandler);
    if (FAILED(hr)) goto done;

    DWORD typeCount = 0;
    hr = pTypeHandler->GetMediaTypeCount(&typeCount);
    if (FAILED(hr)) goto done;

    for (idx = 0; idx < typeCount; idx++) {
        hr = pTypeHandler->GetMediaTypeByIndex(idx, &pMediaType);
        if (FAILED(hr)) goto done;

        bool accepted = selectMediaType(m_spSource, pMediaType);
        if (accepted) {
            LogMediaType(pMediaType);
            OutputDebugString(L"\n");
        }
        // Release before (possibly) breaking out so the loop-local reference
        // never leaks into the cleanup path twice.
        SafeRelease(&pMediaType);
        if (accepted) {
            hr = SetDeviceFormat(m_spSource, idx);
            break;
        }
    }

    // Loop ran to completion without a match (also covers typeCount == 0).
    if (idx >= typeCount) {
        hr = E_FAIL;
    }

done:
    SafeRelease(&pPresentation);
    SafeRelease(&pStream);
    SafeRelease(&pTypeHandler);
    SafeRelease(&pMediaType);
    return hr;
}
Example #3
0
// Stores the subtype GUID used to override the VC-1 subtype during
// FillBuffer's dynamic format handling.
void CVideoPin::SetVC1Override(GUID* subtype)
{
  if (!subtype)
  {
    LogDebug("vid: SetVC1Override - NULL subtype GUID");
    return;
  }

  // Zero-initialize the whole media type: the previous code only cleared
  // cbFormat, so LogMediaType() saw uninitialized fields (including the
  // pbFormat pointer).
  AM_MEDIA_TYPE tmp = {0};
  m_VC1Override = tmp.subtype = *subtype;

  LogDebug("vid: SetVC1Override");
  LogMediaType(&tmp);
}
Example #4
0
// Dumps every native media type of the source's first stream to the debug
// output via LogMediaType(). Purely diagnostic; returns the first failing
// HRESULT, or the last enumeration result on success.
HRESULT EnumerateCaptureFormats(IMFMediaSource *pSource)
{
  IMFPresentationDescriptor *pPresentation = NULL;
  IMFStreamDescriptor *pStream = NULL;
  IMFMediaTypeHandler *pTypeHandler = NULL;
  IMFMediaType *pMediaType = NULL;

  HRESULT hr = pSource->CreatePresentationDescriptor(&pPresentation);
  if (FAILED(hr)) goto done;

  BOOL fStreamSelected;
  hr = pPresentation->GetStreamDescriptorByIndex(0, &fStreamSelected, &pStream);
  if (FAILED(hr)) goto done;

  hr = pStream->GetMediaTypeHandler(&pTypeHandler);
  if (FAILED(hr)) goto done;

  DWORD typeCount = 0;
  hr = pTypeHandler->GetMediaTypeCount(&typeCount);
  if (FAILED(hr)) goto done;

  for (DWORD idx = 0; idx < typeCount; idx++)
  {
    hr = pTypeHandler->GetMediaTypeByIndex(idx, &pMediaType);
    if (FAILED(hr)) goto done;

    LogMediaType(pMediaType);
    OutputDebugString(L"\n");

    SafeRelease(&pMediaType);
  }

done:
  // Single cleanup point; SafeRelease tolerates NULL pointers.
  SafeRelease(&pPresentation);
  SafeRelease(&pStream);
  SafeRelease(&pTypeHandler);
  SafeRelease(&pMediaType);
  return hr;
}
Example #5
0
// In-place "transform": extracts pixel data from the sample, converts it into
// the RGB capture buffer via the subtype-specific m_pfTransform member
// function, and hands the result to the AtmoLight live-view control.
// Always returns NOERROR so the sample continues downstream untouched.
HRESULT CAtmoLightFilter::Transform(IMediaSample *pSample)
{
    CheckPointer(pSample,E_POINTER);
    // Bail out until both the capture bitmap header and the pixel buffer
    // exist. BUGFIX: the original condition tested m_bitmap_header twice,
    // so a missing m_pixel_buffer slipped through to SafeArrayAccessData
    // below.
    if(!m_bitmap_header || !m_pixel_buffer)
        return NOERROR;

    BITMAPINFOHEADER *bmiHeader = NULL;
    RECT rect;

    // Handle a dynamic media type change attached to the sample.
    AM_MEDIA_TYPE *pMT;
    if((pSample->GetMediaType(&pMT) == S_OK) && pMT)
    {
       log("CAtmoLightFilter::Transform: MediaType changed!\n");

       CMediaType temp(*pMT, NULL);

       LogMediaType("From",&m_pInput->CurrentMediaType());
       LogMediaType("Changed to", &temp);

       m_pInput->SetMediaType( &temp );

       DeleteMediaType(pMT);
    }

    // Locate the BITMAPINFOHEADER inside either VIDEOINFO or VIDEOINFOHEADER2.
    if( IsEqualGUID( *m_pInput->CurrentMediaType().FormatType(), FORMAT_VideoInfo) ) 
    {
        VIDEOINFO* pVI = (VIDEOINFO*) m_pInput->CurrentMediaType( ).Format( );
        CheckPointer(pVI,E_UNEXPECTED);

        rect = pVI->rcSource;

        if(pVI->bmiHeader.biSize >= sizeof(BITMAPINFOHEADER)) 
           bmiHeader = &pVI->bmiHeader;
        else
           return NOERROR;  
    } else
    if( IsEqualGUID( *m_pInput->CurrentMediaType().FormatType(), FORMAT_VideoInfo2) ) 
    {
        VIDEOINFOHEADER2* pVI = (VIDEOINFOHEADER2*) m_pInput->CurrentMediaType( ).Format( );
        CheckPointer(pVI, E_UNEXPECTED);

        rect = pVI->rcSource;

        if(pVI->bmiHeader.biSize >= sizeof(BITMAPINFOHEADER))
           bmiHeader = &pVI->bmiHeader;
        else
           return NOERROR;  
    } else {
        log("Error: no Format_VideoInfo structure.");
        return NOERROR;
    }

    unsigned char *pBuffer = NULL;
    if( pSample->GetPointer( (LPBYTE*) &pBuffer ) != S_OK || !pBuffer )
        return NOERROR;

    m_FrameCounter++;

    // NOTE(review): OleInitialize/OleUninitialize per frame is expensive;
    // kept as-is because the live-view control's threading requirements are
    // not visible here.
    OleInitialize( NULL ); 

    // DWORD-aligned stride of one source scan line, in bytes.
    int stride = (((bmiHeader->biWidth * bmiHeader->biBitCount) + 31) & ~31) >> 3;

    //http://msdn.microsoft.com/en-us/library/aa904813(VS.80).aspx
    if( m_pfTransform )
    {
        IAtmoLiveViewControl *liveControl = getAtmoLiveViewControl();
        if(liveControl)
        {
            unsigned char *rgb_buffer;
            if(SafeArrayAccessData(m_pixel_buffer,(void **)&rgb_buffer) == S_OK)
            {
               // Convert the sample's pixels into the RGB capture buffer.
               (this->*m_pfTransform)( pSample, pBuffer, rect, bmiHeader, stride, rgb_buffer );

                // Optional debug dump: one bitmap every 25 frames.
                if(m_LogLevel>=2 && (m_FrameCounter % 25) == 0) 
                {
                   char fn[MAX_PATH];
                   // NOTE(review): sprintf could overflow fn for very long
                   // m_pszImageLogPath values — consider snprintf.
                   sprintf( fn, "%simg_%d.bmp", m_pszImageLogPath, m_FrameCounter/25+100000 );
                   SaveBitmap( fn, rgb_buffer, m_atmo_capture_width, m_atmo_capture_height);
                }

                SafeArrayUnaccessData( m_pixel_buffer );
                liveControl->setPixelData(m_bitmap_header, m_pixel_buffer);
            }

            liveControl->Release();
        }     
    }

    OleUninitialize();

    return NOERROR;
}
Example #6
0
// Accepts MEDIATYPE_Video input with a VideoInfo/VideoInfo2 format block and
// a pixel subtype the filter has a converter for; E_INVALIDARG otherwise.
HRESULT CAtmoLightFilter::CheckInputType(const CMediaType *mtIn)
{
    CheckPointer(mtIn,E_POINTER);

    log("CAtmoLightFilter::CheckInputType()\n");
    LogMediaType("CheckInputType", mtIn );

    GUID guid = *mtIn->Type();
    if( !IsEqualGUID(guid, MEDIATYPE_Video) )
    {
        log("CheckInputType: Mediatype is not MEDIATYPE_Video! ->" GUID_FMT "\n", GUID_PRINT(guid) );
        return E_INVALIDARG;
    }

    if( !IsEqualGUID( *mtIn->FormatType(), FORMAT_VideoInfo2) && !IsEqualGUID( *mtIn->FormatType(), FORMAT_VideoInfo) )
    {
       log("CheckInputType: FormatType is not VideoInfo2 or VideoInfo!\n");
       return E_INVALIDARG;
    }

    // Every pixel subtype that SetMediaType() can map to a transform routine.
    static const GUID* const acceptedSubtypes[] =
    {
        &MEDIASUBTYPE_YUY2, &MEDIASUBTYPE_YUYV,
        &MEDIASUBTYPE_Y411,
        &MEDIASUBTYPE_Y41P,
        &MEDIASUBTYPE_YVYU,
        &MEDIASUBTYPE_NV12,
        &MEDIASUBTYPE_NV21,
        &MEDIASUBTYPE_UYVY,
        &MEDIASUBTYPE_Y211,
        &MEDIASUBTYPE_CLJR,
        &MEDIASUBTYPE_IYUV, &MEDIASUBTYPE_I420,
        &MEDIASUBTYPE_YV12,    // YVU
        &MEDIASUBTYPE_YVU9,    // YVU9 I411 format
        &MEDIASUBTYPE_RGB565,  // RGB 16bit 5-6-5
        &MEDIASUBTYPE_RGB555,  // RGB 15bit 5-5-5
        &MEDIASUBTYPE_RGB24,   // RGB 24bit 8-8-8
        &MEDIASUBTYPE_RGB32    // RGB 24bit 8-8-8+Alpha?
    };

    GUID mediaSubType = *mtIn->Subtype();
    for (size_t i = 0; i < sizeof(acceptedSubtypes) / sizeof(acceptedSubtypes[0]); ++i)
    {
        if( IsEqualGUID( mediaSubType, *acceptedSubtypes[i] ) )
            return NOERROR;
    }

    return E_INVALIDARG;
}
Example #7
0
// Records the connected subtype and selects the matching colour-space
// converter for Transform(); unknown subtypes leave m_pfTransform NULL.
// Delegates to the base class afterwards.
HRESULT CAtmoLightFilter::SetMediaType( PIN_DIRECTION pindir, const CMediaType *pMediaType)
{
    CheckPointer(pMediaType,E_POINTER);

/*
    1B81BE68-A0C7-11D3-B984-00C04F2E73C5   dxva h264_E
    604F8E68-4951-4c54-88FE-ABD25C15B3D6   DXVADDI_Intel_ModeH264_E  
*/

    LogMediaType("SetMediaType", pMediaType );

    const GUID st = *pMediaType->Subtype();
    m_mediaSubType = st;

    // Default to "no converter"; the chain below overrides it for every
    // subtype CheckInputType() accepts.
    m_pfTransform = NULL;

    if( IsEqualGUID( st, MEDIASUBTYPE_YUY2 ) || IsEqualGUID( st, MEDIASUBTYPE_YUYV ) )
        m_pfTransform = &CAtmoLightFilter::Transform_Packed_YUY2_YUYV;
    else if( IsEqualGUID( st, MEDIASUBTYPE_Y411 ) )
        m_pfTransform = &CAtmoLightFilter::Transform_Packed_Y411;
    else if( IsEqualGUID( st, MEDIASUBTYPE_Y41P ) )
        m_pfTransform = &CAtmoLightFilter::Transform_Packed_Y41P;
    else if( IsEqualGUID( st, MEDIASUBTYPE_YVYU ) )
        m_pfTransform = &CAtmoLightFilter::Transform_Packed_YVYU;
    else if( IsEqualGUID( st, MEDIASUBTYPE_NV12 ) )
        m_pfTransform = &CAtmoLightFilter::Transform_Planar_NV12;
    else if( IsEqualGUID( st, MEDIASUBTYPE_NV21 ) )
        m_pfTransform = &CAtmoLightFilter::Transform_Planar_NV21;
    else if( IsEqualGUID( st, MEDIASUBTYPE_UYVY ) )
        m_pfTransform = &CAtmoLightFilter::Transform_Packed_UYVY;
    else if( IsEqualGUID( st, MEDIASUBTYPE_Y211 ) )
        m_pfTransform = &CAtmoLightFilter::Transform_Packed_Y211;
    else if( IsEqualGUID( st, MEDIASUBTYPE_CLJR ) )
        m_pfTransform = &CAtmoLightFilter::Transform_Packed_CLJR;
    else if( IsEqualGUID( st, MEDIASUBTYPE_IYUV ) || IsEqualGUID( st, MEDIASUBTYPE_I420 ) )
        m_pfTransform = &CAtmoLightFilter::Transform_Planar_IYUV_I420;
    else if( IsEqualGUID( st, MEDIASUBTYPE_YV12 ) )    // YVU
        m_pfTransform = &CAtmoLightFilter::Transform_Planar_YV12;
    else if( IsEqualGUID( st, MEDIASUBTYPE_YVU9 ) )    // YVU9 I411 format
        m_pfTransform = &CAtmoLightFilter::Transform_Planar_YVU9;
    else if( IsEqualGUID( st, MEDIASUBTYPE_RGB565 ) )  // RGB 16bit 5-6-5
        m_pfTransform = &CAtmoLightFilter::Transform_RGB_565;
    else if( IsEqualGUID( st, MEDIASUBTYPE_RGB555 ) )  // RGB 15bit 5-5-5
        m_pfTransform = &CAtmoLightFilter::Transform_RGB_555;
    else if( IsEqualGUID( st, MEDIASUBTYPE_RGB24 ) )   // RGB 24bit 8-8-8
        m_pfTransform = &CAtmoLightFilter::Transform_RGB_888;
    else if( IsEqualGUID( st, MEDIASUBTYPE_RGB32 ) )   // RGB 24bit 8-8-8+Alpha?
        m_pfTransform = &CAtmoLightFilter::Transform_RGB_8888;

    return CTransInPlaceFilter::SetMediaType( pindir, pMediaType );
}
Example #8
0
// Fetches the next audio packet from the demuxer and fills the downstream
// sample with it. Return values:
//   NOERROR       - a sample was delivered (or an exception was swallowed)
//   S_FALSE       - end of file
//   ERROR_NO_DATA - caller should retry (seek/flush/rebuild in progress,
//                   format change pending, or packet cached for later)
// Shared demuxer/pin state is guarded by m_section and m_csDeliver.
HRESULT CAudioPin::FillBuffer(IMediaSample *pSample)
{
  try
  {
    Packet* buffer = NULL;

    do
    {
      // A seek has been requested on the demuxer side; mark our seek as
      // not-yet-complete so the guard below backs off.
      if (m_demux.m_bAudioWaitForSeek)
      {
        m_demux.m_bAudioWaitForSeek = false;
        m_bSeekDone = false;
      }

      // Back off while the graph is in any transitional state.
      if (!m_bSeekDone || m_pFilter->IsStopping() || m_bFlushing || m_demux.IsMediaChanging() || m_demux.m_bRebuildOngoing || 
        m_demux.m_eAudioClipSeen->Check())
      {
        Sleep(1);
        return ERROR_NO_DATA;
      }

      // Prefer a packet cached by a previous early-return (format change /
      // playback state check) over pulling a new one from the demuxer.
      if (m_pCachedBuffer)
      {
        LogDebug("aud: cached fetch %6.3f clip: %d playlist: %d", m_pCachedBuffer->rtStart / 10000000.0, m_pCachedBuffer->nClipNumber, m_pCachedBuffer->nPlaylist);
        buffer = m_pCachedBuffer;
        m_pCachedBuffer = NULL;
      }
      else
        buffer = m_demux.GetAudio();

      if (m_demux.EndOfFile())
      {
        LogDebug("aud: set EOF");
        m_demux.m_eAudioClipSeen->Set();
        return S_FALSE;
      }

      if (!buffer)
      {
        // No data yet. Before the first sample just wait; afterwards notify
        // clip ending once so the renderer can stop buffering.
        if (m_bFirstSample)
          Sleep(10);
        else 
        {
          if (!m_bClipEndingNotified)
          {
            // Deliver end of stream notification to allow audio renderer to stop buffering.
            // This should only happen when the stream enters into paused state
            //LogDebug("aud: FillBuffer - DeliverEndOfStream");
            //DeliverEndOfStream();
            m_bClipEndingNotified = true;
          }
          else
            Sleep(10);

          return ERROR_NO_DATA;
        }
      }
      else
      {
        bool checkPlaybackState = false;
        REFERENCE_TIME rtStart = m_rtStart;

        //JoinAudioBuffers(buffer, &demux);
        
        {
          CAutoLock lock(m_section);

          if (m_demux.m_bAudioResetStreamPosition)
          {
            m_demux.m_bAudioResetStreamPosition = false;
            m_bZeroTimeStream = true;
          }

          // Clip boundary: signal the video side and re-arm the clip-ending
          // notification.
          if ((buffer->nNewSegment & NS_NEW_CLIP) == NS_NEW_CLIP)
          {
            LogDebug("aud: Playlist changed to %d - nNewSegment: %d offset: %6.3f rtStart: %6.3f rtPlaylistTime: %6.3f", 
              buffer->nPlaylist, buffer->nNewSegment, buffer->rtOffset / 10000000.0, buffer->rtStart / 10000000.0, buffer->rtPlaylistTime / 10000000.0);

            checkPlaybackState = true;
            m_bClipEndingNotified = false;

            m_demux.m_eAudioClipSeen->Set();
          }

          // Do not convert LPCM to PCM if audio decoder supports LPCM (LAV audio decoder style)
          if (!m_bUsePCM && buffer->pmt && buffer->pmt->subtype == MEDIASUBTYPE_PCM)
            buffer->pmt->subtype = MEDIASUBTYPE_BD_LPCM_AUDIO;

          // Dynamic format change (not at a clip boundary): probe whether the
          // connected filter accepts the new type, else request a rebuild.
          if (buffer->pmt && m_mt != *buffer->pmt && !((buffer->nNewSegment & NS_NEW_CLIP)==NS_NEW_CLIP))
          {
            HRESULT hrAccept = S_FALSE;
            LogMediaType(buffer->pmt);

            if (m_pPinConnection && false) // TODO - DS audio renderer seems to be only one that supports this
              hrAccept = m_pPinConnection->DynamicQueryAccept(buffer->pmt);
            else if (m_pReceiver)
            {
              //LogDebug("aud: DynamicQueryAccept - not avail");
              // Try LPCM first; fall back to the original subtype (PCM path)
              // when the receiver rejects it.
              GUID guid = buffer->pmt->subtype;
              if (buffer->pmt->subtype == MEDIASUBTYPE_PCM)
              {
                buffer->pmt->subtype = MEDIASUBTYPE_BD_LPCM_AUDIO;
                hrAccept = m_pReceiver->QueryAccept(buffer->pmt);
              }
              
              if (hrAccept != S_OK)
              {
                buffer->pmt->subtype = guid;
                hrAccept = m_pReceiver->QueryAccept(buffer->pmt);
                m_bUsePCM = true;
              }
              else
                m_bUsePCM = false;
            }

            if (hrAccept != S_OK)
            {
              CMediaType mt(*buffer->pmt);
              SetMediaType(&mt);

              LogDebug("aud: graph rebuilding required");

              m_demux.m_bAudioRequiresRebuild = true;
              checkPlaybackState = true;

              DeliverEndOfStream();
            }
            else
            {
              // Receiver accepted the new type: attach it to the sample and
              // cache the packet for redelivery on the next call.
              LogDebug("aud: format change accepted");
              CMediaType mt(*buffer->pmt);
              SetMediaType(&mt);
              pSample->SetMediaType(&mt);
              m_pCachedBuffer = buffer;

              return ERROR_NO_DATA;
            }
          }
        } // lock ends

        if (checkPlaybackState)
        {
          // Cache the packet so it is not lost across the early return.
          m_pCachedBuffer = buffer;
          LogDebug("aud: cached push  %6.3f clip: %d playlist: %d", m_pCachedBuffer->rtStart / 10000000.0, m_pCachedBuffer->nClipNumber, m_pCachedBuffer->nPlaylist);
          
          // NOTE(review): this inner test is redundant — checkPlaybackState
          // is already known true in this branch.
          if (checkPlaybackState)
          {
            if (buffer->pmt && m_mt != *buffer->pmt && !((buffer->nNewSegment & NS_NEW_CLIP)==NS_NEW_CLIP))
            {
              CMediaType mt(*buffer->pmt);
              SetMediaType(&mt);
            }
          }
          m_pCachedBuffer->nNewSegment = 0;

          return ERROR_NO_DATA;
        }
  
        bool hasTimestamp = buffer->rtStart != Packet::INVALID_TIME;

        REFERENCE_TIME rtCorrectedStartTime = 0;
        REFERENCE_TIME rtCorrectedStopTime = 0;

        // Only deliver timestamped packets at normal playback rate; anything
        // else is dropped below and the loop fetches the next packet.
        if (hasTimestamp && m_dRateSeeking == 1.0)
        {
          bool setPMT = false;

          if (m_bDiscontinuity || buffer->bDiscontinuity)
          {
            LogDebug("aud: set discontinuity");
            pSample->SetDiscontinuity(true);
            setPMT = true;
            m_bDiscontinuity = false;
          }

          if (buffer->pmt || setPMT)
          {
            LogDebug("aud: set PMT");
            pSample->SetMediaType(buffer->pmt);
            m_bDiscontinuity = false;          
          }

          // NOTE(review): hasTimestamp is always true here (checked above),
          // so the else branch below is dead code.
          if (hasTimestamp)
          {
            if (m_bZeroTimeStream)
            {
              // Re-base stream time on the first packet after a reset.
              m_rtStreamTimeOffset = buffer->rtStart - buffer->rtClipStartTime;
              m_bZeroTimeStream=false;
            }
            // Now we have the final timestamp, set timestamp in sample
            //REFERENCE_TIME refTime=(REFERENCE_TIME)cRefTimeStart;
            //refTime /= m_dRateSeeking; //the if rate===1.0 makes this redundant

            pSample->SetSyncPoint(true); // allow all packets to be seeking targets
            rtCorrectedStartTime = buffer->rtStart - m_rtStreamTimeOffset;//- m_rtStart;
            rtCorrectedStopTime = buffer->rtStop - m_rtStreamTimeOffset;// - m_rtStart;
            pSample->SetTime(&rtCorrectedStartTime, &rtCorrectedStopTime);
          }
          else
          {
            // Buffer has no timestamp
            pSample->SetTime(NULL, NULL);
            pSample->SetSyncPoint(false);
          }

          {
            CAutoLock lock(&m_csDeliver);

            if (!m_bFlushing)
            {
              ProcessAudioSample(buffer, pSample);
#ifdef LOG_AUDIO_PIN_SAMPLES
             LogDebug("aud: %6.3f corr %6.3f Playlist time %6.3f clip: %d playlist: %d", buffer->rtStart / 10000000.0, rtCorrectedStartTime / 10000000.0,
                buffer->rtPlaylistTime / 10000000.0, buffer->nClipNumber, buffer->nPlaylist);
#endif
            }
            else
            {
              LogDebug("aud: dropped sample as flush is active!");
              delete buffer;
              return ERROR_NO_DATA;
            }
          }

          m_bFirstSample = false;
          delete buffer;
        }
        else
        { // Buffer was not displayed because it was out of date, search for next.
          delete buffer;
          buffer = NULL;
        }
      }
    } while (!buffer);
    return NOERROR;
  }

  // Should we return something else than NOERROR when hitting an exception?
  catch (int e)
  {
    LogDebug("aud: FillBuffer exception %d", e);
  }
  catch (...)
  {
    LogDebug("aud: FillBuffer exception ...");
  }

  return NOERROR;
}
Example #9
0
// Fetches the next video packet from the demuxer and fills the downstream
// sample with it. Return values:
//   NOERROR/S_OK  - a sample was delivered (or an exception was swallowed)
//   S_FALSE       - end of file
//   ERROR_NO_DATA - caller should retry (seek/flush/rebuild in progress,
//                   format change pending, or packet cached for later)
// Shared demuxer/pin state is guarded by m_section and m_csDeliver.
HRESULT CVideoPin::FillBuffer(IMediaSample* pSample)
{
  try
  {
    Packet* buffer = NULL;

    do
    {
      // Back off while the graph is in any transitional state.
      if (m_pFilter->IsStopping() || m_demux.IsMediaChanging() || m_bFlushing || !m_bSeekDone || m_demux.m_bRebuildOngoing)
      {
        Sleep(1);
        return ERROR_NO_DATA;
      }

      if (m_demux.EndOfFile())
      {
        LogDebug("vid: set EOF");
        return S_FALSE;
      }

      // Parentheses added for clarity only; && already bound tighter than ||.
      if (m_demux.m_bVideoClipSeen || (m_demux.m_bAudioRequiresRebuild && !m_demux.m_bVideoClipSeen && !m_demux.m_eAudioClipSeen->Check()))
      {
        CheckPlaybackState();
        return ERROR_NO_DATA;
      }

      // Prefer a packet cached by a previous early-return (format change /
      // playback state check) over pulling a new one from the demuxer.
      if (m_pCachedBuffer)
      {
        LogDebug("vid: cached fetch %6.3f clip: %d playlist: %d", m_pCachedBuffer->rtStart / 10000000.0, m_pCachedBuffer->nClipNumber, m_pCachedBuffer->nPlaylist);
        buffer = m_pCachedBuffer;
        m_pCachedBuffer = NULL;
        buffer->bDiscontinuity = true;
        
        if (m_bProvidePMT)
        {
          CMediaType mt(*buffer->pmt);
          SetMediaType(&mt);
          pSample->SetMediaType(&mt);
          m_bProvidePMT = false;
        }
      }
      else
        buffer = m_demux.GetVideo();

      if (!buffer)
      {
        // No data yet. Before the first sample just wait; afterwards notify
        // clip ending once so the decoder gets flushed.
        if (m_bFirstSample)
          Sleep(10);
        else 
        {
          if (!m_bClipEndingNotified)
          {
            // Deliver end of stream notification to flush the video decoder.
            // This should only happen when the stream enters into paused state
            LogDebug("vid: FillBuffer - DeliverEndOfStream");
            DeliverEndOfStream();
            m_bClipEndingNotified = true;
          }
          else
            Sleep(10);

          return ERROR_NO_DATA;
        }
      }
      else
      {
        bool checkPlaybackState = false;

        {
          CAutoLock lock(m_section);

          if (buffer->nNewSegment > 0)
          {
            // Clip boundary: signal the audio side, re-init duration and
            // re-arm the clip-ending notification.
            if ((buffer->nNewSegment & NS_NEW_CLIP) == NS_NEW_CLIP)
            {
              LogDebug("vid: Playlist changed to %d - nNewSegment: %d offset: %6.3f rtStart: %6.3f rtPlaylistTime: %6.3f", 
                buffer->nPlaylist, buffer->nNewSegment, buffer->rtOffset / 10000000.0, buffer->rtStart / 10000000.0, buffer->rtPlaylistTime / 10000000.0);
            
              m_demux.m_bVideoClipSeen = true;
 
              m_bInitDuration = true;
              checkPlaybackState = true;
              m_bClipEndingNotified = false;

              if (buffer->bResuming || buffer->nNewSegment & NS_INTERRUPTED)
              {
                m_bDoFakeSeek = true;
                m_rtStreamOffset = buffer->rtPlaylistTime;
                m_bZeroTimeStream = true;
                m_demux.m_bAudioResetStreamPosition = true;
              }
              else
                m_rtStreamOffset = 0;

              // LAV video decoder requires an end of stream notification to be able to provide complete video frames
              // to downstream filters in a case where we are waiting for the audio pin to see the clip boundary as
              // we cannot provide yet the next clip's PMT downstream since audio stream could require a rebuild
              if (m_currentDecoder == CLSID_LAVVideo && (buffer->nNewSegment & NS_NEW_PLAYLIST))
              {
                LogDebug("DeliverEndOFStream LAV Only for audio pin wait (%d,%d)", buffer->nPlaylist, buffer->nClipNumber);
                DeliverEndOfStream();
              }
            }
            if ((buffer->nNewSegment & NS_STREAM_RESET) == NS_STREAM_RESET)
              m_bInitDuration = true;
          }

          if (buffer->pmt)
          {
            // (Removed an unused local that was assigned via a
            // "subtype = subtype = ..." double-assignment typo.)
            if (buffer->pmt->subtype == FOURCCMap('1CVW') && m_VC1Override != GUID_NULL)
            {
              buffer->pmt->subtype = m_VC1Override;
              LogDebug("vid: FillBuffer - force VC-1 GUID");
            }

            // Dynamic format change: probe whether the connected decoder
            // accepts the new type, else request a graph rebuild.
            if (!CompareMediaTypes(buffer->pmt, &m_mt))
            {
              LogMediaType(buffer->pmt);
            
              HRESULT hrAccept = S_FALSE;
              m_bProvidePMT = true;

              if (m_pReceiver && CheckVideoFormat(&buffer->pmt->subtype, &m_currentDecoder))
              {
                // Currently no other video decoders than LAV seems to be compatible with
                // the dynamic format changes
                if (m_currentDecoder == CLSID_LAVVideo)
                {
                  hrAccept = m_pReceiver->QueryAccept(buffer->pmt);
                }
              }

              if (hrAccept != S_OK)
              {
                CMediaType mt(*buffer->pmt);
                SetMediaType(&mt);

                LogDebug("vid: graph rebuilding required");

                m_demux.m_bVideoRequiresRebuild = true;
                m_bZeroTimeStream = true;
                checkPlaybackState = true;

                //LogDebug("DeliverEndOFStream for rebuild (%d,%d)", buffer->nPlaylist, buffer->nClipNumber);
                //DeliverEndOfStream();
              }
              else
              {
                // Decoder accepted the new type: attach it to the sample and
                // cache the packet for redelivery on the next call.
                LogDebug("vid: format change accepted");
                CMediaType mt(*buffer->pmt);
                SetMediaType(&mt);
                pSample->SetMediaType(&mt);

                buffer->nNewSegment = 0;
                m_pCachedBuffer = buffer;

                //if (m_currentDecoder == CLSID_LAVVideo)
                //{
                //  LogDebug("DeliverEndOFStream LAV Only (%d,%d)", buffer->nPlaylist, buffer->nClipNumber);
                //  DeliverEndOfStream();
                //}

                return ERROR_NO_DATA;
              }
            } // comparemediatypes
          }
        } // lock ends

        m_rtTitleDuration = buffer->rtTitleDuration;

        if (checkPlaybackState)
        {
          // Cache the packet so it is not lost across the early return.
          buffer->nNewSegment = 0;
          m_pCachedBuffer = buffer;

          CheckPlaybackState();

          LogDebug("vid: cached push  %6.3f clip: %d playlist: %d", m_pCachedBuffer->rtStart / 10000000.0, m_pCachedBuffer->nClipNumber, m_pCachedBuffer->nPlaylist);

          return ERROR_NO_DATA;
        }

        bool hasTimestamp = buffer->rtStart != Packet::INVALID_TIME;
        REFERENCE_TIME rtCorrectedStartTime = 0;
        REFERENCE_TIME rtCorrectedStopTime = 0;

        if (hasTimestamp)
        {
          if (m_bZeroTimeStream)
          {
            // Re-base stream time on the first packet after a reset.
            m_rtStreamTimeOffset = buffer->rtStart - buffer->rtClipStartTime;
            m_bZeroTimeStream = false;
          }

          if (m_bDiscontinuity || buffer->bDiscontinuity)
          {
            LogDebug("vid: set discontinuity");
            pSample->SetDiscontinuity(true);
            pSample->SetMediaType(buffer->pmt);
            m_bDiscontinuity = false;
          }

          rtCorrectedStartTime = buffer->rtStart - m_rtStreamTimeOffset;
          rtCorrectedStopTime = buffer->rtStop - m_rtStreamTimeOffset;

          pSample->SetTime(&rtCorrectedStartTime, &rtCorrectedStopTime);

          if (m_bInitDuration)
          {
            m_pFilter->SetTitleDuration(m_rtTitleDuration);
            m_pFilter->ResetPlaybackOffset(buffer->rtPlaylistTime - rtCorrectedStartTime);
            m_bInitDuration = false;
          }

          m_pFilter->OnPlaybackPositionChange();
        }
        else // Buffer has no timestamp
          pSample->SetTime(NULL, NULL);

        pSample->SetSyncPoint(buffer->bSyncPoint);

        {
          CAutoLock lock(&m_csDeliver);

          if (!m_bFlushing)
          {
            BYTE* pSampleBuffer;
            pSample->SetActualDataLength(buffer->GetDataSize());
            pSample->GetPointer(&pSampleBuffer);
            memcpy(pSampleBuffer, buffer->GetData(), buffer->GetDataSize());

            m_bFirstSample = false;

#ifdef LOG_VIDEO_PIN_SAMPLES
            LogDebug("vid: %6.3f corr %6.3f playlist time %6.3f clip: %d playlist: %d size: %d", buffer->rtStart / 10000000.0, rtCorrectedStartTime / 10000000.0, 
              buffer->rtPlaylistTime / 10000000.0, buffer->nClipNumber, buffer->nPlaylist, buffer->GetCount());
#endif
          }
          else
          {
            LogDebug("vid: dropped sample as flush is active!");
            // BUGFIX: the packet was leaked here; the audio pin's flush path
            // deletes it, so do the same.
            delete buffer;
            return ERROR_NO_DATA;
          }
        }

        //static int iFrameNumber = 0;
        //LogMediaSample(pSample, iFrameNumber++);

        delete buffer;
      }
    } while (!buffer);
    return NOERROR;
  }

  catch(...)
  {
    LogDebug("vid: FillBuffer exception");
  }

  return S_OK;
}