//------------------------------------------------------------------------------
// Name: CStreamData::GetSameMediaType()
// Desc: Finds an unmapped stream in data2 whose media type matches the
//       specified type; returns its stream number and buffer window.
//------------------------------------------------------------------------------
WORD CStreamData::GetSameMediaType( WORD * ptrNumMap, 
                                    WM_MEDIA_TYPE * pMediaToFind, 
                                    BOOL fVBR, 
                                    CStreamData& data2, 
                                    DWORD * pdwBufferWindow )
{
    if( NULL == ptrNumMap || NULL == pMediaToFind || NULL == pdwBufferWindow )
    {
        return( -1 );
    }

    // The upper half of ptrNumMap (indices m_dwStreamCount to 2 * m_dwStreamCount - 1)
    // holds the stream numbers already chosen from data2.
    DWORD dwCount = m_dwStreamCount * 2;
    for( DWORD i = 0; i < m_dwStreamCount; i++ )
    {
        if( fVBR == m_pfVBRStream[i]
            && CompareMediaTypes( pMediaToFind, data2.m_ptrMediaArray[i], fVBR ) )
        {
            //
            //  Skip stream if already chosen
            //
            DWORD j;
            for( j = m_dwStreamCount; j < dwCount; j++ )
            {
                if( ptrNumMap[j] == data2.m_ptrStreamNumArray[i] )
                    break;
            }
            if( j == dwCount )
            {
                *pdwBufferWindow = data2.m_ptrStreamBufferWindow[i];
                return( data2.m_ptrStreamNumArray[i] );
            }
        }
    }
    return( -1 );
}
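//------------------------------------------------------------------------------
// Name: QTFileBroadcaster::MapMovieToStream()
// Desc: Maps each media type in the movie's SDP onto an unused entry in the
//       stream's SDP media list; returns the number of tracks mapped.
//------------------------------------------------------------------------------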
int QTFileBroadcaster::MapMovieToStream()
{
    bool            matches = false;
    ArrayList<bool> map;
    bool            *isMappedPtr;
    int             masterPos = 0;
    int             mappedTracks = 0;
    
    map.SetSize(fStreamSDPParser.fSDPMediaList.Size()); 
    
    isMappedPtr = map.Begin();
    
    while (isMappedPtr)
    {   *isMappedPtr = false;
        isMappedPtr = map.Next();
    }
    
    // For each media type in the movie's SDP, find the first stream SDP entry
    // with a matching media type that is not already mapped.
    TypeMap *movieMediaTypePtr = fMovieSDPParser->fSDPMediaList.Begin();
    
    while (movieMediaTypePtr)
    {
        TypeMap *streamMediaTypePtr = fStreamSDPParser.fSDPMediaList.Begin();
        
        while (streamMediaTypePtr)
        {
            matches = CompareMediaTypes(movieMediaTypePtr, streamMediaTypePtr);
            
            if (matches)
            {
                masterPos = fStreamSDPParser.fSDPMediaList.GetPos();
                isMappedPtr = map.SetPos(masterPos);
                if (isMappedPtr == NULL) 
                    break;
                
                if (false == *isMappedPtr) 
                {   
                    movieMediaTypePtr->fMediaStreamPtr = streamMediaTypePtr->fMediaStreamPtr;
                    *isMappedPtr = true;
                    mappedTracks++;
                    break; 
                }
            }
            streamMediaTypePtr = fStreamSDPParser.fSDPMediaList.Next();
        }
        movieMediaTypePtr = fMovieSDPParser->fSDPMediaList.Next();
    }

    return mappedTracks;

}
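//------------------------------------------------------------------------------
// Name: CVideoPin::FillBuffer()
// Desc: Fills the DirectShow media sample with the next video packet from the
//       demuxer, handling clip boundaries, dynamic format changes and
//       timestamp rebasing.
//------------------------------------------------------------------------------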
HRESULT CVideoPin::FillBuffer(IMediaSample* pSample)
{
  try
  {
    Packet* buffer = NULL;

    do
    {
      if (m_pFilter->IsStopping() || m_demux.IsMediaChanging() || m_bFlushing || !m_bSeekDone || m_demux.m_bRebuildOngoing)
      {
        Sleep(1);
        return ERROR_NO_DATA;
      }

      if (m_demux.EndOfFile())
      {
        LogDebug("vid: set EOF");
        return S_FALSE;
      }

      if (m_demux.m_bVideoClipSeen || (m_demux.m_bAudioRequiresRebuild && !m_demux.m_bVideoClipSeen && !m_demux.m_eAudioClipSeen->Check()))
      {
        CheckPlaybackState();
        return ERROR_NO_DATA;
      }

      // Use a previously cached buffer first (cached on a format change or
      // while waiting for a playback state check)
      if (m_pCachedBuffer)
      {
        LogDebug("vid: cached fetch %6.3f clip: %d playlist: %d", m_pCachedBuffer->rtStart / 10000000.0, m_pCachedBuffer->nClipNumber, m_pCachedBuffer->nPlaylist);
        buffer = m_pCachedBuffer;
        m_pCachedBuffer = NULL;
        buffer->bDiscontinuity = true;
        
        if (m_bProvidePMT)
        {
          CMediaType mt(*buffer->pmt);
          SetMediaType(&mt);
          pSample->SetMediaType(&mt);
          m_bProvidePMT = false;
        }
      }
      else
        buffer = m_demux.GetVideo();

      if (!buffer)
      {
        if (m_bFirstSample)
          Sleep(10);
        else 
        {
          if (!m_bClipEndingNotified)
          {
            // Deliver an end of stream notification to flush the video decoder.
            // This should only happen when the stream enters the paused state.
            LogDebug("vid: FillBuffer - DeliverEndOfStream");
            DeliverEndOfStream();
            m_bClipEndingNotified = true;
          }
          else
            Sleep(10);
		  
          return ERROR_NO_DATA;
        }
      }
      else
      {
        bool checkPlaybackState = false;

        {
          // Process new segment flags and possible media type changes under m_section
          CAutoLock lock(m_section);

          if (buffer->nNewSegment > 0)
          {
            if ((buffer->nNewSegment & NS_NEW_CLIP) == NS_NEW_CLIP)
            {
              LogDebug("vid: Playlist changed to %d - nNewSegment: %d offset: %6.3f rtStart: %6.3f rtPlaylistTime: %6.3f", 
                buffer->nPlaylist, buffer->nNewSegment, buffer->rtOffset / 10000000.0, buffer->rtStart / 10000000.0, buffer->rtPlaylistTime / 10000000.0);
            
              m_demux.m_bVideoClipSeen = true;
 
              m_bInitDuration = true;
              checkPlaybackState = true;
              m_bClipEndingNotified = false;

              if (buffer->bResuming || buffer->nNewSegment & NS_INTERRUPTED)
              {
                m_bDoFakeSeek = true;
                m_rtStreamOffset = buffer->rtPlaylistTime;
                m_bZeroTimeStream = true;
                m_demux.m_bAudioResetStreamPosition = true;
              }
              else
                m_rtStreamOffset = 0;

              // The LAV video decoder requires an end of stream notification so it can
              // deliver its remaining complete frames downstream while we wait for the
              // audio pin to reach the clip boundary; the next clip's PMT cannot be
              // provided downstream yet since the audio stream could require a rebuild
              if (m_currentDecoder == CLSID_LAVVideo && (buffer->nNewSegment & NS_NEW_PLAYLIST))
              {
                LogDebug("DeliverEndOFStream LAV Only for audio pin wait (%d,%d)", buffer->nPlaylist, buffer->nClipNumber);
                DeliverEndOfStream();
              }
            }
            if ((buffer->nNewSegment & NS_STREAM_RESET) == NS_STREAM_RESET)
              m_bInitDuration = true;
          }

          if (buffer->pmt)
          {
            GUID subtype = buffer->pmt->subtype;

            if (buffer->pmt->subtype == FOURCCMap('1CVW') && m_VC1Override != GUID_NULL)
            {
              buffer->pmt->subtype = m_VC1Override;
              LogDebug("vid: FillBuffer - force VC-1 GUID");
            }

            if (!CompareMediaTypes(buffer->pmt, &m_mt))
            {
              LogMediaType(buffer->pmt);
            
              HRESULT hrAccept = S_FALSE;
              m_bProvidePMT = true;

              if (m_pReceiver && CheckVideoFormat(&buffer->pmt->subtype, &m_currentDecoder))
              {
                // Currently no video decoder other than LAV seems to be compatible
                // with the dynamic format changes
                if (m_currentDecoder == CLSID_LAVVideo)
                  hrAccept = m_pReceiver->QueryAccept(buffer->pmt);
              }

              if (hrAccept != S_OK)
              {
                CMediaType mt(*buffer->pmt);
                SetMediaType(&mt);

                LogDebug("vid: graph rebuilding required");

                m_demux.m_bVideoRequiresRebuild = true;
                m_bZeroTimeStream = true;
                checkPlaybackState = true;

                //LogDebug("DeliverEndOFStream for rebuild (%d,%d)", buffer->nPlaylist, buffer->nClipNumber);
                //DeliverEndOfStream();
              }
              else
              {
                LogDebug("vid: format change accepted");
                CMediaType mt(*buffer->pmt);
                SetMediaType(&mt);
                pSample->SetMediaType(&mt);

                buffer->nNewSegment = 0;
                m_pCachedBuffer = buffer;
				
                //if (m_currentDecoder == CLSID_LAVVideo)
                //{
                //  LogDebug("DeliverEndOFStream LAV Only (%d,%d)", buffer->nPlaylist, buffer->nClipNumber);
                //  DeliverEndOfStream();
                //}

                return ERROR_NO_DATA;
              }
            } // comparemediatypes
          }
        } // lock ends

        m_rtTitleDuration = buffer->rtTitleDuration;

        if (checkPlaybackState)
        {
          buffer->nNewSegment = 0;
          m_pCachedBuffer = buffer;

          CheckPlaybackState();

          LogDebug("vid: cached push  %6.3f clip: %d playlist: %d", m_pCachedBuffer->rtStart / 10000000.0, m_pCachedBuffer->nClipNumber, m_pCachedBuffer->nPlaylist);

          return ERROR_NO_DATA;
        }

        bool hasTimestamp = buffer->rtStart != Packet::INVALID_TIME;
        REFERENCE_TIME rtCorrectedStartTime = 0;
        REFERENCE_TIME rtCorrectedStopTime = 0;

        if (hasTimestamp)
        {
          if (m_bZeroTimeStream)
          {
            m_rtStreamTimeOffset = buffer->rtStart - buffer->rtClipStartTime;
            m_bZeroTimeStream = false;
          }

          if (m_bDiscontinuity || buffer->bDiscontinuity)
          {
            LogDebug("vid: set discontinuity");
            pSample->SetDiscontinuity(true);
            pSample->SetMediaType(buffer->pmt);
            m_bDiscontinuity = false;
          }

          rtCorrectedStartTime = buffer->rtStart - m_rtStreamTimeOffset;
          rtCorrectedStopTime = buffer->rtStop - m_rtStreamTimeOffset;

          pSample->SetTime(&rtCorrectedStartTime, &rtCorrectedStopTime);

          if (m_bInitDuration)
          {
            m_pFilter->SetTitleDuration(m_rtTitleDuration);
            m_pFilter->ResetPlaybackOffset(buffer->rtPlaylistTime - rtCorrectedStartTime);
            m_bInitDuration = false;
          }

          m_pFilter->OnPlaybackPositionChange();
        }
        else // Buffer has no timestamp
          pSample->SetTime(NULL, NULL);

        pSample->SetSyncPoint(buffer->bSyncPoint);

        {
          CAutoLock lock(&m_csDeliver);

          // Copy the payload into the media sample unless a flush has started
          if (!m_bFlushing)
          {
            BYTE* pSampleBuffer;
            pSample->SetActualDataLength(buffer->GetDataSize());
            pSample->GetPointer(&pSampleBuffer);
            memcpy(pSampleBuffer, buffer->GetData(), buffer->GetDataSize());

            m_bFirstSample = false;

#ifdef LOG_VIDEO_PIN_SAMPLES
            LogDebug("vid: %6.3f corr %6.3f playlist time %6.3f clip: %d playlist: %d size: %d", buffer->rtStart / 10000000.0, rtCorrectedStartTime / 10000000.0, 
              buffer->rtPlaylistTime / 10000000.0, buffer->nClipNumber, buffer->nPlaylist, buffer->GetCount());
#endif
          }
          else
          {
            LogDebug("vid: dropped sample as flush is active!");
            return ERROR_NO_DATA;
          }
        }

        //static int iFrameNumber = 0;
        //LogMediaSample(pSample, iFrameNumber++);

        delete buffer;
      }
    } while (!buffer);
    return NOERROR;
  }

  catch(...)
  {
    LogDebug("vid: FillBuffer exception");
  }

  return S_OK;
}