Code example #1
HRESULT CWASAPIRenderFilter::PutSample(IMediaSample* pSample)
{
  HRESULT hr = CQueuedAudioSink::PutSample(pSample);

  // While the filter is not yet running we are pre-buffering; log the buffered amount
  // in seconds (REFERENCE_TIME is in 100 ns units, so divide by 10,000,000.0)
  if (m_filterState != State_Running)
    Log("Buffering...%6.3f", BufferredDataDuration() / 10000000.0);

  return hr;
}
Code example #2
HRESULT CWASAPIRenderFilter::PutSample(IMediaSample* pSample)
{
  HRESULT hr = CQueuedAudioSink::PutSample(pSample);

  // While the filter is not yet running, refresh the "need more samples" event and log
  // the buffered amount in milliseconds (REFERENCE_TIME / 10,000)
  if (m_filterState != State_Running)
  {
    CheckBufferStatus();
    Log("Buffering...%5d ms", (int)(BufferredDataDuration() / 10000));
  }
  }

  return hr;
}
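
For reference, BufferredDataDuration() itself is not shown in these excerpts. A minimal sketch of the byte-count-to-duration arithmetic that the log statements rely on could look like the following; the function name, its parameters, and the standalone form are assumptions, only the conversion factors come from the code above. Dividing the result by 10,000 gives milliseconds (code example #2), and dividing by 10,000,000.0 gives seconds (code example #1).

#include <windows.h>
#include <mmreg.h>    // WAVEFORMATEX

// Hypothetical helper, not the project's actual implementation: convert a queued byte
// count into a REFERENCE_TIME duration. REFERENCE_TIME counts 100 ns units, so one
// second corresponds to 10,000,000 units (the DirectShow UNITS constant).
LONGLONG BufferedDurationFromBytes(ULONGLONG queuedBytes, const WAVEFORMATEX* pwfx)
{
  ULONGLONG frames = queuedBytes / pwfx->nBlockAlign;    // bytes -> PCM frames
  return frames * 10000000ULL / pwfx->nSamplesPerSec;    // frames -> 100 ns units
}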
Code example #3
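// Sets or resets the "need more samples" event depending on whether the buffered
// duration has dropped below the configured output buffer size.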
void CWASAPIRenderFilter::CheckBufferStatus()
{
  CAutoLock lock(&m_csResources);
  if (m_hNeedMoreSamples)
  {
    REFERENCE_TIME bufferedAmount = BufferredDataDuration();
    if (bufferedAmount < m_dOutputBufferSize)
    {
      //Log("CWASAPIRenderFilter::Render -      need more data - buffer: %6.3f", bufferedAmount / 10000000.0);
      SetEvent(*m_hNeedMoreSamples);
    }
    else
    {
      //Log("CWASAPIRenderFilter::Render - dont need more data - buffer: %6.3f", bufferedAmount / 10000000.0);
      ResetEvent(*m_hNeedMoreSamples);
    }
  }
}
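
The consumer of m_hNeedMoreSamples does not appear in these excerpts. As a rough sketch, assuming the thread that delivers samples blocks on that event before queueing more data (the function below and its timeout handling are purely illustrative; only the Win32 event API calls themselves are real):

#include <windows.h>

// Hypothetical illustration only: wait until CheckBufferStatus() has signalled the event,
// i.e. until the renderer reports that its buffer has room for more data.
bool WaitUntilMoreSamplesNeeded(HANDLE hNeedMoreSamples, DWORD timeoutMs)
{
  // WAIT_OBJECT_0 -> event set by SetEvent() above, deliver the next sample
  // WAIT_TIMEOUT  -> event still reset, enough data is buffered for now
  return WaitForSingleObject(hNeedMoreSamples, timeoutMs) == WAIT_OBJECT_0;
}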
Code example #4
HRESULT CWASAPIRenderFilter::CheckStreamTimeline(IMediaSample* pSample, REFERENCE_TIME* pDueTime, UINT32 sampleOffset)
{
  *pDueTime = 0;

  if (!pSample)
    return S_FALSE;

  REFERENCE_TIME rtHWTime = 0;
  REFERENCE_TIME rtRefClock = 0;
  REFERENCE_TIME rtStop = 0;
  REFERENCE_TIME rtStart = 0;
  REFERENCE_TIME rtDuration = 0;

  bool resync = false;

  HRESULT hr = pSample->GetTime(&rtStart, &rtStop);
  if (FAILED(hr))
  {
    // Render samples that don't carry a presentation time immediately, without scheduling them
    m_nSampleNum++;
    return MPAR_S_RENDER_SAMPLE;
  }

  if (m_nSampleNum == 0)
    m_rtNextSampleTime = rtStart;

  long sampleLength = pSample->GetActualDataLength();

  // Sample duration in 100 ns units: bytes -> frames via nBlockAlign, frames -> time via nSamplesPerSec
  UINT nFrames = sampleLength / m_pInputFormat->Format.nBlockAlign;
  rtDuration = nFrames * UNITS / m_pInputFormat->Format.nSamplesPerSec;

  // Query the reference clock and the hardware (audio device) clock and make both
  // relative to the stream / hardware start times
  if (SUCCEEDED(m_pClock->GetHWTime(&rtRefClock, &rtHWTime)))
  {
    rtRefClock -= m_rtStart;
    rtHWTime -= m_rtHwStart;
  }
  else
  {
    m_nSampleNum++;
    return MPAR_S_RENDER_SAMPLE;
  }

  if (m_pSettings->m_bLogSampleTimes)
    Log("   sample start: %6.3f  stop: %6.3f dur: %6.3f diff: %6.3f rtHWTime: %6.3f rtRefClock: %6.3f early: %6.3f queue: %d %6.3f", 
      rtStart / 10000000.0, rtStop / 10000000.0, rtDuration / 10000000.0, (rtStart - m_rtNextSampleTime) / 10000000.0, 
      rtHWTime / 10000000.0, rtRefClock / 10000000.0, (rtStart - rtHWTime) / 10000000.0, m_inputQueue.size(), BufferredDataDuration() / 10000000.0);

  // Try to keep the A/V sync when data has been dropped
  if (abs(rtStart - m_rtNextSampleTime) > MAX_SAMPLE_TIME_ERROR)
  {
    resync = true;
    Log("   Discontinuity detected: diff: %7.3f ms MAX_SAMPLE_TIME_ERROR: %7.3f ms resync: %d", ((double)rtStart - (double)m_rtNextSampleTime) / 10000.0, (double)MAX_SAMPLE_TIME_ERROR / 10000.0, resync);
  }

  m_rtNextSampleTime = rtStart + rtDuration;

  // Translate the byte offset within the sample into a time offset so the due time
  // points at the first frame that will actually be rendered
  REFERENCE_TIME offsetDelay = 0;
  if (sampleOffset > 0)
    offsetDelay = sampleOffset / m_pInputFormat->Format.nBlockAlign * UNITS / m_pInputFormat->Format.nSamplesPerSec;

  *pDueTime = rtStart + offsetDelay;

  if (*pDueTime < rtHWTime - Latency())
  {
    // TODO implement partial sample dropping
    Log("   dropping late sample - pDueTime: %6.3f rtHWTime: %6.3f", *pDueTime / 10000000.0, rtHWTime / 10000000.0);
    m_nSampleNum = 0;

    return MPAR_S_DROP_SAMPLE;
  }
  else if ((m_nSampleNum == 0 && *pDueTime > rtHWTime) || resync)
  {
    m_nSampleNum++;

    if (m_pSettings->m_bLogSampleTimes)
      Log("   MPAR_S_WAIT_RENDER_TIME - %6.3f", *pDueTime / 10000000.0);

    return MPAR_S_WAIT_RENDER_TIME;
  }

  m_nSampleNum++;

  return MPAR_S_RENDER_SAMPLE;
}
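
The scheduling decision at the end of CheckStreamTimeline() can be restated as a small self-contained function. This is only a paraphrase of the excerpt above: Latency() becomes a plain parameter, m_nSampleNum == 0 becomes a firstSample flag, and the MPAR_S_* codes are replaced by a local enum; those substitutions are assumptions made for the sake of a standalone example.

typedef long long REFERENCE_TIME;   // 100 ns units, as in DirectShow

enum ScheduleDecision { RenderNow, DropLate, WaitForDueTime };

// Mirrors the branch structure above: drop samples that are already later than the render
// latency allows, wait on the first sample (or after a resync) until its due time,
// and otherwise render immediately.
ScheduleDecision DecideSchedule(REFERENCE_TIME dueTime, REFERENCE_TIME hwTime,
                                REFERENCE_TIME latency, bool firstSample, bool resync)
{
  if (dueTime < hwTime - latency)
    return DropLate;
  if ((firstSample && dueTime > hwTime) || resync)
    return WaitForDueTime;
  return RenderNow;
}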