Example #1
HRESULT CMpcAudioRenderer::CheckAudioClient(WAVEFORMATEX *pWaveFormatEx)
{
    HRESULT hr = S_OK;
    CAutoLock cAutoLock(&m_csCheck);
    TRACE(_T("CMpcAudioRenderer::CheckAudioClient\n"));
    if (pMMDevice == NULL) {
        hr = GetAudioDevice(&pMMDevice);
    }

    // If no WAVEFORMATEX structure was provided and a client already exists, keep the existing one
    if (pAudioClient != NULL && pWaveFormatEx == NULL) {
        return hr;
    }

    // Just create the audio client if no WAVEFORMATEX provided
    if (pAudioClient == NULL && pWaveFormatEx==NULL) {
        if (SUCCEEDED (hr)) {
            hr=CreateAudioClient(pMMDevice, &pAudioClient);
        }
        return hr;
    }

    // Compare the existing WAVEFORMATEX with the one provided
    WAVEFORMATEX *pNewWaveFormatEx = NULL;
    if (CheckFormatChanged(pWaveFormatEx, &pNewWaveFormatEx)) {
        // Format has changed, audio client has to be reinitialized
        TRACE(_T("CMpcAudioRenderer::CheckAudioClient Format changed, reinitialize the audio client\n"));
        if (m_pWaveFileFormat) {
            BYTE *p = (BYTE *)m_pWaveFileFormat;
            SAFE_DELETE_ARRAY(p);
        }
        m_pWaveFileFormat=pNewWaveFormatEx;
        hr = pAudioClient->IsFormatSupported(AUDCLNT_SHAREMODE_EXCLUSIVE, pWaveFormatEx, NULL);
        if (SUCCEEDED(hr)) {
            if (pAudioClient!=NULL && isAudioClientStarted) {
                pAudioClient->Stop();
            }
            isAudioClientStarted=false;
            SAFE_RELEASE(pRenderClient);
            SAFE_RELEASE(pAudioClient);
            if (SUCCEEDED (hr)) {
                hr=CreateAudioClient(pMMDevice, &pAudioClient);
            }
        } else {
            TRACE(_T("CMpcAudioRenderer::CheckAudioClient New format not supported, accept it anyway\n"));
            return S_OK;
        }
    } else if (pRenderClient == NULL) {
        TRACE(_T("CMpcAudioRenderer::CheckAudioClient First initialization of the audio renderer\n"));
    } else {
        return hr;
    }


    SAFE_RELEASE(pRenderClient);
    if (SUCCEEDED (hr)) {
        hr=InitAudioClient(pWaveFormatEx, pAudioClient, &pRenderClient);
    }
    return hr;
}
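
Example #1 hinges on CheckFormatChanged deciding whether the cached format still matches the incoming one; when it does not, the stored copy is freed as a BYTE array and replaced. A minimal sketch of what such a comparison helper could look like (hypothetical name and layout, not the renderer's actual implementation):

#include <windows.h>
#include <mmreg.h>     // WAVEFORMATEX
#include <string.h>    // memcmp, memcpy

// Hypothetical sketch: return true and hand back a heap copy of the new format
// (owned by the caller, released later via delete[] on a BYTE*) when it differs
// from the cached format.
static bool CheckFormatChangedSketch(const WAVEFORMATEX* pNew, const WAVEFORMATEX* pOld,
                                     WAVEFORMATEX** ppCopy)
{
    if (!pNew || !ppCopy)
        return false;

    const size_t newSize = sizeof(WAVEFORMATEX) + pNew->cbSize;
    if (pOld) {
        const size_t oldSize = sizeof(WAVEFORMATEX) + pOld->cbSize;
        if (oldSize == newSize && memcmp(pOld, pNew, newSize) == 0)
            return false; // formats match, no reinitialization needed
    }

    BYTE* pBuffer = new BYTE[newSize];
    memcpy(pBuffer, pNew, newSize);
    *ppCopy = reinterpret_cast<WAVEFORMATEX*>(pBuffer);
    return true;
}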
Example #2
HRESULT CMpcAudioRenderer::InitAudioClient(WAVEFORMATEX *pWaveFormatEx, IAudioClient *pAudioClient, IAudioRenderClient **ppRenderClient)
{
    TRACE(_T("CMpcAudioRenderer::InitAudioClient\n"));
    HRESULT hr=S_OK;
    // Initialize the stream to play at the minimum latency.
    //if (SUCCEEDED (hr)) hr = pAudioClient->GetDevicePeriod(NULL, &hnsPeriod);
    hnsPeriod=500000; //50 ms is the best according to James @Slysoft

    hr = pAudioClient->IsFormatSupported(AUDCLNT_SHAREMODE_EXCLUSIVE, pWaveFormatEx, NULL);
    if (FAILED(hr)) {
        TRACE(_T("CMpcAudioRenderer::InitAudioClient not supported (0x%08x)\n"), hr);
    } else {
        TRACE(_T("CMpcAudioRenderer::InitAudioClient format supported\n"));
    }

    GetBufferSize(pWaveFormatEx, &hnsPeriod);

    if (SUCCEEDED(hr))
        hr = pAudioClient->Initialize(AUDCLNT_SHAREMODE_EXCLUSIVE, 0 /*AUDCLNT_STREAMFLAGS_EVENTCALLBACK*/,
                                      hnsPeriod, hnsPeriod, pWaveFormatEx, NULL);
    if (FAILED (hr) && hr != AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED) {
        TRACE(_T("CMpcAudioRenderer::InitAudioClient failed (0x%08x)\n"), hr);
        return hr;
    }

    if (AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED == hr) {
        // if the buffer size was not aligned, need to do the alignment dance
        TRACE(_T("CMpcAudioRenderer::InitAudioClient Buffer size not aligned. Realigning\n"));

        // get the buffer size, which will be aligned
        hr = pAudioClient->GetBufferSize(&nFramesInBuffer);

        // throw away this IAudioClient
        pAudioClient->Release();
        pAudioClient=NULL;

        // calculate the new aligned periodicity
        hnsPeriod = // hns =
            (REFERENCE_TIME)(
                10000.0 * // (hns / ms) *
                1000 * // (ms / s) *
                nFramesInBuffer / // frames /
                pWaveFormatEx->nSamplesPerSec  // (frames / s)
                + 0.5 // rounding
            );

        if (SUCCEEDED (hr)) {
            hr=CreateAudioClient(pMMDevice, &pAudioClient);
        }
        TRACE(_T("CMpcAudioRenderer::InitAudioClient Trying again with periodicity of %I64u hundred-nanoseconds, or %u frames.\n"), hnsPeriod, nFramesInBuffer);
        if (SUCCEEDED (hr))
            hr = pAudioClient->Initialize(AUDCLNT_SHAREMODE_EXCLUSIVE,0/*AUDCLNT_STREAMFLAGS_EVENTCALLBACK*/,
                                          hnsPeriod, hnsPeriod, pWaveFormatEx, NULL);
        if (FAILED(hr)) {
            TRACE(_T("CMpcAudioRenderer::InitAudioClient Failed to reinitialize the audio client\n"));
            return hr;
        }
    } // if (AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED == hr)

    // get the buffer size, which is aligned
    if (SUCCEEDED (hr)) {
        hr = pAudioClient->GetBufferSize(&nFramesInBuffer);
    }

    // get the render client service
    if (SUCCEEDED (hr)) {
        hr = pAudioClient->GetService(__uuidof(IAudioRenderClient), (void**)(ppRenderClient));
    }

    if (FAILED (hr)) {
        TRACE(_T("CMpcAudioRenderer::InitAudioClient service initialization failed (0x%08x)\n"), hr);
    } else {
        TRACE(_T("CMpcAudioRenderer::InitAudioClient service initialization success\n"));
    }

    return hr;
}
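
The alignment fallback converts the driver-aligned buffer length back into a device period: frames divided by the sample rate, expressed in 100-ns REFERENCE_TIME units and rounded. Isolated as a small helper (illustrative name, same arithmetic as the code above):

#include <windows.h>
#include <audioclient.h>   // REFERENCE_TIME

// period_hns = nFrames / nSamplesPerSec seconds, scaled to 100-ns units (1 s = 10,000,000 hns),
// rounded to the nearest unit.
static REFERENCE_TIME FramesToHnsPeriod(UINT32 nFrames, DWORD nSamplesPerSec)
{
    return (REFERENCE_TIME)(10000.0 * 1000 * nFrames / nSamplesPerSec + 0.5);
}

// For example, 441 frames at 44100 Hz gives 100000 hns, i.e. a 10 ms period.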
// Format negotiation
HRESULT CWASAPIRenderFilter::NegotiateFormat(const WAVEFORMATEXTENSIBLE* pwfx, int nApplyChangesDepth, ChannelOrder* pChOrder)
{
  if (!pwfx)
    return VFW_E_TYPE_NOT_ACCEPTED;

  if (FormatsEqual(pwfx, m_pInputFormat))
  {
    *pChOrder = m_chOrder;
    return S_OK;
  }

  bool bApplyChanges = nApplyChangesDepth != 0;

  bool bitDepthForced = (m_pSettings->m_nForceBitDepth != 0 && m_pSettings->m_nForceBitDepth != pwfx->Format.wBitsPerSample);
  bool sampleRateForced = (m_pSettings->m_nForceSamplingRate != 0 && m_pSettings->m_nForceSamplingRate != pwfx->Format.nSamplesPerSec);
  
  if (((bitDepthForced || sampleRateForced) &&
        pwfx->SubFormat == KSDATAFORMAT_SUBTYPE_IEC61937_DOLBY_DIGITAL) ||
       (pwfx->SubFormat == KSDATAFORMAT_SUBTYPE_IEEE_FLOAT && bitDepthForced))
    return VFW_E_TYPE_NOT_ACCEPTED;
  
  if (((bitDepthForced && m_pSettings->m_nForceBitDepth != pwfx->Format.wBitsPerSample) ||
       (sampleRateForced && m_pSettings->m_nForceSamplingRate != pwfx->Format.nSamplesPerSec)))
    return VFW_E_TYPE_NOT_ACCEPTED;

  CAutoLock lock(&m_csResources);

  HRESULT hr = CreateAudioClient();
  if (FAILED(hr))
  {
    Log("CWASAPIRenderFilter::NegotiateFormat Error, audio client not initialized: (0x%08x)", hr);
    return VFW_E_CANNOT_CONNECT;
  }

  WAVEFORMATEXTENSIBLE* pwfxAccepted = NULL;
  hr = IsFormatSupported(pwfx, &pwfxAccepted);
  if (FAILED(hr))
  {
    SAFE_DELETE_WAVEFORMATEX(pwfxAccepted);
    return hr;
  }

  if (bApplyChanges)
  {
    LogWaveFormat(pwfx, "REN - applying  ");

    // Stop and discard audio client
    StopAudioClient();
    SAFE_RELEASE(m_pRenderClient);
    SAFE_RELEASE(m_pAudioClock);
    SAFE_RELEASE(m_pAudioClient);

    // We must use the incoming format so that the WAVEFORMATEXTENSIBLE to WAVEFORMATEX difference
    // that some audio drivers require does not cause an infinite loop of format changes
    SetInputFormat(pwfx);

    // Reinitialize audio client
    hr = CreateAudioClient(true);
  }
  else
    LogWaveFormat(pwfx, "Input format    ");

  m_chOrder = *pChOrder = DS_ORDER;
  SAFE_DELETE_WAVEFORMATEX(pwfxAccepted);

  return hr;
}
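
NegotiateFormat and the surrounding code lean on small ownership macros (SAFE_RELEASE, SAFE_DELETE_ARRAY, SAFE_DELETE_WAVEFORMATEX) that free a resource only when the pointer is non-null and then null it. The projects these examples come from define these themselves and may differ in detail; a plausible sketch of the convention, shown purely as an assumption:

// Assumed definitions, not the projects' actual macros.
#define SAFE_RELEASE(p)             { if (p) { (p)->Release(); (p) = NULL; } }
#define SAFE_DELETE_ARRAY(p)        { if (p) { delete[] (p); (p) = NULL; } }
#define SAFE_DELETE_WAVEFORMATEX(p) { if (p) { delete[] (BYTE*)(p); (p) = NULL; } }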
DWORD CWASAPIRenderFilter::ThreadProc()
{
  Log("CWASAPIRenderFilter::Render thread - starting up - thread ID: %d", m_ThreadId);
  
  SetThreadName(0, "WASAPI-renderer");

  // Polling delay
  LARGE_INTEGER liDueTime; 
  liDueTime.QuadPart = -1LL;

  AudioSinkCommand command;
  
  LONGLONG writeSilence = 0;
  BYTE* sampleData = NULL;

  bool flush = false;
  bool sampleProcessed = false;

  REFERENCE_TIME dueTime = 0;
  REFERENCE_TIME maxSampleWaitTime = Latency() / 20000;

  HRESULT hr = S_FALSE;

  m_csResources.Lock();

  if (m_pSettings->m_bReleaseDeviceOnStop && !m_pAudioClient && m_pInputFormat)
  {
    hr = CreateAudioClient(true);
    if (FAILED(hr))
    {
      Log("CWASAPIRenderFilter::Render thread Error, audio client not available: (0x%08x)", hr);
      StopRenderThread();
      m_csResources.Unlock();
      return 0;
    }
  }

  if (m_pAudioClient)
  {
    hr = StartAudioClient();
    if (FAILED(hr))
    {
      Log("CWASAPIRenderFilter::Render thread Error, starting audio client failed: (0x%08x)", hr);
      StopRenderThread();
      m_csResources.Unlock();
      return 0;
    }
  }

  if (!m_bDeviceInitialized)
  {
    Log("CWASAPIRenderFilter::Render thread Error, device not initialized");
    StopRenderThread();
    m_csResources.Unlock();
    return 0;
  }

  EnableMMCSS();
  m_state = StateRunning;

  while (true)
  {
    if (flush)
    {
      Log("CWASAPIRenderFilter::Render thread flushing buffers");
      HandleFlush();
      flush = false;
    }
    
    m_csResources.Unlock();
    hr = WaitForEvents(INFINITE, &m_hDataEvents, &m_dwDataWaitObjects);
    m_csResources.Lock();

    if (hr == MPAR_S_THREAD_STOPPING || !m_pAudioClient)
    {
      StopRenderThread();
      return 0;
    }
    else if (hr == MPAR_S_NEED_DATA)
    {
      UpdateAudioClock();

      UINT32 bytesFilled = 0;
      UINT32 bufferSize = 0;
      UINT32 currentPadding = 0;
      UINT32 bufferSizeInBytes = 0;
      BYTE* data = NULL;
      DWORD flags = 0;

      static BYTE* prevData = NULL;
       
      hr = GetWASAPIBuffer(bufferSize, currentPadding, bufferSizeInBytes, &data);
      if (SUCCEEDED(hr))
      {
        do
        {
          fetchSample:

          bool OOBCommandOnly = m_nDataLeftInSample > 0;

          if (m_nDataLeftInSample == 0 || OOBCommandOnly)
          {
            m_csResources.Unlock();
            HRESULT result = GetNextSampleOrCommand(&command, &m_pCurrentSample.p, maxSampleWaitTime, &m_hSampleEvents,
                                                    &m_dwSampleWaitObjects, OOBCommandOnly);
            m_csResources.Lock();

            if (result == MPAR_S_THREAD_STOPPING || !m_pAudioClient)
            {
              if (m_pAudioClient)
              {
                hr = m_pRenderClient->ReleaseBuffer(bufferSize - currentPadding, flags);
                if (FAILED(hr) && hr != AUDCLNT_E_OUT_OF_ORDER)
                  Log("CWASAPIRenderFilter::Render thread: ReleaseBuffer failed (0x%08x)", hr);
              }

              StopRenderThread();
              return 0;
            }

            if (!m_pCurrentSample)
              m_nDataLeftInSample = 0;
              
            if (command == ASC_PutSample && m_pCurrentSample)
            {
              sampleProcessed = false;
              m_nSampleOffset = 0;
              m_nDataLeftInSample = m_pCurrentSample->GetActualDataLength();
            }
            else if (command == ASC_Flush)
            {
              m_pCurrentSample.Release();

              flush = true;
              sampleData = NULL;
              m_nSampleOffset = 0;
              m_nDataLeftInSample = 0;

              break;
            }
            else if (command == ASC_Pause)
            {
              m_pCurrentSample.Release();
              m_state = StatePaused;
            }
            else if (command == ASC_Resume)
            {
              sampleProcessed = false;
              writeSilence = 0;
              m_state = StateRunning;
              if (!m_pCurrentSample)
              {
                m_nDataLeftInSample = 0;
                goto fetchSample;
              }
            }
          }

          if (m_state != StateRunning)
            writeSilence = bufferSizeInBytes - bytesFilled;
          else if (m_nSampleOffset == 0 && !OOBCommandOnly)
          {
            // TODO error checking
            if (CheckSample(m_pCurrentSample, bufferSize - currentPadding) == S_FALSE)
            {
              GetWASAPIBuffer(bufferSize, currentPadding, bufferSizeInBytes, &data);
              bytesFilled = 0;
            }
          }

          if (writeSilence == 0 && (m_nSampleOffset == 0 || m_nSampleNum == 0) && !sampleProcessed)
          {
            HRESULT schedulingHR = CheckStreamTimeline(m_pCurrentSample, &dueTime, m_nSampleOffset);
            sampleProcessed = true;
              
            // m_pCurrentSample must exist if CheckStreamTimeline returns either of these
            if (schedulingHR == MPAR_S_DROP_SAMPLE)
            {
              m_pCurrentSample.Release();
              m_nDataLeftInSample = 0;
              goto fetchSample;
            }
            else if (schedulingHR == MPAR_S_WAIT_RENDER_TIME)
              CalculateSilence(&dueTime, &writeSilence);
          }

          if (writeSilence == 0 && m_pCurrentSample)
            RenderAudio(data, bufferSizeInBytes, m_nDataLeftInSample, m_nSampleOffset, m_pCurrentSample, bytesFilled);
          else
          {
            if (bufferSizeInBytes == writeSilence)
              flags = AUDCLNT_BUFFERFLAGS_SILENT;

            if (!m_pCurrentSample)
              writeSilence = bufferSizeInBytes;

            RenderSilence(data, bufferSizeInBytes, writeSilence, bytesFilled);
          }
        } while (bytesFilled < bufferSizeInBytes);

        hr = m_pRenderClient->ReleaseBuffer(bufferSize - currentPadding, flags);

        if (FAILED(hr) && hr != AUDCLNT_E_OUT_OF_ORDER)
          Log("CWASAPIRenderFilter::Render thread: ReleaseBuffer failed (0x%08x)", hr);
      }

      if (!m_pSettings->m_bWASAPIUseEventMode)
      {
        if (m_pAudioClient)
          hr = m_pAudioClient->GetCurrentPadding(&currentPadding);
        else
          hr = S_FALSE;

        if (SUCCEEDED(hr) && bufferSize > 0)
        {
          liDueTime.QuadPart = (double)currentPadding / (double)bufferSize * (double)m_pSettings->m_hnsPeriod * -0.9;
          // Log(" currentPadding: %d QuadPart: %lld", currentPadding, liDueTime.QuadPart);
        }
        else
        {
          liDueTime.QuadPart = (double)m_pSettings->m_hnsPeriod * -0.9;
          if (hr != AUDCLNT_E_NOT_INITIALIZED)
            Log("CWASAPIRenderFilter::Render thread: GetCurrentPadding failed (0x%08x)", hr);  
        }
        SetWaitableTimer(m_hDataEvent, &liDueTime, 0, NULL, NULL, 0);
      }
    }
  }
  
  m_csResources.Unlock();
  return 0;
}
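
When event mode is disabled, the render thread above re-arms m_hDataEvent as a waitable timer: the relative due time is roughly 90% of the time the currently queued padding needs to drain (SetWaitableTimer treats negative due times as relative, in 100-ns units). The same scheduling step, isolated as a sketch with illustrative names:

#include <windows.h>

// Re-arm a waitable timer so it fires after ~90% of the time needed to play out the
// frames currently queued (currentPadding out of bufferSize frames per period hnsPeriod).
static void ArmPollingTimer(HANDLE hTimer, UINT32 currentPadding, UINT32 bufferSize,
                            LONGLONG hnsPeriod)
{
    LARGE_INTEGER liDueTime;
    if (bufferSize > 0)
        liDueTime.QuadPart = (LONGLONG)((double)currentPadding / bufferSize * hnsPeriod * -0.9);
    else
        liDueTime.QuadPart = (LONGLONG)(hnsPeriod * -0.9);  // no buffer info, wait ~one period

    // Negative due time = relative interval in 100-ns units; one-shot (period 0), no APC routine.
    SetWaitableTimer(hTimer, &liDueTime, 0, NULL, NULL, FALSE);
}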
HRESULT CWASAPIRenderFilter::InitAudioClient()
{
  Log("WASAPIRenderFilter::InitAudioClient");
  HRESULT hr = S_OK;
  
  if (m_pSettings->m_hnsPeriod == 0 || m_pSettings->m_hnsPeriod == 1)
  {
    REFERENCE_TIME defaultPeriod(0);
    REFERENCE_TIME minimumPeriod(0);

    hr = m_pAudioClient->GetDevicePeriod(&defaultPeriod, &minimumPeriod);
    if (SUCCEEDED(hr))
    {
      if (m_pSettings->m_hnsPeriod == 0)
        m_pSettings->m_hnsPeriod = defaultPeriod;
      else
        m_pSettings->m_hnsPeriod = minimumPeriod;
      Log("WASAPIRenderFilter::InitAudioClient using device period from driver %I64u ms", m_pSettings->m_hnsPeriod / 10000);
    }
    else
    {
      Log("WASAPIRenderFilter::InitAudioClient failed to get device period from driver (0x%08x) - using 50 ms", hr); 
      m_pSettings->m_hnsPeriod = 500000; //50 ms is the best according to James @Slysoft
    }
  }

  WAVEFORMATEXTENSIBLE* pwfxAccepted = NULL;
  hr = IsFormatSupported(m_pInputFormat, &pwfxAccepted);
  if (FAILED(hr))
  {
    SAFE_DELETE_WAVEFORMATEX(pwfxAccepted);
    return hr;
  }

  GetBufferSize((WAVEFORMATEX*)pwfxAccepted, &m_pSettings->m_hnsPeriod);

  if (SUCCEEDED(hr))
    hr = m_pAudioClient->Initialize(m_pSettings->m_WASAPIShareMode, m_dwStreamFlags,
	                                m_pSettings->m_hnsPeriod, m_pSettings->m_hnsPeriod, (WAVEFORMATEX*)pwfxAccepted, NULL);

  if (FAILED(hr) && hr != AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED)
  {
    Log("WASAPIRenderFilter::InitAudioClient Initialize failed (0x%08x)", hr);
    SAFE_DELETE_WAVEFORMATEX(pwfxAccepted);
    return hr;
  }

  if (hr == S_OK)
  {
    SAFE_RELEASE(m_pAudioClock);
    hr = m_pAudioClient->GetService(__uuidof(IAudioClock), (void**)&m_pAudioClock);
    if (SUCCEEDED(hr))
      m_pAudioClock->GetFrequency(&m_nHWfreq);
    else
      Log("WASAPIRenderFilter::IAudioClock not found!");
  }

  if (hr == AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED) 
  {
    // if the buffer size was not aligned, need to do the alignment dance
    Log("WASAPIRenderFilter::InitAudioClient Buffer size not aligned. Realigning");

    // get the buffer size, which will be aligned
    hr = m_pAudioClient->GetBufferSize(&m_nFramesInBuffer);

    // throw away this IAudioClient
    SAFE_RELEASE(m_pAudioClient);

    // calculate the new aligned periodicity
    m_pSettings->m_hnsPeriod = // hns =
                  (REFERENCE_TIME)(
                  10000.0 * // (hns / ms) *
                  1000 * // (ms / s) *
                  m_nFramesInBuffer / // frames /
                  m_pInputFormat->Format.nSamplesPerSec  // (frames / s)
                  + 0.5 // rounding
    );

    if (SUCCEEDED(hr)) 
      hr = CreateAudioClient();
      
    Log("WASAPIRenderFilter::InitAudioClient Trying again with periodicity of %I64u hundred-nanoseconds, or %u frames", m_pSettings->m_hnsPeriod, m_nFramesInBuffer);

    if (SUCCEEDED (hr)) 
      hr = m_pAudioClient->Initialize(m_pSettings->m_WASAPIShareMode, m_dwStreamFlags,
                                      m_pSettings->m_hnsPeriod, m_pSettings->m_hnsPeriod, (WAVEFORMATEX*)pwfxAccepted, NULL);
 
    if (FAILED(hr))
    {
      Log("WASAPIRenderFilter::InitAudioClient Failed to reinitialize the audio client");
      SAFE_DELETE_WAVEFORMATEX(pwfxAccepted);
      return hr;
    }
    else
    {
      SAFE_RELEASE(m_pAudioClock);
      hr = m_pAudioClient->GetService(__uuidof(IAudioClock), (void**)&m_pAudioClock);
      if (FAILED(hr))
        Log("WASAPIRenderFilter::IAudioClock not found!");
      else
        m_pAudioClock->GetFrequency(&m_nHWfreq);
    }
  } // if (AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED == hr) 

  // get the buffer size, which is aligned
  if (SUCCEEDED(hr)) 
    hr = m_pAudioClient->GetBufferSize(&m_nFramesInBuffer);

  // get the render client service
  if (SUCCEEDED (hr)) 
    hr = m_pAudioClient->GetService(__uuidof(IAudioRenderClient), (void**)(&m_pRenderClient));

  if (FAILED(hr))
    Log("WASAPIRenderFilter::InitAudioClient service initialization failed (0x%08x)", hr);
  else
    Log("WASAPIRenderer::InitAudioClient service initialization success");

  if (m_pSettings->m_bWASAPIUseEventMode)
  {
    hr = m_pAudioClient->SetEventHandle(m_hDataEvent);
    if (FAILED(hr))
    {
      Log("WASAPIRenderFilter::InitAudioClient SetEventHandle failed (0x%08x)", hr);
      SAFE_DELETE_WAVEFORMATEX(pwfxAccepted);
      return hr;
    }
  }

  REFERENCE_TIME latency(0);
  m_pAudioClient->GetStreamLatency(&latency);
  
  Log("WASAPIRenderFilter::InitAudioClient device reported latency %I64u ms - buffer based latency %I64u ms", 
    latency / 10000, Latency() / 10000);

  // Dynamic format change requires restart for the audio client
  if (m_state != StateStopped)
    StartAudioClient();

  m_bDeviceInitialized = true;

  SAFE_DELETE_WAVEFORMATEX(pwfxAccepted);
  return hr;
}
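
For event-driven operation (m_pSettings->m_bWASAPIUseEventMode), the stream must be initialized with AUDCLNT_STREAMFLAGS_EVENTCALLBACK among the stream flags and the event handle registered before rendering starts, as the SetEventHandle call above shows. Reduced to its core, with illustrative names, exclusive mode picked for concreteness, and no alignment retry or logging, that setup looks roughly like this:

#include <windows.h>
#include <audioclient.h>

// Sketch of event-driven initialization against an existing IAudioClient.
// Assumes pWfx and hnsPeriod were already validated/chosen by the caller.
static HRESULT InitEventDrivenSketch(IAudioClient* pAudioClient, WAVEFORMATEX* pWfx,
                                     REFERENCE_TIME hnsPeriod, HANDLE hDataEvent,
                                     IAudioRenderClient** ppRenderClient)
{
    HRESULT hr = pAudioClient->Initialize(AUDCLNT_SHAREMODE_EXCLUSIVE,
                                          AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
                                          hnsPeriod, hnsPeriod, pWfx, NULL);
    if (FAILED(hr))
        return hr;

    // The event handle must be registered before the first GetBuffer/Start
    // when the event callback flag is used.
    hr = pAudioClient->SetEventHandle(hDataEvent);
    if (FAILED(hr))
        return hr;

    return pAudioClient->GetService(__uuidof(IAudioRenderClient), (void**)ppRenderClient);
}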