// Processing DWORD CTimeStretchFilter::ThreadProc() { Log("CTimeStretchFilter::timestretch thread - starting up - thread ID: %d", m_ThreadId); SetThreadName(0, "TimeStretchFilter"); AudioSinkCommand command; CComPtr<IMediaSample> sample; while (true) { m_csResources.Unlock(); HRESULT hr = GetNextSampleOrCommand(&command, &sample.p, INFINITE, &m_hSampleEvents, &m_dwSampleWaitObjects); m_csResources.Lock(); if (hr == MPAR_S_THREAD_STOPPING) { Log("CTimeStretchFilter::timestretch thread - closing down - thread ID: %d", m_ThreadId); SetEvent(m_hCurrentSampleReleased); CloseThread(); m_csResources.Unlock(); return 0; } else { if (command == ASC_Flush) { Log("CTimeStretchFilter::timestretch thread - flushing"); m_rtInSampleTime = m_rtNextIncomingSampleTime = 0; m_rtLastOuputStart = m_rtLastOuputEnd = -1; if (m_pNextOutSample) m_pNextOutSample.Release(); flush(); m_pClock->Flush(); sample.Release(); SetEvent(m_hCurrentSampleReleased); } else if (command == ASC_Pause || command == ASC_Resume) continue; else if (sample) { BYTE *pMediaBuffer = NULL; long size = sample->GetActualDataLength(); if (sample->IsDiscontinuity() == S_OK) { sample->SetDiscontinuity(false); m_bDiscontinuity = true; } REFERENCE_TIME rtDrained = 0; if (CheckSample(sample, &rtDrained) == S_FALSE) { DeleteMediaType(m_pMediaType); sample->GetMediaType(&m_pMediaType); } CheckStreamContinuity(sample, rtDrained); m_nSampleNum++; hr = sample->GetPointer(&pMediaBuffer); if ((hr == S_OK) && m_pMemAllocator) { REFERENCE_TIME rtStart = 0; REFERENCE_TIME rtAdjustedStart = 0; REFERENCE_TIME rtEnd = 0; REFERENCE_TIME rtAdjustedEnd = 0; REFERENCE_TIME rtAHwTime = 0; REFERENCE_TIME rtRCTime = 0; m_pClock->GetHWTime(&rtRCTime, &rtAHwTime); sample->GetTime(&rtStart, &rtEnd); REFERENCE_TIME sampleDuration = rtEnd - rtStart; uint unprocessedSamplesBefore = numUnprocessedSamples(); uint unprocessedSamplesAfter = 0; UINT32 nFrames = size / m_pOutputFormat->Format.nBlockAlign; double bias = m_pClock->GetBias(); double 
adjustment = m_pClock->Adjustment(); double AVMult = m_pClock->SuggestedAudioMultiplier(rtAHwTime, rtRCTime, bias, adjustment); setTempoInternal(AVMult, 1.0); if (m_rtLastOuputEnd == -1) m_rtLastOuputEnd = rtStart / AVMult - 1; m_rtLastOuputStart = m_rtLastOuputEnd + 1; // Process the sample putSamplesInternal((const short*)pMediaBuffer, size / m_pOutputFormat->Format.nBlockAlign); unprocessedSamplesAfter = numUnprocessedSamples(); UINT32 nInFrames = (size / m_pOutputFormat->Format.nBlockAlign) - unprocessedSamplesAfter + unprocessedSamplesBefore; UINT32 nOutFrames = numSamples(); // TODO: Soundtouch can provide less samples than asked (but never more) so a cummulative error is possible. This will not happen over the course of a long TV stint, but could be solved for correctness // m_rtLastOuputEnd += (nOutFrames + unprocessedSamplesAfter - unprocessedSamplesBefore) * UNITS / m_pOutputFormat->Format.nSamplesPerSec; //rtStart = m_rtInSampleTime; rtEnd = rtStart + sampleDuration; rtAdjustedStart = m_rtLastOuputEnd +1; rtAdjustedEnd = rtAdjustedStart + sampleDuration / AVMult; m_rtLastOuputEnd += sampleDuration / AVMult; CreateOutput(nInFrames, nOutFrames, bias, adjustment, AVMult, false); m_pClock->AddSample(rtStart, rtAdjustedStart, rtEnd, rtAdjustedEnd); } } } } }
// Processing
// Worker thread: pulls samples/commands from the input queue and feeds them
// through the time stretcher at the clock-suggested tempo. Returns 0 on
// thread shutdown.
DWORD CTimeStretchFilter::ThreadProc()
{
  Log("CTimeStretchFilter::timestretch thread - starting up - thread ID: %d", m_ThreadId);
  SetThreadName(0, "TimeStretchFilter");

  AudioSinkCommand command;
  CComPtr<IMediaSample> sample;

  while (true)
  {
    // Release the resource lock while blocking on the queue so other threads
    // can make progress; re-acquire before touching shared state.
    m_csResources.Unlock();
    HRESULT hr = GetNextSampleOrCommand(&command, &sample.p, INFINITE, &m_hSampleEvents, &m_dwSampleWaitObjects);
    m_csResources.Lock();

    if (hr == MPAR_S_THREAD_STOPPING)
    {
      Log("CTimeStretchFilter::timestretch thread - closing down - thread ID: %d", m_ThreadId);
      SetEvent(m_hCurrentSampleReleased);
      CloseThread();
      m_csResources.Unlock();
      return 0;
    }
    else
    {
      if (command == ASC_Flush)
      {
        // Reset incoming-timestamp bookkeeping and drop queued/partial output.
        Log("CTimeStretchFilter::timestretch thread - flushing");
        m_rtInSampleTime = m_rtNextIncomingSampleTime = 0;

        if (m_pNextOutSample)
          m_pNextOutSample.Release();
        flush();

        sample.Release();
        SetEvent(m_hCurrentSampleReleased);
      }
      else if (command == ASC_Pause || command == ASC_Resume)
        continue;
      else if (sample)
      {
        BYTE *pMediaBuffer = NULL;
        long size = sample->GetActualDataLength();

        if (sample->IsDiscontinuity() == S_OK)
        {
          sample->SetDiscontinuity(false);
          m_bDiscontinuity = true;
        }

        // S_FALSE from CheckSample signals a format change carried on the sample.
        if (CheckSample(sample) == S_FALSE)
        {
          DeleteMediaType(m_pMediaType);
          sample->GetMediaType(&m_pMediaType);
        }

        CheckStreamContinuity(sample);
        m_nSampleNum++;

        hr = sample->GetPointer(&pMediaBuffer);

        if ((hr == S_OK) && m_pMemAllocator)
        {
          uint unprocessedSamplesBefore = numUnprocessedSamples();
          uint unprocessedSamplesAfter = 0;

          UINT32 nFrames = size / m_pOutputFormat->Format.nBlockAlign;
          // Duration in REFERENCE_TIME units derived from the frame count and
          // sample rate (UNITS presumably converts seconds to 100 ns units).
          REFERENCE_TIME estimatedSampleDuration = nFrames * UNITS / m_pOutputFormat->Format.nSamplesPerSec;

          double bias = m_pClock->GetBias();
          double adjustment = m_pClock->Adjustment();
          double AVMult = m_pClock->SuggestedAudioMultiplier(estimatedSampleDuration, bias, adjustment);
          setTempoInternal(AVMult, 1.0); // this should be the same as previous line, but in future we want to get rid of the 2nd parameter

          // Process the sample
          putSamplesInternal((const short*)pMediaBuffer, size / m_pOutputFormat->Format.nBlockAlign);
          unprocessedSamplesAfter = numUnprocessedSamples();

          // Frames actually consumed by the stretcher during this call.
          UINT32 nInFrames = (size / m_pOutputFormat->Format.nBlockAlign) - unprocessedSamplesAfter + unprocessedSamplesBefore;
          UINT32 nOutFrames = numSamples();

          CreateOutput(nInFrames, nOutFrames, bias, adjustment, AVMult, false);
        }
      }
    }
  }
}
// Render thread: feeds the WASAPI endpoint buffer from the incoming sample
// queue, writing silence when paused/starved and re-arming a waitable timer
// when event-driven mode is not used. Returns 0 on thread shutdown.
DWORD CWASAPIRenderFilter::ThreadProc()
{
  Log("CWASAPIRenderFilter::Render thread - starting up - thread ID: %d", m_ThreadId);
  SetThreadName(0, "WASAPI-renderer");

  // Polling delay
  LARGE_INTEGER liDueTime;
  liDueTime.QuadPart = -1LL;

  AudioSinkCommand command;

  LONGLONG writeSilence = 0;
  BYTE* sampleData = NULL;

  bool flush = false;
  bool sampleProcessed = false;

  REFERENCE_TIME dueTime = 0;
  // Latency() / 20000 — appears to convert 100 ns units to half the latency
  // in milliseconds; TODO confirm against Latency()'s units.
  REFERENCE_TIME maxSampleWaitTime = Latency() / 20000;

  HRESULT hr = S_FALSE;

  m_csResources.Lock();

  // Recreate the audio client if it was released on stop.
  if (m_pSettings->m_bReleaseDeviceOnStop && !m_pAudioClient && m_pInputFormat)
  {
    hr = CreateAudioClient(true);
    if (FAILED(hr))
    {
      Log("CWASAPIRenderFilter::Render thread Error, audio client not available: (0x%08x)", hr);
      StopRenderThread();
      m_csResources.Unlock();
      return 0;
    }
  }

  if (m_pAudioClient)
  {
    hr = StartAudioClient();
    if (FAILED(hr))
    {
      Log("CWASAPIRenderFilter::Render thread Error, starting audio client failed: (0x%08x)", hr);
      StopRenderThread();
      m_csResources.Unlock();
      return 0;
    }
  }

  if (!m_bDeviceInitialized)
  {
    Log("CWASAPIRenderFilter::Render thread Error, device not initialized");
    StopRenderThread();
    m_csResources.Unlock();
    return 0;
  }

  EnableMMCSS();
  m_state = StateRunning;

  while (true)
  {
    if (flush)
    {
      Log("CWASAPIRenderFilter::Render thread flushing buffers");
      HandleFlush();
      flush = false;
    }

    // Drop the resource lock while waiting for data/stop events.
    m_csResources.Unlock();
    hr = WaitForEvents(INFINITE, &m_hDataEvents, &m_dwDataWaitObjects);
    m_csResources.Lock();

    if (hr == MPAR_S_THREAD_STOPPING || !m_pAudioClient)
    {
      StopRenderThread();
      return 0;
    }
    else if (hr == MPAR_S_NEED_DATA)
    {
      UpdateAudioClock();

      UINT32 bytesFilled = 0;
      UINT32 bufferSize = 0;
      UINT32 currentPadding = 0;
      UINT32 bufferSizeInBytes = 0;
      BYTE* data = NULL;
      DWORD flags = 0;

      hr = GetWASAPIBuffer(bufferSize, currentPadding, bufferSizeInBytes, &data);
      if (SUCCEEDED(hr))
      {
        // Fill the endpoint buffer completely before releasing it.
        do
        {
fetchSample:
          // While a sample is partially rendered, only poll for out-of-band commands.
          bool OOBCommandOnly = m_nDataLeftInSample > 0;

          if (m_nDataLeftInSample == 0 || OOBCommandOnly)
          {
            m_csResources.Unlock();
            HRESULT result = GetNextSampleOrCommand(&command, &m_pCurrentSample.p, maxSampleWaitTime, &m_hSampleEvents,
                                                    &m_dwSampleWaitObjects, OOBCommandOnly);
            m_csResources.Lock();

            if (result == MPAR_S_THREAD_STOPPING || !m_pAudioClient)
            {
              // Release the endpoint buffer before shutting down, if still possible.
              if (m_pAudioClient)
              {
                hr = m_pRenderClient->ReleaseBuffer(bufferSize - currentPadding, flags);
                if (FAILED(hr) && hr != AUDCLNT_E_OUT_OF_ORDER)
                  Log("CWASAPIRenderFilter::Render thread: ReleaseBuffer failed (0x%08x)", hr);
              }
              StopRenderThread();
              return 0;
            }

            if (!m_pCurrentSample)
              m_nDataLeftInSample = 0;

            if (command == ASC_PutSample && m_pCurrentSample)
            {
              sampleProcessed = false;
              m_nSampleOffset = 0;
              m_nDataLeftInSample = m_pCurrentSample->GetActualDataLength();
            }
            else if (command == ASC_Flush)
            {
              m_pCurrentSample.Release();
              flush = true;
              sampleData = NULL;
              m_nSampleOffset = 0;
              m_nDataLeftInSample = 0;
              break;
            }
            else if (command == ASC_Pause)
            {
              m_pCurrentSample.Release();
              m_state = StatePaused;
            }
            else if (command == ASC_Resume)
            {
              sampleProcessed = false;
              writeSilence = 0;
              m_state = StateRunning;
              if (!m_pCurrentSample)
              {
                m_nDataLeftInSample = 0;
                goto fetchSample;
              }
            }
          }

          if (m_state != StateRunning)
            writeSilence = bufferSizeInBytes - bytesFilled;
          else if (m_nSampleOffset == 0 && !OOBCommandOnly)
          {
            // TODO error checking
            // S_FALSE indicates the buffer had to be re-acquired (e.g. format change);
            // restart filling from the beginning of the fresh buffer.
            if (CheckSample(m_pCurrentSample, bufferSize - currentPadding) == S_FALSE)
            {
              GetWASAPIBuffer(bufferSize, currentPadding, bufferSizeInBytes, &data);
              bytesFilled = 0;
            }
          }

          if (writeSilence == 0 && (m_nSampleOffset == 0 || m_nSampleNum == 0) && !sampleProcessed)
          {
            HRESULT schedulingHR = CheckStreamTimeline(m_pCurrentSample, &dueTime, m_nSampleOffset);
            sampleProcessed = true;

            // m_pCurrentSample must exist if CheckStreamTimeline returns either of these
            if (schedulingHR == MPAR_S_DROP_SAMPLE)
            {
              m_pCurrentSample.Release();
              m_nDataLeftInSample = 0;
              goto fetchSample;
            }
            else if (schedulingHR == MPAR_S_WAIT_RENDER_TIME)
              CalculateSilence(&dueTime, &writeSilence);
          }

          if (writeSilence == 0 && m_pCurrentSample)
            RenderAudio(data, bufferSizeInBytes, m_nDataLeftInSample, m_nSampleOffset, m_pCurrentSample, bytesFilled);
          else
          {
            if (bufferSizeInBytes == writeSilence)
              flags = AUDCLNT_BUFFERFLAGS_SILENT;

            if (!m_pCurrentSample)
              writeSilence = bufferSizeInBytes;

            RenderSilence(data, bufferSizeInBytes, writeSilence, bytesFilled);
          }
        } while (bytesFilled < bufferSizeInBytes);

        hr = m_pRenderClient->ReleaseBuffer(bufferSize - currentPadding, flags);

        if (FAILED(hr) && hr != AUDCLNT_E_OUT_OF_ORDER)
          Log("CWASAPIRenderFilter::Render thread: ReleaseBuffer failed (0x%08x)", hr);
      }

      // In polling (non event-driven) mode, re-arm the waitable timer so the
      // thread wakes up shortly before the endpoint buffer drains.
      if (!m_pSettings->m_bWASAPIUseEventMode)
      {
        if (m_pAudioClient)
          hr = m_pAudioClient->GetCurrentPadding(&currentPadding); // fixed: was mis-encoded as "¤tPadding"
        else
          hr = S_FALSE;

        if (SUCCEEDED(hr) && bufferSize > 0)
        {
          // Wake at ~90% of the time the queued audio will take to play out.
          liDueTime.QuadPart = (double)currentPadding / (double)bufferSize * (double)m_pSettings->m_hnsPeriod * -0.9;
          // Log("   currentPadding: %d QuadPart: %lld", currentPadding, liDueTime.QuadPart);
        }
        else
        {
          liDueTime.QuadPart = (double)m_pSettings->m_hnsPeriod * -0.9;
          if (hr != AUDCLNT_E_NOT_INITIALIZED)
            Log("CWASAPIRenderFilter::Render thread: GetCurrentPadding failed (0x%08x)", hr);
        }
        SetWaitableTimer(m_hDataEvent, &liDueTime, 0, NULL, NULL, 0);
      }
    }
  }

  m_csResources.Unlock();
  return 0;
}