// Queue a packet carrying `pos` for simulated delivery, randomly dropping
// a DROPRATE fraction of packets to emulate network loss.  The packet is
// keyed in packetQueue by its arrival time (send time + one-way latency).
void SendPacket(Pos const &pos, double time) {
    // (rand() & 0x7fff) / 32768.0 yields a uniform value in [0, 1).
    const double roll = (rand() & 0x7fff) / 32768.0;
    if (roll < DROPRATE)
        return; // simulated packet loss: silently drop

    Packet outgoing;
    outgoing.time = time;
    outgoing.pos = pos;
    packetQueue[time + Latency()] = outgoing;
}
// Deliver raw MIDI bytes to the driver.  Sleeps until (event time - port
// latency) so the data reaches the hardware at the requested absolute time,
// then writes it to the driver's file descriptor.  Write errors are only
// reported via perror; the data is not retried.
void MidiPortConsumer::Data(uchar* data, size_t length, bool atomic, bigtime_t time) {
    // Wake early by Latency() to compensate for the driver/port delay.
    snooze_until(time - Latency(), B_SYSTEM_TIMEBASE);

    const ssize_t written = write(fFileDescriptor, data, length);
    if (written == -1)
        perror("Error sending data to driver");
}
// Render thread entry point.
//
// Main loop: block on the data event(s), then fill the WASAPI endpoint
// buffer with sample data or silence, reacting to out-of-band commands
// (put sample / flush / pause / resume) from the input queue.  The thread
// holds m_csResources while touching the audio/render clients and drops it
// around the blocking waits so other threads can reconfigure the device.
//
// Always returns 0; failures are logged and end the loop via
// StopRenderThread().
//
// Fixes vs. previous revision:
//  - GetCurrentPadding(&currentPadding): the '&' had been corrupted into a
//    '¤' character ("¤tPadding"), which does not compile.
//  - removed the unused local `static BYTE* prevData`.
DWORD CWASAPIRenderFilter::ThreadProc()
{
  Log("CWASAPIRenderFilter::Render thread - starting up - thread ID: %d", m_ThreadId);
  SetThreadName(0, "WASAPI-renderer");

  // Polling delay (relative waitable-timer due time for non event mode)
  LARGE_INTEGER liDueTime;
  liDueTime.QuadPart = -1LL;

  AudioSinkCommand command;

  LONGLONG writeSilence = 0;
  BYTE* sampleData = NULL;

  bool flush = false;
  bool sampleProcessed = false;

  REFERENCE_TIME dueTime = 0;
  // Latency() is in 100 ns units; this is half the latency in ms —
  // presumably the unit GetNextSampleOrCommand expects (TODO confirm).
  REFERENCE_TIME maxSampleWaitTime = Latency() / 20000;

  HRESULT hr = S_FALSE;

  m_csResources.Lock();

  // Re-create the audio client if it was released when playback stopped.
  if (m_pSettings->m_bReleaseDeviceOnStop && !m_pAudioClient && m_pInputFormat)
  {
    hr = CreateAudioClient(true);
    if (FAILED(hr))
    {
      Log("CWASAPIRenderFilter::Render thread Error, audio client not available: (0x%08x)", hr);
      StopRenderThread();
      m_csResources.Unlock();
      return 0;
    }
  }

  if (m_pAudioClient)
  {
    hr = StartAudioClient();
    if (FAILED(hr))
    {
      Log("CWASAPIRenderFilter::Render thread Error, starting audio client failed: (0x%08x)", hr);
      StopRenderThread();
      m_csResources.Unlock();
      return 0;
    }
  }

  if (!m_bDeviceInitialized)
  {
    Log("CWASAPIRenderFilter::Render thread Error, device not initialized");
    StopRenderThread();
    m_csResources.Unlock();
    return 0;
  }

  EnableMMCSS();
  m_state = StateRunning;

  while (true)
  {
    if (flush)
    {
      Log("CWASAPIRenderFilter::Render thread flushing buffers");
      HandleFlush();
      flush = false;
    }

    // Drop the lock while blocking so other threads may use the resources.
    m_csResources.Unlock();
    hr = WaitForEvents(INFINITE, &m_hDataEvents, &m_dwDataWaitObjects);
    m_csResources.Lock();

    if (hr == MPAR_S_THREAD_STOPPING || !m_pAudioClient)
    {
      StopRenderThread();
      return 0;
    }
    else if (hr == MPAR_S_NEED_DATA)
    {
      UpdateAudioClock();

      UINT32 bytesFilled = 0;
      UINT32 bufferSize = 0;
      UINT32 currentPadding = 0;
      UINT32 bufferSizeInBytes = 0;
      BYTE* data = NULL;
      DWORD flags = 0;

      hr = GetWASAPIBuffer(bufferSize, currentPadding, bufferSizeInBytes, &data);
      if (SUCCEEDED(hr))
      {
        // Fill the whole endpoint buffer before releasing it.
        do
        {
        fetchSample:

          // While unrendered data remains in the current sample, only
          // out-of-band commands are fetched from the queue.
          bool OOBCommandOnly = m_nDataLeftInSample > 0;

          if (m_nDataLeftInSample == 0 || OOBCommandOnly)
          {
            m_csResources.Unlock();
            HRESULT result = GetNextSampleOrCommand(&command, &m_pCurrentSample.p, maxSampleWaitTime,
                                                    &m_hSampleEvents, &m_dwSampleWaitObjects, OOBCommandOnly);
            m_csResources.Lock();

            if (result == MPAR_S_THREAD_STOPPING || !m_pAudioClient)
            {
              // Return the (partially filled) buffer before shutting down.
              if (m_pAudioClient)
              {
                hr = m_pRenderClient->ReleaseBuffer(bufferSize - currentPadding, flags);
                if (FAILED(hr) && hr != AUDCLNT_E_OUT_OF_ORDER)
                  Log("CWASAPIRenderFilter::Render thread: ReleaseBuffer failed (0x%08x)", hr);
              }
              StopRenderThread();
              return 0;
            }

            if (!m_pCurrentSample)
              m_nDataLeftInSample = 0;

            if (command == ASC_PutSample && m_pCurrentSample)
            {
              sampleProcessed = false;
              m_nSampleOffset = 0;
              m_nDataLeftInSample = m_pCurrentSample->GetActualDataLength();
            }
            else if (command == ASC_Flush)
            {
              m_pCurrentSample.Release();

              flush = true;
              sampleData = NULL;
              m_nSampleOffset = 0;
              m_nDataLeftInSample = 0;
              break;
            }
            else if (command == ASC_Pause)
            {
              m_pCurrentSample.Release();
              m_state = StatePaused;
            }
            else if (command == ASC_Resume)
            {
              sampleProcessed = false;
              writeSilence = 0;
              m_state = StateRunning;
              if (!m_pCurrentSample)
              {
                m_nDataLeftInSample = 0;
                goto fetchSample;
              }
            }
          }

          if (m_state != StateRunning)
            writeSilence = bufferSizeInBytes - bytesFilled;
          else if (m_nSampleOffset == 0 && !OOBCommandOnly)
          {
            // TODO error checking
            // CheckSample may have re-initialized the device; re-acquire
            // the endpoint buffer and start filling from scratch.
            if (CheckSample(m_pCurrentSample, bufferSize - currentPadding) == S_FALSE)
            {
              GetWASAPIBuffer(bufferSize, currentPadding, bufferSizeInBytes, &data);
              bytesFilled = 0;
            }
          }

          if (writeSilence == 0 && (m_nSampleOffset == 0 || m_nSampleNum == 0) && !sampleProcessed)
          {
            HRESULT schedulingHR = CheckStreamTimeline(m_pCurrentSample, &dueTime, m_nSampleOffset);
            sampleProcessed = true;

            // m_pCurrentSample must exist if CheckStreamTimeline returns either of these
            if (schedulingHR == MPAR_S_DROP_SAMPLE)
            {
              m_pCurrentSample.Release();
              m_nDataLeftInSample = 0;
              goto fetchSample;
            }
            else if (schedulingHR == MPAR_S_WAIT_RENDER_TIME)
              CalculateSilence(&dueTime, &writeSilence);
          }

          if (writeSilence == 0 && m_pCurrentSample)
            RenderAudio(data, bufferSizeInBytes, m_nDataLeftInSample, m_nSampleOffset, m_pCurrentSample, bytesFilled);
          else
          {
            if (bufferSizeInBytes == writeSilence)
              flags = AUDCLNT_BUFFERFLAGS_SILENT;

            if (!m_pCurrentSample)
              writeSilence = bufferSizeInBytes;

            RenderSilence(data, bufferSizeInBytes, writeSilence, bytesFilled);
          }
        } while (bytesFilled < bufferSizeInBytes);

        hr = m_pRenderClient->ReleaseBuffer(bufferSize - currentPadding, flags);

        if (FAILED(hr) && hr != AUDCLNT_E_OUT_OF_ORDER)
          Log("CWASAPIRenderFilter::Render thread: ReleaseBuffer failed (0x%08x)", hr);
      }

      if (!m_pSettings->m_bWASAPIUseEventMode)
      {
        // Timer-driven mode: schedule the next wakeup proportionally to how
        // much data is still pending in the endpoint buffer (negative due
        // time = relative; the 0.9 factor wakes slightly early).
        if (m_pAudioClient)
          hr = m_pAudioClient->GetCurrentPadding(&currentPadding);
        else
          hr = S_FALSE;

        if (SUCCEEDED(hr) && bufferSize > 0)
        {
          liDueTime.QuadPart = (double)currentPadding / (double)bufferSize * (double)m_pSettings->m_hnsPeriod * -0.9;
          // Log("   currentPadding: %d QuadPart: %lld", currentPadding, liDueTime.QuadPart);
        }
        else
        {
          liDueTime.QuadPart = (double)m_pSettings->m_hnsPeriod * -0.9;
          if (hr != AUDCLNT_E_NOT_INITIALIZED)
            Log("CWASAPIRenderFilter::Render thread: GetCurrentPadding failed (0x%08x)", hr);
        }
        SetWaitableTimer(m_hDataEvent, &liDueTime, 0, NULL, NULL, 0);
      }
    }
  }

  m_csResources.Unlock();
  return 0;
}
// Classify a sample against the hardware render clock.
//
// @param pSample       incoming sample (may be NULL)
// @param pDueTime      receives the sample's presentation time, adjusted by
//                      the offset of the first unrendered byte, in 100 ns
//                      stream-relative units
// @param sampleOffset  number of bytes of the sample already rendered
// @return S_FALSE                 no sample
//         MPAR_S_RENDER_SAMPLE    render immediately
//         MPAR_S_DROP_SAMPLE      sample is later than the device latency
//         MPAR_S_WAIT_RENDER_TIME sample is early; caller pads with silence
//
// Fix vs. previous revision: the discontinuity check used plain abs() on a
// 64-bit REFERENCE_TIME difference; if that resolves to the C int overload
// it silently truncates differences beyond ~214 s. llabs() keeps 64 bits.
HRESULT CWASAPIRenderFilter::CheckStreamTimeline(IMediaSample* pSample, REFERENCE_TIME* pDueTime, UINT32 sampleOffset)
{
  *pDueTime = 0;

  if (!pSample)
    return S_FALSE;

  REFERENCE_TIME rtHWTime = 0;
  REFERENCE_TIME rtRefClock = 0;
  REFERENCE_TIME rtStop = 0;
  REFERENCE_TIME rtStart = 0;
  REFERENCE_TIME rtDuration = 0;

  bool resync = false;

  HRESULT hr = pSample->GetTime(&rtStart, &rtStop);
  if (FAILED(hr))
  {
    // Render all samples flat that dont have presentation time
    m_nSampleNum++;
    return MPAR_S_RENDER_SAMPLE;
  }

  // First timed sample establishes the expected timeline.
  if (m_nSampleNum == 0)
    m_rtNextSampleTime = rtStart;

  // Duration derived from the payload size and the input format.
  long sampleLength = pSample->GetActualDataLength();
  UINT nFrames = sampleLength / m_pInputFormat->Format.nBlockAlign;
  rtDuration = nFrames * UNITS / m_pInputFormat->Format.nSamplesPerSec;

  if (SUCCEEDED(m_pClock->GetHWTime(&rtRefClock, &rtHWTime)))
  {
    // Convert both clocks to stream-relative time.
    rtRefClock -= m_rtStart;
    rtHWTime -= m_rtHwStart;
  }
  else
  {
    // No usable clock — render without scheduling.
    m_nSampleNum++;
    return MPAR_S_RENDER_SAMPLE;
  }

  if (m_pSettings->m_bLogSampleTimes)
    Log(" sample start: %6.3f stop: %6.3f dur: %6.3f diff: %6.3f rtHWTime: %6.3f rtRefClock: %6.3f early: %6.3f queue: %d %6.3f",
      rtStart / 10000000.0, rtStop / 10000000.0, rtDuration / 10000000.0, (rtStart - m_rtNextSampleTime) / 10000000.0,
      rtHWTime / 10000000.0, rtRefClock / 10000000.0, (rtStart - rtHWTime) / 10000000.0,
      m_inputQueue.size(), BufferredDataDuration() / 10000000.0);

  // Try to keep the A/V sync when data has been dropped
  if (llabs(rtStart - m_rtNextSampleTime) > MAX_SAMPLE_TIME_ERROR)
  {
    resync = true;
    Log(" Discontinuity detected: diff: %7.3f ms MAX_SAMPLE_TIME_ERROR: %7.3f ms resync: %d",
      ((double)rtStart - (double)m_rtNextSampleTime) / 10000.0, (double)MAX_SAMPLE_TIME_ERROR / 10000.0, resync);
  }

  m_rtNextSampleTime = rtStart + rtDuration;

  // Account for the already-rendered head of the sample.
  REFERENCE_TIME offsetDelay = 0;
  if (sampleOffset > 0)
    offsetDelay = sampleOffset / m_pInputFormat->Format.nBlockAlign * UNITS / m_pInputFormat->Format.nSamplesPerSec;

  *pDueTime = rtStart + offsetDelay;

  if (*pDueTime < rtHWTime - Latency())
  {
    // TODO implement partial sample dropping
    Log(" dropping late sample - pDueTime: %6.3f rtHWTime: %6.3f", *pDueTime / 10000000.0, rtHWTime / 10000000.0);
    m_nSampleNum = 0;
    return MPAR_S_DROP_SAMPLE;
  }
  else if ((m_nSampleNum == 0 && *pDueTime > rtHWTime) || resync)
  {
    m_nSampleNum++;
    if (m_pSettings->m_bLogSampleTimes)
      Log(" MPAR_S_WAIT_RENDER_TIME - %6.3f", *pDueTime / 10000000.0);
    return MPAR_S_WAIT_RENDER_TIME;
  }

  m_nSampleNum++;
  return MPAR_S_RENDER_SAMPLE;
}
// Create and configure the WASAPI audio client for the current input format:
// resolve the device period, negotiate the format, Initialize() the client
// (re-doing the whole dance with an aligned period on
// AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED), then fetch the IAudioClock and
// IAudioRenderClient services and, in event mode, register the data event.
// Returns the last failing HRESULT, or the Initialize/GetService result.
HRESULT CWASAPIRenderFilter::InitAudioClient()
{
  Log("WASAPIRenderFilter::InitAudioClient");
  HRESULT hr = S_OK;

  // m_hnsPeriod == 0 selects the device's default period, == 1 its minimum.
  if (m_pSettings->m_hnsPeriod == 0 || m_pSettings->m_hnsPeriod == 1)
  {
    REFERENCE_TIME defaultPeriod(0);
    REFERENCE_TIME minimumPeriod(0);

    hr = m_pAudioClient->GetDevicePeriod(&defaultPeriod, &minimumPeriod);
    if (SUCCEEDED(hr))
    {
      if (m_pSettings->m_hnsPeriod == 0)
        m_pSettings->m_hnsPeriod = defaultPeriod;
      else
        m_pSettings->m_hnsPeriod = minimumPeriod;
      Log("WASAPIRenderFilter::InitAudioClient using device period from driver %I64u ms", m_pSettings->m_hnsPeriod / 10000);
    }
    else
    {
      Log("WASAPIRenderFilter::InitAudioClient failed to get device period from driver (0x%08x) - using 50 ms", hr);
      m_pSettings->m_hnsPeriod = 500000; //50 ms is the best according to James @Slysoft
    }
  }

  // Negotiate the closest format the device accepts for the input format.
  WAVEFORMATEXTENSIBLE* pwfxAccepted = NULL;
  hr = IsFormatSupported(m_pInputFormat, &pwfxAccepted);
  if (FAILED(hr))
  {
    SAFE_DELETE_WAVEFORMATEX(pwfxAccepted);
    return hr;
  }

  // May adjust m_hnsPeriod for the accepted format (return value unchecked).
  GetBufferSize((WAVEFORMATEX*)pwfxAccepted, &m_pSettings->m_hnsPeriod);

  if (SUCCEEDED(hr))
    hr = m_pAudioClient->Initialize(m_pSettings->m_WASAPIShareMode, m_dwStreamFlags,
                                    m_pSettings->m_hnsPeriod, m_pSettings->m_hnsPeriod,
                                    (WAVEFORMATEX*)pwfxAccepted, NULL);

  // AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED is handled by the realignment path
  // below; every other failure is fatal here.
  if (FAILED(hr) && hr != AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED)
  {
    Log("WASAPIRenderFilter::InitAudioClient Initialize failed (0x%08x)", hr);
    SAFE_DELETE_WAVEFORMATEX(pwfxAccepted);
    return hr;
  }

  if (hr == S_OK)
  {
    // First-try success: grab the hardware clock for A/V sync.
    SAFE_RELEASE(m_pAudioClock);
    hr = m_pAudioClient->GetService(__uuidof(IAudioClock), (void**)&m_pAudioClock);
    if (SUCCEEDED(hr))
      m_pAudioClock->GetFrequency(&m_nHWfreq);
    else
      Log("WASAPIRenderFilter::IAudioClock not found!");
  }

  if (hr == AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED)
  {
    // if the buffer size was not aligned, need to do the alignment dance
    Log("WASAPIRenderFilter::InitAudioClient Buffer size not aligned. Realigning");

    // get the buffer size, which will be aligned
    hr = m_pAudioClient->GetBufferSize(&m_nFramesInBuffer);

    // throw away this IAudioClient
    SAFE_RELEASE(m_pAudioClient);

    // calculate the new aligned periodicity
    m_pSettings->m_hnsPeriod = // hns =
      (REFERENCE_TIME)(
      10000.0 *                                   // (hns / ms) *
      1000 *                                      // (ms / s) *
      m_nFramesInBuffer /                         // frames /
      m_pInputFormat->Format.nSamplesPerSec       // (frames / s)
      + 0.5                                       // rounding
      );

    if (SUCCEEDED(hr))
      hr = CreateAudioClient();

    Log("WASAPIRenderFilter::InitAudioClient Trying again with periodicity of %I64u hundred-nanoseconds, or %u frames",
      m_pSettings->m_hnsPeriod, m_nFramesInBuffer);

    if (SUCCEEDED (hr))
      hr = m_pAudioClient->Initialize(m_pSettings->m_WASAPIShareMode, m_dwStreamFlags,
                                      m_pSettings->m_hnsPeriod, m_pSettings->m_hnsPeriod,
                                      (WAVEFORMATEX*)pwfxAccepted, NULL);

    if (FAILED(hr))
    {
      Log("WASAPIRenderFilter::InitAudioClient Failed to reinitialize the audio client");
      SAFE_DELETE_WAVEFORMATEX(pwfxAccepted);
      return hr;
    }
    else
    {
      // Re-fetch the hardware clock from the recreated client.
      SAFE_RELEASE(m_pAudioClock);
      hr = m_pAudioClient->GetService(__uuidof(IAudioClock), (void**)&m_pAudioClock);
      if (FAILED(hr))
        Log("WASAPIRenderFilter::IAudioClock not found!");
      else
        m_pAudioClock->GetFrequency(&m_nHWfreq);
    }
  } // if (AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED == hr)

  // get the buffer size, which is aligned
  if (SUCCEEDED(hr))
    hr = m_pAudioClient->GetBufferSize(&m_nFramesInBuffer);

  // calculate the new period
  if (SUCCEEDED (hr))
    hr = m_pAudioClient->GetService(__uuidof(IAudioRenderClient), (void**)(&m_pRenderClient));

  if (FAILED(hr))
    Log("WASAPIRenderFilter::InitAudioClient service initialization failed (0x%08x)", hr);
  else
    Log("WASAPIRenderer::InitAudioClient service initialization success");

  if (m_pSettings->m_bWASAPIUseEventMode)
  {
    // Event mode: the client signals m_hDataEvent when it needs data.
    hr = m_pAudioClient->SetEventHandle(m_hDataEvent);
    if (FAILED(hr))
    {
      Log("WASAPIRenderFilter::InitAudioClient SetEventHandle failed (0x%08x)", hr);
      SAFE_DELETE_WAVEFORMATEX(pwfxAccepted);
      return hr;
    }
  }

  REFERENCE_TIME latency(0);
  m_pAudioClient->GetStreamLatency(&latency);
  Log("WASAPIRenderFilter::InitAudioClient device reported latency %I64u ms - buffer based latency %I64u ms",
    latency / 10000, Latency() / 10000);

  // Dynamic format change requires restart for the audio client
  if (m_state != StateStopped)
    StartAudioClient();

  m_bDeviceInitialized = true;

  SAFE_DELETE_WAVEFORMATEX(pwfxAccepted);

  return hr;
}