void DriverAlsa::AudioThread()
{
    try {
        for (;;) {
            Msg* msg = iPipeline.Pull();
            msg = msg->Process(*this);
            if (msg != NULL) {
                msg->RemoveRef();
            }

            AutoMutex am(iMutex);
            if (iQuit) {
                break;
            }
        }
    }
    catch (ThreadKill&) {}
}
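The ALSA loop above blocks inside iPipeline.Pull() and only checks iQuit between messages, so shutdown relies on the ThreadKill exception raised when the thread is killed. A minimal sketch of how a driver typically owns such a thread, assuming OpenHome's ThreadFunctor and MakeFunctor helpers (OpenHome/Private/Thread.h); the class and member names here are illustrative, not taken from the source:

#include <OpenHome/Private/Thread.h>

using namespace OpenHome;

class DriverThreadOwner
{
public:
    DriverThreadOwner()
    {
        // Run AudioThread() on a dedicated pipeline thread.
        iThread = new ThreadFunctor(
            "PipelineAudio",
            MakeFunctor(*this, &DriverThreadOwner::AudioThread));
        iThread->Start();
    }

    ~DriverThreadOwner()
    {
        // Deleting the functor Kill()s and Join()s the thread; a blocked
        // iPipeline.Pull() unwinds via the ThreadKill exception that
        // AudioThread() catches.
        delete iThread;
    }

private:
    void AudioThread(); // as above

    ThreadFunctor* iThread;
};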
void AudioDriver::AudioThread()
{
    HANDLE mmcssHandle    = NULL;
    DWORD  mmcssTaskIndex = 0;

    HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
    if (hr != S_OK) {
        Log::Print("Unable to initialize COM in render thread: %x\n", hr);
        return;
    }

    // Native events waited on in this thread.
    // (Declared before the first 'goto Exit' so no initialisation is
    //  jumped over.)
    HANDLE waitArray[2] = {iAudioSessionDisconnectedEvent,
                           iAudioSamplesReadyEvent};

    // Gain access to the system multimedia audio endpoint and associate an
    // audio client object with it.
    if (InitializeAudioClient() == false) {
        goto Exit;
    }

    // Hook up to the Multimedia Class Scheduler Service to prioritise
    // our render activities.
    mmcssHandle = AvSetMmThreadCharacteristics(L"Audio", &mmcssTaskIndex);
    if (mmcssHandle == NULL) {
        Log::Print("Unable to enable MMCSS on render thread: %d\n",
                   GetLastError());
        goto Exit;
    }

    // Pipeline processing loop.
    try {
        for (;;) {
#ifdef _TIMINGS_DEBUG
            LARGE_INTEGER StartingTime, EndingTime, ElapsedMicroseconds;
            LARGE_INTEGER Frequency;

            QueryPerformanceFrequency(&Frequency);
            QueryPerformanceCounter(&StartingTime);
#endif /* _TIMINGS_DEBUG */

            TUint32 padding = 0;

            //
            // Calculate the number of bytes in the render buffer
            // for this period.
            //
            // This is the maximum we will pull from the pipeline.
            //
            // If the Audio Engine has not been initialized yet stick with
            // the default value.
            //
            if (iAudioEngineInitialised && ! iAudioSessionDisconnected) {
                hr = iAudioClient->GetCurrentPadding(&padding);
                if (hr == S_OK) {
                    iRenderBytesThisPeriod = (iBufferSize - padding) *
                                             iFrameSize;
                }
                else {
                    Log::Print("ERROR: Couldn't read render buffer padding\n");
                    iRenderBytesThisPeriod = 0;
                }

                iRenderBytesRemaining = iRenderBytesThisPeriod;
            }

            //
            // Process pipeline messages until we've reached the maximum for
            // this period.
            //
            // The pull will block if there are no messages.
            //
            for (;;) {
                if (iPlayable != NULL) {
                    ProcessAudio(iPlayable);
                }
                else {
                    Msg* msg = iPipeline.Pull();
                    ASSERT(msg != NULL);
                    msg = msg->Process(*this);
                    ASSERT(msg == NULL);
                }

                //
                // Have we reached the data limit for this period or been
                // told to exit?
                //
                if (iPlayable != NULL || iQuit) {
                    break;
                }
            }

            if (iQuit) {
                break;
            }

            // Log some interesting data if we can't fill at least half
            // of the available space in the render buffer.
            if (iRenderBytesThisPeriod * 0.5 < iRenderBytesRemaining) {
                Log::Print("Audio period: Requested Bytes [%u] : "
                           "Returned Bytes [%u]\n",
                           iRenderBytesThisPeriod,
                           iRenderBytesThisPeriod - iRenderBytesRemaining);

                if (iPlayable) {
                    TUint bytes = iPlayable->Bytes();

                    if (iResamplingInput) {
                        // Calculate the bytes that will be generated by the
                        // translation.
                        long long tmp = (long long)bytes *
                                        (long long)iResampleOutputBps /
                                        (long long)iResampleInputBps;

                        bytes = TUint(tmp);

                        // Round up to the nearest frame.
                        bytes += iMixFormat->nBlockAlign;
                        bytes -= bytes % iMixFormat->nBlockAlign;
                    }

                    Log::Print(" Available Bytes [%u]\n", bytes);
                }
                else {
                    Log::Print(" Available Bytes [0]\n");
                }

                if (iAudioEngineInitialised) {
                    Log::Print(" Period Start Frames In Buffer [%u]\n",
                               padding);

                    hr = iAudioClient->GetCurrentPadding(&padding);
                    if (hr == S_OK) {
                        Log::Print(" Current Frames In Buffer [%u]\n",
                                   padding);
                    }
                }
            }

#ifdef _TIMINGS_DEBUG
            QueryPerformanceCounter(&EndingTime);

            ElapsedMicroseconds.QuadPart = EndingTime.QuadPart -
                                           StartingTime.QuadPart;

            //
            // We now have the elapsed number of ticks, along with the
            // number of ticks-per-second. We use these values
            // to convert to the number of elapsed microseconds.
            // To guard against loss-of-precision, we convert
            // to microseconds *before* dividing by ticks-per-second.
            //
            ElapsedMicroseconds.QuadPart *= 1000000;
            ElapsedMicroseconds.QuadPart /= Frequency.QuadPart;

            Log::Print("Time To Process Messages This Audio Period "
                       "[%lld us]\n", ElapsedMicroseconds.QuadPart);
#endif /* _TIMINGS_DEBUG */

            // The audio client isn't capable of playing this stream.
            // Continue to pull from the pipeline until the next playable
            // stream is available.
            if (! iStreamFormatSupported) {
                continue;
            }

            // The audio session has been disconnected.
            // Continue to pull from the pipeline until we are instructed
            // to quit.
            if (iAudioSessionDisconnected) {
                continue;
            }

            //
            // Start the audio client once we have pre-loaded some
            // data into the render buffer.
            //
            // This prevents any initial audio glitches.
            //
            if (! iAudioClientStarted) {
                // There was no data read this period so try again next
                // period.
                if (iRenderBytesThisPeriod == iRenderBytesRemaining) {
                    continue;
                }

                hr = iAudioClient->Start();
                if (hr != S_OK) {
                    Log::Print("Unable to start render client: %x.\n", hr);
                    break;
                }

                iAudioClientStarted = true;
            }

            // Apply any volume changes.
            if (iAudioClientStarted && iVolumeChanged) {
                iAudioSessionVolume->SetMasterVolume(iVolumeLevel, NULL);
                iVolumeChanged = false;
            }

            // Wait for a kick from the native audio engine.
            DWORD waitResult =
                WaitForMultipleObjects(2, waitArray, FALSE, INFINITE);

            switch (waitResult) {
                case WAIT_OBJECT_0 + 0:     // iAudioSessionDisconnectedEvent
                    // Stop the audio client.
                    iAudioClient->Stop();
                    iAudioClient->Reset();
                    iAudioClientStarted = false;

                    iAudioSessionDisconnected = true;
                    break;
                case WAIT_OBJECT_0 + 1:     // iAudioSamplesReadyEvent
                    break;
                default:
                    Log::Print("ERROR: Unexpected event received [%d]\n",
                               waitResult);
            }
        }
    }
    catch (ThreadKill&) {}

Exit:
    // Complete any previous resampling session.
    if (iResamplingInput) {
        WWMFSampleData sampleData;

        hr = iResampler.Drain((iBufferSize * iFrameSize), &sampleData);
        if (hr == S_OK) {
            Log::Print("Resampler drained correctly [%d bytes].\n",
                       sampleData.bytes);

            sampleData.Release();
        }
        else {
            Log::Print("Resampler drain failed.\n");
        }
    }

    iResampler.Finalize();

    // Now we've stopped reading the pipeline, stop the native audio.
    StopAudioEngine();

    // Free up native resources.
    ShutdownAudioEngine();

    // Unhook from MMCSS.
    AvRevertMmThreadCharacteristics(mmcssHandle);

    CoUninitialize();
}
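The render loop above waits on iAudioSamplesReadyEvent, which only fires if the audio client was set up for event-driven operation. As a point of reference, here is a minimal sketch of the standard event-driven WASAPI setup that InitializeAudioClient() presumably performs; this is not the source's implementation, and the local names (samplesReadyEvent, audioClient, ...) stand in for the members used above:

#include <windows.h>
#include <mmdeviceapi.h>
#include <Audioclient.h>

static bool InitializeAudioClientSketch(HANDLE         samplesReadyEvent,
                                        IAudioClient** audioClient,
                                        WAVEFORMATEX** mixFormat,
                                        UINT32*        bufferSize)
{
    IMMDeviceEnumerator* enumerator = NULL;
    IMMDevice*           endpoint   = NULL;
    bool                 ok         = false;

    // Locate the default multimedia render endpoint ...
    if (FAILED(CoCreateInstance(__uuidof(MMDeviceEnumerator), NULL,
                                CLSCTX_INPROC_SERVER,
                                IID_PPV_ARGS(&enumerator)))) {
        return false;
    }

    if (SUCCEEDED(enumerator->GetDefaultAudioEndpoint(eRender, eMultimedia,
                                                      &endpoint)) &&
        // ... and associate an audio client object with it.
        SUCCEEDED(endpoint->Activate(__uuidof(IAudioClient),
                                     CLSCTX_INPROC_SERVER, NULL,
                                     (void**)audioClient)) &&
        SUCCEEDED((*audioClient)->GetMixFormat(mixFormat)) &&
        // Shared mode, event driven: the engine signals samplesReadyEvent
        // each time space becomes available in the render buffer, which is
        // what WaitForMultipleObjects() blocks on in the loop above.
        SUCCEEDED((*audioClient)->Initialize(AUDCLNT_SHAREMODE_SHARED,
                                             AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
                                             0, 0, *mixFormat, NULL)) &&
        SUCCEEDED((*audioClient)->SetEventHandle(samplesReadyEvent)) &&
        SUCCEEDED((*audioClient)->GetBufferSize(bufferSize))) {
        ok = true;
    }

    if (endpoint != NULL) {
        endpoint->Release();
    }
    enumerator->Release();
    return ok;
}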