// Tears down this context on the main thread. The order below matters:
// rendering is stopped first so the audio thread can no longer touch nodes,
// then still-active sources are released, then script-visible promises are
// rejected, and finally we block on the HRTF loader thread before clearing
// remaining state. Idempotent: returns immediately if already uninitialized.
void AbstractAudioContext::uninitialize() {
  ASSERT(isMainThread());

  if (!isInitialized())
    return;

  // Flip the flag first so re-entrant calls become no-ops.
  m_isInitialized = false;

  // This stops the audio thread and all audio rendering.
  if (m_destinationNode)
    m_destinationNode->handler().uninitialize();

  // Get rid of the sources which may still be playing.
  releaseActiveSourceNodes();

  // Reject any pending resolvers before we go away.
  rejectPendingResolvers();
  didClose();

  // The HRTF database loader runs on its own thread; we must not clear()
  // state it may still be reading, so wait for it to finish first.
  ASSERT(m_listener);
  m_listener->waitForHRTFDatabaseLoaderThreadCompletion();

  clear();
}
// Calling resume() on a cross-origin context without any user gesture must
// be recorded as an autoplay failure in the cross-origin histogram, and no
// other sample may be logged.
TEST_F(BaseAudioContextTest, AutoplayMetrics_CallResumeNoGesture) {
  HistogramTester histograms;
  createChildFrame();
  childDocument().settings()->setMediaPlaybackRequiresUserGesture(true);

  ScriptState* scriptState = getScriptStateFrom(childDocument());
  ScriptState::Scope scriptScope(scriptState);

  BaseAudioContext* context =
      BaseAudioContext::create(childDocument(), ASSERT_NO_EXCEPTION);
  context->resumeContext(scriptState);
  rejectPendingResolvers(context);
  recordAutoplayStatus(context);

  histograms.expectBucketCount(kCrossOriginMetric,
                               AutoplayStatus::AutoplayStatusFailed, 1);
  histograms.expectTotalCount(kCrossOriginMetric, 1);
}
// A node-start attempt followed by resume() inside a user-gesture scope must
// be recorded as an autoplay success in the cross-origin histogram.
// NOTE(review): the test name misspells "Success"; renaming would change the
// registered gtest name (filters/expectations may reference it), so keep it.
TEST_F(BaseAudioContextTest, AutoplayMetrics_NodeStartGestureThenSucces) {
  HistogramTester histograms;
  createChildFrame();
  childDocument().settings()->setMediaPlaybackRequiresUserGesture(true);

  ScriptState* scriptState = getScriptStateFrom(childDocument());
  ScriptState::Scope scriptScope(scriptState);

  BaseAudioContext* context =
      BaseAudioContext::create(childDocument(), ASSERT_NO_EXCEPTION);

  // Everything from the start attempt onward happens under a user gesture.
  UserGestureIndicator gestureIndicator(DocumentUserGestureToken::create(
      &childDocument(), UserGestureToken::NewGesture));
  context->maybeRecordStartAttempt();
  context->resumeContext(scriptState);
  rejectPendingResolvers(context);
  recordAutoplayStatus(context);

  histograms.expectBucketCount(kCrossOriginMetric,
                               AutoplayStatus::AutoplayStatusSucceeded, 1);
  histograms.expectTotalCount(kCrossOriginMetric, 1);
}
// Tears down this context on the main thread. Mirrors
// AbstractAudioContext::uninitialize() but additionally maintains the global
// hardware-context count, transitions online contexts to the Closed state,
// and resolves a pending close() promise. Idempotent: returns immediately if
// already uninitialized.
void AudioContext::uninitialize() {
  ASSERT(isMainThread());

  if (!isInitialized())
    return;

  // Flip the flag first so re-entrant calls become no-ops.
  m_isInitialized = false;

  // This stops the audio thread and all audio rendering.
  if (m_destinationNode)
    m_destinationNode->handler().uninitialize();

  // Only online (real-time) contexts are counted against the hardware limit.
  if (!isOfflineContext()) {
    ASSERT(s_hardwareContextCount);
    --s_hardwareContextCount;
  }

  // Get rid of the sources which may still be playing.
  derefUnfinishedSourceNodes();

  // Reject any pending resolvers before we go away.
  rejectPendingResolvers();

  // For an offline audio context, the completion event will set the state to closed. For an
  // online context, we need to do it here. We only want to set the closed state once.
  if (!isOfflineContext())
    setContextState(Closed);

  // Resolve the promise now, if any
  if (m_closeResolver)
    m_closeResolver->resolve();

  // The HRTF database loader runs on its own thread; we must not clear()
  // state it may still be reading, so wait for it to finish first.
  ASSERT(m_listener);
  m_listener->waitForHRTFDatabaseLoaderThreadCompletion();

  clear();
}