ScriptPromise AudioContext::resumeContext(ScriptState* scriptState)
{
    ASSERT(isMainThread());
    AutoLocker locker(this);

    // Resuming only makes sense for a real-time context that is still open.
    if (isOfflineContext()) {
        return ScriptPromise::rejectWithDOMException(
            scriptState,
            DOMException::create(
                InvalidStateError,
                "cannot resume an OfflineAudioContext"));
    }

    if (isContextClosed()) {
        return ScriptPromise::rejectWithDOMException(
            scriptState,
            DOMException::create(
                InvalidStateError,
                "cannot resume a closed AudioContext"));
    }

    RefPtrWillBeRawPtr<ScriptPromiseResolver> resumeResolver = ScriptPromiseResolver::create(scriptState);
    ScriptPromise resumePromise = resumeResolver->promise();

    // Restart the destination node to pull on the audio graph.
    if (m_destinationNode)
        startRendering();

    // Save the resolver which will get resolved when the destination node starts pulling on the
    // graph again.
    m_resumeResolvers.append(resumeResolver);

    return resumePromise;
}
ScriptPromise OfflineAudioContext::startOfflineRendering(ScriptState* scriptState)
{
    // Calling close() on an OfflineAudioContext is not supported/allowed,
    // but it might well have been stopped by its execution context.
    if (isContextClosed()) {
        return ScriptPromise::rejectWithDOMException(
            scriptState,
            DOMException::create(
                InvalidStateError,
                "cannot call startRendering on an OfflineAudioContext in a stopped state."));
    }

    // Can't call startRendering more than once. Return a rejected promise now.
    // A non-null resolver means a previous call already started rendering.
    if (m_offlineResolver) {
        return ScriptPromise::rejectWithDOMException(
            scriptState,
            DOMException::create(
                InvalidStateError,
                "cannot call startRendering more than once"));
    }

    m_offlineResolver = ScriptPromiseResolver::create(scriptState);
    startRendering();
    return m_offlineResolver->promise();
}
MediaStreamAudioSourceNode* AudioContext::createMediaStreamSource(MediaStream* mediaStream, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    if (!mediaStream) {
        exceptionState.throwDOMException(
            InvalidStateError,
            "invalid MediaStream source");
        return nullptr;
    }

    MediaStreamTrackVector audioTracks = mediaStream->getAudioTracks();
    if (audioTracks.isEmpty()) {
        exceptionState.throwDOMException(
            InvalidStateError,
            "MediaStream has no audio track");
        return nullptr;
    }

    // Use the first audio track in the media stream.
    MediaStreamTrack* firstAudioTrack = audioTracks[0];
    OwnPtr<AudioSourceProvider> audioProvider = firstAudioTrack->createWebAudioSource();
    MediaStreamAudioSourceNode* sourceNode = MediaStreamAudioSourceNode::create(this, mediaStream, firstAudioTrack, audioProvider.release());

    // FIXME: Only stereo streams are supported right now. We should be able to accept multi-channel streams.
    sourceNode->mediaStreamAudioSourceHandler().setFormat(2, sampleRate());

    refNode(sourceNode); // context keeps reference until node is disconnected
    return sourceNode;
}
ChannelMergerNode* AbstractAudioContext::createChannelMerger(size_t numberOfInputs, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    ChannelMergerNode* mergerNode = ChannelMergerNode::create(*this, sampleRate(), numberOfInputs);
    if (!mergerNode) {
        // A null node indicates numberOfInputs fell outside the supported range.
        exceptionState.throwDOMException(
            IndexSizeError,
            ExceptionMessages::indexOutsideRange<size_t>(
                "number of inputs",
                numberOfInputs,
                1,
                ExceptionMessages::InclusiveBound,
                AbstractAudioContext::maxNumberOfChannels(),
                ExceptionMessages::InclusiveBound));
        return nullptr;
    }

    return mergerNode;
}
MediaElementAudioSourceNode* AudioContext::createMediaElementSource(HTMLMediaElement* mediaElement, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    if (!mediaElement) {
        // Fixed typo in the error message: "HTMLMedialElement" -> "HTMLMediaElement".
        exceptionState.throwDOMException(
            InvalidStateError,
            "invalid HTMLMediaElement.");
        return nullptr;
    }

    // First check if this media element already has a source node.
    if (mediaElement->audioSourceNode()) {
        exceptionState.throwDOMException(
            InvalidStateError,
            "HTMLMediaElement already connected previously to a different MediaElementSourceNode.");
        return nullptr;
    }

    MediaElementAudioSourceNode* node = MediaElementAudioSourceNode::create(this, mediaElement);
    mediaElement->setAudioSourceNode(&node->mediaElementAudioSourceHandler());

    refNode(node); // context keeps reference until node is disconnected
    return node;
}
PeriodicWave* AudioContext::createPeriodicWave(DOMFloat32Array* real, DOMFloat32Array* imag, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    // Both coefficient arrays must be present.
    if (!real) {
        exceptionState.throwDOMException(
            SyntaxError,
            "invalid real array");
        return nullptr;
    }

    if (!imag) {
        exceptionState.throwDOMException(
            SyntaxError,
            "invalid imaginary array");
        return nullptr;
    }

    // Each coefficient array is capped at kMaxPeriodicWaveArraySize entries.
    if (real->length() > PeriodicWave::kMaxPeriodicWaveArraySize) {
        exceptionState.throwDOMException(
            IndexSizeError,
            ExceptionMessages::indexOutsideRange(
                "length of the real part array",
                real->length(),
                1u,
                ExceptionMessages::InclusiveBound,
                PeriodicWave::kMaxPeriodicWaveArraySize,
                ExceptionMessages::InclusiveBound));
        return nullptr;
    }

    if (imag->length() > PeriodicWave::kMaxPeriodicWaveArraySize) {
        exceptionState.throwDOMException(
            IndexSizeError,
            ExceptionMessages::indexOutsideRange(
                "length of the imaginary part array",
                imag->length(),
                1u,
                ExceptionMessages::InclusiveBound,
                PeriodicWave::kMaxPeriodicWaveArraySize,
                ExceptionMessages::InclusiveBound));
        return nullptr;
    }

    // The two arrays describe a single Fourier series, so their lengths must agree.
    if (real->length() != imag->length()) {
        exceptionState.throwDOMException(
            IndexSizeError,
            "length of real array (" + String::number(real->length())
            + ") and length of imaginary array (" + String::number(imag->length())
            + ") must match.");
        return nullptr;
    }

    return PeriodicWave::create(sampleRate(), real, imag);
}
ScriptProcessorNode* AudioContext::createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState& exceptionState)
{
    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    // Set number of output channels to stereo by default.
    const size_t defaultNumberOfOutputChannels = 2;
    return createScriptProcessor(bufferSize, numberOfInputChannels, defaultNumberOfOutputChannels, exceptionState);
}
ScriptProcessorNode* AudioContext::createScriptProcessor(ExceptionState& exceptionState)
{
    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    // Set number of input/output channels to stereo by default; a buffer size of
    // zero lets the implementation pick one.
    const size_t defaultBufferSize = 0;
    const size_t defaultChannelCount = 2;
    return createScriptProcessor(defaultBufferSize, defaultChannelCount, defaultChannelCount, exceptionState);
}
DelayNode* AbstractAudioContext::createDelay(double maxDelayTime, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    // DelayNode::create reports any problem with maxDelayTime through exceptionState.
    return DelayNode::create(*this, sampleRate(), maxDelayTime, exceptionState);
}
ChannelMergerNode* AudioContext::createChannelMerger(ExceptionState& exceptionState)
{
    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    // Default a ChannelMergerNode to six inputs.
    const unsigned defaultNumberOfInputs = 6;
    return createChannelMerger(defaultNumberOfInputs, exceptionState);
}
DelayNode* AudioContext::createDelay(ExceptionState& exceptionState)
{
    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    // Default the maximum delay to one second.
    const double kDefaultMaxDelayTime = 1;
    return createDelay(kDefaultMaxDelayTime, exceptionState);
}
WaveShaperNode* AudioContext::createWaveShaper(ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    return WaveShaperNode::create(this);
}
GainNode* AudioContext::createGain(ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    return GainNode::create(this, sampleRate());
}
AnalyserNode* AbstractAudioContext::createAnalyser(ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    return AnalyserNode::create(*this, sampleRate());
}
MediaStreamAudioDestinationNode* AudioContext::createMediaStreamDestination(ExceptionState& exceptionState)
{
    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    // Set number of output channels to stereo by default.
    const size_t defaultNumberOfChannels = 2;
    return MediaStreamAudioDestinationNode::create(this, defaultNumberOfChannels);
}
DelayNode* AudioContext::createDelay(double maxDelayTime, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    // DelayNode::create reports an out-of-range maxDelayTime via exceptionState.
    DelayNode* delayNode = DelayNode::create(this, sampleRate(), maxDelayTime, exceptionState);
    return exceptionState.hadException() ? nullptr : delayNode;
}
OscillatorNode* AudioContext::createOscillator(ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    // Do not add a reference to this source node now. The reference will be added when start() is
    // called.
    return OscillatorNode::create(this, sampleRate());
}
void AudioContext::decodeAudioData(DOMArrayBuffer* audioData, AudioBufferCallback* successCallback, AudioBufferCallback* errorCallback, ExceptionState& exceptionState)
{
    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return;
    }

    if (!audioData) {
        exceptionState.throwDOMException(
            SyntaxError,
            "invalid ArrayBuffer for audioData.");
        return;
    }

    // Decoding runs asynchronously; one of the callbacks fires when it finishes.
    m_audioDecoder.decodeAsync(audioData, sampleRate(), successCallback, errorCallback);
}
ScriptPromise OfflineAudioContext::startOfflineRendering(ScriptState* scriptState)
{
    ASSERT(isMainThread());

    // Calling close() on an OfflineAudioContext is not supported/allowed,
    // but it might well have been stopped by its execution context.
    //
    // See: crbug.com/435867
    if (isContextClosed()) {
        return ScriptPromise::rejectWithDOMException(
            scriptState,
            DOMException::create(
                InvalidStateError,
                "cannot call startRendering on an OfflineAudioContext in a stopped state."));
    }

    // If the context is not in the suspended state (i.e. running), reject the promise.
    if (contextState() != AudioContextState::Suspended) {
        return ScriptPromise::rejectWithDOMException(
            scriptState,
            DOMException::create(
                InvalidStateError,
                "cannot startRendering when an OfflineAudioContext is " + state()));
    }

    // Can't call startRendering more than once. Return a rejected promise now.
    if (m_isRenderingStarted) {
        return ScriptPromise::rejectWithDOMException(
            scriptState,
            DOMException::create(
                InvalidStateError,
                "cannot call startRendering more than once"));
    }

    // Removed a redundant ASSERT(!m_isRenderingStarted) here: the early return
    // above already guarantees the flag is false at this point.
    m_completeResolver = ScriptPromiseResolver::create(scriptState);

    // Start rendering and return the promise.
    m_isRenderingStarted = true;
    setContextState(Running);
    destinationHandler().startRendering();

    return m_completeResolver->promise();
}
ChannelMergerNode* AudioContext::createChannelMerger(size_t numberOfInputs, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    ChannelMergerNode* mergerNode = ChannelMergerNode::create(this, sampleRate(), numberOfInputs);
    if (!mergerNode) {
        // Creation fails when numberOfInputs is outside the supported range.
        exceptionState.throwDOMException(
            IndexSizeError,
            "number of inputs (" + String::number(numberOfInputs)
            + ") must be between 1 and "
            + String::number(AudioContext::maxNumberOfChannels()) + ".");
        return nullptr;
    }

    return mergerNode;
}
ScriptProcessorNode* AudioContext::createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    ScriptProcessorNode* node = ScriptProcessorNode::create(this, sampleRate(), bufferSize, numberOfInputChannels, numberOfOutputChannels);

    if (!node) {
        // Creation failed; determine which argument was responsible so the
        // exception message is specific.
        if (!numberOfInputChannels && !numberOfOutputChannels) {
            exceptionState.throwDOMException(
                IndexSizeError,
                "number of input channels and output channels cannot both be zero.");
        } else if (numberOfInputChannels > AudioContext::maxNumberOfChannels()) {
            exceptionState.throwDOMException(
                IndexSizeError,
                "number of input channels (" + String::number(numberOfInputChannels)
                + ") exceeds maximum ("
                + String::number(AudioContext::maxNumberOfChannels()) + ").");
        } else if (numberOfOutputChannels > AudioContext::maxNumberOfChannels()) {
            // Fixed copy-paste bug: this message previously reported
            // numberOfInputChannels instead of numberOfOutputChannels.
            exceptionState.throwDOMException(
                IndexSizeError,
                "number of output channels (" + String::number(numberOfOutputChannels)
                + ") exceeds maximum ("
                + String::number(AudioContext::maxNumberOfChannels()) + ").");
        } else {
            exceptionState.throwDOMException(
                IndexSizeError,
                "buffer size (" + String::number(bufferSize)
                + ") must be a power of two between 256 and 16384.");
        }
        return nullptr;
    }

    refNode(node); // context keeps reference until we stop making javascript rendering callbacks
    return node;
}
ScriptPromise AudioContext::closeContext(ScriptState* scriptState)
{
    if (isContextClosed()) {
        // We've already closed the context previously, but it hasn't yet been resolved, so just
        // create a new promise and reject it.
        return ScriptPromise::rejectWithDOMException(
            scriptState,
            DOMException::create(InvalidStateError,
                "Cannot close a context that is being closed or has already been closed."));
    }

    m_closeResolver = ScriptPromiseResolver::create(scriptState);
    ScriptPromise closePromise = m_closeResolver->promise();

    // Stop the audio context. This will stop the destination node from pulling audio anymore. And
    // since we have disconnected the destination from the audio graph, and thus has no references,
    // the destination node can GCed if JS has no references. uninitialize() will also resolve the Promise
    // created here.
    uninitialize();

    return closePromise;
}
ScriptPromise AudioContext::closeContext(ScriptState* scriptState)
{
    // Explicitly closing an OfflineAudioContext is not allowed.
    if (isOfflineContext()) {
        return ScriptPromise::rejectWithDOMException(
            scriptState,
            DOMException::create(InvalidStateError,
                "Cannot call close() on an OfflineAudioContext."));
    }

    if (isContextClosed()) {
        // We've already closed the context previously, but it hasn't yet been resolved, so just
        // create a new promise and reject it.
        return ScriptPromise::rejectWithDOMException(
            scriptState,
            DOMException::create(InvalidStateError,
                "Cannot close a context that is being closed or has already been closed."));
    }

    m_closeResolver = ScriptPromiseResolver::create(scriptState);
    ScriptPromise closePromise = m_closeResolver->promise();

    // Before closing the context go and disconnect all nodes, allowing them to be collected. This
    // will also break any connections to the destination node. Any unfinished sourced nodes will
    // get stopped when the context is unitialized.
    for (auto& liveNode : m_liveNodes) {
        if (!liveNode)
            continue;
        for (unsigned outputIndex = 0; outputIndex < liveNode->numberOfOutputs(); ++outputIndex)
            liveNode->handler().disconnectWithoutException(outputIndex);
    }

    // Stop the audio context. This will stop the destination node from pulling audio anymore. And
    // since we have disconnected the destination from the audio graph, and thus has no references,
    // the destination node can GCed if JS has no references. stop() will also resolve the Promise
    // created here.
    stop();

    return closePromise;
}
PeriodicWave* AbstractAudioContext::createPeriodicWave(DOMFloat32Array* real, DOMFloat32Array* imag, const Dictionary& options, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    // Both coefficient arrays must be present.
    if (!real) {
        exceptionState.throwDOMException(
            SyntaxError,
            "invalid real array");
        return nullptr;
    }

    if (!imag) {
        exceptionState.throwDOMException(
            SyntaxError,
            "invalid imaginary array");
        return nullptr;
    }

    // The two arrays describe a single Fourier series, so their lengths must agree.
    if (real->length() != imag->length()) {
        exceptionState.throwDOMException(
            IndexSizeError,
            "length of real array (" + String::number(real->length())
            + ") and length of imaginary array (" + String::number(imag->length())
            + ") must match.");
        return nullptr;
    }

    // Normalization stays enabled unless the options dictionary turns it off.
    bool disableNormalization = false;
    DictionaryHelper::getWithUndefinedOrNullCheck(options, "disableNormalization", disableNormalization);

    return PeriodicWave::create(sampleRate(), real, imag, disableNormalization);
}