// Builds a MediaStreamAudioSourceNode wired to |mediaStream|.
// Sets |ec| to INVALID_STATE_ERR and returns 0 when the stream is null.
PassRefPtr<MediaStreamAudioSourceNode> AudioContext::createMediaStreamSource(MediaStream* mediaStream, ExceptionCode& ec)
{
    ASSERT(mediaStream);
    if (!mediaStream) {
        ec = INVALID_STATE_ERR;
        return 0;
    }

    ASSERT(isMainThread());
    lazyInitialize();

    MediaStreamTrackVector audioTracks = mediaStream->getAudioTracks();

    AudioSourceProvider* sourceProvider = 0;
    bool hasLocalAudio = mediaStream->isLocal() && audioTracks.size();
    if (hasLocalAudio) {
        // Enable input for the specific local audio device specified in the MediaStreamSource.
        RefPtr<MediaStreamTrack> firstTrack = audioTracks[0];
        MediaStreamSource* streamSource = firstTrack->component()->source();
        destination()->enableInput(streamSource->deviceId());
        sourceProvider = destination()->localAudioInputProvider();
    }
    // FIXME: get a provider for non-local MediaStreams (like from a remote peer);
    // for now such streams get a null provider.

    RefPtr<MediaStreamAudioSourceNode> sourceNode = MediaStreamAudioSourceNode::create(this, mediaStream, sourceProvider);

    // FIXME: Only stereo streams are supported right now. We should be able to accept multi-channel streams.
    sourceNode->setFormat(2, sampleRate());

    // The context keeps a reference until the node is disconnected.
    refNode(sourceNode.get());
    return sourceNode;
}
// Creates a MediaElementAudioSourceNode that draws audio from |mediaElement|.
// Throws InvalidStateError (and returns nullptr) when the context is closed,
// the element is null, or the element is already bound to a source node.
MediaElementAudioSourceNode* AudioContext::createMediaElementSource(HTMLMediaElement* mediaElement, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    if (!mediaElement) {
        // Fixed typo in the error message: was "invalid HTMLMedialElement."
        exceptionState.throwDOMException(
            InvalidStateError,
            "invalid HTMLMediaElement.");
        return nullptr;
    }

    // First check if this media element already has a source node;
    // an element may feed at most one MediaElementSourceNode.
    if (mediaElement->audioSourceNode()) {
        exceptionState.throwDOMException(
            InvalidStateError,
            "HTMLMediaElement already connected previously to a different MediaElementSourceNode.");
        return nullptr;
    }

    MediaElementAudioSourceNode* node = MediaElementAudioSourceNode::create(this, mediaElement);

    mediaElement->setAudioSourceNode(&node->mediaElementAudioSourceHandler());

    refNode(node); // context keeps reference until node is disconnected
    return node;
}
// Creates a MediaStreamAudioSourceNode sourcing audio from the first audio
// track of |mediaStream|. Throws InvalidStateError when the context is
// closed, the stream is null, or the stream carries no audio tracks.
MediaStreamAudioSourceNode* AudioContext::createMediaStreamSource(MediaStream* mediaStream, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    if (!mediaStream) {
        exceptionState.throwDOMException(
            InvalidStateError,
            "invalid MediaStream source");
        return nullptr;
    }

    MediaStreamTrackVector audioTracks = mediaStream->getAudioTracks();
    if (audioTracks.isEmpty()) {
        exceptionState.throwDOMException(
            InvalidStateError,
            "MediaStream has no audio track");
        return nullptr;
    }

    // Use the first audio track in the media stream.
    MediaStreamTrack* firstAudioTrack = audioTracks[0];
    OwnPtr<AudioSourceProvider> audioProvider = firstAudioTrack->createWebAudioSource();
    MediaStreamAudioSourceNode* sourceNode = MediaStreamAudioSourceNode::create(this, mediaStream, firstAudioTrack, audioProvider.release());

    // FIXME: Only stereo streams are supported right now. We should be able to accept multi-channel streams.
    sourceNode->mediaStreamAudioSourceHandler().setFormat(2, sampleRate());

    refNode(sourceNode); // context keeps reference until node is disconnected
    return sourceNode;
}
// Builds a MediaStreamAudioSourceNode, picking the audio provider of the
// first track in |mediaStream| that exposes one.
PassRefPtr<MediaStreamAudioSourceNode> AudioContext::createMediaStreamSource(MediaStream* mediaStream, ExceptionState& es)
{
    ASSERT(mediaStream);
    if (!mediaStream) {
        es.throwDOMException(InvalidStateError);
        return 0;
    }

    ASSERT(isMainThread());
    lazyInitialize();

    MediaStreamTrackVector audioTracks = mediaStream->getAudioTracks();

    // FIXME: get a provider for non-local MediaStreams (like from a remote peer).
    AudioSourceProvider* trackProvider = 0;
    for (size_t index = 0; index < audioTracks.size(); ++index) {
        RefPtr<MediaStreamTrack> candidate = audioTracks[index];
        AudioSourceProvider* candidateProvider = candidate->component()->audioSourceProvider();
        if (candidateProvider) {
            trackProvider = candidateProvider;
            break;
        }
    }

    RefPtr<MediaStreamAudioSourceNode> streamNode = MediaStreamAudioSourceNode::create(this, mediaStream, trackProvider);

    // FIXME: Only stereo streams are supported right now. We should be able to accept multi-channel streams.
    streamNode->setFormat(2, sampleRate());

    // The context keeps a reference until the node is disconnected.
    refNode(streamNode.get());
    return streamNode;
}
// Creates a MediaStreamAudioSourceNode. Local streams with at least one
// audio track are fed from the destination's local audio input; other
// streams currently get no provider.
PassRefPtr<MediaStreamAudioSourceNode> AudioContext::createMediaStreamSource(MediaStream* mediaStream, ExceptionCode& ec)
{
    ASSERT(mediaStream);
    if (!mediaStream) {
        ec = INVALID_STATE_ERR;
        return 0;
    }

    ASSERT(isMainThread());
    lazyInitialize();

    // FIXME: get a provider for non-local MediaStreams (like from a remote peer).
    AudioSourceProvider* inputProvider = 0;
    bool isLocalWithAudio = mediaStream->isLocal() && mediaStream->audioTracks()->length();
    if (isLocalWithAudio)
        inputProvider = destination()->localAudioInputProvider();

    RefPtr<MediaStreamAudioSourceNode> streamNode = MediaStreamAudioSourceNode::create(this, mediaStream, inputProvider);

    // FIXME: Only stereo streams are supported right now. We should be able to accept multi-channel streams.
    streamNode->setFormat(2, sampleRate());

    // The context keeps a reference until the node is disconnected.
    refNode(streamNode.get());
    return streamNode;
}
// Creates a JavaScriptAudioNode that renders audio via script callbacks,
// using the destination's sample rate and the requested buffer size.
PassRefPtr<JavaScriptAudioNode> AudioContext::createJavaScriptNode(size_t bufferSize)
{
    ASSERT(isMainThread());
    lazyInitialize();

    RefPtr<JavaScriptAudioNode> scriptNode = JavaScriptAudioNode::create(this, m_destinationNode->sampleRate(), bufferSize);

    // The context keeps a reference until we stop making javascript rendering callbacks.
    refNode(scriptNode.get());
    return scriptNode;
}
// Creates a buffer-playback source node at the destination's sample rate.
PassRefPtr<AudioBufferSourceNode> AudioContext::createBufferSource()
{
    ASSERT(isMainThread());
    lazyInitialize();

    RefPtr<AudioBufferSourceNode> bufferSource = AudioBufferSourceNode::create(this, m_destinationNode->sampleRate());

    // The context keeps a reference until the source has finished playing.
    refNode(bufferSource.get());
    return bufferSource;
}
// Creates a buffer-playback source node at the destination's sample rate.
PassRefPtr<AudioBufferSourceNode> AudioContext::createBufferSource()
{
    ASSERT(isMainThread());
    lazyInitialize();

    RefPtr<AudioBufferSourceNode> sourceNode = AudioBufferSourceNode::create(this, m_destinationNode->sampleRate());

    // Because this is an AudioScheduledSourceNode, the context keeps a
    // reference until it has finished playing. When this happens,
    // AudioScheduledSourceNode::finish() calls
    // AudioContext::notifyNodeFinishedProcessing().
    refNode(sourceNode.get());

    return sourceNode;
}
// Creates a ScriptProcessorNode. A null result from create() signals
// unacceptable parameters and is reported as INDEX_SIZE_ERR.
PassRefPtr<ScriptProcessorNode> AudioContext::createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionCode& ec)
{
    ASSERT(isMainThread());
    lazyInitialize();

    RefPtr<ScriptProcessorNode> processorNode = ScriptProcessorNode::create(this, m_destinationNode->sampleRate(), bufferSize, numberOfInputChannels, numberOfOutputChannels);

    if (!processorNode.get()) {
        ec = INDEX_SIZE_ERR;
        return nullptr;
    }

    // The context keeps a reference until we stop making javascript rendering callbacks.
    refNode(processorNode.get());
    return processorNode;
}
std::vector<std::shared_ptr<Vertex>> KDTree_Wrapper::find_nearest(const Point& p, double limit) { if (is_dirty_) { kdtree_.optimize(); } std::vector<std::shared_ptr<Vertex>> result; auto v = std::make_shared<Vertex>(-1,p); kdtreeNode refNode(v); std::vector<kdtreeNode> howClose; kdtree_.find_within_range(refNode, limit, std::back_insert_iterator<std::vector<kdtreeNode>>(howClose)); for (auto n : howClose) { result.push_back(n.vertex); } return result; }
// Creates a ScriptProcessorNode. On failure, inspects the arguments to throw
// an IndexSizeError with a precise description of what was invalid.
ScriptProcessorNode* AudioContext::createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    ScriptProcessorNode* node = ScriptProcessorNode::create(this, sampleRate(), bufferSize, numberOfInputChannels, numberOfOutputChannels);

    if (!node) {
        if (!numberOfInputChannels && !numberOfOutputChannels) {
            exceptionState.throwDOMException(
                IndexSizeError,
                "number of input channels and output channels cannot both be zero.");
        } else if (numberOfInputChannels > AudioContext::maxNumberOfChannels()) {
            exceptionState.throwDOMException(
                IndexSizeError,
                "number of input channels (" + String::number(numberOfInputChannels)
                + ") exceeds maximum ("
                + String::number(AudioContext::maxNumberOfChannels()) + ").");
        } else if (numberOfOutputChannels > AudioContext::maxNumberOfChannels()) {
            // BUG FIX: this message previously printed numberOfInputChannels
            // instead of numberOfOutputChannels (copy-paste error).
            exceptionState.throwDOMException(
                IndexSizeError,
                "number of output channels (" + String::number(numberOfOutputChannels)
                + ") exceeds maximum ("
                + String::number(AudioContext::maxNumberOfChannels()) + ").");
        } else {
            exceptionState.throwDOMException(
                IndexSizeError,
                "buffer size (" + String::number(bufferSize)
                + ") must be a power of two between 256 and 16384.");
        }
        return nullptr;
    }

    refNode(node); // context keeps reference until we stop making javascript rendering callbacks
    return node;
}
// Creates a MediaStreamAudioSourceNode for |mediaStream|, using the first
// audio track that exposes an AudioSourceProvider. Sets |ec| to
// INVALID_STATE_ERR and returns nullptr when the stream is null, has no
// audio tracks, or no track provides audio data. The node is formatted as
// stereo at the context's sample rate, and the context keeps a reference
// until the node is disconnected.
// NOTE(review): returns &node.get() — presumably |node| is a Ref<> whose
// get() yields a reference; confirm the PassRefPtr return adopts the
// reference count correctly rather than leaking or double-releasing.
PassRefPtr<MediaStreamAudioSourceNode> AudioContext::createMediaStreamSource(MediaStream* mediaStream, ExceptionCode& ec) { ASSERT(isMainThread()); ASSERT(mediaStream); if (!mediaStream) { ec = INVALID_STATE_ERR; return nullptr; } auto audioTracks = mediaStream->getAudioTracks(); if (audioTracks.isEmpty()) { ec = INVALID_STATE_ERR; return nullptr; } MediaStreamTrack* providerTrack = nullptr; for (auto& track : audioTracks) { if (track->audioSourceProvider()) { providerTrack = track.get(); break; } } if (!providerTrack) { ec = INVALID_STATE_ERR; return nullptr; } lazyInitialize(); auto node = MediaStreamAudioSourceNode::create(*this, *mediaStream, *providerTrack); node->setFormat(2, sampleRate()); refNode(&node.get()); // context keeps reference until node is disconnected return &node.get(); }
// Creates a MediaStreamAudioSourceNode. Scans the stream's audio tracks for
// one backed by a MediaStreamAudioSource; when found, enables the matching
// local input device and uses the local audio input provider.
PassRefPtr<MediaStreamAudioSourceNode> AudioContext::createMediaStreamSource(MediaStream* mediaStream, ExceptionCode& ec)
{
    ASSERT(mediaStream);
    if (!mediaStream) {
        ec = INVALID_STATE_ERR;
        return nullptr;
    }

    ASSERT(isMainThread());
    lazyInitialize();

    Vector<RefPtr<MediaStreamTrack>> audioTracks = mediaStream->getAudioTracks();

    // FIXME: get a provider for non-local MediaStreams (like from a remote peer).
    AudioSourceProvider* inputProvider = 0;
    RefPtr<MediaStreamTrack> chosenTrack;
    for (size_t i = 0; i < audioTracks.size(); ++i) {
        // Note: chosenTrack is assigned unconditionally, so after an
        // unsuccessful scan it still refers to the last track examined.
        chosenTrack = audioTracks[i];
        if (!chosenTrack->source()->isAudioStreamSource())
            continue;

        auto audioSource = static_cast<MediaStreamAudioSource*>(chosenTrack->source());
        ASSERT(!audioSource->deviceId().isEmpty());
        destination()->enableInput(audioSource->deviceId());
        inputProvider = destination()->localAudioInputProvider();
        break;
    }

    RefPtr<MediaStreamAudioSourceNode> sourceNode = MediaStreamAudioSourceNode::create(this, mediaStream, chosenTrack.get(), inputProvider);

    // FIXME: Only stereo streams are supported right now. We should be able to accept multi-channel streams.
    sourceNode->setFormat(2, sampleRate());

    // The context keeps a reference until the node is disconnected.
    refNode(sourceNode.get());
    return sourceNode;
}
// Creates a MediaElementAudioSourceNode for |mediaElement| and registers the
// node with the element. Fails with INVALID_STATE_ERR when the element is
// null or already feeds another source node.
PassRefPtr<MediaElementAudioSourceNode> AudioContext::createMediaElementSource(HTMLMediaElement* mediaElement, ExceptionCode& ec)
{
    ASSERT(mediaElement);
    if (!mediaElement) {
        ec = INVALID_STATE_ERR;
        return nullptr;
    }

    ASSERT(isMainThread());
    lazyInitialize();

    // First check if this media element already has a source node;
    // an element may feed at most one source node at a time.
    if (mediaElement->audioSourceNode()) {
        ec = INVALID_STATE_ERR;
        return nullptr;
    }

    RefPtr<MediaElementAudioSourceNode> elementSource = MediaElementAudioSourceNode::create(this, mediaElement);
    mediaElement->setAudioSourceNode(elementSource.get());

    // The context keeps a reference until the node is disconnected.
    refNode(elementSource.get());
    return elementSource;
}
// Creates a MediaElementAudioSourceNode for |mediaElement| and registers the
// node with the element. Throws InvalidStateError when the element is null
// or already feeds another source node.
PassRefPtr<MediaElementAudioSourceNode> AudioContext::createMediaElementSource(HTMLMediaElement* mediaElement, ExceptionState& es)
{
    ASSERT(mediaElement);
    if (!mediaElement) {
        es.throwDOMException(InvalidStateError);
        return 0;
    }

    ASSERT(isMainThread());
    lazyInitialize();

    // First check if this media element already has a source node;
    // an element may feed at most one source node at a time.
    if (mediaElement->audioSourceNode()) {
        es.throwDOMException(InvalidStateError);
        return 0;
    }

    RefPtr<MediaElementAudioSourceNode> elementSource = MediaElementAudioSourceNode::create(this, mediaElement);
    mediaElement->setAudioSourceNode(elementSource.get());

    // The context keeps a reference until the node is disconnected.
    refNode(elementSource.get());
    return elementSource;
}
// Called when |node| has started processing: the context takes a reference
// (via refNode) so the node is kept alive while it is active.
void AudioContext::notifyNodeStartedProcessing(AudioNode* node) { refNode(node); }