PassRefPtr<MediaStreamAudioSourceNode> AudioContext::createMediaStreamSource(MediaStream* mediaStream, ExceptionCode& ec)
{
    // Callers should never hand us a null stream; fail with INVALID_STATE_ERR
    // in release builds anyway.
    ASSERT(mediaStream);
    if (!mediaStream) {
        ec = INVALID_STATE_ERR;
        return 0;
    }

    ASSERT(isMainThread());
    lazyInitialize();

    // Only local streams with at least one audio track get a provider today.
    AudioSourceProvider* audioProvider = 0;
    MediaStreamTrackVector tracks = mediaStream->getAudioTracks();
    if (mediaStream->isLocal() && tracks.size()) {
        // Enable input for the specific local audio device specified in the MediaStreamSource.
        RefPtr<MediaStreamTrack> firstAudioTrack = tracks[0];
        MediaStreamSource* audioSource = firstAudioTrack->component()->source();
        destination()->enableInput(audioSource->deviceId());
        audioProvider = destination()->localAudioInputProvider();
    }
    // FIXME: get a provider for non-local MediaStreams (like from a remote peer).

    RefPtr<MediaStreamAudioSourceNode> node = MediaStreamAudioSourceNode::create(this, mediaStream, audioProvider);

    // FIXME: Only stereo streams are supported right now. We should be able to accept multi-channel streams.
    node->setFormat(2, sampleRate());

    // The context keeps a reference until the node is disconnected.
    refNode(node.get());
    return node;
}
// Removes the track wrapping |component| after the platform reports a remote
// removal, and fires a "removetrack" event for it. No-op if the stream has
// ended or the component is unknown.
void MediaStream::removeRemoteTrack(MediaStreamComponent* component)
{
    if (ended())
        return;

    // Select the audio or video track list based on the component's source type.
    MediaStreamTrackVector* tracks = 0;
    switch (component->source()->type()) {
    case MediaStreamSource::TypeAudio:
        tracks = &m_audioTracks;
        break;
    case MediaStreamSource::TypeVideo:
        tracks = &m_videoTracks;
        break;
    }

    // Find the first track whose component matches.
    size_t index = notFound;
    for (size_t i = 0; i < tracks->size(); ++i) {
        if ((*tracks)[i]->component() == component) {
            index = i;
            break;
        }
    }
    if (index == notFound)
        return;

    // NOTE(review): unlike removeRemoteSource(), which calls
    // m_descriptor->removeSource(), this path never updates m_descriptor —
    // confirm whether the descriptor is meant to keep the stale component here.

    // Keep the track alive across removal so the event carries a live object.
    RefPtr<MediaStreamTrack> track = (*tracks)[index];
    tracks->remove(index);
    scheduleDispatchEvent(MediaStreamTrackEvent::create(eventNames().removetrackEvent, false, false, track));
}
// Removes the track backed by |source| after a remote-side source removal,
// detaches the source from the descriptor, and fires "removetrack".
void MediaStream::removeRemoteSource(MediaStreamSource* source)
{
    // An ended stream no longer tracks its sources.
    if (ended())
        return;

    // Choose the track list that matches the source type.
    MediaStreamTrackVector* trackList = 0;
    switch (source->type()) {
    case MediaStreamSource::Audio:
        trackList = &m_audioTracks;
        break;
    case MediaStreamSource::Video:
        trackList = &m_videoTracks;
        break;
    }

    // Locate the first track that references this source.
    size_t matchIndex = notFound;
    for (size_t i = 0; i < trackList->size(); ++i) {
        if ((*trackList)[i]->source() == source) {
            matchIndex = i;
            break;
        }
    }
    if (matchIndex == notFound)
        return;

    // Keep the descriptor in sync before mutating the track list.
    m_descriptor->removeSource(source);

    // Hold a reference across removal so the event still sees a live track.
    RefPtr<MediaStreamTrack> removedTrack = (*trackList)[matchIndex];
    trackList->remove(matchIndex);
    scheduleDispatchEvent(MediaStreamTrackEvent::create(eventNames().removetrackEvent, false, false, removedTrack));
}
PassRefPtr<MediaStreamAudioSourceNode> AudioContext::createMediaStreamSource(MediaStream* mediaStream, ExceptionState& es)
{
    // A null stream is a caller bug; surface it as InvalidStateError in release.
    ASSERT(mediaStream);
    if (!mediaStream) {
        es.throwDOMException(InvalidStateError);
        return 0;
    }

    ASSERT(isMainThread());
    lazyInitialize();

    MediaStreamTrackVector tracks = mediaStream->getAudioTracks();

    // Use the first audio track that exposes a provider, if any.
    // FIXME: get a provider for non-local MediaStreams (like from a remote peer).
    AudioSourceProvider* chosenProvider = 0;
    for (size_t i = 0; i < tracks.size(); ++i) {
        RefPtr<MediaStreamTrack> candidate = tracks[i];
        AudioSourceProvider* trackProvider = candidate->component()->audioSourceProvider();
        if (trackProvider) {
            chosenProvider = trackProvider;
            break;
        }
    }

    RefPtr<MediaStreamAudioSourceNode> node = MediaStreamAudioSourceNode::create(this, mediaStream, chosenProvider);

    // FIXME: Only stereo streams are supported right now. We should be able to accept multi-channel streams.
    node->setFormat(2, sampleRate());

    // The context keeps a reference until the node is disconnected.
    refNode(node.get());
    return node;
}
// Returns true when any track in |trackVector| is backed by a source whose
// id matches |source|'s id. Comparison is by id, not by pointer identity.
static bool containsSource(MediaStreamTrackVector& trackVector, MediaStreamSource* source)
{
    size_t trackCount = trackVector.size();
    for (size_t index = 0; index < trackCount; ++index) {
        MediaStreamSource* trackSource = trackVector[index]->component()->source();
        if (trackSource->id() == source->id())
            return true;
    }
    return false;
}
// Builds a MediaStream from an explicit track list by partitioning the
// tracks' sources into audio and video source vectors.
PassRefPtr<MediaStream> MediaStream::create(ScriptExecutionContext* context, const MediaStreamTrackVector& tracks)
{
    MediaStreamSourceVector audioSources;
    MediaStreamSourceVector videoSources;

    for (size_t i = 0; i < tracks.size(); ++i) {
        MediaStreamTrack* track = tracks[i].get();
        // Route each track's source into the vector matching its kind.
        if (track->kind() == "audio")
            processTrack(track, audioSources);
        else
            processTrack(track, videoSources);
    }

    return createFromSourceVectors(context, audioSources, videoSources);
}
MediaStream::MediaStream(ScriptExecutionContext& context, const MediaStreamTrackVector& tracks)
    : ContextDestructionObserver(&context)
    , m_activityEventTimer(*this, &MediaStream::activityEventTimerFired)
{
    // This constructor preserves MediaStreamTrack instances and must be used by calls originating
    // from the JavaScript MediaStream constructor.

    // Collect the private backing objects while registering this stream as an
    // observer of each wrapper track and indexing the tracks by id.
    MediaStreamTrackPrivateVector trackPrivates;
    trackPrivates.reserveCapacity(tracks.size());
    for (auto& track : tracks) {
        track->addObserver(this);
        m_trackSet.add(track->id(), track);
        trackPrivates.append(&track->privateTrack());
    }

    // Create the platform-level stream, then mirror its active state before
    // subscribing to its change notifications.
    m_private = MediaStreamPrivate::create(trackPrivates);
    setIsActive(m_private->active());
    m_private->addObserver(*this);

    // Make the stream discoverable via the global registry (e.g. for blob URLs).
    MediaStreamRegistry::shared().registerStream(*this);
}
// Removes the track wrapping |component| in response to a remote-side
// removal: keeps m_descriptor in sync, fires "removetrack", and transitions
// the stream to inactive when no live tracks remain.
void MediaStream::removeRemoteTrack(MediaStreamComponent* component)
{
    DCHECK(component);
    // Nothing to update once the execution context is gone.
    if (m_executionContext->isContextDestroyed())
        return;

    // Select the audio or video track list based on the component's source type.
    MediaStreamTrackVector* tracks = 0;
    switch (component->source()->type()) {
    case MediaStreamSource::TypeAudio:
        tracks = &m_audioTracks;
        break;
    case MediaStreamSource::TypeVideo:
        tracks = &m_videoTracks;
        break;
    }

    // Find the first track whose component matches; ignore unknown components.
    size_t index = kNotFound;
    for (size_t i = 0; i < tracks->size(); ++i) {
        if ((*tracks)[i]->component() == component) {
            index = i;
            break;
        }
    }
    if (index == kNotFound)
        return;

    // Keep the platform descriptor consistent with the JS-visible track lists.
    m_descriptor->removeComponent(component);

    // Detach the track from this stream before removing it from the list,
    // then notify script.
    MediaStreamTrack* track = (*tracks)[index];
    track->unregisterMediaStream(this);
    tracks->remove(index);
    scheduleDispatchEvent(
        MediaStreamTrackEvent::create(EventTypeNames::removetrack, track));

    // If that was the last live track, flip the stream to inactive and tell script.
    if (active() && emptyOrOnlyEndedTracks()) {
        m_descriptor->setActive(false);
        scheduleDispatchEvent(Event::create(EventTypeNames::inactive));
    }
}