MediaStreamDescriptor::MediaStreamDescriptor(const String& id, const MediaStreamSourceVector& audioSources, const MediaStreamSourceVector& videoSources)
    : m_client(0)
    , m_id(id)
    , m_ended(false)
{
    ASSERT(m_id.length());
    for (size_t i = 0; i < audioSources.size(); i++)
        m_audioComponents.append(MediaStreamComponent::create(this, audioSources[i]));

    for (size_t i = 0; i < videoSources.size(); i++)
        m_videoComponents.append(MediaStreamComponent::create(this, videoSources[i]));
}
void UserMediaClientImpl::requestUserMedia(PassRefPtr<UserMediaRequest> prpRequest, const MediaStreamSourceVector& audioSources, const MediaStreamSourceVector& videoSources)
{
    if (m_client) {
        RefPtr<UserMediaRequest> request = prpRequest;

        // Legacy entry point taking a single combined source list.
        // FIXME: Cleanup when the chromium code has switched to the split sources implementation.
        MediaStreamSourceVector combinedSources;
        combinedSources.append(audioSources);
        combinedSources.append(videoSources);
        m_client->requestUserMedia(PassRefPtr<UserMediaRequest>(request.get()), combinedSources);

        // New entry point taking split audio/video source lists.
        m_client->requestUserMedia(request.release(), audioSources, videoSources);
    }
}
void WebMediaStreamSourcesRequest::didCompleteQuery(const WebVector<WebMediaStreamSource>& audioSources, const WebVector<WebMediaStreamSource>& videoSources) const
{
    ASSERT(!isNull());
    MediaStreamSourceVector audio;
    for (size_t i = 0; i < audioSources.size(); ++i) {
        MediaStreamSource* curr = audioSources[i];
        audio.append(curr);
    }

    MediaStreamSourceVector video;
    for (size_t i = 0; i < videoSources.size(); ++i) {
        MediaStreamSource* curr = videoSources[i];
        video.append(curr);
    }

    m_private->didCompleteQuery(audio, video);
}
MediaStreamAudioDestinationHandler::MediaStreamAudioDestinationHandler(AudioNode& node, size_t numberOfChannels)
    : AudioBasicInspectorHandler(NodeTypeMediaStreamAudioDestination, node, node.context()->sampleRate(), numberOfChannels)
    , m_mixBus(AudioBus::create(numberOfChannels, ProcessingSizeInFrames))
{
    m_source = MediaStreamSource::create("WebAudio-" + createCanonicalUUIDString(), MediaStreamSource::TypeAudio, "MediaStreamAudioDestinationNode", false, true, MediaStreamSource::ReadyStateLive, true);

    MediaStreamSourceVector audioSources;
    audioSources.append(m_source.get());
    MediaStreamSourceVector videoSources;
    m_stream = MediaStream::create(node.context()->executionContext(), MediaStreamDescriptor::create(audioSources, videoSources));
    MediaStreamCenter::instance().didCreateMediaStreamAndTracks(m_stream->descriptor());

    m_source->setAudioFormat(numberOfChannels, node.context()->sampleRate());

    initialize();
}
static PassRefPtr<MediaStreamDescriptor> toMediaStreamDescriptor(const WebMediaStreamDescriptor& d)
{
    MediaStreamSourceVector audioSources;
    for (size_t i = 0; i < d.audios().size(); i++) {
        RefPtr<MediaStreamSource> src = toMediaStreamSource(d.audios()[i]);
        audioSources.append(src.release());
    }

    MediaStreamSourceVector videoSources;
    for (size_t i = 0; i < d.videos().size(); i++) {
        RefPtr<MediaStreamSource> src = toMediaStreamSource(d.videos()[i]);
        videoSources.append(src.release());
    }

    return MediaStreamDescriptor::create(WTF::String::fromUTF8(d.label().c_str()), audioSources, videoSources);
}
MediaStreamDescriptor::MediaStreamDescriptor(const String& id, const MediaStreamSourceVector& audioSources, const MediaStreamSourceVector& videoSources, bool ended)
    : m_client(0)
    , m_id(id)
    , m_ended(ended)
{
    ASSERT(m_id.length());
    for (size_t i = 0; i < audioSources.size(); i++) {
        audioSources[i]->setStream(this);
        m_audioStreamSources.append(audioSources[i]);
    }

    for (size_t i = 0; i < videoSources.size(); i++) {
        videoSources[i]->setStream(this);
        m_videoStreamSources.append(videoSources[i]);
    }
}
static void processTrack(MediaStreamTrack* track, MediaStreamSourceVector& sourceVector)
{
    if (track->ended())
        return;

    MediaStreamSource* source = track->source();
    if (!containsSource(sourceVector, source)) {
        if (source->stream()) {
            // FIXME: This should not be necessary because tracks and streams must be able to share sources,
            // but our code currently assumes each source is attached to just one track.
            // https://bugs.webkit.org/show_bug.cgi?id=121954
            sourceVector.append(MediaStreamSource::create(source->id(), source->type(), source->name(), source->readyState(), source->requiresAudioConsumer()).get());
        } else
            sourceVector.append(source);
    }
}
static bool containsSource(MediaStreamSourceVector& sourceVector, MediaStreamSource* source)
{
    for (size_t i = 0; i < sourceVector.size(); ++i) {
        if (source->id() == sourceVector[i]->id())
            return true;
    }
    return false;
}
static void processTrack(MediaStreamTrack* track, MediaStreamSourceVector& sourceVector)
{
    if (track->ended())
        return;

    MediaStreamSource* source = track->component()->source();
    if (!containsSource(sourceVector, source))
        sourceVector.append(source);
}
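For context, a minimal sketch of how the processTrack and containsSource helpers above are typically driven when assembling a stream from existing tracks: each live track contributes its backing source at most once, and the deduplicated vectors are then handed to MediaStreamDescriptor::create (the id-less overload used in the MediaStreamAudioDestinationHandler constructor above). The createFromTracks name, the MediaStreamTrackVector type, and the PassRefPtr return type are assumptions for illustration, not code from the source.

// Illustrative sketch only: createFromTracks, MediaStreamTrackVector and the PassRefPtr
// return type are assumed; only processTrack, containsSource and the two-argument
// MediaStreamDescriptor::create come from the snippets above.
static PassRefPtr<MediaStreamDescriptor> createFromTracks(const MediaStreamTrackVector& audioTracks, const MediaStreamTrackVector& videoTracks)
{
    MediaStreamSourceVector audioSources;
    for (size_t i = 0; i < audioTracks.size(); ++i)
        processTrack(audioTracks[i].get(), audioSources); // skips ended tracks and duplicate source ids

    MediaStreamSourceVector videoSources;
    for (size_t i = 0; i < videoTracks.size(); ++i)
        processTrack(videoTracks[i].get(), videoSources);

    // The id-less create() overload generates the descriptor id internally, as in the
    // MediaStreamAudioDestinationHandler snippet above.
    return MediaStreamDescriptor::create(audioSources, videoSources);
}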