void WebMediaStreamSourcesRequest::didCompleteQuery(const WebVector<WebMediaStreamSource>& audioSources, const WebVector<WebMediaStreamSource>& videoSources) const
{
    ASSERT(!isNull());
    MediaStreamSourceVector audio;
    for (size_t i = 0; i < audioSources.size(); ++i) {
        MediaStreamSource* curr = audioSources[i];
        audio.append(curr);
    }
    MediaStreamSourceVector video;
    for (size_t i = 0; i < videoSources.size(); ++i) {
        MediaStreamSource* curr = videoSources[i];
        video.append(curr);
    }
    m_private->didCompleteQuery(audio, video);
}
// Builds a platform MediaStreamDescriptor from the public descriptor |d|,
// converting every audio and video component and re-encoding the label
// from UTF-8.
static PassRefPtr<MediaStreamDescriptor> toMediaStreamDescriptor(const WebMediaStreamDescriptor& d)
{
    MediaStreamSourceVector audio;
    for (size_t index = 0; index < d.audios().size(); ++index) {
        RefPtr<MediaStreamSource> source = toMediaStreamSource(d.audios()[index]);
        audio.append(source.release());
    }

    MediaStreamSourceVector video;
    for (size_t index = 0; index < d.videos().size(); ++index) {
        RefPtr<MediaStreamSource> source = toMediaStreamSource(d.videos()[index]);
        video.append(source.release());
    }

    return MediaStreamDescriptor::create(WTF::String::fromUTF8(d.label().c_str()), audio, video);
}
// Example #3
// 0
// Adds the source backing |track| to |sourceVector|. Ended tracks are
// skipped, and a source already present in the vector is not added twice.
// Sources that belong to a stream are duplicated first (see FIXME below).
static void processTrack(MediaStreamTrack* track, MediaStreamSourceVector& sourceVector)
{
    if (track->ended())
        return;

    MediaStreamSource* trackSource = track->source();
    if (containsSource(sourceVector, trackSource))
        return;

    if (!trackSource->stream()) {
        sourceVector.append(trackSource);
        return;
    }

    // FIXME: this should not be necessary because tracks and streams must be
    // able to share sources, but our code currently assumes each source is
    // attached to just one track.
    // https://bugs.webkit.org/show_bug.cgi?id=121954
    sourceVector.append(MediaStreamSource::create(trackSource->id(), trackSource->type(), trackSource->name(), trackSource->readyState(), trackSource->requiresAudioConsumer()).get());
}
// Example #4
// 0
// Collects the source behind |track| into |sourceVector|: live tracks only,
// and each distinct source is appended at most once.
static void processTrack(MediaStreamTrack* track, MediaStreamSourceVector& sourceVector)
{
    if (track->ended())
        return;

    MediaStreamSource* trackSource = track->component()->source();
    if (containsSource(sourceVector, trackSource))
        return;

    sourceVector.append(trackSource);
}
// Example #5
// 0
// Forwards a user-media request to the embedder's client, if one is set.
// During the transition to split audio/video source lists the request is
// dispatched through BOTH client overloads: the legacy combined-sources one
// and the newer split one (see FIXME below). The PassRefPtr/RefPtr dance is
// order-sensitive: a reference must survive the first call so the second
// call can still hand the request over.
void UserMediaClientImpl::requestUserMedia(PassRefPtr<UserMediaRequest> prpRequest, const MediaStreamSourceVector& audioSources, const MediaStreamSourceVector& videoSources)
{
    if (m_client) {
        // Take a local reference so the request stays alive across both calls.
        RefPtr<UserMediaRequest> request = prpRequest;

        // FIXME: Cleanup when the chromium code has switched to the split sources implementation.
        MediaStreamSourceVector combinedSources;
        combinedSources.append(audioSources);
        combinedSources.append(videoSources);
        // Wrap the raw pointer in a fresh PassRefPtr (new reference) so
        // |request| keeps its own reference for the call below.
        m_client->requestUserMedia(PassRefPtr<UserMediaRequest>(request.get()), combinedSources);

        // Split-sources overload; release() transfers our last reference.
        m_client->requestUserMedia(request.release(), audioSources, videoSources);
    }
}
// Constructs the handler backing a MediaStreamAudioDestinationNode: creates
// a live audio MediaStreamSource, wraps it in a new audio-only MediaStream,
// and announces the stream to the MediaStreamCenter before initializing the
// audio handler itself.
MediaStreamAudioDestinationHandler::MediaStreamAudioDestinationHandler(AudioNode& node, size_t numberOfChannels)
    : AudioBasicInspectorHandler(NodeTypeMediaStreamAudioDestination, node, node.context()->sampleRate(), numberOfChannels)
    , m_mixBus(AudioBus::create(numberOfChannels, ProcessingSizeInFrames))
{
    // Unique "WebAudio-<uuid>" id distinguishes this generated source from
    // device-capture sources.
    m_source = MediaStreamSource::create("WebAudio-" + createCanonicalUUIDString(), MediaStreamSource::TypeAudio, "MediaStreamAudioDestinationNode", false, true, MediaStreamSource::ReadyStateLive, true);
    MediaStreamSourceVector audioSources;
    audioSources.append(m_source.get());
    // Intentionally left empty: the destination node produces audio only.
    MediaStreamSourceVector videoSources;
    m_stream = MediaStream::create(node.context()->executionContext(), MediaStreamDescriptor::create(audioSources, videoSources));
    MediaStreamCenter::instance().didCreateMediaStreamAndTracks(m_stream->descriptor());

    // Publish the output format (channel count, context sample rate) on the
    // source so downstream consumers of the stream know what to expect.
    m_source->setAudioFormat(numberOfChannels, node.context()->sampleRate());

    initialize();
}