Example #1
0
PassRefPtr<MediaStreamAudioSourceNode> AudioContext::createMediaStreamSource(MediaStream* mediaStream, ExceptionCode& ec)
{
    // A null stream is a caller error; report INVALID_STATE_ERR rather than crashing in release builds.
    ASSERT(mediaStream);
    if (!mediaStream) {
        ec = INVALID_STATE_ERR;
        return 0;
    }

    ASSERT(isMainThread());
    lazyInitialize();

    // FIXME: get a provider for non-local MediaStreams (like from a remote peer).
    AudioSourceProvider* provider = 0;

    MediaStreamTrackVector audioTracks = mediaStream->getAudioTracks();
    bool hasLocalAudio = mediaStream->isLocal() && audioTracks.size() > 0;
    if (hasLocalAudio) {
        // Enable input for the specific local audio device specified in the MediaStreamSource.
        RefPtr<MediaStreamTrack> firstAudioTrack = audioTracks[0];
        destination()->enableInput(firstAudioTrack->component()->source()->deviceId());
        provider = destination()->localAudioInputProvider();
    }

    RefPtr<MediaStreamAudioSourceNode> node = MediaStreamAudioSourceNode::create(this, mediaStream, provider);

    // FIXME: Only stereo streams are supported right now. We should be able to accept multi-channel streams.
    node->setFormat(2, sampleRate());

    // The context holds a reference until the node is disconnected.
    refNode(node.get());
    return node;
}
// FIXME: Cleanup when the chromium code has switched to the split sources implementation.
void WebMediaStreamDescriptor::initialize(const WebString& label, const WebVector<WebMediaStreamSource>& sources)
{
    MediaStreamSourceVector audio, video;
    for (size_t i = 0; i < sources.size(); ++i) {
        MediaStreamSource* curr = sources[i];
        if (curr->type() == MediaStreamSource::TypeAudio)
            audio.append(curr);
        else if (curr->type() == MediaStreamSource::TypeVideo)
            video.append(curr);
    }
    m_private = MediaStreamDescriptor::create(label, audio, video);
}
Example #3
0
void WebMediaStream::initialize(const WebString& label, const WebVector<WebMediaStreamSource>& audioSources, const WebVector<WebMediaStreamSource>& videoSources)
{
    MediaStreamComponentVector audio, video;
    for (size_t i = 0; i < audioSources.size(); ++i) {
        MediaStreamSource* source = audioSources[i];
        audio.append(MediaStreamComponent::create(source->id(), source));
    }
    for (size_t i = 0; i < videoSources.size(); ++i) {
        MediaStreamSource* source = videoSources[i];
        video.append(MediaStreamComponent::create(source->id(), source));
    }
    m_private = MediaStreamDescriptor::create(label, audio, video);
}
Example #4
0
static void processTrack(MediaStreamTrack* track, MediaStreamSourceVector& sourceVector)
{
    // Ended tracks contribute no source.
    if (track->ended())
        return;

    // Each distinct source is added at most once.
    MediaStreamSource* source = track->source();
    if (containsSource(sourceVector, source))
        return;

    if (!source->stream()) {
        sourceVector.append(source);
        return;
    }

    // FIXME: this should not be necessary because tracks and streams must be able to share sources, but our code
    // currently assumes each source is attached to just one track.
    // https://bugs.webkit.org/show_bug.cgi?id=121954
    sourceVector.append(MediaStreamSource::create(source->id(), source->type(), source->name(), source->readyState(), source->requiresAudioConsumer()).get());
}