MediaStream* HTMLCanvasElementCapture::captureStream(HTMLCanvasElement& element, bool givenFrameRate, double frameRate, ExceptionState& exceptionState)
{
    // Capturing a tainted canvas would leak cross-origin pixel data, so
    // refuse with a SecurityError up front.
    if (!element.originClean()) {
        exceptionState.throwSecurityError("Canvas is not origin-clean.");
        return nullptr;
    }

    WebMediaStreamTrack track;
    WebSize size(element.width(), element.height());
    // Fall back to the default capture rate when the caller did not supply one.
    double requestedRate = givenFrameRate ? frameRate : kDefaultFrameRate;
    OwnPtr<WebCanvasCaptureHandler> handler = adoptPtr(Platform::current()->createCanvasCaptureHandler(size, requestedRate, &track));
    ASSERT(handler);
    if (!handler) {
        exceptionState.throwDOMException(NotSupportedError, "No CanvasCapture handler can be created.");
        return nullptr;
    }

    // Only forward |frameRate| to the track when one was actually given; the
    // no-rate overload lets the track use its own default.
    MediaStreamTrackVector tracks;
    if (givenFrameRate)
        tracks.append(CanvasCaptureMediaStreamTrack::create(track, &element, handler.release(), frameRate));
    else
        tracks.append(CanvasCaptureMediaStreamTrack::create(track, &element, handler.release()));
    // We want to capture one frame in the beginning.
    element.notifyListenersCanvasChanged();
    return MediaStream::create(element.executionContext(), tracks);
}
PassRefPtr<MediaStreamAudioSourceNode> AudioContext::createMediaStreamSource(MediaStream* mediaStream, ExceptionState& es)
{
    ASSERT(mediaStream);
    if (!mediaStream) {
        es.throwDOMException(InvalidStateError);
        return 0;
    }

    ASSERT(isMainThread());
    lazyInitialize();

    // Pick the provider of the first audio track that exposes one.
    // FIXME: get a provider for non-local MediaStreams (like from a remote peer).
    AudioSourceProvider* provider = 0;
    MediaStreamTrackVector audioTracks = mediaStream->getAudioTracks();
    for (size_t index = 0; index < audioTracks.size(); ++index) {
        AudioSourceProvider* candidate = audioTracks[index]->component()->audioSourceProvider();
        if (candidate) {
            provider = candidate;
            break;
        }
    }

    RefPtr<MediaStreamAudioSourceNode> node = MediaStreamAudioSourceNode::create(this, mediaStream, provider);

    // FIXME: Only stereo streams are supported right now. We should be able to accept multi-channel streams.
    node->setFormat(2, sampleRate());

    refNode(node.get()); // context keeps reference until node is disconnected
    return node;
}
PassRefPtr<MediaStreamAudioSourceNode> AudioContext::createMediaStreamSource(MediaStream* mediaStream, ExceptionCode& ec)
{
    ASSERT(mediaStream);
    if (!mediaStream) {
        ec = INVALID_STATE_ERR;
        return 0;
    }

    ASSERT(isMainThread());
    lazyInitialize();

    MediaStreamTrackVector audioTracks = mediaStream->getAudioTracks();

    // Only local streams can feed WebAudio directly today; remote ones get
    // a null provider.
    // FIXME: get a provider for non-local MediaStreams (like from a remote peer).
    AudioSourceProvider* provider = 0;
    if (mediaStream->isLocal() && audioTracks.size()) {
        // Enable input for the specific local audio device specified in the MediaStreamSource.
        MediaStreamSource* source = audioTracks[0]->component()->source();
        destination()->enableInput(source->deviceId());
        provider = destination()->localAudioInputProvider();
    }

    RefPtr<MediaStreamAudioSourceNode> node = MediaStreamAudioSourceNode::create(this, mediaStream, provider);

    // FIXME: Only stereo streams are supported right now. We should be able to accept multi-channel streams.
    node->setFormat(2, sampleRate());

    refNode(node.get()); // context keeps reference until node is disconnected
    return node;
}
Example #4
0
MediaStream::MediaStream(ExecutionContext* context,
                         const MediaStreamTrackVector& audioTracks,
                         const MediaStreamTrackVector& videoTracks)
    : m_scheduledEventTimer(this, &MediaStream::scheduledEventTimerFired),
      m_executionContext(context) {
  // Register this stream with every track and collect the underlying
  // platform components that will back the descriptor.
  MediaStreamComponentVector audioComponents;
  MediaStreamComponentVector videoComponents;
  for (auto& audioTrack : audioTracks) {
    audioTrack->registerMediaStream(this);
    audioComponents.append(audioTrack->component());
  }
  for (auto& videoTrack : videoTracks) {
    videoTrack->registerMediaStream(this);
    videoComponents.append(videoTrack->component());
  }

  m_descriptor =
      MediaStreamDescriptor::create(audioComponents, videoComponents);
  m_descriptor->setClient(this);
  MediaStreamCenter::instance().didCreateMediaStream(m_descriptor);

  m_audioTracks = audioTracks;
  m_videoTracks = videoTracks;
  // A stream containing no live tracks starts out inactive.
  if (emptyOrOnlyEndedTracks()) {
    m_descriptor->setActive(false);
  }
}
Example #5
0
void MediaStream::removeRemoteTrack(MediaStreamComponent* component)
{
    // Removes the local track backed by |component| (e.g. when a remote peer
    // removed it) and dispatches a "removetrack" event. No-op once the stream
    // has ended or when no matching track is found.
    if (ended())
        return;

    MediaStreamTrackVector* tracks = 0;
    switch (component->source()->type()) {
    case MediaStreamSource::TypeAudio:
        tracks = &m_audioTracks;
        break;
    case MediaStreamSource::TypeVideo:
        tracks = &m_videoTracks;
        break;
    }
    // Guard against an unhandled source type leaving |tracks| null, which
    // would otherwise null-deref in the loop below.
    if (!tracks)
        return;

    // Find the track whose component matches.
    size_t index = notFound;
    for (size_t i = 0; i < tracks->size(); ++i) {
        if ((*tracks)[i]->component() == component) {
            index = i;
            break;
        }
    }
    if (index == notFound)
        return;

    // Keep the track alive across removal so the event can reference it.
    RefPtr<MediaStreamTrack> track = (*tracks)[index];
    tracks->remove(index);
    scheduleDispatchEvent(MediaStreamTrackEvent::create(eventNames().removetrackEvent, false, false, track));
}
Example #6
0
void MediaStream::removeRemoteSource(MediaStreamSource* source)
{
    // Removes the track fed by |source|, detaches the source from the
    // descriptor, and dispatches a "removetrack" event. No-op once the
    // stream has ended or when no matching track is found.
    if (ended())
        return;

    MediaStreamTrackVector* tracks = 0;
    switch (source->type()) {
    case MediaStreamSource::Audio:
        tracks = &m_audioTracks;
        break;
    case MediaStreamSource::Video:
        tracks = &m_videoTracks;
        break;
    }
    // Guard against an unhandled source type leaving |tracks| null, which
    // would otherwise null-deref in the loop below.
    if (!tracks)
        return;

    // Find the track attached to this source.
    size_t index = notFound;
    for (size_t i = 0; i < tracks->size(); ++i) {
        if ((*tracks)[i]->source() == source) {
            index = i;
            break;
        }
    }
    if (index == notFound)
        return;

    m_descriptor->removeSource(source);

    // Keep the track alive across removal so the event can reference it.
    RefPtr<MediaStreamTrack> track = (*tracks)[index];
    tracks->remove(index);
    scheduleDispatchEvent(MediaStreamTrackEvent::create(eventNames().removetrackEvent, false, false, track));
}
MediaStreamAudioSourceNode* AudioContext::createMediaStreamSource(MediaStream* mediaStream, ExceptionState& exceptionState)
{
    ASSERT(isMainThread());

    // A closed context can no longer create nodes.
    if (isContextClosed()) {
        throwExceptionForClosedState(exceptionState);
        return nullptr;
    }

    if (!mediaStream) {
        exceptionState.throwDOMException(
            InvalidStateError,
            "invalid MediaStream source");
        return nullptr;
    }

    MediaStreamTrackVector audioTracks = mediaStream->getAudioTracks();
    if (audioTracks.isEmpty()) {
        exceptionState.throwDOMException(
            InvalidStateError,
            "MediaStream has no audio track");
        return nullptr;
    }

    // Use the first audio track in the media stream.
    MediaStreamTrack* firstAudioTrack = audioTracks[0];
    OwnPtr<AudioSourceProvider> audioProvider = firstAudioTrack->createWebAudioSource();
    MediaStreamAudioSourceNode* sourceNode = MediaStreamAudioSourceNode::create(this, mediaStream, firstAudioTrack, audioProvider.release());

    // FIXME: Only stereo streams are supported right now. We should be able to accept multi-channel streams.
    sourceNode->mediaStreamAudioSourceHandler().setFormat(2, sampleRate());

    refNode(sourceNode); // context keeps reference until node is disconnected
    return sourceNode;
}
MediaStreamTrackVector MediaStream::getTracks() const
{
    // Snapshot the track set into a vector, sized up front so the copy
    // does not reallocate.
    MediaStreamTrackVector allTracks;
    allTracks.reserveCapacity(m_trackSet.size());
    copyValuesToVector(m_trackSet, allTracks);
    return allTracks;
}
MediaStream* MediaStream::clone(ExecutionContext* context)
{
    // Deep-copy: clone every audio and video track into a fresh stream.
    MediaStreamTrackVector clonedTracks;
    for (auto& audioTrack : m_audioTracks)
        clonedTracks.append(audioTrack->clone(context));
    for (auto& videoTrack : m_videoTracks)
        clonedTracks.append(videoTrack->clone(context));
    return MediaStream::create(context, clonedTracks);
}
MediaStreamTrackVector MediaStream::getTracks()
{
    // Flatten the stream's tracks into one list: audio first, then video.
    MediaStreamTrackVector combined;
    for (auto& audioTrack : m_audioTracks)
        combined.append(audioTrack.get());
    for (auto& videoTrack : m_videoTracks)
        combined.append(videoTrack.get());
    return combined;
}
MediaStreamTrackVector MediaStream::trackVectorForType(RealtimeMediaSource::Type filterType) const
{
    // Collect only the tracks whose source type matches |filterType|.
    MediaStreamTrackVector matching;
    for (auto& candidate : m_trackSet.values()) {
        if (candidate->source().type() == filterType)
            matching.append(candidate);
    }
    return matching;
}
RefPtr<MediaStream> MediaStream::clone()
{
    // Clone every track and wrap the copies in a brand-new stream.
    MediaStreamTrackVector duplicates;
    duplicates.reserveCapacity(m_trackSet.size());
    for (auto& track : m_trackSet.values())
        duplicates.append(track->clone());
    return MediaStream::create(*scriptExecutionContext(), duplicates);
}
Example #13
0
void UserMediaRequest::callSuccessHandler(PassRefPtr<MediaStreamDescriptor> streamDescriptor)
{
    // 4 - Create the MediaStream and pass it to the success callback.
    ASSERT(m_successCallback);

    RefPtr<MediaStream> stream = MediaStream::create(m_scriptExecutionContext, streamDescriptor);

    // Record on every source the constraints this request was granted with.
    MediaStreamTrackVector audioTracks = stream->getAudioTracks();
    for (auto& audioTrack : audioTracks)
        audioTrack->source()->setConstraints(m_audioConstraints);

    MediaStreamTrackVector videoTracks = stream->getVideoTracks();
    for (auto& videoTrack : videoTracks)
        videoTrack->source()->setConstraints(m_videoConstraints);

    m_successCallback->handleEvent(stream.get());
}
Example #14
0
// Returns true when |trackVector| already holds a track whose source has the
// same id as |source|.
static bool containsSource(MediaStreamTrackVector& trackVector,
                           MediaStreamSource* source) {
  for (auto& track : trackVector) {
    if (track->component()->source()->id() == source->id())
      return true;
  }
  return false;
}
Example #15
0
// Appends |track| to |trackVector| unless it has ended or a track sharing the
// same source was already collected.
static void processTrack(MediaStreamTrack* track,
                         MediaStreamTrackVector& trackVector) {
  if (track->ended())
    return;
  if (!containsSource(trackVector, track->component()->source()))
    trackVector.append(track);
}
Example #16
0
PassRefPtr<MediaStream> MediaStream::create(ScriptExecutionContext* context, const MediaStreamTrackVector& tracks)
{
    // Partition the tracks into audio and video buckets by kind; ended
    // tracks and duplicate sources are filtered out along the way.
    MediaStreamSourceVector audioSources;
    MediaStreamSourceVector videoSources;

    for (auto& track : tracks) {
        MediaStreamSourceVector& bucket = track->kind() == "audio" ? audioSources : videoSources;
        processTrack(track.get(), bucket);
    }

    return createFromSourceVectors(context, audioSources, videoSources);
}
MediaStream* HTMLCanvasElementCapture::captureStream(
    HTMLCanvasElement& element,
    bool givenFrameRate,
    double frameRate,
    ExceptionState& exceptionState) {
    // Capturing a tainted canvas would expose cross-origin pixels, so
    // refuse with a SecurityError up front.
    if (!element.originClean()) {
        exceptionState.throwSecurityError("Canvas is not origin-clean.");
        return nullptr;
    }

    WebMediaStreamTrack track;
    const WebSize size(element.width(), element.height());
    // Use the caller-supplied rate when one was given, otherwise the default.
    const double captureRate = givenFrameRate ? frameRate : kDefaultFrameRate;
    std::unique_ptr<WebCanvasCaptureHandler> handler = wrapUnique(
        Platform::current()->createCanvasCaptureHandler(size, captureRate,
                                                        &track));

    if (!handler) {
        exceptionState.throwDOMException(
            NotSupportedError, "No CanvasCapture handler can be created.");
        return nullptr;
    }

    // Only forward |frameRate| to the track when one was actually given; the
    // no-rate overload lets the track use its own default.
    CanvasCaptureMediaStreamTrack* canvasTrack =
        givenFrameRate
            ? CanvasCaptureMediaStreamTrack::create(
                  track, &element, std::move(handler), frameRate)
            : CanvasCaptureMediaStreamTrack::create(track, &element,
                                                    std::move(handler));
    // We want to capture a frame in the beginning.
    canvasTrack->requestFrame();

    MediaStreamTrackVector tracks;
    tracks.append(canvasTrack);
    return MediaStream::create(element.getExecutionContext(), tracks);
}
Example #18
0
void MediaStream::removeRemoteTrack(MediaStreamComponent* component) {
  // Removes the track backed by |component|, dispatches "removetrack", and
  // transitions the stream to inactive when no live track remains. No-op
  // once the execution context is gone or when no matching track is found.
  DCHECK(component);
  if (m_executionContext->isContextDestroyed())
    return;

  MediaStreamTrackVector* tracks = 0;
  switch (component->source()->type()) {
    case MediaStreamSource::TypeAudio:
      tracks = &m_audioTracks;
      break;
    case MediaStreamSource::TypeVideo:
      tracks = &m_videoTracks;
      break;
  }
  // Guard against an unhandled source type leaving |tracks| null, which
  // would otherwise null-deref in the loop below.
  if (!tracks)
    return;

  // Find the track whose component matches.
  size_t index = kNotFound;
  for (size_t i = 0; i < tracks->size(); ++i) {
    if ((*tracks)[i]->component() == component) {
      index = i;
      break;
    }
  }
  if (index == kNotFound)
    return;

  m_descriptor->removeComponent(component);

  MediaStreamTrack* track = (*tracks)[index];
  track->unregisterMediaStream(this);
  tracks->remove(index);
  scheduleDispatchEvent(
      MediaStreamTrackEvent::create(EventTypeNames::removetrack, track));

  // Removing the last live track deactivates the stream and fires "inactive".
  if (active() && emptyOrOnlyEndedTracks()) {
    m_descriptor->setActive(false);
    scheduleDispatchEvent(Event::create(EventTypeNames::inactive));
  }
}
MediaStream::MediaStream(ScriptExecutionContext& context, const MediaStreamTrackVector& tracks)
    : ContextDestructionObserver(&context)
    , m_activityEventTimer(*this, &MediaStream::activityEventTimerFired)
{
    // This constructor preserves MediaStreamTrack instances and must be used by calls originating
    // from the JavaScript MediaStream constructor.
    MediaStreamTrackPrivateVector trackPrivates;
    trackPrivates.reserveCapacity(tracks.size());

    // Observe each track, index it by id in m_trackSet, and gather its
    // private backing object for the MediaStreamPrivate below.
    for (auto& track : tracks) {
        track->addObserver(this);
        m_trackSet.add(track->id(), track);
        trackPrivates.append(&track->privateTrack());
    }

    // Build the platform-level stream, mirror its active state, then start
    // observing it and register the stream globally. NOTE(review): this
    // ordering looks intentional (state is settled before observation and
    // registration) — preserve it when modifying.
    m_private = MediaStreamPrivate::create(trackPrivates);
    setIsActive(m_private->active());
    m_private->addObserver(*this);
    MediaStreamRegistry::shared().registerStream(*this);
}
void UserMediaRequest::succeed(PassRefPtr<MediaStreamDescriptor> streamDescriptor)
{
    // The request may have outlived its context (e.g. the frame navigated
    // away); in that case there is nobody left to notify.
    if (!executionContext())
        return;

    RefPtrWillBeRawPtr<MediaStream> stream = MediaStream::create(executionContext(), streamDescriptor);

    // Record on every source the constraints this request was granted with.
    MediaStreamTrackVector audioTracks = stream->getAudioTracks();
    for (auto& audioTrack : audioTracks)
        audioTrack->component()->source()->setConstraints(m_audio);

    MediaStreamTrackVector videoTracks = stream->getVideoTracks();
    for (auto& videoTrack : videoTracks)
        videoTrack->component()->source()->setConstraints(m_video);

    m_successCallback->handleEvent(stream.get());
}