Code example #1
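DOMMediaStream::InitTrackUnionStream stores the owning window and content hints, then obtains the global MediaStreamGraph instance and asks it to create a TrackUnionStream wrapped by this DOMMediaStream.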
void
DOMMediaStream::InitTrackUnionStream(nsIDOMWindow* aWindow, uint32_t aHintContents)
{
  mWindow = aWindow;
  SetHintContents(aHintContents);
  MediaStreamGraph* gm = MediaStreamGraph::GetInstance();
  mStream = gm->CreateTrackUnionStream(this);
}
Code example #2
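MediaRecorder::Start validates the recorder state and the optional timeslice argument, creates a TrackUnionStream on the source stream's graph (so Pause/Resume can work via ChangeExplicitBlockerCount), sets up the encoded-buffer cache and the encoder, connects the source stream through an input port, and finally spins up a named thread that pulls encoded data off the encoder.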
void
MediaRecorder::Start(const Optional<int32_t>& aTimeSlice, ErrorResult& aResult)
{
  if (mState != RecordingState::Inactive) {
    aResult.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }

  if (aTimeSlice.WasPassed()) {
    if (aTimeSlice.Value() < 0) {
      aResult.Throw(NS_ERROR_INVALID_ARG);
      return;
    }
    mTimeSlice = aTimeSlice.Value();
  } else {
    mTimeSlice = 0;
  }

  // Create a TrackUnionStream to support Pause/Resume by using ChangeExplicitBlockerCount
  MediaStreamGraph* gm = mStream->GetStream()->Graph();
  mTrackUnionStream = gm->CreateTrackUnionStream(mStream);
  MOZ_ASSERT(mTrackUnionStream, "CreateTrackUnionStream failed");

  if (!CheckPrincipal()) {
    aResult.Throw(NS_ERROR_DOM_SECURITY_ERR);
    return;
  }

  if (mEncodedBufferCache == nullptr) {
    mEncodedBufferCache = new EncodedBufferCache(MAX_ALLOW_MEMORY_BUFFER);
  }

  mEncoder = MediaEncoder::CreateEncoder(NS_LITERAL_STRING(""));
  MOZ_ASSERT(mEncoder, "CreateEncoder failed");

  mTrackUnionStream->SetAutofinish(true);
  nsRefPtr<MediaInputPort> port =
    mTrackUnionStream->AllocateInputPort(mStream->GetStream(), MediaInputPort::FLAG_BLOCK_OUTPUT);

  if (mEncoder) {
    mTrackUnionStream->AddListener(mEncoder);
  } else {
    aResult.Throw(NS_ERROR_DOM_ABORT_ERR);
    // Throw() does not stop execution, so bail out explicitly.
    return;
  }

  if (!mReadThread) {
    nsresult rv = NS_NewNamedThread("Media Encoder",
                                    getter_AddRefs(mReadThread));
    if (NS_FAILED(rv)) {
      aResult.Throw(rv);
      return;
    }
    nsRefPtr<ExtractEncodedDataTask> event = new ExtractEncodedDataTask(this, mEncoder);
    mReadThread->Dispatch(event, NS_DISPATCH_NORMAL);
    mState = RecordingState::Recording;
  }
}
Code example #3
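The CameraPreview constructor creates an ImageContainer for preview frames, obtains the global MediaStreamGraph, creates an input (source) stream for this preview, and attaches a CameraPreviewListener to it.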
CameraPreview::CameraPreview(nsIThread* aCameraThread, uint32_t aWidth, uint32_t aHeight)
  : nsDOMMediaStream()
  , mWidth(aWidth)
  , mHeight(aHeight)
  , mFramesPerSecond(0)
  , mFrameCount(0)
  , mCameraThread(aCameraThread)
{
  DOM_CAMERA_LOGI("%s:%d : mWidth=%d, mHeight=%d : this=%p\n", __func__, __LINE__, mWidth, mHeight, this);

  mImageContainer = LayerManager::CreateImageContainer();
  MediaStreamGraph* gm = MediaStreamGraph::GetInstance();
  mStream = gm->CreateInputStream(this);
  mInput = GetStream()->AsSourceStream();
  mInput->AddListener(new CameraPreviewListener(this));
}
Code example #4
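AudioNodeStream::Create builds an AudioNodeStream for the given engine on either the supplied graph or the AudioContext's graph, bumps the suspended count if the context wants new streams to start suspended, propagates the node's channel-mixing parameters when an AudioNode is present, registers the stream with the graph, and returns it.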
/* static */ already_AddRefed<AudioNodeStream>
AudioNodeStream::Create(AudioContext* aCtx, AudioNodeEngine* aEngine,
                        Flags aFlags, MediaStreamGraph* aGraph)
{
  MOZ_ASSERT(NS_IsMainThread());

  // MediaRecorders use an AudioNodeStream, but no AudioNode
  AudioNode* node = aEngine->NodeMainThread();
  MediaStreamGraph* graph = aGraph ? aGraph : aCtx->Graph();

  RefPtr<AudioNodeStream> stream =
    new AudioNodeStream(aEngine, aFlags, graph->GraphRate());
  stream->mSuspendedCount += aCtx->ShouldSuspendNewStream();
  if (node) {
    stream->SetChannelMixingParametersImpl(node->ChannelCount(),
                                           node->ChannelCountModeValue(),
                                           node->ChannelInterpretationValue());
  }
  graph->AddStream(stream);
  return stream.forget();
}
Code example #5
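The DOMCameraPreview constructor follows the same pattern as CameraPreview above, but additionally adds a video track to the source stream and advances the known-tracks time to MEDIA_TIME_MAX so consumers do not wait for further track announcements.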
DOMCameraPreview::DOMCameraPreview(ICameraControl* aCameraControl, uint32_t aWidth, uint32_t aHeight, uint32_t aFrameRate)
  : nsDOMMediaStream()
  , mState(STOPPED)
  , mWidth(aWidth)
  , mHeight(aHeight)
  , mFramesPerSecond(aFrameRate)
  , mFrameCount(0)
  , mCameraControl(aCameraControl)
{
  DOM_CAMERA_LOGT("%s:%d : this=%p : mWidth=%d, mHeight=%d, mFramesPerSecond=%d\n", __func__, __LINE__, this, mWidth, mHeight, mFramesPerSecond);

  mImageContainer = LayerManager::CreateImageContainer();
  MediaStreamGraph* gm = MediaStreamGraph::GetInstance();
  mStream = gm->CreateSourceStream(this);
  mInput = GetStream()->AsSourceStream();

  mListener = new DOMCameraPreviewListener(this);
  mInput->AddListener(mListener);

  mInput->AddTrack(TRACK_VIDEO, mFramesPerSecond, 0, new VideoSegment());
  mInput->AdvanceKnownTracksTime(MEDIA_TIME_MAX);
}
Code example #6
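This main-thread runnable completes a getUserMedia() request: it builds a track union stream with audio/video content hints, connects a fresh SourceMediaStream to it through an input port, combines the stream with the window document's principal, registers a callback listener for the window, dispatches a MEDIA_START operation to the media thread, and finally invokes the success callback with the resulting stream.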
  NS_IMETHOD
  Run()
  {
    NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");

    // We're on main-thread, and the windowlist can only
    // be invalidated from the main-thread (see OnNavigation)
    StreamListeners* listeners = mManager->GetWindowListeners(mWindowID);
    if (!listeners) {
      // This window is no longer live.
      return NS_OK;
    }

    // Create a media stream.
    uint32_t hints = (mAudioSource ? nsDOMMediaStream::HINT_CONTENTS_AUDIO : 0);
    hints |= (mVideoSource ? nsDOMMediaStream::HINT_CONTENTS_VIDEO : 0);

    nsRefPtr<nsDOMUserMediaStream> trackunion =
      nsDOMUserMediaStream::CreateTrackUnionStream(hints);
    if (!trackunion) {
      nsCOMPtr<nsIDOMGetUserMediaErrorCallback> error(mError);
      LOG(("Returning error for getUserMedia() - no stream"));
      error->OnError(NS_LITERAL_STRING("NO_STREAM"));
      return NS_OK;
    }

    MediaStreamGraph* gm = MediaStreamGraph::GetInstance();
    nsRefPtr<SourceMediaStream> stream = gm->CreateSourceStream(nullptr);

    // connect the source stream to the track union stream to avoid us blocking
    trackunion->GetStream()->AsProcessedStream()->SetAutofinish(true);
    nsRefPtr<MediaInputPort> port = trackunion->GetStream()->AsProcessedStream()->
      AllocateInputPort(stream, MediaInputPort::FLAG_BLOCK_OUTPUT);
    trackunion->mSourceStream = stream;
    trackunion->mPort = port;

    nsPIDOMWindow *window = static_cast<nsPIDOMWindow*>
      (nsGlobalWindow::GetInnerWindowWithId(mWindowID));
    if (window && window->GetExtantDoc()) {
      trackunion->CombineWithPrincipal(window->GetExtantDoc()->NodePrincipal());
    }

    // Ensure there's a thread for gum to proxy to off main thread
    nsIThread *mediaThread = MediaManager::GetThread();

    // Add our listener. We'll call Start() on the source when we get a callback
    // that the MediaStream has started consuming. The listener is freed
    // when the page is invalidated (on navigation or close).
    GetUserMediaCallbackMediaStreamListener* listener =
      new GetUserMediaCallbackMediaStreamListener(mediaThread, stream.forget(),
                                                  port.forget(),
                                                  mAudioSource,
                                                  mVideoSource);
    listener->Stream()->AddListener(listener);

    // No need for locking because we always do this in the main thread.
    listeners->AppendElement(listener);

    // Dispatch to the media thread to ask it to start the sources,
    // because that can take a while
    nsRefPtr<MediaOperationRunnable> runnable(
      new MediaOperationRunnable(MEDIA_START, listener,
                                 mAudioSource, mVideoSource));
    mediaThread->Dispatch(runnable, NS_DISPATCH_NORMAL);

    // We're in the main thread, so no worries here either.
    nsCOMPtr<nsIDOMGetUserMediaSuccessCallback> success(mSuccess);
    nsCOMPtr<nsIDOMGetUserMediaErrorCallback> error(mError);

    if (!(mManager->IsWindowStillActive(mWindowID))) {
      return NS_OK;
    }
    // This is safe since we're on main-thread, and the windowlist can only
    // be invalidated from the main-thread (see OnNavigation)
    LOG(("Returning success for getUserMedia()"));
    success->OnSuccess(static_cast<nsIDOMLocalMediaStream*>(trackunion));

    return NS_OK;
  }
Code example #7
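A later variant of the previous runnable: the listener is created earlier in an inactive state and only activated here, the track union stream's principal is combined with the window document's principal up front, and (when WebRTC is built in) the audio-processing preferences for AEC, AGC, and noise suppression are read and pushed to the listener before the success callback fires.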
    NS_IMETHOD
    Run()
    {
        NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
        nsPIDOMWindow *window = static_cast<nsPIDOMWindow*>
                                (nsGlobalWindow::GetInnerWindowWithId(mWindowID));

        // We're on main-thread, and the windowlist can only
        // be invalidated from the main-thread (see OnNavigation)
        StreamListeners* listeners = mManager->GetWindowListeners(mWindowID);
        if (!listeners || !window || !window->GetExtantDoc()) {
            // This window is no longer live.  mListener has already been removed
            return NS_OK;
        }

        // Create a media stream.
        uint32_t hints = (mAudioSource ? DOMMediaStream::HINT_CONTENTS_AUDIO : 0);
        hints |= (mVideoSource ? DOMMediaStream::HINT_CONTENTS_VIDEO : 0);

        nsRefPtr<nsDOMUserMediaStream> trackunion =
            nsDOMUserMediaStream::CreateTrackUnionStream(window, hints);
        if (!trackunion) {
            nsCOMPtr<nsIDOMGetUserMediaErrorCallback> error(mError);
            LOG(("Returning error for getUserMedia() - no stream"));
            error->OnError(NS_LITERAL_STRING("NO_STREAM"));
            return NS_OK;
        }

        MediaStreamGraph* gm = MediaStreamGraph::GetInstance();
        nsRefPtr<SourceMediaStream> stream = gm->CreateSourceStream(nullptr);

        // connect the source stream to the track union stream to avoid us blocking
        trackunion->GetStream()->AsProcessedStream()->SetAutofinish(true);
        nsRefPtr<MediaInputPort> port = trackunion->GetStream()->AsProcessedStream()->
                                        AllocateInputPort(stream, MediaInputPort::FLAG_BLOCK_OUTPUT);
        trackunion->mSourceStream = stream;
        trackunion->mPort = port.forget();

        trackunion->CombineWithPrincipal(window->GetExtantDoc()->NodePrincipal());

        // The listener was added at the beginning in an inactive state.
        // Activate our listener. We'll call Start() on the source when we get a callback
        // that the MediaStream has started consuming. The listener is freed
        // when the page is invalidated (on navigation or close).
        mListener->Activate(stream.forget(), mAudioSource, mVideoSource);

        // Dispatch to the media thread to ask it to start the sources,
        // because that can take a while
        nsIThread *mediaThread = MediaManager::GetThread();
        nsRefPtr<MediaOperationRunnable> runnable(
            new MediaOperationRunnable(MEDIA_START, mListener,
                                       mAudioSource, mVideoSource, false));
        mediaThread->Dispatch(runnable, NS_DISPATCH_NORMAL);

#ifdef MOZ_WEBRTC
        // Right now these configs are only of use if webrtc is available
        nsresult rv;
        nsCOMPtr<nsIPrefService> prefs = do_GetService("@mozilla.org/preferences-service;1", &rv);
        if (NS_SUCCEEDED(rv)) {
            nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);

            if (branch) {
                int32_t aec = (int32_t) webrtc::kEcUnchanged;
                int32_t agc = (int32_t) webrtc::kAgcUnchanged;
                int32_t noise = (int32_t) webrtc::kNsUnchanged;
                bool aec_on = false, agc_on = false, noise_on = false;

                branch->GetBoolPref("media.peerconnection.aec_enabled", &aec_on);
                branch->GetIntPref("media.peerconnection.aec", &aec);
                branch->GetBoolPref("media.peerconnection.agc_enabled", &agc_on);
                branch->GetIntPref("media.peerconnection.agc", &agc);
                branch->GetBoolPref("media.peerconnection.noise_enabled", &noise_on);
                branch->GetIntPref("media.peerconnection.noise", &noise);

                mListener->AudioConfig(aec_on, (uint32_t) aec,
                                       agc_on, (uint32_t) agc,
                                       noise_on, (uint32_t) noise);
            }
        }
#endif

        // We're in the main thread, so no worries here either.
        nsCOMPtr<nsIDOMGetUserMediaSuccessCallback> success(mSuccess);
        nsCOMPtr<nsIDOMGetUserMediaErrorCallback> error(mError);

        if (!(mManager->IsWindowStillActive(mWindowID))) {
            return NS_OK;
        }
        // This is safe since we're on main-thread, and the windowlist can only
        // be invalidated from the main-thread (see OnNavigation)
        LOG(("Returning success for getUserMedia()"));
        success->OnSuccess(static_cast<nsIDOMLocalMediaStream*>(trackunion));

        return NS_OK;
    }