// Snapshot the current set of track buffers as the "essential" buffers
// required before initialization can complete, then poke the decoder's
// resource-status machinery if we are not still waiting on resources.
void MediaSourceReader::PrepareInitialization()
{
  // The track buffer lists are shared state; hold the decoder monitor.
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  MSE_DEBUG("trackBuffers=%u", mTrackBuffers.Length());
  mEssentialTrackBuffers.AppendElements(mTrackBuffers);
  mHasEssentialTrackBuffers = true;
  // NOTE(review): the notification fires only when we are NOT waiting for
  // media resources — presumably to report that resources are now ready;
  // confirm against IsWaitingMediaResources() semantics.
  if (!IsWaitingMediaResources()) {
    mDecoder->NotifyWaitingForResourcesStatusChanged();
  }
}
// Called when the owning MediaSource transitions to "ended": forwards the
// signal to the content manager and forces the decoder to refresh its
// buffered ranges.
void SourceBuffer::Ended()
{
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(IsAttached());
  MSE_DEBUG("Ended");
  mContentManager->Ended();
  // We want the MediaSourceReader to refresh its buffered range as it may
  // have been modified (end lined up).
  // mReportedOffset++ yields a strictly increasing offset so each
  // notification is treated as fresh data.
  mMediaSource->GetDecoder()->NotifyDataArrived(1, mReportedOffset++,
                                                /* aThrottleUpdates = */ false);
}
void SourceBuffer::Detach() { MOZ_ASSERT(NS_IsMainThread()); MSE_DEBUG("Detach"); if (!mMediaSource) { MSE_DEBUG("Already detached"); return; } AbortBufferAppend(); if (mContentManager) { mContentManager->Detach(); if (mIsUsingFormatReader) { mMediaSource->GetDecoder()->GetDemuxer()->DetachSourceBuffer( static_cast<mozilla::TrackBuffersManager*>(mContentManager.get())); } } mContentManager = nullptr; mMediaSource = nullptr; }
// Log the first few bytes of aData for debugging. This base implementation
// cannot recognize any media segment format, so it always reports "absent";
// format-specific subclasses override this.
bool ContainerParser::IsMediaSegmentPresent(MediaLargeByteBuffer* aData)
{
  const uint32_t length = aData->Length();
  // Bytes past the end are logged as 0 rather than read out of bounds.
  MSE_DEBUG(ContainerParser, "aLength=%u [%x%x%x%x]",
            length,
            length > 0 ? (*aData)[0] : 0,
            length > 1 ? (*aData)[1] : 0,
            length > 2 ? (*aData)[2] : 0,
            length > 3 ? (*aData)[3] : 0);
  return false;
}
// Break the association with the owning MediaSource and release the track
// buffer. After this call the SourceBuffer is inert.
void SourceBuffer::Detach()
{
  MOZ_ASSERT(NS_IsMainThread());
  MSE_DEBUG("SourceBuffer(%p)::Detach", this);
  // Let the track buffer tear itself down before we drop our reference.
  if (mTrackBuffer) {
    mTrackBuffer->Detach();
    mTrackBuffer = nullptr;
  }
  mMediaSource = nullptr;
}
void MediaSource::DurationChange(double aOldDuration, double aNewDuration) { MOZ_ASSERT(NS_IsMainThread()); MSE_DEBUG("DurationChange(aOldDuration=%f, aNewDuration=%f)", aOldDuration, aNewDuration); if (aNewDuration < aOldDuration) { // Remove all buffered data from aNewDuration. mSourceBuffers->RangeRemoval(aNewDuration, PositiveInfinity<double>()); } // TODO: If partial audio frames/text cues exist, clamp duration based on mSourceBuffers. }
// WebIDL static MediaSource.isTypeSupported(): returns true when the given
// MIME type can be handled by a SourceBuffer.
/* static */ bool MediaSource::IsTypeSupported(const GlobalObject&, const nsAString& aType)
{
#ifdef PR_LOGGING
  // Static entry point: may run before any MediaSource instance initialized
  // the log module, so create it lazily here.
  if (!gMediaSourceLog) {
    gMediaSourceLog = PR_NewLogModule("MediaSource");
  }
#endif
  nsresult rv = mozilla::IsTypeSupported(aType);
  MSE_DEBUG("MediaSource::IsTypeSupported(Type=%s) -> %x", NS_ConvertUTF16toUTF8(aType).get(), rv);
  return NS_SUCCEEDED(rv);
}
// Detach this MediaSource from its decoder and reset all attachment state,
// returning the ready state to Closed.
void MediaSource::Detach()
{
  // Fix: assert the precondition BEFORE the debug log dereferences mDecoder.
  // The original logged mDecoder->GetOwner() first, so a null decoder would
  // crash inside the log macro ahead of the diagnostic assert.
  MOZ_ASSERT(mDecoder);
  MSE_DEBUG("%p Detaching decoder %p owner %p", this, mDecoder.get(), mDecoder->GetOwner());
  mDecoder->DetachMediaSource();
  mDecoder = nullptr;
  // No duration is defined while detached.
  mDuration = UnspecifiedNaN<double>();
  mActiveSourceBuffers->Clear();
  mSourceBuffers->Clear();
  SetReadyState(MediaSourceReadyState::Closed);
}
// Detach this SourceBuffer: cancel any append in progress, then release the
// track buffer and the back-pointer to the MediaSource.
void SourceBuffer::Detach()
{
  MOZ_ASSERT(NS_IsMainThread());
  MSE_DEBUG("Detach");
  // An in-flight append must not outlive the buffer it targets.
  AbortBufferAppend();
  if (mTrackBuffer) {
    mTrackBuffer->Detach();
    mTrackBuffer = nullptr;
  }
  mMediaSource = nullptr;
}
// Queue the one-time initialization event for the first SourceBuffer to be
// initialized. Subsequent calls are no-ops.
void MediaSource::QueueInitializationEvent()
{
  MOZ_ASSERT(NS_IsMainThread());
  // Fix: the guard flag was previously set inside `if (!flag) flag = true;`
  // with no early return, so the event was re-dispatched on every call and
  // the flag was dead weight. Guard-and-return makes this truly one-shot
  // (matching the pattern used elsewhere in this file).
  if (mFirstSourceBufferInitialization) {
    return;
  }
  mFirstSourceBufferInitialization = true;
  MSE_DEBUG("MediaSource(%p)::QueueInitializationEvent()", this);
  nsRefPtr<nsIRunnable> task =
    NS_NewRunnableMethod(this, &MediaSource::InitializationEvent);
  NS_DispatchToMainThread(task);
}
// Construct a media resource backed by SourceBuffer-appended data rather
// than a network channel.
// @param aPrincipal security principal reported for the loaded data
// @param aType     MIME type of the appended byte stream
SourceBufferResource::SourceBufferResource(nsIPrincipal* aPrincipal,
                                           const nsACString& aType)
  : mPrincipal(aPrincipal)
  , mType(aType)
  , mMonitor("mozilla::SourceBufferResource::mMonitor")
  , mOffset(0)
  , mClosed(false)
  , mEnded(false)
{
  // Leak-checking bookkeeping; paired with MOZ_COUNT_DTOR in the destructor.
  MOZ_COUNT_CTOR(SourceBufferResource);
  MSE_DEBUG("%p SBR::SBR()", this);
}
// Associate aDecoder with this MediaSource, transitioning Closed -> Open.
// @return false when the MediaSource is not in the Closed ready state
//         (i.e. it is already attached).
bool MediaSource::Attach(MediaSourceDecoder* aDecoder)
{
  // Fix: assert the precondition BEFORE the debug log dereferences
  // aDecoder->GetOwner(); logging first would null-deref ahead of the assert.
  MOZ_ASSERT(aDecoder);
  MSE_DEBUG("%p Attaching decoder %p owner %p", this, aDecoder, aDecoder->GetOwner());
  if (mReadyState != MediaSourceReadyState::Closed) {
    return false;
  }
  mDecoder = aDecoder;
  mDecoder->AttachMediaSource(this);
  SetReadyState(MediaSourceReadyState::Open);
  return true;
}
// Request the next decoded audio sample, first switching to whichever
// sub-reader covers the current audio position. With no audio reader at all,
// report a decode error to the callback instead.
void MediaSourceReader::RequestAudioData()
{
  MSE_DEBUGV("MediaSourceReader(%p)::RequestAudioData", this);
  if (mAudioReader) {
    mAudioIsSeeking = false;
    // Move to the reader that buffers mLastAudioTime before requesting.
    SwitchAudioReader(mLastAudioTime);
    mAudioReader->RequestAudioData();
    return;
  }
  MSE_DEBUG("MediaSourceReader(%p)::RequestAudioData called with no audio reader", this);
  GetCallback()->OnNotDecoded(MediaData::AUDIO_DATA, RequestSampleCallback::DECODE_ERROR);
}
// Request the next decoded audio sample; signals a decode error when no
// audio reader is available.
void MediaSourceReader::RequestAudioData()
{
  MSE_DEBUGV("MediaSourceReader(%p)::RequestAudioData", this);
  if (!mAudioReader) {
    MSE_DEBUG("MediaSourceReader(%p)::RequestAudioData called with no audio reader", this);
    GetCallback()->OnDecodeError();
    return;
  }
  mAudioIsSeeking = false;
  // Possibly switch to the sub-reader covering mLastAudioTime first.
  SwitchAudioReader(mLastAudioTime);
  mAudioReader->RequestAudioData();
}
void MediaSource::QueueInitializationEvent() { MOZ_ASSERT(NS_IsMainThread()); if (mFirstSourceBufferInitialized) { return; } mFirstSourceBufferInitialized = true; MSE_DEBUG(""); nsCOMPtr<nsIRunnable> task = NS_NewRunnableMethod(this, &MediaSource::InitializationEvent); NS_DispatchToMainThread(task); }
// Remove aTrackBuffer from this reader's list and clear any per-track alias
// (audio/video) that still points at it.
void MediaSourceReader::RemoveTrackBuffer(TrackBuffer* aTrackBuffer)
{
  // Track buffer bookkeeping is protected by the decoder monitor.
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  MSE_DEBUG("RemoveTrackBuffer(%p)", aTrackBuffer);
  mTrackBuffers.RemoveElement(aTrackBuffer);
  if (aTrackBuffer == mAudioTrack) {
    mAudioTrack = nullptr;
  }
  if (aTrackBuffer == mVideoTrack) {
    mVideoTrack = nullptr;
  }
}
// Shut down this decoder: run the base-class shutdown, detach from the
// MediaSource, then wake any thread blocked in WaitForData so it can observe
// the shutdown.
void MediaSourceDecoder::Shutdown()
{
  MSE_DEBUG("MediaSourceDecoder(%p)::Shutdown", this);
  MediaDecoder::Shutdown();
  if (mMediaSource) {
    mMediaSource->Detach();
  }
  // Kick WaitForData out of its slumber.
  ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
  mon.NotifyAll();
}
// Evict buffered media up to aEnd, backing the eviction point off by a small
// safety margin when it is at or beyond the current playback position.
// @param aStart start of the requested eviction range (currently unused here)
// @param aEnd   end of the requested eviction range, in seconds
void SourceBuffer::Evict(double aStart, double aEnd)
{
  MOZ_ASSERT(NS_IsMainThread());
  MSE_DEBUG("Evict(aStart=%f, aEnd=%f)", aStart, aEnd);
  const double kSafetyThreshold = 5;
  const double now = mMediaSource->GetDecoder()->GetCurrentTime();
  double cutoff = aEnd;
  // Never evict right up to (or past) the playhead; leave some slack.
  if (now + kSafetyThreshold >= cutoff) {
    cutoff -= kSafetyThreshold;
  }
  mTrackBuffer->EvictBefore(cutoff);
}
// Main-thread shutdown of the decoder. Ordering matters: detach before the
// base-class shutdown so the track buffers are idle on the main thread.
void MediaSourceDecoder::Shutdown()
{
  MOZ_ASSERT(NS_IsMainThread());
  MSE_DEBUG("Shutdown");
  // Detach first so that TrackBuffers are unused on the main thread when
  // shut down on the decode task queue.
  if (mMediaSource) {
    mMediaSource->Detach();
  }
  mDemuxer = nullptr;
  MediaDecoder::Shutdown();
}
// Attach aDecoder to this MediaSource, transitioning Closed -> Open.
// @return false when the MediaSource is already attached (not Closed).
bool MediaSource::Attach(MediaSourceDecoder* aDecoder)
{
  MOZ_ASSERT(NS_IsMainThread());
  // Fix: assert the precondition BEFORE the debug log dereferences
  // aDecoder->GetOwner(); logging first would null-deref ahead of the assert.
  MOZ_ASSERT(aDecoder);
  MSE_DEBUG("MediaSource(%p)::Attach(aDecoder=%p) owner=%p", this, aDecoder, aDecoder->GetOwner());
  if (mReadyState != MediaSourceReadyState::Closed) {
    return false;
  }
  mDecoder = aDecoder;
  mDecoder->AttachMediaSource(this);
  SetReadyState(MediaSourceReadyState::Open);
  return true;
}
// Forward a decoded video frame to the callback, discarding frames whose
// timestamp falls before the seek threshold while threshold-dropping is on.
void OnVideoDecoded(VideoData* aSample)
{
  if (mDropVideoBeforeThreshold) {
    if (aSample->mTime < mTimeThreshold) {
      MSE_DEBUG("%p MSR::OnVideoDecoded VideoData mTime %lld below mTimeThreshold %lld", this, aSample->mTime, mTimeThreshold);
      // NOTE(review): the raw delete implies this handler owns aSample —
      // confirm the callback contract transfers ownership here.
      delete aSample;
      // Keep requesting until a frame at/after the threshold is decoded.
      GetVideoReader()->RequestVideoData(false, mTimeThreshold);
      return;
    }
    // First frame at/after the threshold: stop dropping from now on.
    mDropVideoBeforeThreshold = false;
  }
  GetCallback()->OnVideoDecoded(aSample);
}
// Implementation of the SourceBuffer.abort() WebIDL method: abort any
// in-progress update and reset the append window.
// @param aRv set to NS_ERROR_DOM_INVALID_STATE_ERR when this buffer is
//            detached or its MediaSource is not in the "open" state.
void SourceBuffer::Abort(ErrorResult& aRv)
{
  MSE_DEBUG("%p Abort()", this);
  if (!IsAttached()) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }
  if (mMediaSource->ReadyState() != MediaSourceReadyState::Open) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }
  if (mUpdating) {
    // TODO: Abort segment parser loop, buffer append, and stream append loop algorithms.
    AbortUpdating();
  }
  // TODO: Run reset parser algorithm.
  // Reset the append window to its default (whole timeline).
  mAppendWindowStart = 0;
  mAppendWindowEnd = PositiveInfinity<double>();
  MSE_DEBUG("%p Abort: Discarding decoder.", this);
  DiscardDecoder();
}
// Shut down: detach from the MediaSource first, run the base-class shutdown,
// then wake any thread blocked in WaitForData so it observes the shutdown.
void MediaSourceDecoder::Shutdown()
{
  MSE_DEBUG("Shutdown");
  // Detach first so that TrackBuffers are unused on the main thread when
  // shut down on the decode task queue.
  if (mMediaSource) {
    mMediaSource->Detach();
  }
  MediaDecoder::Shutdown();
  // Kick WaitForData out of its slumber.
  ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
  mon.NotifyAll();
}
// Handle a failed video sample request. Recovery strategy: try to switch to
// another source decoder that buffers the current position; otherwise either
// propagate a genuine decode error or wait for more data / end of stream.
void MediaSourceReader::OnVideoNotDecoded(NotDecodedReason aReason)
{
  MOZ_DIAGNOSTIC_ASSERT(!IsSeeking());
  mVideoRequest.Complete();
  MSE_DEBUG("aReason=%u IsEnded: %d", aReason, IsEnded());
  if (aReason == CANCELED) {
    mVideoPromise.Reject(CANCELED, __func__);
    return;
  }
  // if End of stream. Force switching past this stream to another reader by
  // switching to the end of the buffered range.
  // Remember the pre-adjustment position so we can restore it below.
  int64_t lastVideoTime = mLastVideoTime;
  if (aReason == END_OF_STREAM && mVideoSourceDecoder) {
    AdjustEndTime(&mLastVideoTime, mVideoSourceDecoder);
  }
  // See if we can find a different reader that can pick up where we left off.
  SwitchSourceResult result = SwitchVideoSource(&mLastVideoTime);
  if (result == SOURCE_NEW) {
    // A new reader covers mLastVideoTime: reset, seek it there, and retry.
    GetVideoReader()->ResetDecode();
    mVideoSeekRequest.Begin(GetVideoReader()->Seek(GetReaderVideoTime(mLastVideoTime), 0)
                            ->RefableThen(GetTaskQueue(), __func__, this,
                                          &MediaSourceReader::CompleteVideoSeekAndDoRequest,
                                          &MediaSourceReader::CompleteVideoSeekAndRejectPromise));
    return;
  }
  // If we got a DECODE_ERROR and we have buffered data in the requested range
  // then it must be a genuine decoding error.
  // Otherwise we can assume that the data was either evicted or explicitely
  // removed from the source buffer and we should wait for new data.
  if (aReason == DECODE_ERROR && result != SOURCE_NONE) {
    mVideoPromise.Reject(DECODE_ERROR, __func__);
    return;
  }
  CheckForWaitOrEndOfStream(MediaData::VIDEO_DATA, mLastVideoTime);
  if (mLastVideoTime - lastVideoTime >= EOS_FUZZ_US) {
    // No decoders are available to switch to. We will re-attempt from the last
    // failing position.
    mLastVideoTime = lastVideoTime;
  }
}
// Construct a SourceBuffer of MIME type aType attached to aMediaSource, and
// eagerly create its container parser and initial sub-decoder.
SourceBuffer::SourceBuffer(MediaSource* aMediaSource, const nsACString& aType)
  : DOMEventTargetHelper(aMediaSource->GetParentObject())
  , mMediaSource(aMediaSource)
  , mType(aType)
  // Default append window spans the whole timeline per the MSE spec.
  , mAppendWindowStart(0)
  , mAppendWindowEnd(PositiveInfinity<double>())
  , mTimestampOffset(0)
  , mAppendMode(SourceBufferAppendMode::Segments)
  , mUpdating(false)
  , mDecoderInitialized(false)
{
  MOZ_ASSERT(aMediaSource);
  mParser = ContainerParser::CreateForMIMEType(aType);
  MSE_DEBUG("%p SourceBuffer: Creating initial decoder.", this);
  InitNewDecoder();
}
void SourceBuffer::AppendData(const uint8_t* aData, uint32_t aLength, ErrorResult& aRv) { MSE_DEBUG("AppendData(aLength=%u)", aLength); RefPtr<MediaByteBuffer> data = PrepareAppend(aData, aLength, aRv); if (!data) { return; } mContentManager->AppendData(data, mAttributes->GetTimestampOffset()); StartUpdating(); nsCOMPtr<nsIRunnable> task = new BufferAppendRunnable(this, mUpdateID); NS_DispatchToMainThread(task); }
bool SourceBuffer::InitNewDecoder() { MOZ_ASSERT(NS_IsMainThread()); MSE_DEBUG("SourceBuffer(%p)::InitNewDecoder", this); MOZ_ASSERT(!mDecoder); MediaSourceDecoder* parentDecoder = mMediaSource->GetDecoder(); nsRefPtr<SubBufferDecoder> decoder = parentDecoder->CreateSubDecoder(mType); if (!decoder) { return false; } mDecoder = decoder; mDecoderInitialized = false; mDecoders.AppendElement(mDecoder); return true; }
void MediaSourceReader::MaybeNotifyHaveData() { ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor()); bool haveAudio = false, haveVideo = false; if (!mAudioIsSeeking && mAudioTrack && HaveData(mLastAudioTime, MediaData::AUDIO_DATA)) { haveAudio = true; WaitPromise(MediaData::AUDIO_DATA).ResolveIfExists(MediaData::AUDIO_DATA, __func__); } if (!mVideoIsSeeking && mVideoTrack && HaveData(mLastVideoTime, MediaData::VIDEO_DATA)) { haveVideo = true; WaitPromise(MediaData::VIDEO_DATA).ResolveIfExists(MediaData::VIDEO_DATA, __func__); } MSE_DEBUG("MediaSourceReader(%p)::MaybeNotifyHaveData haveAudio=%d, haveVideo=%d", this, haveAudio, haveVideo); }
// Append media bytes via the TrackBuffersManager, tracking completion of the
// asynchronous append through a promise resolved/rejected on the main thread.
// @param aRv receives the error when PrepareAppend rejects the input
void SourceBuffer::AppendData(const uint8_t* aData, uint32_t aLength, ErrorResult& aRv)
{
  MSE_DEBUG("AppendData(aLength=%u)", aLength);
  RefPtr<MediaByteBuffer> data = PrepareAppend(aData, aLength, aRv);
  if (!data) {
    // PrepareAppend already recorded the failure in aRv.
    return;
  }
  // Enter the "updating" state before kicking off the async append.
  StartUpdating();
  mPendingAppend.Begin(mTrackBuffersManager->AppendData(data, mCurrentAttributes)
                       ->Then(AbstractThread::MainThread(), __func__, this,
                              &SourceBuffer::AppendDataCompletedWithSuccess,
                              &SourceBuffer::AppendDataErrored));
}
// Append media bytes by dispatching an AppendDataRunnable to the main thread.
// Only "segments" append mode is handled (sequence mode asserts).
// @param aRv receives the error when PrepareAppend rejects the input
void SourceBuffer::AppendData(const uint8_t* aData, uint32_t aLength, ErrorResult& aRv)
{
  MSE_DEBUG("AppendData(aLength=%u)", aLength);
  nsRefPtr<LargeDataBuffer> data = PrepareAppend(aData, aLength, aRv);
  if (!data) {
    return;
  }
  StartUpdating();
  MOZ_ASSERT(mAppendMode == SourceBufferAppendMode::Segments,
             "We don't handle timestampOffset for sequence mode yet");
  // mUpdateID is captured so a later Abort() can presumably invalidate this
  // queued append — verify against AppendDataRunnable's implementation.
  nsCOMPtr<nsIRunnable> task = new AppendDataRunnable(this, data, mTimestampOffset, mUpdateID);
  NS_DispatchToMainThread(task);
}