void TrackUnionStream::CopyTrackData(StreamBuffer::Track* aInputTrack,
                                     uint32_t aMapIndex, GraphTime aFrom, GraphTime aTo,
                                     bool* aOutputTrackFinished) {
  TrackMapEntry* map = &mTrackMap[aMapIndex];
  StreamBuffer::Track* outputTrack = mBuffer.FindTrack(map->mOutputTrackID);
  MOZ_ASSERT(outputTrack && !outputTrack->IsEnded(), "Can't copy to ended track");

  MediaSegment* segment = map->mSegment;
  MediaStream* source = map->mInputPort->GetSource();

  GraphTime next;
  *aOutputTrackFinished = false;
  for (GraphTime t = aFrom; t < aTo; t = next) {
    MediaInputPort::InputInterval interval = map->mInputPort->GetNextInputInterval(t);
    interval.mEnd = std::min(interval.mEnd, aTo);
    StreamTime inputEnd = source->GraphTimeToStreamTime(interval.mEnd);
    StreamTime inputTrackEndPoint = STREAM_TIME_MAX;

    if (aInputTrack->IsEnded() && aInputTrack->GetEnd() <= inputEnd) {
      inputTrackEndPoint = aInputTrack->GetEnd();
      *aOutputTrackFinished = true;
    }

    if (interval.mStart >= interval.mEnd) {
      break;
    }
    StreamTime ticks = interval.mEnd - interval.mStart;
    next = interval.mEnd;

    StreamTime outputStart = outputTrack->GetEnd();

    if (interval.mInputIsBlocked) {
      // Maybe the input track ended?
      segment->AppendNullData(ticks);
      STREAM_LOG(PR_LOG_DEBUG+1,
                 ("TrackUnionStream %p appending %lld ticks of null data to track %d",
                  this, (long long)ticks, outputTrack->GetID()));
    } else if (InMutedCycle()) {
      segment->AppendNullData(ticks);
    } else {
      MOZ_ASSERT(outputTrack->GetEnd() == GraphTimeToStreamTime(interval.mStart),
                 "Samples missing");
      StreamTime inputStart = source->GraphTimeToStreamTime(interval.mStart);
      segment->AppendSlice(*aInputTrack->GetSegment(),
                           std::min(inputTrackEndPoint, inputStart),
                           std::min(inputTrackEndPoint, inputEnd));
    }
    ApplyTrackDisabling(outputTrack->GetID(), segment);
    for (uint32_t j = 0; j < mListeners.Length(); ++j) {
      MediaStreamListener* l = mListeners[j];
      l->NotifyQueuedTrackChanges(Graph(), outputTrack->GetID(),
                                  outputStart, 0, *segment);
    }
    outputTrack->GetSegment()->AppendFrom(segment);
  }
}
void MediaPipelineTransmit::PipelineListener::
NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid,
                         TrackRate rate,
                         TrackTicks offset,
                         uint32_t events,
                         const MediaSegment& queued_media) {
  MOZ_MTLOG(PR_LOG_DEBUG, "MediaPipeline::NotifyQueuedTrackChanges()");

  if (!active_) {
    MOZ_MTLOG(PR_LOG_DEBUG, "Discarding packets because transport not ready");
    return;
  }

  // TODO([email protected]): For now assume that we have only one
  // track type and it's destined for us
  // See bug 784517
  if (queued_media.GetType() == MediaSegment::AUDIO) {
    if (conduit_->type() != MediaSessionConduit::AUDIO) {
      // Ignore data in case we have a muxed stream
      return;
    }
    AudioSegment* audio = const_cast<AudioSegment *>(
        static_cast<const AudioSegment *>(&queued_media));

    AudioSegment::ChunkIterator iter(*audio);
    while(!iter.IsEnded()) {
      ProcessAudioChunk(static_cast<AudioSessionConduit*>(conduit_.get()),
                        rate, *iter);
      iter.Next();
    }
  } else if (queued_media.GetType() == MediaSegment::VIDEO) {
#ifdef MOZILLA_INTERNAL_API
    if (conduit_->type() != MediaSessionConduit::VIDEO) {
      // Ignore data in case we have a muxed stream
      return;
    }
    VideoSegment* video = const_cast<VideoSegment *>(
        static_cast<const VideoSegment *>(&queued_media));

    VideoSegment::ChunkIterator iter(*video);
    while(!iter.IsEnded()) {
      ProcessVideoChunk(static_cast<VideoSessionConduit*>(conduit_.get()),
                        rate, *iter);
      iter.Next();
    }
#endif
  } else {
    // Ignore
  }
}
// Forwards track-created and track-ended events from the graph thread to the
// main thread.
void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
                              StreamTime aTrackOffset,
                              TrackEventCommand aTrackEvents,
                              const MediaSegment& aQueuedMedia,
                              MediaStream* aInputStream,
                              TrackID aInputTrackID) override {
  if (aTrackEvents & TrackEventCommand::TRACK_EVENT_CREATED) {
    aGraph->DispatchToMainThreadAfterStreamStateUpdate(
      NewRunnableMethod<MediaStreamGraph*, TrackID, MediaSegment::Type,
                        RefPtr<MediaStream>, TrackID>(
        "DOMMediaStream::OwnedStreamListener::DoNotifyTrackCreated",
        this, &OwnedStreamListener::DoNotifyTrackCreated,
        aGraph, aID, aQueuedMedia.GetType(), aInputStream, aInputTrackID));
  } else if (aTrackEvents & TrackEventCommand::TRACK_EVENT_ENDED) {
    aGraph->DispatchToMainThreadAfterStreamStateUpdate(
      NewRunnableMethod<MediaStreamGraph*, RefPtr<MediaStream>, TrackID,
                        TrackID>(
        "DOMMediaStream::OwnedStreamListener::DoNotifyTrackEnded",
        this, &OwnedStreamListener::DoNotifyTrackEnded,
        aGraph, aInputStream, aInputTrackID, aID));
  }
}
void
AudioTrackEncoder::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph,
                                            TrackID aID,
                                            StreamTime aTrackOffset,
                                            uint32_t aTrackEvents,
                                            const MediaSegment& aQueuedMedia)
{
  if (mCanceled) {
    return;
  }

  const AudioSegment& audio = static_cast<const AudioSegment&>(aQueuedMedia);

  // Check and initialize parameters for codec encoder.
  if (!mInitialized) {
    mInitCounter++;
    TRACK_LOG(LogLevel::Debug, ("Init the audio encoder %d times", mInitCounter));
    AudioSegment::ChunkIterator iter(const_cast<AudioSegment&>(audio));
    while (!iter.IsEnded()) {
      AudioChunk chunk = *iter;

      // The number of channels is determined by the first non-null chunk, and
      // thus the audio encoder is initialized at this time.
      if (!chunk.IsNull()) {
        nsresult rv = Init(chunk.mChannelData.Length(), aGraph->GraphRate());
        if (NS_FAILED(rv)) {
          LOG("[AudioTrackEncoder]: Fail to initialize the encoder!");
          NotifyCancel();
        }
        break;
      }

      iter.Next();
    }

    mNotInitDuration += aQueuedMedia.GetDuration();
    if (!mInitialized &&
        (mNotInitDuration / aGraph->GraphRate() > INIT_FAILED_DURATION) &&
        mInitCounter > 1) {
      LOG("[AudioTrackEncoder]: Initialize failed for 30s.");
      NotifyEndOfStream();
      return;
    }
  }

  // Append and consume this raw segment.
  AppendAudioSegment(audio);

  // The stream has stopped and reached the end of track.
  if (aTrackEvents == MediaStreamListener::TRACK_EVENT_ENDED) {
    LOG("[AudioTrackEncoder]: Receive TRACK_EVENT_ENDED .");
    NotifyEndOfStream();
  }
}
void
VideoTrackEncoder::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph,
                                            TrackID aID,
                                            StreamTime aTrackOffset,
                                            uint32_t aTrackEvents,
                                            const MediaSegment& aQueuedMedia)
{
  if (mCanceled) {
    return;
  }

  const VideoSegment& video = static_cast<const VideoSegment&>(aQueuedMedia);

  // Check and initialize parameters for codec encoder.
  if (!mInitialized) {
    mInitCounter++;
    TRACK_LOG(LogLevel::Debug, ("Init the video encoder %d times", mInitCounter));
    VideoSegment::ChunkIterator iter(const_cast<VideoSegment&>(video));
    while (!iter.IsEnded()) {
      VideoChunk chunk = *iter;
      if (!chunk.IsNull()) {
        gfx::IntSize imgsize = chunk.mFrame.GetImage()->GetSize();
        gfx::IntSize intrinsicSize = chunk.mFrame.GetIntrinsicSize();
        nsresult rv = Init(imgsize.width, imgsize.height,
                           intrinsicSize.width, intrinsicSize.height,
                           aGraph->GraphRate());
        if (NS_FAILED(rv)) {
          LOG("[VideoTrackEncoder]: Fail to initialize the encoder!");
          NotifyCancel();
        }
        break;
      }

      iter.Next();
    }

    mNotInitDuration += aQueuedMedia.GetDuration();
    if (!mInitialized &&
        (mNotInitDuration / aGraph->GraphRate() > INIT_FAILED_DURATION) &&
        mInitCounter > 1) {
      LOG("[VideoTrackEncoder]: Initialize failed for 30s.");
      NotifyEndOfStream();
      return;
    }
  }

  AppendVideoSegment(video);

  // The stream has stopped and reached the end of track.
  if (aTrackEvents == MediaStreamListener::TRACK_EVENT_ENDED) {
    LOG("[VideoTrackEncoder]: Receive TRACK_EVENT_ENDED .");
    NotifyEndOfStream();
  }
}
void
MediaEncoder::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph,
                                       TrackID aID,
                                       StreamTime aTrackOffset,
                                       TrackEventCommand aTrackEvents,
                                       const MediaSegment& aQueuedMedia,
                                       MediaStream* aInputStream,
                                       TrackID aInputTrackID)
{
  if (!mDirectConnected) {
    NotifyRealtimeData(aGraph, aID, aTrackOffset, aTrackEvents, aQueuedMedia);
  } else {
    if (aTrackEvents != TrackEventCommand::TRACK_EVENT_NONE) {
      // forward events (TRACK_EVENT_ENDED) but not the media
      if (aQueuedMedia.GetType() == MediaSegment::VIDEO) {
        VideoSegment segment;
        NotifyRealtimeData(aGraph, aID, aTrackOffset, aTrackEvents, segment);
      } else {
        AudioSegment segment;
        NotifyRealtimeData(aGraph, aID, aTrackOffset, aTrackEvents, segment);
      }
    }
    if (mSuspended == RECORD_RESUMED) {
      if (mVideoEncoder) {
        if (aQueuedMedia.GetType() == MediaSegment::VIDEO) {
          // insert a null frame of duration equal to the first segment passed
          // after Resume(), so it'll get added to one of the DirectListener frames
          VideoSegment segment;
          gfx::IntSize size(0,0);
          segment.AppendFrame(nullptr, aQueuedMedia.GetDuration(), size,
                              PRINCIPAL_HANDLE_NONE);
          mVideoEncoder->NotifyQueuedTrackChanges(aGraph, aID, aTrackOffset,
                                                  aTrackEvents, segment);
          mSuspended = RECORD_NOT_SUSPENDED;
        }
      } else {
        mSuspended = RECORD_NOT_SUSPENDED; // no video
      }
    }
  }
}
void
MediaEncoder::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph,
                                       TrackID aID,
                                       StreamTime aTrackOffset,
                                       uint32_t aTrackEvents,
                                       const MediaSegment& aQueuedMedia)
{
  // Process the incoming raw track data from MediaStreamGraph, called on the
  // thread of MediaStreamGraph.
  if (mAudioEncoder && aQueuedMedia.GetType() == MediaSegment::AUDIO) {
    mAudioEncoder->NotifyQueuedTrackChanges(aGraph, aID, aTrackOffset,
                                            aTrackEvents, aQueuedMedia);
  } else if (mVideoEncoder && aQueuedMedia.GetType() == MediaSegment::VIDEO) {
    mVideoEncoder->NotifyQueuedTrackChanges(aGraph, aID, aTrackOffset,
                                            aTrackEvents, aQueuedMedia);
  }
}
void
MediaEncoder::NotifyRealtimeData(MediaStreamGraph* aGraph,
                                 TrackID aID,
                                 StreamTime aTrackOffset,
                                 uint32_t aTrackEvents,
                                 const MediaSegment& aRealtimeMedia)
{
  if (mSuspended) {
    return;
  }
  // Process the incoming raw track data from MediaStreamGraph, called on the
  // thread of MediaStreamGraph.
  if (mAudioEncoder && aRealtimeMedia.GetType() == MediaSegment::AUDIO) {
    mAudioEncoder->NotifyQueuedTrackChanges(aGraph, aID, aTrackOffset,
                                            aTrackEvents, aRealtimeMedia);
  } else if (mVideoEncoder &&
             aRealtimeMedia.GetType() == MediaSegment::VIDEO &&
             aTrackEvents != TrackEventCommand::TRACK_EVENT_NONE) {
    mVideoEncoder->NotifyQueuedTrackChanges(aGraph, aID, aTrackOffset,
                                            aTrackEvents, aRealtimeMedia);
  }
}
void
MediaEncoder::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph,
                                       TrackID aID,
                                       TrackRate aTrackRate,
                                       TrackTicks aTrackOffset,
                                       uint32_t aTrackEvents,
                                       const MediaSegment& aQueuedMedia)
{
  // Process the incoming raw track data from MediaStreamGraph, called on the
  // thread of MediaStreamGraph.
  if (aQueuedMedia.GetType() == MediaSegment::AUDIO) {
    mAudioEncoder->NotifyQueuedTrackChanges(aGraph, aID, aTrackRate,
                                            aTrackOffset, aTrackEvents,
                                            aQueuedMedia);
  } else {
    // Type video is not supported for now.
  }
}
// Forwards track-created and track-ended events from the graph thread to the
// main thread.
void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
                              StreamTime aTrackOffset,
                              uint32_t aTrackEvents,
                              const MediaSegment& aQueuedMedia,
                              MediaStream* aInputStream,
                              TrackID aInputTrackID) override {
  if (aTrackEvents & TRACK_EVENT_CREATED) {
    nsCOMPtr<nsIRunnable> runnable =
      NS_NewRunnableMethodWithArgs<TrackID, MediaSegment::Type>(
        this, &OwnedStreamListener::DoNotifyTrackCreated,
        aID, aQueuedMedia.GetType());
    aGraph->DispatchToMainThreadAfterStreamStateUpdate(runnable.forget());
  } else if (aTrackEvents & TRACK_EVENT_ENDED) {
    nsCOMPtr<nsIRunnable> runnable =
      NS_NewRunnableMethodWithArgs<TrackID>(
        this, &OwnedStreamListener::DoNotifyTrackEnded, aID);
    aGraph->DispatchToMainThreadAfterStreamStateUpdate(runnable.forget());
  }
}
void
MediaEncoder::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph,
                                       TrackID aID,
                                       StreamTime aTrackOffset,
                                       TrackEventCommand aTrackEvents,
                                       const MediaSegment& aQueuedMedia,
                                       MediaStream* aInputStream,
                                       TrackID aInputTrackID)
{
  if (!mDirectConnected) {
    NotifyRealtimeData(aGraph, aID, aTrackOffset, aTrackEvents, aQueuedMedia);
  } else {
    if (aTrackEvents != TrackEventCommand::TRACK_EVENT_NONE) {
      // forward events (TRACK_EVENT_ENDED) but not the media
      if (aQueuedMedia.GetType() == MediaSegment::VIDEO) {
        VideoSegment segment;
        NotifyRealtimeData(aGraph, aID, aTrackOffset, aTrackEvents, segment);
      } else {
        AudioSegment segment;
        NotifyRealtimeData(aGraph, aID, aTrackOffset, aTrackEvents, segment);
      }
    }
  }
}
void TrackUnionStream::CopyTrackData(StreamTracks::Track* aInputTrack,
                                     uint32_t aMapIndex, GraphTime aFrom, GraphTime aTo,
                                     bool* aOutputTrackFinished) {
  TrackMapEntry* map = &mTrackMap[aMapIndex];
  StreamTracks::Track* outputTrack = mTracks.FindTrack(map->mOutputTrackID);
  MOZ_ASSERT(outputTrack && !outputTrack->IsEnded(), "Can't copy to ended track");

  MediaSegment* segment = map->mSegment;
  MediaStream* source = map->mInputPort->GetSource();

  GraphTime next;
  *aOutputTrackFinished = false;
  for (GraphTime t = aFrom; t < aTo; t = next) {
    MediaInputPort::InputInterval interval = map->mInputPort->GetNextInputInterval(t);
    interval.mEnd = std::min(interval.mEnd, aTo);
    StreamTime inputEnd = source->GraphTimeToStreamTimeWithBlocking(interval.mEnd);
    StreamTime inputTrackEndPoint = STREAM_TIME_MAX;

    if (aInputTrack->IsEnded() && aInputTrack->GetEnd() <= inputEnd) {
      inputTrackEndPoint = aInputTrack->GetEnd();
      *aOutputTrackFinished = true;
    }

    if (interval.mStart >= interval.mEnd) {
      break;
    }
    StreamTime ticks = interval.mEnd - interval.mStart;
    next = interval.mEnd;

    StreamTime outputStart = outputTrack->GetEnd();

    if (interval.mInputIsBlocked) {
      // Maybe the input track ended?
      segment->AppendNullData(ticks);
      STREAM_LOG(LogLevel::Verbose,
                 ("TrackUnionStream %p appending %lld ticks of null data to track %d",
                  this, (long long)ticks, outputTrack->GetID()));
    } else if (InMutedCycle()) {
      segment->AppendNullData(ticks);
    } else {
      if (source->IsSuspended()) {
        segment->AppendNullData(aTo - aFrom);
      } else {
        MOZ_ASSERT(outputTrack->GetEnd() == GraphTimeToStreamTimeWithBlocking(interval.mStart),
                   "Samples missing");
        StreamTime inputStart = source->GraphTimeToStreamTimeWithBlocking(interval.mStart);
        segment->AppendSlice(*aInputTrack->GetSegment(),
                             std::min(inputTrackEndPoint, inputStart),
                             std::min(inputTrackEndPoint, inputEnd));
      }
    }
    ApplyTrackDisabling(outputTrack->GetID(), segment);
    for (uint32_t j = 0; j < mListeners.Length(); ++j) {
      MediaStreamListener* l = mListeners[j];
      // Separate Audio and Video.
      if (segment->GetType() == MediaSegment::AUDIO) {
        l->NotifyQueuedAudioData(Graph(), outputTrack->GetID(),
                                 outputStart,
                                 *static_cast<AudioSegment*>(segment),
                                 map->mInputPort->GetSource(),
                                 map->mInputTrackID);
      }
    }
    for (TrackBound<MediaStreamTrackListener>& b : mTrackListeners) {
      if (b.mTrackID != outputTrack->GetID()) {
        continue;
      }
      b.mListener->NotifyQueuedChanges(Graph(), outputStart, *segment);
    }
    outputTrack->GetSegment()->AppendFrom(segment);
  }
}
// Copies data for one mapped input track into its output track over the graph
// time interval [aFrom, aTo); sets *aOutputTrackFinished once the input track
// has ended and all of its data has been consumed.
void TrackUnionStream::CopyTrackData(StreamTracks::Track* aInputTrack,
                                     uint32_t aMapIndex, GraphTime aFrom,
                                     GraphTime aTo, bool* aOutputTrackFinished) {
  TrackMapEntry* map = &mTrackMap[aMapIndex];
  TRACE_AUDIO_CALLBACK_COMMENT(
      "Input stream %p track %i -> TrackUnionStream %p track %i",
      map->mInputPort->GetSource(), map->mInputTrackID, this,
      map->mOutputTrackID);
  StreamTracks::Track* outputTrack = mTracks.FindTrack(map->mOutputTrackID);
  MOZ_ASSERT(outputTrack && !outputTrack->IsEnded(), "Can't copy to ended track");

  MediaSegment* segment = map->mSegment;
  MediaStream* source = map->mInputPort->GetSource();

  GraphTime next;
  *aOutputTrackFinished = false;
  for (GraphTime t = aFrom; t < aTo; t = next) {
    MediaInputPort::InputInterval interval = map->mInputPort->GetNextInputInterval(t);
    interval.mEnd = std::min(interval.mEnd, aTo);
    StreamTime inputEnd = source->GraphTimeToStreamTimeWithBlocking(interval.mEnd);

    if (aInputTrack->IsEnded() && aInputTrack->GetEnd() <= inputEnd) {
      *aOutputTrackFinished = true;
      break;
    }

    if (interval.mStart >= interval.mEnd) {
      break;
    }
    StreamTime ticks = interval.mEnd - interval.mStart;
    next = interval.mEnd;

    StreamTime outputStart = outputTrack->GetEnd();

    if (interval.mInputIsBlocked) {
      segment->AppendNullData(ticks);
      STREAM_LOG(
          LogLevel::Verbose,
          ("TrackUnionStream %p appending %lld ticks of null data to track %d",
           this, (long long)ticks, outputTrack->GetID()));
    } else if (InMutedCycle()) {
      segment->AppendNullData(ticks);
    } else {
      if (source->IsSuspended()) {
        segment->AppendNullData(aTo - aFrom);
      } else {
        MOZ_ASSERT(outputTrack->GetEnd() ==
                       GraphTimeToStreamTimeWithBlocking(interval.mStart),
                   "Samples missing");
        StreamTime inputStart = source->GraphTimeToStreamTimeWithBlocking(interval.mStart);
        segment->AppendSlice(*aInputTrack->GetSegment(), inputStart, inputEnd);
      }
    }
    ApplyTrackDisabling(outputTrack->GetID(), segment);
    for (TrackBound<MediaStreamTrackListener>& b : mTrackListeners) {
      if (b.mTrackID != outputTrack->GetID()) {
        continue;
      }
      b.mListener->NotifyQueuedChanges(Graph(), outputStart, *segment);
    }
    outputTrack->GetSegment()->AppendFrom(segment);
  }
}