nsresult
H264Converter::CreateDecoder(DecoderDoctorDiagnostics* aDiagnostics)
{
  // Instantiate the wrapped decoder through the PDM. In AVCC mode the
  // decoder is configured from in-band extradata, so an SPS must have been
  // seen before we can proceed.
  if (mNeedAVCC && !mp4_demuxer::AnnexB::HasSPS(mCurrentConfig.mExtraData)) {
    // nothing found yet, will try again later
    return NS_ERROR_NOT_INITIALIZED;
  }
  UpdateConfigFromExtraData(mCurrentConfig.mExtraData);

  if (!mNeedAVCC) {
    // When using a decoder handling AnnexB, we get here only once from the
    // constructor. We do want to get the dimensions extracted from the SPS.
    mOriginalConfig = mCurrentConfig;
  }

  // AVCC decoders are created from the evolving configuration; AnnexB
  // decoders keep the original one.
  const auto& config = mNeedAVCC ? mCurrentConfig : mOriginalConfig;
  mDecoder = mPDM->CreateVideoDecoder({ config,
                                        mTaskQueue,
                                        mCallback,
                                        aDiagnostics,
                                        mImageContainer,
                                        mLayersBackend,
                                        mGMPCrashHelper });
  if (!mDecoder) {
    mLastError = NS_ERROR_FAILURE;
    return NS_ERROR_FAILURE;
  }
  return NS_OK;
}
// Creates the wrapped decoder from the sample's in-band extradata and kicks
// off its asynchronous initialisation.
// Returns NS_ERROR_NOT_INITIALIZED when no SPS has been seen yet (caller is
// expected to retry with a later sample), otherwise the result of
// CreateDecoder(). On success the sample is queued and Init() completion is
// routed to OnDecoderInitDone/OnDecoderInitFailed.
nsresult
H264Converter::CreateDecoderAndInit(MediaRawData* aSample)
{
  RefPtr<MediaByteBuffer> extra_data =
    mp4_demuxer::AnnexB::ExtractExtraData(aSample);
  if (!mp4_demuxer::AnnexB::HasSPS(extra_data)) {
    // No SPS available yet; cannot configure a decoder.
    return NS_ERROR_NOT_INITIALIZED;
  }
  UpdateConfigFromExtraData(extra_data);

  nsresult rv = CreateDecoder(/* DecoderDoctorDiagnostics* */ nullptr);

  if (NS_SUCCEEDED(rv)) {
    // Queue the incoming sample.
    mMediaRawSamples.AppendElement(aSample);

    // NOTE(review): 'self' appears unused — Then() is handed 'this' together
    // with member-function pointers; confirm whether the extra strong
    // reference is intentional for lifetime safety or dead code.
    RefPtr<H264Converter> self = this;
    mInitPromiseRequest.Begin(
      mDecoder->Init()
        ->Then(AbstractThread::GetCurrent()->AsTaskQueue(), __func__, this,
               &H264Converter::OnDecoderInitDone,
               &H264Converter::OnDecoderInitFailed));
  }
  return rv;
}
// Creates the wrapped decoder from the sample's in-band extradata and starts
// its asynchronous Init().
// Returns NS_ERROR_NOT_INITIALIZED when no SPS is available yet,
// NS_ERROR_DOM_MEDIA_INITIALIZING_DECODER when initialisation is in flight
// (the pending sample is presumably consumed by OnDecoderInitDone — confirm),
// or the failure code from CreateDecoder().
nsresult
H264Converter::CreateDecoderAndInit(MediaRawData* aSample)
{
  RefPtr<MediaByteBuffer> extra_data =
    mp4_demuxer::AnnexB::ExtractExtraData(aSample);
  if (!mp4_demuxer::AnnexB::HasSPS(extra_data)) {
    // No SPS available yet; cannot configure a decoder.
    return NS_ERROR_NOT_INITIALIZED;
  }
  UpdateConfigFromExtraData(extra_data);

  nsresult rv = CreateDecoder(mCurrentConfig,
                              /* DecoderDoctorDiagnostics* */ nullptr);
  if (NS_SUCCEEDED(rv)) {
    // Queue the incoming sample.
    mPendingSample = aSample;
    mDecoder->Init()
      ->Then(AbstractThread::GetCurrent()->AsTaskQueue(), __func__, this,
             &H264Converter::OnDecoderInitDone,
             &H264Converter::OnDecoderInitFailed)
      ->Track(mInitPromiseRequest);
    // Signal the caller that an async operation is pending.
    return NS_ERROR_DOM_MEDIA_INITIALIZING_DECODER;
  }
  return rv;
}
// Creates the wrapped decoder and starts its asynchronous Init().
// Prefers in-band extradata from aSample; falls back to the out-of-band
// extradata already present in mCurrentConfig when the sample carries none.
// Returns NS_ERROR_NOT_INITIALIZED when neither source has an SPS,
// NS_ERROR_DOM_MEDIA_INITIALIZING_DECODER when initialisation is in flight,
// or the failure from CreateDecoder().
MediaResult
H264Converter::CreateDecoderAndInit(MediaRawData* aSample)
{
  RefPtr<MediaByteBuffer> extra_data = H264::ExtractExtraData(aSample);
  bool inbandExtradata = H264::HasSPS(extra_data);
  if (!inbandExtradata && !H264::HasSPS(mCurrentConfig.mExtraData)) {
    // No SPS anywhere yet; cannot configure a decoder.
    return NS_ERROR_NOT_INITIALIZED;
  }
  if (inbandExtradata) {
    // In-band SPS/PPS takes precedence over the container-level config.
    UpdateConfigFromExtraData(extra_data);
  }

  MediaResult rv =
    CreateDecoder(mCurrentConfig, /* DecoderDoctorDiagnostics* */ nullptr);

  if (NS_SUCCEEDED(rv)) {
    // 'self' holds a strong reference so the converter outlives the async
    // Init() chain; 'sample' keeps the first sample alive until decoded.
    RefPtr<H264Converter> self = this;
    RefPtr<MediaRawData> sample = aSample;
    mDecoder->Init()
      ->Then(
        AbstractThread::GetCurrent()->AsTaskQueue(), __func__,
        [self, sample, this](const TrackType aTrackType) {
          mInitPromiseRequest.Complete();
          // Cache what the freshly created decoder wants so later samples
          // are converted (or not) accordingly.
          mNeedAVCC =
            Some(mDecoder->NeedsConversion() == ConversionRequired::kNeedAVCC);
          mCanRecycleDecoder = Some(CanRecycleDecoder());
          if (!mFlushPromise.IsEmpty()) {
            // A Flush is pending, abort the current operation.
            mFlushPromise.Resolve(true, __func__);
            return;
          }
          DecodeFirstSample(sample);
        },
        [self, this](const MediaResult& aError) {
          mInitPromiseRequest.Complete();
          if (!mFlushPromise.IsEmpty()) {
            // A Flush is pending, abort the current operation.
            mFlushPromise.Reject(aError, __func__);
            return;
          }
          mDecodePromise.Reject(
            MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                        RESULT_DETAIL("Unable to initialize H264 decoder")),
            __func__);
        })
      ->Track(mInitPromiseRequest);
    // Signal the caller that an async operation is pending.
    return NS_ERROR_DOM_MEDIA_INITIALIZING_DECODER;
  }
  return rv;
}
// Instantiates the wrapped decoder for aConfig via the PDM.
// Rejects configurations the platform decoders cannot handle (YUV444 /
// Hi444PP profile) and invalid SPS data with a fatal MediaResult, reporting
// to aDiagnostics (may be null) where applicable.
// Returns NS_ERROR_NOT_INITIALIZED when no SPS is present yet, NS_OK on
// success.
MediaResult
H264Converter::CreateDecoder(const VideoInfo& aConfig,
                             DecoderDoctorDiagnostics* aDiagnostics)
{
  if (!H264::HasSPS(aConfig.mExtraData)) {
    // nothing found yet, will try again later
    return NS_ERROR_NOT_INITIALIZED;
  }
  UpdateConfigFromExtraData(aConfig.mExtraData);

  SPSData spsdata;
  if (H264::DecodeSPSFromExtraData(aConfig.mExtraData, spsdata)) {
    // Do some format check here.
    // WMF H.264 Video Decoder and Apple ATDecoder do not support YUV444
    // format.
    if (spsdata.profile_idc == 244 /* Hi444PP */ ||
        spsdata.chroma_format_idc == PDMFactory::kYUV444) {
      if (aDiagnostics) {
        aDiagnostics->SetVideoNotSupported();
      }
      return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                         RESULT_DETAIL("No support for YUV444 format."));
    }
  } else {
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("Invalid SPS NAL."));
  }

  // 'error' receives a more specific failure reason from the PDM when the
  // decoder cannot be created.
  MediaResult error = NS_OK;
  mDecoder = mPDM->CreateVideoDecoder({ aConfig,
                                        mTaskQueue,
                                        aDiagnostics,
                                        mImageContainer,
                                        mKnowsCompositor,
                                        mGMPCrashHelper,
                                        mType,
                                        mOnWaitingForKeyEvent,
                                        mDecoderOptions,
                                        mRate,
                                        &error });
  if (!mDecoder) {
    if (NS_FAILED(error)) {
      // The decoder supports CreateDecoderParam::mError, returns the value.
      return error;
    } else {
      return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                         RESULT_DETAIL("Unable to create H264 decoder"));
    }
  }
  DDLINKCHILD("decoder", mDecoder.get());
  // A new decoder must be fed a keyframe first.
  mNeedKeyframe = true;
  return NS_OK;
}
// Detects an in-band SPS that differs from the decoder's current
// configuration. When the decoder can be recycled the new config is applied
// in place; otherwise the current decoder is asynchronously flushed, shut
// down and recreated with the queued sample.
// Returns NS_OK when nothing needs to happen (no SPS, unchanged SPS, or
// recycled in place), NS_ERROR_DOM_MEDIA_INITIALIZING_DECODER when the
// flush/shutdown/recreate chain has been started.
// Fix: the MOZ_CRASH diagnostic string carried a stray trailing apostrophe
// ("Can't reach here'").
nsresult
H264Converter::CheckForSPSChange(MediaRawData* aSample)
{
  RefPtr<MediaByteBuffer> extra_data =
    mp4_demuxer::AnnexB::ExtractExtraData(aSample);
  if (!mp4_demuxer::AnnexB::HasSPS(extra_data) ||
      mp4_demuxer::AnnexB::CompareExtraData(extra_data,
                                            mCurrentConfig.mExtraData)) {
    // No in-band SPS, or identical to the current one: nothing to do.
    return NS_OK;
  }

  // Keep the sample alive across the async chain below.
  RefPtr<MediaRawData> sample = aSample;
  if (CanRecycleDecoder()) {
    // Do not recreate the decoder, reuse it.
    UpdateConfigFromExtraData(extra_data);
    if (!sample->mTrackInfo) {
      sample->mTrackInfo = new TrackInfoSharedPtr(mCurrentConfig, 0);
    }
    mNeedKeyframe = true;
    return NS_OK;
  }

  // The SPS has changed, signal to flush the current decoder and create a
  // new one. 'self' keeps the converter alive for the duration of the chain.
  RefPtr<H264Converter> self = this;
  mDecoder->Flush()
    ->Then(AbstractThread::GetCurrent()->AsTaskQueue(), __func__,
           [self, sample, this]() {
             mFlushRequest.Complete();
             mShutdownPromise = Shutdown();
             mShutdownPromise
               ->Then(AbstractThread::GetCurrent()->AsTaskQueue(), __func__,
                      [self, sample, this]() {
                        mShutdownRequest.Complete();
                        mShutdownPromise = nullptr;
                        // Re-query the new decoder's conversion needs later.
                        mNeedAVCC.reset();
                        nsresult rv = CreateDecoderAndInit(sample);
                        if (rv == NS_ERROR_DOM_MEDIA_INITIALIZING_DECODER) {
                          // All good so far, will continue later.
                          return;
                        }
                        MOZ_ASSERT(NS_FAILED(rv));
                        mDecodePromise.Reject(rv, __func__);
                        return;
                      },
                      // Shutdown() is expected to always resolve.
                      [] { MOZ_CRASH("Can't reach here"); })
               ->Track(mShutdownRequest);
           },
           [self, this](const MediaResult& aError) {
             mFlushRequest.Complete();
             mDecodePromise.Reject(aError, __func__);
           })
    ->Track(mFlushRequest);
  return NS_ERROR_DOM_MEDIA_INITIALIZING_DECODER;
}
// Creates the wrapped decoder from the sample's in-band extradata and
// initialises it synchronously via Init().
// Returns NS_ERROR_NOT_INITIALIZED when no SPS has been seen yet (caller is
// expected to retry with a later sample), a CreateDecoder() failure code, or
// the result of Init().
// Fix: replaced the deprecated nsRefPtr alias with RefPtr, matching the
// smart-pointer type used everywhere else in this file.
nsresult
H264Converter::CreateDecoderAndInit(MediaRawData* aSample)
{
  RefPtr<MediaByteBuffer> extra_data =
    mp4_demuxer::AnnexB::ExtractExtraData(aSample);
  if (!mp4_demuxer::AnnexB::HasSPS(extra_data)) {
    // No SPS available yet; cannot configure a decoder.
    return NS_ERROR_NOT_INITIALIZED;
  }
  UpdateConfigFromExtraData(extra_data);

  nsresult rv = CreateDecoder();
  NS_ENSURE_SUCCESS(rv, rv);
  return Init();
}
// Instantiates the wrapped decoder for aConfig via the PDM.
// Rejects configurations the platform decoders cannot handle (YUV444 /
// Hi444PP profile) and invalid SPS data, recording the failure in mLastError
// and reporting to aDiagnostics (may be null) where applicable.
// Returns NS_ERROR_NOT_INITIALIZED when no SPS is present yet, NS_OK on
// success.
nsresult
H264Converter::CreateDecoder(const VideoInfo& aConfig,
                             DecoderDoctorDiagnostics* aDiagnostics)
{
  if (!mp4_demuxer::AnnexB::HasSPS(aConfig.mExtraData)) {
    // nothing found yet, will try again later
    return NS_ERROR_NOT_INITIALIZED;
  }
  UpdateConfigFromExtraData(aConfig.mExtraData);

  mp4_demuxer::SPSData spsdata;
  if (mp4_demuxer::H264::DecodeSPSFromExtraData(aConfig.mExtraData, spsdata)) {
    // Do some format check here.
    // WMF H.264 Video Decoder and Apple ATDecoder do not support YUV444
    // format.
    if (spsdata.profile_idc == 244 /* Hi444PP */ ||
        spsdata.chroma_format_idc == PDMFactory::kYUV444) {
      mLastError = NS_ERROR_FAILURE;
      if (aDiagnostics) {
        aDiagnostics->SetVideoNotSupported();
      }
      return NS_ERROR_FAILURE;
    }
  } else {
    // SPS was invalid.
    mLastError = NS_ERROR_FAILURE;
    return NS_ERROR_FAILURE;
  }

  mDecoder = mPDM->CreateVideoDecoder({ aConfig,
                                        mTaskQueue,
                                        aDiagnostics,
                                        mImageContainer,
                                        mKnowsCompositor,
                                        mGMPCrashHelper,
                                        mType,
                                        mOnWaitingForKeyEvent,
                                        mDecoderOptions });
  if (!mDecoder) {
    mLastError = NS_ERROR_FAILURE;
    return NS_ERROR_FAILURE;
  }
  // A new decoder must be fed a keyframe first.
  mNeedKeyframe = true;
  return NS_OK;
}
// Detects an SPS change, either in-band (extracted from aSample) or — for
// recyclable decoders only — in the sample's out-of-band extradata.
// When the decoder can be recycled the new config is applied in place;
// otherwise the current decoder is drained, flushed and recreated.
// Returns NS_OK when no action is needed or the decoder was reconfigured in
// place; NS_ERROR_DOM_MEDIA_INITIALIZING_DECODER when the async
// drain/flush/recreate sequence has been started.
MediaResult
H264Converter::CheckForSPSChange(MediaRawData* aSample)
{
  RefPtr<MediaByteBuffer> extra_data = H264::ExtractExtraData(aSample);
  if (!H264::HasSPS(extra_data)) {
    MOZ_ASSERT(mCanRecycleDecoder.isSome());
    if (!*mCanRecycleDecoder) {
      // If the decoder can't be recycled, the out of band extradata will never
      // change as the H264Converter will be recreated by the MediaFormatReader
      // instead. So there's no point in testing for changes.
      return NS_OK;
    }
    // This sample doesn't contain inband SPS/PPS
    // We now check if the out of band one has changed.
    // This scenario can only occur on Android with devices that can recycle a
    // decoder.
    if (!H264::HasSPS(aSample->mExtraData) ||
        H264::CompareExtraData(aSample->mExtraData, mOriginalExtraData)) {
      return NS_OK;
    }
    // Out-of-band extradata changed: remember it and treat it as the new SPS.
    extra_data = mOriginalExtraData = aSample->mExtraData;
  }
  if (H264::CompareExtraData(extra_data, mCurrentConfig.mExtraData)) {
    // Identical to the current configuration; nothing to do.
    return NS_OK;
  }

  MOZ_ASSERT(mCanRecycleDecoder.isSome());
  if (*mCanRecycleDecoder) {
    // Do not recreate the decoder, reuse it.
    UpdateConfigFromExtraData(extra_data);
    if (!aSample->mTrackInfo) {
      aSample->mTrackInfo = new TrackInfoSharedPtr(mCurrentConfig, 0);
    }
    mNeedKeyframe = true;
    return NS_OK;
  }

  // The SPS has changed, signal to drain the current decoder and once done
  // create a new one.
  DrainThenFlushDecoder(aSample);
  return NS_ERROR_DOM_MEDIA_INITIALIZING_DECODER;
}
// Instantiates the wrapped decoder from mCurrentConfig via the PDM.
// Rejects YUV444 chroma (unsupported by the platform decoders targeted here)
// and invalid SPS data, recording the failure in mLastError and reporting to
// aDiagnostics (may be null) where applicable.
// Returns NS_ERROR_NOT_INITIALIZED when no SPS is present yet, NS_OK on
// success.
nsresult
H264Converter::CreateDecoder(DecoderDoctorDiagnostics* aDiagnostics)
{
  if (!mp4_demuxer::AnnexB::HasSPS(mCurrentConfig.mExtraData)) {
    // nothing found yet, will try again later
    return NS_ERROR_NOT_INITIALIZED;
  }
  UpdateConfigFromExtraData(mCurrentConfig.mExtraData);

  mp4_demuxer::SPSData spsdata;
  if (mp4_demuxer::H264::DecodeSPSFromExtraData(mCurrentConfig.mExtraData,
                                                spsdata)) {
    // Do some format check here.
    // WMF H.264 Video Decoder and Apple ATDecoder do not support YUV444
    // format.
    if (spsdata.chroma_format_idc == 3 /*YUV444*/) {
      mLastError = NS_ERROR_FAILURE;
      if (aDiagnostics) {
        aDiagnostics->SetVideoNotSupported();
      }
      return NS_ERROR_FAILURE;
    }
  } else {
    // SPS was invalid.
    mLastError = NS_ERROR_FAILURE;
    return NS_ERROR_FAILURE;
  }

  mDecoder = mPDM->CreateVideoDecoder({ mCurrentConfig,
                                        mTaskQueue,
                                        mCallback,
                                        aDiagnostics,
                                        mImageContainer,
                                        mKnowsCompositor,
                                        mGMPCrashHelper });
  if (!mDecoder) {
    mLastError = NS_ERROR_FAILURE;
    return NS_ERROR_FAILURE;
  }
  // A new decoder must be fed a keyframe first.
  mNeedKeyframe = true;
  return NS_OK;
}
nsresult
H264Converter::CreateDecoder()
{
  // Instantiate the wrapped decoder. In AVCC mode we cannot configure it
  // until an SPS has been extracted from the stream.
  if (mNeedAVCC && !mp4_demuxer::AnnexB::HasSPS(mCurrentConfig.mExtraData)) {
    // nothing found yet, will try again later
    return NS_ERROR_NOT_INITIALIZED;
  }
  UpdateConfigFromExtraData(mCurrentConfig.mExtraData);

  if (mNeedAVCC) {
    // AVCC decoder: follow the evolving configuration.
    mDecoder = mPDM->CreateVideoDecoder(mCurrentConfig,
                                        mLayersBackend,
                                        mImageContainer,
                                        mVideoTaskQueue,
                                        mCallback);
  } else {
    // AnnexB decoder: keep the original configuration.
    mDecoder = mPDM->CreateVideoDecoder(mOriginalConfig,
                                        mLayersBackend,
                                        mImageContainer,
                                        mVideoTaskQueue,
                                        mCallback);
  }

  if (!mDecoder) {
    mLastError = NS_ERROR_FAILURE;
    return NS_ERROR_FAILURE;
  }
  return NS_OK;
}
nsresult
H264Converter::CheckForSPSChange(MediaRawData* aSample)
{
  // Look for an in-band SPS that differs from the current configuration.
  RefPtr<MediaByteBuffer> newExtraData =
    mp4_demuxer::AnnexB::ExtractExtraData(aSample);
  if (!mp4_demuxer::AnnexB::HasSPS(newExtraData)) {
    // Sample carries no SPS; nothing to compare against.
    return NS_OK;
  }
  if (mp4_demuxer::AnnexB::CompareExtraData(newExtraData,
                                            mCurrentConfig.mExtraData)) {
    // Identical SPS; keep the current decoder untouched.
    return NS_OK;
  }

  if (!mNeedAVCC) {
    // AnnexB decoder: apply the new configuration in place.
    UpdateConfigFromExtraData(newExtraData);
    mDecoder->ConfigurationChanged(mCurrentConfig);
    return NS_OK;
  }

  // AVCC decoder: it must be torn down and rebuilt with the new extradata.
  mDecoder->Flush();
  Shutdown();
  return CreateDecoderAndInit(aSample);
}
nsresult
H264Converter::CheckForSPSChange(MediaRawData* aSample)
{
  // Look for an in-band SPS that differs from the one the decoder was
  // configured with.
  RefPtr<MediaByteBuffer> newExtraData =
    mp4_demuxer::AnnexB::ExtractExtraData(aSample);
  if (!mp4_demuxer::AnnexB::HasSPS(newExtraData)) {
    // Sample carries no SPS; nothing to compare against.
    return NS_OK;
  }
  if (mp4_demuxer::AnnexB::CompareExtraData(newExtraData,
                                            mCurrentConfig.mExtraData)) {
    // Identical SPS; keep the current decoder untouched.
    return NS_OK;
  }

  if (MediaPrefs::MediaDecoderCheckRecycling() &&
      mDecoder->SupportDecoderRecycling()) {
    // Do not recreate the decoder, reuse it.
    UpdateConfigFromExtraData(newExtraData);
    mNeedKeyframe = true;
    return NS_OK;
  }

  // The SPS has changed, signal to flush the current decoder and create a
  // new one.
  mDecoder->Flush();
  Shutdown();
  return CreateDecoderAndInit(aSample);
}
// Detects an in-band SPS that differs from the decoder's current
// configuration. When the decoder can be recycled it is flushed and reused
// with the updated config; otherwise it is asynchronously flushed, shut down
// and recreated from the pending sample.
// Returns NS_OK when nothing needs to happen, or
// NS_ERROR_DOM_MEDIA_INITIALIZING_DECODER when an async operation is pending.
// Fix: the MOZ_CRASH diagnostic string carried a stray trailing apostrophe
// ("Can't reach here'").
nsresult
H264Converter::CheckForSPSChange(MediaRawData* aSample)
{
  RefPtr<MediaByteBuffer> extra_data =
    mp4_demuxer::AnnexB::ExtractExtraData(aSample);
  if (!mp4_demuxer::AnnexB::HasSPS(extra_data) ||
      mp4_demuxer::AnnexB::CompareExtraData(extra_data,
                                            mCurrentConfig.mExtraData)) {
    // No in-band SPS, or identical to the current one: nothing to do.
    return NS_OK;
  }

  // Hold on to the sample until the async operation below completes.
  mPendingSample = aSample;

  if (CanRecycleDecoder()) {
    // Do not recreate the decoder, reuse it.
    UpdateConfigFromExtraData(extra_data);
    // Ideally we would want to drain the decoder instead of flushing it.
    // However the draining operation requires calling Drain and looping
    // several times which isn't possible from within the H264Converter. So
    // instead we flush the decoder. In practice, this is a no-op as SPS
    // change will only be used with MSE. And with MSE, the MediaFormatReader
    // would have drained the decoder already.
    RefPtr<H264Converter> self = this;
    mDecoder->Flush()
      ->Then(AbstractThread::GetCurrent()->AsTaskQueue(), __func__,
             [self, this]() {
               mFlushRequest.Complete();
               DecodeFirstSample(mPendingSample);
               mPendingSample = nullptr;
             },
             [self, this](const MediaResult& aError) {
               mFlushRequest.Complete();
               mDecodePromise.Reject(aError, __func__);
             })
      ->Track(mFlushRequest);
    mNeedKeyframe = true;
    // This is not really initializing the decoder, but it will do as it
    // indicates an operation is pending.
    return NS_ERROR_DOM_MEDIA_INITIALIZING_DECODER;
  }

  // The SPS has changed, signal to flush the current decoder and create a
  // new one. 'self' keeps the converter alive for the duration of the chain.
  RefPtr<H264Converter> self = this;
  mDecoder->Flush()
    ->Then(AbstractThread::GetCurrent()->AsTaskQueue(), __func__,
           [self, this]() {
             mFlushRequest.Complete();
             mShutdownPromise = Shutdown();
             mShutdownPromise
               ->Then(AbstractThread::GetCurrent()->AsTaskQueue(), __func__,
                      [self, this]() {
                        mShutdownRequest.Complete();
                        mShutdownPromise = nullptr;
                        // Re-query the new decoder's conversion needs later.
                        mNeedAVCC.reset();
                        RefPtr<MediaRawData> sample = mPendingSample.forget();
                        nsresult rv = CreateDecoderAndInit(sample);
                        if (rv == NS_ERROR_DOM_MEDIA_INITIALIZING_DECODER) {
                          // All good so far, will continue later.
                          return;
                        }
                        MOZ_ASSERT(NS_FAILED(rv));
                        mDecodePromise.Reject(rv, __func__);
                        return;
                      },
                      // Shutdown() is expected to always resolve.
                      [] { MOZ_CRASH("Can't reach here"); })
               ->Track(mShutdownRequest);
           },
           [self, this](const MediaResult& aError) {
             mFlushRequest.Complete();
             mDecodePromise.Reject(aError, __func__);
           })
    ->Track(mFlushRequest);
  return NS_ERROR_DOM_MEDIA_INITIALIZING_DECODER;
}