// Bundles the video-relevant fields from the generic CreateDecoderParams
// into a GMP (Gecko Media Plugin) specific parameter object.
GMPVideoDecoderParams::GMPVideoDecoderParams(const CreateDecoderParams& aParams)
  : mConfig(aParams.VideoConfig())
  , mTaskQueue(aParams.mTaskQueue)
  , mImageContainer(aParams.mImageContainer)
  , mLayersBackend(aParams.GetLayersBackend())
  , mCrashHelper(aParams.mCrashHelper)
{
}
// Constructs a VPX decoder; the concrete codec variant is derived from the
// stream's MIME type via MimeTypeToCodec.
VPXDecoder::VPXDecoder(const CreateDecoderParams& aParams)
  : mImageContainer(aParams.mImageContainer)
  , mTaskQueue(aParams.mTaskQueue)
  , mInfo(aParams.VideoConfig())
  , mCodec(MimeTypeToCodec(aParams.VideoConfig().mMimeType))
{
  MOZ_COUNT_CTOR(VPXDecoder);
  // Zero both codec contexts up front so teardown is safe even if the
  // decoder is destroyed before initialization completes.
  // (mVPXAlpha presumably holds a second context for an alpha plane —
  // confirm against the class declaration.)
  PodZero(&mVPX);
  PodZero(&mVPXAlpha);
}
// Constructs a VPX decoder. The codec variant comes from the MIME type, and
// low-latency mode is enabled when the caller passed the LowLatency option.
VPXDecoder::VPXDecoder(const CreateDecoderParams& aParams)
  : mImageContainer(aParams.mImageContainer),
    mImageAllocator(aParams.mKnowsCompositor),
    mTaskQueue(aParams.mTaskQueue),
    mInfo(aParams.VideoConfig()),
    mCodec(MimeTypeToCodec(aParams.VideoConfig().mMimeType)),
    mLowLatency(
        aParams.mOptions.contains(CreateDecoderParams::Option::LowLatency)) {
  MOZ_COUNT_CTOR(VPXDecoder);
  // Zero both codec contexts so destruction is safe before Init() runs.
  PodZero(&mVPX);
  PodZero(&mVPXAlpha);
}
// Constructs an AOM (AV1) decoder wrapper around the supplied video config.
AOMDecoder::AOMDecoder(const CreateDecoderParams& aParams)
  : mImageContainer(aParams.mImageContainer)
  , mTaskQueue(aParams.mTaskQueue)
  , mInfo(aParams.VideoConfig())
{
  // Zero the codec context so destruction is safe before initialization.
  PodZero(&mCodec);
}
// Wraps a platform decoder, deferring/recreating the inner decoder as the
// H.264 stream's configuration is discovered. Keeps both the original and a
// mutable "current" copy of the video config for that purpose.
H264Converter::H264Converter(PlatformDecoderModule* aPDM,
                             const CreateDecoderParams& aParams)
  : mPDM(aPDM)
  , mOriginalConfig(aParams.VideoConfig())
  , mCurrentConfig(aParams.VideoConfig())
  , mKnowsCompositor(aParams.mKnowsCompositor)
  , mImageContainer(aParams.mImageContainer)
  , mTaskQueue(aParams.mTaskQueue)
  , mDecoder(nullptr)
  , mGMPCrashHelper(aParams.mCrashHelper)
  , mLastError(NS_OK)
  , mType(aParams.mType)
  , mOnWaitingForKeyEvent(aParams.mOnWaitingForKeyEvent)
{
  // Attempt to create the inner decoder immediately; diagnostics (if any)
  // are forwarded so the caller can learn why creation failed.
  CreateDecoder(aParams.mDiagnostics);
}
// Builds a VideoToolbox-backed video decoder from the creation parameters.
already_AddRefed<MediaDataDecoder>
AppleDecoderModule::CreateVideoDecoder(const CreateDecoderParams& aParams)
{
  RefPtr<MediaDataDecoder> vtDecoder =
    new AppleVTDecoder(aParams.VideoConfig(),
                       aParams.mTaskQueue,
                       aParams.mImageContainer,
                       aParams.mOptions);
  return vtDecoder.forget();
}
// Wraps a platform decoder for H.264, tracking both the original and the
// currently-active video config. mNeedAVCC records whether the wrapped PDM
// wants AVCC-framed input (queried via DecoderNeedsConversion).
H264Converter::H264Converter(PlatformDecoderModule* aPDM,
                             const CreateDecoderParams& aParams)
  : mPDM(aPDM)
  , mOriginalConfig(aParams.VideoConfig())
  , mCurrentConfig(aParams.VideoConfig())
  , mLayersBackend(aParams.mLayersBackend)
  , mImageContainer(aParams.mImageContainer)
  , mTaskQueue(aParams.mTaskQueue)
  , mCallback(aParams.mCallback)
  , mDecoder(nullptr)
  , mGMPCrashHelper(aParams.mCrashHelper)
  , mNeedAVCC(aPDM->DecoderNeedsConversion(aParams.mConfig)
              == PlatformDecoderModule::kNeedAVCC)
  , mLastError(NS_OK)
{
  // Try to create the inner decoder right away; failures are reported
  // through the diagnostics object when one was supplied.
  CreateDecoder(aParams.mDiagnostics);
}
// Builds an AudioToolbox-backed audio decoder from the creation parameters.
already_AddRefed<MediaDataDecoder>
AppleDecoderModule::CreateAudioDecoder(const CreateDecoderParams& aParams)
{
  RefPtr<MediaDataDecoder> atDecoder =
    new AppleATDecoder(aParams.AudioConfig(), aParams.mTaskQueue);
  return atDecoder.forget();
}
// Builds a Gonk (B2G) audio decoder: a codec-specific manager driven by the
// generic GonkMediaDataDecoder, reporting through the supplied callback.
already_AddRefed<MediaDataDecoder>
GonkDecoderModule::CreateAudioDecoder(const CreateDecoderParams& aParams)
{
  GonkAudioDecoderManager* audioManager =
    new GonkAudioDecoderManager(aParams.AudioConfig());
  RefPtr<MediaDataDecoder> audioDecoder =
    new GonkMediaDataDecoder(audioManager, aParams.mCallback);
  return audioDecoder.forget();
}
// Decode thread. already_AddRefed<MediaDataDecoder> CreateVideoDecoder(const CreateDecoderParams& aParams) override { const VideoInfo& config = aParams.VideoConfig(); BlankVideoDataCreator* creator = new BlankVideoDataCreator( config.mDisplay.width, config.mDisplay.height, aParams.mImageContainer); RefPtr<MediaDataDecoder> decoder = new BlankMediaDataDecoder<BlankVideoDataCreator>(creator, aParams); return decoder.forget(); }
// Bundles the video-relevant fields from CreateDecoderParams for the GMP
// decoder. Callback and adapter start null and are filled in later by the
// owner (confirm against the class declaration).
GMPVideoDecoderParams::GMPVideoDecoderParams(const CreateDecoderParams& aParams)
  : mConfig(aParams.VideoConfig())
  , mTaskQueue(aParams.mTaskQueue)
  , mCallback(nullptr)
  , mAdapter(nullptr)
  , mImageContainer(aParams.mImageContainer)
  , mLayersBackend(aParams.mLayersBackend)
  , mCrashHelper(aParams.mCrashHelper)
{}
// Decode thread. already_AddRefed<MediaDataDecoder> CreateAudioDecoder(const CreateDecoderParams& aParams) override { const AudioInfo& config = aParams.AudioConfig(); BlankAudioDataCreator* creator = new BlankAudioDataCreator( config.mChannels, config.mRate); RefPtr<MediaDataDecoder> decoder = new BlankMediaDataDecoder<BlankAudioDataCreator>(creator, aParams); return decoder.forget(); }
// Takes ownership of aCreator (stored in mCreator). For H.264 video tracks,
// mMaxRefFrames is computed from the SPS in the extra data so output can be
// reordered like a real decoder would; all other tracks use 0.
BlankMediaDataDecoder(BlankMediaDataCreator* aCreator,
                      const CreateDecoderParams& aParams)
  : mCreator(aCreator)
  , mCallback(aParams.mCallback)
  , mMaxRefFrames(aParams.mConfig.GetType() == TrackInfo::kVideoTrack &&
                  MP4Decoder::IsH264(aParams.mConfig.mMimeType)
                  ? mp4_demuxer::H264::ComputeMaxRefFrames(
                      aParams.VideoConfig().mExtraData)
                  : 0)
  , mType(aParams.mConfig.GetType())
{
}
// Constructs a libvorbis-backed audio decoder; packet/frame counters start
// at zero.
VorbisDataDecoder::VorbisDataDecoder(const CreateDecoderParams& aParams)
  : mInfo(aParams.AudioConfig())
  , mTaskQueue(aParams.mTaskQueue)
  , mPacketCount(0)
  , mFrames(0)
{
  // Zero these member vars to avoid crashes in Vorbis clear functions when
  // destructor is called before |Init|.
  PodZero(&mVorbisBlock);
  PodZero(&mVorbisDsp);
  PodZero(&mVorbisInfo);
  PodZero(&mVorbisComment);
}
// Builds a Windows Media Foundation audio decoder. Returns nullptr when the
// underlying MFT fails to initialize.
already_AddRefed<MediaDataDecoder>
WMFDecoderModule::CreateAudioDecoder(const CreateDecoderParams& aParams)
{
  nsAutoPtr<WMFAudioMFTManager> mftManager(
    new WMFAudioMFTManager(aParams.AudioConfig()));
  if (!mftManager->Init()) {
    return nullptr;
  }
  RefPtr<MediaDataDecoder> audioDecoder = new WMFMediaDataDecoder(
    mftManager.forget(), aParams.mTaskQueue, aParams.mCallback);
  return audioDecoder.forget();
}
// Wraps a platform decoder for H.264, keeping the original and the
// currently-active video config so the inner decoder can be recreated when
// in-band parameter sets differ from the container's.
H264Converter::H264Converter(PlatformDecoderModule* aPDM,
                             const CreateDecoderParams& aParams)
  : mPDM(aPDM)
  , mOriginalConfig(aParams.VideoConfig())
  , mCurrentConfig(aParams.VideoConfig())
  , mKnowsCompositor(aParams.mKnowsCompositor)
  , mImageContainer(aParams.mImageContainer)
  , mTaskQueue(aParams.mTaskQueue)
  , mDecoder(nullptr)
  , mGMPCrashHelper(aParams.mCrashHelper)
  , mLastError(NS_OK)
  , mType(aParams.mType)
  , mOnWaitingForKeyEvent(aParams.mOnWaitingForKeyEvent)
  , mDecoderOptions(aParams.mOptions)
  , mRate(aParams.mRate)
{
  // Create the inner decoder immediately; record the result so callers can
  // query the failure reason later.
  mLastError = CreateDecoder(mOriginalConfig, aParams.mDiagnostics);
  if (mDecoder) {
    // Successful creation implies the config carried an SPS.
    MOZ_ASSERT(H264::HasSPS(mOriginalConfig.mExtraData));
    // The video metadata contains out of band SPS/PPS (AVC1) store it.
    mOriginalExtraData = mOriginalConfig.mExtraData;
  }
}
// Builds a Windows Media Foundation video decoder (optionally DXVA-backed).
// Returns nullptr when the underlying MFT fails to initialize.
already_AddRefed<MediaDataDecoder>
WMFDecoderModule::CreateVideoDecoder(const CreateDecoderParams& aParams)
{
  nsAutoPtr<WMFVideoMFTManager> mftManager(
    new WMFVideoMFTManager(aParams.VideoConfig(),
                           aParams.mKnowsCompositor,
                           aParams.mImageContainer,
                           sDXVAEnabled));
  if (!mftManager->Init()) {
    return nullptr;
  }
  RefPtr<MediaDataDecoder> videoDecoder = new WMFMediaDataDecoder(
    mftManager.forget(), aParams.mTaskQueue, aParams.mCallback);
  return videoDecoder.forget();
}
// Builds an Android remote audio decoder. Rejects anything other than
// 16-bit samples and forwards the MediaDrm stub id when a DRM proxy exists.
already_AddRefed<MediaDataDecoder>
AndroidDecoderModule::CreateAudioDecoder(const CreateDecoderParams& aParams)
{
  const AudioInfo& audioInfo = aParams.AudioConfig();
  if (audioInfo.mBitDepth != 16) {
    // We only handle 16-bit audio.
    return nullptr;
  }

  LOG("CreateAudioFormat with mimeType=%s, mRate=%d, channels=%d",
      audioInfo.mMimeType.Data(), audioInfo.mRate, audioInfo.mChannels);

  nsString stubId;
  if (mProxy) {
    stubId = mProxy->GetMediaDrmStubId();
  }

  RefPtr<MediaDataDecoder> remoteDecoder =
    RemoteDataDecoder::CreateAudioDecoder(aParams, stubId, mProxy);
  return remoteDecoder.forget();
}
// Builds an Android remote video decoder. Streams with alpha are declined
// so the caller can fall back to VPXDecoder.
already_AddRefed<MediaDataDecoder>
AndroidDecoderModule::CreateVideoDecoder(const CreateDecoderParams& aParams)
{
  // Temporary - forces use of VPXDecoder when alpha is present.
  // Bug 1263836 will handle alpha scenario once implemented. It will shift
  // the check for alpha to PDMFactory but not itself remove the need for a
  // check.
  if (aParams.VideoConfig().HasAlpha()) {
    return nullptr;
  }

  nsString stubId;
  if (mProxy) {
    stubId = mProxy->GetMediaDrmStubId();
  }

  RefPtr<MediaDataDecoder> remoteDecoder =
    RemoteDataDecoder::CreateVideoDecoder(aParams, stubId, mProxy);
  return remoteDecoder.forget();
}
// Creates a video decoder hosted in the RDD process. The IPDL handshake is
// performed synchronously on the manager thread; a failed handshake must
// also release the child actor on that thread.
already_AddRefed<MediaDataDecoder>
RemoteDecoderModule::CreateVideoDecoder(const CreateDecoderParams& aParams)
{
  LaunchRDDProcessIfNeeded();

  if (!mManagerThread) {
    // No manager thread means the RDD process isn't available.
    return nullptr;
  }

  RefPtr<RemoteVideoDecoderChild> child = new RemoteVideoDecoderChild();
  MediaResult result(NS_OK);
  // We can use child as a ref here because this is a sync dispatch. In
  // the error case for InitIPDL, we can't just let the RefPtr go out of
  // scope at the end of the method because it will release the
  // RemoteVideoDecoderChild on the wrong thread. This will assert in
  // RemoteDecoderChild's destructor. Passing the RefPtr by reference
  // allows us to release the RemoteVideoDecoderChild on the manager
  // thread during this single dispatch.
  RefPtr<Runnable> task =
    NS_NewRunnableFunction("RemoteDecoderModule::CreateVideoDecoder", [&]() {
      result = child->InitIPDL(aParams.VideoConfig(),
                               aParams.mRate.mValue,
                               aParams.mOptions);
      if (NS_FAILED(result)) {
        // Release RemoteVideoDecoderChild here, while we're on
        // manager thread. Don't just let the RefPtr go out of scope.
        child = nullptr;
      }
    });
  SyncRunnable::DispatchToThread(mManagerThread, task);

  if (NS_FAILED(result)) {
    // Propagate the failure reason to the caller when requested.
    if (aParams.mError) {
      *aParams.mError = result;
    }
    return nullptr;
  }

  RefPtr<RemoteMediaDataDecoder> object = new RemoteMediaDataDecoder(
    child, mManagerThread,
    RemoteDecoderManagerChild::GetManagerAbstractThread());
  return object.forget();
}
// Creates a video decoder whose actor lives on the VideoDecoderManagerChild
// thread. IPDL initialization is posted asynchronously.
already_AddRefed<MediaDataDecoder>
RemoteDecoderModule::CreateVideoDecoder(const CreateDecoderParams& aParams)
{
  if (!aParams.mKnowsCompositor) {
    return nullptr;
  }

  MediaDataDecoderCallback* readerCallback = aParams.mCallback;
  MOZ_ASSERT(readerCallback->OnReaderTaskQueue());

  RefPtr<RemoteVideoDecoder> remoteDecoder =
    new RemoteVideoDecoder(readerCallback);

  // Snapshot what InitIPDL needs; the runnable captures by value so the
  // data remains alive until the manager thread runs it.
  VideoInfo videoInfo = aParams.VideoConfig();
  RefPtr<layers::KnowsCompositor> compositorKnowledge =
    aParams.mKnowsCompositor;
  VideoDecoderManagerChild::GetManagerThread()->Dispatch(
    NS_NewRunnableFunction([=]() {
      remoteDecoder->mActor->InitIPDL(readerCallback, videoInfo,
                                      compositorKnowledge);
    }),
    NS_DISPATCH_NORMAL);

  return remoteDecoder.forget();
}
// Builds an Android audio decoder (remote codec or in-process MediaCodec,
// per pref). Only 16-bit audio is supported; anything else is rejected.
// Returns nullptr when the format is unsupported or creation fails.
already_AddRefed<MediaDataDecoder>
AndroidDecoderModule::CreateAudioDecoder(const CreateDecoderParams& aParams)
{
  const AudioInfo& config = aParams.AudioConfig();
  MOZ_ASSERT(config.mBitDepth == 16, "We only handle 16-bit audio!");
  if (config.mBitDepth != 16) {
    // MOZ_ASSERT compiles out in release builds, which previously let an
    // unsupported sample format fall through to decoder creation. Reject it
    // explicitly (matches the behavior of later revisions of this module).
    return nullptr;
  }

  MediaFormat::LocalRef format;
  LOG("CreateAudioFormat with mimeType=%s, mRate=%d, channels=%d",
      config.mMimeType.Data(), config.mRate, config.mChannels);
  NS_ENSURE_SUCCESS(
    MediaFormat::CreateAudioFormat(config.mMimeType,
                                   config.mRate,
                                   config.mChannels,
                                   &format),
    nullptr);

  RefPtr<MediaDataDecoder> decoder =
    MediaPrefs::PDMAndroidRemoteCodecEnabled()
      ? RemoteDataDecoder::CreateAudioDecoder(config, format,
                                              aParams.mCallback)
      : MediaCodecDataDecoder::CreateAudioDecoder(config, format,
                                                  aParams.mCallback);
  return decoder.forget();
}
// Builds an Android video decoder (remote codec or in-process MediaCodec,
// per pref) around a MediaFormat sized to the display dimensions.
already_AddRefed<MediaDataDecoder>
AndroidDecoderModule::CreateVideoDecoder(const CreateDecoderParams& aParams)
{
  const VideoInfo& videoInfo = aParams.VideoConfig();
  MediaFormat::LocalRef videoFormat;
  NS_ENSURE_SUCCESS(
    MediaFormat::CreateVideoFormat(TranslateMimeType(videoInfo.mMimeType),
                                   videoInfo.mDisplay.width,
                                   videoInfo.mDisplay.height,
                                   &videoFormat),
    nullptr);

  RefPtr<MediaDataDecoder> videoDecoder;
  if (MediaPrefs::PDMAndroidRemoteCodecEnabled()) {
    videoDecoder = RemoteDataDecoder::CreateVideoDecoder(
      videoInfo, videoFormat, aParams.mCallback, aParams.mImageContainer);
  } else {
    videoDecoder = MediaCodecDataDecoder::CreateVideoDecoder(
      videoInfo, videoFormat, aParams.mCallback, aParams.mImageContainer);
  }
  return videoDecoder.forget();
}
// Creates a GPU-process video decoder when the pref is set and the
// compositor is remote/accelerated; otherwise delegates to the wrapped
// module. The IPDL handshake runs on the manager thread and is waited on
// synchronously via SynchronousTask.
already_AddRefed<MediaDataDecoder>
RemoteDecoderModule::CreateVideoDecoder(const CreateDecoderParams& aParams)
{
  if (!StaticPrefs::MediaGpuProcessDecoder() || !aParams.mKnowsCompositor ||
      !IsRemoteAcceleratedCompositor(aParams.mKnowsCompositor)) {
    // Remote decoding not applicable; fall back to the wrapped module.
    return mWrapped->CreateVideoDecoder(aParams);
  }

  RefPtr<RemoteVideoDecoder> object = new RemoteVideoDecoder();

  // By-reference captures are safe because task.Wait() below blocks until
  // the runnable has completed on the manager thread.
  SynchronousTask task("InitIPDL");
  MediaResult result(NS_OK);
  VideoDecoderManagerChild::GetManagerThread()->Dispatch(
    NS_NewRunnableFunction(
      "dom::RemoteDecoderModule::CreateVideoDecoder",
      [&]() {
        // AutoCompleteTask signals `task` when this scope exits, even on
        // early return.
        AutoCompleteTask complete(&task);
        result = object->mActor->InitIPDL(
          aParams.VideoConfig(),
          aParams.mRate.mValue,
          aParams.mKnowsCompositor->GetTextureFactoryIdentifier());
      }),
    NS_DISPATCH_NORMAL);
  task.Wait();

  if (NS_FAILED(result)) {
    // Propagate the failure reason to the caller when requested.
    if (aParams.mError) {
      *aParams.mError = result;
    }
    return nullptr;
  }

  return object.forget();
}
// Constructs a WAVE/PCM audio decoder from the creation parameters.
WaveDataDecoder::WaveDataDecoder(const CreateDecoderParams& aParams)
  : mInfo(aParams.AudioConfig())
  , mTaskQueue(aParams.mTaskQueue)
{
}