// Create an audio decoder for encrypted content. If a GMP plugin claims
// the MIME type, hand decoding (and, by assumption, decryption) to a
// proxied GMP decoder; otherwise wrap the regular platform decoder in an
// EMEDecryptor that decrypts samples through the CDM proxy first.
// Returns nullptr when the underlying PDM cannot create a decoder.
already_AddRefed<MediaDataDecoder>
EMEDecoderModule::CreateAudioDecoder(const CreateDecoderParams& aParams)
{
  MOZ_ASSERT(aParams.mConfig.mCrypto.mValid);

  if (SupportsMimeType(aParams.mConfig.mMimeType, nullptr)) {
    // GMP decodes. Assume that means it can decrypt too.
    RefPtr<MediaDataDecoderProxy> proxyDecoder =
      CreateDecoderWrapper(aParams.mCallback, mProxy, aParams.mTaskQueue);
    auto params = GMPAudioDecoderParams(aParams).WithCallback(proxyDecoder);
    proxyDecoder->SetProxyTarget(new EMEAudioDecoder(mProxy, params));
    return proxyDecoder.forget();
  }

  MOZ_ASSERT(mPDM);
  RefPtr<MediaDataDecoder> innerDecoder = mPDM->CreateDecoder(aParams);
  if (!innerDecoder) {
    return nullptr;
  }

  // Interpose a decryptor between the caller and the plain decoder.
  RefPtr<MediaDataDecoder> decryptor =
    new EMEDecryptor(innerDecoder, aParams.mCallback, mProxy,
                     AbstractThread::GetCurrent()->AsTaskQueue());
  return decryptor.forget();
}
bool GMPDecoderModule::SupportsMimeType(const nsACString& aMimeType, DecoderDoctorDiagnostics* aDiagnostics) const { const Maybe<nsCString> preferredGMP = PreferredGMP(aMimeType); bool rv = SupportsMimeType(aMimeType, preferredGMP); if (rv && aDiagnostics && preferredGMP.isSome()) { aDiagnostics->SetGMP(preferredGMP.value()); } return rv; }
bool Model::IsDropTarget(const Model* forDocument, bool traverse) const { switch (CanHandleDrops()) { case kCanHandle: return true; case kCannotHandle: return false; default: break; } if (forDocument == NULL) return true; if (traverse) { BEntry entry(forDocument->EntryRef(), true); if (entry.InitCheck() != B_OK) return false; BFile file(&entry, O_RDONLY); BNodeInfo mime(&file); if (mime.InitCheck() != B_OK) return false; char mimeType[B_MIME_TYPE_LENGTH]; mime.GetType(mimeType); return SupportsMimeType(mimeType, 0) != kDoesNotSupportType; } // do some mime-based matching const char* documentMimeType = forDocument->MimeType(); if (documentMimeType == NULL) return false; return SupportsMimeType(documentMimeType, 0) != kDoesNotSupportType; }
// Factory: create a decoder matching aConfig. In-tree Vorbis/Opus/VPX
// decoders are preferred only when this module does not itself support
// the MIME type; H.264 always goes through H264Converter. Everything
// else is delegated to the subclass's CreateAudioDecoder() /
// CreateVideoDecoder(). Returns nullptr for configs that are neither
// audio nor video.
already_AddRefed<MediaDataDecoder>
PlatformDecoderModule::CreateDecoder(const TrackInfo& aConfig,
                                     FlushableTaskQueue* aTaskQueue,
                                     MediaDataDecoderCallback* aCallback,
                                     layers::LayersBackend aLayersBackend,
                                     layers::ImageContainer* aImageContainer)
{
  const bool platformSupported = SupportsMimeType(aConfig.mMimeType);
  nsRefPtr<MediaDataDecoder> decoder;

  if (aConfig.GetAsAudioInfo()) {
    if (!platformSupported && VorbisDataDecoder::IsVorbis(aConfig.mMimeType)) {
      decoder = new VorbisDataDecoder(*aConfig.GetAsAudioInfo(),
                                      aTaskQueue, aCallback);
    } else if (!platformSupported && OpusDataDecoder::IsOpus(aConfig.mMimeType)) {
      decoder = new OpusDataDecoder(*aConfig.GetAsAudioInfo(),
                                    aTaskQueue, aCallback);
    } else {
      decoder = CreateAudioDecoder(*aConfig.GetAsAudioInfo(),
                                   aTaskQueue, aCallback);
    }
    return decoder.forget();
  }

  if (!aConfig.GetAsVideoInfo()) {
    // Neither audio nor video: nothing we can build.
    return nullptr;
  }

  if (H264Converter::IsH264(aConfig)) {
    decoder = new H264Converter(this, *aConfig.GetAsVideoInfo(),
                                aLayersBackend, aImageContainer,
                                aTaskQueue, aCallback);
  } else if (!platformSupported && VPXDecoder::IsVPX(aConfig.mMimeType)) {
    decoder = new VPXDecoder(*aConfig.GetAsVideoInfo(), aImageContainer,
                             aTaskQueue, aCallback);
  } else {
    decoder = CreateVideoDecoder(*aConfig.GetAsVideoInfo(), aLayersBackend,
                                 aImageContainer, aTaskQueue, aCallback);
  }
  return decoder.forget();
}
// Decide whether this model accepts a drop described by a list of MIME
// type strings. A definitive answer from CanHandleDrops() wins; otherwise
// fall back to MIME matching against the list.
bool
Model::IsDropTargetForList(const BObjectList<BString>* list) const
{
	switch (CanHandleDrops()) {
		case kCannotHandle:
			return false;

		case kCanHandle:
			return true;

		default:
			// No definitive answer; match on MIME types below.
			break;
	}

	return SupportsMimeType(0, list) != kDoesNotSupportType;
}
// Factory: create a decoder matching aConfig.
// Audio: in-tree Vorbis/Opus decoders are used only when this module does
// not itself support the MIME type; otherwise the subclass's
// CreateAudioDecoder() is called. Video: H.264 goes through H264Converter
// (kept only if its last error indicates success or merely insufficient
// AVCC data), VPX through VPXDecoder (again only without platform
// support), everything else through CreateVideoDecoder().
// Returns nullptr when no decoder could be created.
already_AddRefed<MediaDataDecoder>
PlatformDecoderModule::CreateDecoder(const TrackInfo& aConfig,
                                     FlushableTaskQueue* aTaskQueue,
                                     MediaDataDecoderCallback* aCallback,
                                     layers::LayersBackend aLayersBackend,
                                     layers::ImageContainer* aImageContainer)
{
  RefPtr<MediaDataDecoder> m;

  // True when this module claims support for the MIME type itself.
  bool hasPlatformDecoder = SupportsMimeType(aConfig.mMimeType);

  if (aConfig.GetAsAudioInfo()) {
    if (!hasPlatformDecoder && VorbisDataDecoder::IsVorbis(aConfig.mMimeType)) {
      m = new VorbisDataDecoder(*aConfig.GetAsAudioInfo(),
                                aTaskQueue,
                                aCallback);
    } else if (!hasPlatformDecoder && OpusDataDecoder::IsOpus(aConfig.mMimeType)) {
      m = new OpusDataDecoder(*aConfig.GetAsAudioInfo(),
                              aTaskQueue,
                              aCallback);
    } else {
      m = CreateAudioDecoder(*aConfig.GetAsAudioInfo(),
                             aTaskQueue,
                             aCallback);
    }
    return m.forget();
  }

  if (!aConfig.GetAsVideoInfo()) {
    // Neither audio nor video config: nothing to create.
    return nullptr;
  }

  // When the fuzzing pref is set, interpose a wrapper around the callback
  // (configured via SetVideoOutputMinimumInterval /
  // SetDontDelayInputExhausted); `callback` then points at the wrapper.
  MediaDataDecoderCallback* callback = aCallback;
  RefPtr<DecoderCallbackFuzzingWrapper> callbackWrapper;
  if (sEnableFuzzingWrapper) {
    callbackWrapper = new DecoderCallbackFuzzingWrapper(aCallback);
    callbackWrapper->SetVideoOutputMinimumInterval(
      TimeDuration::FromMilliseconds(sVideoOutputMinimumInterval_ms));
    callbackWrapper->SetDontDelayInputExhausted(sDontDelayInputExhausted);
    callback = callbackWrapper.get();
  }

  if (H264Converter::IsH264(aConfig)) {
    RefPtr<H264Converter> h = new H264Converter(this,
                                                *aConfig.GetAsVideoInfo(),
                                                aLayersBackend,
                                                aImageContainer,
                                                aTaskQueue,
                                                callback);
    const nsresult rv = h->GetLastError();
    if (NS_SUCCEEDED(rv) || rv == NS_ERROR_NOT_INITIALIZED) {
      // The H264Converter either successfully created the wrapped decoder,
      // or there wasn't enough AVCC data to do so. Otherwise, there was some
      // problem, for example WMF DLLs were missing.
      m = h.forget();
    }
  } else if (!hasPlatformDecoder && VPXDecoder::IsVPX(aConfig.mMimeType)) {
    m = new VPXDecoder(*aConfig.GetAsVideoInfo(),
                       aImageContainer,
                       aTaskQueue,
                       callback);
  } else {
    m = CreateVideoDecoder(*aConfig.GetAsVideoInfo(),
                           aLayersBackend,
                           aImageContainer,
                           aTaskQueue,
                           callback);
  }

  if (callbackWrapper && m) {
    // Pair the callback-side fuzzing wrapper with a decoder-side wrapper
    // so both ends of the decoder pipeline go through the fuzzer.
    m = new DecoderFuzzingWrapper(m.forget(), callbackWrapper.forget());
  }

  return m.forget();
}
// Factory: create a decoder matching aConfig. In-tree Vorbis/Opus/VPX
// decoders are preferred only when this module does not itself support
// the MIME type; H.264 always goes through H264Converter. When the
// fuzzing pref is set, both the callback and the resulting video decoder
// are wrapped by the fuzzing wrappers. Returns nullptr for configs that
// are neither audio nor video.
already_AddRefed<MediaDataDecoder>
PlatformDecoderModule::CreateDecoder(const TrackInfo& aConfig,
                                     FlushableTaskQueue* aTaskQueue,
                                     MediaDataDecoderCallback* aCallback,
                                     layers::LayersBackend aLayersBackend,
                                     layers::ImageContainer* aImageContainer)
{
  const bool havePlatformDecoder = SupportsMimeType(aConfig.mMimeType);
  nsRefPtr<MediaDataDecoder> decoder;

  if (aConfig.GetAsAudioInfo()) {
    if (!havePlatformDecoder && VorbisDataDecoder::IsVorbis(aConfig.mMimeType)) {
      decoder = new VorbisDataDecoder(*aConfig.GetAsAudioInfo(),
                                      aTaskQueue, aCallback);
    } else if (!havePlatformDecoder && OpusDataDecoder::IsOpus(aConfig.mMimeType)) {
      decoder = new OpusDataDecoder(*aConfig.GetAsAudioInfo(),
                                    aTaskQueue, aCallback);
    } else {
      decoder = CreateAudioDecoder(*aConfig.GetAsAudioInfo(),
                                   aTaskQueue, aCallback);
    }
    return decoder.forget();
  }

  if (!aConfig.GetAsVideoInfo()) {
    // Neither audio nor video: nothing we can build.
    return nullptr;
  }

  // Optionally interpose the fuzzing wrapper around the caller's callback.
  MediaDataDecoderCallback* videoCallback = aCallback;
  nsRefPtr<DecoderCallbackFuzzingWrapper> fuzzingWrapper;
  if (sEnableFuzzingWrapper) {
    fuzzingWrapper = new DecoderCallbackFuzzingWrapper(aCallback);
    fuzzingWrapper->SetVideoOutputMinimumInterval(
      TimeDuration::FromMilliseconds(sVideoOutputMinimumInterval_ms));
    fuzzingWrapper->SetDontDelayInputExhausted(sDontDelayInputExhausted);
    videoCallback = fuzzingWrapper.get();
  }

  if (H264Converter::IsH264(aConfig)) {
    decoder = new H264Converter(this, *aConfig.GetAsVideoInfo(),
                                aLayersBackend, aImageContainer,
                                aTaskQueue, videoCallback);
  } else if (!havePlatformDecoder && VPXDecoder::IsVPX(aConfig.mMimeType)) {
    decoder = new VPXDecoder(*aConfig.GetAsVideoInfo(), aImageContainer,
                             aTaskQueue, videoCallback);
  } else {
    decoder = CreateVideoDecoder(*aConfig.GetAsVideoInfo(), aLayersBackend,
                                 aImageContainer, aTaskQueue, videoCallback);
  }

  if (fuzzingWrapper && decoder) {
    // Wrap the decoder itself so both ends go through the fuzzer.
    decoder = new DecoderFuzzingWrapper(decoder.forget(),
                                        fuzzingWrapper.forget());
  }
  return decoder.forget();
}
bool GMPDecoderModule::SupportsMimeType(const nsACString& aMimeType) { return SupportsMimeType(aMimeType, PreferredGMP(aMimeType)); }