// Destructor: optionally logs teardown (PLUGIN_LOGGING builds only) and
// breaks the stream listener's back-pointer to this peer so it cannot call
// into us after we are destroyed.
nsPluginStreamListenerPeer::~nsPluginStreamListenerPeer() {
#ifdef PLUGIN_LOGGING
  MOZ_LOG(nsPluginLogging::gPluginLog, PLUGIN_LOG_NORMAL,
          ("nsPluginStreamListenerPeer::dtor this=%p, url=%s\n", this,
           mURLSpec.get()));
#endif

  if (mPStreamListener) {
    // Clear the listener's (non-owning) reference back to this peer to avoid
    // a dangling pointer.
    mPStreamListener->SetStreamListenerPeer(nullptr);
  }
}
// cubeb data callback: fill aBuffer with up to aFrames frames of audio taken
// from mBuffer, zero-padding on underrun.  Returns the number of frames
// "serviced"; returning fewer than aFrames signals cubeb to drain.
// Runs on the audio callback thread; all state is protected by mMonitor.
long AudioStream::DataCallback(void* aBuffer, long aFrames) {
  MonitorAutoLock mon(mMonitor);
  MOZ_ASSERT(mState != SHUTDOWN, "No data callback after shutdown");
  // Clamp the request to the bytes actually buffered.
  uint32_t available =
    std::min(static_cast<uint32_t>(FramesToBytes(aFrames)), mBuffer.Length());
  MOZ_ASSERT(available % mBytesPerFrame == 0, "Must copy complete frames");
  AudioDataValue* output = reinterpret_cast<AudioDataValue*>(aBuffer);
  uint32_t underrunFrames = 0;
  uint32_t servicedFrames = 0;

  // NOTE: wasapi (others?) can call us back *after* stop()/Shutdown() (mState == SHUTDOWN)
  // Bug 996162

  // callback tells us cubeb succeeded initializing
  if (mState == STARTED) {
    mState = RUNNING;
  }

  if (available) {
    // When input and output rates match, copy straight through; otherwise go
    // through the time-stretcher.
    if (mInRate == mOutRate) {
      servicedFrames = GetUnprocessed(output, aFrames);
    } else {
      servicedFrames = GetTimeStretched(output, aFrames);
    }

    MOZ_ASSERT(mBuffer.Length() % mBytesPerFrame == 0,
               "Must copy complete frames");

    // Notify any blocked Write() call that more space is available in mBuffer.
    mon.NotifyAll();
  }

  underrunFrames = aFrames - servicedFrames;

  // Always send audible frames first, and silent frames later.
  // Otherwise it will break the assumption of FrameHistory.
  if (mState != DRAINING) {
    mAudioClock.UpdateFrameHistory(servicedFrames, underrunFrames);
    // Zero-fill the tail of the buffer for the frames we could not service.
    uint8_t* rpos = static_cast<uint8_t*>(aBuffer) +
                    FramesToBytes(aFrames - underrunFrames);
    memset(rpos, 0, FramesToBytes(underrunFrames));
    if (underrunFrames) {
      MOZ_LOG(gAudioStreamLog, LogLevel::Warning,
              ("AudioStream %p lost %d frames", this, underrunFrames));
    }
    // Count the silent padding as serviced so cubeb keeps the stream running.
    servicedFrames += underrunFrames;
  } else {
    // Draining: report only real frames so cubeb can finish the stream.
    mAudioClock.UpdateFrameHistory(servicedFrames, 0);
  }

  WriteDumpFile(mDumpFile, this, aFrames, aBuffer);
  return servicedFrames;
}
// Helper to test the rust parser on a data source. static bool try_rust(const UniquePtr<mp4parse_state, FreeMP4ParseState>& aRustState, RefPtr<Stream> aSource, int32_t* aCount) { static LazyLogModule sLog("MP4Metadata"); int64_t length; if (!aSource->Length(&length) || length <= 0) { MOZ_LOG(sLog, LogLevel::Warning, ("Couldn't get source length")); return false; } MOZ_LOG(sLog, LogLevel::Debug, ("Source length %d bytes\n", (long long int)length)); size_t bytes_read = 0; auto buffer = std::vector<uint8_t>(length); bool rv = aSource->ReadAt(0, buffer.data(), length, &bytes_read); if (!rv || bytes_read != size_t(length)) { MOZ_LOG(sLog, LogLevel::Warning, ("Error copying mp4 data")); return false; } *aCount = mp4parse_read(aRustState.get(), buffer.data(), bytes_read); MOZ_LOG(sLog, LogLevel::Info, ("rust parser found %d tracks", int(*aCount))); return true; }
bool StreamAdaptor::Read(uint8_t* buffer, uintptr_t size, size_t* bytes_read) { if (!mOffset.isValid()) { MOZ_LOG(gMP4MetadataLog, LogLevel::Error, ("Overflow in source stream offset")); return false; } bool rv = mSource->ReadAt(mOffset.value(), buffer, size, bytes_read); if (rv) { mOffset += *bytes_read; } return rv; }
// Push `buf` toward the output sink, flushing any previously buffered data
// first.  Data that cannot be written immediately is re-buffered for the
// next call.  *amountWritten reports how much of the *old buffered* data or
// the new data was actually pushed this time.
NS_IMETHODIMP
nsMimeBaseEmitter::Write(const nsACString &buf, uint32_t *amountWritten) {
  unsigned int written = 0;
  nsresult rv = NS_OK;
  uint32_t needToWrite;

#ifdef DEBUG_BenB
  // If you want to see libmime output...
  // Fix: buf is an nsACString, not a char*; passing it through varargs to
  // printf("%s", ...) is undefined behavior.  Flatten it first.
  printf("%s", PromiseFlatCString(buf).get());
#endif

  MOZ_LOG(gMimeEmitterLogModule, mozilla::LogLevel::Info,
          ("%s", PromiseFlatCString(buf).get()));
  //
  // Make sure that the buffer we are "pushing" into has enough room
  // for the write operation. If not, we have to buffer, return, and get
  // it on the next time through
  //
  *amountWritten = 0;

  needToWrite = mBufferMgr->GetSize();
  // First, handle any old buffer data...
  if (needToWrite > 0) {
    rv = WriteHelper(mBufferMgr->GetBuffer(), &written);
    mTotalWritten += written;
    mBufferMgr->ReduceBuffer(written);
    *amountWritten = written;

    // if we couldn't write all the old data, buffer the new data
    // and return
    if (mBufferMgr->GetSize() > 0) {
      mBufferMgr->IncreaseBuffer(buf);
      return rv;
    }
  }

  // if we get here, we are dealing with new data...try to write
  // and then do the right thing...
  rv = WriteHelper(buf, &written);
  *amountWritten = written;
  mTotalWritten += written;

  if (written < buf.Length()) {
    // Buffer whatever the sink did not accept this round.
    const nsACString &remainder = Substring(buf, written);
    mBufferMgr->IncreaseBuffer(remainder);
  }

  return rv;
}
// Initialize the stream: validate arguments, fill in the cubeb stream
// parameters (rate, output channel count, sample format and — on Android —
// the stream type derived from the audio channel), size the internal buffer
// for one second of audio, and open the cubeb stream.
nsresult AudioStream::Init(int32_t aNumChannels, int32_t aRate,
                           const dom::AudioChannel aAudioChannel) {
  mStartTime = TimeStamp::Now();
  mIsFirst = CubebUtils::GetFirstStream();
  // Bail out when cubeb is unavailable or the caller passed negative values.
  if (!CubebUtils::GetCubebContext() || aNumChannels < 0 || aRate < 0) {
    return NS_ERROR_FAILURE;
  }

  MOZ_LOG(gAudioStreamLog, LogLevel::Debug,
          ("%s channels: %d, rate: %d for %p", __FUNCTION__, aNumChannels,
           aRate, this));
  mInRate = mOutRate = aRate;
  mChannels = aNumChannels;
  // Anything above stereo is downmixed to two output channels.
  mOutChannels = (aNumChannels > 2) ? 2 : aNumChannels;

  mDumpFile = OpenDumpFile(this);

  cubeb_stream_params params;
  params.rate = aRate;
  params.channels = mOutChannels;
#if defined(__ANDROID__)
#if defined(MOZ_B2G)
  mAudioChannel = aAudioChannel;
  params.stream_type = CubebUtils::ConvertChannelToCubebType(aAudioChannel);
#else
  mAudioChannel = dom::AudioChannel::Content;
  params.stream_type = CUBEB_STREAM_TYPE_MUSIC;
#endif

  // CUBEB_STREAM_TYPE_MAX marks an unmapped/invalid channel type.
  if (params.stream_type == CUBEB_STREAM_TYPE_MAX) {
    return NS_ERROR_INVALID_ARG;
  }
#endif
  if (AUDIO_OUTPUT_FORMAT == AUDIO_FORMAT_S16) {
    params.format = CUBEB_SAMPLE_S16NE;
  } else {
    params.format = CUBEB_SAMPLE_FLOAT32NE;
  }
  mBytesPerFrame = sizeof(AudioDataValue) * mOutChannels;

  mAudioClock.Init();

  // Size mBuffer for one second of audio. This value is arbitrary, and was
  // selected based on the observed behaviour of the existing AudioStream
  // implementations.
  uint32_t bufferLimit = FramesToBytes(aRate);
  MOZ_ASSERT(bufferLimit % mBytesPerFrame == 0, "Must buffer complete frames");
  mBuffer.SetCapacity(bufferLimit);

  return OpenCubeb(params);
}
// Read the head of aSource (capped at 1 MiB) into aBuffer for the parser.
// Returns MP4PARSE_OK on success, MP4PARSE_ERROR_IO when the length cannot
// be determined or the data cannot be read.
static int32_t read_source(RefPtr<Stream> aSource,
                           std::vector<uint8_t>& aBuffer) {
  static LazyLogModule sLog("MP4Metadata");
  int64_t length;
  if (!aSource->Length(&length) || length <= 0) {
    MOZ_LOG(sLog, LogLevel::Warning, ("Couldn't get source length"));
    return MP4PARSE_ERROR_IO;
  }
  // Fix: the argument is cast to long long int, so the conversion must be
  // %lld — "%d" with a long long argument is undefined behavior.
  MOZ_LOG(sLog, LogLevel::Debug,
          ("Source length %lld bytes\n", (long long int)length));
  length = std::min<int64_t>(length, 1024 * 1024); // Don't read the entire file.
  aBuffer.resize(length);
  size_t bytes_read = 0;
  bool rv = aSource->ReadAt(0, aBuffer.data(), aBuffer.size(), &bytes_read);
  if (!rv || bytes_read != size_t(length)) {
    MOZ_LOG(sLog, LogLevel::Warning, ("Error copying mp4 data"));
    return MP4PARSE_ERROR_IO;
  }
  return MP4PARSE_OK;
}
void CSFLogV(CSFLogLevel priority, const char* sourceFile, int sourceLine, const char* tag , const char* format, va_list args) { #ifdef STDOUT_LOGGING printf("%s\n:",tag); vprintf(format, args); #else mozilla::LogLevel level = static_cast<mozilla::LogLevel>(priority); GetSignalingLogInfo(); // Skip doing any of this work if we're not logging the indicated level... if (!MOZ_LOG_TEST(gLogModuleInfo,level)) { return; } // Trim the path component from the filename const char *lastSlash = sourceFile; while (*sourceFile) { if (*sourceFile == '/' || *sourceFile == '\\') { lastSlash = sourceFile; } sourceFile++; } sourceFile = lastSlash; if (*sourceFile == '/' || *sourceFile == '\\') { sourceFile++; } #define MAX_MESSAGE_LENGTH 1024 char message[MAX_MESSAGE_LENGTH]; const char *threadName = NULL; // Check if we're the main thread... if (NS_IsMainThread()) { threadName = "main"; } else { threadName = PR_GetThreadName(PR_GetCurrentThread()); } // If we can't find it anywhere, use a blank string if (!threadName) { threadName = ""; } VsprintfLiteral(message, format, args); MOZ_LOG(gLogModuleInfo, level, ("[%s|%s] %s:%d: %s", threadName, tag, sourceFile, sourceLine, message)); #endif }
// Search-hit callback for the purge search: logs the hit, re-checks that the
// message really carries a junk score (the score lives in the message DB,
// see comment below), and queues the header for deletion when the score
// equals nsIJunkMailPlugin::IS_SPAM_SCORE.
NS_IMETHODIMP nsMsgPurgeService::OnSearchHit(nsIMsgDBHdr* aMsgHdr,
                                             nsIMsgFolder *aFolder) {
  NS_ENSURE_ARG_POINTER(aMsgHdr);

  nsCString messageId;
  nsCString author;
  nsCString subject;

  aMsgHdr->GetMessageId(getter_Copies(messageId));
  MOZ_LOG(MsgPurgeLogModule, mozilla::LogLevel::Info,
          ("messageId=%s", messageId.get()));
  aMsgHdr->GetSubject(getter_Copies(subject));
  MOZ_LOG(MsgPurgeLogModule, mozilla::LogLevel::Info,
          ("subject=%s",subject.get()));
  aMsgHdr->GetAuthor(getter_Copies(author));
  MOZ_LOG(MsgPurgeLogModule, mozilla::LogLevel::Info,
          ("author=%s",author.get()));

  // double check that the message is junk before adding to
  // the list of messages to delete
  //
  // note, we can't just search for messages that are junk
  // because not all imap server support keywords
  // (which we use for the junk score)
  // so the junk status would be in the message db.
  //
  // see bug #194090
  nsCString junkScoreStr;
  nsresult rv = aMsgHdr->GetStringProperty("junkscore",
                                           getter_Copies(junkScoreStr));
  NS_ENSURE_SUCCESS(rv,rv);
  MOZ_LOG(MsgPurgeLogModule, mozilla::LogLevel::Info,
          ("junkScore=%s (if empty or != nsIJunkMailPlugin::IS_SPAM_SCORE, don't add to list delete)",
           junkScoreStr.get()));

  // if "junkscore" is not set, don't delete the message
  if (junkScoreStr.IsEmpty())
    return NS_OK;

  if (atoi(junkScoreStr.get()) == nsIJunkMailPlugin::IS_SPAM_SCORE) {
    MOZ_LOG(MsgPurgeLogModule, mozilla::LogLevel::Info,
            ("added message to delete"));
    // Queue only; the actual deletion happens when the search completes.
    return mHdrsToDelete->AppendElement(aMsgHdr);
  }
  return NS_OK;
}
// Start the underlying cubeb stream if it is still INITIALIZED and either
// low-latency playback was requested or a start was previously queued
// (mNeedsStart).  Must be called with mMonitor held.
void AudioStream::CheckForStart() {
  mMonitor.AssertCurrentThreadOwns();
  if (mState == INITIALIZED) {
    // Start the stream right away when low latency has been requested. This means
    // that the DataCallback will feed silence to cubeb, until the first frames
    // are written to this AudioStream. Also start if a start has been queued.
    if (mLatencyRequest == LowLatency || mNeedsStart) {
      StartUnlocked(); // mState = STARTED or ERRORED
      mNeedsStart = false;
      // NOTE(review): logged at Warning level although it reports a normal
      // start — presumably to make starts visible in default logs; confirm.
      MOZ_LOG(gAudioStreamLog, LogLevel::Warning,
              ("Started waiting %s-latency stream",
               mLatencyRequest == LowLatency ? "low" : "high"));
    } else {
      // high latency, not full - OR Pause() was called before we got here
      MOZ_LOG(gAudioStreamLog, LogLevel::Debug,
              ("Not starting waiting %s-latency stream",
               mLatencyRequest == LowLatency ? "low" : "high"));
    }
  }
}
// Run the rust parser over the configured input.  On success the crypto
// (encryption) metadata is refreshed; on failure the parser status is mapped
// to an nsresult (OOM vs. generic metadata error).
nsresult MP4Metadata::Parse() {
  const Mp4parseStatus status = mp4parse_read(mParser.get());
  if (status == MP4PARSE_STATUS_OK) {
    UpdateCrypto();
    return NS_OK;
  }
  MOZ_LOG(gMP4MetadataLog, LogLevel::Debug,
          ("Parse failed, return code %d\n", status));
  return status == MP4PARSE_STATUS_OOM ? NS_ERROR_OUT_OF_MEMORY
                                       : NS_ERROR_DOM_MEDIA_METADATA_ERR;
}
// Wrapper to allow rust to call our read adaptor. static intptr_t read_source(uint8_t* buffer, uintptr_t size, void* userdata) { MOZ_ASSERT(buffer); MOZ_ASSERT(userdata); auto source = reinterpret_cast<StreamAdaptor*>(userdata); size_t bytes_read = 0; bool rv = source->Read(buffer, size, &bytes_read); if (!rv) { MOZ_LOG(gMP4MetadataLog, LogLevel::Warning, ("Error reading source data")); return -1; } return bytes_read; }
// Report whether the "dom.events.testing.asyncClipboard" pref is set.
// A var-cache is registered for the pref the first time this is queried, so
// later calls read the cached boolean directly.
bool Clipboard::IsTestingPrefEnabled() {
  static bool sInitialized = false;
  static bool sTestingEnabled = false;
  if (!sInitialized) {
    sInitialized = true;
    Preferences::AddBoolVarCache(&sTestingEnabled,
                                 "dom.events.testing.asyncClipboard");
  }
  MOZ_LOG(GetClipboardLog(), LogLevel::Debug,
          ("Clipboard, Is testing enabled? %d\n", sTestingEnabled));
  return sTestingEnabled;
}
// Begin a WebAuthn "get assertion" (sign) transaction.  Validates the hash
// lengths, then forwards the request to the platform token manager; the
// async result is routed back through MaybeConfirmSign/MaybeAbortSign with
// telemetry recorded on both paths.
void U2FTokenManager::Sign(PWebAuthnTransactionParent* aTransactionParent,
                           const uint64_t& aTransactionId,
                           const WebAuthnGetAssertionInfo& aTransactionInfo) {
  MOZ_LOG(gU2FTokenManagerLog, LogLevel::Debug, ("U2FAuthSign"));

  // Drop any in-flight transaction before starting this one.
  ClearTransaction();
  mTransactionParent = aTransactionParent;
  mTokenManagerImpl = GetTokenManagerImpl();

  if (!mTokenManagerImpl) {
    AbortTransaction(aTransactionId, NS_ERROR_DOM_NOT_ALLOWED_ERR);
    return;
  }

  // Both the RP ID hash and the client data hash must be SHA-256 digests.
  if ((aTransactionInfo.RpIdHash().Length() != SHA256_LENGTH) ||
      (aTransactionInfo.ClientDataHash().Length() != SHA256_LENGTH)) {
    AbortTransaction(aTransactionId, NS_ERROR_DOM_UNKNOWN_ERR);
    return;
  }

  // Capture the id by value so the lambdas can detect whether this
  // transaction is still the current one when they fire.
  uint64_t tid = mLastTransactionId = aTransactionId;
  mozilla::TimeStamp startTime = mozilla::TimeStamp::Now();

  mTokenManagerImpl->Sign(aTransactionInfo.AllowList(),
                          aTransactionInfo.RpIdHash(),
                          aTransactionInfo.ClientDataHash(),
                          aTransactionInfo.RequireUserVerification(),
                          aTransactionInfo.TimeoutMS())
    ->Then(GetCurrentThreadSerialEventTarget(), __func__,
           [tid, startTime](U2FSignResult&& aResult) {
             // Success: confirm (if still current) and record telemetry.
             U2FTokenManager* mgr = U2FTokenManager::Get();
             mgr->MaybeConfirmSign(tid, aResult);
             Telemetry::ScalarAdd(
               Telemetry::ScalarID::SECURITY_WEBAUTHN_USED,
               NS_LITERAL_STRING("U2FSignFinish"), 1);
             Telemetry::AccumulateTimeDelta(
               Telemetry::WEBAUTHN_GET_ASSERTION_MS, startTime);
           },
           [tid](nsresult rv) {
             // Failure: abort (if still current) and record telemetry.
             MOZ_ASSERT(NS_FAILED(rv));
             U2FTokenManager* mgr = U2FTokenManager::Get();
             mgr->MaybeAbortSign(tid, rv);
             Telemetry::ScalarAdd(
               Telemetry::ScalarID::SECURITY_WEBAUTHN_USED,
               NS_LITERAL_STRING("U2FSignAbort"), 1);
           })
    ->Track(mSignPromise);
}
void OCSPCache::Clear() { MutexAutoLock lock(mMutex); MOZ_LOG(gCertVerifierLog, LogLevel::Debug, ("OCSPCache::Clear: clearing cache")); // First go through and delete the memory being pointed to by the pointers // in the vector. for (Entry** entry = mEntries.begin(); entry < mEntries.end(); entry++) { delete *entry; } // Then remove the pointers themselves. mEntries.clearAndFree(); }
void MediaEngineWebRTCMicrophoneSource::InsertInGraph(const T* aBuffer, size_t aFrames, uint32_t aChannels) { if (mState != kStarted) { return; } if (MOZ_LOG_TEST(AudioLogModule(), LogLevel::Debug)) { mTotalFrames += aFrames; if (mTotalFrames > mLastLogFrames + mSampleFrequency) { // ~ 1 second MOZ_LOG(AudioLogModule(), LogLevel::Debug, ("%p: Inserting %" PRIuSIZE " samples into graph, total frames = %" PRIu64, (void*)this, aFrames, mTotalFrames)); mLastLogFrames = mTotalFrames; } } size_t len = mSources.Length(); for (size_t i = 0; i < len; i++) { if (!mSources[i]) { continue; } RefPtr<SharedBuffer> buffer = SharedBuffer::Create(aFrames * aChannels * sizeof(T)); PodCopy(static_cast<T*>(buffer->Data()), aBuffer, aFrames * aChannels); TimeStamp insertTime; // Make sure we include the stream and the track. // The 0:1 is a flag to note when we've done the final insert for a given input block. LogTime(AsyncLatencyLogger::AudioTrackInsertion, LATENCY_STREAM_ID(mSources[i].get(), mTrackID), (i+1 < len) ? 0 : 1, insertTime); nsAutoPtr<AudioSegment> segment(new AudioSegment()); AutoTArray<const T*, 1> channels; // XXX Bug 971528 - Support stereo capture in gUM MOZ_ASSERT(aChannels == 1, "GraphDriver only supports us stereo audio for now"); channels.AppendElement(static_cast<T*>(buffer->Data())); segment->AppendFrames(buffer.forget(), channels, aFrames, mPrincipalHandles[i]); segment->GetStartTime(insertTime); mSources[i]->AppendToTrack(mTrackID, segment); } }
bool IndiceWrapper::GetIndice(size_t aIndex, Index::Indice& aIndice) const { if (aIndex >= mIndice.length) { MOZ_LOG(gMP4MetadataLog, LogLevel::Error, ("Index overflow in indice")); return false; } const Mp4parseIndice* indice = &mIndice.indices[aIndex]; aIndice.start_offset = indice->start_offset; aIndice.end_offset = indice->end_offset; aIndice.start_composition = indice->start_composition; aIndice.end_composition = indice->end_composition; aIndice.start_decode = indice->start_decode; aIndice.sync = indice->sync; return true; }
// Release the folder semaphore taken for the POP3 download, but only if we
// actually still hold it.  Returns NS_OK when there is no folder to unlock.
nsresult nsPop3Sink::ReleaseFolderLock() {
  nsresult result = NS_OK;
  if (!m_folder)
    return result;
  // Fix: initialize the out-param; if TestSemaphore fails before writing it,
  // the original code read an uninitialized bool below.
  bool haveSemaphore = false;
  nsCOMPtr <nsISupports> supports =
    do_QueryInterface(static_cast<nsIPop3Sink*>(this));
  result = m_folder->TestSemaphore(supports, &haveSemaphore);
  MOZ_LOG(POP3LOGMODULE, mozilla::LogLevel::Debug,
          ("ReleaseFolderLock haveSemaphore = %s",
           haveSemaphore ? "TRUE" : "FALSE"));

  if(NS_SUCCEEDED(result) && haveSemaphore)
    result = m_folder->ReleaseSemaphore(supports);
  return result;
}
// End of the certificate download: schedule the import on the main-thread
// event queue rather than doing it inline.
NS_IMETHODIMP
PSMContentStreamListener::OnStopRequest(nsIRequest* request,
                                        nsISupports* context,
                                        nsresult aStatus) {
  MOZ_LOG(gPIPNSSLog, LogLevel::Debug, ("CertDownloader::OnStopRequest\n"));

  // Because importing the cert can spin the event loop (via alerts), we can't
  // do it here. Do it off the event loop instead.
  nsCOMPtr<nsIRunnable> importTask =
    NewRunnableMethod(this, &PSMContentStreamListener::ImportCertificate);
  MOZ_ALWAYS_SUCCEEDS(NS_DispatchToMainThread(importTask));

  return NS_OK;
}
// Begin a WebAuthn "get assertion" (sign) transaction: show the cancelable
// prompt, then forward the whole request to the platform token manager.  The
// async result is routed back through MaybeConfirmSign/MaybeAbortSign with
// telemetry recorded on both paths.
void U2FTokenManager::Sign(PWebAuthnTransactionParent* aTransactionParent,
                           const uint64_t& aTransactionId,
                           const WebAuthnGetAssertionInfo& aTransactionInfo) {
  MOZ_LOG(gU2FTokenManagerLog, LogLevel::Debug, ("U2FAuthSign"));

  // Drop any in-flight transaction before starting this one.
  ClearTransaction();
  mTransactionParent = aTransactionParent;
  mTokenManagerImpl = GetTokenManagerImpl();

  if (!mTokenManagerImpl) {
    AbortTransaction(aTransactionId, NS_ERROR_DOM_NOT_ALLOWED_ERR);
    return;
  }

  // Show a prompt that lets the user cancel the ongoing transaction.
  NS_ConvertUTF16toUTF8 origin(aTransactionInfo.Origin());
  SendPromptNotification(kSignPromptNotifcation, aTransactionId, origin.get());

  // Capture the id by value so the lambdas can detect whether this
  // transaction is still the current one when they fire.
  uint64_t tid = mLastTransactionId = aTransactionId;
  mozilla::TimeStamp startTime = mozilla::TimeStamp::Now();

  mTokenManagerImpl
    ->Sign(aTransactionInfo)
    ->Then(GetCurrentThreadSerialEventTarget(), __func__,
           [tid, startTime](WebAuthnGetAssertionResult&& aResult) {
             // Success: confirm (if still current) and record telemetry.
             U2FTokenManager* mgr = U2FTokenManager::Get();
             mgr->MaybeConfirmSign(tid, aResult);
             Telemetry::ScalarAdd(
               Telemetry::ScalarID::SECURITY_WEBAUTHN_USED,
               NS_LITERAL_STRING("U2FSignFinish"), 1);
             Telemetry::AccumulateTimeDelta(
               Telemetry::WEBAUTHN_GET_ASSERTION_MS, startTime);
           },
           [tid](nsresult rv) {
             // Failure: abort (if still current) and record telemetry.
             MOZ_ASSERT(NS_FAILED(rv));
             U2FTokenManager* mgr = U2FTokenManager::Get();
             mgr->MaybeAbortSign(tid, rv);
             Telemetry::ScalarAdd(
               Telemetry::ScalarID::SECURITY_WEBAUTHN_USED,
               NS_LITERAL_STRING("U2FSignAbort"), 1);
           })
    ->Track(mSignPromise);
}
// Detach this browsing context from its parent (or group toplevel list),
// remove it from the cache if present, mark it closed, and — unless the
// detach originated from IPC — notify the parent process.
void BrowsingContext::Detach(bool aFromIPC) {
  MOZ_LOG(GetLog(), LogLevel::Debug,
          ("%s: Detaching 0x%08" PRIx64 " from 0x%08" PRIx64,
           XRE_IsParentProcess() ? "Parent" : "Child", Id(),
           mParent ? mParent->Id() : 0));

  // Keep ourselves alive for the duration; removal below may drop the last
  // external reference.
  RefPtr<BrowsingContext> kungFuDeathGrip(this);

  BrowsingContextMap<RefPtr>::Ptr p;
  if (sCachedBrowsingContexts && (p = sCachedBrowsingContexts->lookup(Id()))) {
    // A cached context must not also be linked into a parent/group list.
    MOZ_DIAGNOSTIC_ASSERT(!mParent || !mParent->mChildren.Contains(this));
    MOZ_DIAGNOSTIC_ASSERT(!mGroup || !mGroup->Toplevels().Contains(this));
    sCachedBrowsingContexts->remove(p);
  } else {
    // Not cached: unlink from the parent's child list, or from the group's
    // toplevel list when we have no parent.
    Children* children = nullptr;
    if (mParent) {
      children = &mParent->mChildren;
    } else if (mGroup) {
      children = &mGroup->Toplevels();
    }

    if (children) {
      // TODO(farre): This assert looks extremely fishy, I know, but
      // what we're actually saying is this: if we're detaching, but our
      // parent doesn't have any children, it is because we're being
      // detached by the cycle collector destroying docshells out of
      // order.
      MOZ_DIAGNOSTIC_ASSERT(children->IsEmpty() || children->Contains(this));

      children->RemoveElement(this);
    }
  }

  if (mGroup) {
    mGroup->Unregister(this);
  }

  // As our nsDocShell is going away, this should implicitly mark us as closed.
  // We directly set our member, rather than using a transaction as we're going
  // to send a `Detach` message to other processes either way.
  mClosed = true;

  if (!aFromIPC && XRE_IsContentProcess()) {
    auto cc = ContentChild::GetSingleton();
    MOZ_DIAGNOSTIC_ASSERT(cc);
    cc->SendDetachBrowsingContext(this, false /* aMoveToBFCache */);
  }
}
// Resolve a substituting-scheme URI (e.g. resource://host/path) to its
// underlying spec by joining the path against the substitution registered
// for the host, after rejecting paths that could be misread as absolute
// URIs or UNC paths.
nsresult SubstitutingProtocolHandler::ResolveURI(nsIURI *uri,
                                                 nsACString &result) {
  nsresult rv;

  nsAutoCString host;
  nsAutoCString path;

  rv = uri->GetAsciiHost(host);
  if (NS_FAILED(rv)) return rv;

  rv = uri->GetPath(path);
  if (NS_FAILED(rv)) return rv;

  if (ResolveSpecialCases(host, path, result)) {
    return NS_OK;
  }

  // Unescape the path so we can perform some checks on it.
  nsAutoCString unescapedPath(path);
  NS_UnescapeURL(unescapedPath);

  // Don't misinterpret the filepath as an absolute URI.
  if (unescapedPath.FindChar(':') != -1)
    return NS_ERROR_MALFORMED_URI;

  if (unescapedPath.FindChar('\\') != -1)
    return NS_ERROR_MALFORMED_URI;

  const char *p = path.get() + 1; // path always starts with a slash
  NS_ASSERTION(*(p-1) == '/', "Path did not begin with a slash!");

  // Reject a second leading slash (would parse as protocol-relative).
  if (*p == '/')
    return NS_ERROR_MALFORMED_URI;

  nsCOMPtr<nsIURI> baseURI;
  rv = GetSubstitution(host, getter_AddRefs(baseURI));
  if (NS_FAILED(rv)) return rv;

  // Resolve the path minus its leading slash against the substitution base.
  rv = baseURI->Resolve(nsDependentCString(p, path.Length()-1), result);

  if (MOZ_LOG_TEST(gResLog, LogLevel::Debug)) {
    nsAutoCString spec;
    uri->GetAsciiSpec(spec);
    MOZ_LOG(gResLog, LogLevel::Debug,
            ("%s\n -> %s\n", spec.get(), PromiseFlatCString(result).get()));
  }
  return rv;
}
// Measure the wall-clock time elapsed since the previous iteration, convert
// it to media time, and advance mCurrentTimeStamp for the next call.
MediaTime SystemClockDriver::GetIntervalForIteration() {
  const TimeStamp now = TimeStamp::Now();
  const MediaTime interval =
    mGraphImpl->SecondsToMediaTime((now - mCurrentTimeStamp).ToSeconds());
  mCurrentTimeStamp = now;

  MOZ_LOG(gMediaStreamGraphLog, LogLevel::Verbose,
          ("Updating current time to %f (real %f, StateComputedTime() %f)",
           mGraphImpl->MediaTimeToSeconds(IterationEnd() + interval),
           (now - mInitialTimeStamp).ToSeconds(),
           mGraphImpl->MediaTimeToSeconds(StateComputedTime())));

  return interval;
}
/* static */ bool MediaSource::IsTypeSupported(const GlobalObject& aOwner, const nsAString& aType) { MOZ_ASSERT(NS_IsMainThread()); DecoderDoctorDiagnostics diagnostics; nsresult rv = IsTypeSupported(aType, &diagnostics); nsCOMPtr<nsPIDOMWindowInner> window = do_QueryInterface(aOwner.GetAsSupports()); diagnostics.StoreFormatDiagnostics(window ? window->GetExtantDoc() : nullptr, aType, NS_SUCCEEDED(rv), __func__); MOZ_LOG(GetMediaSourceAPILog(), mozilla::LogLevel::Debug, ("MediaSource::%s: IsTypeSupported(aType=%s) %s", __func__, NS_ConvertUTF16toUTF8(aType).get(), rv == NS_OK ? "OK" : "[not supported]")); return NS_SUCCEEDED(rv); }
/* static */ bool PureOmxPlatformLayer::Init(void) { if (!OmxCoreLibLinker::Link()) { return false; } OMX_ERRORTYPE err = OMX_Init(); if (err != OMX_ErrorNone) { MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, ("PureOmxPlatformLayer::%s: Failed to initialize OMXCore: 0x%08x", __func__, err)); return false; } return true; }
// CreateFrame() is used for both simple and animated images void nsPNGDecoder::CreateFrame(png_uint_32 x_offset, png_uint_32 y_offset, int32_t width, int32_t height, gfx::SurfaceFormat format) { MOZ_ASSERT(HasSize()); if (format == gfx::SurfaceFormat::B8G8R8A8) { PostHasTransparency(); } // Our first full frame is automatically created by the image decoding // infrastructure. Just use it as long as it matches up. nsIntRect neededRect(x_offset, y_offset, width, height); nsRefPtr<imgFrame> currentFrame = GetCurrentFrame(); if (!currentFrame->GetRect().IsEqualEdges(neededRect)) { if (mNumFrames == 0) { // We need padding on the first frame, which means that we don't draw into // part of the image at all. Report that as transparency. PostHasTransparency(); } NeedNewFrame(mNumFrames, x_offset, y_offset, width, height, format); } else if (mNumFrames != 0) { NeedNewFrame(mNumFrames, x_offset, y_offset, width, height, format); } mFrameRect = neededRect; MOZ_LOG(GetPNGDecoderAccountingLog(), LogLevel::Debug, ("PNGDecoderAccounting: nsPNGDecoder::CreateFrame -- created " "image frame with %dx%d pixels in container %p", width, height, &mImage)); #ifdef PNG_APNG_SUPPORTED if (png_get_valid(mPNG, mInfo, PNG_INFO_acTL)) { mAnimInfo = AnimFrameInfo(mPNG, mInfo); if (mAnimInfo.mDispose == DisposalMethod::CLEAR) { // We may have to display the background under this image during // animation playback, so we regard it as transparent. PostHasTransparency(); } } #endif }
nsresult AudioStream::Init(uint32_t aNumChannels, uint32_t aRate, const dom::AudioChannel aAudioChannel) { mStartTime = TimeStamp::Now(); mIsFirst = CubebUtils::GetFirstStream(); if (!CubebUtils::GetCubebContext()) { return NS_ERROR_FAILURE; } MOZ_LOG(gAudioStreamLog, LogLevel::Debug, ("%s channels: %d, rate: %d for %p", __FUNCTION__, aNumChannels, aRate, this)); mInRate = mOutRate = aRate; mChannels = aNumChannels; mOutChannels = (aNumChannels > 2) ? 2 : aNumChannels; mDumpFile = OpenDumpFile(this); cubeb_stream_params params; params.rate = aRate; params.channels = mOutChannels; #if defined(__ANDROID__) #if defined(MOZ_B2G) mAudioChannel = aAudioChannel; params.stream_type = CubebUtils::ConvertChannelToCubebType(aAudioChannel); #else mAudioChannel = dom::AudioChannel::Content; params.stream_type = CUBEB_STREAM_TYPE_MUSIC; #endif if (params.stream_type == CUBEB_STREAM_TYPE_MAX) { return NS_ERROR_INVALID_ARG; } #endif if (AUDIO_OUTPUT_FORMAT == AUDIO_FORMAT_S16) { params.format = CUBEB_SAMPLE_S16NE; } else { params.format = CUBEB_SAMPLE_FLOAT32NE; } mAudioClock.Init(); return OpenCubeb(params); }
// Stream one chunk of the certificate download into mByteData.
NS_IMETHODIMP
PSMContentStreamListener::OnDataAvailable(nsIRequest* request,
                                          nsISupports* context,
                                          nsIInputStream* aIStream,
                                          uint64_t aSourceOffset,
                                          uint32_t aLength) {
  MOZ_LOG(gPIPNSSLog, LogLevel::Debug, ("CertDownloader::OnDataAvailable\n"));

  nsCString incoming;
  nsresult rv = NS_ReadInputStreamToString(aIStream, incoming, aLength);
  if (NS_SUCCEEDED(rv)) {
    // Accumulate until OnStopRequest triggers the actual import.
    mByteData.Append(incoming);
    return NS_OK;
  }
  return rv;
}
void ScreenHelperWin::RefreshScreens() { MOZ_LOG(sScreenLog, LogLevel::Debug, ("Refreshing screens")); AutoTArray<RefPtr<Screen>, 4> screens; HDC hdc = ::CreateDC(L"DISPLAY", nullptr, nullptr, nullptr); NS_ASSERTION(hdc,"CreateDC Failure"); BOOL result = ::EnumDisplayMonitors(hdc, nullptr, (MONITORENUMPROC)CollectMonitors, (LPARAM)&screens); ::DeleteDC(hdc); if (!result) { NS_WARNING("Unable to EnumDisplayMonitors"); } ScreenManager& screenManager = ScreenManager::GetSingleton(); screenManager.Refresh(Move(screens)); }
// Begin a WebAuthn "make credential" (register) transaction.  Registration
// starts immediately unless the RP requested direct attestation, in which
// case the user is prompted first and the transaction info is stashed until
// they respond.
void U2FTokenManager::Register(PWebAuthnTransactionParent* aTransactionParent,
                               const uint64_t& aTransactionId,
                               const WebAuthnMakeCredentialInfo& aTransactionInfo) {
  MOZ_LOG(gU2FTokenManagerLog, LogLevel::Debug, ("U2FAuthRegister"));

  // Drop any in-flight transaction before starting this one.
  ClearTransaction();
  mTransactionParent = aTransactionParent;
  mTokenManagerImpl = GetTokenManagerImpl();

  if (!mTokenManagerImpl) {
    AbortTransaction(aTransactionId, NS_ERROR_DOM_NOT_ALLOWED_ERR);
    return;
  }

  mLastTransactionId = aTransactionId;

  // Determine whether direct attestation was requested.
  bool directAttestationRequested = false;
  if (aTransactionInfo.Extra().type() ==
      WebAuthnMaybeMakeCredentialExtraInfo::TWebAuthnMakeCredentialExtraInfo) {
    const auto& extra =
      aTransactionInfo.Extra().get_WebAuthnMakeCredentialExtraInfo();
    directAttestationRequested = extra.RequestDirectAttestation();
  }

  // Start a register request immediately if direct attestation
  // wasn't requested or the test pref is set.
  if (!directAttestationRequested ||
      U2FPrefManager::Get()->GetAllowDirectAttestationForTesting()) {
    // Force "none" attestation when "direct" attestation wasn't requested.
    DoRegister(aTransactionInfo, !directAttestationRequested);
    return;
  }

  // If the RP request direct attestation, ask the user for permission and
  // store the transaction info until the user proceeds or cancels.
  NS_ConvertUTF16toUTF8 origin(aTransactionInfo.Origin());
  SendPromptNotification(kRegisterDirectPromptNotifcation, aTransactionId,
                         origin.get());

  MOZ_ASSERT(mPendingRegisterInfo.isNothing());
  mPendingRegisterInfo = Some(aTransactionInfo);
}