void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS
        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;

        notifyPosition();
        return;
    }

    int64_t realTimeUs;
    int64_t mediaTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    }

    mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
    bool tooLate = (mVideoLateByUs > 40000);

    if (tooLate) {
        ALOGV("video late by %lld us (%.2f secs)",
              mVideoLateByUs, mVideoLateByUs / 1E6);
    } else if (mFlags & FLAG_REAL_TIME) {
        ALOGV("rendering video at real time %.2f secs", realTimeUs / 1E6);
    } else {
        ALOGV("rendering video at media time %.2f secs", mediaTimeUs / 1E6);
    }

    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    if (!mVideoRenderingStarted) {
        mVideoRenderingStarted = true;
        notifyVideoRenderingStart();
    }

    notifyIfMediaRenderingStarted();

    notifyPosition();
}
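// A minimal standalone sketch of the A/V sync decision made above, for
// illustration only. The names (anchorMediaUs, anchorRealUs, nowUs,
// kLateThresholdUs) are hypothetical stand-ins for the renderer's member
// fields; the real code keeps this state in mAnchorTimeMediaUs /
// mAnchorTimeRealUs and compares against ALooper::GetNowUs().
static bool shouldDropFrame(int64_t mediaTimeUs,
                            int64_t anchorMediaUs, int64_t anchorRealUs,
                            int64_t nowUs) {
    static const int64_t kLateThresholdUs = 40000;  // 40 ms, as in the AOSP code
    // Map the buffer's media time onto the wall clock using the last anchor.
    int64_t realTimeUs = mediaTimeUs - anchorMediaUs + anchorRealUs;
    int64_t lateByUs = nowUs - realTimeUs;
    // Lateness beyond the threshold means the frame missed its presentation
    // time and is posted with "render" = 0 so the decoder releases it unshown.
    return lateByUs > kLateThresholdUs;
}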
void DashPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS
        notifyPosition(true);
        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;
        return;
    }

    int64_t mediaTimeUs;
    CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

    int64_t realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    int64_t nowUs = ALooper::GetNowUs();
    mVideoLateByUs = nowUs - realTimeUs;

    bool tooLate = (mVideoLateByUs > 40000);

    if (tooLate) {
        ALOGV("video late by %lld us (%.2f secs)",
              mVideoLateByUs, mVideoLateByUs / 1E6);
        if (mStats != NULL) {
            mStats->recordLate(realTimeUs, nowUs, mVideoLateByUs, mAnchorTimeRealUs);
        }
    } else {
        ALOGV("rendering video at media time %.2f secs", mediaTimeUs / 1E6);
        if (mStats != NULL) {
            mStats->recordOnTime(realTimeUs, nowUs, mVideoLateByUs);
            mStats->incrementTotalRenderingFrames();
            mStats->logFps();
        }
    }

    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    notifyPosition();
}
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        LOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
              mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS
            notifyEOS(true /* audio */, entry->mFinalResult);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);

            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            uint32_t numFramesPendingPlayout =
                mNumFramesWritten - numFramesPlayed;

            int64_t realTimeOffsetUs =
                (mAudioSink->latency() / 2  /* XXX */
                    + numFramesPendingPlayout
                        * mAudioSink->msecsPerFrame()) * 1000ll;

            // LOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);

            mAnchorTimeRealUs = ALooper::GetNowUs() + realTimeOffsetUs;
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        CHECK_EQ(mAudioSink->write(
                    entry->mBuffer->data() + entry->mOffset, copy),
                 (ssize_t)copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= copy;
        size_t copiedFrames = copy / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;
    }

    notifyPosition();

    return !mAudioQueue.empty();
}
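// Hedged sketch of how the audio-anchored clock above is derived when a new
// buffer starts (entry->mOffset == 0). The helper name and parameters are
// hypothetical; the real code reads latency() and msecsPerFrame() from the
// AudioSink and tracks written frames in mNumFramesWritten.
static int64_t estimateAnchorRealUs(int64_t nowUs,
                                    uint32_t numFramesWritten,
                                    uint32_t numFramesPlayed,
                                    float msecsPerFrame,
                                    uint32_t latencyMs) {
    // Frames already handed to the sink but not yet played out.
    uint32_t pendingFrames = numFramesWritten - numFramesPlayed;
    // The first sample of this buffer will be heard roughly after the pending
    // frames drain, plus (heuristically, as in the "/ 2 XXX" above) half the
    // reported sink latency.
    int64_t realTimeOffsetUs =
        (int64_t)((latencyMs / 2 + pendingFrames * msecsPerFrame) * 1000ll);
    return nowUs + realTimeOffsetUs;
}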
// Data output and erase
void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS
        if (entry->mFinalResult != OK) {
            ALOGE("err %d, Line:%d", entry->mFinalResult, __LINE__);
        }
        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;

#ifndef ANDROID_DEFAULT_CODE
        notifyPosition(false /* video */);
#else
        notifyPosition();
#endif
        ALOGD("video position EOS");
        return;
    }

    int64_t realTimeUs;
#ifndef ANDROID_DEFAULT_CODE
    // mtk80902: shame google.. mediaTimeUs may be used below
    int64_t mediaTimeUs = 0;
#endif
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
#ifdef ANDROID_DEFAULT_CODE
        int64_t mediaTimeUs;
#endif
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    }

    mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
#ifndef ANDROID_DEFAULT_CODE
    bool tooLate = (mVideoLateByUs > 250000);
#else
    bool tooLate = (mVideoLateByUs > 40000);
#endif

    if (mDebugDisableAVsync) {
        tooLate = false;
    }
    if (!mDropvideo) {
        tooLate = false;
        ALOGD("No audio data input");
    }

    if (tooLate) {
        ALOGD("video (%.2f) late by %lld us (%.2f secs)",
              realTimeUs / 1E6, mVideoLateByUs, mVideoLateByUs / 1E6);
    } else {
        ALOGV("rendering video at media time %.2f secs", mediaTimeUs / 1E6);
    }

#ifndef ANDROID_DEFAULT_CODE
    // If performance is not OK, show one frame, then drop one.
    static int32_t SinceLastDropped = 0;
    if (tooLate) {
        if (SinceLastDropped > 0) {
            // drop
            ALOGE("we're late dropping one after %d frames", SinceLastDropped);
            SinceLastDropped = 0;
        } else {
            // do not drop
            tooLate = false;
            SinceLastDropped++;
        }
    } else {
        SinceLastDropped++;
    }

    entry->mNotifyConsumed->setInt64("realtimeus", realTimeUs);
    entry->mNotifyConsumed->setInt64("delaytimeus", -mVideoLateByUs);
    ALOGV("ACodec delay time(%lldus), video media time(%lldus), mAnchorTimeMediaUs(%lldus)",
          -mVideoLateByUs, mediaTimeUs, mAnchorTimeMediaUs);
#endif

    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    if (!mVideoRenderingStarted) {
        mVideoRenderingStarted = true;
        notifyVideoRenderingStart();
    }

    notifyIfMediaRenderingStarted();

#ifndef ANDROID_DEFAULT_CODE
    notifyPosition(false /* video */);
#else
    notifyPosition();
#endif
}
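// Sketch of the vendor "show one, then drop one" policy above, rewritten as a
// small self-contained helper. The struct and member names are hypothetical;
// the real code uses the function-local static counter SinceLastDropped
// inside onDrainVideoQueue().
struct FrameDropThrottle {
    int32_t framesSinceLastDrop = 0;

    // Returns true if this late frame should actually be dropped.
    bool shouldDrop(bool tooLate) {
        if (tooLate && framesSinceLastDrop > 0) {
            // At least one frame was rendered since the last drop: allow a drop.
            framesSinceLastDrop = 0;
            return true;
        }
        // Either on time, or we just dropped one: render this frame.
        framesSinceLastDrop++;
        return false;
    }
};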
// Data output and erase
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
#ifndef ANDROID_DEFAULT_CODE
    // flush mutex
    Mutex::Autolock autoLock(mFlushLock);
#endif
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
#ifndef ANDROID_DEFAULT_CODE
        // mtk80902: ALPS00504270
        // ACodec got an error at the beginning; if we returned false directly
        // here, NuPlayer would never receive the audio renderer EOS.
        ALOGD("audio sink get position false.");
        if (!mAudioQueue.empty()) {
            QueueEntry *entry = &*mAudioQueue.begin();
            if (entry->mBuffer == NULL) {
                // EOS
                if (entry->mFinalResult != OK) {
                    ALOGE("err %d, Line:%d", entry->mFinalResult, __LINE__);
                }
                notifyEOS(true /* audio */, entry->mFinalResult);

                mAudioQueue.erase(mAudioQueue.begin());
                entry = NULL;
                mNeedNewAudioAnchorTime = false;
                mHasAudio = false;
                ALOGD("audio position EOS at the beginning.");
            }
        }
#endif
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#ifndef ANDROID_DEFAULT_CODE
    if (numFramesPlayed > mNumFramesWritten)
        ALOGW("numFramesPlayed(%d) > mNumFramesWritten(%d), no reset @ onDrainAudioQueue",
              numFramesPlayed, mNumFramesWritten);
    if (numFramesAvailableToWrite > mAudioSink->frameCount() * 3 / 4 ||
            mAudioQueue.size() <= 3)
        ALOGW("numFramesAvailableToWrite(%d), mAudioSink frameCount(%d), mAudioQueue size(%d), "
              "numFramesPlayed(%d), mNumFramesWritten(%d)",
              numFramesAvailableToWrite, mAudioSink->frameCount(), mAudioQueue.size(),
              numFramesPlayed, mNumFramesWritten);
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS
            if (entry->mFinalResult != OK) {
                ALOGE("err %d, Line:%d", entry->mFinalResult, __LINE__);
            }
            notifyEOS(true /* audio */, entry->mFinalResult);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
#ifndef ANDROID_DEFAULT_CODE
            mNeedNewAudioAnchorTime = false;
            // mtk80902: I can't find out who set the last damn timeUs to 0..
            mHasAudio = false;
            ALOGD("audio position EOS");
#endif
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            int64_t TimeMin = mediaTimeUs / (1000000ll * 60ll);
            int64_t TimeSec = (mediaTimeUs / 1000000ll) % 60;
            ALOGV("rendering audio at media time %.2f secs.(%lld m: %lld s)",
                  mediaTimeUs / 1E6, TimeMin, TimeSec);
#if DUMP_PROFILE
            dumpProfile("render", mediaTimeUs);
#endif

#ifndef ANDROID_DEFAULT_CODE
            // mtk80902: for ALPS00456468 & to avoid ALPS00457437 CTS fail
            if (entry->mBuffer->size() != 0) {
                mNeedNewAudioAnchorTime = false;
                mAnchorTimeMediaUs = mediaTimeUs;
            }
#else
            mAnchorTimeMediaUs = mediaTimeUs;
#endif

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

#ifndef ANDROID_DEFAULT_CODE
            // after flushing for several secs, numFramesPlayed is not zero??
            if (numFramesPlayed > mNumFramesWritten)
                mNumFramesWritten = numFramesPlayed;
#endif

            uint32_t numFramesPendingPlayout =
                mNumFramesWritten - numFramesPlayed;

#ifndef ANDROID_DEFAULT_CODE
            int64_t realTimeOffsetUs =
                (numFramesPendingPlayout * mAudioSink->msecsPerFrame()) * 1000ll;
#else
            int64_t realTimeOffsetUs =
                (mAudioSink->latency() / 2  /* XXX */
                    + numFramesPendingPlayout
                        * mAudioSink->msecsPerFrame()) * 1000ll;
#endif
            {
                int64_t TimeMin = realTimeOffsetUs / (1000000ll * 60ll);
                int64_t TimeSec = (realTimeOffsetUs / 1000000ll) % 60;
                // ALOGI("realTimeOffsetUs = %lld us.(%lld m: %lld s)",
                //       realTimeOffsetUs, TimeMin, TimeSec);
            }

#ifndef ANDROID_DEFAULT_CODE
            // mtk80902: for ALPS00456468 & to avoid ALPS00457437 CTS fail
            if (entry->mBuffer->size() != 0)
#endif
                mAnchorTimeRealUs = ALooper::GetNowUs() + realTimeOffsetUs;
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

#ifndef ANDROID_DEFAULT_CODE
        if ((ssize_t)copy != mAudioSink->write(
                entry->mBuffer->data() + entry->mOffset, copy)) {
            ALOGE("NuPlayer::Renderer::onDrainAudioQueue audio sink write maybe fail");
            break;
        }
#else
        CHECK_EQ(mAudioSink->write(
                    entry->mBuffer->data() + entry->mOffset, copy),
                 (ssize_t)copy);
#endif

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= copy;
        size_t copiedFrames = copy / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();
    }

#ifndef ANDROID_DEFAULT_CODE
    notifyPosition(true /* audio */);
#else
    notifyPosition();
#endif

    return !mAudioQueue.empty();
}
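// Hedged sketch contrasting the two write strategies above: the AOSP branch
// asserts that AudioSink::write() consumed everything (CHECK_EQ), while the
// vendor branch logs, stops draining, and retries from the saved offset on the
// next pass. writeToSink() is a hypothetical stand-in for mAudioSink->write().
static bool writeChunkOrDefer(const uint8_t *data, size_t &offset, size_t size,
                              ssize_t (*writeToSink)(const void *, size_t)) {
    size_t copy = size - offset;
    ssize_t written = writeToSink(data + offset, copy);
    if (written != (ssize_t)copy) {
        ALOGE("audio sink write maybe fail (%zd / %zu)", written, copy);
        return false;   // caller breaks out of the drain loop and retries later
    }
    offset += copy;     // whole chunk consumed; caller may pop the queue entry
    return true;
}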