// Handles a flush request for one stream. msg must carry an int32 "audio"
// field selecting which queue (audio vs. video) to flush. Clears the queue,
// resets the corresponding drain state, and posts the flush-complete notify.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    syncQueuesDone();

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
#ifndef ANDROID_DEFAULT_CODE
        //@debug
        dumpQueue(&mAudioQueue, audio);
        mHasAudio = false;
#endif
        flushQueue(&mAudioQueue);
#ifndef ANDROID_DEFAULT_CODE
        // Also flush data already handed to the sink so stale audio is not
        // rendered after the flush; the written-frame counter is reset to
        // keep latency accounting consistent, then the sink is restarted.
        mAudioSink->pause();
        mAudioSink->flush();
        mNumFramesWritten = 0;
        mAudioSink->start();
#endif
        // Held for the remainder of this branch; guards mFlushingAudio.
        Mutex::Autolock autoLock(mFlushLock);
        mFlushingAudio = false;

        mDrainAudioQueuePending = false;
        // Bump the generation so any already-posted drain messages for the
        // old queue contents are ignored when they arrive.
        ++mAudioQueueGeneration;

        prepareForMediaRenderingStart();
    } else {
#ifndef ANDROID_DEFAULT_CODE
        //@debug
        dumpQueue(&mVideoQueue, audio);
        mHasVideo = false;
#endif
        flushQueue(&mVideoQueue);

        // Held for the remainder of this branch; guards mFlushingVideo.
        Mutex::Autolock autoLock(mFlushLock);
        mFlushingVideo = false;

        mDrainVideoQueuePending = false;
        // Invalidate in-flight video drain messages, as above for audio.
        ++mVideoQueueGeneration;

        prepareForMediaRenderingStart();
    }

    notifyFlushComplete(audio);
}
void NuPlayer::Renderer::onPause() { CHECK(!mPaused); mDrainAudioQueuePending = false; ++mAudioQueueGeneration; mDrainVideoQueuePending = false; ++mVideoQueueGeneration; prepareForMediaRenderingStart(); if (mHasAudio) { mAudioSink->pause(); } //for video only stream, reset mAnchorTimeMediaUs on stream's pause scenario if (mHasVideo && !mHasAudio) mAnchorTimeMediaUs = -1; ALOGV("now paused audio queue has %d entries, video has %d entries", mAudioQueue.size(), mVideoQueue.size()); mPaused = true; }
// Posts a kWhatPosition notification with the current playback position,
// extrapolated from the (mAnchorTimeMediaUs, mAnchorTimeRealUs) anchor pair,
// rate-limited to one update per kMinPositionUpdateDelayUs.
// NOTE(review): the #ifndef ANDROID_DEFAULT_CODE that selects between the
// (bool audio) and () signatures opens before this chunk — only the #else
// and #endif are visible here.
void NuPlayer::Renderer::notifyPosition(bool audio) {
#else
void NuPlayer::Renderer::notifyPosition() {
#endif
    // No anchor established yet — position cannot be derived.
    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
        return;
    }

#ifndef ANDROID_DEFAULT_CODE
    // only use audio position when there is audio
    if((mHasAudio && !audio) || (!mHasAudio && audio)) {
        return;
    }
    // Audio anchor is stale (e.g. after resume); wait for a fresh one
    // before reporting a position based on it.
    if (mNeedNewAudioAnchorTime && mHasAudio) {
        ALOGW("need new audio anchor time for position");
        return;
    }
#endif
    int64_t nowUs = ALooper::GetNowUs();

    // Throttle: skip if the last update was posted too recently.
    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    // Extrapolate: anchor media time plus real time elapsed since anchoring.
    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}

// Pauses rendering: cancels pending queue drains, invalidates in-flight
// drain messages via the generation counters, and pauses the audio sink.
// The non-default build tolerates redundant pauses; the default build
// aborts on them via CHECK.
void NuPlayer::Renderer::onPause() {
#ifndef ANDROID_DEFAULT_CODE
    if (mPaused) {
        ALOGD("NuPlayer::Renderer::onPause already paused");
        return;
    }
#else
    CHECK(!mPaused);
#endif
    // Cancel pending drains; bump generations so stale drain messages
    // already in flight are ignored on arrival.
    mDrainAudioQueuePending = false;
    ++mAudioQueueGeneration;

    mDrainVideoQueuePending = false;
    ++mVideoQueueGeneration;

    prepareForMediaRenderingStart();

    if (mHasAudio) {
        mAudioSink->pause();
        ALOGD("NuPlayer::Renderer::onPause mAudioSink NULL Line %d\n",__LINE__);
    } else {
        ALOGD("NuPlayer::Renderer::onPause no audio Line %d\n",__LINE__);
    }

    ALOGV("now paused audio queue has %d entries, video has %d entries",
          mAudioQueue.size(), mVideoQueue.size());

    mPaused = true;
}

// Resumes rendering after onPause(): restarts the audio sink, resets the
// timing anchor as appropriate for the stream type, and reschedules drains
// for any data still queued.
void NuPlayer::Renderer::onResume() {
    // Redundant resume — nothing to do.
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        mAudioSink->start();
    }

    mPaused = false;

#ifndef ANDROID_DEFAULT_CODE
    // mtk80902: ALPS00445484 - this should be renderer's bug
    // pure video's timestamp should be re-anchored after
    // renderer pause/play
    if (!mHasAudio) {
        mAnchorTimeMediaUs = -1;
        mAnchorTimeRealUs = -1;
    }
    // With audio present, request a fresh audio anchor instead of clearing.
    if (mHasAudio) {
        mNeedNewAudioAnchorTime = true;
    }
#endif
    mDropvideo = false;
    ALOGD("onResume");

    // Restart draining of whatever is still queued.
    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

}  // namespace android