Example #1
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                postDrainAudioQueue(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}
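A note on the dispatch above: each drain message carries the queue's generation counter as it was when the message was posted, and a flush or pause increments that counter, so stale drains fall out at the `generation != mAudioQueueGeneration` check. Below is a minimal standalone sketch of that pattern (plain C++ with hypothetical names, not AOSP code):

// Minimal sketch of the generation-counter pattern (hypothetical names).
#include <cstdio>

struct DrainMsg { int generation; };   // stands in for an AMessage

struct Renderer {
    int mAudioQueueGeneration = 0;

    // Posting a drain request tags it with the current generation.
    DrainMsg postDrainAudioQueue() const { return DrainMsg{mAudioQueueGeneration}; }

    // A flush (or pause) bumps the generation, invalidating in-flight drains.
    void flush() { ++mAudioQueueGeneration; }

    void onDrainAudioQueue(const DrainMsg &msg) {
        if (msg.generation != mAudioQueueGeneration) {
            std::printf("stale drain ignored\n");   // mirrors the `break` above
            return;
        }
        std::printf("draining audio queue\n");
    }
};

int main() {
    Renderer r;
    DrainMsg m = r.postDrainAudioQueue();
    r.flush();               // e.g. a seek flushed the queues
    r.onDrainAudioQueue(m);  // ignored: generation mismatch
}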
Example #2
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<RefBase> obj;
    CHECK(msg->findObject("buffer", &obj));
    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts more than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone();
}
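The tail of onQueueBuffer implements the startup alignment rule: while mSyncQueues is set and both queues have data, audio whose first timestamp leads video by more than 0.1 s is consumed without rendering before the queues are declared synced. A standalone sketch of that rule, assuming both head buffers carry a "timeUs" timestamp:

// Standalone sketch of the startup alignment check (not AOSP code).
#include <cstdint>
#include <cstdio>

// True if the leading audio buffer should be dropped: audio starts more
// than 0.1 s before video, so rendering it would play audio-only first.
bool shouldDropLeadingAudio(int64_t firstAudioTimeUs, int64_t firstVideoTimeUs) {
    return firstVideoTimeUs - firstAudioTimeUs > 100000ll;
}

int main() {
    // video at 1.00 s, audio at 0.85 s: audio leads by 150 ms -> drop it
    std::printf("%d\n", shouldDropLeadingAudio(850000, 1000000));   // 1
    // audio leads by only 50 ms -> keep it and declare the queues synced
    std::printf("%d\n", shouldDropLeadingAudio(950000, 1000000));   // 0
}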
Example #3
void DashPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        mAudioQueue.push_back(entry);
        int64_t audioTimeUs;
        (buffer->meta())->findInt64("timeUs", &audioTimeUs);
        if ((mHasVideo && mIsFirstVideoframeReceived) || !mHasVideo) {
            postDrainAudioQueue();
            return;
        } else {
            // Hold audio until the first video frame has been decoded.
            mPendingPostAudioDrains = true;
            DPR_MSG_HIGH("Not rendering audio sample with TS: %lld as video frame is not decoded", audioTimeUs);
        }
    } else {
        mVideoQueue.push_back(entry);
        int64_t videoTimeUs;
        (buffer->meta())->findInt64("timeUs", &videoTimeUs);
        if (!mIsFirstVideoframeReceived) {
            mIsFirstVideoframeReceived = true;
            DPR_MSG_HIGH("Received first video Sample with TS: %lld", videoTimeUs);
            if (mPendingPostAudioDrains) {
                mPendingPostAudioDrains = false;
                postDrainAudioQueue();
            }
        }
        postDrainVideoQueue();
    }

    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    DPR_MSG_LOW("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts more than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone();
}
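What distinguishes this DashPlayer variant is the gate on audio rendering: audio drains are deferred (mPendingPostAudioDrains) until the first video frame has been received, then released from the video path. A minimal standalone sketch of that gate, with hypothetical names (not the DashPlayer API):

// Standalone sketch of the first-video-frame gate (hypothetical names).
#include <cstdio>

struct AudioGate {
    bool hasVideo = false;
    bool firstVideoFrameReceived = false;
    bool pendingAudioDrain = false;

    // Called when an audio buffer is queued.
    void onAudioQueued() {
        if (!hasVideo || firstVideoFrameReceived) {
            std::printf("drain audio now\n");
        } else {
            pendingAudioDrain = true;   // hold audio until video catches up
            std::printf("holding audio\n");
        }
    }

    // Called when a video buffer is queued.
    void onVideoQueued() {
        if (!firstVideoFrameReceived) {
            firstVideoFrameReceived = true;
            if (pendingAudioDrain) {
                pendingAudioDrain = false;
                std::printf("drain held audio\n");
            }
        }
    }
};

int main() {
    AudioGate g;
    g.hasVideo = true;
    g.onAudioQueued();  // holding audio
    g.onVideoQueued();  // drain held audio
}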
Example #4
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        mDropvideo = true;
        ALOGD("audio data input");
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts more than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }
#ifndef ANDROID_DEFAULT_CODE
    if (diff < -100000ll) {
        // Video data starts more than 0.1 secs before audio.
        // Drop some video.
        ALOGE("before playback, video is earlier than audio, drop diff = %.2f", diff / 1E6);
        (*mVideoQueue.begin()).mNotifyConsumed->post();
        mVideoQueue.erase(mVideoQueue.begin());
        return;
    }
#endif

    syncQueuesDone();
}
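The vendor block under #ifndef ANDROID_DEFAULT_CODE makes the startup alignment symmetric: early video is dropped just as early audio is. As a sketch, the combined decision reduces to a three-way check (standalone code, not AOSP):

// Sketch of the symmetric startup rule (not AOSP code).
#include <cstdint>
#include <cstdio>

enum class Drop { None, Audio, Video };

Drop startupDrop(int64_t firstAudioTimeUs, int64_t firstVideoTimeUs) {
    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
    if (diff > 100000ll)  return Drop::Audio;   // audio leads by > 0.1 s
    if (diff < -100000ll) return Drop::Video;   // video leads by > 0.1 s
    return Drop::None;                          // close enough: sync done
}

int main() {
    std::printf("%d\n", (int)startupDrop(850000, 1000000));   // 1: drop audio
    std::printf("%d\n", (int)startupDrop(1200000, 1000000));  // 2: drop video
    std::printf("%d\n", (int)startupDrop(980000, 1000000));   // 0: none
}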
Example #5
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
#ifndef ANDROID_DEFAULT_CODE
                // Half of the pending-playout time can be too long a wait;
                // cap delayUs at 40ms so the repost below is at most 20ms out.
                if (numFramesPlayed > mNumFramesWritten)
                    ALOGW("numFramesPlayed(%d) > mNumFramesWritten(%d), no reset @ kWhatDrainAudioQueue",
                            numFramesPlayed, mNumFramesWritten);
                if (delayUs > 40000)
                    delayUs = 40000;
#endif
                postDrainAudioQueue(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            LOGI("kWhatQueueEOS");
            mNeedCheckBuffer = false;
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            mIsbufferempty = false;
            onResume();
            break;
        }
#ifndef ANDROID_DEFAULT_CODE
        case kWhatCheckRenderBufferStatus:
        {
            ALOGV("kWhatCheckRenderBufferStatus need check buff : %s",mNeedCheckBuffer?"YES":"NO");
            sp<AMessage> notify = mNotify->dup();
            notify->setInt32("what", kWhatBufferNotify);
            int32_t rate = 0;
            ALOGV("mIsSeeking(%d) mVideoQueue.size(%d) mAudioQueue.size(%d)",mIsSeeking,mVideoQueue.size(),mAudioQueue.size());
            if(mNeedCheckBuffer == true){
                switch(mIsbufferempty){
                case true:
                    if(mIsSeeking == true)
                    { // only check VideoQueue after seek
                        if (mVideoQueue.size()>0){
                            ALOGV("kWhatCheckRenderBufferStatus full");

                            rate = 100;
                            notify->setInt32("bufRate", rate);
                            notify->post();
                            mIsSeeking = false;
                        }
                    }
                    else 
                    {
                        if(mDebugDisableAVsync)//Disable AV SYNC for debug
                        {
                            ALOGV("kWhatCheckRenderBufferStatus full");
                            
                            rate = 100;
                            notify->setInt32("bufRate", rate);
                            notify->post();
                        }
                        else if (mAudioQueue.size()>2)//Wait audio
                        {
                            ALOGV("kWhatCheckRenderBufferStatus full");
                            
                            rate = 100;
                            notify->setInt32("bufRate", rate);
                            notify->post();
                        }
                    }
                    break;
                    
                case false:
                    if(mIsSeeking == true)
                    { // only check VideoQueue after seek
                        if (mVideoQueue.empty())
                        {
                            mIsbufferempty = true;
                            ALOGV("kWhatCheckRenderBufferStatus empty");

                            rate = 0;
                            notify->setInt32("bufRate", rate);
                            notify->post();
                        }
                    }
                    else
                    {
                        if(mDebugDisableAVsync)//Disable AV SYNC for debug
                        {
                             mIsbufferempty = true;
                             ALOGV("kWhatCheckRenderBufferStatus empty");
                             rate = 0;
                             notify->setInt32("bufRate", rate);
                             notify->post();
                        }
                        else if (mAudioQueue.empty())//Wait audio empty
                        {
                            mIsbufferempty = true;
                            ALOGV("kWhatCheckRenderBufferStatus empty");
                            rate = 0;
                            notify->setInt32("bufRate", rate);
                            notify->post();
                        }
                    }
                    break;
                default:
                    break;
                }
            }else{
                    rate = 100;
                    notify->setInt32("bufRate", rate);
                    notify->post();
            }
            (new AMessage(kWhatCheckRenderBufferStatus, id()))->post(300000);
            break;
        }
        case kWhatStatusNotify:
        {
            int32_t status;
            sp<AMessage> message;
            CHECK(msg->findMessage("message", &message));
            CHECK(message->findInt32("status", &status));
            
            LOGI("kWhatStatusNotify %d", status);
            switch (status) {
            case MEDIA_SEEK_COMPLETE:
                mIsSeeking = true;
                break;
            default:
                break;
            }
            break;
        }
#endif
        default:
            TRESPASS();
            break;
    }
}
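The kWhatCheckRenderBufferStatus handler is a self-rescheduling poll: after reporting the buffer level it posts a fresh message to itself 300 ms out. The toy event loop below shows the repost pattern in isolation (standalone code with hypothetical names; the real code uses ALooper/AMessage):

// Toy message loop standing in for ALooper (not AOSP code).
#include <cstdio>
#include <functional>
#include <queue>

std::queue<std::function<void()>> gLoop;

void checkBufferStatus(int remainingPolls) {
    std::printf("poll: report buffer level to the player\n");
    if (remainingPolls > 0) {
        // The real code does (new AMessage(kWhatCheckRenderBufferStatus,
        // id()))->post(300000); the 300 ms delay is elided in this toy loop.
        gLoop.push([=] { checkBufferStatus(remainingPolls - 1); });
    }
}

int main() {
    gLoop.push([] { checkBufferStatus(2); });
    while (!gLoop.empty()) {
        std::function<void()> task = gLoop.front();
        gLoop.pop();
        task();   // runs the poll, which may re-post itself
    }
}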
Example #6
#ifndef ANDROID_DEFAULT_CODE
void NuPlayer::Renderer::notifyPosition(bool audio) {
#else
void NuPlayer::Renderer::notifyPosition() {
#endif
    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
        return;
    }
    
#ifndef ANDROID_DEFAULT_CODE
    // only use audio position when there is audio
    if((mHasAudio && !audio) || (!mHasAudio && audio)) {
        return;
    }

    if (mNeedNewAudioAnchorTime && mHasAudio) {
        ALOGW("need new audio anchor time for position");
        return;
    }
#endif

    int64_t nowUs = ALooper::GetNowUs();

    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}

void NuPlayer::Renderer::onPause() {
#ifndef ANDROID_DEFAULT_CODE
    if (mPaused) {
        ALOGD("NuPlayer::Renderer::onPause already paused");
        return;
    }
#else
    CHECK(!mPaused);
#endif
    mDrainAudioQueuePending = false;
    ++mAudioQueueGeneration;

    mDrainVideoQueuePending = false;
    ++mVideoQueueGeneration;

    prepareForMediaRenderingStart();

    if (mHasAudio) {
        mAudioSink->pause();
        ALOGD("NuPlayer::Renderer::onPause paused mAudioSink Line %d\n", __LINE__);
    } else {
        ALOGD("NuPlayer::Renderer::onPause no audio Line %d\n", __LINE__);
    }

    ALOGV("now paused audio queue has %d entries, video has %d entries",
          mAudioQueue.size(), mVideoQueue.size());

    mPaused = true;
}

void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        mAudioSink->start();
    }

    mPaused = false;
#ifndef ANDROID_DEFAULT_CODE
    // mtk80902: ALPS00445484 - this is a renderer bug:
    // a pure-video stream's timestamps must be re-anchored
    // after a renderer pause/resume.
    if (!mHasAudio) {
        mAnchorTimeMediaUs = -1;
        mAnchorTimeRealUs = -1;
    } else {
        mNeedNewAudioAnchorTime = true;
    }
#endif
    mDropvideo = false;
    ALOGD("onResume");

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

}  // namespace android
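For reference, notifyPosition() extrapolates the playback position from the last anchor pair as positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs, which is why onResume() clears the anchors for video-only playback: a stale anchor would make the extrapolated position jump. A standalone sketch of the calculation:

// Standalone sketch of the anchor-based position calculation.
#include <cstdint>
#include <cstdio>

// Media position = media time at the anchor + wall-clock time elapsed since.
int64_t positionUs(int64_t nowUs, int64_t anchorRealUs, int64_t anchorMediaUs) {
    return (nowUs - anchorRealUs) + anchorMediaUs;
}

int main() {
    // Anchored at real time 10.0 s == media time 2.0 s; 0.5 s later the
    // extrapolated media position is 2.5 s.
    std::printf("%lld\n",
            (long long)positionUs(10500000, 10000000, 2000000));  // 2500000
}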