Пример #1
0
void DirectRenderer::DecoderContext::queueDecoderInputBuffers() {
    if (mDecoder == NULL) {
        return;
    }

    bool submittedMore = false;

    while (!mAccessUnits.empty()
            && !mDecoderInputBuffersAvailable.empty()) {
        size_t index = *mDecoderInputBuffersAvailable.begin();

        mDecoderInputBuffersAvailable.erase(
                mDecoderInputBuffersAvailable.begin());

        sp<ABuffer> srcBuffer = *mAccessUnits.begin();
        mAccessUnits.erase(mAccessUnits.begin());

        const sp<ABuffer> &dstBuffer =
            mDecoderInputBuffers.itemAt(index);

        memcpy(dstBuffer->data(), srcBuffer->data(), srcBuffer->size());

        int64_t timeUs;
        CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs));

#ifdef MTK_AOSP_ENHANCEMENT
        sp<WfdDebugInfo> debugInfo= defaultWfdDebugInfo();
        int64_t now = ALooper::GetNowUs();
        size_t trackIndex;
        CHECK(mNotify->findSize("trackIndex", &trackIndex));
        debugInfo->addTimeInfoByKey(trackIndex == 0, timeUs, "queueDecIn1", now/1000);
#endif
        status_t err = mDecoder->queueInputBuffer(
                index,
                0 /* offset */,
                srcBuffer->size(),
                timeUs,
                0 /* flags */);
        CHECK_EQ(err, (status_t)OK);
#ifdef MTK_AOSP_ENHANCEMENT
        //now = ALooper::GetNowUs();
        //debugInfo->addTimeInfoByKey(trackIndex == 0, timeUs, "queueDecIn2", now/1000);
#endif

        submittedMore = true;
    }

    if (submittedMore) {
        scheduleDecoderNotification();
    }
}
Пример #2
0
// Accepts an encoded access unit for the given track and forwards it to
// the renderer's looper thread via an async message.
void DirectRenderer::queueAccessUnit(
        size_t trackIndex, const sp<ABuffer> &accessUnit) {
#ifdef MTK_AOSP_ENHANCEMENT
    // Stamp the arrival time of this unit for latency debugging.
    sp<WfdDebugInfo> debugInfo = defaultWfdDebugInfo();
    int64_t now = ALooper::GetNowUs();
    int64_t timeUs;
    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
    debugInfo->addTimeInfoByKey(trackIndex == 0, timeUs, "queueAccu", now/1000);
#endif
    sp<AMessage> msg = new AMessage(kWhatQueueAccessUnit, id());
    msg->setSize("trackIndex", trackIndex);
    msg->setBuffer("accessUnit", accessUnit);
    msg->post();
}
Пример #3
0
// Routes a decoded output buffer: audio (track 1) goes straight to the
// audio renderer; video is queued for the render scheduler.
void DirectRenderer::queueOutputBuffer(
        size_t trackIndex,
        size_t index, int64_t timeUs, const sp<ABuffer> &buffer) {
#ifdef MTK_AOSP_ENHANCEMENT
    // Stamp the moment this decoded buffer reached the renderer.
    sp<WfdDebugInfo> debugInfo = defaultWfdDebugInfo();
    int64_t now = ALooper::GetNowUs();
    debugInfo->addTimeInfoByKey(trackIndex == 0, timeUs, "queueOutput", now/1000);
#endif
    if (trackIndex == 1) {
        // Audio
        mAudioRenderer->queueInputBuffer(index, timeUs, buffer);
        return;
    }

    // Video: remember the codec buffer index and timestamp; the actual
    // display decision happens in onRenderVideo().
    OutputInfo pendingFrame;
    pendingFrame.mIndex = index;
    pendingFrame.mTimeUs = timeUs;
    pendingFrame.mBuffer = buffer;
    mVideoOutputBuffers.push_back(pendingFrame);

    scheduleVideoRenderIfNecessary();
}
Пример #4
0
// Drains every queued video frame to the display. Frames are rendered as
// soon as possible rather than being gated on their presentation time
// (the timestamp check was deliberately disabled upstream).
void DirectRenderer::onRenderVideo() {
    mVideoRenderPending = false;

    int64_t nowUs = ALooper::GetNowUs();

    while (!mVideoOutputBuffers.empty()) {
        const OutputInfo &info = *mVideoOutputBuffers.begin();

        // Late-frame accounting: anything more than 15ms past its
        // timestamp counts as late.
        if (info.mTimeUs + 15000ll < nowUs) {
            ++mNumFramesLate;
        }
        ++mNumFrames;

#ifdef MTK_AOSP_ENHANCEMENT
        // Record the render-queue time; keep the timestamp around since
        // 'info' dies when the element is erased below.
        sp<WfdDebugInfo> debugInfo = defaultWfdDebugInfo();
        int64_t now = ALooper::GetNowUs();
        int64_t timeUs = info.mTimeUs;
        debugInfo->addTimeInfoByKey(true, info.mTimeUs, "queueRender1", now/1000);
#endif
        status_t err =
            mDecoderContext[0]->renderOutputBufferAndRelease(info.mIndex);
        CHECK_EQ(err, (status_t)OK);

        mVideoOutputBuffers.erase(mVideoOutputBuffers.begin());
#ifdef MTK_AOSP_ENHANCEMENT
        debugLatency(true, timeUs);
#endif
    }

    scheduleVideoRenderIfNecessary();
}
Пример #5
0
// Writes as much queued PCM as the audio sink accepts without blocking,
// releasing decoder output buffers as they are fully consumed. On a
// partial write the unwritten tail stays at the head of the queue and a
// retry is scheduled.
void DirectRenderer::AudioRenderer::onPushAudio() {
    mPushPending = false;

    while (!mInputBuffers.empty()) {
        const BufferInfo &info = *mInputBuffers.begin();

        ssize_t n = writeNonBlocking(
                info.mBuffer->data(), info.mBuffer->size());

        if (n < (ssize_t)info.mBuffer->size()) {
            // Partial write: the sink must not have failed outright.
            CHECK_GE(n, 0);

            // Advance past the bytes written; retry the remainder later.
            info.mBuffer->setRange(
                    info.mBuffer->offset() + n, info.mBuffer->size() - n);
#ifdef MTK_AOSP_ENHANCEMENT
            // Remember how much of this buffer is still pending.
            mcurrentInfosize = info.mBuffer->size();
#endif
            break;
        }
#ifdef MTK_AOSP_ENHANCEMENT
        // Buffer fully consumed (the partial-write branch breaks above, so
        // the original dangling 'else' was equivalent to running here).
        mcurrentInfosize = 0;

        // Record the audio render time for latency debugging.
        sp<WfdDebugInfo> debugInfo = defaultWfdDebugInfo();
        int64_t now = ALooper::GetNowUs();
        debugInfo->addTimeInfoByKey(false, info.mTimeUs, "queueRender1", now/1000);
#endif
        mDecoderContext->releaseOutputBuffer(info.mIndex);

        mInputBuffers.erase(mInputBuffers.begin());
    }

    schedulePushIfNecessary();
}
Пример #6
0
    void DirectRenderer::debugLatency(bool mediaType, int64_t timestamp){
    //mediatype: true:video,false:audio;
    
        sp<WfdDebugInfo> debugInfo= defaultWfdDebugInfo();
	 int64_t offset = debugInfo->getTimeInfoByKey(true, timestamp, "Offset");
	 int64_t now = ALooper::GetNowUs();
	 
	 int64_t oldtimeUs = timestamp + offset;
        int64_t t1 = debugInfo->getTimeInfoByKey(true, oldtimeUs, "rtprecvin");	
        int64_t t2 = debugInfo->getTimeInfoByKey(true, oldtimeUs, "atsparserin");
       // int64_t t3 = debugInfo->getTimeInfoByKey(true, oldtimeUs, "mediarecvout");
        int64_t t4 = debugInfo->getTimeInfoByKey(true, oldtimeUs, "apacksourceout");
        //int64_t t5 = debugInfo->getTimeInfoByKey(true, oldtimeUs, "sinkin");
        //int64_t t6 = debugInfo->getTimeInfoByKey(true, oldtimeUs, "sinkout");
        
        
        
        int64_t q1 = debugInfo->getTimeInfoByKey(true, timestamp, "queueAccu");   
        int64_t q2 = debugInfo->getTimeInfoByKey(true, timestamp, "queueDecIn1");
       // int64_t q3 = debugInfo->getTimeInfoByKey(true, timestamp, "queueDecIn2");
        int64_t q4 = debugInfo->getTimeInfoByKey(true, timestamp, "dequeueDecOut1");
        //int64_t q5 = debugInfo->getTimeInfoByKey(true, timestamp, "dequeueDecOut2");
        int64_t q6 = debugInfo->getTimeInfoByKey(true, timestamp, "queueOutput");   
        int64_t q7 = debugInfo->getTimeInfoByKey(true, timestamp, "queueRender1");   
        

        if ( offset != -1&& t1 != -1&& t2 != -1&& t4!= -1 && q1 != -1 && q2 != -1 && q4 != -1 && q6 != -1 && q7 != -1) {
			
                                                                  ALOGD("[%s][%lld]  rv->parsin %lld,parsein->out %lld ,out->dr %lld, dr->mc %lld, dec %lld, dec->queOut %lld,  dr->rend %lld, rend %lld,     total %lld",
         (mediaType? "video":"audio"),timestamp / 1000 ,              t2-t1 ,                     t4-t2,           q1-t4,             q2 - q1,      q4 - q2,      q6 -q4,                   q7 - q6,     now/1000 - q7, q7 - t1);
        }else if (q1 != -1 && q2 != -1 && q4 != -1 && q6 != -1 && q7 != -1){
                                                   ALOGD("[%s][%lld]Partial   dr->mc %lld, dec %lld, dec->queOut %lld,  dr->rend %lld, rend %lld,     total %lld",
         (mediaType? "video":"audio"),timestamp / 1000 ,             q2 - q1,      q4 - q2,      q6 -q4,                   q7 - q6,     now/1000 - q7, q7 - q1);
        }
		
}
Пример #7
0
// Message handler for the puller's looper thread. Drives the pull loop:
// start/stop the underlying MediaSource, read one buffer per kWhatPull,
// repackage it as an ABuffer and notify the owner, then reschedule.
void MediaPuller::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            // Starts the source (audio gets a kKeyTime param, see below),
            // kicks off the pull loop on success, and replies with 'err'.
            status_t err;
            ALOGI("start mIsAudio=%d",mIsAudio);
            if (mIsAudio) {
                // This atrocity causes AudioSource to deliver absolute
                // systemTime() based timestamps (off by 1 us).
#ifdef MTB_SUPPORT                
                ATRACE_BEGIN_EXT("AudioPuller, kWhatStart");
#endif
                sp<MetaData> params = new MetaData;
                params->setInt64(kKeyTime, 1ll);
                err = mSource->start(params.get());
            } else {
#ifdef MTB_SUPPORT            
                ATRACE_BEGIN_EXT("VideoPuller, kWhatStart");
#endif
                err = mSource->start();
                if (err != OK) {
                    ALOGE("source failed to start w/ err %d", err);
                }
            }

            if (err == OK) {
							 ALOGI("start done, start to schedulePull data");
                schedulePull();
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            response->postReply(replyID);
#ifdef MTB_SUPPORT			
            // NOTE(review): the "VideoPuller" label is used here even on the
            // audio path, which began with "AudioPuller" — confirm whether
            // ATRACE_END_EXT matches on label or only nesting.
            ATRACE_END_EXT("VideoPuller, kWhatStart");
#endif
            break;
        }

        case kWhatStop:
        {
            // Stops the source and bumps mPullGeneration so any in-flight
            // kWhatPull messages become stale and are dropped.
            sp<MetaData> meta = mSource->getFormat();
            const char *tmp;
            CHECK(meta->findCString(kKeyMIMEType, &tmp));
            AString mime = tmp;

            ALOGI("MediaPuller(%s) stopping.", mime.c_str());
            mSource->stop();
            ALOGI("MediaPuller(%s) stopped.", mime.c_str());
            ++mPullGeneration;

            // Fire the caller-supplied completion notification.
            sp<AMessage> notify;
            CHECK(msg->findMessage("notify", &notify));
            notify->post();
            break;
        }

        case kWhatPull:
        {
            int32_t generation;
#ifdef MTB_SUPPORT			
            if (mIsAudio) {
                ATRACE_BEGIN_EXT("AudioPuller, kWhatPull");
            } else {
                ATRACE_BEGIN_EXT("VideoPuller, kWhatPull");
            }
#endif			
            CHECK(msg->findInt32("generation", &generation));

            // Stale pull from before a stop/restart — ignore it.
            if (generation != mPullGeneration) {
                break;
            }

            MediaBuffer *mbuf;
	 
            status_t err = mSource->read(&mbuf);

            // While paused, drop the data but keep the loop alive so
            // resume picks up immediately.
            if (mPaused) {
                if (err == OK) {
                    mbuf->release();
                    mbuf = NULL;
                }

                schedulePull();
                break;
            }

            if (err != OK) {
                // EOS or read error: notify the owner; the loop is NOT
                // rescheduled, so pulling ends here.
                if (err == ERROR_END_OF_STREAM) {
                    ALOGI("stream ended.");
                } else {
                    ALOGE("error %d reading stream.", err);
                }
                ALOGI("err=%d.post kWhatEOS",err);
                sp<AMessage> notify = mNotify->dup();
                notify->setInt32("what", kWhatEOS);
                notify->post();
            } else {
            
                int64_t timeUs;
                CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
#ifdef MTB_SUPPORT
                if (mIsAudio) {
                    ATRACE_ONESHOT(ATRACE_ONESHOT_ADATA, "AudioPuller, TS: %lld ms", timeUs/1000);
                }
                else {
                    ATRACE_ONESHOT(ATRACE_ONESHOT_VDATA, "VideoPuller, TS: %lld ms", timeUs/1000);
                }
#endif				
                // Copy the MediaBuffer payload into an ABuffer for delivery.
                sp<ABuffer> accessUnit = new ABuffer(mbuf->range_length());

                memcpy(accessUnit->data(),
                       (const uint8_t *)mbuf->data() + mbuf->range_offset(),
                       mbuf->range_length());

                accessUnit->meta()->setInt64("timeUs", timeUs);

#ifndef ANDROID_DEFAULT_CODE	
		// Debug: track drift between capture timestamps and wall
		// clock; warn when it moves >500ms from the first sample.
		sp<WfdDebugInfo> debugInfo= defaultWfdDebugInfo();
	   	 int64_t MpMs = ALooper::GetNowUs();
		 debugInfo->addTimeInfoByKey(!mIsAudio , timeUs, "MpIn", MpMs/1000);	
		 
		 int64_t NowMpDelta =0;
		
		 NowMpDelta = (MpMs - timeUs)/1000;	
		 
		 if(mFirstDeltaMs == -1){
			mFirstDeltaMs = NowMpDelta;
			ALOGE("[check Input 1th][%s] ,timestamp=%lld ms,[ts and now delta change]=%lld ms",
			 		mIsAudio?"audio":"video",timeUs/1000,NowMpDelta);
		 }	
		 NowMpDelta = NowMpDelta - mFirstDeltaMs;
		
		 if(NowMpDelta > 500ll || NowMpDelta < -500ll ){
			 ALOGE("[check Input][%s] ,timestamp=%lld ms,[ts and now delta change]=%lld ms",
			 		mIsAudio?"audio":"video",timeUs/1000,NowMpDelta);
		 }
		 
#endif

		 
                if (mIsAudio) {
                    // Audio payload was fully copied above; release now.
                    mbuf->release();
                    mbuf = NULL;
		      ALOGI("[WFDP][%s] ,timestamp=%lld ms",mIsAudio?"audio":"video",timeUs/1000);
                } else {
                    // video encoder will release MediaBuffer when done
                    // with underlying data.
                    accessUnit->meta()->setPointer("mediaBuffer", mbuf);
		      ALOGI("[WFDP][%s] ,mediaBuffer=%p,timestamp=%lld ms",mIsAudio?"audio":"video",mbuf,timeUs/1000);
                }

                sp<AMessage> notify = mNotify->dup();

                notify->setInt32("what", kWhatAccessUnit);
                notify->setBuffer("accessUnit", accessUnit);
                notify->post();

                if (mbuf != NULL) {
                    ALOGV("posted mbuf %p", mbuf);
                }

                // Keep the pull loop running.
                schedulePull();
#ifdef MTB_SUPPORT			
                if (mIsAudio) {
                    ATRACE_END_EXT("AudioPuller, kWhatPull");
                } else {
                    ATRACE_END_EXT("VideoPuller, kWhatPull");
                }
#endif	
            }
            break;
        }

        case kWhatPause:
        {
            mPaused = true;
            break;
        }

        case kWhatResume:
        {
            mPaused = false;
            break;
        }
        default:
            TRESPASS();
    }
}
Пример #8
0
// Services the decoder: reclaims released input buffers, refills them
// with pending access units, then drains all available output buffers.
void DirectRenderer::DecoderContext::onDecoderNotify() {
    mDecoderNotificationPending = false;

    // Collect every input buffer the codec has released.
    for (;;) {
        size_t index;
        status_t err = mDecoder->dequeueInputBuffer(&index);

        if (err == OK) {
            mDecoderInputBuffersAvailable.push_back(index);
        } else if (err == -EAGAIN) {
            break;
        } else {
            TRESPASS();
        }
    }

    queueDecoderInputBuffers();

#ifdef MTK_AOSP_ENHANCEMENT
    // Hoisted out of the loop: the track index is loop-invariant, so the
    // findSize lookup + CHECK need not run on every dequeue iteration.
    size_t trackIndex;
    CHECK(mNotify->findSize("trackIndex", &trackIndex));
#endif

    // Drain all decoded output buffers.
    for (;;) {
        size_t index;
        size_t offset;
        size_t size;
        int64_t timeUs;
        uint32_t flags;
#ifdef MTK_AOSP_ENHANCEMENT
        // Taken before the dequeue so the latency log reflects when we
        // started waiting for this buffer. (The unused 'now2' that only
        // fed commented-out code was removed — it triggered an
        // unused-variable warning.)
        int64_t now1 = ALooper::GetNowUs();
#endif
        status_t err = mDecoder->dequeueOutputBuffer(
                &index,
                &offset,
                &size,
                &timeUs,
                &flags);

        if (err == OK) {
#ifdef MTK_AOSP_ENHANCEMENT
            sp<WfdDebugInfo> debugInfo = defaultWfdDebugInfo();
            debugInfo->addTimeInfoByKey(trackIndex == 0, timeUs, "dequeueDecOut1", now1/1000);
#endif
            queueOutputBuffer(
                    index, timeUs, mDecoderOutputBuffers.itemAt(index));
        } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
            // Codec reallocated its output buffers; refresh our copy.
            err = mDecoder->getOutputBuffers(
                    &mDecoderOutputBuffers);
            CHECK_EQ(err, (status_t)OK);
        } else if (err == INFO_FORMAT_CHANGED) {
            // We don't care.
        } else if (err == -EAGAIN) {
            break;
        } else {
            TRESPASS();
        }
    }

    scheduleDecoderNotification();
}