void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    ALOGV("releaseRecordingFrame");
#if defined(MTK_AOSP_ENHANCEMENT) && defined(MTB_SUPPORT)
    ATRACE_ONESHOT(ATRACE_ONESHOT_SPECIAL, "releaseRecordingFrame");
#endif
    if (mCameraRecordingProxy != NULL) {
        mCameraRecordingProxy->releaseRecordingFrame(frame);
    } else if (mCamera != NULL) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        mCamera->releaseRecordingFrame(frame);
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
}
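The clearCallingIdentity()/restoreCallingIdentity() pair above is the standard Binder pattern for making an outbound call on behalf of this process rather than the remote caller whose identity the binder thread is carrying. A minimal sketch of that pattern in isolation, assuming only the AOSP Binder headers (the callAsSelf helper is illustrative, not part of CameraSource):

#include <binder/IPCThreadState.h>

// Run `fn` with this process's own uid/pid as the Binder calling identity,
// then restore whatever identity the current thread was carrying before.
template <typename Fn>
static void callAsSelf(Fn fn) {
    using android::IPCThreadState;
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    fn();  // any binder call made in here is attributed to this process
    IPCThreadState::self()->restoreCallingIdentity(token);
}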
Example #2
void MediaPuller::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            status_t err;
            ALOGI("start, mIsAudio=%d", mIsAudio);
            if (mIsAudio) {
                // This atrocity causes AudioSource to deliver absolute
                // systemTime() based timestamps (off by 1 us).
#ifdef MTB_SUPPORT
                ATRACE_BEGIN_EXT("AudioPuller, kWhatStart");
#endif
                sp<MetaData> params = new MetaData;
                params->setInt64(kKeyTime, 1ll);
                err = mSource->start(params.get());
            } else {
#ifdef MTB_SUPPORT
                ATRACE_BEGIN_EXT("VideoPuller, kWhatStart");
#endif
                err = mSource->start();
            }

            if (err != OK) {
                ALOGE("source failed to start w/ err %d", err);
            } else {
                ALOGI("start done, scheduling first data pull");
                schedulePull();
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            response->postReply(replyID);
#ifdef MTB_SUPPORT
            if (mIsAudio) {
                ATRACE_END_EXT("AudioPuller, kWhatStart");
            } else {
                ATRACE_END_EXT("VideoPuller, kWhatStart");
            }
#endif
            break;
        }

        case kWhatStop:
        {
            sp<MetaData> meta = mSource->getFormat();
            const char *tmp;
            CHECK(meta->findCString(kKeyMIMEType, &tmp));
            AString mime = tmp;

            ALOGI("MediaPuller(%s) stopping.", mime.c_str());
            mSource->stop();
            ALOGI("MediaPuller(%s) stopped.", mime.c_str());
            ++mPullGeneration;

            sp<AMessage> notify;
            CHECK(msg->findMessage("notify", &notify));
            notify->post();
            break;
        }

        case kWhatPull:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));

            if (generation != mPullGeneration) {
                break;
            }
#ifdef MTB_SUPPORT
            if (mIsAudio) {
                ATRACE_BEGIN_EXT("AudioPuller, kWhatPull");
            } else {
                ATRACE_BEGIN_EXT("VideoPuller, kWhatPull");
            }
#endif

            MediaBuffer *mbuf;
            status_t err = mSource->read(&mbuf);

            if (mPaused) {
                if (err == OK) {
                    mbuf->release();
                    mbuf = NULL;
                }

                schedulePull();
#ifdef MTB_SUPPORT
                if (mIsAudio) {
                    ATRACE_END_EXT("AudioPuller, kWhatPull");
                } else {
                    ATRACE_END_EXT("VideoPuller, kWhatPull");
                }
#endif
                break;
            }

            if (err != OK) {
                if (err == ERROR_END_OF_STREAM) {
                    ALOGI("stream ended.");
                } else {
                    ALOGE("error %d reading stream.", err);
                }
                ALOGI("err=%d, posting kWhatEOS", err);
                sp<AMessage> notify = mNotify->dup();
                notify->setInt32("what", kWhatEOS);
                notify->post();
#ifdef MTB_SUPPORT
                if (mIsAudio) {
                    ATRACE_END_EXT("AudioPuller, kWhatPull");
                } else {
                    ATRACE_END_EXT("VideoPuller, kWhatPull");
                }
#endif
            } else {
                int64_t timeUs;
                CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
#ifdef MTB_SUPPORT
                if (mIsAudio) {
                    ATRACE_ONESHOT(ATRACE_ONESHOT_ADATA, "AudioPuller, TS: %lld ms",
                                   (long long)(timeUs / 1000));
                } else {
                    ATRACE_ONESHOT(ATRACE_ONESHOT_VDATA, "VideoPuller, TS: %lld ms",
                                   (long long)(timeUs / 1000));
                }
#endif
                sp<ABuffer> accessUnit = new ABuffer(mbuf->range_length());

                memcpy(accessUnit->data(),
                       (const uint8_t *)mbuf->data() + mbuf->range_offset(),
                       mbuf->range_length());

                accessUnit->meta()->setInt64("timeUs", timeUs);

#ifndef ANDROID_DEFAULT_CODE
                // Track the delta between the capture timestamp and "now" so
                // that input-side latency drift (more than 500 ms away from
                // the first frame's delta) gets flagged in the log.
                sp<WfdDebugInfo> debugInfo = defaultWfdDebugInfo();
                int64_t MpMs = ALooper::GetNowUs();
                debugInfo->addTimeInfoByKey(!mIsAudio, timeUs, "MpIn", MpMs / 1000);

                int64_t NowMpDelta = (MpMs - timeUs) / 1000;

                if (mFirstDeltaMs == -1) {
                    mFirstDeltaMs = NowMpDelta;
                    ALOGE("[check Input 1st][%s], timestamp=%lld ms, [ts and now delta change]=%lld ms",
                          mIsAudio ? "audio" : "video",
                          (long long)(timeUs / 1000), (long long)NowMpDelta);
                }
                NowMpDelta = NowMpDelta - mFirstDeltaMs;

                if (NowMpDelta > 500ll || NowMpDelta < -500ll) {
                    ALOGE("[check Input][%s], timestamp=%lld ms, [ts and now delta change]=%lld ms",
                          mIsAudio ? "audio" : "video",
                          (long long)(timeUs / 1000), (long long)NowMpDelta);
                }
#endif

		 
                if (mIsAudio) {
                    mbuf->release();
                    mbuf = NULL;
                    ALOGI("[WFDP][audio], timestamp=%lld ms", (long long)(timeUs / 1000));
                } else {
                    // video encoder will release MediaBuffer when done
                    // with underlying data.
                    accessUnit->meta()->setPointer("mediaBuffer", mbuf);
                    ALOGI("[WFDP][video], mediaBuffer=%p, timestamp=%lld ms",
                          mbuf, (long long)(timeUs / 1000));
                }

                sp<AMessage> notify = mNotify->dup();

                notify->setInt32("what", kWhatAccessUnit);
                notify->setBuffer("accessUnit", accessUnit);
                notify->post();

                if (mbuf != NULL) {
                    ALOGV("posted mbuf %p", mbuf);
                }

                schedulePull();
#ifdef MTB_SUPPORT
                if (mIsAudio) {
                    ATRACE_END_EXT("AudioPuller, kWhatPull");
                } else {
                    ATRACE_END_EXT("VideoPuller, kWhatPull");
                }
#endif
            }
            break;
        }

        case kWhatPause:
        {
            mPaused = true;
            break;
        }

        case kWhatResume:
        {
            mPaused = false;
            break;
        }
        default:
            TRESPASS();
    }
}
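The kWhatStart and kWhatStop branches above complete a synchronous handshake: senderAwaitsResponse() extracts the reply token and postReply() wakes the blocked sender with the "err" value. A minimal sketch of the sending side using the stock AMessage API (AOSP's MediaPuller routes start() through a helper of this shape; this version is reconstructed, not copied):

status_t MediaPuller::postSynchronouslyAndReturnError(
        const sp<AMessage> &msg) {
    sp<AMessage> response;
    // Blocks the calling thread until the handler posts a reply.
    status_t err = msg->postAndAwaitResponse(&response);

    if (err != OK) {
        return err;  // posting failed, e.g. the looper is gone
    }

    if (!response->findInt32("err", &err)) {
        err = OK;  // no explicit error in the reply means success
    }

    return err;
}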
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %" PRId64 " us", timestampUs);
    Mutex::Autolock autoLock(mLock);
#ifdef MTK_AOSP_ENHANCEMENT
#ifdef MTB_SUPPORT
    ATRACE_ONESHOT(ATRACE_ONESHOT_SPECIAL, "dataCallbackTimestamp");
#endif
    if ((!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs))
#ifdef MTK_SLOW_MOTION_VIDEO_SUPPORT
            && (mColorFormat !=OMX_MTK_COLOR_FormatBitStream /*0x7F000300*/)
#endif
       ) {
        ALOGW("Drop frame at %" PRId64 "/%" PRId64 " us", timestampUs, mStartTimeUs);
#ifdef MTB_SUPPORT
        ATRACE_ONESHOT(ATRACE_ONESHOT_SPECIAL, "dropCameraFrame");
#endif
#else
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %" PRId64 "/%" PRId64 " us", timestampUs, mStartTimeUs);
#endif
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
#ifdef MTK_AOSP_ENHANCEMENT
        if (timestampUs <= mLastFrameTimestampUs)
        {
            ALOGW("[CameraSource][dataCallbackTimestamp][Warning] current frame timestamp: %" PRId64 " <= previous frame timestamp: %" PRId64 "",
                  timestampUs, mLastFrameTimestampUs);
#ifdef HAVE_AEE_FEATURE
            if(timestampUs < mLastFrameTimestampUs)
                aee_system_exception("CRDISPATCH_KEY:Camera issue",NULL,DB_OPT_DEFAULT,"\nCameraSource:current frame timestamp: %" PRId64 " < previous frame timestamp: %" PRId64 "!",timestampUs, mLastFrameTimestampUs);
#endif

        }
#ifdef MTK_SLOW_MOTION_VIDEO_SUPPORT
        if (timestampUs <= mFirstFrameTimeUs + mStartTimeOffsetUs) {
            ALOGI("drop frame for directlink, timestampUs(%" PRId64 " us), mFirstFrameTimeUs(%" PRId64 " us), mStartTimeOffsetUs(%" PRId64 " us)",
                  timestampUs, mFirstFrameTimeUs, mStartTimeOffsetUs);
            releaseOneRecordingFrame(data);
            return;
        } else {
            timestampUs -= mStartTimeOffsetUs;
        }
#endif

#else
        CHECK(timestampUs > mLastFrameTimestampUs);
#endif
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
#ifdef MTK_AOSP_ENHANCEMENT
#ifdef MTK_SLOW_MOTION_VIDEO_SUPPORT
            if (mColorFormat == OMX_MTK_COLOR_FormatBitStream /*0x7F000300*/) {
                ALOGI("don't drop frame for directlink, reset mStartTimeUs to the first frame timestamp");
                if (timestampUs < mStartTimeUs) {
                    mStartTimeOffsetUs = mStartTimeUs - timestampUs;
                    ALOGI("mStartTimeOffsetUs = %" PRId64, mStartTimeOffsetUs);
                }
                mStartTimeUs = timestampUs;
            }
#endif
#endif
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
#ifdef MTK_AOSP_ENHANCEMENT
                ALOGW("timestampUs=%" PRId64 " < mStartTimeUs=%" PRId64 " drop frame",timestampUs,mStartTimeUs);
#endif
                return;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
#ifdef MTK_AOSP_ENHANCEMENT
            ALOGI("the first video frame,time offset to mStartTimeUs=%" PRId64 "",mStartTimeUs);
#endif
        }
    }
    ++mNumFramesReceived;
#ifdef MTK_AOSP_ENHANCEMENT
    // Quality-adjust frame dropping: only keep frames that fall on the
    // cadence implied by mDropRate; everything else is released back.
    if ((mDropRate > 0) && (mNumFramesReceived != int(mLastNumFramesReceived + mDropRate * mNumRemainFrameReceived + 0.5))) {
        releaseOneRecordingFrame(data);
        ++mNumFramesDropped;
        ALOGD("Quality adjust drop frame = %d", mNumFramesReceived);
        return;
    }
    // Count of frames actually kept.
    ++mNumRemainFrameReceived;

#endif

#ifdef HAVE_AEE_FEATURE
    if (data == NULL || data->size() <= 0)
        aee_system_exception("CRDISPATCH_KEY:Camera issue", NULL, DB_OPT_DEFAULT,
                             "\nCameraSource: dataCallbackTimestamp data error %p", data.get());
#endif
    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);

#ifdef MTK_AOSP_ENHANCEMENT
    if (mNumFramesReceived % LOG_INTERVAL == 1)
        ALOGI("initial delay: %" PRId64 ", current time stamp: %" PRId64 ", mFramesReceived.size()=%zu, mNumFramesReceived=%d",
              mStartTimeUs, timeUs, mFramesReceived.size(), mNumFramesReceived);
#else
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
          mStartTimeUs, timeUs);
#endif
    mFrameAvailableCondition.signal();
}
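Two pieces of state drive the timestamp mapping above: once the first frame arrives, mStartTimeUs no longer holds the recording start time but the initial delay (first frame timestamp minus start time), and every later frame is stamped initialDelay + (timestampUs - firstFrameTimeUs). A standalone sketch of that arithmetic with made-up numbers (all names are local to this example):

#include <cassert>
#include <cstdint>

// Map a camera timestamp to media time the way dataCallbackTimestamp() does.
static int64_t mapFrameTimeUs(int64_t timestampUs,
                              int64_t firstFrameTimeUs,
                              int64_t initialDelayUs) {
    return initialDelayUs + (timestampUs - firstFrameTimeUs);
}

int main() {
    const int64_t startTimeUs  = 1000000;  // recording started at t = 1 s
    const int64_t firstFrameUs = 1033000;  // first frame arrives 33 ms later
    const int64_t initialDelay = firstFrameUs - startTimeUs;  // 33 ms

    // The first frame lands at the initial delay, the next one 33 ms after it.
    assert(mapFrameTimeUs(firstFrameUs, firstFrameUs, initialDelay) == 33000);
    assert(mapFrameTimeUs(firstFrameUs + 33000, firstFrameUs, initialDelay) == 66000);
    return 0;
}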

bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    ALOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}
status_t CameraSource::read(
    MediaBuffer **buffer, const ReadOptions *options) {
#ifdef MTK_AOSP_ENHANCEMENT
    {
        Mutex::Autolock autoLock(mLock);
        ALOGV("read, mFramesReceived.size= %d,mFramesBeingEncoded.size()= %d",\
        mFramesReceived.size(),mFramesBeingEncoded.size());
    }
#else
    ALOGV("read");
#endif

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                    mFrameAvailableCondition.waitRelative(mLock,
                            mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                if (mCameraRecordingProxy != 0 &&
                        !mCameraRecordingProxy->asBinder()->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }

                ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
                      mLastFrameTimestampUs);
#ifdef HAVE_AEE_FEATURE
                aee_system_warning("CRDISPATCH_KEY:Camera issue", NULL, DB_OPT_FTRACE,
                                   "\nCameraSource: Timed out waiting for incoming camera video frames!");
#endif
            }
        }

#ifdef MTK_AOSP_ENHANCEMENT
#ifdef MTB_SUPPORT
        ATRACE_ONESHOT(ATRACE_ONESHOT_SPECIAL, "omxReadData");
#endif
#endif

        if (!mStarted) {
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
#ifdef MTK_AOSP_ENHANCEMENT
#ifdef MTK_SLOW_MOTION_VIDEO_SUPPORT
        // For directlink, flag the first buffer read out as codec config data.
        if (mColorFormat == OMX_MTK_COLOR_FormatBitStream /*0x7F000300*/ && !mCodecConfigReceived) {
            (*buffer)->meta_data()->setInt32(kKeyIsCodecConfig, true);
            mCodecConfigReceived = true;
        }
#endif
#endif
    }
    return OK;
}
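Each buffer handed out by read() wraps the IMemory of a queued frame and parks that frame on mFramesBeingEncoded; it only goes back to the camera once the consumer releases the MediaBuffer and the observer callback fires. A minimal consumer sketch under that assumption (error handling trimmed, `source` is a CameraSource that has already been start()ed):

void drainOneFrame(const android::sp<android::CameraSource> &source) {
    using namespace android;

    MediaBuffer *buffer = NULL;
    if (source->read(&buffer) == OK && buffer != NULL) {
        int64_t timeUs = 0;
        CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
        ALOGV("got %zu bytes at %" PRId64 " us", buffer->range_length(), timeUs);

        // release() drops the consumer's reference; CameraSource's observer
        // callback then returns the frame via releaseRecordingFrame().
        buffer->release();
    }
}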