Ejemplo n.º 1
0
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType __unused, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %lld us", (long long)timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", (long long)timestampUs, (long long)mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
        if (timestampUs <= mLastFrameTimestampUs) {
            ALOGW("Dropping frame with backward timestamp %lld (last %lld)",
                    (long long)timestampUs, (long long)mLastFrameTimestampUs);
            releaseOneRecordingFrame(data);
            return;
        }
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
Ejemplo n.º 2
0
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    if (!mStarted) {
       ALOGD("Stop recording issued. Return here.");
       return;
    }
    Mutex::Autolock autoLock(mLock);
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    if (mRecPause == true) {
        if(!mFramesReceived.empty()) {
            ALOGV("releaseQueuedFrames - #Queued Frames : %d", mFramesReceived.size());
            releaseQueuedFrames();
        }
        ALOGV("release One Video Frame for Pause : %lld us", timestampUs);
        releaseOneRecordingFrame(data);
        mPauseEndTimeUs = timestampUs;
        return;
    }
    timestampUs -= mPauseAdjTimeUs;
    ALOGV("dataCallbackTimestamp: AdjTimestamp %lld us", timestampUs);
    if (mNumFramesReceived > 0) {
        CHECK(timestampUs > mLastFrameTimestampUs);
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
// MediaTek-vendor variant of the camera frame callback.  The core flow
// (drop-before-start, glitch counting, initial-delay bookkeeping, queueing,
// signalling) matches the stock implementation; MTK_AOSP_ENHANCEMENT adds
// tracing, slow-motion "directlink" bitstream handling, quality-adjust frame
// dropping, and AEE exception reporting.
// NOTE(review): the #ifdef structure below opens an `if` in one preprocessor
// branch and closes it in the other — both branches share the
// releaseOneRecordingFrame()/return tail.  Do not restructure casually.
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %" PRId64 " us", timestampUs);
    Mutex::Autolock autoLock(mLock);
#ifdef MTK_AOSP_ENHANCEMENT
#ifdef MTB_SUPPORT
    ATRACE_ONESHOT(ATRACE_ONESHOT_SPECIAL, "dataCallbackTimestamp");
#endif
    // Drop frames arriving before start (or after stop).  With slow-motion
    // support, bitstream-format (0x7F000300) frames are exempt from this
    // drop — presumably they are consumed by the directlink path below;
    // confirm against the MTK OMX component.
    if ((!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs))
#ifdef MTK_SLOW_MOTION_VIDEO_SUPPORT
            && (mColorFormat !=OMX_MTK_COLOR_FormatBitStream /*0x7F000300*/)
#endif
       ) {
        ALOGW("Drop frame at %" PRId64 "/%" PRId64 " us", timestampUs, mStartTimeUs);
#ifdef MTB_SUPPORT
        ATRACE_ONESHOT(ATRACE_ONESHOT_SPECIAL, "dropCameraFrame");
#endif
#else
    // Stock drop condition when the MTK enhancements are compiled out.
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %" PRId64 "/%" PRId64 " us", timestampUs, mStartTimeUs);
#endif
        // Shared tail of both branches above: return the buffer to the
        // camera and bail out.
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
#ifdef MTK_AOSP_ENHANCEMENT
        // Unlike the stock CHECK, a non-monotonic timestamp is only logged
        // (and reported through AEE when strictly backward); the frame is
        // still processed.
        if (timestampUs <= mLastFrameTimestampUs)
        {
            ALOGW("[CameraSource][dataCallbackTimestamp][Warning] current frame timestamp: %" PRId64 " <= previous frame timestamp: %" PRId64 "",
                  timestampUs, mLastFrameTimestampUs);
#ifdef HAVE_AEE_FEATURE
            if(timestampUs < mLastFrameTimestampUs)
                aee_system_exception("CRDISPATCH_KEY:Camera issue",NULL,DB_OPT_DEFAULT,"\nCameraSource:current frame timestamp: %" PRId64 " < previous frame timestamp: %" PRId64 "!",timestampUs, mLastFrameTimestampUs);
#endif

        }
#ifdef MTK_SLOW_MOTION_VIDEO_SUPPORT
        // Directlink slow-motion: frames up to (first frame + start offset)
        // are dropped; later ones have the offset subtracted.  The offset is
        // computed in the first-frame branch further below.
        if(timestampUs <= mFirstFrameTimeUs+mStartTimeOffsetUs) {
            ALOGI("drop frame for directlink, timestampUs(%" PRId64 " us),mFirstFrameTimeUs(%" PRId64 " us),mStartTimeOffsetUs(%" PRId64 " us)",
                  timestampUs, mFirstFrameTimeUs, mStartTimeOffsetUs);
            releaseOneRecordingFrame(data);
            return;
        }
        else
        {
            timestampUs -= mStartTimeOffsetUs;
        }
#endif

#else
        // Stock behavior: a backward timestamp is fatal.
        CHECK(timestampUs > mLastFrameTimestampUs);
#endif
        // An unusually long gap between consecutive frames counts as a glitch.
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
#ifdef MTK_AOSP_ENHANCEMENT
#ifdef MTK_SLOW_MOTION_VIDEO_SUPPORT
            // Directlink bitstream frames are never dropped pre-start;
            // instead remember how early the first frame was
            // (mStartTimeOffsetUs) and treat it as the start.
            if ( mColorFormat == OMX_MTK_COLOR_FormatBitStream /*0x7F000300*/) {
                ALOGI("not drop frame for directlink, reset mStartTimeUs as first frame timestamp");
                if(timestampUs < mStartTimeUs) {
                    mStartTimeOffsetUs = mStartTimeUs - timestampUs;
                    ALOGI("mStartTimeOffsetUs = %" PRId64 "", mStartTimeOffsetUs);
                }
                mStartTimeUs = timestampUs;
            }
#endif
#endif
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
#ifdef MTK_AOSP_ENHANCEMENT
                ALOGW("timestampUs=%" PRId64 " < mStartTimeUs=%" PRId64 " drop frame",timestampUs,mStartTimeUs);
#endif
                return;
            }
            // mStartTimeUs now holds the initial delay, not a wall time.
            mStartTimeUs = timestampUs - mStartTimeUs;
#ifdef MTK_AOSP_ENHANCEMENT
            ALOGI("the first video frame,time offset to mStartTimeUs=%" PRId64 "",mStartTimeUs);
#endif
        }
    }
    ++mNumFramesReceived;
#ifdef MTK_AOSP_ENHANCEMENT
    // Quality-adjust thinning: keep only frames whose index matches the
    // expected kept-frame schedule derived from mDropRate (a float ratio,
    // judging by the 0.5 rounding term — TODO confirm its declared type).
    //if ((mDropRate != 0) && (mNumFramesReceived % mDropRate != 0)) {
    if ((mDropRate > 0) && (mNumFramesReceived != int(mLastNumFramesReceived + mDropRate * mNumRemainFrameReceived  + 0.5))) {
        releaseOneRecordingFrame(data);
        ++mNumFramesDropped;
        ALOGD("Quality adjust drop frame = %d",mNumFramesReceived);
        return;
    }
    //real received frame num
    ++mNumRemainFrameReceived;

#endif

#ifdef HAVE_AEE_FEATURE
    // Report empty/NULL frame data through AEE before the fatal CHECK below.
    if(data == NULL || data->size() <= 0)
        aee_system_exception("CRDISPATCH_KEY:Camera issue",NULL,DB_OPT_DEFAULT,"\nCameraSource:dataCallbackTimestamp data error 0x%x",data.get());
#endif
    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    // Rebase onto the recording timeline: initial delay + elapsed capture time.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);

#ifdef MTK_AOSP_ENHANCEMENT
    // Rate-limited info log (every LOG_INTERVAL frames).
    if(mNumFramesReceived % LOG_INTERVAL == 1)
        ALOGI("initial delay: %" PRId64 ", current time stamp: %" PRId64",mFramesReceived.size()= %d,mNumFramesReceived= %d",
              mStartTimeUs, timeUs,mFramesReceived.size(),mNumFramesReceived);
#else
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
          mStartTimeUs, timeUs);
#endif
    mFrameAvailableCondition.signal();
}

// Accessor for the mIsMetaDataStoredInVideoBuffers flag set at construction.
bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    ALOGV("isMetaDataStoredInVideoBuffers");
    const bool storesMetaData = mIsMetaDataStoredInVideoBuffers;
    return storesMetaData;
}