Example No. 1
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType __unused, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %lld us", (long long)timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", (long long)timestampUs, (long long)mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
        if (timestampUs <= mLastFrameTimestampUs) {
            ALOGW("Dropping frame with backward timestamp %lld (last %lld)",
                    (long long)timestampUs, (long long)mLastFrameTimestampUs);
            releaseOneRecordingFrame(data);
            return;
        }
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
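            // From here on, mStartTimeUs is repurposed: it now holds the
            // initial delay (first frame timestamp minus the requested start
            // time), which is added back when computing each frame's timeUs.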
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
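The skipCurrentFrame() call above is a virtual hook that subclasses such as CameraSourceTimeLapse override to decide whether a frame should be dropped. A minimal sketch of such an override, matching the call site's contract (return true to drop); the member names besides skipCurrentFrame are hypothetical, and the real time-lapse logic is more involved:

// Hedged sketch of the skip-frame hook; mLastKeptFrameTimeUs and
// mTimeBetweenFramesUs are illustrative members, not from the code above.
bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
    if (mLastKeptFrameTimeUs == 0) {
        mLastKeptFrameTimeUs = timestampUs;  // always keep the first frame
        return false;
    }
    if (timestampUs - mLastKeptFrameTimeUs < mTimeBetweenFramesUs) {
        return true;  // too soon after the last kept frame: drop it
    }
    mLastKeptFrameTimeUs = timestampUs;
    return false;
}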
Example No. 2
#if defined(OMAP_ENHANCEMENT) && (TARGET_OMAP4)
// Reconstructed guard (assumed): the stray #endif and the otherwise-undeclared
// `offset` used below imply an OMAP4 variant that carries a buffer offset.
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data, uint32_t offset)
#else
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data)
#endif
{
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted) {
        releaseOneRecordingFrame(data);
        ++mNumFramesReceived;
        ++mNumFramesDropped;
        return;
    }

    if (mNumFramesReceived > 0 &&
        timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
        if (mNumGlitches % 10 == 0) {  // Don't spam the log
            LOGW("Long delay detected in video recording");
        }
        ++mNumGlitches;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);

#if defined(OMAP_ENHANCEMENT) && (TARGET_OMAP4)
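    // Queue the OMAP buffer offset alongside the frame; read() pops it in
    // lock-step and attaches it to the MediaBuffer as kKeyOffset meta-data.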
    mFrameOffset.push_back(offset);
#endif

    mFrameAvailableCondition.signal();
}
Example No. 3
void CedarXRecorder::CedarXReleaseFrame(int index)
{
    // Write the index of the finished frame into the shared IMemory block,
    // then hand the block back through releaseOneRecordingFrame().
    int *p = (int *)mFrameBuffer->pointer();

    *p = index;

    releaseOneRecordingFrame(mFrameBuffer);
}
Example No. 4
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
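    // The encoder has released this MediaBuffer; locate the queued IMemory
    // whose payload backs it so the frame can be returned to the camera.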
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() ==  buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK(!"signalBufferReturned: bogus buffer");
}
Example No. 5
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    LOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

#if defined(OMAP_ENHANCEMENT) && (TARGET_OMAP4)
    uint32_t frameOffset;
#endif
    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted) {
            while(mFramesReceived.empty()) {
                if (mNumFramesReceived == 0) {
                    /*
                     * It's perfectly normal that we don't receive frames for quite some
                     * time at record start, so don't use a timeout in that case.
                     */
                    mFrameAvailableCondition.wait(mLock);
                } else {
                    /*
                     * Don't wait indefinitely for camera frames, buggy HALs may
                     * fail to provide them in a timely manner under some conditions.
                     */
                    status_t err = mFrameAvailableCondition.waitRelative(mLock, 250000000);
                    if (err) {
                        return err;
                    }
                }
            }

            if (!mStarted) {
                return OK;
            }

            frame = *mFramesReceived.begin();
            mFramesReceived.erase(mFramesReceived.begin());

#if defined(OMAP_ENHANCEMENT) && (TARGET_OMAP4)
            frameOffset = *mFrameOffset.begin();
            mFrameOffset.erase(mFrameOffset.begin());
#endif
            frameTime = *mFrameTimes.begin();
            mFrameTimes.erase(mFrameTimes.begin());
            int64_t skipTimeUs;
            if (!options || !options->getSkipFrame(&skipTimeUs)) {
                skipTimeUs = frameTime;
            }
            if (skipTimeUs > frameTime) {
                LOGV("skipTimeUs: %lld us > frameTime: %lld us",
                    skipTimeUs, frameTime);
                releaseOneRecordingFrame(frame);
                ++mNumFramesDropped;
                // Safeguard against the abuse of the kSkipFrame_Option.
                if (skipTimeUs - frameTime >= 1E6) {
                    LOGE("Frame skipping requested is way too long: %lld us",
                        skipTimeUs - frameTime);
                    return UNKNOWN_ERROR;
                }
            } else {
                mFramesBeingEncoded.push_back(frame);
                *buffer = new MediaBuffer(frame->pointer(), frame->size());
                (*buffer)->setObserver(this);
                (*buffer)->add_ref();
                (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);

#if defined(OMAP_ENHANCEMENT) && (TARGET_OMAP4)
                (*buffer)->meta_data()->setInt32(kKeyOffset, frameOffset);
#endif
                return OK;
            }
        }
    }
    return OK;
}
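Note how read() hands off ownership: the MediaBuffer wraps the IMemory payload, registers this CameraSource as its observer, and the frame stays on mFramesBeingEncoded until signalBufferReturned() runs. A minimal consumer loop under those semantics (a sketch; `source` and the encoder hand-off are placeholders):

// Sketch of a client pulling frames; buffer->release() on an observed
// buffer is what eventually drives CameraSource::signalBufferReturned().
MediaBuffer *buffer = NULL;
while (source->read(&buffer, NULL) == OK) {
    if (buffer == NULL) {
        break;  // read() returns OK with no buffer once the source stops
    }
    int64_t timeUs = 0;
    CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
    // ... hand buffer->data() / buffer->range_length() to the encoder ...
    buffer->release();  // returns the frame to the camera via the observer
}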
Example No. 6
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    if (!mStarted) {
       ALOGD("Stop recording issued. Return here.");
       return;
    }
    Mutex::Autolock autoLock(mLock);
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    if (mRecPause) {
        if (!mFramesReceived.empty()) {
            ALOGV("releaseQueuedFrames - #Queued Frames : %zu", mFramesReceived.size());
            releaseQueuedFrames();
        }
        ALOGV("release One Video Frame for Pause : %lld us", timestampUs);
        releaseOneRecordingFrame(data);
        mPauseEndTimeUs = timestampUs;
        return;
    }
    timestampUs -= mPauseAdjTimeUs;
    ALOGV("dataCallbackTimestamp: AdjTimestamp %lld us", timestampUs);
    if (mNumFramesReceived > 0) {
        CHECK(timestampUs > mLastFrameTimestampUs);
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
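Example No. 6 extends the callback with pause handling: while mRecPause is set, queued and incoming frames are released immediately and mPauseEndTimeUs tracks the last paused timestamp; once recording resumes, mPauseAdjTimeUs is subtracted from every timestamp so the recorded timeline stays contiguous. The resume path is not shown; one plausible sketch, with pauseRecording()/resumeRecording() and mPauseStartTimeUs as hypothetical additions:

// Hypothetical helpers; only mRecPause, mPauseEndTimeUs and mPauseAdjTimeUs
// appear in the example above.
void CameraSource::pauseRecording(int64_t nowUs) {
    Mutex::Autolock autoLock(mLock);
    mRecPause = true;
    mPauseStartTimeUs = nowUs;
}

void CameraSource::resumeRecording() {
    Mutex::Autolock autoLock(mLock);
    mRecPause = false;
    // Grow the adjustment by the span during which frames were discarded,
    // keeping post-resume timestamps contiguous with pre-pause ones.
    mPauseAdjTimeUs += mPauseEndTimeUs - mPauseStartTimeUs;
}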
Example No. 7
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %" PRId64 " us", timestampUs);
    Mutex::Autolock autoLock(mLock);
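    // Note: the two preprocessor branches below each open the same
    // if-statement; the shared drop body (releaseOneRecordingFrame and
    // return) sits after the #endif that closes this #ifdef/#else.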
#ifdef MTK_AOSP_ENHANCEMENT
#ifdef MTB_SUPPORT
    ATRACE_ONESHOT(ATRACE_ONESHOT_SPECIAL, "dataCallbackTimestamp");
#endif
    if ((!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs))
#ifdef MTK_SLOW_MOTION_VIDEO_SUPPORT
            && (mColorFormat !=OMX_MTK_COLOR_FormatBitStream /*0x7F000300*/)
#endif
       ) {
        ALOGW("Drop frame at %" PRId64 "/%" PRId64 " us", timestampUs, mStartTimeUs);
#ifdef MTB_SUPPORT
        ATRACE_ONESHOT(ATRACE_ONESHOT_SPECIAL, "dropCameraFrame");
#endif
#else
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %" PRId64 "/%" PRId64 " us", timestampUs, mStartTimeUs);
#endif
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
#ifdef MTK_AOSP_ENHANCEMENT
        if (timestampUs <= mLastFrameTimestampUs)
        {
            ALOGW("[CameraSource][dataCallbackTimestamp][Warning] current frame timestamp: %" PRId64 " <= previous frame timestamp: %" PRId64 "",
                  timestampUs, mLastFrameTimestampUs);
#ifdef HAVE_AEE_FEATURE
            if(timestampUs < mLastFrameTimestampUs)
                aee_system_exception("CRDISPATCH_KEY:Camera issue",NULL,DB_OPT_DEFAULT,"\nCameraSource:current frame timestamp: %" PRId64 " < previous frame timestamp: %" PRId64 "!",timestampUs, mLastFrameTimestampUs);
#endif

        }
#ifdef MTK_SLOW_MOTION_VIDEO_SUPPORT
        if(timestampUs <= mFirstFrameTimeUs+mStartTimeOffsetUs) {
            ALOGI("drop frame for directlink, timestampUs(%" PRId64 " us),mFirstFrameTimeUs(%" PRId64 " us),mStartTimeOffsetUs(%" PRId64 " us)",
                  timestampUs, mFirstFrameTimeUs, mStartTimeOffsetUs);
            releaseOneRecordingFrame(data);
            return;
        }
        else
        {
            timestampUs -= mStartTimeOffsetUs;
        }
#endif

#else
        CHECK(timestampUs > mLastFrameTimestampUs);
#endif
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
#ifdef MTK_AOSP_ENHANCEMENT
#ifdef MTK_SLOW_MOTION_VIDEO_SUPPORT
            if ( mColorFormat == OMX_MTK_COLOR_FormatBitStream /*0x7F000300*/) {
                ALOGI("not drop frame for directlink, reset mStartTimeUs as first frame timestamp");
                if(timestampUs < mStartTimeUs) {
                    mStartTimeOffsetUs = mStartTimeUs - timestampUs;
                    ALOGI("mStartTimeOffsetUs = %" PRId64 "", mStartTimeOffsetUs);
                }
                mStartTimeUs = timestampUs;
            }
#endif
#endif
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
#ifdef MTK_AOSP_ENHANCEMENT
                ALOGW("timestampUs=%" PRId64 " < mStartTimeUs=%" PRId64 " drop frame",timestampUs,mStartTimeUs);
#endif
                return;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
#ifdef MTK_AOSP_ENHANCEMENT
            ALOGI("the first video frame,time offset to mStartTimeUs=%" PRId64 "",mStartTimeUs);
#endif
        }
    }
    ++mNumFramesReceived;
#ifdef MTK_AOSP_ENHANCEMENT
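    // Quality-adjust decimation: a frame is kept only when its index hits
    // the next rounded target, i.e. roughly one kept frame for every
    // mDropRate received; mNumRemainFrameReceived counts the kept frames.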
    //if ((mDropRate != 0) && (mNumFramesReceived % mDropRate != 0)) {
    if ((mDropRate > 0) && (mNumFramesReceived != int(mLastNumFramesReceived + mDropRate * mNumRemainFrameReceived  + 0.5))) {
        releaseOneRecordingFrame(data);
        ++mNumFramesDropped;
        ALOGD("Quality adjust drop frame = %d",mNumFramesReceived);
        return;
    }
    //real received frame num
    ++mNumRemainFrameReceived;

#endif

#ifdef HAVE_AEE_FEATURE
    if(data == NULL || data->size() <= 0)
        aee_system_exception("CRDISPATCH_KEY:Camera issue",NULL,DB_OPT_DEFAULT,"\nCameraSource:dataCallbackTimestamp data error 0x%x",data.get());
#endif
    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);

#ifdef MTK_AOSP_ENHANCEMENT
    if (mNumFramesReceived % LOG_INTERVAL == 1)
        ALOGI("initial delay: %" PRId64 ", current time stamp: %" PRId64 ", mFramesReceived.size() = %zu, mNumFramesReceived = %d",
              mStartTimeUs, timeUs, mFramesReceived.size(), mNumFramesReceived);
#else
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
          mStartTimeUs, timeUs);
#endif
    mFrameAvailableCondition.signal();
}

bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    ALOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}