// Stops the recording session (legacy variant of teardown).
//
// Flips mStarted under mLock and wakes any reader blocked in read(), then --
// with the binder calling identity cleared so that camera-service permission
// checks apply to this process rather than the remote caller -- detaches the
// frame listener, stops recording, and drains both the queued and in-flight
// frame lists before dropping the camera reference.
//
// Returns OK unconditionally; aborts (CHECK_EQ) if the received/encoded/
// dropped frame accounting does not balance.
//
// NOTE(review): mCamera is dereferenced without a null check here -- assumes
// stop() is only reached after a successful start(); confirm with callers.
status_t CameraSource::stop() {
    LOGV("stop");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    mFrameAvailableCondition.signal();

    // Camera-service calls below must not run under the remote caller's
    // binder identity, or permission checks in the service may fail.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->setListener(NULL);
    mCamera->stopRecording();
    releaseQueuedFrames();
    while (!mFramesBeingEncoded.empty()) {
        // %zu: size() returns size_t; %d mismatched the vararg width on LP64.
        LOGI("Waiting for outstanding frames being encoded: %zu",
             mFramesBeingEncoded.size());
        // Unbounded wait: relies on signalBufferReturned() eventually
        // draining mFramesBeingEncoded and signalling this condition.
        mFrameCompleteCondition.wait(mLock);
    }
    mCamera = NULL;
    IPCThreadState::self()->restoreCallingIdentity(token);

    if (mCollectStats) {
        LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
             mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
             mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    // Every received frame must have been either encoded or dropped.
    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    return OK;
}
// Tears down an active recording session (MTK-enhanced variant).
//
// Under mLock: marks the source stopped, wakes any blocked reader, drops the
// binder calling identity (only if a camera is attached, so camera-service
// permission checks see this process), releases queued frames, and waits --
// with a per-iteration timeout -- for in-flight encoder frames to be
// returned before stopping the camera and releasing it.
//
// Returns OK unconditionally; may abort (CHECK_EQ) on frame-count imbalance.
status_t CameraSource::reset() {
    ALOGD("reset: E");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    mFrameAvailableCondition.signal();

    // Only clear the calling identity when a camera is attached; token is
    // left uninitialized otherwise and guarded by isTokenValid below.
    int64_t token;
    bool isTokenValid = false;
    if (mCamera != 0) {
        token = IPCThreadState::self()->clearCallingIdentity();
        isTokenValid = true;
    }
    releaseQueuedFrames();
    while (!mFramesBeingEncoded.empty()) {
        // Timed wait: on timeout we only log and re-check, so a frame stuck
        // in the encoder keeps this loop spinning rather than deadlocking
        // silently.
        if (NO_ERROR != mFrameCompleteCondition.waitRelative(mLock,
                mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
            ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
                mFramesBeingEncoded.size());
        }
    }
    stopCameraRecording();
#ifdef MTK_AOSP_ENHANCEMENT
    mNeedUnlock = true;
#endif
    // NOTE(review): releaseCamera() runs while mLock is still held here --
    // confirm this cannot deadlock against a data callback that takes mLock.
    releaseCamera();
    if (isTokenValid) {
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    if (mCollectStats) {
        ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }
    if (mNumGlitches > 0) {
        ALOGW("%d long delays between neighboring video frames", mNumGlitches);
    }
#ifdef MTK_AOSP_ENHANCEMENT
#ifdef MTK_SLOW_MOTION_VIDEO_SUPPORT
    if(mColorFormat !=OMX_MTK_COLOR_FormatBitStream /*0x7F000300*/) {
        // Skip the balance check only for the direct-link bitstream format:
        // its data callback does not drop frames even after mStarted is
        // cleared, so mNumFramesReceived can keep growing after reset and
        // the CHECK below would fire spuriously.
        CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    }
#else
    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
#endif
#else
    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
#endif
    ALOGD("reset: X");
    return OK;
}
// Tears down the recording session: stops frame delivery, drains queued and
// in-flight frames, stops the camera, and finally releases the camera
// reference (deliberately after mLock has been dropped).
//
// Returns OK unconditionally; aborts (CHECK_EQ) if the received/encoded/
// dropped frame counters do not balance.
status_t CameraSource::reset() {
    ALOGD("reset: E");
    {
        // Everything except releaseCamera() happens under mLock.
        Mutex::Autolock autoLock(mLock);
        mStarted = false;
        mFrameAvailableCondition.signal();

        // Drop the remote caller's binder identity (only when a camera is
        // attached) so camera-service permission checks see this process.
        bool mustRestoreIdentity = false;
        int64_t binderToken;
        if (mCamera != 0) {
            binderToken = IPCThreadState::self()->clearCallingIdentity();
            mustRestoreIdentity = true;
        }

        releaseQueuedFrames();

        // Wait for the encoder to hand back every outstanding frame; a
        // timeout only logs and re-checks rather than giving up.
        while (!mFramesBeingEncoded.empty()) {
            status_t err = mFrameCompleteCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS);
            if (err != NO_ERROR) {
                ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
                        mFramesBeingEncoded.size());
            }
        }

        stopCameraRecording();

        if (mustRestoreIdentity) {
            IPCThreadState::self()->restoreCallingIdentity(binderToken);
        }

        if (mCollectStats) {
            ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
                    mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                    mLastFrameTimestampUs - mFirstFrameTimeUs);
        }

        if (mNumGlitches > 0) {
            ALOGW("%d long delays between neighboring video frames", mNumGlitches);
        }

        // Every received frame must have been either encoded or dropped.
        CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    }

    // Release the camera outside mLock.
    releaseCamera();

    ALOGD("reset: X");
    return OK;
}
void CameraSource::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType, const sp<IMemory> &data) { ALOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs); if (!mStarted) { ALOGD("Stop recording issued. Return here."); return; } Mutex::Autolock autoLock(mLock); if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) { ALOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs); releaseOneRecordingFrame(data); return; } if (mRecPause == true) { if(!mFramesReceived.empty()) { ALOGV("releaseQueuedFrames - #Queued Frames : %d", mFramesReceived.size()); releaseQueuedFrames(); } ALOGV("release One Video Frame for Pause : %lld us", timestampUs); releaseOneRecordingFrame(data); mPauseEndTimeUs = timestampUs; return; } timestampUs -= mPauseAdjTimeUs; ALOGV("dataCallbackTimestamp: AdjTimestamp %lld us", timestampUs); if (mNumFramesReceived > 0) { CHECK(timestampUs > mLastFrameTimestampUs); if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) { ++mNumGlitches; } } // May need to skip frame or modify timestamp. Currently implemented // by the subclass CameraSourceTimeLapse. if (skipCurrentFrame(timestampUs)) { releaseOneRecordingFrame(data); return; } mLastFrameTimestampUs = timestampUs; if (mNumFramesReceived == 0) { mFirstFrameTimeUs = timestampUs; // Initial delay if (mStartTimeUs > 0) { if (timestampUs < mStartTimeUs) { // Frame was captured before recording was started // Drop it without updating the statistical data. releaseOneRecordingFrame(data); return; } mStartTimeUs = timestampUs - mStartTimeUs; } } ++mNumFramesReceived; CHECK(data != NULL && data->size() > 0); mFramesReceived.push_back(data); int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs); mFrameTimes.push_back(timeUs); ALOGV("initial delay: %lld, current time stamp: %lld", mStartTimeUs, timeUs); mFrameAvailableCondition.signal(); }