MBOOL
Pass2NodeImpl::
dequePass2()
{
    CAM_TRACE_CALL();
    MBOOL ret = MFALSE;
    QParams dequeParams;
    Vector<Input>::const_iterator iterIn;
    Vector<Output>::const_iterator iterOut;
    // MY_LOGV("type(%d) cnt %d: deque", mPass2Type, muDeqFrameCnt);
    if( !mpPostProcPipe->deque(dequeParams, PASS2_TIMEOUT) )
    {
        MY_LOGE("type(%d) pass2 cnt %d: deque fail", mPass2Type, muDeqFrameCnt);
        aee_system_exception(
            LOG_TAG,
            NULL,
            DB_OPT_DEFAULT,
            "\nCRDISPATCH_KEY:MtkCam/P1Node:ISP pass2 deque fail");
        goto lbExit;
    }

    if( !handleP2Done(dequeParams) )
    {
        MY_LOGE("handle p2 callback failed");
        goto lbExit;
    }

    ret = MTRUE;

lbExit:
    CAM_TRACE_FMT_END();
    return ret;
}
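For context, dequePass2() blocks on the post-processing pipe's deque() with PASS2_TIMEOUT and treats a timeout as a fatal pipeline error before handing the result to handleP2Done(). The snippet below is a minimal standalone sketch of that deque-with-timeout contract, built on a mutex and condition variable; ResultQueue and Result are hypothetical stand-ins, not the real IPostProcPipe implementation.

// Standalone sketch (not the real IPostProcPipe): a timed blocking dequeue of the
// kind dequePass2() relies on.
#include <chrono>
#include <condition_variable>
#include <deque>
#include <mutex>
#include <optional>

struct Result { int frameId; };

class ResultQueue {
public:
    void enqueue(Result r) {
        std::lock_guard<std::mutex> lk(mMutex);
        mQueue.push_back(r);
        mCond.notify_one();
    }

    // Returns a result if one arrives within timeoutMs, otherwise nothing
    // (the caller treats that as a deque failure, like dequePass2() does).
    std::optional<Result> deque(int timeoutMs) {
        std::unique_lock<std::mutex> lk(mMutex);
        if (!mCond.wait_for(lk, std::chrono::milliseconds(timeoutMs),
                            [this] { return !mQueue.empty(); })) {
            return std::nullopt;
        }
        Result r = mQueue.front();
        mQueue.pop_front();
        return r;
    }

private:
    std::mutex mMutex;
    std::condition_variable mCond;
    std::deque<Result> mQueue;
};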
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %" PRId64 " us", timestampUs);
    Mutex::Autolock autoLock(mLock);
#ifdef MTK_AOSP_ENHANCEMENT
#ifdef MTB_SUPPORT
    ATRACE_ONESHOT(ATRACE_ONESHOT_SPECIAL, "dataCallbackTimestamp");
#endif
    if ((!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs))
#ifdef MTK_SLOW_MOTION_VIDEO_SUPPORT
            && (mColorFormat != OMX_MTK_COLOR_FormatBitStream /*0x7F000300*/)
#endif
       ) {
        ALOGW("Drop frame at %" PRId64 "/%" PRId64 " us", timestampUs, mStartTimeUs);
#ifdef MTB_SUPPORT
        ATRACE_ONESHOT(ATRACE_ONESHOT_SPECIAL, "dropCameraFrame");
#endif
#else
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %" PRId64 "/%" PRId64 " us", timestampUs, mStartTimeUs);
#endif
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
#ifdef MTK_AOSP_ENHANCEMENT
        // Timestamps must be strictly increasing; log (and on AEE builds report)
        // a frame that goes backwards instead of asserting.
        if (timestampUs <= mLastFrameTimestampUs) {
            ALOGW("[CameraSource][dataCallbackTimestamp][Warning] current frame timestamp: %" PRId64 " <= previous frame timestamp: %" PRId64 "",
                    timestampUs, mLastFrameTimestampUs);
#ifdef HAVE_AEE_FEATURE
            if (timestampUs < mLastFrameTimestampUs)
                aee_system_exception("CRDISPATCH_KEY:Camera issue", NULL, DB_OPT_DEFAULT,
                        "\nCameraSource:current frame timestamp: %" PRId64 " < previous frame timestamp: %" PRId64 "!",
                        timestampUs, mLastFrameTimestampUs);
#endif
        }
#ifdef MTK_SLOW_MOTION_VIDEO_SUPPORT
        if (timestampUs <= mFirstFrameTimeUs + mStartTimeOffsetUs) {
            ALOGI("drop frame for directlink, timestampUs(%" PRId64 " us),mFirstFrameTimeUs(%" PRId64 " us),mStartTimeOffsetUs(%" PRId64 " us)",
                    timestampUs, mFirstFrameTimeUs, mStartTimeOffsetUs);
            releaseOneRecordingFrame(data);
            return;
        } else {
            timestampUs -= mStartTimeOffsetUs;
        }
#endif
#else
        CHECK(timestampUs > mLastFrameTimestampUs);
#endif
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
#ifdef MTK_AOSP_ENHANCEMENT
#ifdef MTK_SLOW_MOTION_VIDEO_SUPPORT
            if (mColorFormat == OMX_MTK_COLOR_FormatBitStream /*0x7F000300*/) {
                ALOGI("not drop frame for directlink, reset mStartTimeUs as first frame timestamp");
                if (timestampUs < mStartTimeUs) {
                    mStartTimeOffsetUs = mStartTimeUs - timestampUs;
                    ALOGI("mStartTimeOffsetUs = %" PRId64 "", mStartTimeOffsetUs);
                }
                mStartTimeUs = timestampUs;
            }
#endif
#endif
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
#ifdef MTK_AOSP_ENHANCEMENT
                ALOGW("timestampUs=%" PRId64 " < mStartTimeUs=%" PRId64 " drop frame",
                        timestampUs, mStartTimeUs);
#endif
                return;
            }

            // From here on mStartTimeUs holds the initial delay, i.e. how long
            // after the requested start time the first frame arrived.
            mStartTimeUs = timestampUs - mStartTimeUs;
#ifdef MTK_AOSP_ENHANCEMENT
            ALOGI("the first video frame,time offset to mStartTimeUs=%" PRId64 "", mStartTimeUs);
#endif
        }
    }

    ++mNumFramesReceived;
#ifdef MTK_AOSP_ENHANCEMENT
    // Quality adjust: when mDropRate is set, keep roughly one frame out of every
    // mDropRate received frames and release the rest.
    //if ((mDropRate != 0) && (mNumFramesReceived % mDropRate != 0)) {
    if ((mDropRate > 0) &&
            (mNumFramesReceived != int(mLastNumFramesReceived + mDropRate * mNumRemainFrameReceived + 0.5))) {
        releaseOneRecordingFrame(data);
        ++mNumFramesDropped;
        ALOGD("Quality adjust drop frame = %d", mNumFramesReceived);
        return;
    }
    // real received frame num
    ++mNumRemainFrameReceived;
#endif

#ifdef HAVE_AEE_FEATURE
    if (data == NULL || data->size() <= 0)
        aee_system_exception("CRDISPATCH_KEY:Camera issue", NULL, DB_OPT_DEFAULT,
                "\nCameraSource:dataCallbackTimestamp data error 0x%x", data.get());
#endif
    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
#ifdef MTK_AOSP_ENHANCEMENT
    if (mNumFramesReceived % LOG_INTERVAL == 1)
        ALOGI("initial delay: %" PRId64 ", current time stamp: %" PRId64 ",mFramesReceived.size()= %d,mNumFramesReceived= %d",
                mStartTimeUs, timeUs, mFramesReceived.size(), mNumFramesReceived);
#else
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
            mStartTimeUs, timeUs);
#endif
    mFrameAvailableCondition.signal();
}

bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    ALOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}
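The first-frame handling above folds the recording start time into an initial delay (mStartTimeUs = timestampUs - mStartTimeUs) and then stamps every frame relative to the first one (timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs)). The standalone sketch below walks through that arithmetic with made-up values; the variable names are local stand-ins, not CameraSource members.

// Standalone sketch: how the timestamp remapping above plays out with example values.
#include <cstdint>
#include <cstdio>

int main() {
    int64_t startTimeUs = 1000000;       // recording start time handed to the source
    int64_t firstFrameTimeUs = 1040000;  // timestamp of the first frame that arrives after start

    // At the first frame, mStartTimeUs becomes the initial delay:
    int64_t initialDelayUs = firstFrameTimeUs - startTimeUs;  // 40000 us

    // Every frame is then stamped relative to the first frame, plus that delay:
    int64_t frameTimestampUs = 1140000;  // some later camera timestamp
    int64_t timeUs = initialDelayUs + (frameTimestampUs - firstFrameTimeUs);  // 140000 us

    printf("initial delay %lld us, remapped time %lld us\n",
           (long long)initialDelayUs, (long long)timeUs);
    return 0;
}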
MBOOL
Pass2NodeImpl::
handleP2Done(QParams& rParams)
{
    CAM_TRACE_FMT_BEGIN("deqP2:%d", muDeqFrameCnt);
    MBOOL ret = MFALSE;
    Vector<Input>::const_iterator iterIn;
    Vector<Output>::const_iterator iterOut;
    vector<IImageBuffer*> vpDstBufAddr;
    //
    if( rParams.mDequeSuccess == MFALSE )
    {
        MY_LOGE("type %d pass2 cnt %d: deque fail", mPass2Type, muDeqFrameCnt);
        aee_system_exception(
            LOG_TAG,
            NULL,
            DB_OPT_DEFAULT,
            "\nCRDISPATCH_KEY:MtkCam/P1Node:ISP pass2 deque fail");
    }
    //
    MY_LOGD("type %d: cnt %d in %d out %d",
            mPass2Type, muDeqFrameCnt, rParams.mvIn.size(), rParams.mvOut.size());
    //
    if( !mpIspSyncCtrlHw->unlockHw(IspSyncControlHw::HW_PASS2) )
    {
        MY_LOGE("isp sync unlock pass2 failed");
        goto lbExit;
    }
    //
    for( iterIn = rParams.mvIn.begin(); iterIn != rParams.mvIn.end(); iterIn++ )
    {
        //MY_LOGD("In PortID(0x%08X)",portId);
        MUINT32 nodeDataType = mapToNodeDataType( iterIn->mPortID );
        handleReturnBuffer( nodeDataType, (MUINTPTR)iterIn->mBuffer, 0 );
        //
    }
    //
    vpDstBufAddr.clear();
    for( iterOut = rParams.mvOut.begin(); iterOut != rParams.mvOut.end(); iterOut++ )
    {
        MBOOL bFind = MFALSE;
        for( MUINT32 i = 0; i < vpDstBufAddr.size(); i++ )
        {
            if( vpDstBufAddr[i] == iterOut->mBuffer )
            {
                MY_LOGD("Buf(0x%X) has been posted", (MUINTPTR)iterOut->mBuffer);
                bFind = MTRUE;
                break;
            }
        }
        if( !bFind )
        {
            //MY_LOGD("Out PortID(0x%08X)",portId);
            MUINT32 nodeDataType = mapToNodeDataType( iterOut->mPortID );
            handlePostBuffer( nodeDataType, (MUINTPTR)iterOut->mBuffer, 0 );
            vpDstBufAddr.push_back(iterOut->mBuffer);
        }
    }

    {
        Mutex::Autolock lock(mLock);
        muDeqFrameCnt += rParams.mvIn.size();
        mCondDeque.broadcast();
    }

    ret = MTRUE;

lbExit:
    CAM_TRACE_FMT_END();
    return ret;
}
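handleP2Done() walks rParams.mvOut and posts each distinct output buffer exactly once, using vpDstBufAddr as a list of already-posted pointers, because the same buffer can be attached to more than one output port. The standalone sketch below shows the same duplicate-filtering pattern with plain pointers; Buffer and postBuffer() are hypothetical stand-ins, not MTK types.

// Standalone sketch: post each distinct output buffer once, even if it appears
// on several output ports.
#include <cstdio>
#include <vector>

struct Buffer { int id; };

static void postBuffer(Buffer* b) { printf("post buffer %d\n", b->id); }

int main() {
    Buffer a{1}, b{2};
    std::vector<Buffer*> outputs = { &a, &b, &a };   // same buffer on two ports

    std::vector<Buffer*> posted;                     // plays the role of vpDstBufAddr
    for (Buffer* out : outputs) {
        bool found = false;
        for (Buffer* p : posted) {
            if (p == out) { found = true; break; }   // already posted, skip
        }
        if (!found) {
            postBuffer(out);
            posted.push_back(out);
        }
    }
    return 0;
}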