// Example no. 1
// 0
// First select the K most recent reference frames + ALL key frames from history to match against
// Produces a sequence of frame pointers
// Runs one tracking step for the current image index (idxImgCur):
// detects features for the current frame, matches it against a selected
// set of historical/key frames, estimates per-pair motion and scale,
// filters the estimates, fuses them into a pose, and advances the index.
// Returns the incremented image index.
int TrackRunner::runKeyStep() {


	//printf("////////////////////////////////////////////\n");
	printf("//////////// idxImgCur: %06d ////////////\n",idxImgCur);
	//printf("////////////////////////////////////////////\n");

	TIME_BEGIN("MainLoop");

		TIME_BEGIN("FeatureDetect");
		/** TODO: consider running detection on a worker thread and placing a
		 *        wait here; the worker signals once the next frame is ready.
		 */
		// Load the current frame and detect its features.
		FeatureState* ptrCurFeature = new FeatureState(idxImgCur);
		ptrCurFeature->detect(CFG_iMaxFeatures);
		TIME_END("FeatureDetect");
		
		//showImage(ptrCurFeature);

		// Determine the set of historical frames to compare the current frame against.
		std::vector<FeatureState*> vecKeyList;
		/** TODO: may need a visual bag-of-words model to predict loop closure */
		vecKeyList = selectKeySequence(idxImgCur);

		// One motion estimate per (historical frame, current frame) pair.
		std::vector<MotionState> vecCurMotions( vecKeyList.size() );
		

		// Use FrameParser on each reference<->current pair to compute matR and matT.
		TIME_BEGIN(cv::format("Match&Motion[%d]", vecKeyList.size()));
#pragma omp parallel for
		for (int idx = 0; idx < vecKeyList.size();idx++) {
			FeatureState* ptrFeature = vecKeyList[idx];

			/** TODO: consider different filtering rules for different situations */
			// Initialize the inter-frame processor for this pair.
			FrameParser fparser(ptrFeature, ptrCurFeature);
			fparser.match(CFG_dOpticalFlowThreshold);

			// Epipolar geometry: compute the motion matR and matT.
			bool motionStatus = fparser.computeMotion(vecCurMotions[idx]);
			// NOTE(review): written on every OMP iteration (same element, shared
			// state) and overwritten with 0 below when all pairs fail —
			// presumably marks this image as successfully processed; verify.
			vecEnableIdxs[idxImgCur] = cntRunOk;
			// Motion estimation failed for this pair.
			if (motionStatus == false) {
				printf("运动参数计算失败\n");
				//TODO: when no solution exists, fall back to a default motion
				// and force a default scale.
				// The line below is only valid when iDequeNumber == 1,
				// otherwise it causes problems.
				//vecCurMotions[idx].setInited(true);
				//// use the CFG-configured value
				//vecCurMotions[idx].setScale(1.65f / CFG_dScaleRatioErrorDefault, true);
				//vecCurMotions[idx].errType.set(0);
				continue;
			}

			// Reject pairs whose rotation difference exceeds the configured limit.
			if (CFG_bIsLimitRotationDiff && limitRotationDiff(vecCurMotions[idx],CFG_dRotationDiffLimit) == false) {
				printf("旋转跳\n");
				vecCurMotions[idx].setInited(false);
				

				continue;
			}

			// Scale estimation for this motion.
			ScaleEstimator sEstimator;
			sEstimator.updateMotion(&vecCurMotions[idx]);
			double curScale = sEstimator.computeScaleTransform();

			if (curScale <= 0) {
				printf("尺度没算出来\n");
				curScale = 200;
			}
			// Limit on the scale increment.
			// If frames were skipped, a multi-interval value appears right after the
			// skip; only after that interval is an arbitrary value allowed again.
			// If several frames are skipped in a row, then... (original note truncated)
			if (limitScaleDiff(vecCurMotions[idx], curScale, CFG_dScaleInvIncreaseDiffLimit) == false) {
				vecCurMotions[idx].setInited( false );
				
				continue;
			}
			// Frame-index gap between the two images of this pair.
			int idxDelta = vecCurMotions[idx].getIdxImg(1) - vecCurMotions[idx].getIdxImg(0);
			if (CFG_iPreAverageFilter > 0) {
				// Average the last CFG_iPreAverageFilter distances from the drawer queue.
				auto& qDist = cDrawer.qDist;
				auto iter =  qDist.rbegin();
				double aver = 0.0f;
				int cnt = 0;
				for (; cnt < CFG_iPreAverageFilter && iter != qDist.rend(); iter++, cnt++) {
					printf("%d %f\n", idxImgCur, *iter);
					aver += *iter;
				}
				aver += 1.65f / curScale / idxDelta;
				aver /= cnt + 1;
				// Could constrain only increases, or only decreases:
				//if (aver < 1.65f / curScale / idxDelta) {
				//	curScale = 1.65 / aver / idxDelta;
				//}
			}

			/** TODO: a proper scale-quality evaluation is needed here */
			
			// Clamp the per-frame scale into [Bottom, Top] and flag the motion.
			if ((curScale*idxDelta < CFG_dScaleRatioLimitBottom /*/ idxDelta*/ || curScale*idxDelta > CFG_dScaleRatioLimitTop)) {
				
				printf("Scale LIMIT %d-%d:%f %f\n", vecCurMotions[idx].getIdxImg(0), vecCurMotions[idx].getIdxImg(1), curScale, 1.65f / curScale);
				if (curScale*idxDelta < CFG_dScaleRatioLimitBottom) {
					curScale = CFG_dScaleRatioLimitBottom / idxDelta;
					vecCurMotions[idx].errType.set(Const::Error::LimitSCALEPEAK);
				}
				else {
					curScale = CFG_dScaleRatioLimitTop / idxDelta;
					vecCurMotions[idx].errType.set(Const::Error::LimitSCALEPEAK);
				}

				
			}


			// Currently uses the GroundTruth motion scale, so the remaining error
			// is only in the rotation angle.
			if (CFG_bIsUseGroundTruthDistance) {
				vecCurMotions[idx].setScale(1.65f / cv::norm(pHelper.getPosition(vecCurMotions[idx].getIdxImg(1), vecCurMotions[idx].getIdxImg(0))));
				//vecCurMotions[idx].scale = 1.65f / cv::norm(pVisio.getPosition(vecCurMotions[idx].idxImg[1], vecCurMotions[idx].idxImg[0]));
			}
			else {
				vecCurMotions[idx].setScale(curScale);
			}
			//vecCurMotions[idx]._matR_ = vecCurMotions[idx].matR * vecPoses[vecCurMotions[idx].idxImg[0]].dir3;

		}
		TIME_END(cv::format("Match&Motion[%d]", vecKeyList.size()));

		// Reset the accumulated error when this is one of the first frames or the
		// previous frame was processed successfully; otherwise report the carry-over.
		if ( idxImgCur <= 2 || vecEnableIdxs[idxImgCur-1] > 0) {
			curError.set(0);
		}
		else {
			printf("Pre Error: %d\n", curError.get());
		}
			
		// Accumulate per-pair error flags into the frame-level error state.
		for (auto& motion : vecCurMotions) {
			curError.set(motion.errType, false);
		}

		// Filter vecCurMotions by rule; currently drops entries whose inited flag is false.
		int cntValid = filterMotions(vecCurMotions,0);

		// If nothing survives the filter, skip this frame.
		if (cntValid < 1) {
			delete ptrCurFeature;
			vecEnableIdxs[idxImgCur] = 0;
		}
		else {
			/** TODO: for all the valid estimates: 1) detect closure; 2) fuse the
			 *        motion states (currently just averaging, ha) */
			TIME_BEGIN("PoseEstimate");

			// Apply each valid Motion to its reference pose.
			std::vector<PoseState> vecEstiPoses;
			for (int idx = 0; idx < vecCurMotions.size(); idx++) {
				MotionState& curMotion = vecCurMotions[idx];
				if (curMotion.getInited() == true) {
					// Apply the Motion to the corresponding pose in vecPoses to get
					// a new candidate pose in vecEstiPoses.
					vecEstiPoses.push_back(vecPoses[curMotion.getIdxImg(0)].move(curMotion));

					// Add the edge to the skeleton graph.
					vecMotionLinks[curMotion.getIdxImg(1)].insert(std::make_pair(curMotion.getIdxImg(0), curMotion));
				}
			}
			

			/** TODO: a better multi-pose selection method */
			PoseState curPoseState = PoseState::calcAverage(vecEstiPoses);
			curPoseState.errType.set(curError);

			// Update the key-frame queue/list; ownership of ptrCurFeature is
			// presumably transferred here (it is only deleted on the skip path) — verify.
			updateKeyList(ptrCurFeature);
			cntRunOk++;

			vecPoses[idxImgCur] = curPoseState;

			TIME_END("PoseEstimate");
			//if (CFG_bIsLogGlobal)
			std::cout << vecPoses[idxImgCur] << std::endl;
			// Draw the current pose on the canvas, along with the GroundTruth path.
			// This modifies the data inside vecPoses, so curPoseState must not be
			// used from here on.
			cDrawer.drawCanvas(vecPoses[idxImgCur]);

			/*
			 *	Originally used to test a gradient-descent optimization method,
			 *	executed after 50 points were computed.
			 *	Now abandoned (dead code behind `false &&`).
			 */
			if (false && idxImgCur == 50) {
				printf("开始调整\n");

				Skeleton skl;
				skl.initData(vecPoses, vecMotionLinks);
				double err = skl.calcError();
				double preErr = err;
				printf("i=%d err=%f\n", -1, err);
				if (CFG_bIsLogGlobal)
				std::cout << skl.vecX[0] << std::endl;
				if (CFG_bIsLogGlobal)
				std::cout << skl.vecX[1] << std::endl;
				// Iterate gradient descent until the error stops decreasing.
				for (int i = 0; i < 1000; i++) {
					skl.calcDiff();
					skl.merge(1e-3);
					skl.fixMatrix();
					err = skl.calcError();
					
					printf("i=%d err=%f\n", i, err);
					if (CFG_bIsLogGlobal)
					std::cout << skl.vecX[0] << std::endl;
					if (CFG_bIsLogGlobal)
					std::cout << skl.vecX[1] << std::endl;
					if (preErr< err) {
						break;
					}
					preErr = err;
				}
				
				// Dump the optimized positions (log only; poses are not written back).
				for (int i = idxImgBegin + 1; i < idxImgCur; i++) {
					if ( skl.vecX[i].rows) {
						PoseState t(i);
						t.setInited( true );
						t.pos.x = skl.vecX[i].at<double>(0, 0);
						t.pos.y = skl.vecX[i].at<double>(1, 0);
						t.pos.z = skl.vecX[i].at<double>(2, 0);
						if (CFG_bIsLogGlobal)
						std::cout << t.pos << std::endl;
					}

				}
				cv::waitKey();

				printf("结束调整\n");
			}


		}

	TIME_END("MainLoop");

	// Advance to the next image index and return it.
	return ++idxImgCur;
}
/** Runs one full auto-tracking pipeline step on the given frame:
 *  FG detection -> blob tracking -> (real-tracker list sync) ->
 *  post-processing -> blob deletion -> tracker update ->
 *  new-blob detection -> trajectory generation -> trajectory analysis.
 *
 *  @param pImg  current video frame.
 *  @param pMask optional external foreground mask; used as the FG mask
 *               when no FG detector (m_pFG) is attached.
 *
 *  NOTE(review): function-local statics (timing counters, NewBlobList)
 *  make this method non-reentrant and shared across instances — verify
 *  it is only ever called from a single thread on a single tracker.
 */
void CvBlobTrackerAuto1::Process(IplImage* pImg, IplImage* pMask)
{
    int         CurBlobNum = 0;
    int         i;
    IplImage*   pFG = pMask;

    /* Bump frame counter: */
    m_FrameCount++;

    /* Optional timing log: append average frame time to m_TimesFile every 100 frames. */
    if(m_TimesFile)
    {
        static int64  TickCount = cvGetTickCount();
        static double TimeSum = 0;
        static int Count = 0;
        Count++;

        if(Count%100==0)
        {
#ifndef WINCE
            time_t ltime;
            time( &ltime );
			char* stime = ctime( &ltime );
#else
			/* WINCE does not have above POSIX functions (time,ctime) */
			const char* stime = " wince ";
#endif
            FILE* out = fopen(m_TimesFile,"at");
            double Time;
            TickCount = cvGetTickCount()-TickCount;
            Time = TickCount/FREQ;
            if(out){fprintf(out,"- %sFrame: %d ALL_TIME - %f\n",stime,Count,Time/1000);fclose(out);}

            TimeSum = 0;
            TickCount = cvGetTickCount();
        }
    }

    /* Update BG model: */
    TIME_BEGIN()

    if(m_pFG)
    {   /* If FG detector is needed: */
        m_pFG->Process(pImg);
        pFG = m_pFG->GetMask();
    }   /* If FG detector is needed. */

    TIME_END("FGDetector",-1)

    m_pFGMask = pFG; /* For external use. */

    /*if(m_pFG && m_pFG->GetParam("DebugWnd") == 1)
    {// debug foreground result
        IplImage *pFG = m_pFG->GetMask();
        if(pFG)
        {
            cvNamedWindow("FG",0);
            cvShowImage("FG", pFG);
        }
    }*/

    /* Track blobs: */
    TIME_BEGIN()
    if(m_pBT)
    {
        int i;
        m_pBT->Process(pImg, pFG);

        for(i=m_BlobList.GetBlobNum(); i>0; --i)
        {   /* Update data of tracked blob list: */
            CvBlob* pB = m_BlobList.GetBlob(i-1);
            int     BlobID = CV_BLOB_ID(pB);
            /* NOTE(review): this inner `i` shadows the loop counter above —
             * it is the tracker's blob index looked up by ID, not the list index. */
            int     i = m_pBT->GetBlobIndexByID(BlobID);
            m_pBT->ProcessBlob(i, pB, pImg, pFG);
            pB->ID = BlobID;
        }
        CurBlobNum = m_pBT->GetBlobNum();
    }
    TIME_END("BlobTracker",CurBlobNum)

    /* This part should be removed: */
    if(m_BTReal && m_pBT)
    {   /* Update blob list (detect new blob for real blob tracker): */
        int i;

        /* Add blobs the tracker knows about but the local list does not: */
        for(i=m_pBT->GetBlobNum(); i>0; --i)
        {   /* Update data of tracked blob list: */
            CvBlob* pB = m_pBT->GetBlob(i-1);
            if(pB && m_BlobList.GetBlobByID(CV_BLOB_ID(pB)) == NULL )
            {
                CvBlobTrackAuto     NewB;
                NewB.blob = pB[0];
                NewB.BadFrames = 0;
                m_BlobList.AddBlob((CvBlob*)&NewB);
            }
        }   /* Next blob. */

        /* Delete blobs: drop local entries the tracker no longer has. */
        for(i=m_BlobList.GetBlobNum(); i>0; --i)
        {   /* Update tracked-blob list: */
            CvBlob* pB = m_BlobList.GetBlob(i-1);
            if(pB && m_pBT->GetBlobByID(CV_BLOB_ID(pB)) == NULL )
            {
                m_BlobList.DelBlob(i-1);
            }
        }   /* Next blob. */
    }   /* Update bloblist. */


    TIME_BEGIN()
    if(m_pBTPostProc)
    {   /* Post-processing module: */
        int i;
        /* Feed all tracked blobs to the post-processor... */
        for(i=m_BlobList.GetBlobNum(); i>0; --i)
        {   /* Update tracked-blob list: */
            CvBlob* pB = m_BlobList.GetBlob(i-1);
            m_pBTPostProc->AddBlob(pB);
        }
        m_pBTPostProc->Process();

        /* ...then pull the smoothed results back into tracker and list. */
        for(i=m_BlobList.GetBlobNum(); i>0; --i)
        {   /* Update tracked-blob list: */
            CvBlob* pB = m_BlobList.GetBlob(i-1);
            int     BlobID = CV_BLOB_ID(pB);
            CvBlob* pBN = m_pBTPostProc->GetBlobByID(BlobID);

            if(pBN && m_UsePPData && pBN->w >= CV_BLOB_MINW && pBN->h >= CV_BLOB_MINH)
            {   /* Set new data for tracker: */
                m_pBT->SetBlobByID(BlobID, pBN );
            }

            if(pBN)
            {   /* Update blob list with results from postprocessing: */
                pB[0] = pBN[0];
            }
        }
    }   /* Post-processing module. */

    TIME_END("PostProcessing",CurBlobNum)

    /* Blob deleter (experimental and simple): */
    TIME_BEGIN()
    if(pFG)
    {   /* Blob deleter: */
        int i;
        if(!m_BTReal)for(i=m_BlobList.GetBlobNum();i>0;--i)
        {   /* Check all blobs on list: */
            CvBlobTrackAuto* pB = (CvBlobTrackAuto*)(m_BlobList.GetBlob(i-1));
            int     Good = 0;
            int     w=pFG->width;
            int     h=pFG->height;
            CvRect  r = CV_BLOB_RECT(pB);
            CvMat   mat;
            double  aver = 0;
            double  area = CV_BLOB_WX(pB)*CV_BLOB_WY(pB);
            /* Clip the blob rect to the mask bounds: */
            if(r.x < 0){r.width += r.x;r.x = 0;}
            if(r.y < 0){r.height += r.y;r.y = 0;}
            if(r.x+r.width>=w){r.width = w-r.x-1;}
            if(r.y+r.height>=h){r.height = h-r.y-1;}

            if(r.width > 4 && r.height > 4 && r.x < w && r.y < h &&
                r.x >=0 && r.y >=0 &&
                r.x+r.width < w && r.y+r.height < h && area > 0)
            {
                aver = cvSum(cvGetSubRect(pFG,&mat,r)).val[0] / area;
                /* if mask in blob area exists then its blob OK*/
                /* i.e. blob is "Good" when >10% of its box is foreground. */
                if(aver > 0.1*255)Good = 1;
            }
            else
            {
                /* Degenerate/out-of-bounds rect: penalize twice as fast. */
                pB->BadFrames+=2;
            }

            if(Good)
            {
                pB->BadFrames = 0;
            }
            else
            {
                pB->BadFrames++;
            }
        }   /* Next blob: */

        /* Check error count: remove blobs bad for more than 3 consecutive frames. */
        for(i=0; i<m_BlobList.GetBlobNum(); ++i)
        {
            CvBlobTrackAuto* pB = (CvBlobTrackAuto*)m_BlobList.GetBlob(i);

            if(pB->BadFrames>3)
            {   /* Delete such objects */
                /* from tracker...     */
                m_pBT->DelBlobByID(CV_BLOB_ID(pB));

                /* ... and from local list: */
                m_BlobList.DelBlob(i);
                i--;  /* compensate for the removal so the next blob is not skipped */
            }
        }   /* Check error count for next blob. */
    }   /*  Blob deleter. */

    TIME_END("BlobDeleter",m_BlobList.GetBlobNum())

    /* Update blobs: */
    TIME_BEGIN()
    if(m_pBT)
        m_pBT->Update(pImg, pFG);
    TIME_END("BlobTrackerUpdate",CurBlobNum)

    /* Detect new blob: only after the FG model has had m_FGTrainFrames to train. */
    TIME_BEGIN()
    if(!m_BTReal && m_pBD && pFG && (m_FrameCount > m_FGTrainFrames) )
    {   /* Detect new blob: */
        /* NOTE(review): static — reused (and shared) across calls/instances. */
        static CvBlobSeq    NewBlobList;
        CvBlobTrackAuto     NewB;

        NewBlobList.Clear();

        if(m_pBD->DetectNewBlob(pImg, pFG, &NewBlobList, &m_BlobList))
        {   /* Add new blob to tracker and blob list: */
            int i;
            IplImage* pMask = pFG;

            /*if(0)if(NewBlobList.GetBlobNum()>0 && pFG )
            {// erode FG mask (only for FG_0 and MS1||MS2)
                pMask = cvCloneImage(pFG);
                cvErode(pFG,pMask,NULL,2);
            }*/

            for(i=0; i<NewBlobList.GetBlobNum(); ++i)
            {
                CvBlob* pBN = NewBlobList.GetBlob(i);
                pBN->ID = m_NextBlobID;

                /* Only accept detections of at least the minimum blob size: */
                if(pBN && pBN->w >= CV_BLOB_MINW && pBN->h >= CV_BLOB_MINH)
                {
                    CvBlob* pB = m_pBT->AddBlob(pBN, pImg, pMask );
                    if(pB)
                    {
                        NewB.blob = pB[0];
                        NewB.BadFrames = 0;
                        m_BlobList.AddBlob((CvBlob*)&NewB);
                        m_NextBlobID++;
                    }
                }
            }   /* Add next blob from list of detected blob. */

            if(pMask != pFG) cvReleaseImage(&pMask);

        }   /* Create and add new blobs and trackers. */

    }   /*  Detect new blob. */

    TIME_END("BlobDetector",-1)

    TIME_BEGIN()
    if(m_pBTGen)
    {   /* Run track generator: */
        for(i=m_BlobList.GetBlobNum(); i>0; --i)
        {   /* Update data of tracked blob list: */
            CvBlob* pB = m_BlobList.GetBlob(i-1);
            m_pBTGen->AddBlob(pB);
        }
        m_pBTGen->Process(pImg, pFG);
    }   /* Run track generator: */
    TIME_END("TrajectoryGeneration",-1)

    TIME_BEGIN()
    if(m_pBTA)
    {   /* Trajectory analysis module: */
        int i;
        for(i=m_BlobList.GetBlobNum(); i>0; i--)
            m_pBTA->AddBlob(m_BlobList.GetBlob(i-1));

        m_pBTA->Process(pImg, pFG);

    }   /* Trajectory analysis module. */

    TIME_END("TrackAnalysis",m_BlobList.GetBlobNum())

} /* CvBlobTrackerAuto1::Process */