Example No. 1
	void Kalman_CV::eval() {
		// Logger::log("kalman_cv eval", meas, Logger::LOGLEVEL_INFO);
		// predict
		cvKalmanPredict(cvkal, 0);
		// correct
		cvKalmanCorrect(cvkal, meas);
	}
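
eval() above only runs the predict/correct pair; the filter (cvkal) and the measurement vector (meas) are created elsewhere in the class. A minimal sketch of how such a filter could be initialized with the legacy C API is shown below, assuming a four-dimensional (x, y, vx, vy) state and a two-dimensional position measurement; the noise values are placeholders, not taken from the original code.

// Hypothetical setup for the constant-velocity filter driven by Kalman_CV::eval().
// Assumes state = (x, y, vx, vy) and measurement = (x, y); constants are illustrative.
#include <opencv/cv.h>   // legacy C API (cvCreateKalman, cvKalmanPredict, cvKalmanCorrect)
#include <cstring>

CvKalman* create_cv_kalman_sketch()
{
    CvKalman* kal = cvCreateKalman(4, 2, 0);

    // Constant-velocity transition matrix F (unit time step).
    const float F[16] = { 1, 0, 1, 0,
                          0, 1, 0, 1,
                          0, 0, 1, 0,
                          0, 0, 0, 1 };
    std::memcpy(kal->transition_matrix->data.fl, F, sizeof(F));

    // H is 2x4 with ones on the diagonal: only the position is observed.
    cvSetIdentity(kal->measurement_matrix, cvRealScalar(1));
    cvSetIdentity(kal->process_noise_cov, cvRealScalar(1e-4));
    cvSetIdentity(kal->measurement_noise_cov, cvRealScalar(1e-1));
    cvSetIdentity(kal->error_cov_post, cvRealScalar(1));
    return kal;
}
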
void CvBlobTrackPredictKalman::Update(CvBlob* pBlob)
{
    float   Z[4];
    CvMat   Zmat = cvMat(4,1,CV_32F,Z);
    m_BlobPredict = pBlob[0];

    if(m_Frame < 2)
    {   /* First call: */
        m_pKalman->state_post->data.fl[0+4] = CV_BLOB_X(pBlob)-m_pKalman->state_post->data.fl[0];
        m_pKalman->state_post->data.fl[1+4] = CV_BLOB_Y(pBlob)-m_pKalman->state_post->data.fl[1];
        if(m_pKalman->DP>6)
        {
            m_pKalman->state_post->data.fl[2+4] = CV_BLOB_WX(pBlob)-m_pKalman->state_post->data.fl[2];
            m_pKalman->state_post->data.fl[3+4] = CV_BLOB_WY(pBlob)-m_pKalman->state_post->data.fl[3];
        }
        m_pKalman->state_post->data.fl[0] = CV_BLOB_X(pBlob);
        m_pKalman->state_post->data.fl[1] = CV_BLOB_Y(pBlob);
        m_pKalman->state_post->data.fl[2] = CV_BLOB_WX(pBlob);
        m_pKalman->state_post->data.fl[3] = CV_BLOB_WY(pBlob);
    }
    else
    {   /* Subsequent calls: */
        Z[0] = CV_BLOB_X(pBlob);
        Z[1] = CV_BLOB_Y(pBlob);
        Z[2] = CV_BLOB_WX(pBlob);
        Z[3] = CV_BLOB_WY(pBlob);
        cvKalmanCorrect(m_pKalman,&Zmat);
    }

    cvKalmanPredict(m_pKalman,0);

    m_Frame++;

}   /* Update. */
Example No. 3
void kalman::kal_predict(int x, int y)
{
	// predict, then feed the new (x, y) measurement to the filter
	const CvMat *prediction = cvKalmanPredict(cvkalman, 0);
	CvPoint predict_pt = cvPoint((int)prediction->data.fl[0], (int)prediction->data.fl[1]);

	measurement->data.fl[0] = (float)x;
	measurement->data.fl[1] = (float)y;

	const CvMat *correct = cvKalmanCorrect(cvkalman, measurement);

	// visualize: tracked rectangle, predicted point (green), measurement (red), corrected state (blue)
	cvSet(img, cvScalar(255, 255, 255, 0));
	cvRectangle(img, cvPoint(rect.x, rect.y), cvPoint(rect.x + rect.width, rect.y + rect.height), cvScalar(255, 0, 0));
	cvCircle(img, predict_pt, 5, CV_RGB(0, 255, 0), 3);
	cvCircle(img, cvPoint(x, y), 5, CV_RGB(255, 0, 0), 3);
	cvCircle(img, cvPoint((int)correct->data.fl[0], (int)correct->data.fl[1]), 5, CV_RGB(0, 0, 255), 3);
	//cvShowImage("kalman", img);
}
Example No. 4
float ofxCvKalman::correct(float next){

	//state->data.fl[0]=next;

	/* predict point position */
	const CvMat* prediction = cvKalmanPredict( kalman, 0 );
	//float predict = prediction->data.fl[0];


	/* generate measurement */
	measurement->data.fl[0]=next;


	/* adjust Kalman filter state */
	const CvMat* correction = cvKalmanCorrect( kalman, measurement );


	return correction->data.fl[0];

}
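
ofxCvKalman wraps a one-dimensional correction; the construction of the wrapped filter is not shown in this excerpt. Below is a self-contained sketch of the same predict-then-correct cycle written directly against the legacy C API, with a 2-state (value, velocity) model and made-up input values; the noise settings are assumptions.

// Stand-alone sketch of the 1-D smoothing idea behind ofxCvKalman::correct():
// state = (value, velocity), measurement = value. All constants are illustrative.
#include <opencv/cv.h>
#include <cstdio>
#include <cstring>

int main()
{
    CvKalman* kalman = cvCreateKalman(2, 1, 0);
    const float F[] = { 1, 1, 0, 1 };                          // constant-velocity model
    std::memcpy(kalman->transition_matrix->data.fl, F, sizeof(F));
    cvSetIdentity(kalman->measurement_matrix, cvRealScalar(1));
    cvSetIdentity(kalman->process_noise_cov, cvRealScalar(1e-4));
    cvSetIdentity(kalman->measurement_noise_cov, cvRealScalar(1e-1));
    cvSetIdentity(kalman->error_cov_post, cvRealScalar(1));

    CvMat* z = cvCreateMat(1, 1, CV_32FC1);
    const float samples[] = { 10.2f, 10.9f, 12.1f, 12.8f, 14.1f };  // fake noisy measurements
    for (int i = 0; i < 5; ++i) {
        cvKalmanPredict(kalman, 0);                            // predict...
        z->data.fl[0] = samples[i];
        const CvMat* post = cvKalmanCorrect(kalman, z);        // ...then correct, as above
        std::printf("measured %.2f -> filtered %.2f\n", samples[i], post->data.fl[0]);
    }
    cvReleaseMat(&z);
    cvReleaseKalman(&kalman);
    return 0;
}
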
CvBlob* CvBlobTrackPostProcKalman::Process(CvBlob* pBlob)
{
    CvBlob* pBlobRes = &m_Blob;
    float   Z[4];
    CvMat   Zmat = cvMat(4,1,CV_32F,Z);
    m_Blob = pBlob[0];

    if(m_Frame < 2)
    {   /* First call: */
        m_pKalman->state_post->data.fl[0+4] = CV_BLOB_X(pBlob)-m_pKalman->state_post->data.fl[0];
        m_pKalman->state_post->data.fl[1+4] = CV_BLOB_Y(pBlob)-m_pKalman->state_post->data.fl[1];
        if(m_pKalman->DP>6)
        {
            m_pKalman->state_post->data.fl[2+4] = CV_BLOB_WX(pBlob)-m_pKalman->state_post->data.fl[2];
            m_pKalman->state_post->data.fl[3+4] = CV_BLOB_WY(pBlob)-m_pKalman->state_post->data.fl[3];
        }
        m_pKalman->state_post->data.fl[0] = CV_BLOB_X(pBlob);
        m_pKalman->state_post->data.fl[1] = CV_BLOB_Y(pBlob);
        m_pKalman->state_post->data.fl[2] = CV_BLOB_WX(pBlob);
        m_pKalman->state_post->data.fl[3] = CV_BLOB_WY(pBlob);
    }
    else
    {   /* Subsequent calls: */
        cvKalmanPredict(m_pKalman,0);
        Z[0] = CV_BLOB_X(pBlob);
        Z[1] = CV_BLOB_Y(pBlob);
        Z[2] = CV_BLOB_WX(pBlob);
        Z[3] = CV_BLOB_WY(pBlob);
        cvKalmanCorrect(m_pKalman,&Zmat);
        cvMatMulAdd(m_pKalman->measurement_matrix, m_pKalman->state_post, NULL, &Zmat);
        CV_BLOB_X(pBlobRes) = Z[0];
        CV_BLOB_Y(pBlobRes) = Z[1];
//        CV_BLOB_WX(pBlobRes) = Z[2];
//        CV_BLOB_WY(pBlobRes) = Z[3];
    }
    m_Frame++;
    return pBlobRes;
}
 virtual CvBlob* Process(CvBlob* pBlob, IplImage* /*pImg*/, IplImage* /*pImgFG*/ = NULL)
 {
     CvBlob* pBlobRes = &m_Blob;
     float   Z[4];
     CvMat   Zmat = cvMat(4,1,CV_32F,Z);
     m_Blob = pBlob[0];
     if(m_Frame < 2)
     {/* first call */
         m_pKalman->state_post->data.fl[0+4] = CV_BLOB_X(pBlob)-m_pKalman->state_post->data.fl[0];
         m_pKalman->state_post->data.fl[1+4] = CV_BLOB_Y(pBlob)-m_pKalman->state_post->data.fl[1];
         if(m_pKalman->DP>6)
         {
             m_pKalman->state_post->data.fl[2+4] = CV_BLOB_WX(pBlob)-m_pKalman->state_post->data.fl[2];
             m_pKalman->state_post->data.fl[3+4] = CV_BLOB_WY(pBlob)-m_pKalman->state_post->data.fl[3];
         }
         m_pKalman->state_post->data.fl[0] = CV_BLOB_X(pBlob);
         m_pKalman->state_post->data.fl[1] = CV_BLOB_Y(pBlob);
         m_pKalman->state_post->data.fl[2] = CV_BLOB_WX(pBlob);
         m_pKalman->state_post->data.fl[3] = CV_BLOB_WY(pBlob);
         memcpy(m_pKalman->state_pre->data.fl,m_pKalman->state_post->data.fl,sizeof(float)*STATE_NUM);
     }
     else
     {/* another call */
         Z[0] = CV_BLOB_X(pBlob);
         Z[1] = CV_BLOB_Y(pBlob);
         Z[2] = CV_BLOB_WX(pBlob);
         Z[3] = CV_BLOB_WY(pBlob);
         cvKalmanCorrect(m_pKalman,&Zmat);
         cvKalmanPredict(m_pKalman,0);
         cvMatMulAdd(m_pKalman->measurement_matrix, m_pKalman->state_pre, NULL, &Zmat);
         CV_BLOB_X(pBlobRes) = Z[0];
         CV_BLOB_Y(pBlobRes) = Z[1];
         CV_BLOB_WX(pBlobRes) = Z[2];
         CV_BLOB_WY(pBlobRes) = Z[3];
     }
     m_Frame++;
     return pBlobRes;
 }
Example No. 7
//Predict the next position and size
CvRect KalmanFilter::predictionReport(CvRect &rect)
{

	const CvMat* prediction = cvKalmanPredict( m_pKalmanFilter, 0 );

	// adjust Kalman filter state
	float width=(float)rect.width;
	float height=(float)rect.height;
	measurement->data.fl[0]=(float)(rect.x+(width)/2);
	measurement->data.fl[1]=(float)(rect.y+(height)/2);
	measurement->data.fl[2]=width;
	measurement->data.fl[3]=height;	
	cvKalmanCorrect( m_pKalmanFilter, measurement );
	CvRect rect1;

	rect1.width=(int)m_pKalmanFilter->state_post->data.fl[2];
	rect1.height=(int)m_pKalmanFilter->state_post->data.fl[3];
	rect1.x=(int)(m_pKalmanFilter->state_post->data.fl[0]-(rect.width/2));
	rect1.y=(int)(m_pKalmanFilter->state_post->data.fl[1]-(rect.height/2));

	return rect1;

}
Example No. 8
// --------------------------------------------------------------------------
// main(Number of arguments, Argument values)
// Description  : This is the entry point of the program.
// Return value : SUCCESS:0  ERROR:-1
// --------------------------------------------------------------------------
int main(int argc, char **argv)
{
    // AR.Drone class
    ARDrone ardrone;

    // Initialize
    if (!ardrone.open()) {
        printf("Failed to initialize.\n");
        return -1;
    }

    // Kalman filter
    CvKalman *kalman = cvCreateKalman(4, 2);

    // Setup
    cvSetIdentity(kalman->measurement_matrix, cvRealScalar(1.0));
    cvSetIdentity(kalman->process_noise_cov, cvRealScalar(1e-5));
    cvSetIdentity(kalman->measurement_noise_cov, cvRealScalar(0.1));
    cvSetIdentity(kalman->error_cov_post, cvRealScalar(1.0));

    // Linear system
    kalman->DynamMatr[0]  = 1.0; kalman->DynamMatr[1]  = 0.0; kalman->DynamMatr[2]  = 1.0; kalman->DynamMatr[3]  = 0.0; 
    kalman->DynamMatr[4]  = 0.0; kalman->DynamMatr[5]  = 1.0; kalman->DynamMatr[6]  = 0.0; kalman->DynamMatr[7]  = 1.0; 
    kalman->DynamMatr[8]  = 0.0; kalman->DynamMatr[9]  = 0.0; kalman->DynamMatr[10] = 1.0; kalman->DynamMatr[11] = 0.0; 
    kalman->DynamMatr[12] = 0.0; kalman->DynamMatr[13] = 0.0; kalman->DynamMatr[14] = 0.0; kalman->DynamMatr[15] = 1.0; 
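    // The 16 values above fill the 4x4 transition matrix row by row:
    //     F = [ 1 0 1 0 ]    x_{k+1}  = x_k  + vx_k
    //         [ 0 1 0 1 ]    y_{k+1}  = y_k  + vy_k
    //         [ 0 0 1 0 ]    vx_{k+1} = vx_k
    //         [ 0 0 0 1 ]    vy_{k+1} = vy_k
    // i.e. a constant-velocity model with the frame interval folded into the unit step.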

    // Thresholds
    int minH = 0, maxH = 255;
    int minS = 0, maxS = 255;
    int minV = 0, maxV = 255;

    // Create a window
    cvNamedWindow("binalized");
    cvCreateTrackbar("H max", "binalized", &maxH, 255);
    cvCreateTrackbar("H min", "binalized", &minH, 255);
    cvCreateTrackbar("S max", "binalized", &maxS, 255);
    cvCreateTrackbar("S min", "binalized", &minS, 255);
    cvCreateTrackbar("V max", "binalized", &maxV, 255);
    cvCreateTrackbar("V min", "binalized", &minV, 255);
    cvResizeWindow("binalized", 0, 0);

    // Main loop
    while (1) {
        // Key input
        int key = cvWaitKey(1);
        if (key == 0x1b) break;

        // Update
        if (!ardrone.update()) break;

        // Get an image
        IplImage *image = ardrone.getImage();

        // HSV image
        IplImage *hsv = cvCloneImage(image);
        cvCvtColor(image, hsv, CV_RGB2HSV_FULL);

        // Binarized image
        IplImage *binalized = cvCreateImage(cvGetSize(image), IPL_DEPTH_8U, 1);

        // Binarize (threshold the HSV image, not the raw frame)
        CvScalar lower = cvScalar(minH, minS, minV);
        CvScalar upper = cvScalar(maxH, maxS, maxV);
        cvInRangeS(hsv, lower, upper, binalized);

        // Show result
        cvShowImage("binalized", binalized);

        // De-noising
        cvMorphologyEx(binalized, binalized, NULL, NULL, CV_MOP_CLOSE);
 
        // Detect contours
        CvSeq *contour = NULL, *maxContour = NULL;
        CvMemStorage *contourStorage = cvCreateMemStorage();
        cvFindContours(binalized, contourStorage, &contour, sizeof(CvContour), CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);

        // Find largest contour
        double max_area = 0.0;
        while (contour) {
            double area = fabs(cvContourArea(contour));
            if ( area > max_area) {
                maxContour = contour;
                max_area = area;
            }
            contour = contour->h_next;
        }

        // Object detected
        if (maxContour) {
            // Draw a contour
            cvZero(binalized);
            cvDrawContours(binalized, maxContour, cvScalarAll(255), cvScalarAll(255), 0, CV_FILLED);

            // Calculate the moments
            CvMoments moments;
            cvMoments(binalized, &moments, 1);
            int my = (int)(moments.m01/moments.m00);
            int mx = (int)(moments.m10/moments.m00);

            // Measurements
            float m[] = { (float)mx, (float)my };
            CvMat measurement = cvMat(2, 1, CV_32FC1, m);

            // Correct phase
            const CvMat *correction = cvKalmanCorrect(kalman, &measurement);
        }

        // Prediction phase
        const CvMat *prediction = cvKalmanPredict(kalman);

        // Display the image
        cvCircle(image, cvPointFrom32f(cvPoint2D32f(prediction->data.fl[0], prediction->data.fl[1])), 10, CV_RGB(0,255,0));
        cvShowImage("camera", image);

        // Release the memories
        cvReleaseImage(&hsv);
        cvReleaseImage(&binalized);
        cvReleaseMemStorage(&contourStorage);
    }

    // Release the kalman filter
    cvReleaseKalman(&kalman);

    // See you
    ardrone.close();

    return 0;
}
Example No. 9
// ######################################################################
Point2D<int> VisualTracker::trackObjects(const Image<byte>& grey)
{
  Point2D<int> targetLoc(-1,-1);

  if (!itsTracking)
    return targetLoc;

#ifdef HAVE_OPENCV
  if (itsCurrentNumPoints > 0)
  {
    IplImage* pGrey = img2ipl(itsPreviousGreyImg);
    IplImage* cGrey = img2ipl(grey);

    //flags = CV_LKFLOW_INITIAL_GUESSES;

    cvCalcOpticalFlowPyrLK(pGrey, cGrey, itsPreviousPyramid, itsCurrentPyramid,
        itsPreviousPoints, itsCurrentPoints,
        itsCurrentNumPoints, //number of feature points
        cvSize(itsTrackWindowSize.getVal(),itsTrackWindowSize.getVal()), //search window size in each pyramid
        3, // maximal pyramid level number
        itsTrackStatus,
        itsTrackError,
        cvTermCriteria(CV_TERMCRIT_ITER
          |CV_TERMCRIT_EPS,
          20,0.03), itsTrackFlags);

    itsTrackFlags = CV_LKFLOW_PYR_A_READY | CV_LKFLOW_PYR_B_READY;

    cvReleaseImageHeader(&pGrey);
    cvReleaseImageHeader(&cGrey);


    //show track points
    int k, i;
    for(i = k = 0; i<itsCurrentNumPoints; i++)
    {
      if (!itsTrackStatus[i])
        continue;

      itsCurrentPoints[k++] = itsCurrentPoints[i];
      //LINFO("Error %i: %f", i, itsTrackError[i]);
      if (itsTrackError[i] < 2000)
      {
        targetLoc.i = std::min(grey.getWidth()-1, std::max(0, (int)itsCurrentPoints[i].x));
        targetLoc.j = std::min(grey.getHeight()-1, std::max(0, (int)itsCurrentPoints[i].y));
        ASSERT(grey.coordsOk(targetLoc));
      }
    }
    itsCurrentNumPoints = k;

  }

  IplImage *swap_temp;
  CV_SWAP( itsPreviousPyramid, itsCurrentPyramid, swap_temp );
  CvPoint2D32f* swap_points;
  CV_SWAP( itsPreviousPoints, itsCurrentPoints, swap_points );

  itsPreviousGreyImg = grey;


  if (itsUseKalman && grey.coordsOk(targetLoc))
  {
    float Z[2];
    CvMat Zmat = cvMat(2,1,CV_32F, Z);

    Z[0] = targetLoc.i;
    Z[1] = targetLoc.j;

    cvKalmanCorrect(itsKalman, &Zmat);
    const CvMat* prediction = cvKalmanPredict(itsKalman, 0);

    // project the predicted state into measurement space (written to Zmat, unused here)
    cvMatMulAdd(itsKalman->measurement_matrix, itsKalman->state_pre, NULL, &Zmat);

    targetLoc.i = (int)prediction->data.fl[0];
    targetLoc.j = (int)prediction->data.fl[1];
  }

#endif

  return targetLoc;
}
Example No. 10
//=========================================
CvRect camKalTrack(IplImage* frame, camshift_kalman_tracker& camKalTrk) {
//=========================================
	if (!frame) {
		printf("Input frame empty!\n");
		return camKalTrk.trackWindow;
	}

	cvCopy(frame, camKalTrk.image, 0);
	cvCvtColor(camKalTrk.image, camKalTrk.hsv, CV_BGR2HSV); // BGR to HSV

	if (camKalTrk.trackObject) {
		int _vmin = vmin, _vmax = vmax;
		cvInRangeS(camKalTrk.hsv, cvScalar(0, smin, MIN(_vmin,_vmax), 0), cvScalar(180, 256, MAX(_vmin,_vmax), 0), camKalTrk.mask); // MASK
		cvSplit(camKalTrk.hsv, camKalTrk.hue, 0, 0, 0); //  HUE
		if (camKalTrk.trackObject < 0) {
			float max_val = 0.f;
			boundaryCheck(camKalTrk.originBox, frame->width, frame->height);
			cvSetImageROI(camKalTrk.hue, camKalTrk.originBox); // for ROI
			cvSetImageROI(camKalTrk.mask, camKalTrk.originBox); // for camKalTrk.mask
			cvCalcHist(&camKalTrk.hue, camKalTrk.hist, 0, camKalTrk.mask); // hue histogram inside the ROI
			cvGetMinMaxHistValue(camKalTrk.hist, 0, &max_val, 0, 0);
			cvConvertScale(camKalTrk.hist->bins, camKalTrk.hist->bins, max_val ? 255. / max_val : 0., 0); // scale bins to [0,255]
			cvResetImageROI(camKalTrk.hue); // remove ROI
			cvResetImageROI(camKalTrk.mask);
			camKalTrk.trackWindow = camKalTrk.originBox;
			camKalTrk.trackObject = 1;
			camKalTrk.lastpoint = camKalTrk.predictpoint = cvPoint(camKalTrk.trackWindow.x + camKalTrk.trackWindow.width / 2,
					camKalTrk.trackWindow.y + camKalTrk.trackWindow.height / 2);
			getCurrState(camKalTrk.kalman, camKalTrk.lastpoint, camKalTrk.predictpoint); // seed the filter with the current state
		}
		// state is (x, y, vx, vy); cvKalmanPredict() returns kalman->state_pre
		camKalTrk.prediction = cvKalmanPredict(camKalTrk.kalman, 0);

		camKalTrk.predictpoint = cvPoint(cvRound(camKalTrk.prediction->data.fl[0]), cvRound(camKalTrk.prediction->data.fl[1]));

		camKalTrk.trackWindow = cvRect(camKalTrk.predictpoint.x - camKalTrk.trackWindow.width / 2, camKalTrk.predictpoint.y
				- camKalTrk.trackWindow.height / 2, camKalTrk.trackWindow.width, camKalTrk.trackWindow.height);

		camKalTrk.trackWindow = checkRectBoundary(cvRect(0, 0, frame->width, frame->height), camKalTrk.trackWindow);

		camKalTrk.searchWindow = cvRect(camKalTrk.trackWindow.x - region, camKalTrk.trackWindow.y - region, camKalTrk.trackWindow.width + 2
				* region, camKalTrk.trackWindow.height + 2 * region);

		camKalTrk.searchWindow = checkRectBoundary(cvRect(0, 0, frame->width, frame->height), camKalTrk.searchWindow);

		cvSetImageROI(camKalTrk.hue, camKalTrk.searchWindow);
		cvSetImageROI(camKalTrk.mask, camKalTrk.searchWindow);
		cvSetImageROI(camKalTrk.backproject, camKalTrk.searchWindow);

		cvCalcBackProject( &camKalTrk.hue, camKalTrk.backproject, camKalTrk.hist ); // back project

		cvAnd(camKalTrk.backproject, camKalTrk.mask, camKalTrk.backproject, 0);

		camKalTrk.trackWindow = cvRect(region, region, camKalTrk.trackWindow.width, camKalTrk.trackWindow.height);

		if (camKalTrk.trackWindow.height > 5 && camKalTrk.trackWindow.width > 5) {
			// calling CAMSHIFT
			cvCamShift(camKalTrk.backproject, camKalTrk.trackWindow, cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1),
					&camKalTrk.trackComp, &camKalTrk.trackBox);

			/*cvMeanShift( camKalTrk.backproject, camKalTrk.trackWindow,
			 cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
			 &camKalTrk.trackComp);*/
		}
		else {
			camKalTrk.trackComp.rect.x = 0;
			camKalTrk.trackComp.rect.y = 0;
			camKalTrk.trackComp.rect.width = 0;
			camKalTrk.trackComp.rect.height = 0;
		}

		cvResetImageROI(camKalTrk.hue);
		cvResetImageROI(camKalTrk.mask);
		cvResetImageROI(camKalTrk.backproject);
		camKalTrk.trackWindow = camKalTrk.trackComp.rect;
		camKalTrk.trackWindow = cvRect(camKalTrk.trackWindow.x + camKalTrk.searchWindow.x, camKalTrk.trackWindow.y
				+ camKalTrk.searchWindow.y, camKalTrk.trackWindow.width, camKalTrk.trackWindow.height);

		camKalTrk.measurepoint = cvPoint(camKalTrk.trackWindow.x + camKalTrk.trackWindow.width / 2, camKalTrk.trackWindow.y
				+ camKalTrk.trackWindow.height / 2);
		camKalTrk.realposition->data.fl[0] = camKalTrk.measurepoint.x;
		camKalTrk.realposition->data.fl[1] = camKalTrk.measurepoint.y;
		camKalTrk.realposition->data.fl[2] = camKalTrk.measurepoint.x - camKalTrk.lastpoint.x;
		camKalTrk.realposition->data.fl[3] = camKalTrk.measurepoint.y - camKalTrk.lastpoint.y;
		camKalTrk.lastpoint = camKalTrk.measurepoint;//keep the current real position

		//measurement x,y
		cvMatMulAdd( camKalTrk.kalman->measurement_matrix/*2x4*/, camKalTrk.realposition/*4x1*/,/*measurementstate*/0, camKalTrk.measurement );
		cvKalmanCorrect(camKalTrk.kalman, camKalTrk.measurement);

		cvRectangle(frame, cvPoint(camKalTrk.trackWindow.x, camKalTrk.trackWindow.y), cvPoint(camKalTrk.trackWindow.x
				+ camKalTrk.trackWindow.width, camKalTrk.trackWindow.y + camKalTrk.trackWindow.height), CV_RGB(255,128,0), 4, 8, 0);
	}
	// set new selection if it exists
	if (camKalTrk.selectObject && camKalTrk.selection.width > 0 && camKalTrk.selection.height > 0) {
		cvSetImageROI(camKalTrk.image, camKalTrk.selection);
		cvXorS(camKalTrk.image, cvScalarAll(255), camKalTrk.image, 0);
		cvResetImageROI(camKalTrk.image);
	}

	return camKalTrk.trackWindow;
}
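
getCurrState() above is a project-specific helper that is not included in this excerpt. A hedged sketch of what it presumably does, given the (x, y, vx, vy) state used by the rest of the function, is:

// Hypothetical reconstruction of getCurrState(): seed the posterior state of the
// (x, y, vx, vy) filter from the last observed point and the predicted point.
// The real implementation may differ; this only illustrates the expected effect.
static void getCurrState_sketch(CvKalman* kalman, CvPoint lastpoint, CvPoint predictpoint)
{
	kalman->state_post->data.fl[0] = (float)predictpoint.x;                  // x
	kalman->state_post->data.fl[1] = (float)predictpoint.y;                  // y
	kalman->state_post->data.fl[2] = (float)(predictpoint.x - lastpoint.x);  // vx
	kalman->state_post->data.fl[3] = (float)(predictpoint.y - lastpoint.y);  // vy
}
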
Example No. 11
KalmanPoint *NewKalman(PointSeq *Point_New, int ID, int frame_count, int contourArea) {
	const float A[] = { 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1 };
	const float H[] = { 1, 0, 0, 0, 0, 1, 0, 0 };

	KalmanPoint *Kalmanfilter;
	Kalmanfilter = (KalmanPoint *) malloc(sizeof(KalmanPoint));
	CvKalman *Kalman = cvCreateKalman(4, 2, 0);

	float measure[2] =
			{ (float) Point_New->Point.x, (float) Point_New->Point.y };
	CvMat measurement = cvMat(2, 1, CV_32FC1, measure);

	// initialize the Kalman filter
	memcpy(Kalman->transition_matrix->data.fl, A, sizeof(A));
	memcpy(Kalman->measurement_matrix->data.fl, H, sizeof(H));
	cvSetIdentity(Kalman->error_cov_post, cvRealScalar(10));
	cvSetIdentity(Kalman->process_noise_cov, cvRealScalar(1e-5));
	cvSetIdentity(Kalman->measurement_noise_cov, cvRealScalar(1e-1));

	// for the first time, use the measured point as the state_post
	cvZero(Kalman->state_post);
	cvmSet(Kalman->state_post, 0, 0, Point_New->Point.x);
	cvmSet(Kalman->state_post, 1, 0, Point_New->Point.y);

	// use Kalman  filter to predict the position of the centroid
	const CvMat *prediction = cvKalmanPredict(Kalman, 0);
	// use the measurement to correct the position
	const CvMat *correction = cvKalmanCorrect(Kalman, &measurement);

	/*
	 // test code
	 printf("measurement %f,%f\n",cvmGet(&measurement,0,0),cvmGet(&measurement, 1, 0));
	 printf("prediction %f,%f\n",cvmGet(prediction, 0, 0),cvmGet(prediction, 1, 0));
	 printf("correction %f,%f\ndelta %f,%f\n",cvmGet(correction, 0, 0),cvmGet(correction, 1, 0),
	 cvmGet(correction, 2, 0), cvmGet(correction, 3, 0));
	 */
	Kalmanfilter->Point_now = Point_New->Point;
	Kalmanfilter->Point_pre = Point_New->Point;
	Kalmanfilter->firstPoint = Point_New->Point;
	Kalmanfilter->firstFrame = frame_count; // added 25 January 2016: speed measurement
	Kalmanfilter->lastFrame = frame_count; // added 25 January 2016: speed measurement
	Kalmanfilter->Kalman = Kalman;
	Kalmanfilter->ID = Point_New->ID = ID;
	Kalmanfilter->contourArea=contourArea;
	/*
	//value 1 = car, value 2 = truck, value 3 = undefined
	if (Kalmanfilter->contourArea >= 4800
			&& Kalmanfilter->contourArea <= 19900) {
		Kalmanfilter->jenis = 1;

	} else if (Kalmanfilter->contourArea > 19900) {
		Kalmanfilter->jenis = 2;

	} else {
		Kalmanfilter->jenis = 3;
	}
	*/
	//value 1 = motorcycle; value 2 = car; value 3 = medium truck; value 4 = large truck; value 0 = undefined;
	if (Kalmanfilter->contourArea >= 4000
			&& Kalmanfilter->contourArea <= 7000) {
		Kalmanfilter->jenis = 2;
	} else if (Kalmanfilter->contourArea > 7000
			&& Kalmanfilter->contourArea <= 12000) {
		Kalmanfilter->jenis = 3;
	} else if (Kalmanfilter->contourArea > 12000) {
		Kalmanfilter->jenis = 4;
	} else {
		Kalmanfilter->jenis = 0;
	}
	//Kalmanfilter->jenis= 0; // added 26 January 2016
	Kalmanfilter->Loss = 0;

	return Kalmanfilter;
}
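
NewKalman() only handles track creation. A hypothetical per-frame companion that keeps an existing KalmanPoint up to date might look like the sketch below; the function and its policy (reset Loss, record the last frame) are assumptions based on the fields used above, not part of the original source.

/* Hypothetical per-frame update for an existing track created by NewKalman().
   Field usage follows the struct above; this is a sketch, not the original code. */
void UpdateKalman_sketch(KalmanPoint *kp, CvPoint observed, int frame_count) {
	float measure[2] = { (float) observed.x, (float) observed.y };
	CvMat measurement = cvMat(2, 1, CV_32FC1, measure);

	cvKalmanPredict(kp->Kalman, 0);                                       /* time update */
	const CvMat *correction = cvKalmanCorrect(kp->Kalman, &measurement);  /* measurement update */

	kp->Point_pre = kp->Point_now;
	kp->Point_now = cvPoint(cvRound(correction->data.fl[0]),
	                        cvRound(correction->data.fl[1]));
	kp->lastFrame = frame_count;
	kp->Loss = 0;
}
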
Example No. 12
int main(int argc, char **argv)
{

	// Initialize, create Kalman Filter object, window, random number
	// generator etc.
	//
	cvNamedWindow("Kalman", 1);
	CvRandState rng;
	cvRandInit(&rng, 0, 1, -1, CV_RAND_UNI);

	IplImage *img = cvCreateImage(cvSize(500, 500), 8, 3);
	CvKalman *kalman = cvCreateKalman(2, 1, 0);
	// state is (phi, delta_phi) - angle and angular velocity
	// Initialize with random guess.
	//
	CvMat *x_k = cvCreateMat(2, 1, CV_32FC1);
	cvRandSetRange(&rng, 0, 0.1, 0);
	rng.disttype = CV_RAND_NORMAL;
	cvRand(&rng, x_k);

	// process noise
	//
	CvMat *w_k = cvCreateMat(2, 1, CV_32FC1);

	// measurements, only one parameter for angle
	//
	CvMat *z_k = cvCreateMat(1, 1, CV_32FC1);
	cvZero(z_k);

	// The transition matrix 'F' describes the relationship between
	// model parameters at step k and at step k+1 (this is
	// the "dynamics" in our model).
	//
	const float F[] = { 1, 1, 0, 1 };
	memcpy(kalman->transition_matrix->data.fl, F, sizeof(F));
	// Initialize other Kalman filter parameters.
	//
	cvSetIdentity(kalman->measurement_matrix, cvRealScalar(1));
	cvSetIdentity(kalman->process_noise_cov, cvRealScalar(1e-5));
	cvSetIdentity(kalman->measurement_noise_cov, cvRealScalar(1e-1));
	cvSetIdentity(kalman->error_cov_post, cvRealScalar(1));

	// choose random initial state
	//
	cvRand(&rng, kalman->state_post);

	while (1) {
		// predict point position
		const CvMat *y_k = cvKalmanPredict(kalman, 0);

		// generate measurement (z_k)
		//
		cvRandSetRange(&rng,
					   0, sqrt(kalman->measurement_noise_cov->data.fl[0]), 0);
		cvRand(&rng, z_k);
		cvMatMulAdd(kalman->measurement_matrix, x_k, z_k, z_k);
		// plot points (eg convert to planar co-ordinates and draw)
		//
		cvZero(img);
		cvCircle(img, phi2xy(z_k), 4, CVX_YELLOW);	// observed state
		cvCircle(img, phi2xy(y_k), 4, CVX_WHITE, 2);	// "predicted" state
		cvCircle(img, phi2xy(x_k), 4, CVX_RED);	// real state
		cvShowImage("Kalman", img);
		// adjust Kalman filter state
		//
		cvKalmanCorrect(kalman, z_k);

		// Apply the transition matrix 'F' (eg, step time forward)
		// and also apply the "process" noise w_k.
		//
		cvRandSetRange(&rng, 0, sqrt(kalman->process_noise_cov->data.fl[0]), 0);
		cvRand(&rng, w_k);
		cvMatMulAdd(kalman->transition_matrix, x_k, w_k, x_k);

		// exit if user hits 'Esc'
		if (cvWaitKey(100) == 27)
			break;
	}

	return 0;
}
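
The loop above uses a phi2xy() helper and CVX_* colour constants that are defined outside this excerpt. A plausible sketch of those definitions, assuming the 500x500 canvas created above, is:

// Assumed definitions for the helpers used above (not part of the excerpt).
#include <math.h>

#define CVX_RED    CV_RGB(0xff, 0x00, 0x00)
#define CVX_YELLOW CV_RGB(0xff, 0xff, 0x00)
#define CVX_WHITE  CV_RGB(0xff, 0xff, 0xff)

// Map the angle phi (first entry of the state/measurement vector) onto a circle
// centred in the 500x500 image; the radius is an arbitrary choice.
static CvPoint phi2xy(const CvMat* mat)
{
	return cvPoint(cvRound(250 + 200 * cos(mat->data.fl[0])),
	               cvRound(250 - 200 * sin(mat->data.fl[0])));
}
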
Example No. 13
// do the Kalman estimation..given the measurement (x,y) 
void TargetObject::kalmanEstimateState(float xx, float yy, float zz, float huee, float weightt){

	/// attention: make sure that the covariance min and max are kept (otherwise we run into numerical instability problems)

	//std::cout << " --------------kalmanEstimateState with " << xx << ", " << yy << ", " << huee << ", " << weightt << std::endl;
	/// set timer 
	//timer.setActiveTime();	
	
	// save old state
	xOld = x;
	yOld = y;
	zOld = z;
	vxOld = vx;
	vyOld = vy;
	vzOld = vz;
	hueOld = hue;
	weightOld = weight;


	// pointer to the state data
	CvMat* M = kalman->state_post;
	int   step  = M->step/sizeof(float);
	float *data = M->data.fl;


	// measurement: (x, y, z, hue, weight)
	// pointer to the measurement data
	float* mesdata = measurement->data.fl;
	int   stepm  = measurement->step/sizeof(float);
	(mesdata+0*stepm)[0] = xx;
	(mesdata+1*stepm)[0] = yy;
	(mesdata+2*stepm)[0] = zz;
	(mesdata+3*stepm)[0] = huee;
	(mesdata+4*stepm)[0] = weightt;

	cvKalmanCorrect( kalman, measurement );
	
	float* postdata = kalman->state_post->data.fl;
	x = (postdata+0*step)[0];
	y = (postdata+1*step)[0];
	z = (postdata+2*step)[0];
	vx = (postdata+3*step)[0];
	vy = (postdata+4*step)[0];
	vz = (postdata+5*step)[0];
	hue = (postdata+9*step)[0];
	weight = (postdata+10*step)[0];


	if (isnan(x)){
		x = xOld;
		std::cout << "kalmanUpdateState: x is nan" << std::endl;
	}
	if (isnan(y)){
		y = yOld;
		std::cout << "kalmanUpdateState: y is nan" << std::endl;
	}
	if (isnan(z)){
		z = zOld;
		std::cout << "kalmanUpdateState: z is nan" << std::endl;
	}
	if (isnan(vx)){
		vx = vxOld;
		std::cout << "kalmanUpdateState: vx is nan" << std::endl;
	}
	if (isnan(vy)){
		vy = vyOld;
		std::cout << "kalmanUpdateState: vy is nan" << std::endl;
	}
	if (isnan(vz)){
		vz = vzOld;
		std::cout << "kalmanUpdateState: vz is nan" << std::endl;
	}
	
	if (isnan(hue)){
		hue = hueOld;
		std::cout << "kalmanUpdateState: hue is nan" << std::endl;
	}
	if (isnan(weight)){
		std::cout << "kalmanUpdateState: weight is nan" << std::endl;
		weight = weightOld;
	}

	//std::cout << "id: " << id << ", " << x << ", " << y <<  ", " << hue << ", " << weight << std::endl; 
	//std::cout << "id = " << id << ", norm cov = " << cvNorm(kalman->error_cov_post) << std::endl;
	double curNorm = cvNorm(kalman->error_cov_post);
	if (curNorm > 17.0){
		estimationAdvanced = true;
	}


	// 22.0 and 17.0 before
	if ((curNorm > 22.0 ) || ((estimationAdvanced) && (curNorm < 18.0)))
	{

		estimationAdvanced = false;
		//std::cout << std::endl << std::endl << "************************************ resetting  " << std::endl << std::endl; 

		/// set the new x and y to the data
		// set the current state
		CvMat* M = kalman->state_post;
		int   step  = M->step/sizeof(float);
		float *data = M->data.fl;

		(data+0*step)[0] = xx;
		(data+1*step)[0] = yy;
		(data+2*step)[0] = zz;
		(data+3*step)[0] = zz;
		(data+4*step)[0] = 1.0;
		(data+5*step)[0] = 1.0;
		(data+6*step)[0] = 1.0;
		(data+7*step)[0] = 1.0; 
		(data+8*step)[0] = 1.0;
		(data+9*step)[0] = huee; 
		(data+10*step)[0] = weightt; 


		cvSetIdentity( kalman->error_cov_post, cvRealScalar(1.0));

		cvCopy(kalman->state_post,kalman->state_pre );

		cvSetIdentity( kalman->process_noise_cov, cvRealScalar(5) );
		cvSetIdentity( kalman->measurement_noise_cov, cvRealScalar(5.0) );
		

		/// for computation of the validation gate
		xaxis = 100.;
		yaxis = 100.;
		growth_factor = 0.1;

			
	}



	/// testing ID from hues
// 	if ((hue < 12.0) && (newHue) && (id ==0)){
// 		std::cout << "recovery green = " << tempTimer.getInactiveTime() << std::endl;
// 		 tempTimer.setActiveTime();
// 		newHue = false;
// 	}
// 	if ((hue > 15.0) && (newHue) && (id ==1)){
// 		std::cout << "recovery red = " << tempTimer.getInactiveTime() << std::endl;
// 		 tempTimer.setActiveTime();
// 		newHue = false;
// 	}

}
int main(int argc, char** argv)
{
    const float A[] = { 1, 1, 0, 1 }; // state transition matrix
    
    IplImage* img = cvCreateImage( cvSize(500,500), 8, 3 ); // image used for display
    CvKalman* kalman = cvCreateKalman( 2, 1, 0 ); // CvKalman structure: 2-D state vector, 1-D measurement vector, no control input
    CvMat* state = cvCreateMat( 2, 1, CV_32FC1 ); // state variable (phi, delta_phi)
    CvMat* process_noise = cvCreateMat( 2, 1, CV_32FC1 ); // 2x1 single-channel CV_32FC1 float matrix for the process noise
    CvMat* measurement = cvCreateMat( 1, 1, CV_32FC1 ); // measurement variable
    CvRNG rng = cvRNG(-1); // initialize the random number generator
    char code = -1;

    cvZero( measurement ); // zero the measurement matrix
    cvNamedWindow( "Kalman", 1 );

    for(;;)
    {   // fill the state array with normally distributed random numbers
        cvRandArr( &rng, state, CV_RAND_NORMAL, cvRealScalar(0), cvRealScalar(0.1) ); // random initial state
        memcpy( kalman->transition_matrix->data.fl, A, sizeof(A)); // initialize the state-transition matrix F

        // cvSetIdentity(): set every off-diagonal element to 0 and every diagonal element to the given scalar.
        // The posterior state (used before the first prediction step) is initialized to a random value below.
        cvSetIdentity( kalman->measurement_matrix, cvRealScalar(1) ); // measurement matrix H
        cvSetIdentity( kalman->process_noise_cov, cvRealScalar(1e-5) ); // process noise covariance Q
        cvSetIdentity( kalman->measurement_noise_cov, cvRealScalar(1e-1) ); // measurement noise covariance R
        cvSetIdentity( kalman->error_cov_post, cvRealScalar(1)); // posterior error covariance
        cvRandArr( &rng, kalman->state_post, CV_RAND_NORMAL, cvRealScalar(0), cvRealScalar(0.1) ); // random corrected (posterior) state

        // run the prediction/correction loop on the simulated dynamic system
        
        for(;;)
        {
            #define calc_point(angle)                                      \
                cvPoint( cvRound(img->width/2 + img->width/3*cos(angle)),  \
                         cvRound(img->height/2 - img->width/3*sin(angle))) 

            float state_angle = state->data.fl[0];
            CvPoint state_pt = calc_point(state_angle);
            
            const CvMat* prediction = cvKalmanPredict( kalman, 0 ); // predict the value at the next time step; control input is 0
            float predict_angle = prediction->data.fl[0];
            CvPoint predict_pt = calc_point(predict_angle);
            
            float measurement_angle;
            CvPoint measurement_pt;

            cvRandArr( &rng, measurement, CV_RAND_NORMAL, cvRealScalar(0),
                       cvRealScalar(sqrt(kalman->measurement_noise_cov->data.fl[0])) );

            /* generate measurement */
            cvMatMulAdd( kalman->measurement_matrix, state, measurement, measurement );
            // cvMatMulAdd(src1, src2, src3, dst) computes dst = src1*src2 + src3

            measurement_angle = measurement->data.fl[0];
            measurement_pt = calc_point(measurement_angle);
            
            // Next: feed the Kalman filter the latest measurement, then generate process noise
            // and multiply the state by the transition matrix F (plus that noise) to complete one iteration.
            /* plot points */
            #define draw_cross( center, color, d )                                 \
                cvLine( img, cvPoint( center.x - d, center.y - d ),                \
                             cvPoint( center.x + d, center.y + d ), color, 1, CV_AA, 0); \
                cvLine( img, cvPoint( center.x + d, center.y - d ),                \
                             cvPoint( center.x - d, center.y + d ), color, 1, CV_AA, 0 )

            cvZero( img );
            // draw with the cross macro defined above
            draw_cross( state_pt, CV_RGB(255,255,255), 3 ); // white: true state point
            draw_cross( measurement_pt, CV_RGB(255,0,0), 3 ); // red: measurement point
            draw_cross( predict_pt, CV_RGB(0,255,0), 3 ); // green: predicted point
            cvLine( img, state_pt, measurement_pt, CV_RGB(255,0,0), 3, CV_AA, 0 );
            cvLine( img, state_pt, predict_pt, CV_RGB(255,255,0), 3, CV_AA, 0 );
            
            cvKalmanCorrect( kalman, measurement ); // correct the filter with the new measurement

            cvRandArr( &rng, process_noise, CV_RAND_NORMAL, cvRealScalar(0),
                       cvRealScalar(sqrt(kalman->process_noise_cov->data.fl[0]))); // normally distributed process noise
            cvMatMulAdd( kalman->transition_matrix, state, process_noise, state );

            cvShowImage( "Kalman", img );
            // on a key press, break to the outer loop and reinitialize; the initial state may change, so the rate of motion may change
            code = (char) cvWaitKey( 100 );
            if( code > 0 )
                break;
        }
        if( code == 27 || code == 'q' || code == 'Q' )
            break;
    }
    
    cvDestroyWindow("Kalman");

    return 0;
}
Example No. 15
int main(int argc, char** argv)
{
    /* A matrix data */
    const float A[] = { 1, 1, 0, 1 };

    IplImage* img = cvCreateImage( cvSize(500,500), 8, 3 );
    CvKalman* kalman = cvCreateKalman( 2, 1, 0 );
    /* state is (phi, delta_phi) - angle and angle increment */
    CvMat* state = cvCreateMat( 2, 1, CV_32FC1 );
    CvMat* process_noise = cvCreateMat( 2, 1, CV_32FC1 );
    /* only phi (angle) is measured */
    CvMat* measurement = cvCreateMat( 1, 1, CV_32FC1 );
    CvRandState rng;
    int code = -1;

    cvRandInit( &rng, 0, 1, -1, CV_RAND_UNI );

    cvZero( measurement );
    cvNamedWindow( "Kalman", 1 );

    for(;;)
    {
        cvRandSetRange( &rng, 0, 0.1, 0 );
        rng.disttype = CV_RAND_NORMAL;

        cvRand( &rng, state );

        memcpy( kalman->transition_matrix->data.fl, A, sizeof(A));
        cvSetIdentity( kalman->measurement_matrix, cvRealScalar(1) );
        cvSetIdentity( kalman->process_noise_cov, cvRealScalar(1e-5) );
        cvSetIdentity( kalman->measurement_noise_cov, cvRealScalar(1e-1) );
        cvSetIdentity( kalman->error_cov_post, cvRealScalar(1));
        /* choose random initial state */
        cvRand( &rng, kalman->state_post );

        rng.disttype = CV_RAND_NORMAL;

        for(;;)
        {
            #define calc_point(angle)                                      \
                cvPoint( cvRound(img->width/2 + img->width/3*cos(angle)),  \
                         cvRound(img->height/2 - img->width/3*sin(angle)))

            float state_angle = state->data.fl[0];
            CvPoint state_pt = calc_point(state_angle);

            /* predict point position */
            const CvMat* prediction = cvKalmanPredict( kalman, 0 );
            float predict_angle = prediction->data.fl[0];
            CvPoint predict_pt = calc_point(predict_angle);
            float measurement_angle;
            CvPoint measurement_pt;

            cvRandSetRange( &rng,
                            0,
                            sqrt(kalman->measurement_noise_cov->data.fl[0]),
                            0 );
            cvRand( &rng, measurement );

            /* generate measurement */
            cvMatMulAdd( kalman->measurement_matrix, state, measurement, measurement );

            measurement_angle = measurement->data.fl[0];
            measurement_pt = calc_point(measurement_angle);

            /* plot points */
            #define draw_cross( center, color, d )                        \
                cvLine( img, cvPoint( center.x - d, center.y - d ),       \
                             cvPoint( center.x + d, center.y + d ),       \
                             color, 1, 0 );                               \
                cvLine( img, cvPoint( center.x + d, center.y - d ),       \
                             cvPoint( center.x - d, center.y + d ),       \
                             color, 1, 0 )

            cvZero( img );
            draw_cross( state_pt, CV_RGB(255,255,255), 3 );
            draw_cross( measurement_pt, CV_RGB(255,0,0), 3 );
            draw_cross( predict_pt, CV_RGB(0,255,0), 3 );
            cvLine( img, state_pt, predict_pt, CV_RGB(255,255,0), 3, 0 );

            /* adjust Kalman filter state */
            cvKalmanCorrect( kalman, measurement );

            cvRandSetRange( &rng,
                            0,
                            sqrt(kalman->process_noise_cov->data.fl[0]),
                            0 );
            cvRand( &rng, process_noise );
            cvMatMulAdd( kalman->transition_matrix,
                         state,
                         process_noise,
                         state );

            cvShowImage( "Kalman", img );
            code = cvWaitKey( 100 );

            if( code > 0 ) /* break current simulation by pressing a key */
                break;
        }
        if( code == 27 ) /* exit by ESCAPE */
            break;
    }

    return 0;
}
Example No. 16
void boxtrackproperties::
correct(const cv::Mat &m) { // m: measurement, cv::Mat_<float>(5,1)
  CvMat _z = CvMat(m);      // wrap the cv::Mat header as a legacy CvMat (no data copy)
  cvKalmanCorrect(k, &_z);
}
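
A hypothetical call site is sketched below; the 5x1 measurement layout (centre x, centre y, width, height, angle) and the use of a cv::RotatedRect are assumptions based only on the comment above, and boxtrackproperties is the class already shown.

// Hypothetical usage sketch; the measurement layout is an assumption.
#include <opencv2/core/core.hpp>

static void feed_box_measurement(boxtrackproperties &tracker, const cv::RotatedRect &box)
{
    cv::Mat_<float> z(5, 1);
    z << box.center.x, box.center.y, box.size.width, box.size.height, box.angle;
    tracker.correct(z);   // forwards to cvKalmanCorrect() via the wrapper above
}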