void blobCallback(const cmvision::Blobs::ConstPtr& msg) {

	geometry_msgs::Point output;

	//kalman prediction
	if (keepPredicting) {
		pre = kf.predict();
		output.x = pre.at<float>(0);
		output.y = pre.at<float>(1);
		output.z = pre.at<float>(2);
		pub.publish(output);
		printf("Kalman prediction: (%.3f, %.3f, %.3f)\n", output.x, output.y, output.z);
	}

	//logically an if statement; while lets us break out early
	while (msg->blob_count > 0) {

		double largeArea = 0, centerX = -1, centerY = -1;

		for (unsigned int i = 0; i < msg->blob_count; i++){
					
			if(msg->blobs[i].area > largeArea && msg->blobs[i].area > IGNORE_THRESHOLD) {
				largeArea = msg->blobs[i].area;
				centerX = msg->blobs[i].x;
				centerY = msg->blobs[i].y;
			}

		}

		//if we didn't find a big enough blob, pretend we saw nothing.
		if (largeArea < 1) break;
		keepPredicting = true;

		//calculate relative position -- (x,y) are relative to the center of the camera image; z is a distance estimate derived from the blob area
		float x = centerX - CAMERA_CENTER_X;
		float y = CAMERA_CENTER_Y - centerY;
		float z = TARGET_SIZE / largeArea;

		printf("Blob area %.0f at (%.0f,%.0f) - correcting with (%.3f, %.3f, %.3f).\n", largeArea, centerX, centerY, x, y, z);

		//update kalman filter with new measurement
		meas.at<float>(0) = x;
		meas.at<float>(1) = y;
		meas.at<float>(2) = z;
		kf.correct(meas);

		return;
		
	}

	//no blobs, or too small to care.
	printf("No blobs.\n");
	keepPredicting = false;

}
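The callback above leans on state defined elsewhere in the node. A minimal sketch of that assumed setup follows; the names (kf, meas, pre, pub, keepPredicting and the constants) come from the callback itself, while the constant-velocity model and all numeric values are assumptions, not taken from the original node.

// Sketch of the assumed globals for blobCallback (all values are placeholders).
#include <ros/ros.h>
#include <geometry_msgs/Point.h>
#include <opencv2/video/tracking.hpp>

static const double IGNORE_THRESHOLD = 100.0;   // assumed blob-area cutoff
static const float CAMERA_CENTER_X = 320.0f;    // assumed 640x480 image
static const float CAMERA_CENTER_Y = 240.0f;
static const float TARGET_SIZE = 10000.0f;      // assumed area-to-distance scale

static cv::KalmanFilter kf(6, 3, 0, CV_32F);    // state: x, y, z and their velocities
static cv::Mat meas(3, 1, CV_32F);
static cv::Mat pre;
static ros::Publisher pub;
static bool keepPredicting = false;

void initFilter() {
	// per-callback constant-velocity model (dt folded into the velocity states)
	kf.transitionMatrix = (cv::Mat_<float>(6, 6) <<
		1,0,0,1,0,0,
		0,1,0,0,1,0,
		0,0,1,0,0,1,
		0,0,0,1,0,0,
		0,0,0,0,1,0,
		0,0,0,0,0,1);
	cv::setIdentity(kf.measurementMatrix);                        // observe x, y, z
	cv::setIdentity(kf.processNoiseCov, cv::Scalar::all(1e-4));   // assumption
	cv::setIdentity(kf.measurementNoiseCov, cv::Scalar::all(1e-2)); // assumption
	cv::setIdentity(kf.errorCovPost, cv::Scalar::all(1));
}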
 cv::Point updatePrediction()
 {
     cv::Mat prediction = mKF.predict();
     mPrediction = cv::Point( prediction.at<float>(0), prediction.at<float>( 1 ) );
     
     return mPrediction;
 }
Example #3
  int kalman_find(cv::Mat& meas, vector<cv::Rect>& ballsBox, bool& found,
                  cv::KalmanFilter& kf, cv::Mat& state){
	// Precondition: ballsBox is non-empty; the first box is taken as the target.
	meas.at<float>(0) = ballsBox[0].x + ballsBox[0].width / 2;
	meas.at<float>(1) = ballsBox[0].y + ballsBox[0].height / 2;
	meas.at<float>(2) = (float)ballsBox[0].width;
	meas.at<float>(3) = (float)ballsBox[0].height;

	if (!found) // First detection!
	  {
		// >>>> Initialization
		kf.errorCovPre.at<float>(0) = 1; // px
		kf.errorCovPre.at<float>(7) = 1; // px
		kf.errorCovPre.at<float>(14) = 1;
		kf.errorCovPre.at<float>(21) = 1;
		kf.errorCovPre.at<float>(28) = 1; // px
		kf.errorCovPre.at<float>(35) = 1; // px

		state.at<float>(0) = meas.at<float>(0);
		state.at<float>(1) = meas.at<float>(1);
		state.at<float>(2) = 0;
		state.at<float>(3) = 0;
		state.at<float>(4) = meas.at<float>(2);
		state.at<float>(5) = meas.at<float>(3);

		kf.statePost = state; // seed the filter; otherwise the initial state is never used
		// <<<< Initialization
		found = true;
	  }
	else
		kf.correct(meas); // Kalman correction

	//cout << "Measure matrix:" << endl << meas << endl;
	return 0;
  }
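kalman_find writes a four-value measurement (center x, center y, width, height) against a six-value state, which implies the classic ball-tracking layout. A sketch of the filter construction it appears to assume; the dt handling and the noise magnitudes are assumptions:

// Assumed companion setup for kalman_find / kalman_if_found:
// state = [x, y, vx, vy, w, h], measurement = [x, y, w, h].
cv::KalmanFilter kf(6, 4, 0, CV_32F);
cv::Mat state(6, 1, CV_32F);
cv::Mat meas(4, 1, CV_32F);

// Transition matrix starts as identity; write dt into the (0,2) and (1,3)
// entries each frame before calling predict().
cv::setIdentity(kf.transitionMatrix);

// Measurement matrix picks x, y, w, h out of the state vector.
kf.measurementMatrix = cv::Mat::zeros(4, 6, CV_32F);
kf.measurementMatrix.at<float>(0,0) = 1.0f;
kf.measurementMatrix.at<float>(1,1) = 1.0f;
kf.measurementMatrix.at<float>(2,4) = 1.0f;
kf.measurementMatrix.at<float>(3,5) = 1.0f;

cv::setIdentity(kf.processNoiseCov, cv::Scalar::all(1e-2));      // assumption
cv::setIdentity(kf.measurementNoiseCov, cv::Scalar::all(1e-1));  // assumption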
void initKalmanFilter(cv::KalmanFilter &KF, int nStates, int nMeasurements, int nInputs, double dt)
{
  KF.init(nStates, nMeasurements, nInputs, CV_64F);                 // init Kalman Filter
  cv::setIdentity(KF.processNoiseCov, cv::Scalar::all(8e-6));       // set process noise
  cv::setIdentity(KF.measurementNoiseCov, cv::Scalar::all(6e-5));   // set measurement noise
  cv::setIdentity(KF.errorCovPost, cv::Scalar::all(1));             // error covariance
  /* DYNAMIC MODEL */
  //  [1 0 0 dt  0  0 dt2   0   0 0 0 0  0  0  0   0   0   0]
  //  [0 1 0  0 dt  0   0 dt2   0 0 0 0  0  0  0   0   0   0]
  //  [0 0 1  0  0 dt   0   0 dt2 0 0 0  0  0  0   0   0   0]
  //  [0 0 0  1  0  0  dt   0   0 0 0 0  0  0  0   0   0   0]
  //  [0 0 0  0  1  0   0  dt   0 0 0 0  0  0  0   0   0   0]
  //  [0 0 0  0  0  1   0   0  dt 0 0 0  0  0  0   0   0   0]
  //  [0 0 0  0  0  0   1   0   0 0 0 0  0  0  0   0   0   0]
  //  [0 0 0  0  0  0   0   1   0 0 0 0  0  0  0   0   0   0]
  //  [0 0 0  0  0  0   0   0   1 0 0 0  0  0  0   0   0   0]
  //  [0 0 0  0  0  0   0   0   0 1 0 0 dt  0  0 dt2   0   0]
  //  [0 0 0  0  0  0   0   0   0 0 1 0  0 dt  0   0 dt2   0]
  //  [0 0 0  0  0  0   0   0   0 0 0 1  0  0 dt   0   0 dt2]
  //  [0 0 0  0  0  0   0   0   0 0 0 0  1  0  0  dt   0   0]
  //  [0 0 0  0  0  0   0   0   0 0 0 0  0  1  0   0  dt   0]
  //  [0 0 0  0  0  0   0   0   0 0 0 0  0  0  1   0   0  dt]
  //  [0 0 0  0  0  0   0   0   0 0 0 0  0  0  0   1   0   0]
  //  [0 0 0  0  0  0   0   0   0 0 0 0  0  0  0   0   1   0]
  //  [0 0 0  0  0  0   0   0   0 0 0 0  0  0  0   0   0   1]
  // position
  KF.transitionMatrix.at<double>(0,3) = dt;
  KF.transitionMatrix.at<double>(1,4) = dt;
  KF.transitionMatrix.at<double>(2,5) = dt;
  KF.transitionMatrix.at<double>(3,6) = dt;
  KF.transitionMatrix.at<double>(4,7) = dt;
  KF.transitionMatrix.at<double>(5,8) = dt;
  KF.transitionMatrix.at<double>(0,6) = 0.5*pow(dt,2);
  KF.transitionMatrix.at<double>(1,7) = 0.5*pow(dt,2);
  KF.transitionMatrix.at<double>(2,8) = 0.5*pow(dt,2);
  // orientation
  KF.transitionMatrix.at<double>(9,12) = dt;
  KF.transitionMatrix.at<double>(10,13) = dt;
  KF.transitionMatrix.at<double>(11,14) = dt;
  KF.transitionMatrix.at<double>(12,15) = dt;
  KF.transitionMatrix.at<double>(13,16) = dt;
  KF.transitionMatrix.at<double>(14,17) = dt;
  KF.transitionMatrix.at<double>(9,15) = 0.5*pow(dt,2);
  KF.transitionMatrix.at<double>(10,16) = 0.5*pow(dt,2);
  KF.transitionMatrix.at<double>(11,17) = 0.5*pow(dt,2);
  /* MEASUREMENT MODEL */
  //  [1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]
  //  [0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]
  //  [0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]
  //  [0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0]
  //  [0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0]
  //  [0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0]
  KF.measurementMatrix.at<double>(0,0) = 1;  // x
  KF.measurementMatrix.at<double>(1,1) = 1;  // y
  KF.measurementMatrix.at<double>(2,2) = 1;  // z
  KF.measurementMatrix.at<double>(3,9) = 1;  // roll
  KF.measurementMatrix.at<double>(4,10) = 1; // pitch
  KF.measurementMatrix.at<double>(5,11) = 1; // yaw
}
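A call matching the model above would look like this; the state and measurement counts follow directly from the matrices drawn in the comments, while the dt value is an assumption:

cv::KalmanFilter KF;
int nStates = 18;        // position/velocity/acceleration for translation and rotation
int nMeasurements = 6;   // x, y, z, roll, pitch, yaw
int nInputs = 0;         // no control action
double dt = 0.125;       // assumed time step between measurements
initKalmanFilter(KF, nStates, nMeasurements, nInputs, dt);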
Example #5
cv::Mat kalmanPredict(cv::KalmanFilter &kf, cv::Mat control)
{
	cv::Mat prediction = kf.predict(control);
	//cv::Mat prediction = kf.predict(); // variant without a control input
	kf.statePre.copyTo(kf.statePost);
	kf.errorCovPre.copyTo(kf.errorCovPost);
	return prediction;
}
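Note on the two copyTo calls: OpenCV's predict() extrapolates from statePost, so without refreshing statePost a second predict() in a row would not advance the state. Recent OpenCV versions already perform this pre-to-post copy inside predict() to handle missed measurements; the explicit copies here make that behavior visible and guarantee it on older releases.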
void updateKalmanFilter( cv::KalmanFilter &KF, cv::Mat &measurement, cv::Mat &translation_estimated, cv::Mat &rotation_estimated )
{
  // First predict, to update the internal statePre variable
  cv::Mat prediction = KF.predict();
  // The "correct" phase that is going to use the predicted value and our measurement
  cv::Mat estimated = KF.correct(measurement);

  // Estimated translation
  translation_estimated.at<double>(0) = estimated.at<double>(0);
  translation_estimated.at<double>(1) = estimated.at<double>(1);
  translation_estimated.at<double>(2) = estimated.at<double>(2);
  // Estimated euler angles
  cv::Mat eulers_estimated(3, 1, CV_64F);
  eulers_estimated.at<double>(0) = estimated.at<double>(9);
  eulers_estimated.at<double>(1) = estimated.at<double>(10);
  eulers_estimated.at<double>(2) = estimated.at<double>(11);
  // Convert estimated Euler angles to a rotation matrix
  rotation_estimated = euler2rot(eulers_estimated);
}
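The 6x1 measurement consumed here pairs with the measurement matrix set in initKalmanFilter: rows 0-2 carry translation, rows 3-5 the Euler angles. A sketch of the packing step; rot2euler is assumed to exist alongside the euler2rot helper used above:

// Hypothetical helper packing a measured pose into the 6x1 measurement vector.
void fillMeasurements(cv::Mat &measurements,
                      const cv::Mat &translation_measured,
                      const cv::Mat &rotation_measured)
{
  cv::Mat measured_eulers = rot2euler(rotation_measured);          // 3x1 CV_64F, assumed helper
  measurements.at<double>(0) = translation_measured.at<double>(0); // x
  measurements.at<double>(1) = translation_measured.at<double>(1); // y
  measurements.at<double>(2) = translation_measured.at<double>(2); // z
  measurements.at<double>(3) = measured_eulers.at<double>(0);      // roll
  measurements.at<double>(4) = measured_eulers.at<double>(1);      // pitch
  measurements.at<double>(5) = measured_eulers.at<double>(2);      // yaw
}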
 cv::Point correct( cv::Point pt )
 {
     // update point
     mMeasurement( 0 ) = pt.x;
     mMeasurement( 1 ) = pt.y;
     
     // The "correct" phase that is going to use the predicted value and our measurement
     cv::Mat estimated = mKF.correct( mMeasurement );
     cv::Point statePt( estimated.at<float>( 0 ), estimated.at<float>( 1 ) );
     
     mPrediction = statePt;
     
     return mPrediction;
 }
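correct() above and updatePrediction() earlier on this page share the mKF, mMeasurement and mPrediction members, so they read as methods of a small tracker class. A sketch of the wrapper they imply; the constant-velocity model and the noise values are assumptions:

// Hypothetical wrapper class implied by the two methods above.
class PointTracker {
public:
    PointTracker() : mKF(4, 2, 0), mMeasurement(2, 1)
    {
        // constant-velocity model: [x, y, vx, vy]
        mKF.transitionMatrix = (cv::Mat_<float>(4, 4) <<
            1, 0, 1, 0,
            0, 1, 0, 1,
            0, 0, 1, 0,
            0, 0, 0, 1);
        cv::setIdentity(mKF.measurementMatrix);
        cv::setIdentity(mKF.processNoiseCov, cv::Scalar::all(1e-4));     // assumption
        cv::setIdentity(mKF.measurementNoiseCov, cv::Scalar::all(1e-1)); // assumption
        cv::setIdentity(mKF.errorCovPost, cv::Scalar::all(1));
    }

    cv::Point updatePrediction();    // defined above
    cv::Point correct(cv::Point pt); // defined above

private:
    cv::KalmanFilter mKF;
    cv::Mat_<float> mMeasurement;
    cv::Point mPrediction;
};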
Example #8
  int kalman_if_found(cv::KalmanFilter& kf,cv::Mat& state,cv::Mat& res){
	state = kf.predict();
	//  cout << "State post:" << endl << state << endl;

	cv::Rect predRect;
	predRect.width = state.at<float>(4);
	predRect.height = state.at<float>(5);
	predRect.x = state.at<float>(0) - predRect.width / 2;
	predRect.y = state.at<float>(1) - predRect.height / 2;

	cv::Point center;
	center.x = state.at<float>(0);
	center.y = state.at<float>(1);
	cv::circle(res, center, 2, CV_RGB(255,0,0), -1);
	cv::rectangle(res, predRect, CV_RGB(255,0,0), 2);

	return 0;
  }
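Taken together, kalman_if_found and kalman_find follow the usual per-frame pattern: predict once tracking has started, then correct (or initialize) whenever the detector fires. A sketch of the driver loop this pair implies; grabFrame() and detect() are placeholders for the original project's capture and detection code:

// Hypothetical per-frame driver for the two helpers above.
cv::Mat grabFrame();                    // placeholder: returns the current frame
std::vector<cv::Rect> detect(cv::Mat&); // placeholder: returns candidate boxes

void trackLoop(cv::KalmanFilter& kf, cv::Mat& state, cv::Mat& meas) {
    bool found = false;
    for (;;) {
        cv::Mat res = grabFrame();
        std::vector<cv::Rect> ballsBox = detect(res);

        if (found)
            kalman_if_found(kf, state, res);                   // predict and draw the estimate

        if (!ballsBox.empty())
            kalman_find(meas, ballsBox, found, kf, state);     // init or correct

        cv::imshow("tracking", res);
        if (cv::waitKey(1) == 27) break;                       // Esc quits
    }
}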
//Kalman filter
void kalmanFilter()
{
	cv::VideoCapture capture(0);

	//x,y,vx,vy
	//cv::KalmanFilter kalman(4, 2, 0);
	cv::setIdentity(kalman.measurementMatrix, cv::Scalar(1.0));
	cv::setIdentity(kalman.processNoiseCov, cv::Scalar::all(1e-5));
	cv::setIdentity(kalman.measurementNoiseCov, cv::Scalar::all(0.1));
	cv::setIdentity(kalman.errorCovPost, cv::Scalar::all(1.0));

    // constant-velocity motion model
	kalman.transitionMatrix = (cv::Mat_<float>(4, 4) << 1,0,1,0,   0,1,0,1,  0,0,1,0,  0,0,0,1);
	//container for the measurement
	cv::Mat_<float> measurement(2,1); measurement.setTo(cv::Scalar(0));
	//initial state
	kalman.statePre.at<float>(0) = initx;
	kalman.statePre.at<float>(1) = inity;
	kalman.statePre.at<float>(2) = 0;
	kalman.statePre.at<float>(3) = 0;

	cv::namedWindow("KalmanFilter", cv::WINDOW_AUTOSIZE);
	cv::setMouseCallback("KalmanFilter", _onMouse, 0);

	// main loop
    while (!GetAsyncKeyState(VK_ESCAPE)) { // Windows-specific Esc polling
        // grab a frame
		capture >> capframe;
		cv::Mat image = capframe.clone();
 
        // convert to HSV
		//cv::Mat hsv = image.clone();
		//cv::cvtColor(image, hsv, CV_BGR2HSV);

		std::cout << "HSV: (" << h << ", " << s << ", " << v << ")\n";

		//find pixels whose RGB is close to the clicked point's color and compute their centroid
		int sumX =0, sumY = 0, counter = 0;

		for(int y = 0; y < image.rows; y++)
			for(int x = 0; x < image.cols; x++)
			{
				//count the pixel if its RGB distance is within the threshold
				if(sqrt((red - image.at<cv::Vec3b>(y,x)[2]) * (red - image.at<cv::Vec3b>(y,x)[2]) +
						  (green - image.at<cv::Vec3b>(y,x)[1]) * (green - image.at<cv::Vec3b>(y,x)[1]) +
						  (blue - image.at<cv::Vec3b>(y,x)[0]) * (blue - image.at<cv::Vec3b>(y,x)[0])) <= thresh)
				{
					//highlight the pixel
					image.at<cv::Vec3b>(y,x)[2] = 255;

					sumX += x;
					sumY += y;
					counter++;
				}
			}
 
		if(counter > 0)
		{
			//measurement
			int mx = (int)(sumX/counter);
			int my = (int)(sumY/counter);
			measurement(0) = mx;
			measurement(1) = my;
			cv::Point measPt(measurement(0), measurement(1));
			//draw the measurement
			cv::circle(image, measPt, 10, cv::Scalar(0, 0, 255));

			//correction phase
			cv::Mat estimated = kalman.correct(measurement);
			cv::Point statePt(estimated.at<float>(0),estimated.at<float>(1)); // corrected estimate (not drawn in this demo)

        }

		//prediction phase
		cv::Mat prediction = kalman.predict();
		cv::Point predictPt(prediction.at<float>(0),prediction.at<float>(1));
		// draw the prediction
		cv::circle(image, predictPt, 10, cv::Scalar(0, 255, 0));
		cv::circle(image, cv::Point(20, 20), 10, CV_RGB(red, green, blue), 5);
		cv::putText(image,"target", cv::Point(0, 50), cv::FONT_HERSHEY_SIMPLEX, 0.7, CV_RGB(red, green, blue));
  
		std::cout << (int)red << ", " << (int)green << ", " <<  (int)blue << std::endl;
		cv::imshow("KalmanFilter", image);
        cv::waitKey(1);
 
    }
}
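This demo leans on file-scope state that isn't shown: the filter itself (the commented-out constructor hints at cv::KalmanFilter kalman(4, 2, 0)), the clicked color and start position, and the capture frame. A sketch of those assumed globals; the names come from the function body, the values and the mouse-callback body are assumptions:

// Assumed globals for kalmanFilter() above (placeholder values).
cv::KalmanFilter kalman(4, 2, 0);            // x, y, vx, vy (matches the commented-out line)
cv::Mat capframe;                            // current camera frame
unsigned char red = 0, green = 0, blue = 0;  // clicked target color
int h = 0, s = 0, v = 0;                     // clicked color in HSV (printed only)
float initx = 0.0f, inity = 0.0f;            // click position, used to seed statePre
double thresh = 50.0;                        // RGB distance threshold (assumed value)

// Hypothetical mouse callback: on left click, record the position and pixel color.
void _onMouse(int event, int x, int y, int, void*) {
    if (event == cv::EVENT_LBUTTONDOWN && !capframe.empty()) {
        cv::Vec3b p = capframe.at<cv::Vec3b>(y, x);
        blue = p[0]; green = p[1]; red = p[2];
        initx = (float)x; inity = (float)y;
    }
}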
Example #10
File: track.cpp  Project: mateus03/HANNRS
void detectionsCallback(const hannrs_msgs::VisionDetection::ConstPtr& msg){
  if(msg->positions.size() == 1){
    if(people.empty()){
      if(msg->positions.size() == 1){ // redundant with the outer check; kept from the original distance gating
	  KF = cv::KalmanFilter(4,2,0,CV_64F);
	  KF.transitionMatrix = (cv::Mat_<double>(4, 4) << 1,0,1,0, 0,1,0,1, 0,0,1,0, 0,0,0,1);
	  // init...
	  KF.statePre.at<double>(0) = msg->positions[0].x;
	  KF.statePre.at<double>(1) = msg->positions[0].y;
	  KF.statePre.at<double>(2) = 0;
	  KF.statePre.at<double>(3) = 0;
	  KF.measurementMatrix = (cv::Mat_<double>(2, 4) << 1,0,0,0, 0,1,0,0);
	  setIdentity(KF.processNoiseCov, cv::Scalar::all(0.25));
	  setIdentity(KF.measurementNoiseCov, cv::Scalar::all(0.05));
	  setIdentity(KF.errorCovPost, cv::Scalar::all(.1));
	  	  
	  hannrs_msgs::Person person;
	  person.pose.position = msg->positions[0];
	  person.pose.orientation.w = 1.0;
	  person.status = "standing"; //person cannot enter the scene sitted
	  person.name = "Andre";
	  people.push_back(person);
	  headers.push_back(msg->header);
      }
    }else{
      cv::Mat_<double> measurement(2,1); measurement.setTo(cv::Scalar(0));
      double dt = msg->header.stamp.toSec() - headers.back().stamp.toSec();
      
      KF.transitionMatrix = (cv::Mat_<double>(4, 4) << 1,0,dt,0, 0,1,0,dt, 0,0,1,0, 0,0,0,1);
      
      // First predict, to update the internal statePre variable
      cv::Mat prediction = KF.predict();

      double vel_module;
      hannrs_msgs::Person person;
      if(msg->positions.size() == 1){
	  // Get the detection
	  measurement(0) = msg->positions[0].x;
	  measurement(1) = msg->positions[0].y;
		      
	  // The "correct" phase that is going to use the predicted value and our measurement
	  cv::Mat estimated = KF.correct(measurement);

	  person.pose.position.x = estimated.at<double>(0);
	  person.pose.position.y = estimated.at<double>(1);
	  if(msg->classifications[0] == 2.0){
	    person.pose.orientation = tf::createQuaternionMsgFromYaw(msg->positions[0].z);
	    person.velocity.linear.x = 0;
	    person.velocity.linear.y = 0;
	    vel_module = -1;
	  }else{
	    person.pose.orientation = tf::createQuaternionMsgFromYaw(atan2(estimated.at<double>(3), estimated.at<double>(2)));
	    person.velocity.linear.x = estimated.at<double>(2);
	    person.velocity.linear.y = estimated.at<double>(3);
	    vel_module = sqrt(estimated.at<double>(2)*estimated.at<double>(2) + estimated.at<double>(3)*estimated.at<double>(3));
	  }
      }else{ // prediction-only fallback; unreachable under the size()==1 guard unless the distance gate is restored
	person.pose.position.x = prediction.at<double>(0);
	person.pose.position.y = prediction.at<double>(1);
	if(people.back().status.compare("sitting") == 0){
	  person.pose.orientation = people.back().pose.orientation;
	  person.velocity.linear.x = 0;
	  person.velocity.linear.y = 0;
	  vel_module = -1;
	}else{
	  person.pose.orientation = tf::createQuaternionMsgFromYaw(atan2(prediction.at<double>(3), prediction.at<double>(2)));
	  person.velocity.linear.x = prediction.at<double>(2);
	  person.velocity.linear.y = prediction.at<double>(3);
	  vel_module = sqrt(prediction.at<double>(2)*prediction.at<double>(2) + prediction.at<double>(3)*prediction.at<double>(3));
	}
      }
      
      if(vel_module == -1){
	person.velocity.linear.x = 0;
	person.velocity.linear.y = 0;
	person.status = "sitting"; //person cannot enter the scene sitted
      }else if(vel_module < 0.15){
	person.velocity.linear.x = 0;
	person.velocity.linear.y = 0;
	person.status = "standing"; //person cannot enter the scene sitted
      }else{
	person.status = "walking";
      }
      
      person.name = "Andre";
      people.push_back(person);
      headers.push_back(msg->header);
    }
  }
}
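detectionsCallback also relies on file-scope state visible only through its usage. A sketch of what the body above implies; the types are inferred, not taken from the project:

// Assumed file-scope state for detectionsCallback.
#include <opencv2/video/tracking.hpp>
#include <std_msgs/Header.h>
#include <hannrs_msgs/Person.h>

cv::KalmanFilter KF;                     // re-created on the first detection
std::vector<hannrs_msgs::Person> people; // track history; back() is the latest estimate
std::vector<std_msgs::Header> headers;   // matching message headers, used to compute dt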