Example #1
/**
 * @brief Returns the estimated object pose in the Cartesian frame.
 */
geometry_msgs::Pose kalman_pose_estimation(geometry_msgs::Pose previous_pose_, geometry_msgs::Pose measured_pose_) {

	// TODO: create a Kalman filter for each estimated object separately.
	cv::KalmanFilter KF;         // instantiate Kalman Filter
	int nStates = 18;            // the number of states
	int nMeasurements = 6;       // the number of measured states
	int nInputs = 0;             // the number of action control
	double dt = 0.125;           // time between measurements (1/FPS)
	initKalmanFilter(KF, nStates, nMeasurements, nInputs, dt);    // init function
	cv::Mat measurements(nMeasurements, 1, CV_64F);
	measurements.setTo(cv::Scalar(0));

	// Get object pose in sensor frame.
	tf::Transform measured_tf;
	tf::poseMsgToTF(measured_pose_, measured_tf);
//	tf::btMatrix3x3 measured_rot = measured_tf.getBasis();

	// Get the measured translation
	cv::Mat translation_measured(3, 1, CV_64F);
	// translation_measured = ... TODO!
	// Get the measured rotation
	cv::Mat rotation_measured(3, 3, CV_64F);
	//rotation_measured = ... TODO!
	// fill the measurements vector
	fillMeasurements(measurements, translation_measured, rotation_measured);

	// Instantiate estimated translation and rotation
	cv::Mat translation_estimated(3, 1, CV_64F);
	cv::Mat rotation_estimated(3, 3, CV_64F);
	// update the Kalman filter with good measurements
	updateKalmanFilter( KF, measurements, translation_estimated, rotation_estimated);

	// TODO: convert translation_estimated and rotation_estimated back into
	// estimated_pose before returning (see the sketch after this function).
	geometry_msgs::Pose estimated_pose;
	return estimated_pose;
}
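The TODO blocks above leave the measurement extraction and the final pose reconstruction open. A minimal sketch of one way to fill them, assuming the standard ROS tf and OpenCV types already used in the example (the helper names extractMeasurement and composeEstimatedPose are hypothetical, not part of the original code):

#include <tf/transform_datatypes.h>
#include <opencv2/core/core.hpp>
#include <geometry_msgs/Pose.h>

// Copy the measured translation and rotation out of a tf::Transform
// into the 3x1 and 3x3 CV_64F matrices expected by fillMeasurements().
void extractMeasurement(const tf::Transform &measured_tf,
                        cv::Mat &translation_measured,
                        cv::Mat &rotation_measured)
{
	const tf::Vector3 t = measured_tf.getOrigin();
	translation_measured.at<double>(0) = t.x();
	translation_measured.at<double>(1) = t.y();
	translation_measured.at<double>(2) = t.z();

	const tf::Matrix3x3 &R = measured_tf.getBasis();
	for (int i = 0; i < 3; ++i)
	{
		tf::Vector3 row = R.getRow(i);
		rotation_measured.at<double>(i, 0) = row.x();
		rotation_measured.at<double>(i, 1) = row.y();
		rotation_measured.at<double>(i, 2) = row.z();
	}
}

// Rebuild a geometry_msgs::Pose from the filtered estimates.
geometry_msgs::Pose composeEstimatedPose(const cv::Mat &translation_estimated,
                                         const cv::Mat &rotation_estimated)
{
	tf::Matrix3x3 R(rotation_estimated.at<double>(0,0), rotation_estimated.at<double>(0,1), rotation_estimated.at<double>(0,2),
	                rotation_estimated.at<double>(1,0), rotation_estimated.at<double>(1,1), rotation_estimated.at<double>(1,2),
	                rotation_estimated.at<double>(2,0), rotation_estimated.at<double>(2,1), rotation_estimated.at<double>(2,2));
	tf::Transform estimated_tf(R, tf::Vector3(translation_estimated.at<double>(0),
	                                          translation_estimated.at<double>(1),
	                                          translation_estimated.at<double>(2)));
	geometry_msgs::Pose pose;
	tf::poseTFToMsg(estimated_tf, pose);
	return pose;
}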
Example #2
DoorHandleDetectionNode::DoorHandleDetectionNode(ros::NodeHandle nh)
{
  n = nh;

  m_img_.init(480,640);
  m_img_mono.init(480,640);

  m_extrinsic_param_are_initialized = false;
  m_tracking_is_initialized = false;
  m_disp_is_initialized = false;
  m_cam_is_initialized = false;

  m_is_door_handle_present = 0;
  m_tracking_works = false;
  m_stop_detection = false;


  n.param<std::string>("imageTopicName", m_imageTopicName, "/camera/rgb/image_raw");
  n.param<std::string>("pclTopicName", m_pclTopicName, "/softkinetic_camera/depth/points");
  n.param<std::string>("cameraRGBTopicName", m_cameraRGBTopicName, "/softkinetic_camera/rgb/camera_info");
  n.param<std::string>("cameraDepthTopicName", m_cameraDepthTopicName, "/softkinetic_camera/depth/camera_info");
  n.param("dh_right", m_dh_right, true);
  n.param("lenght_dh",m_lenght_dh, 0.1);
  n.param("height_dh", m_height_dh, 0.055);
  n.param("debug", debug, false);

  //Subscribe to services
  cam_rgb_info_sub = n.subscribe( m_cameraRGBTopicName, 1, (boost::function < void(const sensor_msgs::CameraInfo::ConstPtr&)>) boost::bind( &DoorHandleDetectionNode::setupCameraParameters, this, _1 ));
  cam_depth_info_sub = n.subscribe( m_cameraDepthTopicName, 1, (boost::function < void(const sensor_msgs::CameraInfo::ConstPtr&)>) boost::bind( &DoorHandleDetectionNode::getExtrinsicParameters, this, _1 ));
  pcl_frame_sub = n.subscribe( m_pclTopicName, 1, (boost::function < void(const sensor_msgs::PointCloud2::ConstPtr&)>) boost::bind( &DoorHandleDetectionNode::mainComputation, this, _1 ));
  image_frame_sub = n.subscribe( m_imageTopicName, 1, (boost::function < void(const sensor_msgs::Image::ConstPtr&)>) boost::bind( &DoorHandleDetectionNode::displayImage, this, _1 ));

  ROS_INFO("Beautiful weather, isn't it ?");
  //Advertise the publishers
  pose_handle_pub = n.advertise< geometry_msgs::PoseStamped >("pose_handle", 1);
  door_handle_status_pub = n.advertise< std_msgs::Int8 >("status", 1);
  point_handle_pub = n.advertise< geometry_msgs::PointStamped >("point_handle", 1);
  if (debug)
  {
    pcl_plane_pub = n.advertise< pcl::PointCloud<pcl::PointXYZ> >("PC_Plane", 1);
    pcl_dh_pub = n.advertise< pcl::PointCloud<pcl::PointXYZ> >("PC_Sandwich", 1);
    debug_pcl_pub = n.advertise< pcl::PointCloud<pcl::PointXYZ> >("pc_debug", 1);
  }

  m_blob.setGraphics(true);
  m_blob.setGraphicsThickness(1);
  //  m_blob.setEllipsoidShapePrecision(0.);
  m_blob.setGrayLevelMin(170);
  m_blob.setGrayLevelMax(255);

  //Kalman Filter
  int nStates = 18;            // the number of states
  int nMeasurements = 6;       // the number of measured states
  int nInputs = 0;             // the number of action control
  double dt = 1/12.5;           // time between measurements (1/FPS)
  initKalmanFilter(m_KF, nStates, nMeasurements, nInputs, dt);    // init function
}
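Neither example above shows the body of initKalmanFilter(KF, nStates, nMeasurements, nInputs, dt). With 18 states, 6 measurements and no control input, it is typically the constant-acceleration model from the OpenCV pose-estimation tutorial: states 0-8 hold position, linear velocity and linear acceleration, states 9-17 hold the Euler angles and their first and second derivatives. A minimal sketch along those lines (the noise covariances below are placeholder assumptions, not values taken from these examples):

#include <opencv2/core/core.hpp>
#include <opencv2/video/tracking.hpp>

void initKalmanFilter(cv::KalmanFilter &KF, int nStates, int nMeasurements, int nInputs, double dt)
{
  KF.init(nStates, nMeasurements, nInputs, CV_64F);               // 18 states, 6 measurements, no control
  cv::setIdentity(KF.processNoiseCov,     cv::Scalar::all(1e-5)); // process noise (placeholder)
  cv::setIdentity(KF.measurementNoiseCov, cv::Scalar::all(1e-4)); // measurement noise (placeholder)
  cv::setIdentity(KF.errorCovPost,        cv::Scalar::all(1));    // initial error covariance

  // Constant-acceleration dynamics, applied to the translation block (states 0..8)
  // and to the rotation block (states 9..17). KF.init() already set the diagonal to 1.
  for (int block = 0; block < nStates; block += 9)
  {
    for (int i = 0; i < 3; ++i)
    {
      KF.transitionMatrix.at<double>(block + i,     block + i + 3) = dt;
      KF.transitionMatrix.at<double>(block + i,     block + i + 6) = 0.5 * dt * dt;
      KF.transitionMatrix.at<double>(block + i + 3, block + i + 6) = dt;
    }
  }

  // Only position (x, y, z) and orientation (roll, pitch, yaw) are measured.
  KF.measurementMatrix.at<double>(0, 0)  = 1;  // x
  KF.measurementMatrix.at<double>(1, 1)  = 1;  // y
  KF.measurementMatrix.at<double>(2, 2)  = 1;  // z
  KF.measurementMatrix.at<double>(3, 9)  = 1;  // roll
  KF.measurementMatrix.at<double>(4, 10) = 1;  // pitch
  KF.measurementMatrix.at<double>(5, 11) = 1;  // yaw
}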
Example #3
void Odometry::reset(const Transform & initialPose)
{
	previousVelocityTransform_.setNull();
	previousGroundTruthPose_.setNull();
	_resetCurrentCount = 0;
	previousStamp_ = 0;
	distanceTravelled_ = 0;
	if(_force3DoF || particleFilters_.size())
	{
		float x,y,z, roll,pitch,yaw;
		initialPose.getTranslationAndEulerAngles(x, y, z, roll, pitch, yaw);

		if(_force3DoF)
		{
			if(z != 0.0f || roll != 0.0f || pitch != 0.0f)
			{
				UWARN("Force2D=true and the initial pose contains z, roll or pitch values (%s). They are set to null.", initialPose.prettyPrint().c_str());
			}
			z = 0;
			roll = 0;
			pitch = 0;
			Transform pose(x, y, z, roll, pitch, yaw);
			_pose = pose;
		}
		else
		{
			_pose = initialPose;
		}

		if(particleFilters_.size())
		{
			UASSERT(particleFilters_.size() == 6);
			particleFilters_[0]->init(x);
			particleFilters_[1]->init(y);
			particleFilters_[2]->init(z);
			particleFilters_[3]->init(roll);
			particleFilters_[4]->init(pitch);
			particleFilters_[5]->init(yaw);
		}

		if(_filteringStrategy == 1)
		{
			initKalmanFilter(initialPose);
		}
	}
	else
	{
		_pose = initialPose;
	}
}
Example #4
KalmanFilter::KalmanFilter(Pose *initialPose) {
	// Store a reference to the given pose
	// so we can update it each time we filter two poses.
	_pose = initialPose;
	// Convert the pose to a 3-element array with x, y, and theta.
	float initialPoseArr[3];
	initialPose->toArray(initialPoseArr);
	// Create a zeroed velocity array for x, y, and theta.
	_velocity[0] = 0;
	_velocity[1] = 0;
	_velocity[2] = 0;
	// Initialize the Kalman filter.
	initKalmanFilter(&_kf, initialPoseArr, _velocity, 1);
	// Initialize the track to a zeroed state.
	for (int i = 0; i < 9; i++) {
		_track[i] = 0;
	}
	// Set the uncertainties to their defaults.
	setUncertainty(0.05, 0.05, 0.05,
	               0.05, 0.05, 0.05,
	               0.05, 0.05, 0.05);
}
Example #5
Learning::Learning(){
	spatialCombiEps = 0.18;
	spatialCombiMinPts = 1;

	associateVariance = 0.02;
	associateRate = 0.76;
	this->isAssSuccess = false;

	isTKFilter = true;
	isQKFilter = false;

	this->pObject = new Object();
	tempR = Mat::eye(3,3,CV_32FC1);
	tempT = Mat::zeros(3,1,CV_32FC1);

	updateDThreshold = 0.004;
	updateTThreshold = 25;

	sumMaxValue = -1;
	angle = 0;

	initKalmanFilter();
}
Example #6
Odometry::Odometry(const rtabmap::ParametersMap & parameters) :
		_resetCountdown(Parameters::defaultOdomResetCountdown()),
		_force3DoF(Parameters::defaultRegForce3DoF()),
		_holonomic(Parameters::defaultOdomHolonomic()),
		guessFromMotion_(Parameters::defaultOdomGuessMotion()),
		_filteringStrategy(Parameters::defaultOdomFilteringStrategy()),
		_particleSize(Parameters::defaultOdomParticleSize()),
		_particleNoiseT(Parameters::defaultOdomParticleNoiseT()),
		_particleLambdaT(Parameters::defaultOdomParticleLambdaT()),
		_particleNoiseR(Parameters::defaultOdomParticleNoiseR()),
		_particleLambdaR(Parameters::defaultOdomParticleLambdaR()),
		_fillInfoData(Parameters::defaultOdomFillInfoData()),
		_kalmanProcessNoise(Parameters::defaultOdomKalmanProcessNoise()),
		_kalmanMeasurementNoise(Parameters::defaultOdomKalmanMeasurementNoise()),
		_imageDecimation(Parameters::defaultOdomImageDecimation()),
		_resetCurrentCount(0),
		previousStamp_(0),
		distanceTravelled_(0)
{
	Parameters::parse(parameters, Parameters::kOdomResetCountdown(), _resetCountdown);

	Parameters::parse(parameters, Parameters::kRegForce3DoF(), _force3DoF);
	Parameters::parse(parameters, Parameters::kOdomHolonomic(), _holonomic);
	Parameters::parse(parameters, Parameters::kOdomGuessMotion(), guessFromMotion_);
	Parameters::parse(parameters, Parameters::kOdomFillInfoData(), _fillInfoData);
	Parameters::parse(parameters, Parameters::kOdomFilteringStrategy(), _filteringStrategy);
	Parameters::parse(parameters, Parameters::kOdomParticleSize(), _particleSize);
	Parameters::parse(parameters, Parameters::kOdomParticleNoiseT(), _particleNoiseT);
	Parameters::parse(parameters, Parameters::kOdomParticleLambdaT(), _particleLambdaT);
	Parameters::parse(parameters, Parameters::kOdomParticleNoiseR(), _particleNoiseR);
	Parameters::parse(parameters, Parameters::kOdomParticleLambdaR(), _particleLambdaR);
	UASSERT(_particleNoiseT>0);
	UASSERT(_particleLambdaT>0);
	UASSERT(_particleNoiseR>0);
	UASSERT(_particleLambdaR>0);
	Parameters::parse(parameters, Parameters::kOdomKalmanProcessNoise(), _kalmanProcessNoise);
	Parameters::parse(parameters, Parameters::kOdomKalmanMeasurementNoise(), _kalmanMeasurementNoise);
	Parameters::parse(parameters, Parameters::kOdomImageDecimation(), _imageDecimation);
	UASSERT(_imageDecimation>=1);

	if(_filteringStrategy == 2)
	{
		// Initialize the Particle filters
		particleFilters_.resize(6);
		for(unsigned int i = 0; i<particleFilters_.size(); ++i)
		{
			if(i<3)
			{
				particleFilters_[i] = new ParticleFilter(_particleSize, _particleNoiseT, _particleLambdaT);
			}
			else
			{
				particleFilters_[i] = new ParticleFilter(_particleSize, _particleNoiseR, _particleLambdaR);
			}
		}
	}
	else if(_filteringStrategy == 1)
	{
		initKalmanFilter();
	}
}
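The filtering strategy and its noise settings are all driven by the ParametersMap passed to this constructor. A short usage sketch, assuming (as the Parameters::parse() calls above suggest) that ParametersMap is rtabmap's string-to-string map and ParametersPair its matching pair typedef; the noise values below are placeholders, not values from the original code:

#include <rtabmap/core/Parameters.h>

rtabmap::ParametersMap makeKalmanOdomParameters()
{
	rtabmap::ParametersMap params;
	// "1" selects the Kalman branch in the constructor above; "2" would select the particle filters.
	params.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomFilteringStrategy(), "1"));
	params.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomKalmanProcessNoise(), "0.001"));    // placeholder value
	params.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomKalmanMeasurementNoise(), "0.01")); // placeholder value
	// The map is then handed to a concrete Odometry subclass, whose constructor
	// forwards it to Odometry::Odometry(parameters) shown above.
	return params;
}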
Example #7
Transform Odometry::process(SensorData & data, OdometryInfo * info)
{
	if(_pose.isNull())
	{
		_pose.setIdentity(); // initialized
	}

	UASSERT(!data.imageRaw().empty());

	if(!data.stereoCameraModel().isValidForProjection() &&
	   (data.cameraModels().size() == 0 || !data.cameraModels()[0].isValidForProjection()))
	{
		UERROR("Rectified images required! Calibrate your camera.");
		return Transform();
	}

	double dt = previousStamp_ > 0.0f ? data.stamp() - previousStamp_ : 0.0;
	Transform guess = dt && guessFromMotion_ ? Transform::getIdentity() : Transform();
	UASSERT_MSG(dt>0.0 || (dt == 0.0 && previousVelocityTransform_.isNull()), uFormat("dt=%f previous transform=%s", dt, previousVelocityTransform_.prettyPrint().c_str()).c_str());
	if(!previousVelocityTransform_.isNull())
	{
		if(guessFromMotion_)
		{
			if(_filteringStrategy == 1)
			{
				// use Kalman predict transform
				float vx,vy,vz, vroll,vpitch,vyaw;
				predictKalmanFilter(dt, &vx,&vy,&vz,&vroll,&vpitch,&vyaw);
				guess = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
			else
			{
				float vx,vy,vz, vroll,vpitch,vyaw;
				previousVelocityTransform_.getTranslationAndEulerAngles(vx,vy,vz, vroll,vpitch,vyaw);
				guess = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
		}
		else if(_filteringStrategy == 1)
		{
			predictKalmanFilter(dt);
		}
	}

	UTimer time;
	Transform t;
	if(_imageDecimation > 1)
	{
		// Decimation of images with calibrations
		SensorData decimatedData = data;
		decimatedData.setImageRaw(util2d::decimate(decimatedData.imageRaw(), _imageDecimation));
		decimatedData.setDepthOrRightRaw(util2d::decimate(decimatedData.depthOrRightRaw(), _imageDecimation));
		std::vector<CameraModel> cameraModels = decimatedData.cameraModels();
		for(unsigned int i=0; i<cameraModels.size(); ++i)
		{
			cameraModels[i] = cameraModels[i].scaled(1.0/double(_imageDecimation));
		}
		decimatedData.setCameraModels(cameraModels);
		StereoCameraModel stereoModel = decimatedData.stereoCameraModel();
		if(stereoModel.isValidForProjection())
		{
			stereoModel.scale(1.0/double(_imageDecimation));
		}
		decimatedData.setStereoCameraModel(stereoModel);

		// compute transform
		t = this->computeTransform(decimatedData, guess, info);

		// transform back the keypoints in the original image
		std::vector<cv::KeyPoint> kpts = decimatedData.keypoints();
		double log2value = log(double(_imageDecimation))/log(2.0);
		for(unsigned int i=0; i<kpts.size(); ++i)
		{
			kpts[i].pt.x *= _imageDecimation;
			kpts[i].pt.y *= _imageDecimation;
			kpts[i].size *= _imageDecimation;
			kpts[i].octave += log2value;
		}
		data.setFeatures(kpts, decimatedData.descriptors());

		if(info)
		{
			UASSERT(info->newCorners.size() == info->refCorners.size());
			for(unsigned int i=0; i<info->newCorners.size(); ++i)
			{
				info->refCorners[i].x *= _imageDecimation;
				info->refCorners[i].y *= _imageDecimation;
				info->newCorners[i].x *= _imageDecimation;
				info->newCorners[i].y *= _imageDecimation;
			}
			for(std::multimap<int, cv::KeyPoint>::iterator iter=info->words.begin(); iter!=info->words.end(); ++iter)
			{
				iter->second.pt.x *= _imageDecimation;
				iter->second.pt.y *= _imageDecimation;
				iter->second.size *= _imageDecimation;
				iter->second.octave += log2value;
			}
		}
	}
	else
	{
		t = this->computeTransform(data, guess, info);
	}

	if(info)
	{
		info->timeEstimation = time.ticks();
		info->lost = t.isNull();
		info->stamp = data.stamp();
		info->interval = dt;
		info->transform = t;

		if(!data.groundTruth().isNull())
		{
			if(!previousGroundTruthPose_.isNull())
			{
				info->transformGroundTruth = previousGroundTruthPose_.inverse() * data.groundTruth();
			}
			previousGroundTruthPose_ = data.groundTruth();
		}
	}

	if(!t.isNull())
	{
		_resetCurrentCount = _resetCountdown;

		float vx,vy,vz, vroll,vpitch,vyaw;
		t.getTranslationAndEulerAngles(vx,vy,vz, vroll,vpitch,vyaw);

		// transform to velocity
		if(dt)
		{
			vx /= dt;
			vy /= dt;
			vz /= dt;
			vroll /= dt;
			vpitch /= dt;
			vyaw /= dt;
		}

		if(_force3DoF || !_holonomic || particleFilters_.size() || _filteringStrategy==1)
		{
			if(_filteringStrategy == 1)
			{
				if(previousVelocityTransform_.isNull())
				{
					// reset Kalman
					if(dt)
					{
						initKalmanFilter(t, vx,vy,vz,vroll,vpitch,vyaw);
					}
					else
					{
						initKalmanFilter(t);
					}
				}
				else
				{
					// Kalman filtering
					updateKalmanFilter(vx,vy,vz,vroll,vpitch,vyaw);
				}
			}
			else if(particleFilters_.size())
			{
				// Particle filtering
				UASSERT(particleFilters_.size()==6);
				if(previousVelocityTransform_.isNull())
				{
					particleFilters_[0]->init(vx);
					particleFilters_[1]->init(vy);
					particleFilters_[2]->init(vz);
					particleFilters_[3]->init(vroll);
					particleFilters_[4]->init(vpitch);
					particleFilters_[5]->init(vyaw);
				}
				else
				{
					vx = particleFilters_[0]->filter(vx);
					vy = particleFilters_[1]->filter(vy);
					vyaw = particleFilters_[5]->filter(vyaw);

					if(!_holonomic)
					{
						// arc trajectory around ICR
						float tmpY = vyaw!=0.0f ? vx / tan((CV_PI-vyaw)/2.0f) : 0.0f;
						if(fabs(tmpY) < fabs(vy) || (tmpY<=0 && vy >=0) || (tmpY>=0 && vy<=0))
						{
							vy = tmpY;
						}
						else
						{
							vyaw = (atan(vx/vy)*2.0f-CV_PI)*-1;
						}
					}

					if(!_force3DoF)
					{
						vz = particleFilters_[2]->filter(vz);
						vroll = particleFilters_[3]->filter(vroll);
						vpitch = particleFilters_[4]->filter(vpitch);
					}
				}

				if(info)
				{
					info->timeParticleFiltering = time.ticks();
				}

				if(_force3DoF)
				{
					vz = 0.0f;
					vroll = 0.0f;
					vpitch = 0.0f;
				}
			}
			else if(!_holonomic)
			{
				// arc trajectory around ICR
				vy = vyaw!=0.0f ? vx / tan((CV_PI-vyaw)/2.0f) : 0.0f;
				if(_force3DoF)
				{
					vz = 0.0f;
					vroll = 0.0f;
					vpitch = 0.0f;
				}
			}

			if(dt)
			{
				t = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
			else
			{
				t = Transform(vx, vy, vz, vroll, vpitch, vyaw);
			}

			if(info)
			{
				info->transformFiltered = t;
			}
		}

		previousStamp_ = data.stamp();
		previousVelocityTransform_.setNull();
		if(dt)
		{
			previousVelocityTransform_ = Transform(vx, vy, vz, vroll, vpitch, vyaw);
		}

		if(info)
		{
			distanceTravelled_ += t.getNorm();
			info->distanceTravelled = distanceTravelled_;
		}

		return _pose *= t; // updated
	}
	else if(_resetCurrentCount > 0)
	{
		UWARN("Odometry lost! Odometry will be reset after next %d consecutive unsuccessful odometry updates...", _resetCurrentCount);

		--_resetCurrentCount;
		if(_resetCurrentCount == 0)
		{
			UWARN("Odometry automatically reset to latest pose!");
			this->reset(_pose);
		}
	}

	previousVelocityTransform_.setNull();
	previousStamp_ = 0;

	return Transform();
}
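predictKalmanFilter() and updateKalmanFilter() are declared elsewhere in rtabmap and are not part of this listing. Purely to illustrate the predict/correct pattern used above, and not rtabmap's actual implementation, a constant-velocity cv::KalmanFilter over the six velocity components could be wrapped like this (the VelocityKalman class and its noise values are assumptions for the sketch):

#include <opencv2/core/core.hpp>
#include <opencv2/video/tracking.hpp>

// Illustrative only: a constant-velocity filter over (vx, vy, vz, vroll, vpitch, vyaw).
class VelocityKalman
{
public:
	VelocityKalman()
	{
		kf_.init(6, 6, 0, CV_32F);                                       // state == measurement == 6 velocities
		cv::setIdentity(kf_.transitionMatrix);                           // constant-velocity model
		cv::setIdentity(kf_.measurementMatrix);
		cv::setIdentity(kf_.processNoiseCov,     cv::Scalar::all(1e-3)); // placeholder noise
		cv::setIdentity(kf_.measurementNoiseCov, cv::Scalar::all(1e-2)); // placeholder noise
		cv::setIdentity(kf_.errorCovPost,        cv::Scalar::all(1));
	}

	// Prediction step: the returned velocities can be scaled by dt to form the motion guess.
	cv::Mat predict() { return kf_.predict().clone(); }

	// Correction step with the velocity computed from the latest estimated transform.
	cv::Mat update(const cv::Mat &measuredVelocity) { return kf_.correct(measuredVelocity).clone(); }

private:
	cv::KalmanFilter kf_;
};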