Example #1
void DataRecorder::addData(const rtabmap::SensorData & data, const Transform & pose, const cv::Mat & covariance)
{
	memoryMutex_.lock();
	if(memory_)
	{
		if(memory_->getStMem().size() == 0 && data.id() > 0)
		{
			ParametersMap customParameters;
			customParameters.insert(ParametersPair(Parameters::kMemGenerateIds(), "false")); // use id from data
			memory_->parseParameters(customParameters);
		}

		//save to database
		UTimer time;
		memory_->update(data, pose, covariance);
		const Signature * s = memory_->getLastWorkingSignature();
		totalSizeKB_ += (int)s->sensorData().imageCompressed().total()/1000;
		totalSizeKB_ += (int)s->sensorData().depthOrRightCompressed().total()/1000;
		totalSizeKB_ += (int)s->sensorData().laserScanCompressed().total()/1000;
		memory_->cleanup();

		if(++count_ % 30)
		{
			memory_->emptyTrash();
		}
		UDEBUG("Time to process a message = %f s", time.ticks());
	}
	memoryMutex_.unlock();
}
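A minimal caller sketch (not from the original source; the recorder, camera and odometry objects are assumptions and must already be initialized). It pushes one frame into the recorder shown above; addData() is thread-safe since it locks memoryMutex_ internally.

	rtabmap::SensorData frame = camera.takeImage();      // assumed camera wrapper
	rtabmap::Transform pose = odometry.getPose();        // assumed odometry source
	cv::Mat covariance = cv::Mat::eye(6, 6, CV_64FC1);   // identity 6x6 covariance
	recorder.addData(frame, pose, covariance);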
Example #2
Transform Odometry::process(const SensorData & data, OdometryInfo * info)
{
	if(_pose.isNull())
	{
		_pose.setIdentity(); // initialized
	}

	UASSERT(!data.image().empty());
	if(dynamic_cast<OdometryMono*>(this) == 0)
	{
		UASSERT(!data.depthOrRightImage().empty());
	}

	if(data.fx() <= 0 || data.fyOrBaseline() <= 0)
	{
		UERROR("Rectified images required! Calibrate your camera. (fx=%f, fy/baseline=%f, cx=%f, cy=%f)",
				data.fx(), data.fyOrBaseline(), data.cx(), data.cy());
		return Transform();
	}

	UTimer time;
	Transform t = this->computeTransform(data, info);

	if(info)
	{
		info->time = time.elapsed();
		info->lost = t.isNull();
	}

	if(!t.isNull())
	{
		_resetCurrentCount = _resetCountdown;

		if(_force2D)
		{
			float x,y,z, roll,pitch,yaw;
			t.getTranslationAndEulerAngles(x, y, z, roll, pitch, yaw);
			t = Transform(x,y,0, 0,0,yaw);
		}

		return _pose *= t; // updated
	}
	else if(_resetCurrentCount > 0)
	{
		UWARN("Odometry lost! Odometry will be reset after next %d consecutive unsuccessful odometry updates...", _resetCurrentCount);

		--_resetCurrentCount;
		if(_resetCurrentCount == 0)
		{
			UWARN("Odometry automatically reset to latest pose!");
			this->reset(_pose);
		}
	}

	return Transform();
}
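A hedged usage sketch (assumption, not from the original source): a typical odometry loop keeps feeding frames to process() and watches info.lost to detect tracking failures; the returned Transform is null when odometry is lost.

	rtabmap::OdometryInfo info;
	rtabmap::SensorData frame = camera.takeImage();      // assumed camera source
	while(!frame.image().empty())
	{
		rtabmap::Transform pose = odometry.process(frame, &info);
		if(info.lost)
		{
			UWARN("Odometry lost at frame %d", frame.id());
		}
		frame = camera.takeImage();
	}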
Example #3
SensorData Camera::takeImage(CameraInfo * info)
{
	bool warnFrameRateTooHigh = false;
	float actualFrameRate = 0;
	if(_imageRate>0)
	{
		int sleepTime = (1000.0f/_imageRate - 1000.0f*_frameRateTimer->getElapsedTime());
		if(sleepTime > 2)
		{
			uSleep(sleepTime-2);
		}
		else if(sleepTime < 0)
		{
			warnFrameRateTooHigh = true;
			actualFrameRate = 1.0/(_frameRateTimer->getElapsedTime());
		}

		// Add precision at the cost of a small overhead
		while(_frameRateTimer->getElapsedTime() < 1.0/double(_imageRate)-0.000001)
		{
			//
		}

		double slept = _frameRateTimer->getElapsedTime();
		_frameRateTimer->start();
		UDEBUG("slept=%fs vs target=%fs", slept, 1.0/double(_imageRate));
	}

	UTimer timer;
	SensorData data  = this->captureImage(info);
	double captureTime = timer.ticks();
	if(warnFrameRateTooHigh)
	{
		UWARN("Camera: Cannot reach target image rate %f Hz, current rate is %f Hz and capture time = %f s.",
				_imageRate, actualFrameRate, captureTime);
	}
	else
	{
		UDEBUG("Time capturing image = %fs", captureTime);
	}
	if(info)
	{
		info->id = data.id();
		info->stamp = data.stamp();
		info->timeCapture = captureTime;
	}
	return data;
}
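A hypothetical polling loop (assumption, not part of the original file): frames are grabbed until an empty image is returned, and the capture time reported through CameraInfo is logged.

	rtabmap::CameraInfo info;
	rtabmap::SensorData frame = camera->takeImage(&info);
	while(!frame.imageRaw().empty())
	{
		UDEBUG("frame %d captured in %f s", frame.id(), info.timeCapture);
		frame = camera->takeImage(&info);
	}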
Example #4
	void mapDataReceivedCallback(const rtabmap_ros::MapDataConstPtr & msg)
	{
		UTimer timer;

		std::map<int, Transform> poses;
		std::multimap<int, Link> constraints;
		Transform mapOdom;
		rtabmap_ros::mapGraphFromROS(msg->graph, poses, constraints, mapOdom);
		for(unsigned int i=0; i<msg->nodes.size(); ++i)
		{
			if(msg->nodes[i].image.size() ||
			   msg->nodes[i].depth.size() ||
			   msg->nodes[i].laserScan.size())
			{
				uInsert(nodes_, std::make_pair(msg->nodes[i].id, rtabmap_ros::nodeDataFromROS(msg->nodes[i])));
			}
		}

		// create a tmp signature with latest sensory data
		if(poses.size() && nodes_.find(poses.rbegin()->first) != nodes_.end())
		{
			Signature tmpS = nodes_.at(poses.rbegin()->first);
			SensorData tmpData = tmpS.sensorData();
			tmpData.setId(-1);
			uInsert(nodes_, std::make_pair(-1, Signature(-1, -1, 0, tmpS.getStamp(), "", tmpS.getPose(), Transform(), tmpData)));
			poses.insert(std::make_pair(-1, poses.rbegin()->second));
		}

		// Update maps
		poses = mapsManager_.updateMapCaches(
				poses,
				0,
				false,
				false,
				false,
				false,
				false,
				nodes_);

		mapsManager_.publishMaps(poses, msg->header.stamp, msg->header.frame_id);

		ROS_INFO("map_assembler: Publishing data = %fs", timer.ticks());
	}
Example #5
// The returned OcTree must be deleted by the caller.
// RTAB-Map optimizes the graph at almost every iteration, so an octomap cannot
// be updated online; it is only available through this service call. To have an
// "online" octomap published as a topic, you may want to subscribe an
// octomap_server to the /rtabmap/cloud topic.
//
octomap::OcTree * MapsManager::createOctomap(const std::map<int, Transform> & poses)
{
	octomap::OcTree * octree = new octomap::OcTree(gridCellSize_);
	UTimer time;
	for(std::map<int, Transform>::const_iterator posesIter = poses.begin(); posesIter!=poses.end(); ++posesIter)
	{
		std::map<int, pcl::PointCloud<pcl::PointXYZRGB>::Ptr >::iterator cloudsIter = clouds_.find(posesIter->first);
		if(cloudsIter != clouds_.end() && cloudsIter->second->size())
		{
			octomap::Pointcloud * scan = new octomap::Pointcloud();

			//octomap::pointcloudPCLToOctomap(*cloudsIter->second, *scan); // Not anymore in Indigo!
			scan->reserve(cloudsIter->second->size());
			for(pcl::PointCloud<pcl::PointXYZRGB>::const_iterator it = cloudsIter->second->begin();
				it != cloudsIter->second->end();
				++it)
			{
				// Check if the point is invalid
				if(pcl::isFinite(*it))
				{
					scan->push_back(it->x, it->y, it->z);
				}
			}

			float x,y,z, r,p,w;
			posesIter->second.getTranslationAndEulerAngles(x,y,z,r,p,w);
			octomap::ScanNode node(scan, octomap::pose6d(x,y,z, r,p,w), posesIter->first);
			octree->insertPointCloud(node, cloudMaxDepth_, true, true);
			ROS_INFO("inserted %d pt=%d (%fs)", posesIter->first, (int)scan->size(), time.ticks());
		}
	}

	octree->updateInnerOccupancy();
	ROS_INFO("updated inner occupancy (%fs)", time.ticks());

	// clear memory if no one subscribed
	if(mapCacheCleanup_ && cloudMapPub_.getNumSubscribers() == 0)
	{
		clouds_.clear();
	}
	return octree;
}
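A hedged caller sketch (assumption; mapsManager and optimizedPoses are placeholders): build the octree for the current optimized poses and save it in octomap's binary format, remembering that the returned OcTree must be deleted by the caller as noted above.

	octomap::OcTree * octree = mapsManager.createOctomap(optimizedPoses);
	octree->writeBinary("rtabmap_octomap.bt"); // octomap's binary .bt format
	delete octree;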
Example #6
void CameraThread::mainLoop()
{
	UTimer totalTime;
	UDEBUG("");
	CameraInfo info;
	SensorData data = _camera->takeImage(&info);

	if(!data.imageRaw().empty() || (dynamic_cast<DBReader*>(_camera) != 0 && data.id()>0)) // intermediate nodes may not have an image set
	{
		postUpdate(&data, &info);

		info.cameraName = _camera->getSerial();
		info.timeTotal = totalTime.ticks();
		this->post(new CameraEvent(data, info));
	}
	else if(!this->isKilled())
	{
		UWARN("no more images...");
		this->kill();
		this->post(new CameraEvent());
	}
}
Example #7
int main(int argc, char** argv)
{
	ros::init(argc, argv, "data_player");

	//ULogger::setType(ULogger::kTypeConsole);
	//ULogger::setLevel(ULogger::kDebug);
	//ULogger::setEventLevel(ULogger::kWarning);

	bool publishClock = false;
	for(int i=1;i<argc;++i)
	{
		if(strcmp(argv[i], "--clock") == 0)
		{
			publishClock = true;
		}
	}

	ros::NodeHandle nh;
	ros::NodeHandle pnh("~");

	std::string frameId = "base_link";
	std::string odomFrameId = "odom";
	std::string cameraFrameId = "camera_optical_link";
	std::string scanFrameId = "base_laser_link";
	double rate = 1.0f;
	std::string databasePath = "";
	bool publishTf = true;
	int startId = 0;
	bool useDbStamps = true;

	pnh.param("frame_id", frameId, frameId);
	pnh.param("odom_frame_id", odomFrameId, odomFrameId);
	pnh.param("camera_frame_id", cameraFrameId, cameraFrameId);
	pnh.param("scan_frame_id", scanFrameId, scanFrameId);
	pnh.param("rate", rate, rate); // Playback speed ratio applied to the database stamps
	pnh.param("database", databasePath, databasePath);
	pnh.param("publish_tf", publishTf, publishTf);
	pnh.param("start_id", startId, startId);

	// A general 360 lidar with 0.5 deg increment
	double scanAngleMin, scanAngleMax, scanAngleIncrement, scanRangeMin, scanRangeMax;
	pnh.param<double>("scan_angle_min", scanAngleMin, -M_PI);
	pnh.param<double>("scan_angle_max", scanAngleMax, M_PI);
	pnh.param<double>("scan_angle_increment", scanAngleIncrement, M_PI / 720.0);
	pnh.param<double>("scan_range_min", scanRangeMin, 0.0);
	pnh.param<double>("scan_range_max", scanRangeMax, 60);

	ROS_INFO("frame_id = %s", frameId.c_str());
	ROS_INFO("odom_frame_id = %s", odomFrameId.c_str());
	ROS_INFO("camera_frame_id = %s", cameraFrameId.c_str());
	ROS_INFO("scan_frame_id = %s", scanFrameId.c_str());
	ROS_INFO("rate = %f", rate);
	ROS_INFO("publish_tf = %s", publishTf?"true":"false");
	ROS_INFO("start_id = %d", startId);
	ROS_INFO("Publish clock (--clock): %s", publishClock?"true":"false");

	if(databasePath.empty())
	{
		ROS_ERROR("Parameter \"database\" must be set (path to a RTAB-Map database).");
		return -1;
	}
	databasePath = uReplaceChar(databasePath, '~', UDirectory::homeDir());
	if(databasePath.size() && databasePath.at(0) != '/')
	{
		databasePath = UDirectory::currentDir(true) + databasePath;
	}
	ROS_INFO("database = %s", databasePath.c_str());

	rtabmap::DBReader reader(databasePath, -rate, false, false, false, startId);
	if(!reader.init())
	{
		ROS_ERROR("Cannot open database \"%s\".", databasePath.c_str());
		return -1;
	}

	ros::ServiceServer pauseSrv = pnh.advertiseService("pause", pauseCallback);
	ros::ServiceServer resumeSrv = pnh.advertiseService("resume", resumeCallback);

	image_transport::ImageTransport it(nh);
	image_transport::Publisher imagePub;
	image_transport::Publisher rgbPub;
	image_transport::Publisher depthPub;
	image_transport::Publisher leftPub;
	image_transport::Publisher rightPub;
	ros::Publisher rgbCamInfoPub;
	ros::Publisher depthCamInfoPub;
	ros::Publisher leftCamInfoPub;
	ros::Publisher rightCamInfoPub;
	ros::Publisher odometryPub;
	ros::Publisher scanPub;
	ros::Publisher scanCloudPub;
	ros::Publisher globalPosePub;
	ros::Publisher gpsFixPub;
	ros::Publisher clockPub;
	tf2_ros::TransformBroadcaster tfBroadcaster;

	if(publishClock)
	{
		clockPub = nh.advertise<rosgraph_msgs::Clock>("/clock", 1);
	}

	UTimer timer;
	rtabmap::CameraInfo cameraInfo;
	rtabmap::SensorData data = reader.takeImage(&cameraInfo);
	rtabmap::OdometryInfo odomInfo;
	odomInfo.reg.covariance = cameraInfo.odomCovariance;
	rtabmap::OdometryEvent odom(data, cameraInfo.odomPose, odomInfo);
	double acquisitionTime = timer.ticks();
	while(ros::ok() && odom.data().id())
	{
		ROS_INFO("Reading sensor data %d...", odom.data().id());

		ros::Time time(odom.data().stamp());

		if(publishClock)
		{
			rosgraph_msgs::Clock msg;
			msg.clock = time;
			clockPub.publish(msg);
		}

		sensor_msgs::CameraInfo camInfoA; //rgb or left
		sensor_msgs::CameraInfo camInfoB; //depth or right

		camInfoA.K.assign(0);
		camInfoA.K[0] = camInfoA.K[4] = camInfoA.K[8] = 1;
		camInfoA.R.assign(0);
		camInfoA.R[0] = camInfoA.R[4] = camInfoA.R[8] = 1;
		camInfoA.P.assign(0);
		camInfoA.P[10] = 1;

		camInfoA.header.frame_id = cameraFrameId;
		camInfoA.header.stamp = time;

		camInfoB = camInfoA;

		int type = -1;
		if(!odom.data().depthRaw().empty() && (odom.data().depthRaw().type() == CV_32FC1 || odom.data().depthRaw().type() == CV_16UC1))
		{
			if(odom.data().cameraModels().size() > 1)
			{
				ROS_WARN("Multi-cameras detected in database but this node cannot send multi-images yet...");
			}
			else
			{
				//depth
				if(odom.data().cameraModels().size())
				{
					camInfoA.D.resize(5,0);
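					// sensor_msgs/CameraInfo stores K as a row-major 3x3 matrix
					// (K[0]=fx, K[2]=cx, K[4]=fy, K[5]=cy) and P as a row-major 3x4
					// projection matrix (P[0]=fx, P[2]=cx, P[5]=fy, P[6]=cy, P[3]=Tx).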

					camInfoA.P[0] = odom.data().cameraModels()[0].fx();
					camInfoA.K[0] = odom.data().cameraModels()[0].fx();
					camInfoA.P[5] = odom.data().cameraModels()[0].fy();
					camInfoA.K[4] = odom.data().cameraModels()[0].fy();
					camInfoA.P[2] = odom.data().cameraModels()[0].cx();
					camInfoA.K[2] = odom.data().cameraModels()[0].cx();
					camInfoA.P[6] = odom.data().cameraModels()[0].cy();
					camInfoA.K[5] = odom.data().cameraModels()[0].cy();

					camInfoB = camInfoA;
				}

				type=0;

				if(rgbPub.getTopic().empty()) rgbPub = it.advertise("rgb/image", 1);
				if(depthPub.getTopic().empty()) depthPub = it.advertise("depth_registered/image", 1);
				if(rgbCamInfoPub.getTopic().empty()) rgbCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("rgb/camera_info", 1);
				if(depthCamInfoPub.getTopic().empty()) depthCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("depth_registered/camera_info", 1);
			}
		}
		else if(!odom.data().rightRaw().empty() && odom.data().rightRaw().type() == CV_8U)
		{
			//stereo
			if(odom.data().stereoCameraModel().isValidForProjection())
			{
				camInfoA.D.resize(8,0);

				camInfoA.P[0] = odom.data().stereoCameraModel().left().fx();
				camInfoA.K[0] = odom.data().stereoCameraModel().left().fx();
				camInfoA.P[5] = odom.data().stereoCameraModel().left().fy();
				camInfoA.K[4] = odom.data().stereoCameraModel().left().fy();
				camInfoA.P[2] = odom.data().stereoCameraModel().left().cx();
				camInfoA.K[2] = odom.data().stereoCameraModel().left().cx();
				camInfoA.P[6] = odom.data().stereoCameraModel().left().cy();
				camInfoA.K[5] = odom.data().stereoCameraModel().left().cy();

				camInfoB = camInfoA;
				camInfoB.P[3] = odom.data().stereoCameraModel().right().Tx(); // Right_Tx = -baseline*fx
			}

			type=1;

			if(leftPub.getTopic().empty()) leftPub = it.advertise("left/image", 1);
			if(rightPub.getTopic().empty()) rightPub = it.advertise("right/image", 1);
			if(leftCamInfoPub.getTopic().empty()) leftCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("left/camera_info", 1);
			if(rightCamInfoPub.getTopic().empty()) rightCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("right/camera_info", 1);

		}
		else
		{
			if(imagePub.getTopic().empty()) imagePub = it.advertise("image", 1);
		}

		camInfoA.height = odom.data().imageRaw().rows;
		camInfoA.width = odom.data().imageRaw().cols;
		camInfoB.height = odom.data().depthOrRightRaw().rows;
		camInfoB.width = odom.data().depthOrRightRaw().cols;

		if(!odom.data().laserScanRaw().isEmpty())
		{
			if(scanPub.getTopic().empty() && odom.data().laserScanRaw().is2d())
			{
				scanPub = nh.advertise<sensor_msgs::LaserScan>("scan", 1);
				if(odom.data().laserScanRaw().angleIncrement() > 0.0f)
				{
					ROS_INFO("Scan will be published.");
				}
				else
				{
					ROS_INFO("Scan will be published with these parameters:");
					ROS_INFO("  scan_angle_min=%f", scanAngleMin);
					ROS_INFO("  scan_angle_max=%f", scanAngleMax);
					ROS_INFO("  scan_angle_increment=%f", scanAngleIncrement);
					ROS_INFO("  scan_range_min=%f", scanRangeMin);
					ROS_INFO("  scan_range_max=%f", scanRangeMax);
				}
			}
			else if(scanCloudPub.getTopic().empty())
			{
				scanCloudPub = nh.advertise<sensor_msgs::PointCloud2>("scan_cloud", 1);
				ROS_INFO("Scan cloud will be published.");
			}
		}

		if(!odom.data().globalPose().isNull() &&
			odom.data().globalPoseCovariance().cols==6 &&
			odom.data().globalPoseCovariance().rows==6)
		{
			if(globalPosePub.getTopic().empty())
			{
				globalPosePub = nh.advertise<geometry_msgs::PoseWithCovarianceStamped>("global_pose", 1);
				ROS_INFO("Global pose will be published.");
			}
		}

		if(odom.data().gps().stamp() > 0.0)
		{
			if(gpsFixPub.getTopic().empty())
			{
				gpsFixPub = nh.advertise<sensor_msgs::NavSatFix>("gps/fix", 1);
				ROS_INFO("GPS will be published.");
			}
		}

		// publish transforms first
		if(publishTf)
		{
			rtabmap::Transform localTransform;
			if(odom.data().cameraModels().size() == 1)
			{
				localTransform = odom.data().cameraModels()[0].localTransform();
			}
			else if(odom.data().stereoCameraModel().isValidForProjection())
			{
				localTransform = odom.data().stereoCameraModel().left().localTransform();
			}
			if(!localTransform.isNull())
			{
				geometry_msgs::TransformStamped baseToCamera;
				baseToCamera.child_frame_id = cameraFrameId;
				baseToCamera.header.frame_id = frameId;
				baseToCamera.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(localTransform, baseToCamera.transform);
				tfBroadcaster.sendTransform(baseToCamera);
			}

			if(!odom.pose().isNull())
			{
				geometry_msgs::TransformStamped odomToBase;
				odomToBase.child_frame_id = frameId;
				odomToBase.header.frame_id = odomFrameId;
				odomToBase.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(odom.pose(), odomToBase.transform);
				tfBroadcaster.sendTransform(odomToBase);
			}

			if(!scanPub.getTopic().empty() || !scanCloudPub.getTopic().empty())
			{
				geometry_msgs::TransformStamped baseToLaserScan;
				baseToLaserScan.child_frame_id = scanFrameId;
				baseToLaserScan.header.frame_id = frameId;
				baseToLaserScan.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(odom.data().laserScanCompressed().localTransform(), baseToLaserScan.transform);
				tfBroadcaster.sendTransform(baseToLaserScan);
			}
		}
		if(!odom.pose().isNull())
		{
			if(odometryPub.getTopic().empty()) odometryPub = nh.advertise<nav_msgs::Odometry>("odom", 1);

			if(odometryPub.getNumSubscribers())
			{
				nav_msgs::Odometry odomMsg;
				odomMsg.child_frame_id = frameId;
				odomMsg.header.frame_id = odomFrameId;
				odomMsg.header.stamp = time;
				rtabmap_ros::transformToPoseMsg(odom.pose(), odomMsg.pose.pose);
				UASSERT(odomMsg.pose.covariance.size() == 36 &&
						odom.covariance().total() == 36 &&
						odom.covariance().type() == CV_64FC1);
				memcpy(odomMsg.pose.covariance.begin(), odom.covariance().data, 36*sizeof(double));
				odometryPub.publish(odomMsg);
			}
		}

		// Publish async topics first (so that they can be caught by rtabmap before the image topics)
		if(globalPosePub.getNumSubscribers() > 0 &&
			!odom.data().globalPose().isNull() &&
			odom.data().globalPoseCovariance().cols==6 &&
			odom.data().globalPoseCovariance().rows==6)
		{
			geometry_msgs::PoseWithCovarianceStamped msg;
			rtabmap_ros::transformToPoseMsg(odom.data().globalPose(), msg.pose.pose);
			memcpy(msg.pose.covariance.data(), odom.data().globalPoseCovariance().data, 36*sizeof(double));
			msg.header.frame_id = frameId;
			msg.header.stamp = time;
			globalPosePub.publish(msg);
		}

		if(odom.data().gps().stamp() > 0.0)
		{
			sensor_msgs::NavSatFix msg;
			msg.longitude = odom.data().gps().longitude();
			msg.latitude = odom.data().gps().latitude();
			msg.altitude = odom.data().gps().altitude();
			msg.position_covariance_type = sensor_msgs::NavSatFix::COVARIANCE_TYPE_DIAGONAL_KNOWN;
			msg.position_covariance.at(0) = msg.position_covariance.at(4) = msg.position_covariance.at(8)= odom.data().gps().error()* odom.data().gps().error();
			msg.header.frame_id = frameId;
			msg.header.stamp.fromSec(odom.data().gps().stamp());
			gpsFixPub.publish(msg);
		}

		if(type >= 0)
		{
			if(rgbCamInfoPub.getNumSubscribers() && type == 0)
			{
				rgbCamInfoPub.publish(camInfoA);
			}
			if(leftCamInfoPub.getNumSubscribers() && type == 1)
			{
				leftCamInfoPub.publish(camInfoA);
			}
			if(depthCamInfoPub.getNumSubscribers() && type == 0)
			{
				depthCamInfoPub.publish(camInfoB);
			}
			if(rightCamInfoPub.getNumSubscribers() && type == 1)
			{
				rightCamInfoPub.publish(camInfoB);
			}
		}

		if(imagePub.getNumSubscribers() || rgbPub.getNumSubscribers() || leftPub.getNumSubscribers())
		{
			cv_bridge::CvImage img;
			if(odom.data().imageRaw().channels() == 1)
			{
				img.encoding = sensor_msgs::image_encodings::MONO8;
			}
			else
			{
				img.encoding = sensor_msgs::image_encodings::BGR8;
			}
			img.image = odom.data().imageRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;

			if(imagePub.getNumSubscribers())
			{
				imagePub.publish(imageRosMsg);
			}
			if(rgbPub.getNumSubscribers() && type == 0)
			{
				rgbPub.publish(imageRosMsg);
			}
			if(leftPub.getNumSubscribers() && type == 1)
			{
				leftPub.publish(imageRosMsg);
				leftCamInfoPub.publish(camInfoA);
			}
		}

		if(depthPub.getNumSubscribers() && !odom.data().depthRaw().empty() && type==0)
		{
			cv_bridge::CvImage img;
			if(odom.data().depthRaw().type() == CV_32FC1)
			{
				img.encoding = sensor_msgs::image_encodings::TYPE_32FC1;
			}
			else
			{
				img.encoding = sensor_msgs::image_encodings::TYPE_16UC1;
			}
			img.image = odom.data().depthRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;

			depthPub.publish(imageRosMsg);
			depthCamInfoPub.publish(camInfoB);
		}

		if(rightPub.getNumSubscribers() && !odom.data().rightRaw().empty() && type==1)
		{
			cv_bridge::CvImage img;
			img.encoding = sensor_msgs::image_encodings::MONO8;
			img.image = odom.data().rightRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;

			rightPub.publish(imageRosMsg);
			rightCamInfoPub.publish(camInfoB);
		}

		if(!odom.data().laserScanRaw().isEmpty())
		{
			if(scanPub.getNumSubscribers() && odom.data().laserScanRaw().is2d())
			{
				//inspired from pointcloud_to_laserscan package
				sensor_msgs::LaserScan msg;
				msg.header.frame_id = scanFrameId;
				msg.header.stamp = time;

				msg.angle_min = scanAngleMin;
				msg.angle_max = scanAngleMax;
				msg.angle_increment = scanAngleIncrement;
				msg.time_increment = 0.0;
				msg.scan_time = 0;
				msg.range_min = scanRangeMin;
				msg.range_max = scanRangeMax;
				if(odom.data().laserScanRaw().angleIncrement() > 0.0f)
				{
					msg.angle_min = odom.data().laserScanRaw().angleMin();
					msg.angle_max = odom.data().laserScanRaw().angleMax();
					msg.angle_increment = odom.data().laserScanRaw().angleIncrement();
					msg.range_min = odom.data().laserScanRaw().rangeMin();
					msg.range_max = odom.data().laserScanRaw().rangeMax();
				}

				uint32_t rangesSize = std::ceil((msg.angle_max - msg.angle_min) / msg.angle_increment);
				msg.ranges.assign(rangesSize, 0.0);

				const cv::Mat & scan = odom.data().laserScanRaw().data();
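				// Each column of the scan matrix holds one point (x, y, ...) as floats:
				// recover its range/bearing with hypot/atan2, then keep the closest
				// return falling into each angular bin of width angle_increment.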
				for (int i=0; i<scan.cols; ++i)
				{
					const float * ptr = scan.ptr<float>(0,i);
					double range = hypot(ptr[0], ptr[1]);
					if (range >= msg.range_min && range <=msg.range_max)
					{
						double angle = atan2(ptr[1], ptr[0]);
						if (angle >= msg.angle_min && angle <= msg.angle_max)
						{
							int index = (angle - msg.angle_min) / msg.angle_increment;
							if (index>=0 && index<rangesSize && (range < msg.ranges[index] || msg.ranges[index]==0))
							{
								msg.ranges[index] = range;
							}
						}
					}
				}

				scanPub.publish(msg);
			}
			else if(scanCloudPub.getNumSubscribers())
			{
				sensor_msgs::PointCloud2 msg;
				pcl_conversions::moveFromPCL(*rtabmap::util3d::laserScanToPointCloud2(odom.data().laserScanRaw()), msg);
				msg.header.frame_id = scanFrameId;
				msg.header.stamp = time;
				scanCloudPub.publish(msg);
			}
		}

		if(odom.data().userDataRaw().type() == CV_8SC1 &&
		   odom.data().userDataRaw().cols >= 7 && // including null str ending
		   odom.data().userDataRaw().rows == 1 &&
		   memcmp(odom.data().userDataRaw().data, "GOAL:", 5) == 0)
		{
			//GOAL format detected, remove it from the user data and send it as goal event
			std::string goalStr = (const char *)odom.data().userDataRaw().data;
			if(!goalStr.empty())
			{
				std::list<std::string> strs = uSplit(goalStr, ':');
				if(strs.size() == 2)
				{
					int goalId = atoi(strs.rbegin()->c_str());

					if(goalId > 0)
					{
						ROS_WARN("Goal %d detected, calling rtabmap's set_goal service!", goalId);
						rtabmap_ros::SetGoal setGoalSrv;
						setGoalSrv.request.node_id = goalId;
						setGoalSrv.request.node_label = "";
						if(!ros::service::call("set_goal", setGoalSrv))
						{
							ROS_ERROR("Can't call \"set_goal\" service");
						}
					}
				}
			}
		}

		ros::spinOnce();

		while(ros::ok() && paused)
		{
			uSleep(100);
			ros::spinOnce();
		}

		timer.restart();
		cameraInfo = rtabmap::CameraInfo();
		data = reader.takeImage(&cameraInfo);
		odomInfo.reg.covariance = cameraInfo.odomCovariance;
		odom = rtabmap::OdometryEvent(data, cameraInfo.odomPose, odomInfo);
		acquisitionTime = timer.ticks();
	}


	return 0;
}
Example #8
// OpenGL thread
int RTABMapApp::Render()
{
	// should be before clearSceneOnNextRender_ in case openDatabase is called
	std::list<rtabmap::Statistics> rtabmapEvents;
	{
		boost::mutex::scoped_lock  lock(rtabmapMutex_);
		rtabmapEvents = rtabmapEvents_;
		rtabmapEvents_.clear();
	}

	if(clearSceneOnNextRender_)
	{
		odomMutex_.lock();
		odomEvents_.clear();
		odomMutex_.unlock();

		poseMutex_.lock();
		poseEvents_.clear();
		poseMutex_.unlock();

		main_scene_.clear();
		clearSceneOnNextRender_ = false;
		createdMeshes_.clear();
		rawPoses_.clear();
		totalPoints_ = 0;
		totalPolygons_ = 0;
		lastDrawnCloudsCount_ = 0;
		renderingFPS_ = 0.0f;
	}

	// Process events
	rtabmap::Transform pose;
	{
		boost::mutex::scoped_lock lock(poseMutex_);
		if(poseEvents_.size())
		{
			pose = poseEvents_.back();
			poseEvents_.clear();
		}
	}
	if(!pose.isNull())
	{
		// update camera pose?
		main_scene_.SetCameraPose(pose);
	}

	bool notifyDataLoaded = false;
	if(rtabmapEvents.size())
	{
		LOGI("Process rtabmap events");

		// update buffered signatures
		std::map<int, rtabmap::SensorData> bufferedSensorData;
		if(!trajectoryMode_)
		{
			for(std::list<rtabmap::Statistics>::iterator iter=rtabmapEvents.begin(); iter!=rtabmapEvents.end(); ++iter)
			{
				for(std::map<int, rtabmap::Signature>::const_iterator jter=iter->getSignatures().begin(); jter!=iter->getSignatures().end(); ++jter)
				{
					if(!jter->second.sensorData().imageRaw().empty() &&
					   !jter->second.sensorData().depthRaw().empty())
					{
						uInsert(bufferedSensorData, std::make_pair(jter->first, jter->second.sensorData()));
						uInsert(rawPoses_, std::make_pair(jter->first, jter->second.getPose()));
					}
					else if(!jter->second.sensorData().imageCompressed().empty() &&
							!jter->second.sensorData().depthOrRightCompressed().empty())
					{
						// uncompress
						rtabmap::SensorData data = jter->second.sensorData();
						cv::Mat tmpA,tmpB;
						data.uncompressData(&tmpA, &tmpB);
						uInsert(bufferedSensorData, std::make_pair(jter->first, data));
						uInsert(rawPoses_, std::make_pair(jter->first, jter->second.getPose()));
						notifyDataLoaded = true;
					}
				}
			}
		}

		std::map<int, rtabmap::Transform> poses = rtabmapEvents.back().poses();

		// Transform pose in OpenGL world
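		// opengl_world_T_rtabmap_world is assumed to be the constant rotation that
		// remaps RTAB-Map's world convention (x forward, z up) into OpenGL's
		// rendering frame (y up, z toward the viewer).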
		for(std::map<int, rtabmap::Transform>::iterator iter=poses.begin(); iter!=poses.end(); ++iter)
		{
			if(!graphOptimization_)
			{
				std::map<int, rtabmap::Transform>::iterator jter = rawPoses_.find(iter->first);
				if(jter != rawPoses_.end())
				{
					iter->second = opengl_world_T_rtabmap_world*jter->second;
				}
			}
			else
			{
				iter->second = opengl_world_T_rtabmap_world*iter->second;
			}
		}

		const std::multimap<int, rtabmap::Link> & links = rtabmapEvents.back().constraints();
		if(poses.size())
		{
			//update graph
			main_scene_.updateGraph(poses, links);

			// update clouds
			boost::mutex::scoped_lock  lock(meshesMutex_);
			std::set<std::string> strIds;
			for(std::map<int, rtabmap::Transform>::iterator iter=poses.begin(); iter!=poses.end(); ++iter)
			{
				int id = iter->first;
				if(!iter->second.isNull())
				{
					if(main_scene_.hasCloud(id))
					{
						//just update pose
						main_scene_.setCloudPose(id, iter->second);
						main_scene_.setCloudVisible(id, true);
						std::map<int, Mesh>::iterator meshIter = createdMeshes_.find(id);
						if(meshIter!=createdMeshes_.end())
						{
							meshIter->second.pose = iter->second;
						}
						else
						{
							UERROR("Not found mesh %d !?!?", id);
						}
					}
					else if(uContains(bufferedSensorData, id))
					{
						rtabmap::SensorData & data = bufferedSensorData.at(id);
						if(!data.imageRaw().empty() && !data.depthRaw().empty())
						{
							// Voxelize and filter depending on the previous cloud?
							pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud;
							pcl::IndicesPtr indices(new std::vector<int>);
							LOGI("Creating node cloud %d (image size=%dx%d)", id, data.imageRaw().cols, data.imageRaw().rows);
							cloud = rtabmap::util3d::cloudRGBFromSensorData(data, data.imageRaw().rows/data.depthRaw().rows, maxCloudDepth_, 0, indices.get());

							if(cloud->size() && indices->size())
							{
								UTimer time;

								// pcl::organizedFastMesh doesn't take indices, so set to NaN points we don't need to mesh
								pcl::PointCloud<pcl::PointXYZRGB>::Ptr output(new pcl::PointCloud<pcl::PointXYZRGB>);
								pcl::ExtractIndices<pcl::PointXYZRGB> filter;
								filter.setIndices(indices);
								filter.setKeepOrganized(true);
								filter.setInputCloud(cloud);
								filter.filter(*output);

								std::vector<pcl::Vertices> polygons = rtabmap::util3d::organizedFastMesh(output, meshAngleToleranceDeg_*M_PI/180.0, false, meshTrianglePix_);
								pcl::PointCloud<pcl::PointXYZRGB>::Ptr outputCloud(new pcl::PointCloud<pcl::PointXYZRGB>);
								std::vector<pcl::Vertices> outputPolygons;

								outputCloud = output;
								outputPolygons = polygons;

								LOGI("Creating mesh, %d polygons (%fs)", (int)outputPolygons.size(), time.ticks());

								if(outputCloud->size() && outputPolygons.size())
								{
									totalPolygons_ += outputPolygons.size();

									main_scene_.addCloud(id, outputCloud, outputPolygons, iter->second, data.imageRaw());


									// protect createdMeshes_ used also by exportMesh() method
									std::pair<std::map<int, Mesh>::iterator, bool> inserted = createdMeshes_.insert(std::make_pair(id, Mesh()));
									UASSERT(inserted.second);
									inserted.first->second.cloud = outputCloud;
									inserted.first->second.polygons = outputPolygons;
									inserted.first->second.pose = iter->second;
									inserted.first->second.texture = data.imageCompressed();
								}
								else
								{
								LOGE("No mesh could be created for node %d", id);
								}
							}
							totalPoints_+=indices->size();
						}
					}
				}
			}
		}

		//filter poses?
		if(poses.size() > 2)
		{
			if(nodesFiltering_)
			{
				for(std::multimap<int, rtabmap::Link>::const_iterator iter=links.begin(); iter!=links.end(); ++iter)
				{
					if(iter->second.type() != rtabmap::Link::kNeighbor)
					{
						int oldId = iter->second.to()>iter->second.from()?iter->second.from():iter->second.to();
						poses.erase(oldId);
					}
				}
			}
		}

		//update cloud visibility
		std::set<int> addedClouds = main_scene_.getAddedClouds();
		for(std::set<int>::const_iterator iter=addedClouds.begin();
			iter!=addedClouds.end();
			++iter)
		{
			if(*iter > 0 && poses.find(*iter) == poses.end())
			{
				main_scene_.setCloudVisible(*iter, false);
			}
		}
	}
	else
	{
		rtabmap::OdometryEvent event;
		bool set = false;
		{
			boost::mutex::scoped_lock  lock(odomMutex_);
			if(odomEvents_.size())
			{
				LOGI("Process odom events");
				event = odomEvents_.back();
				odomEvents_.clear();
				set = true;
			}
		}

		main_scene_.setCloudVisible(-1, odomCloudShown_ && !trajectoryMode_);

		//just process the last one
		if(set && !event.pose().isNull())
		{
			if(odomCloudShown_ && !trajectoryMode_)
			{
				if(!event.data().imageRaw().empty() && !event.data().depthRaw().empty())
				{
					pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud;
					cloud = rtabmap::util3d::cloudRGBFromSensorData(event.data(), event.data().imageRaw().rows/event.data().depthRaw().rows, maxCloudDepth_);
					if(cloud->size())
					{
						LOGI("Created odom cloud (rgb=%dx%d depth=%dx%d cloud=%dx%d)",
													event.data().imageRaw().cols, event.data().imageRaw().rows,
													event.data().depthRaw().cols, event.data().depthRaw().rows,
													(int)cloud->width, (int)cloud->height);
						std::vector<pcl::Vertices> polygons = rtabmap::util3d::organizedFastMesh(cloud, meshAngleToleranceDeg_*M_PI/180.0, false, meshTrianglePix_);
						main_scene_.addCloud(-1, cloud, polygons, opengl_world_T_rtabmap_world*event.pose(), event.data().imageRaw());
						main_scene_.setCloudVisible(-1, true);
					}
					else
					{
						LOGE("Generated cloud is empty!");
					}
				}
				else
				{
					LOGE("Odom data images are empty!");
				}
			}
		}
	}

	UTimer fpsTime;
	lastDrawnCloudsCount_ = main_scene_.Render();
	renderingFPS_ = 1.0/fpsTime.elapsed();

	if(rtabmapEvents.size())
	{
		// send statistics to GUI
		LOGI("Posting PostRenderEvent!");
		UEventsManager::post(new PostRenderEvent(rtabmapEvents.back()));
	}

	return notifyDataLoaded?1:0;
}
Example #9
// return not null transform if odometry is correctly computed
Transform OdometryICP::computeTransform(const SensorData & data, OdometryInfo * info)
{
	UTimer timer;
	Transform output;

	bool hasConverged = false;
	double variance = 0;
	unsigned int minPoints = 100;
	if(!data.depth().empty())
	{
		if(data.depth().type() == CV_8UC1)
		{
			UERROR("ICP 3D cannot be done on stereo images!");
			return output;
		}

		pcl::PointCloud<pcl::PointXYZ>::Ptr newCloudXYZ = util3d::getICPReadyCloud(
						data.depth(),
						data.fx(),
						data.fy(),
						data.cx(),
						data.cy(),
						_decimation,
						this->getMaxDepth(),
						_voxelSize,
						_samples,
						data.localTransform());

		if(_pointToPlane)
		{
			pcl::PointCloud<pcl::PointNormal>::Ptr newCloud = util3d::computeNormals(newCloudXYZ);

			std::vector<int> indices;
			newCloud = util3d::removeNaNNormalsFromPointCloud<pcl::PointNormal>(newCloud);
			if(newCloudXYZ->size() != newCloud->size())
			{
				UWARN("removed nan normals...");
			}

			if(_previousCloudNormal->size() > minPoints && newCloud->size() > minPoints)
			{
				int correspondences = 0;
				Transform transform = util3d::icpPointToPlane(newCloud,
						_previousCloudNormal,
						_maxCorrespondenceDistance,
						_maxIterations,
						&hasConverged,
						&variance,
						&correspondences);

				// verify if there are enough correspondences
				float correspondencesRatio = float(correspondences)/float(_previousCloudNormal->size()>newCloud->size()?_previousCloudNormal->size():newCloud->size());

				if(!transform.isNull() && hasConverged &&
		   	   	   correspondencesRatio >= _correspondenceRatio)
				{
					output = transform;
					_previousCloudNormal = newCloud;
				}
				else
				{
					UWARN("Transform not valid (hasConverged=%s variance = %f)",
							hasConverged?"true":"false", variance);
				}
			}
			else if(newCloud->size() > minPoints)
			{
				output.setIdentity();
				_previousCloudNormal = newCloud;
			}
		}
		else
		{
			//point to point
			if(_previousCloud->size() > minPoints && newCloudXYZ->size() > minPoints)
			{
				int correspondences = 0;
				Transform transform = util3d::icp(newCloudXYZ,
						_previousCloud,
						_maxCorrespondenceDistance,
						_maxIterations,
						&hasConverged,
						&variance,
						&correspondences);

				// verify if there are enough correspondences
				float correspondencesRatio = float(correspondences)/float(_previousCloud->size()>newCloudXYZ->size()?_previousCloud->size():newCloudXYZ->size());

				if(!transform.isNull() && hasConverged &&
				   correspondencesRatio >= _correspondenceRatio)
				{
					output = transform;
					_previousCloud = newCloudXYZ;
				}
				else
				{
					UWARN("Transform not valid (hasConverged=%s variance = %f)",
							hasConverged?"true":"false", variance);
				}
			}
			else if(newCloudXYZ->size() > minPoints)
			{
				output.setIdentity();
				_previousCloud = newCloudXYZ;
			}
		}
	}
	else
	{
		UERROR("Depth is empty?!?");
	}

	if(info)
	{
		info->variance = variance;
	}

	UINFO("Odom update time = %fs hasConverged=%s variance=%f cloud=%d",
			timer.elapsed(),
			hasConverged?"true":"false",
			variance,
			(int)(_pointToPlane?_previousCloudNormal->size():_previousCloud->size()));

	return output;
}
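The acceptance test used by both ICP branches above, restated as a standalone sketch (illustrative only; the function name is not from the original source): a registration is kept only when ICP converged and a sufficient fraction of the larger cloud found correspondences.

	bool acceptRegistration(bool hasConverged, int correspondences,
			size_t newSize, size_t previousSize, float minRatio)
	{
		// ratio of matched points relative to the larger of the two clouds
		float ratio = float(correspondences) /
				float(newSize > previousSize ? newSize : previousSize);
		// the caller additionally requires the returned transform not to be null
		return hasConverged && ratio >= minRatio;
	}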
Example #10
void UAppli::addTimeout(unsigned long _delay, int _ntimes, UCall& c) {
  UTimer* t = new UTimer(_delay, _ntimes, true); // true => auto_delete
  t->onTimeout(c);
  t->start();
}
Example #11
cv::Mat Camera::takeImage(cv::Mat & descriptors, std::vector<cv::KeyPoint> & keypoints)
{
	descriptors = cv::Mat();
	keypoints.clear();
	float imageRate = _imageRate==0.0f?33.0f:_imageRate; // if the rate is 0 (unlimited), cap at 33 Hz
	if(imageRate>0)
	{
		int sleepTime = (1000.0f/imageRate - 1000.0f*_frameRateTimer->getElapsedTime());
		if(sleepTime > 2)
		{
			uSleep(sleepTime-2);
		}

		// Add precision at the cost of a small overhead
		while(_frameRateTimer->getElapsedTime() < 1.0/double(imageRate)-0.000001)
		{
			//
		}

		double slept = _frameRateTimer->getElapsedTime();
		_frameRateTimer->start();
		UDEBUG("slept=%fs vs target=%fs", slept, 1.0/double(imageRate));
	}

	cv::Mat img;
	UTimer timer;
	img = this->captureImage();
	UDEBUG("Time capturing image = %fs", timer.ticks());

	if(!img.empty())
	{
		if(img.depth() != CV_8U)
		{
			UWARN("Images should already have 8U depth!");
			cv::Mat tmp = img;
			img = cv::Mat();
			tmp.convertTo(img, CV_8U);
			UDEBUG("Time converting image to 8U = %fs", timer.ticks());
		}

		if(_featuresExtracted && _keypointDetector && _keypointDescriptor)
		{
			keypoints = _keypointDetector->generateKeypoints(img);
			descriptors = _keypointDescriptor->generateDescriptors(img, keypoints);
			UDEBUG("Post treatment time = %fs", timer.ticks());
		}

		if(_framesDropped)
		{
			unsigned int count = 0;
			while(count++ < _framesDropped)
			{
				cv::Mat tmp = this->captureImage();
				if(!tmp.empty())
				{
					UDEBUG("frame dropped (%d/%d)", (int)count, (int)_framesDropped);
				}
				else
				{
					break;
				}
			}
			UDEBUG("Frames dropped time = %fs", timer.ticks());
		}
	}
	return img;
}
Example #12
cv::Mat BayesFilter::generatePrediction(const Memory * memory, const std::vector<int> & ids) const
{
	if(!_fullPredictionUpdate && !_prediction.empty())
	{
		return updatePrediction(_prediction, memory, uKeys(_posterior), ids);
	}
	UDEBUG("");

	UASSERT(memory &&
		   _predictionLC.size() >= 2 &&
		   ids.size());

	UTimer timer;
	timer.start();
	UTimer timerGlobal;
	timerGlobal.start();

	std::map<int, int> idToIndexMap;
	for(unsigned int i=0; i<ids.size(); ++i)
	{
		UASSERT_MSG(ids[i] != 0, "Signature id is null ?!?");
		idToIndexMap.insert(idToIndexMap.end(), std::make_pair(ids[i], i));
	}

	//int rows = prediction.rows;
	cv::Mat prediction = cv::Mat::zeros(ids.size(), ids.size(), CV_32FC1);
	int cols = prediction.cols;

	// Each prior is a column vector
	UDEBUG("_predictionLC.size()=%d",_predictionLC.size());
	std::set<int> idsDone;

	for(unsigned int i=0; i<ids.size(); ++i)
	{
		if(idsDone.find(ids[i]) == idsDone.end())
		{
			if(ids[i] > 0)
			{
				// Set high values (gaussians curves) to loop closure neighbors

				// ADD prob for each neighbors
				std::map<int, int> neighbors = memory->getNeighborsId(ids[i], _predictionLC.size()-1, 0);
				std::list<int> idsLoopMargin;
				//filter neighbors in STM
				for(std::map<int, int>::iterator iter=neighbors.begin(); iter!=neighbors.end();)
				{
					if(memory->isInSTM(iter->first))
					{
						neighbors.erase(iter++);
					}
					else
					{
						if(iter->second == 0)
						{
							idsLoopMargin.push_back(iter->first);
						}
						++iter;
					}
				}

				// should have at least 1 id in idsLoopMargin
				if(idsLoopMargin.size() == 0)
				{
					UFATAL("No 0 margin neighbor for signature %d !?!?", ids[i]);
				}

				// same neighbor tree for loop signatures (margin = 0)
				for(std::list<int>::iterator iter = idsLoopMargin.begin(); iter!=idsLoopMargin.end(); ++iter)
				{
					float sum = 0.0f; // sum values added
					sum += this->addNeighborProb(prediction, i, neighbors, idToIndexMap);
					idsDone.insert(*iter);
					this->normalize(prediction, i, sum, ids[0]<0);
				}
			}
			else
			{
				// Set the virtual place prior
				if(_virtualPlacePrior > 0)
				{
					if(cols>1) // The first must be the virtual place
					{
						((float*)prediction.data)[i] = _virtualPlacePrior;
						float val = (1.0-_virtualPlacePrior)/(cols-1);
						for(int j=1; j<cols; j++)
						{
							((float*)prediction.data)[i + j*cols] = val;
						}
					}
					else if(cols>0)
					{
						((float*)prediction.data)[i] = 1;
					}
				}
				else
				{
					// Only for some tests...
					// when _virtualPlacePrior=0, set all priors to the same value
					if(cols>1)
					{
						float val = 1.0/cols;
						for(int j=0; j<cols; j++)
						{
							((float*)prediction.data)[i + j*cols] = val;
						}
					}
					else if(cols>0)
					{
						((float*)prediction.data)[i] = 1;
					}
				}
			}
		}
	}

	ULOGGER_DEBUG("time = %fs", timerGlobal.ticks());

	return prediction;
}
Example #13
int main(int argc, char * argv[])
{
	signal(SIGABRT, &sighandler);
	signal(SIGTERM, &sighandler);
	signal(SIGINT, &sighandler);

	/*for(int i=0; i<argc; i++)
	{
		printf("argv[%d] = %s\n", i, argv[i]);
	}*/
	const ParametersMap & defaultParameters = Parameters::getDefaultParameters();
	if(argc < 2)
	{
		showUsage();
	}
	else if(argc == 2 && strcmp(argv[1], "-v") == 0)
	{
		printf("%s\n", Rtabmap::getVersion().c_str());
		exit(0);
	}
	else if(argc == 2 && strcmp(argv[1], "-default_params") == 0)
	{
		for(ParametersMap::const_iterator iter = defaultParameters.begin(); iter!=defaultParameters.end(); ++iter)
		{
			printf("%s=%s\n", iter->first.c_str(), iter->second.c_str());
		}
		exit(0);
	}
	printf("\n");

	std::string path;
	float timeThreshold = 0.0;
	float rate = 0.0;
	int loopDataset = 0;
	int repeat = 0;
	bool createGT = false;
	int imageWidth = 0;
	int imageHeight = 0;
	int startAt = 1;
	ParametersMap pm;
	ULogger::Level logLevel = ULogger::kError;
	ULogger::Level exitLevel = ULogger::kFatal;

	for(int i=1; i<argc; ++i)
	{
		if(i == argc-1)
		{
			// The last must be the path
			path = argv[i];
			if(!UDirectory::exists(path.c_str()) && !UFile::exists(path.c_str()))
			{
				printf("Path not valid : %s\n", path.c_str());
				showUsage();
				exit(1);
			}
			break;
		}
		if(strcmp(argv[i], "-t") == 0)
		{
			++i;
			if(i < argc)
			{
				timeThreshold = std::atof(argv[i]);
				if(timeThreshold < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-rate") == 0)
		{
			++i;
			if(i < argc)
			{
				rate = std::atof(argv[i]);
				if(rate < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-rateHz") == 0)
		{
			++i;
			if(i < argc)
			{
				rate = std::atof(argv[i]);
				if(rate < 0)
				{
					showUsage();
				}
				else if(rate)
				{
					rate = 1/rate;
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-repeat") == 0)
		{
			++i;
			if(i < argc)
			{
				repeat = std::atoi(argv[i]);
				if(repeat < 1)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-image_width") == 0)
		{
			++i;
			if(i < argc)
			{
				imageWidth = std::atoi(argv[i]);
				if(imageWidth < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-image_height") == 0)
		{
			++i;
			if(i < argc)
			{
				imageHeight = std::atoi(argv[i]);
				if(imageHeight < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-start_at") == 0)
		{
			++i;
			if(i < argc)
			{
				startAt = std::atoi(argv[i]);
				if(startAt < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-createGT") == 0)
		{
			createGT = true;
			continue;
		}
		if(strcmp(argv[i], "-debug") == 0)
		{
			logLevel = ULogger::kDebug;
			continue;
		}
		if(strcmp(argv[i], "-info") == 0)
		{
			logLevel = ULogger::kInfo;
			continue;
		}
		if(strcmp(argv[i], "-warn") == 0)
		{
			logLevel = ULogger::kWarning;
			continue;
		}
		if(strcmp(argv[i], "-exit_warn") == 0)
		{
			exitLevel = ULogger::kWarning;
			continue;
		}
		if(strcmp(argv[i], "-exit_error") == 0)
		{
			exitLevel = ULogger::kError;
			continue;
		}

		// Check for RTAB-Map's parameters
		std::string key = argv[i];
		key = uSplit(key, '-').back();
		if(defaultParameters.find(key) != defaultParameters.end())
		{
			++i;
			if(i < argc)
			{
				std::string value = argv[i];
				if(value.empty())
				{
					showUsage();
				}
				else
				{
					value = uReplaceChar(value, ',', ' ');
				}
				std::pair<ParametersMap::iterator, bool> inserted = pm.insert(ParametersPair(key, value));
				if(inserted.second == false)
				{
					inserted.first->second = value;
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}

		printf("Unrecognized option : %s\n", argv[i]);
		showUsage();
	}

	if(repeat && createGT)
	{
		printf("Cannot create a Ground truth if repeat is on.\n");
		showUsage();
	}
	else if((imageWidth && imageHeight == 0) ||
			(imageHeight && imageWidth == 0))
	{
		printf("If imageWidth is set, imageHeight must be too.\n");
		showUsage();
	}

	UTimer timer;
	timer.start();
	std::queue<double> iterationMeanTime;

	Camera * camera = 0;
	if(UDirectory::exists(path))
	{
		camera = new CameraImages(path, startAt, false, 1/rate, imageWidth, imageHeight);
	}
	else
	{
		camera = new CameraVideo(path, 1/rate, imageWidth, imageHeight);
	}

	if(!camera || !camera->init())
	{
		printf("Camera init failed, using path \"%s\"\n", path.c_str());
		exit(1);
	}

	std::map<int, int> groundTruth;

	ULogger::setType(ULogger::kTypeConsole);
	//ULogger::setType(ULogger::kTypeFile, rtabmap.getWorkingDir()+"/LogConsole.txt", false);
	//ULogger::setBuffered(true);
	ULogger::setLevel(logLevel);
	ULogger::setExitLevel(exitLevel);

	// Create tasks : memory is deleted
	Rtabmap rtabmap;
	// Disable statistics (we don't need them)
	pm.insert(ParametersPair(Parameters::kRtabmapPublishStats(), "false"));
	// use default working directory
	pm.insert(ParametersPair(Parameters::kRtabmapWorkingDirectory(), Parameters::defaultRtabmapWorkingDirectory()));
	pm.insert(ParametersPair(Parameters::kRGBDEnabled(), "false"));

	rtabmap.init(pm);
	rtabmap.setTimeThreshold(timeThreshold); // in ms

	printf("Avpd init time = %fs\n", timer.ticks());

	// Start thread's task
	int loopClosureId;
	int count = 0;
	int countLoopDetected=0;

	printf("\nParameters : \n");
	printf(" Data set : %s\n", path.c_str());
	printf(" Time threshold = %1.2f ms\n", timeThreshold);
	printf(" Image rate = %1.2f s (%1.2f Hz)\n", rate, 1/rate);
	printf(" Repeating data set = %s\n", repeat?"true":"false");
	printf(" Camera width=%d, height=%d (0 is default)\n", imageWidth, imageHeight);
	printf(" Camera starts at image %d (default 1)\n", startAt);
	if(createGT)
	{
		printf(" Creating the ground truth matrix.\n");
	}
	printf(" INFO: All other parameters are set to defaults\n");
	if(pm.size()>1)
	{
		printf("   Overwritten parameters :\n");
		for(ParametersMap::iterator iter = pm.begin(); iter!=pm.end(); ++iter)
		{
			printf("    %s=%s\n",iter->first.c_str(), iter->second.c_str());
		}
	}
	if(rtabmap.getWM().size() || rtabmap.getSTM().size())
	{
		printf("[Warning] RTAB-Map database is not empty (%s)\n", (rtabmap.getWorkingDir()+Parameters::getDefaultDatabaseName()).c_str());
	}
	printf("\nProcessing images...\n");

	UTimer iterationTimer;
	UTimer rtabmapTimer;
	int imagesProcessed = 0;
	std::list<std::vector<float> > teleopActions;
	while(loopDataset <= repeat && g_forever)
	{
		cv::Mat img = camera->takeImage();
		int i=0;
		double maxIterationTime = 0.0;
		int maxIterationTimeId = 0;
		while(!img.empty() && g_forever)
		{
			++imagesProcessed;
			iterationTimer.start();
			rtabmapTimer.start();
			rtabmap.process(img);
			double rtabmapTime = rtabmapTimer.elapsed();
			loopClosureId = rtabmap.getLoopClosureId();
			if(rtabmap.getLoopClosureId())
			{
				++countLoopDetected;
			}
			img = camera->takeImage();
			if(++count % 100 == 0)
			{
				printf(" count = %d, loop closures = %d, max time (at %d) = %fs\n",
						count, countLoopDetected, maxIterationTimeId, maxIterationTime);
				maxIterationTime = 0.0;
				maxIterationTimeId = 0;
				std::map<int, int> wm = rtabmap.getWeights();
				printf(" WM(%d)=[", (int)wm.size());
				for(std::map<int, int>::iterator iter=wm.begin(); iter!=wm.end();++iter)
				{
					if(iter != wm.begin())
					{
						printf(";");
					}
					printf("%d,%d", iter->first, iter->second);
				}
				printf("]\n");
			}

			// Update generated ground truth matrix
			if(createGT)
			{
				if(loopClosureId > 0)
				{
					groundTruth.insert(std::make_pair(i, loopClosureId-1));
				}
			}

			++i;

			double iterationTime = iterationTimer.ticks();

			if(rtabmapTime > maxIterationTime)
			{
				maxIterationTime = rtabmapTime;
				maxIterationTimeId = count;
			}

			ULogger::flush();

			if(rtabmap.getLoopClosureId())
			{
				printf(" iteration(%d) loop(%d) hyp(%.2f) time=%fs/%fs *\n",
						count, rtabmap.getLoopClosureId(), rtabmap.getLcHypValue(), rtabmapTime, iterationTime);
			}
			else if(rtabmap.getRetrievedId())
			{
				printf(" iteration(%d) high(%d) hyp(%.2f) time=%fs/%fs\n",
						count, rtabmap.getRetrievedId(), rtabmap.getLcHypValue(), rtabmapTime, iterationTime);
			}
			else
			{
				printf(" iteration(%d) time=%fs/%fs\n", count, rtabmapTime, iterationTime);
			}

			if(timeThreshold && rtabmapTime > timeThreshold*100.0f)
			{
				printf(" ERROR, there is a problem, too much time taken... %fs", rtabmapTime);
				break; // there is a problem, don't continue
			}
		}
		++loopDataset;
		if(loopDataset <= repeat)
		{
			camera->init();
			printf(" Beginning loop %d...\n", loopDataset);
		}
	}
	printf("Processing images completed. Loop closures found = %d\n", countLoopDetected);
	printf(" Total time = %fs\n", timer.ticks());

	if(imagesProcessed && createGT)
	{
		cv::Mat groundTruthMat = cv::Mat::zeros(imagesProcessed, imagesProcessed, CV_8U);

		for(std::map<int, int>::iterator iter = groundTruth.begin(); iter!=groundTruth.end(); ++iter)
		{
			groundTruthMat.at<unsigned char>(iter->first, iter->second) = 255;
		}

		// Generate the ground truth file
		printf("Generate ground truth to file %s, size of %d\n", (rtabmap.getWorkingDir()+GENERATED_GT_NAME).c_str(), groundTruthMat.rows);
		IplImage img = groundTruthMat;
		cvSaveImage((rtabmap.getWorkingDir()+GENERATED_GT_NAME).c_str(), &img);
		printf(" Creating ground truth file = %fs\n", timer.ticks());
	}

	if(camera)
	{
		delete camera;
		camera = 0 ;
	}

	rtabmap.close();

	printf(" Cleanup time = %fs\n", timer.ticks());

	return 0;
}
Example #14
	void mapDataReceivedCallback(const rtabmap_ros::MapDataConstPtr & msg)
	{
		// save new poses and constraints
		// Assuming that nodes/constraints are all linked together
		UASSERT(msg->graph.posesId.size() == msg->graph.poses.size());

		bool dataChanged = false;

		std::multimap<int, Link> newConstraints;
		for(unsigned int i=0; i<msg->graph.links.size(); ++i)
		{
			Link link = rtabmap_ros::linkFromROS(msg->graph.links[i]);
			newConstraints.insert(std::make_pair(link.from(), link));

			bool edgeAlreadyAdded = false;
			for(std::multimap<int, Link>::iterator iter = cachedConstraints_.lower_bound(link.from());
					iter != cachedConstraints_.end() && iter->first == link.from();
					++iter)
			{
				if(iter->second.to() == link.to())
				{
					edgeAlreadyAdded = true;
					if(iter->second.transform().getDistanceSquared(link.transform()) > 0.0001)
					{
						ROS_WARN("%d ->%d (%s vs %s)",iter->second.from(), iter->second.to(), iter->second.transform().prettyPrint().c_str(),
								link.transform().prettyPrint().c_str());
						dataChanged = true;
					}
				}
			}
			if(!edgeAlreadyAdded)
			{
				cachedConstraints_.insert(std::make_pair(link.from(), link));
			}
		}

		std::map<int, Signature> newNodeInfos;
		// add new odometry poses
		for(unsigned int i=0; i<msg->nodes.size(); ++i)
		{
			int id = msg->nodes[i].id;
			Transform pose = rtabmap_ros::transformFromPoseMsg(msg->nodes[i].pose);
			Signature s = rtabmap_ros::nodeInfoFromROS(msg->nodes[i]);
			newNodeInfos.insert(std::make_pair(id, s));

			std::pair<std::map<int, Signature>::iterator, bool> p = cachedNodeInfos_.insert(std::make_pair(id, s));
			if(!p.second && pose.getDistanceSquared(cachedNodeInfos_.at(id).getPose()) > 0.0001)
			{
				dataChanged = true;
			}
		}

		if(dataChanged)
		{
			ROS_WARN("Graph data has changed! Reset cache...");
			cachedConstraints_ = newConstraints;
			cachedNodeInfos_ = newNodeInfos;
		}

		//match poses in the graph
		std::multimap<int, Link> constraints;
		std::map<int, Signature> nodeInfos;
		if(globalOptimization_)
		{
			constraints = cachedConstraints_;
			nodeInfos = cachedNodeInfos_;
		}
		else
		{
			constraints = newConstraints;
			for(unsigned int i=0; i<msg->graph.posesId.size(); ++i)
			{
				std::map<int, Signature>::iterator iter = cachedNodeInfos_.find(msg->graph.posesId[i]);
				if(iter != cachedNodeInfos_.end())
				{
					nodeInfos.insert(*iter);
				}
				else
				{
					ROS_ERROR("Odometry pose of node %d not found in cache!", msg->graph.posesId[i]);
					return;
				}
			}
		}

		std::map<int, Transform> poses;
		for(std::map<int, Signature>::iterator iter=nodeInfos.begin(); iter!=nodeInfos.end(); ++iter)
		{
			poses.insert(std::make_pair(iter->first, iter->second.getPose()));
		}

		// Optimize only if there is a subscriber
		if(mapDataPub_.getNumSubscribers() || mapGraphPub_.getNumSubscribers())
		{
			UTimer timer;
			std::map<int, Transform> optimizedPoses;
			Transform mapCorrection = Transform::getIdentity();
			std::map<int, rtabmap::Transform> posesOut;
			std::multimap<int, rtabmap::Link> linksOut;
			if(poses.size() > 1 && constraints.size() > 0)
			{
				int fromId = optimizeFromLastNode_?poses.rbegin()->first:poses.begin()->first;
				optimizer_->getConnectedGraph(
						fromId,
						poses,
						constraints,
						posesOut,
						linksOut);
				optimizedPoses = optimizer_->optimize(fromId, posesOut, linksOut);
				mapToOdomMutex_.lock();
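				// The map correction below is the optimized pose of the most recent node
				// composed with the inverse of its (non-optimized) odometry pose,
				// i.e. the map->odom correction stored in mapToOdom_.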
				mapCorrection = optimizedPoses.at(posesOut.rbegin()->first) * posesOut.rbegin()->second.inverse();
				mapToOdom_ = mapCorrection;
				mapToOdomMutex_.unlock();
			}
			else if(poses.size() == 1 && constraints.size() == 0)
			{
				optimizedPoses = poses;
			}
			else if(poses.size() || constraints.size())
			{
				ROS_ERROR("map_optimizer: Poses=%d and edges=%d (poses must "
					   "not be null if there are edges, and edges must be null if poses <= 1)",
					  (int)poses.size(), (int)constraints.size());
			}

			rtabmap_ros::MapData outputDataMsg;
			rtabmap_ros::MapGraph outputGraphMsg;
			rtabmap_ros::mapGraphToROS(optimizedPoses,
					linksOut,
					mapCorrection,
					outputGraphMsg);

			if(mapGraphPub_.getNumSubscribers())
			{
				outputGraphMsg.header = msg->header;
				mapGraphPub_.publish(outputGraphMsg);
			}

			if(mapDataPub_.getNumSubscribers())
			{
				outputDataMsg.header = msg->header;
				outputDataMsg.graph = outputGraphMsg;
				outputDataMsg.nodes = msg->nodes;
				if(posesOut.size() > msg->nodes.size())
				{
					std::set<int> addedNodes;
					for(unsigned int i=0; i<msg->nodes.size(); ++i)
					{
						addedNodes.insert(msg->nodes[i].id);
					}
					std::list<int> toAdd;
					for(std::map<int, Transform>::iterator iter=posesOut.begin(); iter!=posesOut.end(); ++iter)
					{
						if(addedNodes.find(iter->first) == addedNodes.end())
						{
							toAdd.push_back(iter->first);
						}
					}
					if(toAdd.size())
					{
						int oi = outputDataMsg.nodes.size();
						outputDataMsg.nodes.resize(outputDataMsg.nodes.size()+toAdd.size());
						for(std::list<int>::iterator iter=toAdd.begin(); iter!=toAdd.end(); ++iter)
						{
							UASSERT(cachedNodeInfos_.find(*iter) != cachedNodeInfos_.end());
							rtabmap_ros::nodeDataToROS(cachedNodeInfos_.at(*iter), outputDataMsg.nodes[oi]);
							++oi;
						}
					}
				}
				mapDataPub_.publish(outputDataMsg);
			}

			ROS_INFO("Time graph optimization = %f s", timer.ticks());
		}
	}
Example #15
int main(int argc, char * argv[])
{
	if(argc < 2)
	{
		showUsage();
	}
	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kDebug);

	std::string dictionaryPath = argv[argc-1];
	std::list<std::vector<float> > objectDescriptors;
	//std::list<std::vector<float> > descriptors;
	std::map<int, std::vector<float> > descriptors;
	int dimension  = 0;
	UTimer timer;
	int objectDescriptorsSize= 400;

	std::ifstream file;
	if(!dictionaryPath.empty())
	{
		file.open(dictionaryPath.c_str(), std::ifstream::in);
	}
	if(file.good())
	{
		UDEBUG("Loading the dictionary from \"%s\"", dictionaryPath.c_str());

		// first line is the header
		std::string str;
		std::list<std::string> strList;
		std::getline(file, str);
		strList = uSplitNumChar(str);
		for(std::list<std::string>::iterator iter = strList.begin(); iter != strList.end(); ++iter)
		{
			if(uIsDigit(iter->at(0)))
			{
				dimension = std::atoi(iter->c_str());
				break;
			}
		}

		if(dimension == 0 || dimension > 1000)
		{
			UERROR("Invalid dictionary file, visual word dimension (%d) is not valid, \"%s\"", dimension, dictionaryPath.c_str());
		}
		else
		{
			int descriptorsLoaded = 0;
			// Process all words
			while(file.good())
			{
				std::getline(file, str);
				strList = uSplit(str);
				if((int)strList.size() == dimension+1)
				{
					//first one is the visual word id
					std::list<std::string>::iterator iter = strList.begin();
					int id = atoi(iter->c_str());
					++iter;

					std::vector<float> descriptor(dimension);
					int i=0;

					//get descriptor
					for(;i<dimension && iter != strList.end(); ++i, ++iter)
					{
						descriptor[i] = uStr2Float(*iter);
					}
					if(i != dimension)
					{
						UERROR("");
					}

					if(++descriptorsLoaded<=objectDescriptorsSize)
					{
						objectDescriptors.push_back(descriptor);
					}
					else
					{
						//descriptors.push_back(descriptor);
						descriptors.insert(std::make_pair(id, descriptor));
					}
				}
				else if(str.size())
				{
					UWARN("Cannot parse line \"%s\"", str.c_str());
				}
			}
		}

		UDEBUG("Time loading dictionary = %fs, dimension=%d", timer.ticks(), dimension);
	}
	else
	{
		UERROR("Cannot open dictionary file \"%s\"", dictionaryPath.c_str());
	}
	file.close();

	if(descriptors.size() && objectDescriptors.size() && dimension)
	{
		cv::Mat dataTree;
		cv::Mat queries;

		UDEBUG("Creating data structures...");
		// Create the data structure
		dataTree = cv::Mat((int)descriptors.size(), dimension, CV_32F); // SURF descriptors are CV_32F
		{//scope
			//std::list<std::vector<float> >::const_iterator iter = descriptors.begin();
			std::map<int, std::vector<float> >::const_iterator iter = descriptors.begin();
			for(unsigned int i=0; i < descriptors.size(); ++i, ++iter)
			{
				UTimer tim;
				//memcpy(dataTree.ptr<float>(i), iter->data(), dimension*sizeof(float));
				memcpy(dataTree.ptr<float>(i), iter->second.data(), dimension*sizeof(float));
				//if(i%100==0)
				//	UDEBUG("i=%d/%d tim=%fs", i, descriptors.size(), tim.ticks());
			}
		}

		queries = cv::Mat((int)objectDescriptors.size(), dimension, CV_32F); // SURF descriptors are CV_32F
		{//scope
			std::list<std::vector<float> >::const_iterator iter = objectDescriptors.begin();
			for(unsigned int i=0; i < objectDescriptors.size(); ++i, ++iter)
			{
				UTimer tim;
				memcpy(queries.ptr<float>(i), iter->data(), dimension*sizeof(float));
				//if(i%100==0)
				//	UDEBUG("i=%d/%d tim=%fs", i, objectDescriptors.size(), tim.ticks());
			}
		}

		UDEBUG("descriptors.size()=%d, objectDescriptorsSize=%d, copying data = %f s",descriptors.size(), objectDescriptors.size(), timer.ticks());

		UDEBUG("Creating indexes...");
		cv::flann::Index * linearIndex = new cv::flann::Index(dataTree, cv::flann::LinearIndexParams());
		UDEBUG("Time to create linearIndex = %f s", timer.ticks());

		cv::flann::Index * kdTreeIndex1 = new cv::flann::Index(dataTree, cv::flann::KDTreeIndexParams(1));
		UDEBUG("Time to create kdTreeIndex1 = %f s", timer.ticks());

		cv::flann::Index * kdTreeIndex4 = new cv::flann::Index(dataTree, cv::flann::KDTreeIndexParams(4));
		UDEBUG("Time to create kdTreeIndex4 = %f s", timer.ticks());

		cv::flann::Index * kMeansIndex = new cv::flann::Index(dataTree, cv::flann::KMeansIndexParams());
		UDEBUG("Time to create kMeansIndex = %f s", timer.ticks());

		cv::flann::Index * compositeIndex = new cv::flann::Index(dataTree, cv::flann::CompositeIndexParams());
		UDEBUG("Time to create compositeIndex = %f s", timer.ticks());

		//cv::flann::Index * autoTunedIndex = new cv::flann::Index(dataTree, cv::flann::AutotunedIndexParams());
		//UDEBUG("Time to create autoTunedIndex = %f s", timer.ticks());


		UDEBUG("Search indexes...");
		int k=2; // 2 nearest neighbors
		cv::Mat results(queries.rows, k, CV_32SC1); // results index
		cv::Mat dists(queries.rows, k, CV_32FC1); // Distance results are CV_32FC1

		linearIndex->knnSearch(queries, results, dists, k);
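		// Keep the exact (brute-force) distances as ground truth for measuring the approximation error of the other indexes below.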
		//std::cout << results.t() << std::endl;
		cv::Mat transposedLinear = dists.t();
		UDEBUG("Time to search linearIndex = %f s", timer.ticks());

		kdTreeIndex1->knnSearch(queries, results, dists, k);
		//std::cout << results.t() << std::endl;
		cv::Mat transposed = dists.t();
		UDEBUG("Time to search kdTreeIndex1 = %f s (size=%d, dist error(k1,k2)=(%f,%f))",
				timer.ticks(),
				transposed.cols,
				uMeanSquaredError(  (float*)transposed.data,
					transposed.cols,
					(float*)transposedLinear.data,
					transposedLinear.cols),
				uMeanSquaredError(  &transposed.at<float>(1,0),
					transposed.cols,
					&transposedLinear.at<float>(1,0),
					transposedLinear.cols));

		kdTreeIndex4->knnSearch(queries, results, dists, k);
		//std::cout << results.t() << std::endl;
		transposed = dists.t();
		UDEBUG("Time to search kdTreeIndex4 = %f s (size=%d, dist error(k1,k2)=(%f,%f))",
				timer.ticks(),
				transposed.cols,
				uMeanSquaredError(  (float*)transposed.data,
					transposed.cols,
					(float*)transposedLinear.data,
					transposedLinear.cols),
				uMeanSquaredError(  &transposed.at<float>(1,0),
					transposed.cols,
					&transposedLinear.at<float>(1,0),
					transposedLinear.cols));

		kMeansIndex->knnSearch(queries, results, dists, k);
		//std::cout << results.t() << std::endl;
		transposed = dists.t();
		UDEBUG("Time to search kMeansIndex = %f s (size=%d, dist error(k1,k2)=(%f,%f))",
				timer.ticks(),
				transposed.cols,
				uMeanSquaredError(  (float*)transposed.data,
					transposed.cols,
					(float*)transposedLinear.data,
					transposedLinear.cols),
				uMeanSquaredError(  &transposed.at<float>(1,0),
					transposed.cols,
					&transposedLinear.at<float>(1,0),
					transposedLinear.cols));

		compositeIndex->knnSearch(queries, results, dists, k);
		//std::cout << results.t() << std::endl;
		transposed = dists.t();
		UDEBUG("Time to search compositeIndex = %f s (size=%d, dist error(k1,k2)=(%f,%f))",
				timer.ticks(),
				transposed.cols,
				uMeanSquaredError(  (float*)transposed.data,
					transposed.cols,
					(float*)transposedLinear.data,
					transposedLinear.cols),
				uMeanSquaredError(  &transposed.at<float>(1,0),
					transposed.cols,
					&transposedLinear.at<float>(1,0),
					transposedLinear.cols));

		//autoTunedIndex->knnSearch(queries, results, dists, k);
		//UDEBUG("Time to search autoTunedIndex = %f s", timer.ticks());

		delete linearIndex;
		delete kdTreeIndex1;
		delete kdTreeIndex4;
		delete kMeansIndex;
		delete compositeIndex;
		//delete autoTunedIndex;
	}

	return 0;
}
Exemplo n.º 16
0
void DBDriver::emptyTrashes(bool async)
{
	if(async)
	{
		ULOGGER_DEBUG("Async emptying, start the trash thread");
		this->start();
		return;
	}

	UTimer totalTime;
	totalTime.start();

	std::map<int, Signature*> signatures;
	std::map<int, VisualWord*> visualWords;
	_trashesMutex.lock();
	{
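		// Take local copies of the trash buffers and clear them so the trash mutex can be released quickly;
		// the database mutex stays locked until the save is committed.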
		ULOGGER_DEBUG("signatures=%d, visualWords=%d", _trashSignatures.size(), _trashVisualWords.size());
		signatures = _trashSignatures;
		visualWords = _trashVisualWords;
		_trashSignatures.clear();
		_trashVisualWords.clear();

		_dbSafeAccessMutex.lock();
	}
	_trashesMutex.unlock();

	if(signatures.size() || visualWords.size())
	{
		this->beginTransaction();
		UTimer timer;
		timer.start();
		if(signatures.size())
		{
			if(this->isConnected())
			{
				//Only one query to the database
				this->saveOrUpdate(uValues(signatures));
			}

			for(std::map<int, Signature *>::iterator iter=signatures.begin(); iter!=signatures.end(); ++iter)
			{
				delete iter->second;
			}
			signatures.clear();
			ULOGGER_DEBUG("Time emptying memory signatures trash = %f...", timer.ticks());
		}
		if(visualWords.size())
		{
			if(this->isConnected())
			{
				//Only one query to the database
				this->saveOrUpdate(uValues(visualWords));
			}

			for(std::map<int, VisualWord *>::iterator iter=visualWords.begin(); iter!=visualWords.end(); ++iter)
			{
				delete (*iter).second;
			}
			visualWords.clear();
			ULOGGER_DEBUG("Time emptying memory visualWords trash = %f...", timer.ticks());
		}

		this->commit();
	}

	_emptyTrashesTime = totalTime.ticks();
	ULOGGER_DEBUG("Total time emptying trashes = %fs...", _emptyTrashesTime);

	_dbSafeAccessMutex.unlock();
}
Exemplo n.º 17
0
void CameraThread::postUpdate(SensorData * dataPtr, CameraInfo * info) const
{
	UASSERT(dataPtr!=0);
	SensorData & data = *dataPtr;
	if(_colorOnly && !data.depthRaw().empty())
	{
		data.setDepthOrRightRaw(cv::Mat());
	}

	if(_distortionModel && !data.depthRaw().empty())
	{
		UTimer timer;
		if(_distortionModel->getWidth() == data.depthRaw().cols &&
		   _distortionModel->getHeight() == data.depthRaw().rows	)
		{
			cv::Mat depth = data.depthRaw().clone();// make sure we are not modifying data in cached signatures.
			_distortionModel->undistort(depth);
			data.setDepthOrRightRaw(depth);
		}
		else
		{
			UERROR("Distortion model size is %dx%d but dpeth image is %dx%d!",
					_distortionModel->getWidth(), _distortionModel->getHeight(),
					data.depthRaw().cols, data.depthRaw().rows);
		}
		if(info) info->timeUndistortDepth = timer.ticks();
	}

	if(_bilateralFiltering && !data.depthRaw().empty())
	{
		UTimer timer;
		data.setDepthOrRightRaw(util2d::fastBilateralFiltering(data.depthRaw(), _bilateralSigmaS, _bilateralSigmaR));
		if(info) info->timeBilateralFiltering = timer.ticks();
	}

	if(_imageDecimation>1 && !data.imageRaw().empty())
	{
		UDEBUG("");
		UTimer timer;
		if(!data.depthRaw().empty() &&
		   !(data.depthRaw().rows % _imageDecimation == 0 && data.depthRaw().cols % _imageDecimation == 0))
		{
			UERROR("Decimation of depth images should be exact (decimation=%d, size=(%d,%d))! "
				   "Images won't be resized.", _imageDecimation, data.depthRaw().cols, data.depthRaw().rows);
		}
		else
		{
			data.setImageRaw(util2d::decimate(data.imageRaw(), _imageDecimation));
			data.setDepthOrRightRaw(util2d::decimate(data.depthOrRightRaw(), _imageDecimation));
			std::vector<CameraModel> models = data.cameraModels();
			for(unsigned int i=0; i<models.size(); ++i)
			{
				if(models[i].isValidForProjection())
				{
					models[i] = models[i].scaled(1.0/double(_imageDecimation));
				}
			}
			data.setCameraModels(models);
			StereoCameraModel stereoModel = data.stereoCameraModel();
			if(stereoModel.isValidForProjection())
			{
				stereoModel.scale(1.0/double(_imageDecimation));
				data.setStereoCameraModel(stereoModel);
			}
		}
		if(info) info->timeImageDecimation = timer.ticks();
	}
	if(_mirroring && !data.imageRaw().empty() && data.cameraModels().size() == 1)
	{
		UDEBUG("");
		UTimer timer;
		cv::Mat tmpRgb;
		cv::flip(data.imageRaw(), tmpRgb, 1);
		data.setImageRaw(tmpRgb);
		UASSERT_MSG(data.cameraModels().size() <= 1 && !data.stereoCameraModel().isValidForProjection(), "Only single RGBD cameras are supported for mirroring.");
		if(data.cameraModels().size() && data.cameraModels()[0].cx())
		{
			CameraModel tmpModel(
					data.cameraModels()[0].fx(),
					data.cameraModels()[0].fy(),
					float(data.imageRaw().cols) - data.cameraModels()[0].cx(),
					data.cameraModels()[0].cy(),
					data.cameraModels()[0].localTransform());
			data.setCameraModel(tmpModel);
		}
		if(!data.depthRaw().empty())
		{
			cv::Mat tmpDepth;
			cv::flip(data.depthRaw(), tmpDepth, 1);
			data.setDepthOrRightRaw(tmpDepth);
		}
		if(info) info->timeMirroring = timer.ticks();
	}
	if(_stereoToDepth && !data.imageRaw().empty() && data.stereoCameraModel().isValidForProjection() && !data.rightRaw().empty())
	{
		UDEBUG("");
		UTimer timer;
		cv::Mat depth = util2d::depthFromDisparity(
				_stereoDense->computeDisparity(data.imageRaw(), data.rightRaw()),
				data.stereoCameraModel().left().fx(),
				data.stereoCameraModel().baseline());
		// set Tx for stereo bundle adjustment (when used)
		CameraModel model = CameraModel(
				data.stereoCameraModel().left().fx(),
				data.stereoCameraModel().left().fy(),
				data.stereoCameraModel().left().cx(),
				data.stereoCameraModel().left().cy(),
				data.stereoCameraModel().localTransform(),
				-data.stereoCameraModel().baseline()*data.stereoCameraModel().left().fx());
		data.setCameraModel(model);
		data.setDepthOrRightRaw(depth);
		data.setStereoCameraModel(StereoCameraModel());
		if(info) info->timeDisparity = timer.ticks();
	}
	if(_scanFromDepth &&
		data.cameraModels().size() &&
		data.cameraModels().at(0).isValidForProjection() &&
		!data.depthRaw().empty())
	{
		UDEBUG("");
		if(data.laserScanRaw().empty())
		{
			UASSERT(_scanDecimation >= 1);
			UTimer timer;
			pcl::IndicesPtr validIndices(new std::vector<int>);
			pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = util3d::cloudFromSensorData(
					data,
					_scanDecimation,
					_scanMaxDepth,
					_scanMinDepth,
					validIndices.get());
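			// Theoretical maximum number of points for this decimation; adjusted by the voxel-filter ratio and stored below in LaserScanInfo.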
			float maxPoints = (data.depthRaw().rows/_scanDecimation)*(data.depthRaw().cols/_scanDecimation);
			cv::Mat scan;
			const Transform & baseToScan = data.cameraModels()[0].localTransform();
			if(validIndices->size())
			{
				if(_scanVoxelSize>0.0f)
				{
					cloud = util3d::voxelize(cloud, validIndices, _scanVoxelSize);
					float ratio = float(cloud->size()) / float(validIndices->size());
					maxPoints = ratio * maxPoints;
				}
				else if(!cloud->is_dense)
				{
					pcl::PointCloud<pcl::PointXYZ>::Ptr denseCloud(new pcl::PointCloud<pcl::PointXYZ>);
					pcl::copyPointCloud(*cloud, *validIndices, *denseCloud);
					cloud = denseCloud;
				}

				if(cloud->size())
				{
					if(_scanNormalsK>0)
					{
						Eigen::Vector3f viewPoint(baseToScan.x(), baseToScan.y(), baseToScan.z());
						pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals(cloud, _scanNormalsK, viewPoint);
						pcl::PointCloud<pcl::PointNormal>::Ptr cloudNormals(new pcl::PointCloud<pcl::PointNormal>);
						pcl::concatenateFields(*cloud, *normals, *cloudNormals);
						scan = util3d::laserScanFromPointCloud(*cloudNormals, baseToScan.inverse());
					}
					else
					{
						scan = util3d::laserScanFromPointCloud(*cloud, baseToScan.inverse());
					}
				}
			}
			data.setLaserScanRaw(scan, LaserScanInfo((int)maxPoints, _scanMaxDepth, baseToScan));
			if(info) info->timeScanFromDepth = timer.ticks();
		}
		else
		{
			UWARN("Option to create laser scan from depth image is enabled, but "
				  "there is already a laser scan in the captured sensor data. Scan from "
				  "depth will not be created.");
		}
	}
}
Exemplo n.º 18
0
int main(int argc, char** argv)
{
	ros::init(argc, argv, "data_player");

	//ULogger::setType(ULogger::kTypeConsole);
	//ULogger::setLevel(ULogger::kDebug);
	//ULogger::setEventLevel(ULogger::kWarning);


	ros::NodeHandle nh;
	ros::NodeHandle pnh("~");

	std::string frameId = "base_link";
	std::string odomFrameId = "odom";
	std::string cameraFrameId = "camera_optical_link";
	std::string scanFrameId = "base_laser_link";
	double rate = -1.0;
	std::string databasePath = "";
	bool publishTf = true;
	int startId = 0;

	pnh.param("frame_id", frameId, frameId);
	pnh.param("odom_frame_id", odomFrameId, odomFrameId);
	pnh.param("camera_frame_id", cameraFrameId, cameraFrameId);
	pnh.param("scan_frame_id", scanFrameId, scanFrameId);
	pnh.param("rate", rate, rate); // Set -1 to use database stamps
	pnh.param("database", databasePath, databasePath);
	pnh.param("publish_tf", publishTf, publishTf);
	pnh.param("start_id", startId, startId);

	// based on URG-04LX
	double scanHeight, scanAngleMin, scanAngleMax, scanAngleIncrement, scanTime, scanRangeMin, scanRangeMax;
	pnh.param<double>("scan_height", scanHeight, 0.3);
	pnh.param<double>("scan_angle_min", scanAngleMin, -M_PI / 2.0);
	pnh.param<double>("scan_angle_max", scanAngleMax, M_PI / 2.0);
	pnh.param<double>("scan_angle_increment", scanAngleIncrement, M_PI / 360.0);
	pnh.param<double>("scan_time", scanTime, 1.0 / 10.0);
	pnh.param<double>("scan_range_min", scanRangeMin, 0.02);
	pnh.param<double>("scan_range_max", scanRangeMax, 6.0);

	ROS_INFO("frame_id = %s", frameId.c_str());
	ROS_INFO("odom_frame_id = %s", odomFrameId.c_str());
	ROS_INFO("camera_frame_id = %s", cameraFrameId.c_str());
	ROS_INFO("scan_frame_id = %s", scanFrameId.c_str());
	ROS_INFO("rate = %f", rate);
	ROS_INFO("publish_tf = %s", publishTf?"true":"false");
	ROS_INFO("start_id = %d", startId);

	if(databasePath.empty())
	{
		ROS_ERROR("Parameter \"database\" must be set (path to a RTAB-Map database).");
		return -1;
	}
	databasePath = uReplaceChar(databasePath, '~', UDirectory::homeDir());
	if(databasePath.size() && databasePath.at(0) != '/')
	{
		databasePath = UDirectory::currentDir(true) + databasePath;
	}
	ROS_INFO("database = %s", databasePath.c_str());

	rtabmap::DBReader reader(databasePath, rate, false, false, false, startId);
	if(!reader.init())
	{
		ROS_ERROR("Cannot open database \"%s\".", databasePath.c_str());
		return -1;
	}

	ros::ServiceServer pauseSrv = pnh.advertiseService("pause", pauseCallback);
	ros::ServiceServer resumeSrv = pnh.advertiseService("resume", resumeCallback);

	image_transport::ImageTransport it(nh);
	image_transport::Publisher imagePub;
	image_transport::Publisher rgbPub;
	image_transport::Publisher depthPub;
	image_transport::Publisher leftPub;
	image_transport::Publisher rightPub;
	ros::Publisher rgbCamInfoPub;
	ros::Publisher depthCamInfoPub;
	ros::Publisher leftCamInfoPub;
	ros::Publisher rightCamInfoPub;
	ros::Publisher odometryPub;
	ros::Publisher scanPub;
	tf2_ros::TransformBroadcaster tfBroadcaster;

	UTimer timer;
	rtabmap::CameraInfo info;
	rtabmap::SensorData data = reader.takeImage(&info);
	rtabmap::OdometryEvent odom(data, info.odomPose, info.odomCovariance);
	double acquisitionTime = timer.ticks();
	while(ros::ok() && odom.data().id())
	{
		ROS_INFO("Reading sensor data %d...", odom.data().id());

		ros::Time time = ros::Time::now();

		sensor_msgs::CameraInfo camInfoA; //rgb or left
		sensor_msgs::CameraInfo camInfoB; //depth or right

		camInfoA.K.assign(0);
		camInfoA.K[0] = camInfoA.K[4] = camInfoA.K[8] = 1;
		camInfoA.R.assign(0);
		camInfoA.R[0] = camInfoA.R[4] = camInfoA.R[8] = 1;
		camInfoA.P.assign(0);
		camInfoA.P[10] = 1;

		camInfoA.header.frame_id = cameraFrameId;
		camInfoA.header.stamp = time;

		camInfoB = camInfoA;

		int type = -1;
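		// type: -1 = single image only, 0 = RGB-D, 1 = stereo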
		if(!odom.data().depthRaw().empty() && (odom.data().depthRaw().type() == CV_32FC1 || odom.data().depthRaw().type() == CV_16UC1))
		{
			if(odom.data().cameraModels().size() > 1)
			{
				ROS_WARN("Multi-cameras detected in database but this node cannot send multi-images yet...");
			}
			else
			{
				//depth
				if(odom.data().cameraModels().size())
				{
					camInfoA.D.resize(5,0);

					camInfoA.P[0] = odom.data().cameraModels()[0].fx();
					camInfoA.K[0] = odom.data().cameraModels()[0].fx();
					camInfoA.P[5] = odom.data().cameraModels()[0].fy();
					camInfoA.K[4] = odom.data().cameraModels()[0].fy();
					camInfoA.P[2] = odom.data().cameraModels()[0].cx();
					camInfoA.K[2] = odom.data().cameraModels()[0].cx();
					camInfoA.P[6] = odom.data().cameraModels()[0].cy();
					camInfoA.K[5] = odom.data().cameraModels()[0].cy();

					camInfoB = camInfoA;
				}

				type=0;

				if(rgbPub.getTopic().empty()) rgbPub = it.advertise("rgb/image", 1);
				if(depthPub.getTopic().empty()) depthPub = it.advertise("depth_registered/image", 1);
				if(rgbCamInfoPub.getTopic().empty()) rgbCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("rgb/camera_info", 1);
				if(depthCamInfoPub.getTopic().empty()) depthCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("depth_registered/camera_info", 1);
			}
		}
		else if(!odom.data().rightRaw().empty() && odom.data().rightRaw().type() == CV_8U)
		{
			//stereo
			if(odom.data().stereoCameraModel().isValidForProjection())
			{
				camInfoA.D.resize(8,0);

				camInfoA.P[0] = odom.data().stereoCameraModel().left().fx();
				camInfoA.K[0] = odom.data().stereoCameraModel().left().fx();
				camInfoA.P[5] = odom.data().stereoCameraModel().left().fy();
				camInfoA.K[4] = odom.data().stereoCameraModel().left().fy();
				camInfoA.P[2] = odom.data().stereoCameraModel().left().cx();
				camInfoA.K[2] = odom.data().stereoCameraModel().left().cx();
				camInfoA.P[6] = odom.data().stereoCameraModel().left().cy();
				camInfoA.K[5] = odom.data().stereoCameraModel().left().cy();

				camInfoB = camInfoA;
				camInfoB.P[3] = odom.data().stereoCameraModel().right().Tx(); // Right_Tx = -baseline*fx
			}

			type=1;

			if(leftPub.getTopic().empty()) leftPub = it.advertise("left/image", 1);
			if(rightPub.getTopic().empty()) rightPub = it.advertise("right/image", 1);
			if(leftCamInfoPub.getTopic().empty()) leftCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("left/camera_info", 1);
			if(rightCamInfoPub.getTopic().empty()) rightCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("right/camera_info", 1);

		}
		else
		{
			if(imagePub.getTopic().empty()) imagePub = it.advertise("image", 1);
		}

		camInfoA.height = odom.data().imageRaw().rows;
		camInfoA.width = odom.data().imageRaw().cols;
		camInfoB.height = odom.data().depthOrRightRaw().rows;
		camInfoB.width = odom.data().depthOrRightRaw().cols;

		if(!odom.data().laserScanRaw().empty())
		{
			if(scanPub.getTopic().empty()) scanPub = nh.advertise<sensor_msgs::LaserScan>("scan", 1);
		}

		// publish transforms first
		if(publishTf)
		{
			rtabmap::Transform localTransform;
			if(odom.data().cameraModels().size() == 1)
			{
				localTransform = odom.data().cameraModels()[0].localTransform();
			}
			else if(odom.data().stereoCameraModel().isValidForProjection())
			{
				localTransform = odom.data().stereoCameraModel().left().localTransform();
			}
			if(!localTransform.isNull())
			{
				geometry_msgs::TransformStamped baseToCamera;
				baseToCamera.child_frame_id = cameraFrameId;
				baseToCamera.header.frame_id = frameId;
				baseToCamera.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(localTransform, baseToCamera.transform);
				tfBroadcaster.sendTransform(baseToCamera);
			}

			if(!odom.pose().isNull())
			{
				geometry_msgs::TransformStamped odomToBase;
				odomToBase.child_frame_id = frameId;
				odomToBase.header.frame_id = odomFrameId;
				odomToBase.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(odom.pose(), odomToBase.transform);
				tfBroadcaster.sendTransform(odomToBase);
			}

			if(!scanPub.getTopic().empty())
			{
				geometry_msgs::TransformStamped baseToLaserScan;
				baseToLaserScan.child_frame_id = scanFrameId;
				baseToLaserScan.header.frame_id = frameId;
				baseToLaserScan.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(rtabmap::Transform(0,0,scanHeight,0,0,0), baseToLaserScan.transform);
				tfBroadcaster.sendTransform(baseToLaserScan);
			}
		}
		if(!odom.pose().isNull())
		{
			if(odometryPub.getTopic().empty()) odometryPub = nh.advertise<nav_msgs::Odometry>("odom", 1);

			if(odometryPub.getNumSubscribers())
			{
				nav_msgs::Odometry odomMsg;
				odomMsg.child_frame_id = frameId;
				odomMsg.header.frame_id = odomFrameId;
				odomMsg.header.stamp = time;
				rtabmap_ros::transformToPoseMsg(odom.pose(), odomMsg.pose.pose);
				UASSERT(odomMsg.pose.covariance.size() == 36 &&
						odom.covariance().total() == 36 &&
						odom.covariance().type() == CV_64FC1);
				memcpy(odomMsg.pose.covariance.begin(), odom.covariance().data, 36*sizeof(double));
				odometryPub.publish(odomMsg);
			}
		}

		if(type >= 0)
		{
			if(rgbCamInfoPub.getNumSubscribers() && type == 0)
			{
				rgbCamInfoPub.publish(camInfoA);
			}
			if(leftCamInfoPub.getNumSubscribers() && type == 1)
			{
				leftCamInfoPub.publish(camInfoA);
			}
			if(depthCamInfoPub.getNumSubscribers() && type == 0)
			{
				depthCamInfoPub.publish(camInfoB);
			}
			if(rightCamInfoPub.getNumSubscribers() && type == 1)
			{
				rightCamInfoPub.publish(camInfoB);
			}
		}

		if(imagePub.getNumSubscribers() || rgbPub.getNumSubscribers() || leftPub.getNumSubscribers())
		{
			cv_bridge::CvImage img;
			if(odom.data().imageRaw().channels() == 1)
			{
				img.encoding = sensor_msgs::image_encodings::MONO8;
			}
			else
			{
				img.encoding = sensor_msgs::image_encodings::BGR8;
			}
			img.image = odom.data().imageRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;

			if(imagePub.getNumSubscribers())
			{
				imagePub.publish(imageRosMsg);
			}
			if(rgbPub.getNumSubscribers() && type == 0)
			{
				rgbPub.publish(imageRosMsg);
			}
			if(leftPub.getNumSubscribers() && type == 1)
			{
				leftPub.publish(imageRosMsg);
				leftCamInfoPub.publish(camInfoA);
			}
		}

		if(depthPub.getNumSubscribers() && !odom.data().depthRaw().empty() && type==0)
		{
			cv_bridge::CvImage img;
			if(odom.data().depthRaw().type() == CV_32FC1)
			{
				img.encoding = sensor_msgs::image_encodings::TYPE_32FC1;
			}
			else
			{
				img.encoding = sensor_msgs::image_encodings::TYPE_16UC1;
			}
			img.image = odom.data().depthRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;

			depthPub.publish(imageRosMsg);
			depthCamInfoPub.publish(camInfoB);
		}

		if(rightPub.getNumSubscribers() && !odom.data().rightRaw().empty() && type==1)
		{
			cv_bridge::CvImage img;
			img.encoding = sensor_msgs::image_encodings::MONO8;
			img.image = odom.data().rightRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;

			rightPub.publish(imageRosMsg);
			rightCamInfoPub.publish(camInfoB);
		}

		if(scanPub.getNumSubscribers() && !odom.data().laserScanRaw().empty())
		{
			//inspired by the pointcloud_to_laserscan package
			sensor_msgs::LaserScan msg;
			msg.header.frame_id = scanFrameId;
			msg.header.stamp = time;

			msg.angle_min = scanAngleMin;
			msg.angle_max = scanAngleMax;
			msg.angle_increment = scanAngleIncrement;
			msg.time_increment = 0.0;
			msg.scan_time = scanTime;
			msg.range_min = scanRangeMin;
			msg.range_max = scanRangeMax;

			uint32_t rangesSize = std::ceil((msg.angle_max - msg.angle_min) / msg.angle_increment);
			msg.ranges.assign(rangesSize, 0.0);

			const cv::Mat & scan = odom.data().laserScanRaw();
			UASSERT(scan.type() == CV_32FC2 || scan.type() == CV_32FC3);
			UASSERT(scan.rows == 1);
			for (int i=0; i<scan.cols; ++i)
			{
				cv::Vec2f pos = scan.at<cv::Vec2f>(i);
				double range = hypot(pos[0], pos[1]);
				if (range >= scanRangeMin && range <=scanRangeMax)
				{
					double angle = atan2(pos[1], pos[0]);
					if (angle >= msg.angle_min && angle <= msg.angle_max)
					{
						int index = (angle - msg.angle_min) / msg.angle_increment;
						if (index >= 0 && index < (int)rangesSize && (range < msg.ranges[index] || msg.ranges[index] == 0))
						{
							msg.ranges[index] = range;
						}
					}
				}
			}

			scanPub.publish(msg);
		}

		if(odom.data().userDataRaw().type() == CV_8SC1 &&
		   odom.data().userDataRaw().cols >= 7 && // including null str ending
		   odom.data().userDataRaw().rows == 1 &&
		   memcmp(odom.data().userDataRaw().data, "GOAL:", 5) == 0)
		{
			//GOAL format detected, remove it from the user data and send it as goal event
			std::string goalStr = (const char *)odom.data().userDataRaw().data;
			if(!goalStr.empty())
			{
				std::list<std::string> strs = uSplit(goalStr, ':');
				if(strs.size() == 2)
				{
					int goalId = atoi(strs.rbegin()->c_str());

					if(goalId > 0)
					{
						ROS_WARN("Goal %d detected, calling rtabmap's set_goal service!", goalId);
						rtabmap_ros::SetGoal setGoalSrv;
						setGoalSrv.request.node_id = goalId;
						setGoalSrv.request.node_label = "";
						if(!ros::service::call("set_goal", setGoalSrv))
						{
							ROS_ERROR("Can't call \"set_goal\" service");
						}
					}
				}
			}
		}

		ros::spinOnce();

		while(ros::ok() && paused)
		{
			uSleep(100);
			ros::spinOnce();
		}

		timer.restart();
		info = rtabmap::CameraInfo();
		data = reader.takeImage(&info);
		odom = rtabmap::OdometryEvent(data, info.odomPose, info.odomCovariance);
		acquisitionTime = timer.ticks();
	}


	return 0;
}
Exemplo n.º 19
0
	void mapDataReceivedCallback(const rtabmap_ros::MapDataConstPtr & msg)
	{
		UTimer timer;
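		// Cache a 2D occupancy projection (ground/obstacle cells) of each node's laser scan the first time the node is received.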
		for(unsigned int i=0; i<msg->nodes.size(); ++i)
		{
			if(!uContains(gridMaps_, msg->nodes[i].id) && msg->nodes[i].laserScan.size())
			{
				cv::Mat laserScan = rtabmap::uncompressData(msg->nodes[i].laserScan);
				if(!laserScan.empty())
				{
					cv::Mat ground, obstacles;
					util3d::occupancy2DFromLaserScan(laserScan, ground, obstacles, gridCellSize_);

					if(!ground.empty() || !obstacles.empty())
					{
						gridMaps_.insert(std::make_pair(msg->nodes[i].id, std::make_pair(ground, obstacles)));
					}
				}
			}
		}

		std::map<int, Transform> poses;
		UASSERT(msg->graph.posesId.size() == msg->graph.poses.size());
		for(unsigned int i=0; i<msg->graph.posesId.size(); ++i)
		{
			poses.insert(std::make_pair(msg->graph.posesId[i], rtabmap_ros::transformFromPoseMsg(msg->graph.poses[i])));
		}

		if(filterRadius_ > 0.0 && filterAngle_ > 0.0)
		{
			poses = rtabmap::graph::radiusPosesFiltering(poses, filterRadius_, filterAngle_*CV_PI/180.0);
		}

		if(gridMap_.getNumSubscribers())
		{
			// create the map
			float xMin=0.0f, yMin=0.0f;
			//cv::Mat pixels = util3d::create2DMap(poses, scans_, gridCellSize_, gridUnknownSpaceFilled_, xMin, yMin, mapSize_);
			cv::Mat pixels = util3d::create2DMapFromOccupancyLocalMaps(
							poses,
							gridMaps_,
							gridCellSize_,
							xMin, yMin,
							mapSize_,
							eroded_);

			if(!pixels.empty())
			{
				//init
				map_.info.resolution = gridCellSize_;
				map_.info.origin.position.x = 0.0;
				map_.info.origin.position.y = 0.0;
				map_.info.origin.position.z = 0.0;
				map_.info.origin.orientation.x = 0.0;
				map_.info.origin.orientation.y = 0.0;
				map_.info.origin.orientation.z = 0.0;
				map_.info.origin.orientation.w = 1.0;

				map_.info.width = pixels.cols;
				map_.info.height = pixels.rows;
				map_.info.origin.position.x = xMin;
				map_.info.origin.position.y = yMin;
				map_.data.resize(map_.info.width * map_.info.height);

				memcpy(map_.data.data(), pixels.data, map_.info.width * map_.info.height);

				map_.header.frame_id = msg->header.frame_id;
				map_.header.stamp = ros::Time::now();

				gridMap_.publish(map_);
				ROS_INFO("Grid Map published [%d,%d] (%fs)", pixels.cols, pixels.rows, timer.ticks());
			}
		}
	}
Exemplo n.º 20
0
	void mapDataReceivedCallback(const rtabmap_ros::MapDataConstPtr & msg)
	{
		UTimer timer;
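		// Cache a filtered RGB-D point cloud (and optionally a local occupancy grid) for each new node in the message.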
		for(unsigned int i=0; i<msg->nodes.size(); ++i)
		{
			int id = msg->nodes[i].id;
			if(!uContains(rgbClouds_, id))
			{
				rtabmap::Signature s = rtabmap_ros::nodeDataFromROS(msg->nodes[i]);
				if(!s.sensorData().imageCompressed().empty() &&
				   !s.sensorData().depthOrRightCompressed().empty() &&
				   (s.sensorData().cameraModels().size() || s.sensorData().stereoCameraModel().isValid()))
				{
					cv::Mat image, depth;
					s.sensorData().uncompressData(&image, &depth, 0);


					if(!s.sensorData().imageRaw().empty() && !s.sensorData().depthOrRightRaw().empty())
					{
						pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud;
						cloud = rtabmap::util3d::cloudRGBFromSensorData(
								s.sensorData(),
								cloudDecimation_,
								cloudMaxDepth_);

						if(cloud->size() && noiseFilterRadius_ > 0.0 && noiseFilterMinNeighbors_ > 0)
						{
							pcl::IndicesPtr indices = rtabmap::util3d::radiusFiltering(cloud, noiseFilterRadius_, noiseFilterMinNeighbors_);
							pcl::PointCloud<pcl::PointXYZRGB>::Ptr tmp(new pcl::PointCloud<pcl::PointXYZRGB>);
							pcl::copyPointCloud(*cloud, *indices, *tmp);
							cloud = tmp;
						}
						if(cloud->size() && cloudVoxelSize_ > 0)
						{
							cloud = util3d::voxelize(cloud, cloudVoxelSize_);
						}

						if(cloud->size())
						{
							rgbClouds_.insert(std::make_pair(id, cloud));

							if(computeOccupancyGrid_)
							{
								pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloudClipped = cloud;
								if(cloudClipped->size() && maxHeight_ > 0)
								{
									cloudClipped = util3d::passThrough(cloudClipped, "z", std::numeric_limits<int>::min(), maxHeight_);
								}
								if(cloudClipped->size())
								{
									cloudClipped = util3d::voxelize(cloudClipped, gridCellSize_);

									cv::Mat ground, obstacles;
									util3d::occupancy2DFromCloud3D<pcl::PointXYZRGB>(cloudClipped, ground, obstacles, gridCellSize_, groundMaxAngle_, clusterMinSize_);
									if(!ground.empty() || !obstacles.empty())
									{
										occupancyLocalMaps_.insert(std::make_pair(id, std::make_pair(ground, obstacles)));
									}
								}

							}
						}
					}
				}
			}

			if(!uContains(scans_, id) && msg->nodes[i].laserScan.size())
			{
				cv::Mat laserScan = rtabmap::uncompressData(msg->nodes[i].laserScan);
				if(!laserScan.empty())
				{
					pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = util3d::laserScanToPointCloud(laserScan);
					if(cloud->size() && scanVoxelSize_ > 0)
					{
						cloud = util3d::voxelize(cloud, scanVoxelSize_);
					}
					if(cloud->size())
					{
						scans_.insert(std::make_pair(id, cloud));
					}
				}
			}
		}

		// filter poses
		std::map<int, Transform> poses;
		UASSERT(msg->posesId.size() == msg->poses.size());
		for(unsigned int i=0; i<msg->posesId.size(); ++i)
		{
			poses.insert(std::make_pair(msg->posesId[i], rtabmap_ros::transformFromPoseMsg(msg->poses[i])));
		}
		if(nodeFilteringAngle_ > 0.0 && nodeFilteringRadius_ > 0.0)
		{
			poses = rtabmap::graph::radiusPosesFiltering(poses, nodeFilteringRadius_, nodeFilteringAngle_*CV_PI/180.0);
		}

		if(assembledMapClouds_.getNumSubscribers())
		{
			// generate the assembled cloud!
			pcl::PointCloud<pcl::PointXYZRGB>::Ptr assembledCloud(new pcl::PointCloud<pcl::PointXYZRGB>);

			for(std::map<int, Transform>::iterator iter = poses.begin(); iter!=poses.end(); ++iter)
			{
				std::map<int, pcl::PointCloud<pcl::PointXYZRGB>::Ptr >::iterator jter = rgbClouds_.find(iter->first);
				if(jter != rgbClouds_.end())
				{
					pcl::PointCloud<pcl::PointXYZRGB>::Ptr transformed = util3d::transformPointCloud(jter->second, iter->second);
					*assembledCloud+=*transformed;
				}
			}

			if(assembledCloud->size())
			{
				if(cloudVoxelSize_ > 0)
				{
					assembledCloud = util3d::voxelize(assembledCloud,cloudVoxelSize_);
				}

				sensor_msgs::PointCloud2::Ptr cloudMsg(new sensor_msgs::PointCloud2);
				pcl::toROSMsg(*assembledCloud, *cloudMsg);
				cloudMsg->header.stamp = ros::Time::now();
				cloudMsg->header.frame_id = msg->header.frame_id;
				assembledMapClouds_.publish(cloudMsg);
			}
		}

		if(assembledMapScans_.getNumSubscribers())
		{
			// generate the assembled scan!
			pcl::PointCloud<pcl::PointXYZ>::Ptr assembledCloud(new pcl::PointCloud<pcl::PointXYZ>);

			for(std::map<int, Transform>::iterator iter = poses.begin(); iter!=poses.end(); ++iter)
			{
				std::map<int, pcl::PointCloud<pcl::PointXYZ>::Ptr >::iterator jter = scans_.find(iter->first);
				if(jter != scans_.end())
				{
					pcl::PointCloud<pcl::PointXYZ>::Ptr transformed = util3d::transformPointCloud(jter->second, iter->second);
					*assembledCloud+=*transformed;
				}
			}

			if(assembledCloud->size())
			{
				if(scanVoxelSize_ > 0)
				{
					assembledCloud = util3d::voxelize(assembledCloud, scanVoxelSize_);
				}

				sensor_msgs::PointCloud2::Ptr cloudMsg(new sensor_msgs::PointCloud2);
				pcl::toROSMsg(*assembledCloud, *cloudMsg);
				cloudMsg->header.stamp = ros::Time::now();
				cloudMsg->header.frame_id = msg->header.frame_id;
				assembledMapScans_.publish(cloudMsg);
			}
		}

		if(occupancyMapPub_.getNumSubscribers())
		{
			// create the map
			float xMin=0.0f, yMin=0.0f;
			cv::Mat pixels = util3d::create2DMapFromOccupancyLocalMaps(
					poses,
					occupancyLocalMaps_,
					gridCellSize_, xMin, yMin,
					occupancyMapSize_);

			if(!pixels.empty())
			{
				//init
				nav_msgs::OccupancyGrid map;
				map.info.resolution = gridCellSize_;
				map.info.origin.position.x = 0.0;
				map.info.origin.position.y = 0.0;
				map.info.origin.position.z = 0.0;
				map.info.origin.orientation.x = 0.0;
				map.info.origin.orientation.y = 0.0;
				map.info.origin.orientation.z = 0.0;
				map.info.origin.orientation.w = 1.0;

				map.info.width = pixels.cols;
				map.info.height = pixels.rows;
				map.info.origin.position.x = xMin;
				map.info.origin.position.y = yMin;
				map.data.resize(map.info.width * map.info.height);

				memcpy(map.data.data(), pixels.data, map.info.width * map.info.height);

				map.header.frame_id = msg->header.frame_id;
				map.header.stamp = ros::Time::now();

				occupancyMapPub_.publish(map);
			}
		}
		ROS_INFO("Processing data %fs", timer.ticks());
	}
Exemplo n.º 21
0
const std::map<int, float> & BayesFilter::computePosterior(const Memory * memory, const std::map<int, float> & likelihood)
{
	ULOGGER_DEBUG("");

	if(!memory)
	{
		ULOGGER_ERROR("Memory is Null!");
		return _posterior;
	}

	if(!likelihood.size())
	{
		ULOGGER_ERROR("likelihood is empty!");
		return _posterior;
	}

	if(_predictionLC.size() < 2)
	{
		ULOGGER_ERROR("Prediction is not valid!");
		return _posterior;
	}

	UTimer timer;
	timer.start();

	cv::Mat prior;
	cv::Mat posterior;

	float sum = 0;
	int j=0;
	// Recursive Bayes estimation...
	// STEP 1 - Prediction : Prior*lastPosterior
	_prediction = this->generatePrediction(memory, uKeys(likelihood));

	ULOGGER_DEBUG("STEP1-generate prior=%fs, rows=%d, cols=%d", timer.ticks(), _prediction.rows, _prediction.cols);
	//std::cout << "Prediction=" << _prediction << std::endl;

	// Adjust the last posterior if some images were
	// reactivated or removed from the working memory
	posterior = cv::Mat(likelihood.size(), 1, CV_32FC1);
	this->updatePosterior(memory, uKeys(likelihood));
	j=0;
	for(std::map<int, float>::const_iterator i=_posterior.begin(); i!= _posterior.end(); ++i)
	{
		((float*)posterior.data)[j++] = (*i).second;
	}
	ULOGGER_DEBUG("STEP1-update posterior=%fs, posterior=%d, _posterior size=%d", posterior.rows, _posterior.size());
	//std::cout << "LastPosterior=" << posterior << std::endl;

	// Multiply prediction matrix with the last posterior
	// (m,m) X (m,1) = (m,1)
	prior = _prediction * posterior;
	ULOGGER_DEBUG("STEP1-matrix mult time=%fs", timer.ticks());
	//std::cout << "ResultingPrior=" << prior << std::endl;

	ULOGGER_DEBUG("STEP1-matrix mult time=%fs", timer.ticks());
	std::vector<float> likelihoodValues = uValues(likelihood);
	//std::cout << "Likelihood=" << cv::Mat(likelihoodValues) << std::endl;

	// STEP 2 - Update : Multiply with observations (likelihood)
	j=0;
	for(std::map<int, float>::const_iterator i=likelihood.begin(); i!= likelihood.end(); ++i)
	{
		std::map<int, float>::iterator p =_posterior.find((*i).first);
		if(p!= _posterior.end())
		{
			(*p).second = (*i).second * ((float*)prior.data)[j++];
			sum+=(*p).second;
		}
		else
		{
			ULOGGER_ERROR("Problem1! can't find id=%d", (*i).first);
		}
	}
	ULOGGER_DEBUG("STEP2-likelihood time=%fs", timer.ticks());
	//std::cout << "Posterior (before normalization)=" << _posterior << std::endl;

	// Normalize
	ULOGGER_DEBUG("sum=%f", sum);
	if(sum != 0)
	{
		for(std::map<int, float>::iterator i=_posterior.begin(); i!= _posterior.end(); ++i)
		{
			(*i).second /= sum;
		}
	}
	ULOGGER_DEBUG("normalize time=%fs", timer.ticks());
	//std::cout << "Posterior=" << _posterior << std::endl;

	return _posterior;
}
Exemplo n.º 22
0
	void mapDataReceivedCallback(const rtabmap_ros::MapDataConstPtr & msg)
	{
		UTimer timer;
		for(unsigned int i=0; i<msg->nodes.size(); ++i)
		{
			int id = msg->nodes[i].id;
			if(!uContains(rgbClouds_, id))
			{
				rtabmap::Transform localTransform = rtabmap_ros::transformFromGeometryMsg(msg->nodes[i].localTransform);
				if(!localTransform.isNull())
				{
					cv::Mat image, depth;
					float fx = msg->nodes[i].fx;
					float fy = msg->nodes[i].fy;
					float cx = msg->nodes[i].cx;
					float cy = msg->nodes[i].cy;

					//uncompress data
					rtabmap::CompressionThread ctImage(rtabmap_ros::compressedMatFromBytes(msg->nodes[i].image, false), true);
					rtabmap::CompressionThread ctDepth(rtabmap_ros::compressedMatFromBytes(msg->nodes[i].depth, false), true);
					ctImage.start();
					ctDepth.start();
					ctImage.join();
					ctDepth.join();
					image = ctImage.getUncompressedData();
					depth = ctDepth.getUncompressedData();

					if(!image.empty() && !depth.empty() && fx > 0.0f && fy > 0.0f && cx >= 0.0f && cy >= 0.0f)
					{
						pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud;
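						// A CV_8UC1 "depth" image is actually the right image of a stereo pair; otherwise it is a registered depth image.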
						if(depth.type() == CV_8UC1)
						{
							cloud = util3d::cloudFromStereoImages(image, depth, cx, cy, fx, fy, cloudDecimation_);
						}
						else
						{
							cloud = util3d::cloudFromDepthRGB(image, depth, cx, cy, fx, fy, cloudDecimation_);
						}

						if(cloud->size() && cloudMaxDepth_ > 0)
						{
							cloud = util3d::passThrough(cloud, "z", 0, cloudMaxDepth_);
						}
						if(cloud->size() && noiseFilterRadius_ > 0.0 && noiseFilterMinNeighbors_ > 0)
						{
							pcl::IndicesPtr indices = rtabmap::util3d::radiusFiltering(cloud, noiseFilterRadius_, noiseFilterMinNeighbors_);
							pcl::PointCloud<pcl::PointXYZRGB>::Ptr tmp(new pcl::PointCloud<pcl::PointXYZRGB>);
							pcl::copyPointCloud(*cloud, *indices, *tmp);
							cloud = tmp;
						}
						if(cloud->size() && cloudVoxelSize_ > 0)
						{
							cloud = util3d::voxelize(cloud, cloudVoxelSize_);
						}

						if(cloud->size())
						{
							cloud = util3d::transformPointCloud(cloud, localTransform);


							rgbClouds_.insert(std::make_pair(id, cloud));

							if(computeOccupancyGrid_)
							{
								pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloudClipped = cloud;
								if(cloudClipped->size() && maxHeight_ > 0)
								{
									cloudClipped = util3d::passThrough(cloudClipped, "z", std::numeric_limits<int>::min(), maxHeight_);
								}
								if(cloudClipped->size())
								{
									cloudClipped = util3d::voxelize(cloudClipped, gridCellSize_);

									cv::Mat ground, obstacles;
									util3d::occupancy2DFromCloud3D<pcl::PointXYZRGB>(cloudClipped, ground, obstacles, gridCellSize_, groundMaxAngle_, clusterMinSize_);
									if(!ground.empty() || !obstacles.empty())
									{
										occupancyLocalMaps_.insert(std::make_pair(id, std::make_pair(ground, obstacles)));
									}
								}

							}
						}
					}
				}
			}

			if(!uContains(scans_, id) && msg->nodes[i].laserScan.size())
			{
				cv::Mat laserScan = rtabmap::uncompressData(msg->nodes[i].laserScan);
				if(!laserScan.empty())
				{
					pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = util3d::laserScanToPointCloud(laserScan);
					if(cloud->size() && scanVoxelSize_ > 0)
					{
						cloud = util3d::voxelize(cloud, scanVoxelSize_);
					}
					if(cloud->size())
					{
						scans_.insert(std::make_pair(id, cloud));
					}
				}
			}
		}

		// filter poses
		std::map<int, Transform> poses;
		for(unsigned int i=0; i<msg->graph.nodeIds.size() && i<msg->graph.poses.size(); ++i)
		{
			poses.insert(std::make_pair(msg->graph.nodeIds[i], rtabmap_ros::transformFromPoseMsg(msg->graph.poses[i])));
		}
		if(nodeFilteringAngle_ > 0.0 && nodeFilteringRadius_ > 0.0)
		{
			poses = rtabmap::graph::radiusPosesFiltering(poses, nodeFilteringRadius_, nodeFilteringAngle_*CV_PI/180.0);
		}

		if(assembledMapClouds_.getNumSubscribers())
		{
			// generate the assembled cloud!
			pcl::PointCloud<pcl::PointXYZRGB>::Ptr assembledCloud(new pcl::PointCloud<pcl::PointXYZRGB>);

			for(std::map<int, Transform>::iterator iter = poses.begin(); iter!=poses.end(); ++iter)
			{
				std::map<int, pcl::PointCloud<pcl::PointXYZRGB>::Ptr >::iterator jter = rgbClouds_.find(iter->first);
				if(jter != rgbClouds_.end())
				{
					pcl::PointCloud<pcl::PointXYZRGB>::Ptr transformed = util3d::transformPointCloud(jter->second, iter->second);
					*assembledCloud+=*transformed;
				}
			}

			if(assembledCloud->size())
			{
				if(cloudVoxelSize_ > 0)
				{
					assembledCloud = util3d::voxelize(assembledCloud,cloudVoxelSize_);
				}

				sensor_msgs::PointCloud2::Ptr cloudMsg(new sensor_msgs::PointCloud2);
				pcl::toROSMsg(*assembledCloud, *cloudMsg);
				cloudMsg->header.stamp = ros::Time::now();
				cloudMsg->header.frame_id = msg->header.frame_id;
				assembledMapClouds_.publish(cloudMsg);
			}
		}

		if(assembledMapScans_.getNumSubscribers())
		{
			// generate the assembled scan!
			pcl::PointCloud<pcl::PointXYZ>::Ptr assembledCloud(new pcl::PointCloud<pcl::PointXYZ>);

			for(std::map<int, Transform>::iterator iter = poses.begin(); iter!=poses.end(); ++iter)
			{
				std::map<int, pcl::PointCloud<pcl::PointXYZ>::Ptr >::iterator jter = scans_.find(iter->first);
				if(jter != scans_.end())
				{
					pcl::PointCloud<pcl::PointXYZ>::Ptr transformed = util3d::transformPointCloud(jter->second, iter->second);
					*assembledCloud+=*transformed;
				}
			}

			if(assembledCloud->size())
			{
				if(scanVoxelSize_ > 0)
				{
					assembledCloud = util3d::voxelize(assembledCloud, scanVoxelSize_);
				}

				sensor_msgs::PointCloud2::Ptr cloudMsg(new sensor_msgs::PointCloud2);
				pcl::toROSMsg(*assembledCloud, *cloudMsg);
				cloudMsg->header.stamp = ros::Time::now();
				cloudMsg->header.frame_id = msg->header.frame_id;
				assembledMapScans_.publish(cloudMsg);
			}
		}

		if(occupancyMapPub_.getNumSubscribers())
		{
			// create the map
			float xMin=0.0f, yMin=0.0f;
			cv::Mat pixels = util3d::create2DMapFromOccupancyLocalMaps(
					poses,
					occupancyLocalMaps_,
					gridCellSize_, xMin, yMin,
					occupancyMapSize_);

			if(!pixels.empty())
			{
				//init
				nav_msgs::OccupancyGrid map;
				map.info.resolution = gridCellSize_;
				map.info.origin.position.x = 0.0;
				map.info.origin.position.y = 0.0;
				map.info.origin.position.z = 0.0;
				map.info.origin.orientation.x = 0.0;
				map.info.origin.orientation.y = 0.0;
				map.info.origin.orientation.z = 0.0;
				map.info.origin.orientation.w = 1.0;

				map.info.width = pixels.cols;
				map.info.height = pixels.rows;
				map.info.origin.position.x = xMin;
				map.info.origin.position.y = yMin;
				map.data.resize(map.info.width * map.info.height);

				memcpy(map.data.data(), pixels.data, map.info.width * map.info.height);

				map.header.frame_id = msg->header.frame_id;
				map.header.stamp = ros::Time::now();

				occupancyMapPub_.publish(map);
			}
		}
		ROS_INFO("Processing data %fs", timer.ticks());
	}
Exemplo n.º 23
0
cv::Mat BayesFilter::updatePrediction(const cv::Mat & oldPrediction,
		const Memory * memory,
		const std::vector<int> & oldIds,
		const std::vector<int> & newIds) const
{
	UTimer timer;
	UDEBUG("");

	UASSERT(memory &&
		oldIds.size() &&
		newIds.size() &&
		oldIds.size() == (unsigned int)oldPrediction.cols &&
		oldIds.size() == (unsigned int)oldPrediction.rows);

	cv::Mat prediction = cv::Mat::zeros(newIds.size(), newIds.size(), CV_32FC1);
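	// prediction(i,j) holds the transition probability between locations newIds[i] and newIds[j]; it is rebuilt below by
	// copying unchanged entries of oldPrediction and recomputing only the added or modified ids.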

	// Create id to index maps
	std::map<int, int> oldIdToIndexMap;
	std::map<int, int> newIdToIndexMap;
	for(unsigned int i=0; i<oldIds.size() || i<newIds.size(); ++i)
	{
		if(i<oldIds.size())
		{
			UASSERT(oldIds[i]);
			oldIdToIndexMap.insert(oldIdToIndexMap.end(), std::make_pair(oldIds[i], i));
			//UDEBUG("oldIdToIndexMap[%d] = %d", oldIds[i], i);
		}
		if(i<newIds.size())
		{
			UASSERT(newIds[i]);
			newIdToIndexMap.insert(newIdToIndexMap.end(), std::make_pair(newIds[i], i));
			//UDEBUG("newIdToIndexMap[%d] = %d", newIds[i], i);
		}
	}
	UDEBUG("time creating id-index maps = %fs", timer.restart());

	//Get removed ids
	std::set<int> removedIds;
	for(unsigned int i=0; i<oldIds.size(); ++i)
	{
		if(!uContains(newIdToIndexMap, oldIds[i]))
		{
			removedIds.insert(removedIds.end(), oldIds[i]);
			UDEBUG("removed id=%d at oldIndex=%d", oldIds[i], i);
		}
	}
	UDEBUG("time getting removed ids = %fs", timer.restart());

	int added = 0;
	// get ids to update
	std::set<int> idsToUpdate;
	for(unsigned int i=0; i<oldIds.size() || i<newIds.size(); ++i)
	{
		if(i<oldIds.size())
		{
			if(removedIds.find(oldIds[i]) != removedIds.end())
			{
				unsigned int cols = oldPrediction.cols;
				for(unsigned int j=0; j<cols; ++j)
				{
					if(((const float *)oldPrediction.data)[i + j*cols] != 0.0f &&
					   j!=i &&
					   removedIds.find(oldIds[j]) == removedIds.end())
					{
						//UDEBUG("to update id=%d from id=%d removed (value=%f)", oldIds[j], oldIds[i], ((const float *)oldPrediction.data)[i + j*cols]);
						idsToUpdate.insert(oldIds[j]);
					}
				}
			}
		}
		if(i<newIds.size() && !uContains(oldIdToIndexMap,newIds[i]))
		{
			std::map<int, int> neighbors = memory->getNeighborsId(newIds[i], _predictionLC.size()-1, 0);
			float sum = this->addNeighborProb(prediction, i, neighbors, newIdToIndexMap);
			this->normalize(prediction, i, sum, newIds[0]<0);
			++added;
			for(std::map<int,int>::iterator iter=neighbors.begin(); iter!=neighbors.end(); ++iter)
			{
				if(uContains(oldIdToIndexMap, iter->first) &&
				   removedIds.find(iter->first) == removedIds.end())
				{
					idsToUpdate.insert(iter->first);
				}
			}
		}
	}
	UDEBUG("time getting ids to update = %fs", timer.restart());

	// update modified/added ids
	int modified = 0;
	for(std::set<int>::iterator iter = idsToUpdate.begin(); iter!=idsToUpdate.end(); ++iter)
	{
		std::map<int, int> neighbors = memory->getNeighborsId(*iter, _predictionLC.size()-1, 0);
		int index = newIdToIndexMap.at(*iter);
		float sum = this->addNeighborProb(prediction, index, neighbors, newIdToIndexMap);
		this->normalize(prediction, index, sum, newIds[0]<0);
		++modified;
	}
	UDEBUG("time updating modified/added ids = %fs", timer.restart());

	//UDEBUG("oldIds.size()=%d, oldPrediction.cols=%d, oldPrediction.rows=%d", oldIds.size(), oldPrediction.cols, oldPrediction.rows);
	//UDEBUG("newIdToIndexMap.size()=%d, prediction.cols=%d, prediction.rows=%d", newIdToIndexMap.size(), prediction.cols, prediction.rows);
	// copy not changed probabilities
	int copied = 0;
	for(unsigned int i=0; i<oldIds.size(); ++i)
	{
		if(oldIds[i]>0 && removedIds.find(oldIds[i]) == removedIds.end() && idsToUpdate.find(oldIds[i]) == idsToUpdate.end())
		{
			for(int j=0; j<oldPrediction.cols; ++j)
			{
				if(removedIds.find(oldIds[j]) == removedIds.end() && ((const float *)oldPrediction.data)[i + j*oldPrediction.cols] != 0.0f)
				{
					//UDEBUG("i=%d, j=%d", i, j);
					//UDEBUG("oldIds[i]=%d, oldIds[j]=%d", oldIds[i], oldIds[j]);
					//UDEBUG("newIdToIndexMap.at(oldIds[i])=%d", newIdToIndexMap.at(oldIds[i]));
					//UDEBUG("newIdToIndexMap.at(oldIds[j])=%d", newIdToIndexMap.at(oldIds[j]));
					((float *)prediction.data)[newIdToIndexMap.at(oldIds[i]) + newIdToIndexMap.at(oldIds[j])*prediction.cols] = ((const float *)oldPrediction.data)[i + j*oldPrediction.cols];
				}
			}
			++copied;
		}
	}
	UDEBUG("time copying = %fs", timer.restart());

	//update virtual place
	if(newIds[0] < 0)
	{
		if(prediction.cols>1) // The first must be the virtual place
		{
			((float*)prediction.data)[0] = _virtualPlacePrior;
			float val = (1.0-_virtualPlacePrior)/(prediction.cols-1);
			for(int j=1; j<prediction.cols; j++)
			{
				((float*)prediction.data)[j*prediction.cols] = val;
			}
		}
		else if(prediction.cols>0)
		{
			((float*)prediction.data)[0] = 1;
		}
	}
	UDEBUG("time updating virtual place = %fs", timer.restart());

	UDEBUG("Modified=%d, Added=%d, Copied=%d", modified, added, copied);
	return prediction;
}
Exemplo n.º 24
0
void CameraThread::mainLoop()
{
	UDEBUG("");
	CameraInfo info;
	SensorData data = _camera->takeImage(&info);

	if(!data.imageRaw().empty())
	{
		if(_colorOnly && !data.depthRaw().empty())
		{
			data.setDepthOrRightRaw(cv::Mat());
		}
		if(_imageDecimation>1 && !data.imageRaw().empty())
		{
			UDEBUG("");
			UTimer timer;
			if(!data.depthRaw().empty() &&
			   !(data.depthRaw().rows % _imageDecimation == 0 && data.depthRaw().cols % _imageDecimation == 0))
			{
				UERROR("Decimation of depth images should be exact (decimation=%d, size=(%d,%d))! "
					   "Images won't be resized.", _imageDecimation, data.depthRaw().cols, data.depthRaw().rows);
			}
			else
			{
				data.setImageRaw(util2d::decimate(data.imageRaw(), _imageDecimation));
				data.setDepthOrRightRaw(util2d::decimate(data.depthOrRightRaw(), _imageDecimation));
				std::vector<CameraModel> models = data.cameraModels();
				for(unsigned int i=0; i<models.size(); ++i)
				{
					if(models[i].isValidForProjection())
					{
						models[i] = models[i].scaled(1.0/double(_imageDecimation));
					}
				}
				data.setCameraModels(models);
				StereoCameraModel stereoModel = data.stereoCameraModel();
				if(stereoModel.isValidForProjection())
				{
					stereoModel.scale(1.0/double(_imageDecimation));
					data.setStereoCameraModel(stereoModel);
				}
			}
			info.timeImageDecimation = timer.ticks();
		}
		if(_mirroring && data.cameraModels().size() == 1)
		{
			UDEBUG("");
			UTimer timer;
			cv::Mat tmpRgb;
			cv::flip(data.imageRaw(), tmpRgb, 1);
			data.setImageRaw(tmpRgb);
			UASSERT_MSG(data.cameraModels().size() <= 1 && !data.stereoCameraModel().isValidForProjection(), "Only single RGBD cameras are supported for mirroring.");
			if(data.cameraModels().size() && data.cameraModels()[0].cx())
			{
				CameraModel tmpModel(
						data.cameraModels()[0].fx(),
						data.cameraModels()[0].fy(),
						float(data.imageRaw().cols) - data.cameraModels()[0].cx(),
						data.cameraModels()[0].cy(),
						data.cameraModels()[0].localTransform());
				data.setCameraModel(tmpModel);
			}
			if(!data.depthRaw().empty())
			{
				cv::Mat tmpDepth;
				cv::flip(data.depthRaw(), tmpDepth, 1);
				data.setDepthOrRightRaw(tmpDepth);
			}
			info.timeMirroring = timer.ticks();
		}
		if(_stereoToDepth && data.stereoCameraModel().isValidForProjection() && !data.rightRaw().empty())
		{
			UDEBUG("");
			UTimer timer;
			cv::Mat depth = util2d::depthFromDisparity(
					_stereoDense->computeDisparity(data.imageRaw(), data.rightRaw()),
					data.stereoCameraModel().left().fx(),
					data.stereoCameraModel().baseline());
			data.setCameraModel(data.stereoCameraModel().left());
			data.setDepthOrRightRaw(depth);
			data.setStereoCameraModel(StereoCameraModel());
			info.timeDisparity = timer.ticks();
			UDEBUG("Computing disparity = %f s", info.timeDisparity);
		}
		if(_scanFromDepth &&
			data.cameraModels().size() &&
			data.cameraModels().at(0).isValidForProjection() &&
			!data.depthRaw().empty())
		{
			UDEBUG("");
			if(data.laserScanRaw().empty())
			{
				UASSERT(_scanDecimation >= 1);
				UTimer timer;
				pcl::IndicesPtr validIndices(new std::vector<int>);
				pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = util3d::cloudFromSensorData(data, _scanDecimation, _scanMaxDepth, _scanMinDepth, validIndices.get());
				float maxPoints = (data.depthRaw().rows/_scanDecimation)*(data.depthRaw().cols/_scanDecimation);
				if(_scanVoxelSize>0.0f)
				{
					cloud = util3d::voxelize(cloud, validIndices, _scanVoxelSize);
					float ratio = float(cloud->size()) / float(validIndices->size());
					maxPoints = ratio * maxPoints;
				}
				else if(!cloud->is_dense)
				{
					pcl::PointCloud<pcl::PointXYZ>::Ptr denseCloud(new pcl::PointCloud<pcl::PointXYZ>);
					pcl::copyPointCloud(*cloud, *validIndices, *denseCloud);
					cloud = denseCloud;
				}

				cv::Mat scan;
				if(_scanNormalsK>0)
				{
					scan = util3d::laserScanFromPointCloud(*util3d::computeNormals(cloud, _scanNormalsK));
				}
				else
				{
					scan = util3d::laserScanFromPointCloud(*cloud);
				}
				data.setLaserScanRaw(scan, (int)maxPoints, _scanMaxDepth);
				info.timeScanFromDepth = timer.ticks();
				UDEBUG("Computing scan from depth = %f s", info.timeScanFromDepth);
			}
			else
			{
				UWARN("Option to create laser scan from depth image is enabled, but "
					  "there is already a laser scan in the captured sensor data. Scan from "
					  "depth will not be created.");
			}
		}

		info.cameraName = _camera->getSerial();
		this->post(new CameraEvent(data, info));
	}
	else if(!this->isKilled())
	{
		UWARN("no more images...");
		this->kill();
		this->post(new CameraEvent());
	}
}
Exemplo n.º 25
0
	void mapDataReceivedCallback(const rtabmap_ros::MapDataConstPtr & msg)
	{
		// save new poses and constraints
		// Assuming that nodes/constraints are all linked together
		UASSERT(msg->graph.nodeIds.size() == msg->graph.poses.size());
		UASSERT(msg->graph.nodeIds.size() == msg->graph.mapIds.size());
		UASSERT(msg->graph.nodeIds.size() == msg->graph.stamps.size());
		UASSERT(msg->graph.nodeIds.size() == msg->graph.labels.size());
		UASSERT(msg->graph.nodeIds.size() == msg->graph.userDatas.size());

		bool dataChanged = false;
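		// dataChanged becomes true when an incoming pose or link differs from the cached one (e.g., after a map reset);
		// in that case the cache is rebuilt from this message only.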

		std::multimap<int, Link> newConstraints;
		for(unsigned int i=0; i<msg->graph.links.size(); ++i)
		{
			Link link = rtabmap_ros::linkFromROS(msg->graph.links[i]);
			newConstraints.insert(std::make_pair(link.from(), link));

			bool edgeAlreadyAdded = false;
			for(std::multimap<int, Link>::iterator iter = cachedConstraints_.lower_bound(link.from());
					iter != cachedConstraints_.end() && iter->first == link.from();
					++iter)
			{
				if(iter->second.to() == link.to())
				{
					edgeAlreadyAdded = true;
					if(iter->second.transform() != link.transform())
					{
						dataChanged = true;
					}
				}
			}
			if(!edgeAlreadyAdded)
			{
				cachedConstraints_.insert(std::make_pair(link.from(), link));
			}
		}

		std::map<int, Transform> newPoses;
		std::map<int, int> newMapIds;
		std::map<int, double> newStamps;
		std::map<int, std::string> newLabels;
		std::map<int, std::vector<unsigned char> > newUserDatas;
		// add new odometry poses
		for(unsigned int i=0; i<msg->nodes.size(); ++i)
		{
			int id = msg->nodes[i].id;
			Transform pose = rtabmap_ros::transformFromPoseMsg(msg->nodes[i].pose);
			newPoses.insert(std::make_pair(id, pose));
			newMapIds.insert(std::make_pair(id, msg->nodes[i].mapId));
			newStamps.insert(std::make_pair(id, msg->nodes[i].stamp));
			newLabels.insert(std::make_pair(id, msg->nodes[i].label));
			newUserDatas.insert(std::make_pair(id, msg->nodes[i].userData.data));

			std::pair<std::map<int, Transform>::iterator, bool> p = cachedPoses_.insert(std::make_pair(id, pose));
			if(!p.second && pose != cachedPoses_.at(id))
			{
				dataChanged = true;
			}
			else if(p.second)
			{
				cachedMapIds_.insert(std::make_pair(id, msg->nodes[i].mapId));
				cachedStamps_.insert(std::make_pair(id, msg->nodes[i].stamp));
				cachedLabels_.insert(std::make_pair(id, msg->nodes[i].label));
				cachedUserDatas_.insert(std::make_pair(id, msg->nodes[i].userData.data));
			}
		}

		if(dataChanged)
		{
			ROS_WARN("Graph data has changed! Reset cache...");
			cachedPoses_ = newPoses;
			cachedMapIds_ = newMapIds;
			cachedStamps_ = newStamps;
			cachedLabels_ = newLabels;
			cachedUserDatas_ = newUserDatas;
			cachedConstraints_ = newConstraints;
		}

		//match poses in the graph
		std::map<int, Transform> poses;
		std::map<int, int> mapIds;
		std::map<int, double> stamps;
		std::map<int, std::string> labels;
		std::map<int, std::vector<unsigned char> > userDatas;
		std::multimap<int, Link> constraints;
		if(globalOptimization_)
		{
			poses = cachedPoses_;
			mapIds = cachedMapIds_;
			stamps = cachedStamps_;
			labels = cachedLabels_;
			userDatas = cachedUserDatas_;
			constraints = cachedConstraints_;
		}
		else
		{
			constraints = newConstraints;
			for(unsigned int i=0; i<msg->graph.nodeIds.size(); ++i)
			{
				std::map<int, Transform>::iterator iter = cachedPoses_.find(msg->graph.nodeIds[i]);
				if(iter != cachedPoses_.end())
				{
					poses.insert(*iter);
					mapIds.insert(*cachedMapIds_.find(iter->first));
					stamps.insert(*cachedStamps_.find(iter->first));
					labels.insert(*cachedLabels_.find(iter->first));
					userDatas.insert(*cachedUserDatas_.find(iter->first));
				}
				else
				{
					ROS_ERROR("Odometry pose of node %d not found in cache!", msg->graph.nodeIds[i]);
					return;
				}
			}
		}

		// Optimize only if there is a subscriber
		if(mapDataPub_.getNumSubscribers())
		{
			UTimer timer;
			std::map<int, Transform> optimizedPoses;
			Transform mapCorrection = Transform::getIdentity();
			if(poses.size() > 1 && constraints.size() > 0)
			{
				graph::TOROOptimizer optimizer(iterations_, false, ignoreVariance_);
				int fromId = optimizeFromLastNode_?poses.rbegin()->first:poses.begin()->first;
				std::map<int, rtabmap::Transform> posesOut;
				std::multimap<int, rtabmap::Link> linksOut;
				optimizer.getConnectedGraph(
						fromId,
						poses,
						constraints,
						posesOut,
						linksOut);
				optimizedPoses = optimizer.optimize(fromId, posesOut, linksOut);

				mapToOdomMutex_.lock();
				mapCorrection = optimizedPoses.at(poses.rbegin()->first) * poses.rbegin()->second.inverse();
				mapToOdom_ = mapCorrection;
				mapToOdomMutex_.unlock();
			}
			else if(poses.size() == 1 && constraints.size() == 0)
			{
				optimizedPoses = poses;
			}
			else if(poses.size() || constraints.size())
			{
				ROS_ERROR("map_optimizer: Poses=%d and edges=%d (poses must "
					   "not be null if there are edges, and edges must be null if poses <= 1)",
					  (int)poses.size(), (int)constraints.size());
				mapIds.clear();
				labels.clear();
				stamps.clear();
				userDatas.clear();
			}

			UASSERT(optimizedPoses.size() == mapIds.size());
			UASSERT(optimizedPoses.size() == labels.size());
			UASSERT(optimizedPoses.size() == stamps.size());
			UASSERT(optimizedPoses.size() == userDatas.size());
			rtabmap_ros::MapData outputMsg;
			rtabmap_ros::mapGraphToROS(optimizedPoses, mapIds, stamps, labels, userDatas, std::multimap<int, rtabmap::Link>(), mapCorrection, outputMsg.graph);
			outputMsg.graph.links = msg->graph.links;
			outputMsg.header = msg->header;
			outputMsg.nodes = msg->nodes;
			mapDataPub_.publish(outputMsg);

			ROS_INFO("Time graph optimization = %f s", timer.ticks());
		}
	}
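For reference, the map-to-odom correction computed above reduces to a single composition: the optimized pose of the latest node times the inverse of that node's raw odometry pose. A minimal sketch, assuming the rtabmap/core/Transform.h header path of the rtabmap library:

#include <rtabmap/core/Transform.h>

// Sketch of the correction used above: left-multiplying the latest raw
// odometry pose by this transform yields the optimized pose.
rtabmap::Transform computeMapCorrection(
		const rtabmap::Transform & optimizedPose, // latest node, after optimization
		const rtabmap::Transform & odomPose)      // same node, raw odometry pose
{
	return optimizedPose * odomPose.inverse();
}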
Exemplo n.º 26
0
int main(int argc, char * argv[])
{
	signal(SIGABRT, &sighandler);
	signal(SIGTERM, &sighandler);
	signal(SIGINT, &sighandler);

	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kError);

	ParametersMap customParameters = Parameters::parseArguments(argc, argv);

	if(argc < 3)
	{
		showUsage();
	}

	bool assemble2dMap = false;
	bool assemble3dMap = false;
	bool assemble2dOctoMap = false;
	bool assemble3dOctoMap = false;
	bool useDatabaseRate = false;
	ParametersMap configParameters;
	for(int i=1; i<argc-2; ++i)
	{
		if(strcmp(argv[i], "-r") == 0 || strcmp(argv[i], "--r") == 0)
		{
			useDatabaseRate = true;
			printf("Using database stamps as input rate.\n");
		}
		else if (strcmp(argv[i], "-c") == 0 || strcmp(argv[i], "--c") == 0)
		{
			++i;
			if (i < argc - 2 && UFile::exists(argv[i]) && UFile::getExtension(argv[i]).compare("ini") == 0)
			{
				Parameters::readINI(argv[i], configParameters);
				printf("Using %d parameters from config file \"%s\"\n", (int)configParameters.size(), argv[i]);
			}
			else if(i < argc - 2)
			{
				printf("Config file \"%s\" is not valid or doesn't exist!\n", argv[i]);
			}
			else
			{
				printf("Config file is not set!\n");
			}
		}
		else if(strcmp(argv[i], "-g2") == 0 || strcmp(argv[i], "--g2") == 0)
		{
			assemble2dMap = true;
			printf("2D occupancy grid will be assembled (-g2 option).\n");
		}
		else if(strcmp(argv[i], "-g3") == 0 || strcmp(argv[i], "--g3") == 0)
		{
			assemble3dMap = true;
			printf("3D cloud map will be assembled (-g3 option).\n");
		}
		else if(strcmp(argv[i], "-o2") == 0 || strcmp(argv[i], "--o2") == 0)
		{
#ifdef RTABMAP_OCTOMAP
			assemble2dOctoMap = true;
			printf("OctoMap will be assembled (-o2 option).\n");
#else
			printf("RTAB-Map is not built with OctoMap support, cannot set -o2 option!\n");
#endif
		}
		else if(strcmp(argv[i], "-o3") == 0 || strcmp(argv[i], "--o3") == 0)
		{
#ifdef RTABMAP_OCTOMAP
			assemble3dOctoMap = true;
			printf("OctoMap will be assembled (-o3 option).\n");
#else
			printf("RTAB-Map is not built with OctoMap support, cannot set -o3 option!\n");
#endif
		}
	}

	std::string inputDatabasePath = uReplaceChar(argv[argc-2], '~', UDirectory::homeDir());
	std::string outputDatabasePath = uReplaceChar(argv[argc-1], '~', UDirectory::homeDir());

	std::list<std::string> databases = uSplit(inputDatabasePath, ';');
	if (databases.empty())
	{
		printf("No input database \"%s\" detected!\n", inputDatabasePath.c_str());
		return -1;
	}
	for (std::list<std::string>::iterator iter = databases.begin(); iter != databases.end(); ++iter)
	{
		if (!UFile::exists(*iter))
		{
			printf("Input database \"%s\" doesn't exist!\n", iter->c_str());
			return -1;
		}

		if (UFile::getExtension(*iter).compare("db") != 0)
		{
			printf("File \"%s\" is not a database format (*.db)!\n", iter->c_str());
			return -1;
		}
	}

	if(UFile::getExtension(outputDatabasePath).compare("db") != 0)
	{
		printf("File \"%s\" is not a database format (*.db)!\n", outputDatabasePath.c_str());
		return -1;
	}

	if(UFile::exists(outputDatabasePath))
	{
		UFile::erase(outputDatabasePath);
	}

	// Get parameters of the first database
	DBDriver * dbDriver = DBDriver::create();
	if(!dbDriver->openConnection(databases.front(), false))
	{
		printf("Failed opening input database!\n");
		delete dbDriver;
		return -1;
	}

	ParametersMap parameters = dbDriver->getLastParameters();
	if(parameters.empty())
	{
		printf("WARNING: Failed getting parameters from database, reprocessing will be done with default parameters! Database version may be too old (%s).\n", dbDriver->getDatabaseVersion().c_str());
	}
	if(customParameters.size())
	{
		printf("Custom parameters:\n");
		for(ParametersMap::iterator iter=customParameters.begin(); iter!=customParameters.end(); ++iter)
		{
			printf("  %s\t= %s\n", iter->first.c_str(), iter->second.c_str());
		}
	}
	uInsert(parameters, configParameters);
	uInsert(parameters, customParameters);

	bool incrementalMemory = Parameters::defaultMemIncrementalMemory();
	Parameters::parse(parameters, Parameters::kMemIncrementalMemory(), incrementalMemory);

	int totalIds = 0;
	std::set<int> ids;
	dbDriver->getAllNodeIds(ids);
	if(ids.empty())
	{
		printf("Input database doesn't have any nodes saved in it.\n");
		dbDriver->closeConnection(false);
		delete dbDriver;
		return -1;
	}
	if(!(!incrementalMemory && databases.size() > 1))
	{
		totalIds = ids.size();
	}
	dbDriver->closeConnection(false);

	// Count remaining ids in the other databases
	for (std::list<std::string>::iterator iter = ++databases.begin(); iter != databases.end(); ++iter)
	{
		if (!dbDriver->openConnection(*iter, false))
		{
			printf("Failed opening input database!\n");
			delete dbDriver;
			return -1;
		}
		ids.clear();
		dbDriver->getAllNodeIds(ids);
		totalIds += ids.size();
		dbDriver->closeConnection(false);
	}
	delete dbDriver;
	dbDriver = 0;

	std::string workingDirectory = UDirectory::getDir(outputDatabasePath);
	printf("Set working directory to \"%s\".\n", workingDirectory.c_str());
	uInsert(parameters, ParametersPair(Parameters::kRtabmapWorkingDirectory(), workingDirectory));
	uInsert(parameters, ParametersPair(Parameters::kRtabmapPublishStats(), "true")); // to log status below

	if(!incrementalMemory && databases.size() > 1)
	{
		UFile::copy(databases.front(), outputDatabasePath);
		printf("Parameter \"%s\" is set to false, initializing RTAB-Map with \"%s\" for localization...\n", Parameters::kMemIncrementalMemory().c_str(), databases.front().c_str());
		databases.pop_front();
		inputDatabasePath = uJoin(databases, ";");
	}

	Rtabmap rtabmap;
	rtabmap.init(parameters, outputDatabasePath);

	bool rgbdEnabled = Parameters::defaultRGBDEnabled();
	Parameters::parse(parameters, Parameters::kRGBDEnabled(), rgbdEnabled);
	bool odometryIgnored = !rgbdEnabled;
	DBReader dbReader(inputDatabasePath, useDatabaseRate?-1:0, odometryIgnored);
	dbReader.init();

	OccupancyGrid grid(parameters);
	grid.setCloudAssembling(assemble3dMap);
#ifdef RTABMAP_OCTOMAP
	OctoMap octomap(parameters);
#endif

	printf("Reprocessing data of \"%s\"...\n", inputDatabasePath.c_str());
	std::map<std::string, float> globalMapStats;
	int processed = 0;
	CameraInfo info;
	SensorData data = dbReader.takeImage(&info);
	Transform lastLocalizationPose = info.odomPose;
	while(data.isValid() && g_loopForever)
	{
		UTimer iterationTime;
		std::string status;
		if(!odometryIgnored && info.odomPose.isNull())
		{
			printf("Skipping node %d as it doesn't have odometry pose set.\n", data.id());
		}
		else
		{
			if(!odometryIgnored && !info.odomCovariance.empty() && info.odomCovariance.at<double>(0,0)>=9999)
			{
				printf("High variance detected, triggering a new map...\n");
				if(!incrementalMemory && processed>0)
				{
					showLocalizationStats();
					lastLocalizationPose = info.odomPose;
				}
				if(incrementalMemory)
				{
					rtabmap.triggerNewMap();
				}
			}
			UTimer t;
			if(!rtabmap.process(data, info.odomPose, info.odomCovariance, info.odomVelocity, globalMapStats))
			{
				printf("Failed processing node %d.\n", data.id());
				globalMapStats.clear();
			}
			else if(assemble2dMap || assemble3dMap || assemble2dOctoMap || assemble3dOctoMap)
			{
				globalMapStats.clear();
				double timeRtabmap = t.ticks();
				double timeUpdateInit = 0.0;
				double timeUpdateGrid = 0.0;
#ifdef RTABMAP_OCTOMAP
				double timeUpdateOctoMap = 0.0;
#endif
				const rtabmap::Statistics & stats = rtabmap.getStatistics();
				if(stats.poses().size() && stats.getLastSignatureData().id())
				{
					int id = stats.poses().rbegin()->first;
					if(id == stats.getLastSignatureData().id() &&
					   stats.getLastSignatureData().sensorData().gridCellSize() > 0.0f)
					{
						bool updateGridMap = false;
						bool updateOctoMap = false;
						if((assemble2dMap || assemble3dMap) && grid.addedNodes().find(id) == grid.addedNodes().end())
						{
							updateGridMap = true;
						}
#ifdef RTABMAP_OCTOMAP
						if((assemble2dOctoMap || assemble3dOctoMap) && octomap.addedNodes().find(id) == octomap.addedNodes().end())
						{
							updateOctoMap = true;
						}
#endif
						if(updateGridMap || updateOctoMap)
						{
							cv::Mat ground, obstacles, empty;
							stats.getLastSignatureData().sensorData().uncompressDataConst(0, 0, 0, 0, &ground, &obstacles, &empty);

							timeUpdateInit = t.ticks();

							if(updateGridMap)
							{
								grid.addToCache(id, ground, obstacles, empty);
								grid.update(stats.poses());
								timeUpdateGrid = t.ticks() + timeUpdateInit;
							}
#ifdef RTABMAP_OCTOMAP
							if(updateOctoMap)
							{
								const cv::Point3f & viewpoint = stats.getLastSignatureData().sensorData().gridViewPoint();
								octomap.addToCache(id, ground, obstacles, empty, viewpoint);
								octomap.update(stats.poses());
								timeUpdateOctoMap = t.ticks() + timeUpdateInit;
							}
#endif
						}
					}
				}

				globalMapStats.insert(std::make_pair(std::string("GlobalGrid/GridUpdate/ms"), timeUpdateGrid*1000.0f));
#ifdef RTABMAP_OCTOMAP
				//Simulate publishing
				double timePub2dOctoMap = 0.0;
				double timePub3dOctoMap = 0.0;
				if(assemble2dOctoMap)
				{
					float xMin, yMin, size;
					octomap.createProjectionMap(xMin, yMin, size);
					timePub2dOctoMap = t.ticks();
				}
				if(assemble3dOctoMap)
				{
					octomap.createCloud();
					timePub3dOctoMap = t.ticks();
				}

				globalMapStats.insert(std::make_pair(std::string("GlobalGrid/OctoMapUpdate/ms"), timeUpdateOctoMap*1000.0f));
				globalMapStats.insert(std::make_pair(std::string("GlobalGrid/OctoMapProjection/ms"), timePub2dOctoMap*1000.0f));
				globalMapStats.insert(std::make_pair(std::string("GlobalGrid/OctomapToCloud/ms"), timePub3dOctoMap*1000.0f));
				globalMapStats.insert(std::make_pair(std::string("GlobalGrid/TotalWithRtabmap/ms"), (timeUpdateGrid+timeUpdateOctoMap+timePub2dOctoMap+timePub3dOctoMap+timeRtabmap)*1000.0f));
#else
				globalMapStats.insert(std::make_pair(std::string("GlobalGrid/TotalWithRtabmap/ms"), (timeUpdateGrid+timeRtabmap)*1000.0f));
#endif
			}
		}

		const rtabmap::Statistics & stats = rtabmap.getStatistics();
		int loopId = stats.loopClosureId() > 0 ? stats.loopClosureId() : (stats.proximityDetectionId() > 0 ? stats.proximityDetectionId() : 0);
		int landmarkId = (int)uValue(stats.data(), rtabmap::Statistics::kLoopLandmark_detected(), 0.0f);
		int refMapId = stats.refImageMapId();
		++totalFrames;
		if (loopId>0)
		{
			++loopCount;
			int loopMapId = stats.loopClosureId() > 0? stats.loopClosureMapId(): stats.proximityDetectionMapId();
			printf("Processed %d/%d nodes [%d]... %dms Loop on %d [%d]\n", ++processed, totalIds, refMapId, int(iterationTime.ticks() * 1000), loopId, loopMapId);
		}
		else if(landmarkId != 0)
		{
			printf("Processed %d/%d nodes [%d]... %dms Loop on landmark %d\n", ++processed, totalIds, refMapId, int(iterationTime.ticks() * 1000), landmarkId);
		}
		else
		{
			printf("Processed %d/%d nodes [%d]... %dms\n", ++processed, totalIds, refMapId, int(iterationTime.ticks() * 1000));
		}

		// Here we accumulate statistics about distance from last localization
		if(!incrementalMemory &&
		   !lastLocalizationPose.isNull() &&
		   !info.odomPose.isNull())
		{
			if(loopId>0 || landmarkId != 0)
			{
				previousLocalizationDistances.push_back(lastLocalizationPose.getDistance(info.odomPose));
				lastLocalizationPose = info.odomPose;
			}
		}
		if(!incrementalMemory)
		{
			float totalTime = uValue(stats.data(), rtabmap::Statistics::kTimingTotal(), 0.0f);
			localizationTime.push_back(totalTime);
			if(loopId>0)
			{
				localizationDistances.push_back(stats.loopClosureTransform().getNorm());
			}
		}

		Transform odomPose = info.odomPose;
		data = dbReader.takeImage(&info);

		if(!incrementalMemory &&
		   !odomPose.isNull() &&
		   !info.odomPose.isNull())
		{
			odomDistances.push_back(odomPose.getDistance(info.odomPose));
		}
	}

	if(!incrementalMemory)
	{
		showLocalizationStats();
	}
	else
	{
		printf("Total loop closures = %d\n", loopCount);
	}

	printf("Closing database \"%s\"...\n", outputDatabasePath.c_str());
	rtabmap.close(true);
	printf("Closing database \"%s\"... done!\n", outputDatabasePath.c_str());

	if(assemble2dMap)
	{
		std::string outputPath = outputDatabasePath.substr(0, outputDatabasePath.size()-3) + "_map.pgm";
		float xMin,yMin;
		cv::Mat map = grid.getMap(xMin, yMin);
		if(!map.empty())
		{
			cv::Mat map8U(map.rows, map.cols, CV_8U);
			//convert to gray scaled map
			for (int i = 0; i < map.rows; ++i)
			{
				for (int j = 0; j < map.cols; ++j)
				{
					char v = map.at<char>(i, j);
					unsigned char gray;
					if(v == 0)
					{
						gray = 178;
					}
					else if(v == 100)
					{
						gray = 0;
					}
					else // -1
					{
						gray = 89;
					}
					map8U.at<unsigned char>(i, j) = gray;
				}
			}
			if(cv::imwrite(outputPath, map8U))
			{
				printf("Saving occupancy grid \"%s\"... done!\n", outputPath.c_str());
			}
			else
			{
				printf("Saving occupancy grid \"%s\"... failed!\n", outputPath.c_str());
			}
		}
		else
		{
			printf("2D map is empty! Cannot save it!\n");
		}
	}
	if(assemble3dMap)
	{
		std::string outputPath = outputDatabasePath.substr(0, outputDatabasePath.size()-3) + "_obstacles.pcd";
		if(pcl::io::savePCDFileBinary(outputPath, *grid.getMapObstacles()) == 0)
		{
			printf("Saving 3d obstacles \"%s\"... done!\n", outputPath.c_str());
		}
		else
		{
			printf("Saving 3d obstacles \"%s\"... failed!\n", outputPath.c_str());
		}
		if(grid.getMapGround()->size())
		{
			outputPath = outputDatabasePath.substr(0, outputDatabasePath.size()-3) + "_ground.pcd";
			if(pcl::io::savePCDFileBinary(outputPath, *grid.getMapGround()) == 0)
			{
				printf("Saving 3d ground \"%s\"... done!\n", outputPath.c_str());
			}
			else
			{
				printf("Saving 3d ground \"%s\"... failed!\n", outputPath.c_str());
			}
		}
		if(grid.getMapEmptyCells()->size())
		{
			outputPath = outputDatabasePath.substr(0, outputDatabasePath.size()-3) + "_empty.pcd";
			if(pcl::io::savePCDFileBinary(outputPath, *grid.getMapEmptyCells()) == 0)
			{
				printf("Saving 3d empty cells \"%s\"... done!\n", outputPath.c_str());
			}
			else
			{
				printf("Saving 3d empty cells \"%s\"... failed!\n", outputPath.c_str());
			}
		}
	}
#ifdef RTABMAP_OCTOMAP
	if(assemble2dOctoMap)
	{
		std::string outputPath = outputDatabasePath.substr(0, outputDatabasePath.size()-3) + "_octomap.pgm";
		float xMin,yMin,cellSize;
		cv::Mat map = octomap.createProjectionMap(xMin, yMin, cellSize);
		if(!map.empty())
		{
			cv::Mat map8U(map.rows, map.cols, CV_8U);
			//convert to gray scaled map
			for (int i = 0; i < map.rows; ++i)
			{
				for (int j = 0; j < map.cols; ++j)
				{
					char v = map.at<char>(i, j);
					unsigned char gray;
					if(v == 0)
					{
						gray = 178;
					}
					else if(v == 100)
					{
						gray = 0;
					}
					else // -1
					{
						gray = 89;
					}
					map8U.at<unsigned char>(i, j) = gray;
				}
			}
			if(cv::imwrite(outputPath, map8U))
			{
				printf("Saving octomap 2D projection \"%s\"... done!\n", outputPath.c_str());
			}
			else
			{
				printf("Saving octomap 2D projection \"%s\"... failed!\n", outputPath.c_str());
			}
		}
		else
		{
			printf("OctoMap 2D projection map is empty! Cannot save it!\n");
		}
	}
	if(assemble3dOctoMap)
	{
		std::string outputPath = outputDatabasePath.substr(0, outputDatabasePath.size()-3) + "_octomap_occupied.pcd";
		std::vector<int> obstacles, emptySpace, ground;
		pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = octomap.createCloud(0, &obstacles, &emptySpace, &ground);
		if(pcl::io::savePCDFile(outputPath, *cloud, obstacles, true) == 0)
		{
			printf("Saving obstacles cloud \"%s\"... done!\n", outputPath.c_str());
		}
		else
		{
			printf("Saving obstacles cloud \"%s\"... failed!\n", outputPath.c_str());
		}
		if(ground.size())
		{
			outputPath = outputDatabasePath.substr(0, outputDatabasePath.size()-3) + "_octomap_ground.pcd";
			if(pcl::io::savePCDFile(outputPath, *cloud, ground, true) == 0)
			{
				printf("Saving empty space cloud \"%s\"... done!\n", outputPath.c_str());
			}
			else
			{
				printf("Saving empty space cloud \"%s\"... failed!\n", outputPath.c_str());
			}
		}
		if(emptySpace.size())
		{
			outputPath = outputDatabasePath.substr(0, outputDatabasePath.size()-3) + "_octomap_empty.pcd";
			if(pcl::io::savePCDFile(outputPath, *cloud, emptySpace, true) == 0)
			{
				printf("Saving empty space cloud \"%s\"... done!\n", outputPath.c_str());
			}
			else
			{
				printf("Saving empty space cloud \"%s\"... failed!\n", outputPath.c_str());
			}
		}
	}
#endif

	return 0;
}
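The occupancy-value-to-grayscale conversion appears twice in the tool above (once for the grid map, once for the OctoMap projection). A small sketch of the same mapping factored into a helper; the helper name is illustrative only.

// Sketch: 0 = free -> 178, 100 = occupied -> 0, anything else (-1) = unknown -> 89,
// exactly as in the loops above.
static unsigned char occupancyToGray(char v)
{
	if(v == 0)   return 178; // free
	if(v == 100) return 0;   // occupied
	return 89;               // unknown (-1)
}
// Usage in the loops above would be:
//   map8U.at<unsigned char>(i, j) = occupancyToGray(map.at<char>(i, j));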
Exemplo n.º 27
0
Transform Odometry::process(SensorData & data, OdometryInfo * info)
{
	if(_pose.isNull())
	{
		_pose.setIdentity(); // initialized
	}

	UASSERT(!data.imageRaw().empty());

	if(!data.stereoCameraModel().isValidForProjection() &&
	   (data.cameraModels().size() == 0 || !data.cameraModels()[0].isValidForProjection()))
	{
		UERROR("Rectified images required! Calibrate your camera.");
		return Transform();
	}

	double dt = previousStamp_>0.0f?data.stamp() - previousStamp_:0.0;
	Transform guess = dt && guessFromMotion_?Transform::getIdentity():Transform();
	UASSERT_MSG(dt>0.0 || (dt == 0.0 && previousVelocityTransform_.isNull()), uFormat("dt=%f previous transform=%s", dt, previousVelocityTransform_.prettyPrint().c_str()).c_str());
	if(!previousVelocityTransform_.isNull())
	{
		if(guessFromMotion_)
		{
			if(_filteringStrategy == 1)
			{
				// use Kalman predict transform
				float vx,vy,vz, vroll,vpitch,vyaw;
				predictKalmanFilter(dt, &vx,&vy,&vz,&vroll,&vpitch,&vyaw);
				guess = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
			else
			{
				float vx,vy,vz, vroll,vpitch,vyaw;
				previousVelocityTransform_.getTranslationAndEulerAngles(vx,vy,vz, vroll,vpitch,vyaw);
				guess = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
		}
		else if(_filteringStrategy == 1)
		{
			predictKalmanFilter(dt);
		}
	}

	UTimer time;
	Transform t;
	if(_imageDecimation > 1)
	{
		// Decimation of images with calibrations
		SensorData decimatedData = data;
		decimatedData.setImageRaw(util2d::decimate(decimatedData.imageRaw(), _imageDecimation));
		decimatedData.setDepthOrRightRaw(util2d::decimate(decimatedData.depthOrRightRaw(), _imageDecimation));
		std::vector<CameraModel> cameraModels = decimatedData.cameraModels();
		for(unsigned int i=0; i<cameraModels.size(); ++i)
		{
			cameraModels[i] = cameraModels[i].scaled(1.0/double(_imageDecimation));
		}
		decimatedData.setCameraModels(cameraModels);
		StereoCameraModel stereoModel = decimatedData.stereoCameraModel();
		if(stereoModel.isValidForProjection())
		{
			stereoModel.scale(1.0/double(_imageDecimation));
		}
		decimatedData.setStereoCameraModel(stereoModel);

		// compute transform
		t = this->computeTransform(decimatedData, guess, info);

		// transform back the keypoints in the original image
		std::vector<cv::KeyPoint> kpts = decimatedData.keypoints();
		double log2value = log(double(_imageDecimation))/log(2.0);
		for(unsigned int i=0; i<kpts.size(); ++i)
		{
			kpts[i].pt.x *= _imageDecimation;
			kpts[i].pt.y *= _imageDecimation;
			kpts[i].size *= _imageDecimation;
			kpts[i].octave += log2value;
		}
		data.setFeatures(kpts, decimatedData.descriptors());

		if(info)
		{
			UASSERT(info->newCorners.size() == info->refCorners.size());
			for(unsigned int i=0; i<info->newCorners.size(); ++i)
			{
				info->refCorners[i].x *= _imageDecimation;
				info->refCorners[i].y *= _imageDecimation;
				info->newCorners[i].x *= _imageDecimation;
				info->newCorners[i].y *= _imageDecimation;
			}
			for(std::multimap<int, cv::KeyPoint>::iterator iter=info->words.begin(); iter!=info->words.end(); ++iter)
			{
				iter->second.pt.x *= _imageDecimation;
				iter->second.pt.y *= _imageDecimation;
				iter->second.size *= _imageDecimation;
				iter->second.octave += log2value;
			}
		}
	}
	else
	{
		t = this->computeTransform(data, guess, info);
	}

	if(info)
	{
		info->timeEstimation = time.ticks();
		info->lost = t.isNull();
		info->stamp = data.stamp();
		info->interval = dt;
		info->transform = t;

		if(!data.groundTruth().isNull())
		{
			if(!previousGroundTruthPose_.isNull())
			{
				info->transformGroundTruth = previousGroundTruthPose_.inverse() * data.groundTruth();
			}
			previousGroundTruthPose_ = data.groundTruth();
		}
	}

	if(!t.isNull())
	{
		_resetCurrentCount = _resetCountdown;

		float vx,vy,vz, vroll,vpitch,vyaw;
		t.getTranslationAndEulerAngles(vx,vy,vz, vroll,vpitch,vyaw);

		// transform to velocity
		if(dt)
		{
			vx /= dt;
			vy /= dt;
			vz /= dt;
			vroll /= dt;
			vpitch /= dt;
			vyaw /= dt;
		}

		if(_force3DoF || !_holonomic || particleFilters_.size() || _filteringStrategy==1)
		{
			if(_filteringStrategy == 1)
			{
				if(previousVelocityTransform_.isNull())
				{
					// reset Kalman
					if(dt)
					{
						initKalmanFilter(t, vx,vy,vz,vroll,vpitch,vyaw);
					}
					else
					{
						initKalmanFilter(t);
					}
				}
				else
				{
					// Kalman filtering
					updateKalmanFilter(vx,vy,vz,vroll,vpitch,vyaw);
				}
			}
			else if(particleFilters_.size())
			{
				// Particle filtering
				UASSERT(particleFilters_.size()==6);
				if(previousVelocityTransform_.isNull())
				{
					particleFilters_[0]->init(vx);
					particleFilters_[1]->init(vy);
					particleFilters_[2]->init(vz);
					particleFilters_[3]->init(vroll);
					particleFilters_[4]->init(vpitch);
					particleFilters_[5]->init(vyaw);
				}
				else
				{
					vx = particleFilters_[0]->filter(vx);
					vy = particleFilters_[1]->filter(vy);
					vyaw = particleFilters_[5]->filter(vyaw);

					if(!_holonomic)
					{
						// arc trajectory around ICR
						float tmpY = vyaw!=0.0f ? vx / tan((CV_PI-vyaw)/2.0f) : 0.0f;
						if(fabs(tmpY) < fabs(vy) || (tmpY<=0 && vy >=0) || (tmpY>=0 && vy<=0))
						{
							vy = tmpY;
						}
						else
						{
							vyaw = (atan(vx/vy)*2.0f-CV_PI)*-1;
						}
					}

					if(!_force3DoF)
					{
						vz = particleFilters_[2]->filter(vz);
						vroll = particleFilters_[3]->filter(vroll);
						vpitch = particleFilters_[4]->filter(vpitch);
					}
				}

				if(info)
				{
					info->timeParticleFiltering = time.ticks();
				}

				if(_force3DoF)
				{
					vz = 0.0f;
					vroll = 0.0f;
					vpitch = 0.0f;
				}
			}
			else if(!_holonomic)
			{
				// arc trajectory around ICR
				vy = vyaw!=0.0f ? vx / tan((CV_PI-vyaw)/2.0f) : 0.0f;
				if(_force3DoF)
				{
					vz = 0.0f;
					vroll = 0.0f;
					vpitch = 0.0f;
				}
			}

			if(dt)
			{
				t = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
			else
			{
				t = Transform(vx, vy, vz, vroll, vpitch, vyaw);
			}

			if(info)
			{
				info->transformFiltered = t;
			}
		}

		previousStamp_ = data.stamp();
		previousVelocityTransform_.setNull();
		if(dt)
		{
			previousVelocityTransform_ = Transform(vx, vy, vz, vroll, vpitch, vyaw);
		}

		if(info)
		{
			distanceTravelled_ += t.getNorm();
			info->distanceTravelled = distanceTravelled_;
		}

		return _pose *= t; // updated
	}
	else if(_resetCurrentCount > 0)
	{
		UWARN("Odometry lost! Odometry will be reset after next %d consecutive unsuccessful odometry updates...", _resetCurrentCount);

		--_resetCurrentCount;
		if(_resetCurrentCount == 0)
		{
			UWARN("Odometry automatically reset to latest pose!");
			this->reset(_pose);
		}
	}

	previousVelocityTransform_.setNull();
	previousStamp_ = 0;

	return Transform();
}
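The constant-velocity guess near the top of this example is simply the previous velocity transform (m/s and rad/s components) scaled by the elapsed time dt. A minimal sketch using the same Transform API shown above:

#include <rtabmap/core/Transform.h>

// Sketch of the motion guess: scale each velocity component by dt and rebuild a Transform.
rtabmap::Transform guessFromVelocity(const rtabmap::Transform & velocity, double dt)
{
	float vx, vy, vz, vroll, vpitch, vyaw;
	velocity.getTranslationAndEulerAngles(vx, vy, vz, vroll, vpitch, vyaw);
	return rtabmap::Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
}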
Exemplo n.º 28
0
// return not null transform if odometry is correctly computed
Transform OdometryF2F::computeTransform(
		SensorData & data,
		const Transform & guess,
		OdometryInfo * info)
{
	UTimer timer;
	Transform output;
	if(!data.rightRaw().empty() && !data.stereoCameraModel().isValidForProjection())
	{
		UERROR("Calibrated stereo camera required");
		return output;
	}
	if(!data.depthRaw().empty() &&
		(data.cameraModels().size() != 1 || !data.cameraModels()[0].isValidForProjection()))
	{
		UERROR("Calibrated camera required (multi-cameras not supported).");
		return output;
	}

	RegistrationInfo regInfo;

	UASSERT(!this->getPose().isNull());
	if(lastKeyFramePose_.isNull())
	{
		lastKeyFramePose_ = this->getPose(); // reset to current pose
	}
	Transform motionSinceLastKeyFrame = lastKeyFramePose_.inverse()*this->getPose();

	Signature newFrame(data);
	if(refFrame_.sensorData().isValid())
	{
		Signature tmpRefFrame = refFrame_;
		output = registrationPipeline_->computeTransformationMod(
				tmpRefFrame,
				newFrame,
				!guess.isNull()?motionSinceLastKeyFrame*guess:Transform(),
				&regInfo);

		if(info && this->isInfoDataFilled())
		{
			std::list<std::pair<int, std::pair<cv::KeyPoint, cv::KeyPoint> > > pairs;
			EpipolarGeometry::findPairsUnique(tmpRefFrame.getWords(), newFrame.getWords(), pairs);
			info->refCorners.resize(pairs.size());
			info->newCorners.resize(pairs.size());
			std::map<int, int> idToIndex;
			int i=0;
			for(std::list<std::pair<int, std::pair<cv::KeyPoint, cv::KeyPoint> > >::iterator iter=pairs.begin();
				iter!=pairs.end();
				++iter)
			{
				info->refCorners[i] = iter->second.first.pt;
				info->newCorners[i] = iter->second.second.pt;
				idToIndex.insert(std::make_pair(iter->first, i));
				++i;
			}
			info->cornerInliers.resize(regInfo.inliersIDs.size(), 1);
			i=0;
			for(; i<(int)regInfo.inliersIDs.size(); ++i)
			{
				info->cornerInliers[i] = idToIndex.at(regInfo.inliersIDs[i]);
			}

			Transform t = this->getPose()*motionSinceLastKeyFrame.inverse();
			for(std::multimap<int, cv::Point3f>::const_iterator iter=tmpRefFrame.getWords3().begin(); iter!=tmpRefFrame.getWords3().end(); ++iter)
			{
				info->localMap.insert(std::make_pair(iter->first, util3d::transformPoint(iter->second, t)));
			}
			info->words = newFrame.getWords();
		}
	}
	else
	{
		//return Identity
		output = Transform::getIdentity();
		// a very high variance indicates that the new pose is not linked with the previous one
		regInfo.variance = 9999;
	}

	if(!output.isNull())
	{
		output = motionSinceLastKeyFrame.inverse() * output;

		// new key-frame?
		if( (registrationPipeline_->isImageRequired() && (keyFrameThr_ == 0 || float(regInfo.inliers) <= keyFrameThr_*float(refFrame_.sensorData().keypoints().size()))) ||
			(registrationPipeline_->isScanRequired() && (scanKeyFrameThr_ == 0 || regInfo.icpInliersRatio <= scanKeyFrameThr_)))
		{
			UDEBUG("Update key frame");
			int features = newFrame.getWordsDescriptors().size();
			if(features == 0)
			{
				newFrame = Signature(data);
				// this will generate features only for the first frame or if optical flow was used (no 3d words)
				Signature dummy;
				registrationPipeline_->computeTransformationMod(
						newFrame,
						dummy);
				features = (int)newFrame.sensorData().keypoints().size();
			}

			if((features >= registrationPipeline_->getMinVisualCorrespondences()) &&
			   (registrationPipeline_->getMinGeometryCorrespondencesRatio()==0.0f ||
					   (newFrame.sensorData().laserScanRaw().cols &&
					   (newFrame.sensorData().laserScanMaxPts() == 0 || float(newFrame.sensorData().laserScanRaw().cols)/float(newFrame.sensorData().laserScanMaxPts())>=registrationPipeline_->getMinGeometryCorrespondencesRatio()))))
			{
				refFrame_ = newFrame;

				refFrame_.setWords(std::multimap<int, cv::KeyPoint>());
				refFrame_.setWords3(std::multimap<int, cv::Point3f>());
				refFrame_.setWordsDescriptors(std::multimap<int, cv::Mat>());

				//reset motion
				lastKeyFramePose_.setNull();
			}
			else
			{
				if (!refFrame_.sensorData().isValid())
				{
					// Don't send odometry if we don't have a keyframe yet
					output.setNull();
				}

				if(features < registrationPipeline_->getMinVisualCorrespondences())
				{
					UWARN("Too low 2D features (%d), keeping last key frame...", features);
				}

				if(registrationPipeline_->getMinGeometryCorrespondencesRatio()>0.0f && newFrame.sensorData().laserScanRaw().cols==0)
				{
					UWARN("Too low scan points (%d), keeping last key frame...", newFrame.sensorData().laserScanRaw().cols);
				}
				else if(registrationPipeline_->getMinGeometryCorrespondencesRatio()>0.0f && newFrame.sensorData().laserScanMaxPts() != 0 && float(newFrame.sensorData().laserScanRaw().cols)/float(newFrame.sensorData().laserScanMaxPts())<registrationPipeline_->getMinGeometryCorrespondencesRatio())
				{
					UWARN("Too low scan points ratio (%d < %d), keeping last key frame...", float(newFrame.sensorData().laserScanRaw().cols)/float(newFrame.sensorData().laserScanMaxPts()), registrationPipeline_->getMinGeometryCorrespondencesRatio());
				}
			}
		}
	}
	else if(!regInfo.rejectedMsg.empty())
	{
		UWARN("Registration failed: \"%s\"", regInfo.rejectedMsg.c_str());
	}

	data.setFeatures(newFrame.sensorData().keypoints(), newFrame.sensorData().descriptors());

	if(info)
	{
		info->type = 1;
		info->variance = regInfo.variance;
		info->inliers = regInfo.inliers;
		info->icpInliersRatio = regInfo.icpInliersRatio;
		info->matches = regInfo.matches;
		info->features = newFrame.sensorData().keypoints().size();
	}

	UINFO("Odom update time = %fs lost=%s inliers=%d, ref frame corners=%d, transform accepted=%s",
			timer.elapsed(),
			output.isNull()?"true":"false",
			(int)regInfo.inliers,
			(int)newFrame.sensorData().keypoints().size(),
			!output.isNull()?"true":"false");

	return output;
}
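The visual key-frame decision above boils down to one comparison: take a new key frame when the inlier count drops to or below keyFrameThr times the key frame's keypoint count (a threshold of 0 means "always"). A sketch with illustrative names, not the class' actual members:

// Sketch of the visual key-frame criterion used in the example above.
static bool needNewVisualKeyFrame(int inliers, int keyFrameKeypoints, float keyFrameThr)
{
	return keyFrameThr == 0.0f || float(inliers) <= keyFrameThr * float(keyFrameKeypoints);
}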
Exemplo n.º 29
0
// return not null transform if odometry is correctly computed
Transform OdometryDVO::computeTransform(
		SensorData & data,
		const Transform & guess,
		OdometryInfo * info)
{
	Transform t;

#ifdef RTABMAP_DVO
	UTimer timer;

	if(data.imageRaw().empty() ||
		data.imageRaw().rows != data.depthOrRightRaw().rows ||
		data.imageRaw().cols != data.depthOrRightRaw().cols)
	{
		UERROR("Not supported input!");
		return t;
	}

	if(!(data.cameraModels().size() == 1 && data.cameraModels()[0].isValidForReprojection()))
	{
		UERROR("Invalid camera model! Only single RGB-D camera supported by DVO. Try another odometry approach.");
		return t;
	}

	if(dvo_ == 0)
	{
		dvo::DenseTracker::Config cfg = dvo::DenseTracker::getDefaultConfig();

		dvo_ = new dvo::DenseTracker(cfg);
	}

	cv::Mat grey, grey_s16, depth_inpainted, depth_mask, depth_mono, depth_float;
	if(data.imageRaw().type() != CV_32FC1)
	{
		if(data.imageRaw().type() == CV_8UC3)
		{
			cv::cvtColor(data.imageRaw(), grey, CV_BGR2GRAY);
		}
		else
		{
			grey = data.imageRaw();
		}

		grey.convertTo(grey_s16, CV_32F);
	}
	else
	{
		grey_s16 = data.imageRaw();
	}

	// make sure all zeros are NAN
	if(data.depthRaw().type() == CV_32FC1)
	{
		depth_float = data.depthRaw();
		for(int i=0; i<depth_float.rows; ++i)
		{
			for(int j=0; j<depth_float.cols; ++j)
			{
				float & d = depth_float.at<float>(i,j);
				if(d == 0.0f)
				{
					d = NAN;
				}
			}
		}
	}
	else if(data.depthRaw().type() == CV_16UC1)
	{
		depth_float = cv::Mat(data.depthRaw().size(), CV_32FC1);
		for(int i=0; i<data.depthRaw().rows; ++i)
		{
			for(int j=0; j<data.depthRaw().cols; ++j)
			{
				float d = float(data.depthRaw().at<unsigned short>(i,j))/1000.0f;
				depth_float.at<float>(i, j) = d==0.0f?NAN:d;
			}
		}
	}
	else
	{
		UFATAL("Unknown depth format!");
	}

	if(camera_ == 0)
	{
		dvo::core::IntrinsicMatrix intrinsics = dvo::core::IntrinsicMatrix::create(
						data.cameraModels()[0].fx(),
						data.cameraModels()[0].fy(),
						data.cameraModels()[0].cx(),
						data.cameraModels()[0].cy());
		camera_ = new dvo::core::RgbdCameraPyramid(
				data.cameraModels()[0].imageWidth(),
				data.cameraModels()[0].imageHeight(),
				intrinsics);
	}

	dvo::core::RgbdImagePyramid * current = new dvo::core::RgbdImagePyramid(*camera_, grey_s16, depth_float);

	cv::Mat covariance;
	if(reference_ == 0)
	{
		reference_ = current;
		if(!lost_)
		{
			t.setIdentity();
		}
		covariance = cv::Mat::eye(6,6,CV_64FC1) * 9999.0;
	}
	else
	{
		dvo::DenseTracker::Result result;
		dvo_->match(*reference_, *current, result);
		t = Transform::fromEigen3d(result.Transformation);

		if(result.Information(0,0) > 0.0 && result.Information(0,0) != 1.0)
		{
			lost_ = false;
			cv::Mat information = cv::Mat::eye(6,6, CV_64FC1);
			memcpy(information.data, result.Information.data(), 36*sizeof(double));
			covariance = information.inv();
			covariance *= 100.0; // to be in the same scale as loop closure detection

			Transform currentMotion = t;
			t = motionFromKeyFrame_.inverse() * t;

			// TODO make parameters?
			if(currentMotion.getNorm() > 0.01 || currentMotion.getAngle() > 0.01)
			{
				if(info)
				{
					info->keyFrameAdded = true;
				}
				// new keyframe
				delete reference_;
				reference_ = current;
				motionFromKeyFrame_.setIdentity();
			}
			else
			{
				delete current;
				motionFromKeyFrame_ = currentMotion;
			}
		}
		else
		{
			lost_ = true;
			delete reference_;
			delete current;
			reference_ = 0; // this will make restart from the next frame
			motionFromKeyFrame_.setIdentity();
			t.setNull();
			covariance = cv::Mat::eye(6,6,CV_64FC1) * 9999.0;
			UWARN("dvo failed to estimate motion, tracking will be reinitialized on next frame.");
		}
	}

	const Transform & localTransform = data.cameraModels()[0].localTransform();
	if(!t.isNull() && !t.isIdentity() && !localTransform.isIdentity() && !localTransform.isNull())
	{
		// from camera frame to base frame
		t = localTransform * t * localTransform.inverse();
	}

	if(info)
	{
		info->type = (int)kTypeDVO;
		info->covariance = covariance;
	}

	UINFO("Odom update time = %fs", timer.elapsed());

#else
	UERROR("RTAB-Map is not built with DVO support! Select another visual odometry approach.");
#endif
	return t;
}
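The final frame change in this example expresses a motion estimated in the camera frame in the robot/base frame using the camera's local transform. A minimal sketch with the same Transform API as above:

#include <rtabmap/core/Transform.h>

// Sketch of the similarity change of frame: t_base = L * t_camera * L^-1.
rtabmap::Transform cameraToBaseMotion(
		const rtabmap::Transform & motionInCameraFrame,
		const rtabmap::Transform & localTransform) // base -> camera
{
	return localTransform * motionInCameraFrame * localTransform.inverse();
}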
Exemplo n.º 30
0
void MapsManager::publishMaps(
		const std::map<int, rtabmap::Transform> & poses,
		const ros::Time & stamp,
		const std::string & mapFrameId)
{
	UDEBUG("Publishing maps...");

	// publish maps
	if(cloudMapPub_.getNumSubscribers())
	{
		// generate the assembled cloud!
		UTimer time;
		pcl::PointCloud<pcl::PointXYZRGB>::Ptr assembledCloud(new pcl::PointCloud<pcl::PointXYZRGB>);
		int count = 0;
		for(std::map<int, Transform>::const_iterator iter = poses.begin(); iter!=poses.end(); ++iter)
		{
			std::map<int, pcl::PointCloud<pcl::PointXYZRGB>::Ptr >::iterator jter = clouds_.find(iter->first);
			if(jter != clouds_.end())
			{
				pcl::PointCloud<pcl::PointXYZRGB>::Ptr transformed = util3d::transformPointCloud(jter->second, iter->second);
				*assembledCloud+=*transformed;
				++count;
			}
		}

		if(assembledCloud->size())
		{
			if(cloudVoxelSize_ > 0 && cloudOutputVoxelized_)
			{
				assembledCloud = util3d::voxelize(assembledCloud, cloudVoxelSize_);
			}
			ROS_INFO("Assembled %d clouds (%fs)", count, time.ticks());

			sensor_msgs::PointCloud2::Ptr cloudMsg(new sensor_msgs::PointCloud2);
			pcl::toROSMsg(*assembledCloud, *cloudMsg);
			cloudMsg->header.stamp = stamp;
			cloudMsg->header.frame_id = mapFrameId;
			cloudMapPub_.publish(cloudMsg);
		}
		else if(poses.size())
		{
			ROS_WARN("Cloud map is empty! (clouds=%d)", (int)clouds_.size());
		}
	}
	else if(mapCacheCleanup_)
	{
		clouds_.clear();
	}

	if(projMapPub_.getNumSubscribers())
	{
		// create the projection map
		float xMin=0.0f, yMin=0.0f, gridCellSize = 0.05f;
		cv::Mat pixels = this->generateProjMap(poses, xMin, yMin, gridCellSize);

		if(!pixels.empty())
		{
			//init
			nav_msgs::OccupancyGrid map;
			map.info.resolution = gridCellSize;
			map.info.origin.position.x = 0.0;
			map.info.origin.position.y = 0.0;
			map.info.origin.position.z = 0.0;
			map.info.origin.orientation.x = 0.0;
			map.info.origin.orientation.y = 0.0;
			map.info.origin.orientation.z = 0.0;
			map.info.origin.orientation.w = 1.0;

			map.info.width = pixels.cols;
			map.info.height = pixels.rows;
			map.info.origin.position.x = xMin;
			map.info.origin.position.y = yMin;
			map.data.resize(map.info.width * map.info.height);

			memcpy(map.data.data(), pixels.data, map.info.width * map.info.height);

			map.header.frame_id = mapFrameId;
			map.header.stamp = stamp;

			projMapPub_.publish(map);
		}
		else if(poses.size())
		{
			ROS_WARN("Projection map is empty! (proj maps=%d)", (int)projMaps_.size());
		}
	}
	else if(mapCacheCleanup_)
	{
		projMaps_.clear();
	}

	if(gridMapPub_.getNumSubscribers())
	{
		// create the grid map
		float xMin=0.0f, yMin=0.0f, gridCellSize = 0.05f;
		cv::Mat pixels = this->generateGridMap(poses, xMin, yMin, gridCellSize);

		if(!pixels.empty())
		{
			//init
			nav_msgs::OccupancyGrid map;
			map.info.resolution = gridCellSize;
			map.info.origin.position.x = 0.0;
			map.info.origin.position.y = 0.0;
			map.info.origin.position.z = 0.0;
			map.info.origin.orientation.x = 0.0;
			map.info.origin.orientation.y = 0.0;
			map.info.origin.orientation.z = 0.0;
			map.info.origin.orientation.w = 1.0;

			map.info.width = pixels.cols;
			map.info.height = pixels.rows;
			map.info.origin.position.x = xMin;
			map.info.origin.position.y = yMin;
			map.data.resize(map.info.width * map.info.height);

			memcpy(map.data.data(), pixels.data, map.info.width * map.info.height);

			map.header.frame_id = mapFrameId;
			map.header.stamp = stamp;

			gridMapPub_.publish(map);
		}
		else if(poses.size())
		{
			ROS_WARN("Grid map is empty! (local maps=%d)", (int)gridMaps_.size());
		}
	}
	else if(mapCacheCleanup_)
	{
		gridMaps_.clear();
	}
}
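The two occupancy-grid publishing blocks above follow the same pattern. Below is a sketch of that pattern factored into a helper, assuming standard nav_msgs::OccupancyGrid fields and a CV_8SC1 pixel image with -1/0/100 values; the helper name is illustrative and not part of MapsManager.

#include <nav_msgs/OccupancyGrid.h>
#include <opencv2/core.hpp>
#include <cstring>
#include <string>

// Sketch: wrap an occupancy image into a ROS OccupancyGrid message, as done twice above.
nav_msgs::OccupancyGrid toOccupancyGridMsg(
		const cv::Mat & pixels, float xMin, float yMin, float cellSize,
		const std::string & frameId, const ros::Time & stamp)
{
	nav_msgs::OccupancyGrid map;
	map.info.resolution = cellSize;
	map.info.width = pixels.cols;
	map.info.height = pixels.rows;
	map.info.origin.position.x = xMin;
	map.info.origin.position.y = yMin;
	map.info.origin.orientation.w = 1.0; // identity orientation; other fields default to 0
	map.data.resize(map.info.width * map.info.height);
	memcpy(map.data.data(), pixels.data, map.data.size());
	map.header.frame_id = frameId;
	map.header.stamp = stamp;
	return map;
}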