Example #1
CameraModel::CameraModel(
		const std::string & name,
		double fx,
		double fy,
		double cx,
		double cy,
		const Transform & localTransform,
		double Tx,
		const cv::Size & imageSize) :
		name_(name),
		imageSize_(imageSize),
		K_(cv::Mat::eye(3, 3, CV_64FC1)),
		localTransform_(localTransform)
{
	UASSERT_MSG(fx > 0.0, uFormat("fx=%f", fx).c_str());
	UASSERT_MSG(fy > 0.0, uFormat("fy=%f", fy).c_str());
	UASSERT_MSG(cx >= 0.0, uFormat("cx=%f", cx).c_str());
	UASSERT_MSG(cy >= 0.0, uFormat("cy=%f", cy).c_str());
	UASSERT(!localTransform.isNull());
	if(Tx != 0.0)
	{
		P_ = cv::Mat::eye(3, 4, CV_64FC1);
		P_.at<double>(0,0) = fx;
		P_.at<double>(1,1) = fy;
		P_.at<double>(0,2) = cx;
		P_.at<double>(1,2) = cy;
		P_.at<double>(0,3) = Tx;
	}

	K_.at<double>(0,0) = fx;
	K_.at<double>(1,1) = fy;
	K_.at<double>(0,2) = cx;
	K_.at<double>(1,2) = cy;
}
Example #2
CameraModel::CameraModel(
		const std::string & name,
		double fx,
		double fy,
		double cx,
		double cy,
		const Transform & localTransform,
		double Tx) :
		name_(name),
		K_(cv::Mat::eye(3, 3, CV_64FC1)),
		D_(cv::Mat::zeros(1, 5, CV_64FC1)),
		R_(cv::Mat::eye(3, 3, CV_64FC1)),
		P_(cv::Mat::eye(3, 4, CV_64FC1)),
		localTransform_(localTransform)
{
	UASSERT_MSG(fx >= 0.0, uFormat("fx=%f", fx).c_str());
	UASSERT_MSG(fy >= 0.0, uFormat("fy=%f", fy).c_str());
	UASSERT_MSG(cx >= 0.0, uFormat("cx=%f", cx).c_str());
	UASSERT_MSG(cy >= 0.0, uFormat("cy=%f", cy).c_str());
	P_.at<double>(0,0) = fx;
	P_.at<double>(1,1) = fy;
	P_.at<double>(0,2) = cx;
	P_.at<double>(1,2) = cy;
	P_.at<double>(0,3) = Tx;

	K_.at<double>(0,0) = fx;
	K_.at<double>(1,1) = fy;
	K_.at<double>(0,2) = cx;
	K_.at<double>(1,2) = cy;
}
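The two constructors above differ only in whether an image size is stored and how strictly fx/fy are checked. Below is a hypothetical usage sketch, not taken from RTAB-Map itself: the intrinsic values are made-up VGA numbers, the include paths and Transform::getIdentity() call follow the usual rtabmap layout, and Tx is left at 0 for a plain monocular model.

#include <opencv2/core/core.hpp>
#include <rtabmap/core/CameraModel.h>
#include <rtabmap/core/Transform.h>

using namespace rtabmap;

int main()
{
	// 640x480 camera, fx = fy = 525, principal point near the image center,
	// identity transform between the camera frame and the robot base frame.
	CameraModel model(
			"camera0",
			525.0, 525.0,        // fx, fy (must be > 0 for the first constructor)
			319.5, 239.5,        // cx, cy
			Transform::getIdentity(),
			0.0,                 // Tx: no baseline term, monocular model
			cv::Size(640, 480));
	return 0;
}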
Example #3
void Signature::addLink(const Link & link)
{
	UDEBUG("Add link %d to %d (type=%d)", link.to(), this->id(), (int)link.type());
	UASSERT_MSG(link.from() == this->id(), uFormat("%d->%d for signature %d (type=%d)", link.from(), link.to(), this->id(), link.type()).c_str());
	UASSERT_MSG(link.to() != this->id(), uFormat("%d->%d for signature %d (type=%d)", link.from(), link.to(), this->id(), link.type()).c_str());
	std::pair<std::map<int, Link>::iterator, bool> pair = _links.insert(std::make_pair(link.to(), link));
	UASSERT_MSG(pair.second, uFormat("Link %d (type=%d) already added to signature %d!", link.to(), link.type(), this->id()).c_str());
	_linksModified = true;
}
Example #4
// generate color stereo input
void generateColorStereoInput(TriclopsContext const &context,
	FlyCapture2::Image const &grabbedImage,
	ImageContainer &imageCont,
	TriclopsColorStereoPair &stereoPair)
{
	Fc2Triclops::ErrorType fc2TriclopsError;
	TriclopsError te;

	TriclopsColorImage triclopsImageContainer[2];
	FlyCapture2::Image *tmpImage = imageCont.tmp;
	FlyCapture2::Image *unprocessedImage = imageCont.unprocessed;

	// Convert the pixel interleaved raw data to de-interleaved and color processed data
	fc2TriclopsError = Fc2Triclops::unpackUnprocessedRawOrMono16Image(
		grabbedImage,
		true /*assume little endian*/,
		tmpImage[0],
		tmpImage[1]);

	UASSERT_MSG(fc2TriclopsError == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)fc2TriclopsError).c_str());

	// preprocess color image
	for (int i = 0; i < 2; ++i) {
		FlyCapture2::Error fc2Error;
		fc2Error = tmpImage[i].SetColorProcessing(FlyCapture2::HQ_LINEAR);
		UASSERT_MSG(fc2Error == FlyCapture2::PGRERROR_OK, fc2Error.GetDescription());

		// convert preprocessed color image to BGRU format
		fc2Error = tmpImage[i].Convert(FlyCapture2::PIXEL_FORMAT_BGRU,
			&unprocessedImage[i]);
		UASSERT_MSG(fc2Error == FlyCapture2::PGRERROR_OK, fc2Error.GetDescription());
	}

	// create triclops image for right and left lens
	for (size_t i = 0; i < 2; ++i) {
		TriclopsColorImage *image = &triclopsImageContainer[i];
		te = triclopsLoadColorImageFromBuffer(
			reinterpret_cast<TriclopsColorPixel *>(unprocessedImage[i].GetData()),
			unprocessedImage[i].GetRows(),
			unprocessedImage[i].GetCols(),
			unprocessedImage[i].GetStride(),
			image);
		UASSERT_MSG(te == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)te).c_str());
	}

	// create stereo input from the triclops images constructed above
	// pack image data into a TriclopsColorStereoPair structure
	te = triclopsBuildColorStereoPairFromBuffers(
		context,
		&triclopsImageContainer[1],
		&triclopsImageContainer[0],
		&stereoPair);
	UASSERT_MSG(te == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)te).c_str());
}
Example #5
int RTABMapApp::setMappingParameter(const std::string & key, const std::string & value)
{
	if(rtabmap::Parameters::getDefaultParameters().find(key) != rtabmap::Parameters::getDefaultParameters().end())
	{
		LOGI(uFormat("Setting param \"%s\"  to \"\"", key.c_str(), value.c_str()).c_str());
		uInsert(mappingParameters_, rtabmap::ParametersPair(key, value));
		UEventsManager::post(new rtabmap::ParamEvent(mappingParameters_));
		return 0;
	}
	else
	{
		LOGE(uFormat("Key \"%s\" doesn't exist!", key.c_str()).c_str());
		return -1;
	}
}
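setMappingParameter() only accepts keys that already exist in rtabmap::Parameters::getDefaultParameters(). Below is a minimal standalone sketch of that same lookup, assuming only the public Parameters API; "Rtabmap/DetectionRate" is an existing RTAB-Map key, while "Not/AParameter" is an intentionally bogus one used for illustration.

#include <rtabmap/core/Parameters.h>
#include <cstdio>
#include <string>

bool isKnownParameter(const std::string & key)
{
	// Same validation as setMappingParameter(): the key must be present
	// in the default parameter map to be accepted.
	const rtabmap::ParametersMap & defaults = rtabmap::Parameters::getDefaultParameters();
	return defaults.find(key) != defaults.end();
}

int main()
{
	std::printf("Rtabmap/DetectionRate known: %s\n", isKnownParameter("Rtabmap/DetectionRate") ? "yes" : "no");
	std::printf("Not/AParameter known: %s\n", isKnownParameter("Not/AParameter") ? "yes" : "no");
	return 0;
}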
Example #6
	virtual void onInit()
	{
		ros::NodeHandle & nh = getNodeHandle();
		ros::NodeHandle & pnh = getPrivateNodeHandle();

		std::string modelPath;
		pnh.param("model", modelPath, modelPath);

		if(modelPath.empty())
		{
			NODELET_ERROR("undistort_depth: \"model\" parameter should be set!");
		}

		model_.load(modelPath);
		if(!model_.isValid())
		{
			NODELET_ERROR("Loaded distortion model from \"%s\" is not valid!", modelPath.c_str());
		}
		else
		{
			image_transport::ImageTransport it(nh);
			sub_ = it.subscribe("depth", 1, &UndistortDepth::callback, this);
			pub_ = it.advertise(uFormat("%s_undistorted", nh.resolveName("depth").c_str()), 1);
		}
	}
Example #7
QStringList ParametersToolBox::resetPage(int index)
{
	QStringList paramChanged;
	const QObjectList & children = stackedWidget_->widget(index)->children().first()->children().first()->children();
	for(int j=0; j<children.size();++j)
	{
		QString key = children.at(j)->objectName();
		// ignore working memory
		QString group = key.split("/").first();
		if(parameters_.find(key.toStdString())!=parameters_.end())
		{
			UASSERT_MSG(parameters_.find(key.toStdString()) != parameters_.end(), uFormat("key=%s", key.toStdString().c_str()).c_str());
			std::string value = Parameters::getDefaultParameters().at(key.toStdString());
			parameters_.at(key.toStdString()) = value;

			if(qobject_cast<QComboBox*>(children.at(j)))
			{
				if(((QComboBox*)children.at(j))->currentIndex() != QString::fromStdString(value).split(':').first().toInt())
				{
					((QComboBox*)children.at(j))->setCurrentIndex(QString::fromStdString(value).split(':').first().toInt());
					paramChanged.append(key);
				}
			}
			else if(qobject_cast<QSpinBox*>(children.at(j)))
			{
				if(((QSpinBox*)children.at(j))->value() != uStr2Int(value))
				{
					((QSpinBox*)children.at(j))->setValue(uStr2Int(value));
					paramChanged.append(key);
				}
			}
			else if(qobject_cast<QDoubleSpinBox*>(children.at(j)))
			{
				if(((QDoubleSpinBox*)children.at(j))->value() != uStr2Double(value))
				{
					((QDoubleSpinBox*)children.at(j))->setValue(uStr2Double(value));
					paramChanged.append(key);
				}
			}
			else if(qobject_cast<QCheckBox*>(children.at(j)))
			{
				if(((QCheckBox*)children.at(j))->isChecked() != uStr2Bool(value))
				{
					((QCheckBox*)children.at(j))->setChecked(uStr2Bool(value));
					paramChanged.append(key);
				}
			}
			else if(qobject_cast<QLineEdit*>(children.at(j)))
			{
				if(((QLineEdit*)children.at(j))->text().compare(QString::fromStdString(value)) != 0)
				{
					((QLineEdit*)children.at(j))->setText(QString::fromStdString(value));
					paramChanged.append(key);
				}
			}
		}
	}
	return paramChanged;
}
Example #8
void ParametersToolBox::updateParameter(const std::string & key, const std::string & value)
{
	QString group = QString::fromStdString(key).split("/").first();
	if(!ignoredGroups_.contains(group))
	{
		UASSERT_MSG(parameters_.find(key) != parameters_.end(), uFormat("key=\"%s\"", key.c_str()).c_str());
		parameters_.at(key) = value;
		QWidget * widget = this->findChild<QWidget*>(key.c_str());
		QString type = QString::fromStdString(Parameters::getType(key));
		if(type.compare("string") == 0)
		{
			QString valueQt = QString::fromStdString(value);
			if(valueQt.contains(';'))
			{
				// It's a list, just change the index
				QStringList splitted = valueQt.split(':');
				((QComboBox*)widget)->setCurrentIndex(splitted.first().toInt());
			}
			else
			{
				((QLineEdit*)widget)->setText(valueQt);
			}
		}
		else if(type.compare("int") == 0)
		{
			((QSpinBox*)widget)->setValue(uStr2Int(value));
		}
		else if(type.compare("uint") == 0)
		{
			((QSpinBox*)widget)->setValue(uStr2Int(value));
		}
		else if(type.compare("double") == 0)
		{
			((QDoubleSpinBox*)widget)->setValue(uStr2Double(value));
		}
		else if(type.compare("float") == 0)
		{
			((QDoubleSpinBox*)widget)->setValue(uStr2Float(value));
		}
		else if(type.compare("bool") == 0)
		{
			((QCheckBox*)widget)->setChecked(uStr2Bool(value));
		}
	}
}
Example #9
void DBDriver::loadNodeData(std::list<Signature *> & signatures, bool loadMetricData) const
{
	// Don't look in the trash; we assume that if we want to load
	// data of a signature, it is not in the trash! Print an error if so.
	_trashesMutex.lock();
	if(_trashSignatures.size())
	{
		for(std::list<Signature *>::iterator iter=signatures.begin(); iter!=signatures.end(); ++iter)
		{
			UASSERT(*iter != 0);
			UASSERT_MSG(!uContains(_trashSignatures, (*iter)->id()), uFormat("Signature %d should not be used when transferred to trash!!!!", (*iter)->id()).c_str());
		}
	}
	_trashesMutex.unlock();

	_dbSafeAccessMutex.lock();
	this->loadNodeDataQuery(signatures, loadMetricData);
	_dbSafeAccessMutex.unlock();
}
Example #10
void CommonDataSubscriber::warningLoop()
{
	ros::Duration r(5.0);
	while(!callbackCalled_)
	{
		r.sleep();
		if(!callbackCalled_)
		{
			ROS_WARN("%s: Did not receive data since 5 seconds! Make sure the input topics are "
					"published (\"$ rostopic hz my_topic\") and the timestamps in their "
					"header are set. If topics are coming from different computers, make sure "
					"the clocks of the computers are synchronized (\"ntpdate\"). %s%s",
					name_.c_str(),
					approxSync_?
							uFormat("If topics are not published at the same rate, you could increase \"queue_size\" parameter (current=%d).", queueSize_).c_str():
							"Parameter \"approx_sync\" is false, which means that input topics should have all the exact timestamp for the callback to be called.",
					subscribedTopicsMsg_.c_str());
		}
	}
}
Example #11
cv::Mat uncompressData(const unsigned char * bytes, unsigned long size)
{
	cv::Mat data;
	if(bytes && size>=3*sizeof(int))
	{
		//last 3 int elements are matrix size and type
		int height = *((int*)&bytes[size-3*sizeof(int)]);
		int width = *((int*)&bytes[size-2*sizeof(int)]);
		int type = *((int*)&bytes[size-1*sizeof(int)]);

		// If the size is higher, it may be a wrong data format.
		UASSERT_MSG(height>=0 && height<10000 &&
				    width>=0 && width<10000,
				    uFormat("size=%d, height=%d width=%d type=%d", size, height, width, type).c_str());

		data = cv::Mat(height, width, type);
		uLongf totalUncompressed = uLongf(data.total())*uLongf(data.elemSize());

		int errCode = uncompress(
						(Bytef*)data.data,
						&totalUncompressed,
						(const Bytef*)bytes,
						uLong(size));

		if(errCode == Z_MEM_ERROR)
		{
			UERROR("Z_MEM_ERROR : Insufficient memory.");
		}
		else if(errCode == Z_BUF_ERROR)
		{
			UERROR("Z_BUF_ERROR : The buffer dest was not large enough to hold the uncompressed data.");
		}
		else if(errCode == Z_DATA_ERROR)
		{
			UERROR("Z_DATA_ERROR : The compressed data (referenced by source) was corrupted.");
		}
	}
	return data;
}
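uncompressData() above expects the zlib-compressed pixel buffer to be followed by three trailing ints: rows, cols and the OpenCV type. The function below is a hypothetical producer-side sketch of that layout, not RTAB-Map's actual compressData(); it assumes a continuous cv::Mat and plain zlib compress().

#include <opencv2/core/core.hpp>
#include <zlib.h>
#include <cstring>
#include <vector>

std::vector<unsigned char> compressDataSketch(const cv::Mat & mat)
{
	// Assumes a continuous matrix so its data can be compressed in one block.
	CV_Assert(mat.isContinuous());

	uLongf compressedLen = compressBound(uLong(mat.total() * mat.elemSize()));
	std::vector<unsigned char> bytes(compressedLen + 3 * sizeof(int));

	if(compress((Bytef *)bytes.data(), &compressedLen,
	            (const Bytef *)mat.data, uLong(mat.total() * mat.elemSize())) != Z_OK)
	{
		return std::vector<unsigned char>();
	}

	// Trailing header read back by uncompressData(): rows, cols, type.
	int rows = mat.rows, cols = mat.cols, type = mat.type();
	bytes.resize(compressedLen + 3 * sizeof(int));
	std::memcpy(&bytes[compressedLen], &rows, sizeof(int));
	std::memcpy(&bytes[compressedLen + sizeof(int)], &cols, sizeof(int));
	std::memcpy(&bytes[compressedLen + 2 * sizeof(int)], &type, sizeof(int));
	return bytes;
}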
Example #12
void CommonDataSubscriber::setupRGBD2Callbacks(
		ros::NodeHandle & nh,
		ros::NodeHandle & pnh,
		bool subscribeOdom,
		bool subscribeUserData,
		bool subscribeScan2d,
		bool subscribeScan3d,
		bool subscribeOdomInfo,
		int queueSize,
		bool approxSync)
{
	ROS_INFO("Setup rgbd2 callback");

	rgbdSubs_.resize(2);
	for(int i=0; i<2; ++i)
	{
		rgbdSubs_[i] = new message_filters::Subscriber<rtabmap_ros::RGBDImage>;
		rgbdSubs_[i]->subscribe(nh, uFormat("rgbd_image%d", i), 1);
	}
	if(subscribeOdom && subscribeUserData)
	{
		odomSub_.subscribe(nh, "odom", 1);
		userDataSub_.subscribe(nh, "user_data", 1);
		if(subscribeScan2d)
		{
			subscribedToScan2d_ = true;
			scanSub_.subscribe(nh, "scan", 1);
			SYNC_DECL5(rgbd2OdomDataScan2d, approxSync, queueSize, odomSub_, userDataSub_, (*rgbdSubs_[0]), (*rgbdSubs_[1]), scanSub_);
		}
		else if(subscribeScan3d)
		{
			subscribedToScan3d_ = true;
			scan3dSub_.subscribe(nh, "scan_cloud", 1);
			SYNC_DECL5(rgbd2OdomDataScan3d, approxSync, queueSize, odomSub_, userDataSub_, (*rgbdSubs_[0]), (*rgbdSubs_[1]), scan3dSub_);
		}
		else if(subscribeOdomInfo)
		{
			subscribedToOdomInfo_ = true;
			odomInfoSub_.subscribe(nh, "odom_info", 1);
			SYNC_DECL5(rgbd2OdomDataInfo, approxSync, queueSize, odomSub_, userDataSub_, (*rgbdSubs_[0]), (*rgbdSubs_[1]), odomInfoSub_);
		}
		else
		{
			SYNC_DECL4(rgbd2OdomData, approxSync, queueSize, odomSub_, userDataSub_, (*rgbdSubs_[0]), (*rgbdSubs_[1]));
		}
	}
	else if(subscribeOdom)
	{
		odomSub_.subscribe(nh, "odom", 1);
		if(subscribeScan2d)
		{
			subscribedToScan2d_ = true;
			scanSub_.subscribe(nh, "scan", 1);
			SYNC_DECL4(rgbd2OdomScan2d, approxSync, queueSize, odomSub_, (*rgbdSubs_[0]), (*rgbdSubs_[1]), scanSub_);
		}
		else if(subscribeScan3d)
		{
			subscribedToScan3d_ = true;
			scan3dSub_.subscribe(nh, "scan_cloud", 1);
			SYNC_DECL4(rgbd2OdomScan3d, approxSync, queueSize, odomSub_, (*rgbdSubs_[0]), (*rgbdSubs_[1]), scan3dSub_);
		}
		else if(subscribeOdomInfo)
		{
			subscribedToOdomInfo_ = true;
			odomInfoSub_.subscribe(nh, "odom_info", 1);
			SYNC_DECL4(rgbd2OdomInfo, approxSync, queueSize, odomSub_, (*rgbdSubs_[0]), (*rgbdSubs_[1]), odomInfoSub_);
		}
		else
		{
			SYNC_DECL3(rgbd2Odom, approxSync, queueSize, odomSub_, (*rgbdSubs_[0]), (*rgbdSubs_[1]));
		}
	}
	else if(subscribeUserData)
	{
		userDataSub_.subscribe(nh, "user_data", 1);
		if(subscribeScan2d)
		{
			subscribedToScan2d_ = true;
			scanSub_.subscribe(nh, "scan", 1);
			SYNC_DECL4(rgbd2DataScan2d, approxSync, queueSize, userDataSub_, (*rgbdSubs_[0]), (*rgbdSubs_[1]), scanSub_);
		}
		else if(subscribeScan3d)
		{
			subscribedToScan3d_ = true;
			scan3dSub_.subscribe(nh, "scan_cloud", 1);
			SYNC_DECL4(rgbd2DataScan3d, approxSync, queueSize, userDataSub_, (*rgbdSubs_[0]), (*rgbdSubs_[1]), scan3dSub_);
		}
		else if(subscribeOdomInfo)
		{
			subscribedToOdomInfo_ = true;
			odomInfoSub_.subscribe(nh, "odom_info", 1);
			SYNC_DECL4(rgbd2DataInfo, approxSync, queueSize, userDataSub_, (*rgbdSubs_[0]), (*rgbdSubs_[1]), odomInfoSub_);
		}
		else
		{
			SYNC_DECL3(rgbd2Data, approxSync, queueSize, userDataSub_, (*rgbdSubs_[0]), (*rgbdSubs_[1]));
		}
	}
	else
	{
		if(subscribeScan2d)
		{
			subscribedToScan2d_ = true;
			scanSub_.subscribe(nh, "scan", 1);
			SYNC_DECL3(rgbd2Scan2d, approxSync, queueSize, (*rgbdSubs_[0]), (*rgbdSubs_[1]), scanSub_);
		}
		else if(subscribeScan3d)
		{
			subscribedToScan3d_ = true;
			scan3dSub_.subscribe(nh, "scan_cloud", 1);
			SYNC_DECL3(rgbd2Scan3d, approxSync, queueSize, (*rgbdSubs_[0]), (*rgbdSubs_[1]), scan3dSub_);
		}
		else if(subscribeOdomInfo)
		{
			subscribedToOdomInfo_ = true;
			odomInfoSub_.subscribe(nh, "odom_info", 1);
			SYNC_DECL3(rgbd2Info, approxSync, queueSize, (*rgbdSubs_[0]), (*rgbdSubs_[1]), odomInfoSub_);
		}
		else
		{
			SYNC_DECL2(rgbd2, approxSync, queueSize, (*rgbdSubs_[0]), (*rgbdSubs_[1]));
		}
	}
}
Example #13
void CommonDataSubscriber::setupRGBDCallbacks(
		ros::NodeHandle & nh,
		ros::NodeHandle & pnh,
		bool subscribeOdom,
		bool subscribeUserData,
		bool subscribeScan2d,
		bool subscribeScan3d,
		bool subscribeOdomInfo,
		int queueSize,
		bool approxSync)
{
	ROS_INFO("Setup rgbd callback");

	if(subscribeOdom || subscribeUserData || subscribeScan2d || subscribeScan3d || subscribeOdomInfo)
	{
		rgbdSubs_.resize(1);
		rgbdSubs_[0] = new message_filters::Subscriber<rtabmap_ros::RGBDImage>;
		rgbdSubs_[0]->subscribe(nh, "rgbd_image", 1);


		if(subscribeOdom && subscribeUserData)
		{
			odomSub_.subscribe(nh, "odom", 1);
			userDataSub_.subscribe(nh, "user_data", 1);
			if(subscribeScan2d)
			{
				subscribedToScan2d_ = true;
				scanSub_.subscribe(nh, "scan", 1);
				SYNC_DECL4(rgbdOdomDataScan2d, approxSync, queueSize, odomSub_, userDataSub_, (*rgbdSubs_[0]), scanSub_);
			}
			else if(subscribeScan3d)
			{
				subscribedToScan3d_ = true;
				scan3dSub_.subscribe(nh, "scan_cloud", 1);
				SYNC_DECL4(rgbdOdomDataScan3d, approxSync, queueSize, odomSub_, userDataSub_, (*rgbdSubs_[0]), scan3dSub_);
			}
			else if(subscribeOdomInfo)
			{
				subscribedToOdomInfo_ = true;
				odomInfoSub_.subscribe(nh, "odom_info", 1);
				SYNC_DECL4(rgbdOdomDataInfo, approxSync, queueSize, odomSub_, userDataSub_, (*rgbdSubs_[0]), odomInfoSub_);
			}
			else
			{
				SYNC_DECL3(rgbdOdomData, approxSync, queueSize, odomSub_, userDataSub_, (*rgbdSubs_[0]));
			}
		}
		else if(subscribeOdom)
		{
			odomSub_.subscribe(nh, "odom", 1);
			if(subscribeScan2d)
			{
				subscribedToScan2d_ = true;
				scanSub_.subscribe(nh, "scan", 1);
				SYNC_DECL3(rgbdOdomScan2d, approxSync, queueSize, odomSub_, (*rgbdSubs_[0]), scanSub_);
			}
			else if(subscribeScan3d)
			{
				subscribedToScan3d_ = true;
				scan3dSub_.subscribe(nh, "scan_cloud", 1);
				SYNC_DECL3(rgbdOdomScan3d, approxSync, queueSize, odomSub_, (*rgbdSubs_[0]), scan3dSub_);
			}
			else if(subscribeOdomInfo)
			{
				subscribedToOdomInfo_ = true;
				odomInfoSub_.subscribe(nh, "odom_info", 1);
				SYNC_DECL3(rgbdOdomInfo, approxSync, queueSize, odomSub_, (*rgbdSubs_[0]), odomInfoSub_);
			}
			else
			{
				SYNC_DECL2(rgbdOdom, approxSync, queueSize, odomSub_, (*rgbdSubs_[0]));
			}
		}
		else if(subscribeUserData)
		{
			userDataSub_.subscribe(nh, "user_data", 1);
			if(subscribeScan2d)
			{
				subscribedToScan2d_ = true;
				scanSub_.subscribe(nh, "scan", 1);
				SYNC_DECL3(rgbdDataScan2d, approxSync, queueSize, userDataSub_, (*rgbdSubs_[0]), scanSub_);
			}
			else if(subscribeScan3d)
			{
				subscribedToScan3d_ = true;
				scan3dSub_.subscribe(nh, "scan_cloud", 1);
				SYNC_DECL3(rgbdDataScan3d, approxSync, queueSize, userDataSub_, (*rgbdSubs_[0]), scan3dSub_);
			}
			else if(subscribeOdomInfo)
			{
				subscribedToOdomInfo_ = true;
				odomInfoSub_.subscribe(nh, "odom_info", 1);
				SYNC_DECL3(rgbdDataInfo, approxSync, queueSize, userDataSub_, (*rgbdSubs_[0]), odomInfoSub_);
			}
			else
			{
				SYNC_DECL2(rgbdData, approxSync, queueSize, userDataSub_, (*rgbdSubs_[0]));
			}
		}
		else
		{
			if(subscribeScan2d)
			{
				subscribedToScan2d_ = true;
				scanSub_.subscribe(nh, "scan", 1);
				SYNC_DECL2(rgbdScan2d, approxSync, queueSize, (*rgbdSubs_[0]), scanSub_);
			}
			else if(subscribeScan3d)
			{
				subscribedToScan3d_ = true;
				scan3dSub_.subscribe(nh, "scan_cloud", 1);
				SYNC_DECL2(rgbdScan3d, approxSync, queueSize, (*rgbdSubs_[0]), scan3dSub_);
			}
			else if(subscribeOdomInfo)
			{
				subscribedToOdomInfo_ = true;
				odomInfoSub_.subscribe(nh, "odom_info", 1);
				SYNC_DECL2(rgbdInfo, approxSync, queueSize, (*rgbdSubs_[0]), odomInfoSub_);
			}
			else
			{
				ROS_FATAL("Not supposed to be here!");
			}
		}
	}
	else
	{
		rgbdSub_ = nh.subscribe("rgbd_image", 1, &CommonDataSubscriber::rgbdCallback, this);

		subscribedTopicsMsg_ =
				uFormat("\n%s subscribed to:\n   %s",
				ros::this_node::getName().c_str(),
				rgbdSub_.getTopic().c_str());
	}
}
Example #14
SensorData CameraStereoFlyCapture2::captureImage(CameraInfo * info)
{
	SensorData data;
#ifdef RTABMAP_FLYCAPTURE2
	if(camera_ && triclopsCtx_ && camera_->IsConnected())
	{
		// grab image from camera.
		// this image contains both the right and left images
		FlyCapture2::Image grabbedImage;
		if(camera_->RetrieveBuffer(&grabbedImage) == FlyCapture2::PGRERROR_OK)
		{
			// right and left image extracted from grabbed image
			ImageContainer imageCont;

			TriclopsColorStereoPair colorStereoInput;
			generateColorStereoInput(triclopsCtx_, grabbedImage, imageCont, colorStereoInput);

			// rectify images
			TriclopsError triclops_status = triclopsColorRectify(triclopsCtx_, &colorStereoInput);
			UASSERT_MSG(triclops_status == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)triclops_status).c_str());

			// get images
			cv::Mat left,right;
			TriclopsColorImage color_image;
			triclops_status = triclopsGetColorImage(triclopsCtx_, TriImg_RECTIFIED_COLOR, TriCam_LEFT, &color_image);
			UASSERT_MSG(triclops_status == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)triclops_status).c_str());
			cv::cvtColor(cv::Mat(color_image.nrows, color_image.ncols, CV_8UC4, color_image.data), left, CV_RGBA2RGB);
			triclops_status = triclopsGetColorImage(triclopsCtx_, TriImg_RECTIFIED_COLOR, TriCam_RIGHT, &color_image);
			UASSERT_MSG(triclops_status == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)triclops_status).c_str());
			cv::cvtColor(cv::Mat(color_image.nrows, color_image.ncols, CV_8UC4, color_image.data), right, CV_RGBA2GRAY);

			// Set calibration stuff
			float fx, cy, cx, baseline;
			triclopsGetFocalLength(triclopsCtx_, &fx);
			triclopsGetImageCenter(triclopsCtx_, &cy, &cx);
			triclopsGetBaseline(triclopsCtx_, &baseline);
			cx *= left.cols;
			cy *= left.rows;

			StereoCameraModel model(
				fx,
				fx,
				cx,
				cy,
				baseline,
				this->getLocalTransform(),
				left.size());
			data = SensorData(left, right, model, this->getNextSeqID(), UTimer::now());

			// Compute disparity
			/*triclops_status = triclopsStereo(triclopsCtx_);
			UASSERT_MSG(triclops_status == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)triclops_status).c_str());

			TriclopsImage16 disparity_image;
			triclops_status = triclopsGetImage16(triclopsCtx_, TriImg16_DISPARITY, TriCam_REFERENCE, &disparity_image);
			UASSERT_MSG(triclops_status == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)triclops_status).c_str());
			cv::Mat depth(disparity_image.nrows, disparity_image.ncols, CV_32FC1);
			int pixelinc = disparity_image.rowinc / 2;
			float x, y;
			for (int i = 0, k = 0; i < disparity_image.nrows; i++) {
				unsigned short *row = disparity_image.data + i * pixelinc;
				float *rowOut = (float *)depth.row(i).data;
				for (int j = 0; j < disparity_image.ncols; j++, k++) {
					unsigned short disparity = row[j];

					// do not save invalid points
					if (disparity < 0xFFF0) {
						// convert the 16 bit disparity value to floating point x,y,z
						triclopsRCD16ToXYZ(triclopsCtx_, i, j, disparity, &x, &y, &rowOut[j]);
					}
				}
			}
			CameraModel model(fx, fx, cx, cy, this->getLocalTransform(), 0, left.size());
			data = SensorData(left, depth, model, this->getNextSeqID(), UTimer::now());
			*/
		}
	}

#else
	UERROR("CameraStereoFlyCapture2: RTAB-Map is not built with Triclops support!");
#endif
	return data;
}
Example #15
Transform Odometry::process(SensorData & data, OdometryInfo * info)
{
	if(_pose.isNull())
	{
		_pose.setIdentity(); // initialized
	}

	UASSERT(!data.imageRaw().empty());

	if(!data.stereoCameraModel().isValidForProjection() &&
	   (data.cameraModels().size() == 0 || !data.cameraModels()[0].isValidForProjection()))
	{
		UERROR("Rectified images required! Calibrate your camera.");
		return Transform();
	}

	double dt = previousStamp_>0.0f?data.stamp() - previousStamp_:0.0;
	Transform guess = dt && guessFromMotion_?Transform::getIdentity():Transform();
	UASSERT_MSG(dt>0.0 || (dt == 0.0 && previousVelocityTransform_.isNull()), uFormat("dt=%f previous transform=%s", dt, previousVelocityTransform_.prettyPrint().c_str()).c_str());
	if(!previousVelocityTransform_.isNull())
	{
		if(guessFromMotion_)
		{
			if(_filteringStrategy == 1)
			{
				// use Kalman predict transform
				float vx,vy,vz, vroll,vpitch,vyaw;
				predictKalmanFilter(dt, &vx,&vy,&vz,&vroll,&vpitch,&vyaw);
				guess = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
			else
			{
				float vx,vy,vz, vroll,vpitch,vyaw;
				previousVelocityTransform_.getTranslationAndEulerAngles(vx,vy,vz, vroll,vpitch,vyaw);
				guess = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
		}
		else if(_filteringStrategy == 1)
		{
			predictKalmanFilter(dt);
		}
	}

	UTimer time;
	Transform t;
	if(_imageDecimation > 1)
	{
		// Decimation of images with calibrations
		SensorData decimatedData = data;
		decimatedData.setImageRaw(util2d::decimate(decimatedData.imageRaw(), _imageDecimation));
		decimatedData.setDepthOrRightRaw(util2d::decimate(decimatedData.depthOrRightRaw(), _imageDecimation));
		std::vector<CameraModel> cameraModels = decimatedData.cameraModels();
		for(unsigned int i=0; i<cameraModels.size(); ++i)
		{
			cameraModels[i] = cameraModels[i].scaled(1.0/double(_imageDecimation));
		}
		decimatedData.setCameraModels(cameraModels);
		StereoCameraModel stereoModel = decimatedData.stereoCameraModel();
		if(stereoModel.isValidForProjection())
		{
			stereoModel.scale(1.0/double(_imageDecimation));
		}
		decimatedData.setStereoCameraModel(stereoModel);

		// compute transform
		t = this->computeTransform(decimatedData, guess, info);

		// transform back the keypoints in the original image
		std::vector<cv::KeyPoint> kpts = decimatedData.keypoints();
		double log2value = log(double(_imageDecimation))/log(2.0);
		for(unsigned int i=0; i<kpts.size(); ++i)
		{
			kpts[i].pt.x *= _imageDecimation;
			kpts[i].pt.y *= _imageDecimation;
			kpts[i].size *= _imageDecimation;
			kpts[i].octave += log2value;
		}
		data.setFeatures(kpts, decimatedData.descriptors());

		if(info)
		{
			UASSERT(info->newCorners.size() == info->refCorners.size());
			for(unsigned int i=0; i<info->newCorners.size(); ++i)
			{
				info->refCorners[i].x *= _imageDecimation;
				info->refCorners[i].y *= _imageDecimation;
				info->newCorners[i].x *= _imageDecimation;
				info->newCorners[i].y *= _imageDecimation;
			}
			for(std::multimap<int, cv::KeyPoint>::iterator iter=info->words.begin(); iter!=info->words.end(); ++iter)
			{
				iter->second.pt.x *= _imageDecimation;
				iter->second.pt.y *= _imageDecimation;
				iter->second.size *= _imageDecimation;
				iter->second.octave += log2value;
			}
		}
	}
	else
	{
		t = this->computeTransform(data, guess, info);
	}

	if(info)
	{
		info->timeEstimation = time.ticks();
		info->lost = t.isNull();
		info->stamp = data.stamp();
		info->interval = dt;
		info->transform = t;

		if(!data.groundTruth().isNull())
		{
			if(!previousGroundTruthPose_.isNull())
			{
				info->transformGroundTruth = previousGroundTruthPose_.inverse() * data.groundTruth();
			}
			previousGroundTruthPose_ = data.groundTruth();
		}
	}

	if(!t.isNull())
	{
		_resetCurrentCount = _resetCountdown;

		float vx,vy,vz, vroll,vpitch,vyaw;
		t.getTranslationAndEulerAngles(vx,vy,vz, vroll,vpitch,vyaw);

		// transform to velocity
		if(dt)
		{
			vx /= dt;
			vy /= dt;
			vz /= dt;
			vroll /= dt;
			vpitch /= dt;
			vyaw /= dt;
		}

		if(_force3DoF || !_holonomic || particleFilters_.size() || _filteringStrategy==1)
		{
			if(_filteringStrategy == 1)
			{
				if(previousVelocityTransform_.isNull())
				{
					// reset Kalman
					if(dt)
					{
						initKalmanFilter(t, vx,vy,vz,vroll,vpitch,vyaw);
					}
					else
					{
						initKalmanFilter(t);
					}
				}
				else
				{
					// Kalman filtering
					updateKalmanFilter(vx,vy,vz,vroll,vpitch,vyaw);
				}
			}
			else if(particleFilters_.size())
			{
				// Particle filtering
				UASSERT(particleFilters_.size()==6);
				if(previousVelocityTransform_.isNull())
				{
					particleFilters_[0]->init(vx);
					particleFilters_[1]->init(vy);
					particleFilters_[2]->init(vz);
					particleFilters_[3]->init(vroll);
					particleFilters_[4]->init(vpitch);
					particleFilters_[5]->init(vyaw);
				}
				else
				{
					vx = particleFilters_[0]->filter(vx);
					vy = particleFilters_[1]->filter(vy);
					vyaw = particleFilters_[5]->filter(vyaw);

					if(!_holonomic)
					{
						// arc trajectory around ICR
						float tmpY = vyaw!=0.0f ? vx / tan((CV_PI-vyaw)/2.0f) : 0.0f;
						if(fabs(tmpY) < fabs(vy) || (tmpY<=0 && vy >=0) || (tmpY>=0 && vy<=0))
						{
							vy = tmpY;
						}
						else
						{
							vyaw = (atan(vx/vy)*2.0f-CV_PI)*-1;
						}
					}

					if(!_force3DoF)
					{
						vz = particleFilters_[2]->filter(vz);
						vroll = particleFilters_[3]->filter(vroll);
						vpitch = particleFilters_[4]->filter(vpitch);
					}
				}

				if(info)
				{
					info->timeParticleFiltering = time.ticks();
				}

				if(_force3DoF)
				{
					vz = 0.0f;
					vroll = 0.0f;
					vpitch = 0.0f;
				}
			}
			else if(!_holonomic)
			{
				// arc trajectory around ICR
				vy = vyaw!=0.0f ? vx / tan((CV_PI-vyaw)/2.0f) : 0.0f;
				if(_force3DoF)
				{
					vz = 0.0f;
					vroll = 0.0f;
					vpitch = 0.0f;
				}
			}

			if(dt)
			{
				t = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
			else
			{
				t = Transform(vx, vy, vz, vroll, vpitch, vyaw);
			}

			if(info)
			{
				info->transformFiltered = t;
			}
		}

		previousStamp_ = data.stamp();
		previousVelocityTransform_.setNull();
		if(dt)
		{
			previousVelocityTransform_ = Transform(vx, vy, vz, vroll, vpitch, vyaw);
		}

		if(info)
		{
			distanceTravelled_ += t.getNorm();
			info->distanceTravelled = distanceTravelled_;
		}

		return _pose *= t; // updated
	}
	else if(_resetCurrentCount > 0)
	{
		UWARN("Odometry lost! Odometry will be reset after next %d consecutive unsuccessful odometry updates...", _resetCurrentCount);

		--_resetCurrentCount;
		if(_resetCurrentCount == 0)
		{
			UWARN("Odometry automatically reset to latest pose!");
			this->reset(_pose);
		}
	}

	previousVelocityTransform_.setNull();
	previousStamp_ = 0;

	return Transform();
}
Example #16
void OdometryViewer::processData(const rtabmap::OdometryEvent & odom)
{
	processingData_ = true;
	int quality = odom.info().inliers;

	bool lost = false;
	bool lostStateChanged = false;

	if(odom.pose().isNull())
	{
		UDEBUG("odom lost"); // use last pose
		lostStateChanged = imageView_->getBackgroundColor() != Qt::darkRed;
		imageView_->setBackgroundColor(Qt::darkRed);
		cloudView_->setBackgroundColor(Qt::darkRed);

		lost = true;
	}
	else if(odom.info().inliers>0 &&
			qualityWarningThr_ &&
			odom.info().inliers < qualityWarningThr_)
	{
		UDEBUG("odom warn, quality(inliers)=%d thr=%d", odom.info().inliers, qualityWarningThr_);
		lostStateChanged = imageView_->getBackgroundColor() == Qt::darkRed;
		imageView_->setBackgroundColor(Qt::darkYellow);
		cloudView_->setBackgroundColor(Qt::darkYellow);
	}
	else
	{
		UDEBUG("odom ok");
		lostStateChanged = imageView_->getBackgroundColor() == Qt::darkRed;
		imageView_->setBackgroundColor(cloudView_->getDefaultBackgroundColor());
		cloudView_->setBackgroundColor(Qt::black);
	}

	timeLabel_->setText(QString("%1 s").arg(odom.info().timeEstimation));

	if(!odom.data().imageRaw().empty() &&
		!odom.data().depthOrRightRaw().empty() &&
		(odom.data().stereoCameraModel().isValid() || odom.data().cameraModels().size()))
	{
		UDEBUG("New pose = %s, quality=%d", odom.pose().prettyPrint().c_str(), quality);

		if(!odom.data().depthRaw().empty())
		{
			if(odom.data().imageRaw().cols % decimationSpin_->value() == 0 &&
			   odom.data().imageRaw().rows % decimationSpin_->value() == 0)
			{
				validDecimationValue_ = decimationSpin_->value();
			}
			else
			{
				UWARN("Decimation (%d) must be a denominator of the width and height of "
						"the image (%d/%d). Using last valid decimation value (%d).",
						decimationSpin_->value(),
						odom.data().imageRaw().cols,
						odom.data().imageRaw().rows,
						validDecimationValue_);
			}
		}
		else
		{
			validDecimationValue_ = decimationSpin_->value();
		}

		// visualization: buffering the clouds
		// Create the new cloud
		pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud;
		cloud = util3d::cloudRGBFromSensorData(
				odom.data(),
				validDecimationValue_,
				0.0f,
				voxelSpin_->value());

		if(cloud->size())
		{
			if(!odom.pose().isNull())
			{
				if(cloudView_->getAddedClouds().contains("cloudtmp"))
				{
					cloudView_->removeCloud("cloudtmp");
				}

				while(maxCloudsSpin_->value()>0 && (int)addedClouds_.size() > maxCloudsSpin_->value())
				{
					UASSERT(cloudView_->removeCloud(addedClouds_.first()));
					addedClouds_.pop_front();
				}

				odom.data().id()?id_=odom.data().id():++id_;
				std::string cloudName = uFormat("cloud%d", id_);
				addedClouds_.push_back(cloudName);
				UASSERT(cloudView_->addCloud(cloudName, cloud, odom.pose()));
			}
			else
			{
				cloudView_->addOrUpdateCloud("cloudtmp", cloud, lastOdomPose_);
			}
		}
	}

	if(!odom.pose().isNull())
	{
		lastOdomPose_ = odom.pose();
		cloudView_->updateCameraTargetPosition(odom.pose());
	}

	if(odom.info().localMap.size())
	{
		pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
		cloud->resize(odom.info().localMap.size());
		int i=0;
		for(std::multimap<int, cv::Point3f>::const_iterator iter=odom.info().localMap.begin(); iter!=odom.info().localMap.end(); ++iter)
		{
			(*cloud)[i].x = iter->second.x;
			(*cloud)[i].y = iter->second.y;
			(*cloud)[i++].z = iter->second.z;
		}
		cloudView_->addOrUpdateCloud("localmap", cloud);
	}

	if(!odom.data().imageRaw().empty())
	{
		if(odom.info().type == 0)
		{
			imageView_->setFeatures(odom.info().words, odom.data().depthRaw(), Qt::yellow);
		}
		else if(odom.info().type == 1)
		{
			std::vector<cv::KeyPoint> kpts;
			cv::KeyPoint::convert(odom.info().refCorners, kpts);
			imageView_->setFeatures(kpts, odom.data().depthRaw(), Qt::red);
		}

		imageView_->clearLines();
		if(lost)
		{
			if(lostStateChanged)
			{
				// save state
				odomImageShow_ = imageView_->isImageShown();
				odomImageDepthShow_ = imageView_->isImageDepthShown();
			}
			imageView_->setImageDepth(uCvMat2QImage(odom.data().imageRaw()));
			imageView_->setImageShown(true);
			imageView_->setImageDepthShown(true);
		}
		else
		{
			if(lostStateChanged)
			{
				// restore state
				imageView_->setImageShown(odomImageShow_);
				imageView_->setImageDepthShown(odomImageDepthShow_);
			}

			imageView_->setImage(uCvMat2QImage(odom.data().imageRaw()));
			if(imageView_->isImageDepthShown())
			{
				imageView_->setImageDepth(uCvMat2QImage(odom.data().depthOrRightRaw()));
			}

			if(odom.info().type == 0)
			{
				if(imageView_->isFeaturesShown())
				{
					for(unsigned int i=0; i<odom.info().wordMatches.size(); ++i)
					{
						imageView_->setFeatureColor(odom.info().wordMatches[i], Qt::red); // outliers
					}
					for(unsigned int i=0; i<odom.info().wordInliers.size(); ++i)
					{
						imageView_->setFeatureColor(odom.info().wordInliers[i], Qt::green); // inliers
					}
				}
			}
		}
		if(odom.info().type == 1 && odom.info().cornerInliers.size())
		{
			if(imageView_->isFeaturesShown() || imageView_->isLinesShown())
			{
				//draw lines
				UASSERT(odom.info().refCorners.size() == odom.info().newCorners.size());
				for(unsigned int i=0; i<odom.info().cornerInliers.size(); ++i)
				{
					if(imageView_->isFeaturesShown())
					{
						imageView_->setFeatureColor(odom.info().cornerInliers[i], Qt::green); // inliers
					}
					if(imageView_->isLinesShown())
					{
						imageView_->addLine(
								odom.info().refCorners[odom.info().cornerInliers[i]].x,
								odom.info().refCorners[odom.info().cornerInliers[i]].y,
								odom.info().newCorners[odom.info().cornerInliers[i]].x,
								odom.info().newCorners[odom.info().cornerInliers[i]].y,
								Qt::blue);
					}
				}
			}
		}

		if(!odom.data().imageRaw().empty())
		{
			imageView_->setSceneRect(QRectF(0,0,(float)odom.data().imageRaw().cols, (float)odom.data().imageRaw().rows));
		}
	}

	imageView_->update();
	cloudView_->update();
	QApplication::processEvents();
	processingData_ = false;
}
Example #17
void ULogger::write(ULogger::Level level,
		const char * file,
		int line,
		const char * function,
		const char* msg,
		...)
{
	if(exitingState_)
	{
		// Ignore messages after a fatal exit...
		return;
	}
	loggerMutex_.lock();
	if(type_ == kTypeNoLog && level < kFatal)
	{
		loggerMutex_.unlock();
		return;
	}
	if(strlen(msg) == 0 && !printWhere_ && level < exitLevel_)
	{
		loggerMutex_.unlock();
		// No need to show an empty message if we don't print where.
		return;
	}

    if(level >= level_)
    {
#ifdef WIN32
    	int color = 0;
#else
    	const char* color = NULL;
#endif
    	switch(level)
    	{
    	case kDebug:
    		color = COLOR_GREEN;
    		break;
    	case kInfo:
    		color = COLOR_NORMAL;
    		break;
    	case kWarning:
    		color = COLOR_YELLOW;
    		break;
    	case kError:
    	case kFatal:
    		color = COLOR_RED;
    		break;
    	default:
    		break;
    	}

		std::string endline = "";
		if(printEndline_) {
			endline = "\r\n";
		}

		std::string time = "";
		if(printTime_)
		{
			time.append("(");
			getTime(time);
			time.append(") ");
		}

		std::string levelStr = "";
		if(printLevel_)
		{
			const int bufSize = 30;
			char buf[bufSize] = {0};

#ifdef _MSC_VER
			sprintf_s(buf, bufSize, "[%s]", levelName_[level]);
#else
			snprintf(buf, bufSize, "[%s]", levelName_[level]);
#endif
			levelStr = buf;
			levelStr.append(" ");
		}

		std::string whereStr = "";
		if(printWhere_)
		{
			whereStr.append("");
			//File
			if(printWhereFullPath_)
			{
				whereStr.append(file);
			}
			else
			{
				std::string fileName = UFile::getName(file);
				if(limitWhereLength_ && fileName.size() > 8)
				{
					fileName.erase(8);
					fileName.append("~");
				}
				whereStr.append(fileName);
			}

			//Line
			whereStr.append(":");
			std::string lineStr = uNumber2Str(line);
			whereStr.append(lineStr);

			//Function
			whereStr.append("::");
			std::string funcStr = function;
			if(!printWhereFullPath_ && limitWhereLength_ && funcStr.size() > 8)
			{
				funcStr.erase(8);
				funcStr.append("~");
			}
			funcStr.append("()");
			whereStr.append(funcStr);

			whereStr.append(" ");
		}

		va_list args;

		if(type_ != kTypeNoLog)
		{
			va_start(args, msg);
#ifdef WIN32
			HANDLE H = GetStdHandle(STD_OUTPUT_HANDLE);
#endif
			if(type_ == ULogger::kTypeConsole && printColored_)
			{
#ifdef WIN32
				SetConsoleTextAttribute(H,color);
#else
				if(buffered_)
				{
					bufferedMsgs_.append(color);
				}
				else
				{
					ULogger::getInstance()->_writeStr(color);
				}
#endif
			}

			if(buffered_)
			{
				bufferedMsgs_.append(levelStr.c_str());
				bufferedMsgs_.append(time.c_str());
				bufferedMsgs_.append(whereStr.c_str());
				bufferedMsgs_.append(uFormatv(msg, args));
			}
			else
			{
				ULogger::getInstance()->_writeStr(levelStr.c_str());
				ULogger::getInstance()->_writeStr(time.c_str());
				ULogger::getInstance()->_writeStr(whereStr.c_str());
				ULogger::getInstance()->_write(msg, args);
			}
			if(type_ == ULogger::kTypeConsole && printColored_)
			{
#ifdef WIN32
				SetConsoleTextAttribute(H,COLOR_NORMAL);
#else
				if(buffered_)
				{
					bufferedMsgs_.append(COLOR_NORMAL);
				}
				else
				{
					ULogger::getInstance()->_writeStr(COLOR_NORMAL);
				}
#endif
			}
			if(buffered_)
			{
				bufferedMsgs_.append(endline.c_str());
			}
			else
			{
				ULogger::getInstance()->_writeStr(endline.c_str());
			}
			va_end (args);
		}

		if(level >= eventLevel_)
		{
			std::string fullMsg = uFormat("%s%s%s", levelStr.c_str(), time.c_str(), whereStr.c_str());
			va_start(args, msg);
			fullMsg.append(uFormatv(msg, args));
			va_end(args);
			if(level >= exitLevel_)
			{
				// Send it synchronously, then receivers
				// can do something before the code (exiting) below is executed.
				exitingState_ = true;
				UEventsManager::post(new ULogEvent(fullMsg, kFatal), false);
			}
			else
			{
				UEventsManager::post(new ULogEvent(fullMsg, level));
			}
		}

		if(level >= exitLevel_)
		{
			printf("\n*******\n%s message occurred! Application will now exit.\n", levelName_[level]);
			if(type_ != kTypeConsole)
			{
				printf("  %s%s%s", levelStr.c_str(), time.c_str(), whereStr.c_str());
				va_start(args, msg);
				vprintf(msg, args);
				va_end(args);
			}
			printf("\n*******\n");
			destroyer_.setDoomed(0);
			delete instance_; // If a FileLogger is used, this will close the file.
			instance_ = 0;
			//========================================================================
			//                          EXIT APPLICATION
			exit(1);
			//========================================================================
		}
    }
    loggerMutex_.unlock();
}
Example #18
bool RTABMapApp::exportMesh(const std::string & filePath)
{
	bool success = false;

	//Assemble the meshes
	if(UFile::getExtension(filePath).compare("obj") == 0)
	{
		pcl::TextureMesh textureMesh;
		std::vector<cv::Mat> textures;
		int totalPolygons = 0;
		pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr mergedClouds(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
		{
			boost::mutex::scoped_lock  lock(meshesMutex_);

			textureMesh.tex_materials.resize(createdMeshes_.size());
			textureMesh.tex_polygons.resize(createdMeshes_.size());
			textureMesh.tex_coordinates.resize(createdMeshes_.size());
			textures.resize(createdMeshes_.size());

			int polygonsStep = 0;
			int oi = 0;
			for(std::map<int, Mesh>::iterator iter=createdMeshes_.begin();
				iter!= createdMeshes_.end();
				++iter)
			{
				UASSERT(!iter->second.cloud->is_dense);

				if(!iter->second.texture.empty() &&
					iter->second.cloud->size() &&
					iter->second.polygons.size())
				{
					// OBJ format requires normals
					pcl::PointCloud<pcl::Normal>::Ptr normals = rtabmap::util3d::computeNormals(iter->second.cloud, 20);

					pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloudWithNormals;
					pcl::concatenateFields(*iter->second.cloud, *normals, *cloudWithNormals);

					// create dense cloud
					pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr denseCloud(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
					std::vector<pcl::Vertices> densePolygons;
					std::map<int, int> newToOldIndices;
					newToOldIndices = rtabmap::util3d::filterNotUsedVerticesFromMesh(
							*cloudWithNormals,
							iter->second.polygons,
							*denseCloud,
							densePolygons);

					// polygons
					UASSERT(densePolygons.size());
					unsigned int polygonSize = densePolygons.front().vertices.size();
					textureMesh.tex_polygons[oi].resize(densePolygons.size());
					textureMesh.tex_coordinates[oi].resize(densePolygons.size() * polygonSize);
					for(unsigned int j=0; j<densePolygons.size(); ++j)
					{
						pcl::Vertices vertices = densePolygons[j];
						UASSERT(polygonSize == vertices.vertices.size());
						for(unsigned int k=0; k<vertices.vertices.size(); ++k)
						{
							//uv
							std::map<int, int>::iterator jter = newToOldIndices.find(vertices.vertices[k]);
							textureMesh.tex_coordinates[oi][j*vertices.vertices.size()+k] = Eigen::Vector2f(
									float(jter->second % iter->second.cloud->width) / float(iter->second.cloud->width),   // u
									float(iter->second.cloud->height - jter->second / iter->second.cloud->width) / float(iter->second.cloud->height)); // v

							vertices.vertices[k] += polygonsStep;
						}
						textureMesh.tex_polygons[oi][j] = vertices;

					}
					totalPolygons += densePolygons.size();
					polygonsStep += denseCloud->size();

					pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr transformedCloud = rtabmap::util3d::transformPointCloud(denseCloud, iter->second.pose);
					if(mergedClouds->size() == 0)
					{
						*mergedClouds = *transformedCloud;
					}
					else
					{
						*mergedClouds += *transformedCloud;
					}

					textures[oi] = iter->second.texture;
					textureMesh.tex_materials[oi].tex_illum = 1;
					textureMesh.tex_materials[oi].tex_name = uFormat("material_%d", iter->first);
					++oi;
				}
				else
				{
					UERROR("Texture not set for mesh %d", iter->first);
				}
			}
			textureMesh.tex_materials.resize(oi);
			textureMesh.tex_polygons.resize(oi);
			textures.resize(oi);

			if(textures.size())
			{
				pcl::toPCLPointCloud2(*mergedClouds, textureMesh.cloud);

				std::string textureDirectory = uSplit(filePath, '.').front();
				UINFO("Saving %d textures to %s.", textures.size(), textureDirectory.c_str());
				UDirectory::makeDir(textureDirectory);
				for(unsigned int i=0;i<textures.size(); ++i)
				{
					cv::Mat rawImage = rtabmap::uncompressImage(textures[i]);
					std::string texFile = textureDirectory+"/"+textureMesh.tex_materials[i].tex_name+".png";
					cv::imwrite(texFile, rawImage);

					UINFO("Saved %s (%d bytes).", texFile.c_str(), rawImage.total()*rawImage.channels());

					// relative path
					textureMesh.tex_materials[i].tex_file = uSplit(UFile::getName(filePath), '.').front()+"/"+textureMesh.tex_materials[i].tex_name+".png";
				}

				UINFO("Saving obj (%d vertices, %d polygons) to %s.", (int)mergedClouds->size(), totalPolygons, filePath.c_str());
				success = pcl::io::saveOBJFile(filePath, textureMesh) == 0;
				if(success)
				{
					UINFO("Saved obj to %s!", filePath.c_str());
				}
				else
				{
					UERROR("Failed saving obj to %s!", filePath.c_str());
				}
			}
		}
	}
	else
	{
		pcl::PointCloud<pcl::PointXYZRGB>::Ptr mergedClouds(new pcl::PointCloud<pcl::PointXYZRGB>);
		std::vector<pcl::Vertices> mergedPolygons;

		{
			boost::mutex::scoped_lock  lock(meshesMutex_);

			for(std::map<int, Mesh>::iterator iter=createdMeshes_.begin();
				iter!= createdMeshes_.end();
				++iter)
			{
				pcl::PointCloud<pcl::PointXYZRGB>::Ptr denseCloud(new pcl::PointCloud<pcl::PointXYZRGB>);
				std::vector<pcl::Vertices> densePolygons;

				rtabmap::util3d::filterNotUsedVerticesFromMesh(
						*iter->second.cloud,
						iter->second.polygons,
						*denseCloud,
						densePolygons);

				pcl::PointCloud<pcl::PointXYZRGB>::Ptr transformedCloud = rtabmap::util3d::transformPointCloud(denseCloud, iter->second.pose);
				if(mergedClouds->size() == 0)
				{
					*mergedClouds = *transformedCloud;
					mergedPolygons = densePolygons;
				}
				else
				{
					rtabmap::util3d::appendMesh(*mergedClouds, mergedPolygons, *transformedCloud, densePolygons);
				}
			}
		}

		if(mergedClouds->size() && mergedPolygons.size())
		{
			pcl::PolygonMesh mesh;
			pcl::toPCLPointCloud2(*mergedClouds, mesh.cloud);
			mesh.polygons = mergedPolygons;

			UINFO("Saving ply (%d vertices, %d polygons) to %s.", (int)mergedClouds->size(), (int)mergedPolygons.size(), filePath.c_str());
			success = pcl::io::savePLYFileBinary(filePath, mesh) == 0;
			if(success)
			{
				UINFO("Saved ply to %s!", filePath.c_str());
			}
			else
			{
				UERROR("Failed saving ply to %s!", filePath.c_str());
			}
		}
	}
	return success;
}
Example #19
std::string Transform::prettyPrint() const
{
	float x,y,z,roll,pitch,yaw;
	getTranslationAndEulerAngles(x, y, z, roll, pitch, yaw);
	return uFormat("xyz=%f,%f,%f rpy=%f,%f,%f", x,y,z, roll,pitch,yaw);
}