Example #1
int main(int argc, char * argv[])
{
	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kInfo);
	ULogger::setPrintTime(false);
	ULogger::setPrintWhere(false);

	// parse arguments
	float rate = 0.0;
	std::string inputDatabase;
	int driver = 0;
	int odomType = rtabmap::Parameters::defaultOdomFeatureType();
	bool icp = false;
	bool flow = false;
	bool mono = false;
	int nnType = rtabmap::Parameters::defaultOdomBowNNType();
	float nndr = rtabmap::Parameters::defaultOdomBowNNDR();
	float distance = rtabmap::Parameters::defaultOdomInlierDistance();
	int maxWords = rtabmap::Parameters::defaultOdomMaxFeatures();
	int minInliers = rtabmap::Parameters::defaultOdomMinInliers();
	float maxDepth = rtabmap::Parameters::defaultOdomMaxDepth();
	int iterations = rtabmap::Parameters::defaultOdomIterations();
	int resetCountdown = rtabmap::Parameters::defaultOdomResetCountdown();
	int decimation = 4;
	float voxel = 0.005;
	int samples = 10000;
	float ratio = 0.7f;
	int maxClouds = 10;
	int briefBytes = rtabmap::Parameters::defaultBRIEFBytes();
	int fastThr = rtabmap::Parameters::defaultFASTThreshold();
	float sec = 0.0f;
	bool gpu = false;
	int localHistory = rtabmap::Parameters::defaultOdomBowLocalHistorySize();
	bool p2p = rtabmap::Parameters::defaultOdomPnPEstimation();

	for(int i=1; i<argc; ++i)
	{
		if(strcmp(argv[i], "-driver") == 0)
		{
			++i;
			if(i < argc)
			{
				driver = std::atoi(argv[i]);
				if(driver < 0 || driver > 7)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-o") == 0)
		{
			++i;
			if(i < argc)
			{
				odomType = std::atoi(argv[i]);
				if(odomType < 0 || odomType > 6)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-nn") == 0)
		{
			++i;
			if(i < argc)
			{
				nnType = std::atoi(argv[i]);
				if(nnType < 0 || nnType > 4)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-nndr") == 0)
		{
			++i;
			if(i < argc)
			{
				nndr = uStr2Float(argv[i]);
				if(nndr < 0.0f)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-hz") == 0)
		{
			++i;
			if(i < argc)
			{
				rate = uStr2Float(argv[i]);
				if(rate < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-db") == 0)
		{
			++i;
			if(i < argc)
			{
				inputDatabase = argv[i];
				if(UFile::getExtension(inputDatabase).compare("db") != 0)
				{
					printf("Database path (%s) should end with \"db\" \n", inputDatabase.c_str());
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-clouds") == 0)
		{
			++i;
			if(i < argc)
			{
				maxClouds = std::atoi(argv[i]);
				if(maxClouds < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-sec") == 0)
		{
			++i;
			if(i < argc)
			{
				sec = uStr2Float(argv[i]);
				if(sec < 0.0f)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-in") == 0)
		{
			++i;
			if(i < argc)
			{
				distance = uStr2Float(argv[i]);
				if(distance <= 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-max") == 0)
		{
			++i;
			if(i < argc)
			{
				maxWords = std::atoi(argv[i]);
				if(maxWords < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-min") == 0)
		{
			++i;
			if(i < argc)
			{
				minInliers = std::atoi(argv[i]);
				if(minInliers < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-depth") == 0)
		{
			++i;
			if(i < argc)
			{
				maxDepth = uStr2Float(argv[i]);
				if(maxDepth < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-i") == 0)
		{
			++i;
			if(i < argc)
			{
				iterations = std::atoi(argv[i]);
				if(iterations <= 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-reset") == 0)
		{
			++i;
			if(i < argc)
			{
				resetCountdown = std::atoi(argv[i]);
				if(resetCountdown < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-d") == 0)
		{
			++i;
			if(i < argc)
			{
				decimation = std::atoi(argv[i]);
				if(decimation < 1)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-v") == 0)
		{
			++i;
			if(i < argc)
			{
				voxel = uStr2Float(argv[i]);
				if(voxel < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-s") == 0)
		{
			++i;
			if(i < argc)
			{
				samples = std::atoi(argv[i]);
				if(samples < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-cr") == 0)
		{
			++i;
			if(i < argc)
			{
				ratio = uStr2Float(argv[i]);
				if(ratio < 0.0f)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-gpu") == 0)
		{
			gpu = true;
			continue;
		}
		if(strcmp(argv[i], "-lh") == 0)
		{
			++i;
			if(i < argc)
			{
				localHistory = std::atoi(argv[i]);
				if(localHistory < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-brief_bytes") == 0)
		{
			++i;
			if(i < argc)
			{
				briefBytes = std::atoi(argv[i]);
				if(briefBytes < 1)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-fast_thr") == 0)
		{
			++i;
			if(i < argc)
			{
				fastThr = std::atoi(argv[i]);
				if(fastThr < 1)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-icp") == 0)
		{
			icp = true;
			continue;
		}
		if(strcmp(argv[i], "-flow") == 0)
		{
			flow = true;
			continue;
		}
		if(strcmp(argv[i], "-mono") == 0)
		{
			mono = true;
			continue;
		}
		if(strcmp(argv[i], "-p2p") == 0)
		{
			p2p = true;
			continue;
		}
		if(strcmp(argv[i], "-debug") == 0)
		{
			ULogger::setLevel(ULogger::kDebug);
			ULogger::setPrintTime(true);
			ULogger::setPrintWhere(true);
			continue;
		}

		printf("Unrecognized option : %s\n", argv[i]);
		showUsage();
	}

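	// Binary descriptors (ORB, FAST/GFTT+FREAK/BRIEF, BRISK) cannot be matched with a float
	// kd-tree, and float descriptors (SURF, SIFT) cannot be hashed with LSH; reject such combos.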
	if(odomType > 1 && nnType == rtabmap::VWDictionary::kNNFlannKdTree)
	{
		UERROR("You set \"-o %d\" (binary descriptor), you must use \"-nn 2\" (any \"-nn\" other than kNNFlannKdTree)", odomType);
		showUsage();
	}
	else if(odomType <= 1 && nnType == rtabmap::VWDictionary::kNNFlannLSH)
	{
		UERROR("You set \"-o %d\" (float descriptor), you must use \"-nn 1\" (any \"-nn\" other than kNNFlannLSH)", odomType);
		showUsage();
	}

	if(inputDatabase.size())
	{
		UINFO("Using database input \"%s\"", inputDatabase.c_str());
	}
	else
	{
		UINFO("Using OpenNI camera");
	}

	std::string odomName;
	if(odomType == 0)
	{
		odomName = "SURF";
	}
	else if(odomType == 1)
	{
		odomName = "SIFT";
	}
	else if(odomType == 2)
	{
		odomName = "ORB";
	}
	else if(odomType == 3)
	{
		odomName = "FAST+FREAK";
	}
	else if(odomType == 4)
	{
		odomName = "FAST+BRIEF";
	}
	else if(odomType == 5)
	{
		odomName = "GFTT+FREAK";
	}
	else if(odomType == 6)
	{
		odomName = "GFTT+BRIEF";
	}
	else if(odomType == 7)
	{
		odomName = "BRISK";
	}

	if(icp)
	{
		odomName= "ICP";
	}

	if(flow)
	{
		odomName= "Optical Flow";
	}

	std::string nnName;
	if(nnType == 0)
	{
		nnName = "kNNFlannLinear";
	}
	else if(nnType == 1)
	{
		nnName = "kNNFlannKdTree";
	}
	else if(nnType == 2)
	{
		nnName= "kNNFlannLSH";
	}
	else if(nnType == 3)
	{
		nnName= "kNNBruteForce";
	}
	else if(nnType == 4)
	{
		nnName= "kNNBruteForceGPU";
	}

	UINFO("Odometry used =           %s", odomName.c_str());
	UINFO("Camera rate =             %f Hz", rate);
	UINFO("Maximum clouds shown =    %d", maxClouds);
	UINFO("Delay =                   %f s", sec);
	UINFO("Max depth =               %f", maxDepth);
	UINFO("Reset odometry coutdown = %d", resetCountdown);

	QApplication app(argc, argv);

	rtabmap::Odometry * odom = 0;

	rtabmap::ParametersMap parameters;

	parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomMaxDepth(), uNumber2Str(maxDepth)));
	parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomResetCountdown(), uNumber2Str(resetCountdown)));

	if(!icp)
	{
		UINFO("Min inliers =             %d", minInliers);
		UINFO("Inlier maximum correspondences distance = %f", distance);
		UINFO("RANSAC iterations =       %d", iterations);
		UINFO("Max features =            %d", maxWords);
		UINFO("GPU =                     %s", gpu?"true":"false");
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomInlierDistance(), uNumber2Str(distance)));
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomMinInliers(), uNumber2Str(minInliers)));
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomIterations(), uNumber2Str(iterations)));
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomMaxFeatures(), uNumber2Str(maxWords)));
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomFeatureType(), uNumber2Str(odomType)));
		if(odomType == 0)
		{
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kSURFGpuVersion(), uBool2Str(gpu)));
		}
		if(odomType == 2)
		{
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kORBGpu(), uBool2Str(gpu)));
		}
		if(odomType == 3 || odomType == 4)
		{
			UINFO("FAST threshold =          %d", fastThr);
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kFASTThreshold(), uNumber2Str(fastThr)));
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kFASTGpu(), uBool2Str(gpu)));
		}
		if(odomType == 4 || odomType == 6)
		{
			UINFO("BRIEF bytes =             %d", briefBytes);
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kBRIEFBytes(), uNumber2Str(briefBytes)));
		}

		if(flow)
		{
			// Optical Flow
			odom = new rtabmap::OdometryOpticalFlow(parameters);
		}
		else
		{
			//BOW
			UINFO("Nearest neighbor =         %s", nnName.c_str());
			UINFO("Nearest neighbor ratio =  %f", nndr);
			UINFO("Local history =           %d", localHistory);
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomBowNNType(), uNumber2Str(nnType)));
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomBowNNDR(), uNumber2Str(nndr)));
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomBowLocalHistorySize(), uNumber2Str(localHistory)));

			if(mono)
			{
				parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomPnPFlags(), uNumber2Str(0))); //CV_ITERATIVE
				parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomPnPReprojError(), "4.0"));
				parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomIterations(), "100"));
				odom = new rtabmap::OdometryMono(parameters);
			}
			else
			{
				odom = new rtabmap::OdometryBOW(parameters);
			}
		}
	}
	else if(icp) // ICP
	{
		UINFO("ICP maximum correspondences distance = %f", distance);
		UINFO("ICP iterations =          %d", iterations);
		UINFO("Cloud decimation =        %d", decimation);
		UINFO("Cloud voxel size =        %f", voxel);
		UINFO("Cloud samples =           %d", samples);
		UINFO("Cloud correspondence ratio = %f", ratio);
		UINFO("Cloud point to plane =    %s", p2p?"false":"true");

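		// note: the last constructor argument enables point-to-plane, hence !p2p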
		odom = new rtabmap::OdometryICP(decimation, voxel, samples, distance, iterations, ratio, !p2p);
	}

	rtabmap::OdometryThread odomThread(odom);
	rtabmap::OdometryViewer odomViewer(maxClouds, 2, 0.0, 50);
	UEventsManager::addHandler(&odomThread);
	UEventsManager::addHandler(&odomViewer);

	odomViewer.setWindowTitle("Odometry view");
	odomViewer.resize(1280, 480+QPushButton().minimumHeight());

	if(inputDatabase.size())
	{
		rtabmap::DBReader camera(inputDatabase, rate, true);
		if(camera.init())
		{
			odomThread.start();

			if(sec > 0)
			{
				uSleep(sec*1000);
			}

			camera.start();

			app.exec();

			camera.join(true);
			odomThread.join(true);
		}
	}
	else
	{
		rtabmap::CameraRGBD * camera = 0;
		rtabmap::Transform t=rtabmap::Transform(0,0,1,0, -1,0,0,0, 0,-1,0,0);
		if(driver == 0)
		{
			camera = new rtabmap::CameraOpenni("", rate, t);
		}
		else if(driver == 1)
		{
			if(!rtabmap::CameraOpenNI2::available())
			{
				UERROR("Not built with OpenNI2 support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNI2("", rate, t);
		}
		else if(driver == 2)
		{
			if(!rtabmap::CameraFreenect::available())
			{
				UERROR("Not built with Freenect support...");
				exit(-1);
			}
			camera = new rtabmap::CameraFreenect(0, rate, t);
		}
		else if(driver == 3)
		{
			if(!rtabmap::CameraOpenNICV::available())
			{
				UERROR("Not built with OpenNI from OpenCV support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNICV(false, rate, t);
		}
		else if(driver == 4)
		{
			if(!rtabmap::CameraOpenNICV::available())
			{
				UERROR("Not built with OpenNI from OpenCV support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNICV(true, rate, t);
		}
		else if(driver == 5)
		{
			if(!rtabmap::CameraFreenect2::available())
			{
				UERROR("Not built with Freenect2 support...");
				exit(-1);
			}
			camera = new rtabmap::CameraFreenect2(0, rtabmap::CameraFreenect2::kTypeRGBDepthSD, rate, t);
		}
		else if(driver == 6)
		{
			if(!rtabmap::CameraStereoDC1394::available())
			{
				UERROR("Not built with dc1394 support...");
				exit(-1);
			}
			camera = new rtabmap::CameraStereoDC1394(rate, t);
		}
		else if(driver == 7)
		{
			if(!rtabmap::CameraStereoFlyCapture2::available())
			{
				UERROR("Not built with FlyCapture2/Triclops support...");
				exit(-1);
			}
			camera = new rtabmap::CameraStereoFlyCapture2(rate, t);
		}
		else
		{
			UFATAL("Camera driver (%d) not found!", driver);
		}

		//pcl::console::setVerbosityLevel(pcl::console::L_DEBUG);

		if(camera->init())
		{
			if(camera->isCalibrated())
			{
				rtabmap::CameraThread cameraThread(camera);

				odomThread.start();
				cameraThread.start();

				odomViewer.exec();

				cameraThread.join(true);
				odomThread.join(true);
			}
			else
			{
				printf("The camera is not calibrated! You should calibrate the camera first.\n");
				delete camera;
			}
		}
		else
		{
			printf("Failed to initialize the camera! Please select another driver (see \"--help\").\n");
			delete camera;
		}
	}

	return 0;
}
Example #2
Transform Odometry::process(SensorData & data, OdometryInfo * info)
{
	if(_pose.isNull())
	{
		_pose.setIdentity(); // initialized
	}

	UASSERT(!data.imageRaw().empty());

	if(!data.stereoCameraModel().isValidForProjection() &&
	   (data.cameraModels().size() == 0 || !data.cameraModels()[0].isValidForProjection()))
	{
		UERROR("Rectified images required! Calibrate your camera.");
		return Transform();
	}

	double dt = previousStamp_>0.0f?data.stamp() - previousStamp_:0.0;
	Transform guess = dt && guessFromMotion_?Transform::getIdentity():Transform();
	UASSERT_MSG(dt>0.0 || (dt == 0.0 && previousVelocityTransform_.isNull()), uFormat("dt=%f previous transform=%s", dt, previousVelocityTransform_.prettyPrint().c_str()).c_str());
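	// With a previous velocity available, predict the motion over dt (constant-velocity model),
	// or let the Kalman filter produce the prediction when _filteringStrategy==1.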
	if(!previousVelocityTransform_.isNull())
	{
		if(guessFromMotion_)
		{
			if(_filteringStrategy == 1)
			{
				// use Kalman predict transform
				float vx,vy,vz, vroll,vpitch,vyaw;
				predictKalmanFilter(dt, &vx,&vy,&vz,&vroll,&vpitch,&vyaw);
				guess = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
			else
			{
				float vx,vy,vz, vroll,vpitch,vyaw;
				previousVelocityTransform_.getTranslationAndEulerAngles(vx,vy,vz, vroll,vpitch,vyaw);
				guess = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
		}
		else if(_filteringStrategy == 1)
		{
			predictKalmanFilter(dt);
		}
	}

	UTimer time;
	Transform t;
	if(_imageDecimation > 1)
	{
		// Decimation of images with calibrations
		SensorData decimatedData = data;
		decimatedData.setImageRaw(util2d::decimate(decimatedData.imageRaw(), _imageDecimation));
		decimatedData.setDepthOrRightRaw(util2d::decimate(decimatedData.depthOrRightRaw(), _imageDecimation));
		std::vector<CameraModel> cameraModels = decimatedData.cameraModels();
		for(unsigned int i=0; i<cameraModels.size(); ++i)
		{
			cameraModels[i] = cameraModels[i].scaled(1.0/double(_imageDecimation));
		}
		decimatedData.setCameraModels(cameraModels);
		StereoCameraModel stereoModel = decimatedData.stereoCameraModel();
		if(stereoModel.isValidForProjection())
		{
			stereoModel.scale(1.0/double(_imageDecimation));
		}
		decimatedData.setStereoCameraModel(stereoModel);

		// compute transform
		t = this->computeTransform(decimatedData, guess, info);

		// transform back the keypoints in the original image
		std::vector<cv::KeyPoint> kpts = decimatedData.keypoints();
		double log2value = log(double(_imageDecimation))/log(2.0);
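		// keypoint octave is a log2 pyramid level, so decimating by N shifts it by log2(N)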
		for(unsigned int i=0; i<kpts.size(); ++i)
		{
			kpts[i].pt.x *= _imageDecimation;
			kpts[i].pt.y *= _imageDecimation;
			kpts[i].size *= _imageDecimation;
			kpts[i].octave += log2value;
		}
		data.setFeatures(kpts, decimatedData.descriptors());

		if(info)
		{
			UASSERT(info->newCorners.size() == info->refCorners.size());
			for(unsigned int i=0; i<info->newCorners.size(); ++i)
			{
				info->refCorners[i].x *= _imageDecimation;
				info->refCorners[i].y *= _imageDecimation;
				info->newCorners[i].x *= _imageDecimation;
				info->newCorners[i].y *= _imageDecimation;
			}
			for(std::multimap<int, cv::KeyPoint>::iterator iter=info->words.begin(); iter!=info->words.end(); ++iter)
			{
				iter->second.pt.x *= _imageDecimation;
				iter->second.pt.y *= _imageDecimation;
				iter->second.size *= _imageDecimation;
				iter->second.octave += log2value;
			}
		}
	}
	else
	{
		t = this->computeTransform(data, guess, info);
	}

	if(info)
	{
		info->timeEstimation = time.ticks();
		info->lost = t.isNull();
		info->stamp = data.stamp();
		info->interval = dt;
		info->transform = t;

		if(!data.groundTruth().isNull())
		{
			if(!previousGroundTruthPose_.isNull())
			{
				info->transformGroundTruth = previousGroundTruthPose_.inverse() * data.groundTruth();
			}
			previousGroundTruthPose_ = data.groundTruth();
		}
	}

	if(!t.isNull())
	{
		_resetCurrentCount = _resetCountdown;

		float vx,vy,vz, vroll,vpitch,vyaw;
		t.getTranslationAndEulerAngles(vx,vy,vz, vroll,vpitch,vyaw);

		// transform to velocity
		if(dt)
		{
			vx /= dt;
			vy /= dt;
			vz /= dt;
			vroll /= dt;
			vpitch /= dt;
			vyaw /= dt;
		}

		if(_force3DoF || !_holonomic || particleFilters_.size() || _filteringStrategy==1)
		{
			if(_filteringStrategy == 1)
			{
				if(previousVelocityTransform_.isNull())
				{
					// reset Kalman
					if(dt)
					{
						initKalmanFilter(t, vx,vy,vz,vroll,vpitch,vyaw);
					}
					else
					{
						initKalmanFilter(t);
					}
				}
				else
				{
					// Kalman filtering
					updateKalmanFilter(vx,vy,vz,vroll,vpitch,vyaw);
				}
			}
			else if(particleFilters_.size())
			{
				// Particle filtering
				UASSERT(particleFilters_.size()==6);
				if(previousVelocityTransform_.isNull())
				{
					particleFilters_[0]->init(vx);
					particleFilters_[1]->init(vy);
					particleFilters_[2]->init(vz);
					particleFilters_[3]->init(vroll);
					particleFilters_[4]->init(vpitch);
					particleFilters_[5]->init(vyaw);
				}
				else
				{
					vx = particleFilters_[0]->filter(vx);
					vy = particleFilters_[1]->filter(vy);
					vyaw = particleFilters_[5]->filter(vyaw);

					if(!_holonomic)
					{
						// arc trajectory around ICR
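						// (ICR = instantaneous center of rotation: lateral velocity is derived from
						// forward velocity and yaw rate under the non-holonomic constraint)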
						float tmpY = vyaw!=0.0f ? vx / tan((CV_PI-vyaw)/2.0f) : 0.0f;
						if(fabs(tmpY) < fabs(vy) || (tmpY<=0 && vy >=0) || (tmpY>=0 && vy<=0))
						{
							vy = tmpY;
						}
						else
						{
							vyaw = (atan(vx/vy)*2.0f-CV_PI)*-1;
						}
					}

					if(!_force3DoF)
					{
						vz = particleFilters_[2]->filter(vz);
						vroll = particleFilters_[3]->filter(vroll);
						vpitch = particleFilters_[4]->filter(vpitch);
					}
				}

				if(info)
				{
					info->timeParticleFiltering = time.ticks();
				}

				if(_force3DoF)
				{
					vz = 0.0f;
					vroll = 0.0f;
					vpitch = 0.0f;
				}
			}
			else if(!_holonomic)
			{
				// arc trajectory around ICR
				vy = vyaw!=0.0f ? vx / tan((CV_PI-vyaw)/2.0f) : 0.0f;
				if(_force3DoF)
				{
					vz = 0.0f;
					vroll = 0.0f;
					vpitch = 0.0f;
				}
			}

			if(dt)
			{
				t = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
			else
			{
				t = Transform(vx, vy, vz, vroll, vpitch, vyaw);
			}

			if(info)
			{
				info->transformFiltered = t;
			}
		}

		previousStamp_ = data.stamp();
		previousVelocityTransform_.setNull();
		if(dt)
		{
			previousVelocityTransform_ = Transform(vx, vy, vz, vroll, vpitch, vyaw);
		}

		if(info)
		{
			distanceTravelled_ += t.getNorm();
			info->distanceTravelled = distanceTravelled_;
		}

		return _pose *= t; // updated
	}
	else if(_resetCurrentCount > 0)
	{
		UWARN("Odometry lost! Odometry will be reset after next %d consecutive unsuccessful odometry updates...", _resetCurrentCount);

		--_resetCurrentCount;
		if(_resetCurrentCount == 0)
		{
			UWARN("Odometry automatically reset to latest pose!");
			this->reset(_pose);
		}
	}

	previousVelocityTransform_.setNull();
	previousStamp_ = 0;

	return Transform();
}
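
A minimal sketch of how Odometry::process() is typically driven (the camera object is hypothetical and stands in for any SensorData source, such as a CameraRGBD wrapper or a DBReader; parameter setup and shutdown are omitted):

rtabmap::ParametersMap parameters;
rtabmap::OdometryBOW odom(parameters);          // any rtabmap::Odometry subclass behaves the same
rtabmap::OdometryInfo info;
rtabmap::SensorData data = camera.takeImage();  // hypothetical frame source
while(!data.imageRaw().empty())
{
	rtabmap::Transform pose = odom.process(data, &info);
	if(pose.isNull())
	{
		printf("Odometry lost (inliers=%d)\n", info.inliers);
	}
	data = camera.takeImage();
}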
Example #3
int main(int argc, char * argv[])
{
	if(argc < 2)
	{
		showUsage();
	}
	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kDebug);

	std::string dictionaryPath = argv[argc-1];
	std::list<std::vector<float> > objectDescriptors;
	//std::list<std::vector<float> > descriptors;
	std::map<int, std::vector<float> > descriptors;
	int dimension  = 0;
	UTimer timer;
	int objectDescriptorsSize = 400;

	std::ifstream file;
	if(!dictionaryPath.empty())
	{
		file.open(dictionaryPath.c_str(), std::ifstream::in);
	}
	if(file.good())
	{
		UDEBUG("Loading the dictionary from \"%s\"", dictionaryPath.c_str());

		// first line is the header
		std::string str;
		std::list<std::string> strList;
		std::getline(file, str);
		strList = uSplitNumChar(str);
		for(std::list<std::string>::iterator iter = strList.begin(); iter != strList.end(); ++iter)
		{
			if(uIsDigit(iter->at(0)))
			{
				dimension = std::atoi(iter->c_str());
				break;
			}
		}

		if(dimension == 0 || dimension > 1000)
		{
			UERROR("Invalid dictionary file, visual word dimension (%d) is not valid, \"%s\"", dimension, dictionaryPath.c_str());
		}
		else
		{
			int descriptorsLoaded = 0;
			// Process all words
			while(file.good())
			{
				std::getline(file, str);
				strList = uSplit(str);
				if((int)strList.size() == dimension+1)
				{
					//first one is the visual word id
					std::list<std::string>::iterator iter = strList.begin();
					int id = atoi(iter->c_str());
					++iter;

					std::vector<float> descriptor(dimension);
					int i=0;

					//get descriptor
					for(;i<dimension && iter != strList.end(); ++i, ++iter)
					{
						descriptor[i] = uStr2Float(*iter);
					}
					if(i != dimension)
					{
						UERROR("");
					}

					if(++descriptorsLoaded<=objectDescriptorsSize)
					{
						objectDescriptors.push_back(descriptor);
					}
					else
					{
						//descriptors.push_back(descriptor);
						descriptors.insert(std::make_pair(id, descriptor));
					}
				}
				else if(str.size())
				{
					UWARN("Cannot parse line \"%s\"", str.c_str());
				}
			}
		}

		UDEBUG("Time loading dictionary = %fs, dimension=%d", timer.ticks(), dimension);
	}
	else
	{
		UERROR("Cannot open dictionary file \"%s\"", dictionaryPath.c_str());
	}
	file.close();

	if(descriptors.size() && objectDescriptors.size() && dimension)
	{
		cv::Mat dataTree;
		cv::Mat queries;

		UDEBUG("Creating data structures...");
		// Create the data structure
		dataTree = cv::Mat((int)descriptors.size(), dimension, CV_32F); // SURF descriptors are CV_32F
		{//scope
			//std::list<std::vector<float> >::const_iterator iter = descriptors.begin();
			std::map<int, std::vector<float> >::const_iterator iter = descriptors.begin();
			for(unsigned int i=0; i < descriptors.size(); ++i, ++iter)
			{
				UTimer tim;
				//memcpy(dataTree.ptr<float>(i), iter->data(), dimension*sizeof(float));
				memcpy(dataTree.ptr<float>(i), iter->second.data(), dimension*sizeof(float));
				//if(i%100==0)
				//	UDEBUG("i=%d/%d tim=%fs", i, descriptors.size(), tim.ticks());
			}
		}

		queries = cv::Mat((int)objectDescriptors.size(), dimension, CV_32F); // SURF descriptors are CV_32F
		{//scope
			std::list<std::vector<float> >::const_iterator iter = objectDescriptors.begin();
			for(unsigned int i=0; i < objectDescriptors.size(); ++i, ++iter)
			{
				UTimer tim;
				memcpy(queries.ptr<float>(i), iter->data(), dimension*sizeof(float));
				//if(i%100==0)
				//	UDEBUG("i=%d/%d tim=%fs", i, objectDescriptors.size(), tim.ticks());
			}
		}

		UDEBUG("descriptors.size()=%d, objectDescriptorsSize=%d, copying data = %f s",descriptors.size(), objectDescriptors.size(), timer.ticks());

		UDEBUG("Creating indexes...");
		cv::flann::Index * linearIndex = new cv::flann::Index(dataTree, cv::flann::LinearIndexParams());
		UDEBUG("Time to create linearIndex = %f s", timer.ticks());

		cv::flann::Index * kdTreeIndex1 = new cv::flann::Index(dataTree, cv::flann::KDTreeIndexParams(1));
		UDEBUG("Time to create kdTreeIndex1 = %f s", timer.ticks());

		cv::flann::Index * kdTreeIndex4 = new cv::flann::Index(dataTree, cv::flann::KDTreeIndexParams(4));
		UDEBUG("Time to create kdTreeIndex4 = %f s", timer.ticks());

		cv::flann::Index * kMeansIndex = new cv::flann::Index(dataTree, cv::flann::KMeansIndexParams());
		UDEBUG("Time to create kMeansIndex = %f s", timer.ticks());

		cv::flann::Index * compositeIndex = new cv::flann::Index(dataTree, cv::flann::CompositeIndexParams());
		UDEBUG("Time to create compositeIndex = %f s", timer.ticks());

		//cv::flann::Index * autoTunedIndex = new cv::flann::Index(dataTree, cv::flann::AutotunedIndexParams());
		//UDEBUG("Time to create autoTunedIndex = %f s", timer.ticks());


		UDEBUG("Search indexes...");
		int k=2; // 2 nearest neighbors
		cv::Mat results(queries.rows, k, CV_32SC1); // results index
		cv::Mat dists(queries.rows, k, CV_32FC1); // Distance results are CV_32FC1

		linearIndex->knnSearch(queries, results, dists, k);
		//std::cout << results.t() << std::endl;
		cv::Mat transposedLinear = dists.t();
		UDEBUG("Time to search linearIndex = %f s", timer.ticks());

		kdTreeIndex1->knnSearch(queries, results, dists, k);
		//std::cout << results.t() << std::endl;
		cv::Mat transposed = dists.t();
		UDEBUG("Time to search kdTreeIndex1 = %f s (size=%d, dist error(k1,k2)=(%f,%f))",
				timer.ticks(),
				transposed.cols,
				uMeanSquaredError(  (float*)transposed.data,
					transposed.cols,
					(float*)transposedLinear.data,
					transposedLinear.cols),
				uMeanSquaredError(  &transposed.at<float>(1,0),
					transposed.cols,
					&transposedLinear.at<float>(1,0),
					transposedLinear.cols));

		kdTreeIndex4->knnSearch(queries, results, dists, k);
		//std::cout << results.t() << std::endl;
		transposed = dists.t();
		UDEBUG("Time to search kdTreeIndex4 = %f s (size=%d, dist error(k1,k2)=(%f,%f))",
				timer.ticks(),
				transposed.cols,
				uMeanSquaredError(  (float*)transposed.data,
					transposed.cols,
					(float*)transposedLinear.data,
					transposedLinear.cols),
				uMeanSquaredError(  &transposed.at<float>(1,0),
					transposed.cols,
					&transposedLinear.at<float>(1,0),
					transposedLinear.cols));

		kMeansIndex->knnSearch(queries, results, dists, k);
		//std::cout << results.t() << std::endl;
		transposed = dists.t();
		UDEBUG("Time to search kMeansIndex = %f s (size=%d, dist error(k1,k2)=(%f,%f))",
				timer.ticks(),
				transposed.cols,
				uMeanSquaredError(  (float*)transposed.data,
					transposed.cols,
					(float*)transposedLinear.data,
					transposedLinear.cols),
				uMeanSquaredError(  &transposed.at<float>(1,0),
					transposed.cols,
					&transposedLinear.at<float>(1,0),
					transposedLinear.cols));

		compositeIndex->knnSearch(queries, results, dists, k);
		//std::cout << results.t() << std::endl;
		transposed = dists.t();
		UDEBUG("Time to search compositeIndex = %f s (size=%d, dist error(k1,k2)=(%f,%f))",
				timer.ticks(),
				transposed.cols,
				uMeanSquaredError(  (float*)transposed.data,
					transposed.cols,
					(float*)transposedLinear.data,
					transposedLinear.cols),
				uMeanSquaredError(  &transposed.at<float>(1,0),
					transposed.cols,
					&transposedLinear.at<float>(1,0),
					transposedLinear.cols));

		//autoTunedIndex->knnSearch(queries, results, dists, k);
		//UDEBUG("Time to search autoTunedIndex = %f s", timer.ticks());

		delete linearIndex;
		delete kdTreeIndex1;
		delete kdTreeIndex4;
		delete kMeansIndex;
		delete compositeIndex;
		//delete autoTunedIndex;
	}

	return 0;
}
Example #4
bool CalibrationDialog::save()
{
	bool saved = false;
	processingData_ = true;
	if(!stereo_)
	{
		UASSERT(models_[0].isValid());
		QString cameraName = models_[0].name().c_str();
		QString filePath = QFileDialog::getSaveFileName(this, tr("Export"), savingDirectory_+"/"+cameraName+".yaml", "*.yaml");

		if(!filePath.isEmpty())
		{
			QString name = QFileInfo(filePath).baseName();
			QString dir = QFileInfo(filePath).absoluteDir().absolutePath();
			models_[0].setName(name.toStdString());
			if(models_[0].save(dir.toStdString()))
			{
				QMessageBox::information(this, tr("Export"), tr("Calibration file saved to \"%1\".").arg(filePath));
				UINFO("Saved \"%s\"!", filePath.toStdString().c_str());
				savedCalibration_ = true;
				saved = true;
			}
			else
			{
				UERROR("Error saving \"%s\"", filePath.toStdString().c_str());
			}
		}
	}
	else
	{
		UASSERT(stereoModel_.left().isValid() &&
				stereoModel_.right().isValid()&&
				(!ui_->label_baseline->isVisible() || stereoModel_.baseline() > 0.0));
		QString cameraName = stereoModel_.name().c_str();
		QString filePath = QFileDialog::getSaveFileName(this, tr("Export"), savingDirectory_ + "/" + cameraName, "*.yaml");
		QString name = QFileInfo(filePath).baseName();
		QString dir = QFileInfo(filePath).absoluteDir().absolutePath();
		if(!name.isEmpty())
		{
			stereoModel_.setName(name.toStdString());
			std::string base = (dir+QDir::separator()+name).toStdString();
			std::string leftPath = base+"_left.yaml";
			std::string rightPath = base+"_right.yaml";
			std::string posePath = base+"_pose.yaml";
			if(stereoModel_.save(dir.toStdString(), false))
			{
				QMessageBox::information(this, tr("Export"), tr("Calibration files saved:\n  \"%1\"\n  \"%2\"\n  \"%3\".").
						arg(leftPath.c_str()).arg(rightPath.c_str()).arg(posePath.c_str()));
				UINFO("Saved \"%s\" and \"%s\"!", leftPath.c_str(), rightPath.c_str());
				savedCalibration_ = true;
				saved = true;
			}
			else
			{
				UERROR("Error saving \"%s\" and \"%s\"", leftPath.c_str(), rightPath.c_str());
			}
		}
	}
	processingData_ = false;
	return saved;
}
Example #5
cv::flann::IndexParams * Settings::createFlannIndexParams()
{
	cv::flann::IndexParams * params = 0;
	QString str = getNearestNeighbor_1Strategy();
	QStringList split = str.split(':');
	if(split.size()==2)
	{
		bool ok = false;
		int index = split.first().toInt(&ok);
		if(ok)
		{
			QStringList strategies = split.last().split(';');
			if(strategies.size() == 6 && index>=0 && index<6)
			{
				switch(index)
				{
				case 0:
					if(strategies.at(index).compare("Linear") == 0)
					{
						UDEBUG("type=%s", "Linear");
						params = new cv::flann::LinearIndexParams();
					}
					break;
				case 1:
					if(strategies.at(index).compare("KDTree") == 0)
					{
						UDEBUG("type=%s", "KDTree");
						params = new cv::flann::KDTreeIndexParams(
								getNearestNeighbor_KDTree_trees());
					}
					break;
				case 2:
					if(strategies.at(index).compare("KMeans") == 0)
					{
						cvflann::flann_centers_init_t centers_init = cvflann::FLANN_CENTERS_RANDOM;
						QString str = getNearestNeighbor_KMeans_centers_init();
						QStringList split = str.split(':');
						if(split.size()==2)
						{
							bool ok = false;
							int index = split.first().toInt(&ok);
							if(ok)
							{
								centers_init = (cvflann::flann_centers_init_t)index;
							}
						}
						UDEBUG("type=%s", "KMeans");
						params = new cv::flann::KMeansIndexParams(
								getNearestNeighbor_KMeans_branching(),
								getNearestNeighbor_KMeans_iterations(),
								centers_init,
								getNearestNeighbor_KMeans_cb_index());
					}
					break;
				case 3:
					if(strategies.at(index).compare("Composite") == 0)
					{
						cvflann::flann_centers_init_t centers_init = cvflann::FLANN_CENTERS_RANDOM;
						QString str = getNearestNeighbor_Composite_centers_init();
						QStringList split = str.split(':');
						if(split.size()==2)
						{
							bool ok = false;
							int index = split.first().toInt(&ok);
							if(ok)
							{
								centers_init = (cvflann::flann_centers_init_t)index;
							}
						}
						UDEBUG("type=%s", "Composite");
						params = new cv::flann::CompositeIndexParams(
								getNearestNeighbor_Composite_trees(),
								getNearestNeighbor_Composite_branching(),
								getNearestNeighbor_Composite_iterations(),
								centers_init,
								getNearestNeighbor_Composite_cb_index());
					}
					break;
				case 4:
					if(strategies.at(index).compare("Autotuned") == 0)
					{
						UDEBUG("type=%s", "Autotuned");
						params = new cv::flann::AutotunedIndexParams(
								getNearestNeighbor_Autotuned_target_precision(),
								getNearestNeighbor_Autotuned_build_weight(),
								getNearestNeighbor_Autotuned_memory_weight(),
								getNearestNeighbor_Autotuned_sample_fraction());
					}
					break;
				case 5:
					if(strategies.at(index).compare("Lsh") == 0)
					{
						UDEBUG("type=%s", "Lsh");
						params = new cv::flann::LshIndexParams(
								getNearestNeighbor_Lsh_table_number(),
								getNearestNeighbor_Lsh_key_size(),
								getNearestNeighbor_Lsh_multi_probe_level());

					}
					break;
				default:
					break;
				}
			}
		}
	}
	if(!params)
	{
		UERROR("NN strategy not found !? Using default KDTRee...");
		params = new cv::flann::KDTreeIndexParams();
	}
	return params;
}
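
A short usage sketch for the factory above, following the cv::flann calls shown in Example #3 (dataTree and queries are hypothetical CV_32F descriptor matrices, one descriptor per row):

cv::flann::IndexParams * params = Settings::createFlannIndexParams();
cv::flann::Index index(dataTree, *params);   // build the index with the configured strategy
cv::Mat results(queries.rows, 2, CV_32SC1);  // indices of the 2 nearest neighbors
cv::Mat dists(queries.rows, 2, CV_32FC1);    // corresponding distances
index.knnSearch(queries, results, dists, 2);
delete params;                               // the caller owns the returned IndexParams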
Example #6
bool UAudioCaptureMic::init()
{
	this->close();
	bool ok = UAudioCapture::init();
	if(ok)
	{
		std::string::size_type loc;

		if(_fileName.size())
		{
			loc = _fileName.find( ".mp3", 0 );
			if( loc != std::string::npos )
			{
#ifdef BUILT_WITH_LAME
				_encodeToMp3 = true;
#else
				_fileName.append(".wav");
				UERROR("Cannot write to a mp3, saving to a wav instead (%s)", _fileName.c_str());
#endif
			}
			_fp = fopen(_fileName.c_str(), "wb");
		}

		FMOD_RESULT result;
		FMOD_BOOL isRecording = false;
		result = UAudioSystem::isRecording(_driver, &isRecording); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));
		if(isRecording)
		{
			result = UAudioSystem::recordStop(_driver); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));
		}

		_dataLength = 0;
		_soundLength = 0;
		_lastRecordPos = 0;
		FMOD_CREATESOUNDEXINFO exinfo;
		memset(&exinfo, 0, sizeof(FMOD_CREATESOUNDEXINFO));
		exinfo.cbsize           = sizeof(FMOD_CREATESOUNDEXINFO);
		exinfo.numchannels      = channels();
		if(bytesPerSample() == 1)
		{
			exinfo.format           = FMOD_SOUND_FORMAT_PCM8;
		}
		else if(bytesPerSample() == 2)
		{
			exinfo.format           = FMOD_SOUND_FORMAT_PCM16;
		}
		else if(bytesPerSample() == 3)
		{
			exinfo.format           = FMOD_SOUND_FORMAT_PCM24;
		}
		else if(bytesPerSample() == 4)
		{
			exinfo.format           = FMOD_SOUND_FORMAT_PCM32;
		}
		exinfo.defaultfrequency = (int)fs();
		exinfo.length           = exinfo.defaultfrequency * bytesPerSample() * exinfo.numchannels * 2; // 2 -> for two seconds

		result = UAudioSystem::createSound(0, FMOD_2D | FMOD_SOFTWARE | FMOD_OPENUSER, &exinfo, &_sound); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));
		if(_fp)
		{
			int channels, bits;
			float rate;
			FMOD_Sound_GetFormat(_sound, 0, 0, &channels, &bits);
			FMOD_Sound_GetDefaults(_sound, &rate, 0, 0, 0);
			UWav::writeWavHeader(_fp, _dataLength, rate, channels, bits); // Write out the wav header. The length will be 0 since it is unknown for now.
		}

		result = FMOD_Sound_GetLength(_sound, &_soundLength, FMOD_TIMEUNIT_PCM); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));
	}
	return ok;
}
Example #7
int main(int argc, char * argv[])
{
	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kInfo);

	int driver = 0;
	if(argc < 2)
	{
		showUsage();
	}
	else
	{
		driver = atoi(argv[argc-1]);
		if(driver < 0 || driver > 4)
		{
			UERROR("driver should be between 0 and 4.");
			showUsage();
		}
	}
	UINFO("Using driver %d", driver);

	rtabmap::CameraRGBD * camera = 0;
	if(driver == 0)
	{
		camera = new rtabmap::CameraOpenni("", 0);
	}
	else if(driver == 1)
	{
		if(!rtabmap::CameraOpenNI2::available())
		{
			UERROR("Not built with OpenNI2 support...");
			exit(-1);
		}
		camera = new rtabmap::CameraOpenNI2(0);
	}
	else if(driver == 2)
	{
		if(!rtabmap::CameraFreenect::available())
		{
			UERROR("Not built with Freenect support...");
			exit(-1);
		}
		camera = new rtabmap::CameraFreenect(0, 0);
	}
	else if(driver == 3)
	{
		if(!rtabmap::CameraOpenNICV::available())
		{
			UERROR("Not built with OpenNI from OpenCV support...");
			exit(-1);
		}
		camera = new rtabmap::CameraOpenNICV(false, 0);
	}
	else if(driver == 4)
	{
		if(!rtabmap::CameraOpenNICV::available())
		{
			UERROR("Not built with OpenNI from OpenCV support...");
			exit(-1);
		}
		camera = new rtabmap::CameraOpenNICV(true, 0);
	}
	else
	{
		UFATAL("");
	}

	if(!camera->init())
	{
		printf("Camera init failed!\n");
		delete camera;
		exit(1);
	}

	cv::Mat rgb, depth;
	float fx, fy, cx, cy;
	camera->takeImage(rgb, depth, fx, fy, cx, cy);
	cv::namedWindow("Video", CV_WINDOW_AUTOSIZE); // create window
	cv::namedWindow("Depth", CV_WINDOW_AUTOSIZE); // create window
	pcl::visualization::CloudViewer viewer("cloud");
	while(!rgb.empty() && !viewer.wasStopped())
	{
		cv::Mat tmp;
		depth.convertTo(tmp, CV_8UC1, 255.0/2048.0);

		cv::imshow("Video", rgb); // show frame
		cv::imshow("Depth",tmp);

		viewer.showCloud(rtabmap::util3d::cloudFromDepthRGB(rgb, depth, cx, cy, fx, fy), "cloud");

		int c = cv::waitKey(10); // wait 10 ms or for key stroke
		if(c == 27)
			break; // if ESC, break and quit

		rgb = cv::Mat();
		depth = cv::Mat();
		camera->takeImage(rgb, depth, fx, fy, cx, cy);
	}
	cv::destroyWindow("Video");
	cv::destroyWindow("Depth");
	delete camera;
	return 0;
}
Example #8
void CameraThread::mainLoop()
{
	UDEBUG("");
	CameraInfo info;
	SensorData data = _camera->takeImage(&info);

	if(!data.imageRaw().empty())
	{
		if(_colorOnly && !data.depthRaw().empty())
		{
			data.setDepthOrRightRaw(cv::Mat());
		}
		if(_imageDecimation>1 && !data.imageRaw().empty())
		{
			UDEBUG("");
			UTimer timer;
			if(!data.depthRaw().empty() &&
			   !(data.depthRaw().rows % _imageDecimation == 0 && data.depthRaw().cols % _imageDecimation == 0))
			{
				UERROR("Decimation of depth images should be exact (decimation=%d, size=(%d,%d))! "
					   "Images won't be resized.", _imageDecimation, data.depthRaw().cols, data.depthRaw().rows);
			}
			else
			{
				data.setImageRaw(util2d::decimate(data.imageRaw(), _imageDecimation));
				data.setDepthOrRightRaw(util2d::decimate(data.depthOrRightRaw(), _imageDecimation));
				std::vector<CameraModel> models = data.cameraModels();
				for(unsigned int i=0; i<models.size(); ++i)
				{
					if(models[i].isValidForProjection())
					{
						models[i] = models[i].scaled(1.0/double(_imageDecimation));
					}
				}
				data.setCameraModels(models);
				StereoCameraModel stereoModel = data.stereoCameraModel();
				if(stereoModel.isValidForProjection())
				{
					stereoModel.scale(1.0/double(_imageDecimation));
					data.setStereoCameraModel(stereoModel);
				}
			}
			info.timeImageDecimation = timer.ticks();
		}
		if(_mirroring && data.cameraModels().size() == 1)
		{
			UDEBUG("");
			UTimer timer;
			cv::Mat tmpRgb;
			cv::flip(data.imageRaw(), tmpRgb, 1);
			data.setImageRaw(tmpRgb);
			UASSERT_MSG(data.cameraModels().size() <= 1 && !data.stereoCameraModel().isValidForProjection(), "Only single RGBD cameras are supported for mirroring.");
			if(data.cameraModels().size() && data.cameraModels()[0].cx())
			{
				CameraModel tmpModel(
						data.cameraModels()[0].fx(),
						data.cameraModels()[0].fy(),
						float(data.imageRaw().cols) - data.cameraModels()[0].cx(),
						data.cameraModels()[0].cy(),
						data.cameraModels()[0].localTransform());
				data.setCameraModel(tmpModel);
			}
			if(!data.depthRaw().empty())
			{
				cv::Mat tmpDepth;
				cv::flip(data.depthRaw(), tmpDepth, 1);
				data.setDepthOrRightRaw(tmpDepth);
			}
			info.timeMirroring = timer.ticks();
		}
		if(_stereoToDepth && data.stereoCameraModel().isValidForProjection() && !data.rightRaw().empty())
		{
			UDEBUG("");
			UTimer timer;
			cv::Mat depth = util2d::depthFromDisparity(
					_stereoDense->computeDisparity(data.imageRaw(), data.rightRaw()),
					data.stereoCameraModel().left().fx(),
					data.stereoCameraModel().baseline());
			data.setCameraModel(data.stereoCameraModel().left());
			data.setDepthOrRightRaw(depth);
			data.setStereoCameraModel(StereoCameraModel());
			info.timeDisparity = timer.ticks();
			UDEBUG("Computing disparity = %f s", info.timeDisparity);
		}
		if(_scanFromDepth &&
			data.cameraModels().size() &&
			data.cameraModels().at(0).isValidForProjection() &&
			!data.depthRaw().empty())
		{
			UDEBUG("");
			if(data.laserScanRaw().empty())
			{
				UASSERT(_scanDecimation >= 1);
				UTimer timer;
				pcl::IndicesPtr validIndices(new std::vector<int>);
				pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = util3d::cloudFromSensorData(data, _scanDecimation, _scanMaxDepth, _scanMinDepth, validIndices.get());
				float maxPoints = (data.depthRaw().rows/_scanDecimation)*(data.depthRaw().cols/_scanDecimation);
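				// upper bound on the number of points the decimated depth image can produce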
				if(_scanVoxelSize>0.0f)
				{
					cloud = util3d::voxelize(cloud, validIndices, _scanVoxelSize);
					float ratio = float(cloud->size()) / float(validIndices->size());
					maxPoints = ratio * maxPoints;
				}
				else if(!cloud->is_dense)
				{
					pcl::PointCloud<pcl::PointXYZ>::Ptr denseCloud(new pcl::PointCloud<pcl::PointXYZ>);
					pcl::copyPointCloud(*cloud, *validIndices, *denseCloud);
					cloud = denseCloud;
				}

				cv::Mat scan;
				if(_scanNormalsK>0)
				{
					scan = util3d::laserScanFromPointCloud(*util3d::computeNormals(cloud, _scanNormalsK));
				}
				else
				{
					scan = util3d::laserScanFromPointCloud(*cloud);
				}
				data.setLaserScanRaw(scan, (int)maxPoints, _scanMaxDepth);
				info.timeScanFromDepth = timer.ticks();
				UDEBUG("Computing scan from depth = %f s", info.timeScanFromDepth);
			}
			else
			{
				UWARN("Option to create laser scan from depth image is enabled, but "
					  "there is already a laser scan in the captured sensor data. Scan from "
					  "depth will not be created.");
			}
		}

		info.cameraName = _camera->getSerial();
		this->post(new CameraEvent(data, info));
	}
	else if(!this->isKilled())
	{
		UWARN("no more images...");
		this->kill();
		this->post(new CameraEvent());
	}
}
Example #9
// return not null transform if odometry is correctly computed
Transform OdometryF2F::computeTransform(
		SensorData & data,
		const Transform & guess,
		OdometryInfo * info)
{
	UTimer timer;
	Transform output;
	if(!data.rightRaw().empty() && !data.stereoCameraModel().isValidForProjection())
	{
		UERROR("Calibrated stereo camera required");
		return output;
	}
	if(!data.depthRaw().empty() &&
		(data.cameraModels().size() != 1 || !data.cameraModels()[0].isValidForProjection()))
	{
		UERROR("Calibrated camera required (multi-cameras not supported).");
		return output;
	}

	RegistrationInfo regInfo;

	UASSERT(!this->getPose().isNull());
	if(lastKeyFramePose_.isNull())
	{
		lastKeyFramePose_ = this->getPose(); // reset to current pose
	}
	Transform motionSinceLastKeyFrame = lastKeyFramePose_.inverse()*this->getPose();
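	// Registration is computed against the last key frame, so a guess expressed relative to
	// the previous frame must be composed with the motion accumulated since that key frame.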

	Signature newFrame(data);
	if(refFrame_.sensorData().isValid())
	{
		Signature tmpRefFrame = refFrame_;
		output = registrationPipeline_->computeTransformationMod(
				tmpRefFrame,
				newFrame,
				!guess.isNull()?motionSinceLastKeyFrame*guess:Transform(),
				&regInfo);

		if(info && this->isInfoDataFilled())
		{
			std::list<std::pair<int, std::pair<cv::KeyPoint, cv::KeyPoint> > > pairs;
			EpipolarGeometry::findPairsUnique(tmpRefFrame.getWords(), newFrame.getWords(), pairs);
			info->refCorners.resize(pairs.size());
			info->newCorners.resize(pairs.size());
			std::map<int, int> idToIndex;
			int i=0;
			for(std::list<std::pair<int, std::pair<cv::KeyPoint, cv::KeyPoint> > >::iterator iter=pairs.begin();
				iter!=pairs.end();
				++iter)
			{
				info->refCorners[i] = iter->second.first.pt;
				info->newCorners[i] = iter->second.second.pt;
				idToIndex.insert(std::make_pair(iter->first, i));
				++i;
			}
			info->cornerInliers.resize(regInfo.inliersIDs.size(), 1);
			i=0;
			for(; i<(int)regInfo.inliersIDs.size(); ++i)
			{
				info->cornerInliers[i] = idToIndex.at(regInfo.inliersIDs[i]);
			}

			Transform t = this->getPose()*motionSinceLastKeyFrame.inverse();
			for(std::multimap<int, cv::Point3f>::const_iterator iter=tmpRefFrame.getWords3().begin(); iter!=tmpRefFrame.getWords3().end(); ++iter)
			{
				info->localMap.insert(std::make_pair(iter->first, util3d::transformPoint(iter->second, t)));
			}
			info->words = newFrame.getWords();
		}
	}
	else
	{
		//return Identity
		output = Transform::getIdentity();
		// a very high variance tells that the new pose is not linked with the previous one
		regInfo.variance = 9999;
	}

	if(!output.isNull())
	{
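		// the registration result is relative to the key frame; remove the part already accumulated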
		output = motionSinceLastKeyFrame.inverse() * output;

		// new key-frame?
		if( (registrationPipeline_->isImageRequired() && (keyFrameThr_ == 0 || float(regInfo.inliers) <= keyFrameThr_*float(refFrame_.sensorData().keypoints().size()))) ||
			(registrationPipeline_->isScanRequired() && (scanKeyFrameThr_ == 0 || regInfo.icpInliersRatio <= scanKeyFrameThr_)))
		{
			UDEBUG("Update key frame");
			int features = newFrame.getWordsDescriptors().size();
			if(features == 0)
			{
				newFrame = Signature(data);
				// this will generate features only for the first frame or if optical flow was used (no 3d words)
				Signature dummy;
				registrationPipeline_->computeTransformationMod(
						newFrame,
						dummy);
				features = (int)newFrame.sensorData().keypoints().size();
			}

			if((features >= registrationPipeline_->getMinVisualCorrespondences()) &&
			   (registrationPipeline_->getMinGeometryCorrespondencesRatio()==0.0f ||
					   (newFrame.sensorData().laserScanRaw().cols &&
					   (newFrame.sensorData().laserScanMaxPts() == 0 || float(newFrame.sensorData().laserScanRaw().cols)/float(newFrame.sensorData().laserScanMaxPts())>=registrationPipeline_->getMinGeometryCorrespondencesRatio()))))
			{
				refFrame_ = newFrame;

				refFrame_.setWords(std::multimap<int, cv::KeyPoint>());
				refFrame_.setWords3(std::multimap<int, cv::Point3f>());
				refFrame_.setWordsDescriptors(std::multimap<int, cv::Mat>());

				//reset motion
				lastKeyFramePose_.setNull();
			}
			else
			{
				if (!refFrame_.sensorData().isValid())
				{
					// Don't send odometry if we don't have a keyframe yet
					output.setNull();
				}

				if(features < registrationPipeline_->getMinVisualCorrespondences())
				{
					UWARN("Too low 2D features (%d), keeping last key frame...", features);
				}

				if(registrationPipeline_->getMinGeometryCorrespondencesRatio()>0.0f && newFrame.sensorData().laserScanRaw().cols==0)
				{
					UWARN("Too low scan points (%d), keeping last key frame...", newFrame.sensorData().laserScanRaw().cols);
				}
				else if(registrationPipeline_->getMinGeometryCorrespondencesRatio()>0.0f && newFrame.sensorData().laserScanMaxPts() != 0 && float(newFrame.sensorData().laserScanRaw().cols)/float(newFrame.sensorData().laserScanMaxPts())<registrationPipeline_->getMinGeometryCorrespondencesRatio())
				{
					UWARN("Too low scan points ratio (%d < %d), keeping last key frame...", float(newFrame.sensorData().laserScanRaw().cols)/float(newFrame.sensorData().laserScanMaxPts()), registrationPipeline_->getMinGeometryCorrespondencesRatio());
				}
			}
		}
	}
	else if(!regInfo.rejectedMsg.empty())
	{
		UWARN("Registration failed: \"%s\"", regInfo.rejectedMsg.c_str());
	}

	data.setFeatures(newFrame.sensorData().keypoints(), newFrame.sensorData().descriptors());

	if(info)
	{
		info->type = 1;
		info->variance = regInfo.variance;
		info->inliers = regInfo.inliers;
		info->icpInliersRatio = regInfo.icpInliersRatio;
		info->matches = regInfo.matches;
		info->features = newFrame.sensorData().keypoints().size();
	}

	UINFO("Odom update time = %fs lost=%s inliers=%d, ref frame corners=%d, transform accepted=%s",
			timer.elapsed(),
			output.isNull()?"true":"false",
			(int)regInfo.inliers,
			(int)newFrame.sensorData().keypoints().size(),
			!output.isNull()?"true":"false");

	return output;
}
Example #10
int main(int argc, char * argv[])
{
	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kInfo);
	ULogger::setPrintTime(false);
	ULogger::setPrintWhere(false);

	int driver = -1;
	int device = 0;
	bool stereo = false;
	for(int i=1; i<argc; ++i)
	{
		if(strcmp(argv[i], "--driver") == 0)
		{
			++i;
			if(i < argc)
			{
				driver = std::atoi(argv[i]);
				if(driver < -1)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "--device") == 0)
		{
			++i;
			if(i < argc)
			{
				device = std::atoi(argv[i]);
				if(device < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "--debug") == 0)
		{
			ULogger::setLevel(ULogger::kDebug);
			ULogger::setPrintTime(true);
			ULogger::setPrintWhere(true);
			continue;
		}
		if(strcmp(argv[i], "--stereo") == 0)
		{
			stereo = true;
			continue;
		}
		if(strcmp(argv[i], "--help") == 0)
		{
			showUsage();
		}
		printf("Unrecognized option : %s\n", argv[i]);
		showUsage();
	}
	if(driver < -1 || driver > 7)
	{
		UERROR("driver should be between -1 and 7.");
		showUsage();
	}

	UINFO("Using driver %d", driver);
	UINFO("Using device %d", device);
	UINFO("Stereo: %s", stereo?"true":"false");

	bool switchImages = false;

	rtabmap::Camera * camera = 0;
	if(driver == -1)
	{
		camera = new rtabmap::CameraVideo(device);
	}
	else if(driver == 0)
	{
		camera = new rtabmap::CameraOpenni();
	}
	else if(driver == 1)
	{
		if(!rtabmap::CameraOpenNI2::available())
		{
			UERROR("Not built with OpenNI2 support...");
			exit(-1);
		}
		camera = new rtabmap::CameraOpenNI2();
	}
	else if(driver == 2)
	{
		if(!rtabmap::CameraFreenect::available())
		{
			UERROR("Not built with Freenect support...");
			exit(-1);
		}
		camera = new rtabmap::CameraFreenect();
	}
	else if(driver == 3)
	{
		if(!rtabmap::CameraOpenNICV::available())
		{
			UERROR("Not built with OpenNI from OpenCV support...");
			exit(-1);
		}
		camera = new rtabmap::CameraOpenNICV(false);
	}
	else if(driver == 4)
	{
		if(!rtabmap::CameraOpenNICV::available())
		{
			UERROR("Not built with OpenNI from OpenCV support...");
			exit(-1);
		}
		camera = new rtabmap::CameraOpenNICV(true);
	}
	else if(driver == 5)
	{
		if(!rtabmap::CameraFreenect2::available())
		{
			UERROR("Not built with Freenect2 support...");
			exit(-1);
		}
		switchImages = true;
		camera = new rtabmap::CameraFreenect2(0, rtabmap::CameraFreenect2::kTypeColorIR);
	}
	else if(driver == 6)
	{
		if(!rtabmap::CameraStereoDC1394::available())
		{
			UERROR("Not built with DC1394 support...");
			exit(-1);
		}
		camera = new rtabmap::CameraStereoDC1394();
	}
	else if(driver == 7)
	{
		if(!rtabmap::CameraStereoFlyCapture2::available())
		{
			UERROR("Not built with FlyCapture2/Triclops support...");
			exit(-1);
		}
		camera = new rtabmap::CameraStereoFlyCapture2();
	}
	else
	{
		UFATAL("");
	}

	rtabmap::CameraThread * cameraThread = 0;

	if(camera)
	{
		if(!camera->init(""))
		{
			printf("Camera init failed!\n");
			delete camera;
			exit(1);
		}
		cameraThread = new rtabmap::CameraThread(camera);
	}

	QApplication app(argc, argv);
	rtabmap::CalibrationDialog dialog(stereo, ".", switchImages);
	dialog.registerToEventsManager();

	dialog.show();
	cameraThread->start();
	app.exec();
	cameraThread->join(true);
	delete cameraThread;
	return 0;
}
Example #11
int main(int argc, char * argv[])
{
	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kInfo);
	//ULogger::setPrintTime(false);
	//ULogger::setPrintWhere(false);

	int driver = 0;
	if(argc < 2)
	{
		showUsage();
	}
	else
	{
		if(strcmp(argv[argc-1], "--help") == 0)
		{
			showUsage();
		}
		driver = atoi(argv[argc-1]);
		if(driver < 0 || driver > 7)
		{
			UERROR("driver should be between 0 and 6.");
			showUsage();
		}
	}
	UINFO("Using driver %d", driver);

	rtabmap::CameraRGBD * camera = 0;
	if(driver == 0)
	{
		camera = new rtabmap::CameraOpenni();
	}
	else if(driver == 1)
	{
		if(!rtabmap::CameraOpenNI2::available())
		{
			UERROR("Not built with OpenNI2 support...");
			exit(-1);
		}
		camera = new rtabmap::CameraOpenNI2();
	}
	else if(driver == 2)
	{
		if(!rtabmap::CameraFreenect::available())
		{
			UERROR("Not built with Freenect support...");
			exit(-1);
		}
		camera = new rtabmap::CameraFreenect();
	}
	else if(driver == 3)
	{
		if(!rtabmap::CameraOpenNICV::available())
		{
			UERROR("Not built with OpenNI from OpenCV support...");
			exit(-1);
		}
		camera = new rtabmap::CameraOpenNICV(false);
	}
	else if(driver == 4)
	{
		if(!rtabmap::CameraOpenNICV::available())
		{
			UERROR("Not built with OpenNI from OpenCV support...");
			exit(-1);
		}
		camera = new rtabmap::CameraOpenNICV(true);
	}
	else if(driver == 5)
	{
		if(!rtabmap::CameraFreenect2::available())
		{
			UERROR("Not built with Freenect2 support...");
			exit(-1);
		}
		camera = new rtabmap::CameraFreenect2(0, rtabmap::CameraFreenect2::kTypeRGBDepthSD);
	}
	else if(driver == 6)
	{
		if(!rtabmap::CameraStereoDC1394::available())
		{
			UERROR("Not built with DC1394 support...");
			exit(-1);
		}
		camera = new rtabmap::CameraStereoDC1394();
	}
	else if(driver == 7)
	{
		if(!rtabmap::CameraStereoFlyCapture2::available())
		{
			UERROR("Not built with FlyCapture2/Triclops support...");
			exit(-1);
		}
		camera = new rtabmap::CameraStereoFlyCapture2();
	}
	else
	{
		UFATAL("");
	}

	if(!camera->init())
	{
		printf("Camera init failed! Please select another driver (see \"--help\").\n");
		delete camera;
		exit(1);
	}
	cv::Mat rgb, depth;
	float fx, fy, cx, cy;
	camera->takeImage(rgb, depth, fx, fy, cx, cy);
	if(rgb.cols != depth.cols || rgb.rows != depth.rows)
	{
		UWARN("RGB (%d/%d) and depth (%d/%d) frames are not the same size! The registered cloud cannot be shown.",
				rgb.cols, rgb.rows, depth.cols, depth.rows);
	}
	if(!fx || !fy)
	{
		UWARN("fx and/or fy are not set! The registered cloud cannot be shown.");
	}
	pcl::visualization::CloudViewer viewer("cloud");
	rtabmap::Transform t(1, 0, 0, 0,
						 0, -1, 0, 0,
						 0, 0, -1, 0);
	while(!rgb.empty() && !viewer.wasStopped())
	{
		if(depth.type() == CV_16UC1 || depth.type() == CV_32FC1)
		{
			// depth
			if(depth.type() == CV_32FC1)
			{
				depth = rtabmap::util3d::cvtDepthFromFloat(depth);
			}

			if(rgb.cols == depth.cols && rgb.rows == depth.rows && fx && fy)
			{
                std::cout << "------------------------------------------------- tools/CameraRGBD/main.cpp -------------------------------------------------" << std::endl;
				pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = rtabmap::util3d::cloudFromDepthRGB(rgb, depth, cx, cy, fx, fy);
				cloud = rtabmap::util3d::transformPointCloud(cloud, t);
				viewer.showCloud(cloud, "cloud");
			}
			else if(!depth.empty() && fx && fy)
			{
				pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = rtabmap::util3d::cloudFromDepth(depth, cx, cy, fx, fy);
				cloud = rtabmap::util3d::transformPointCloud(cloud, t);
				viewer.showCloud(cloud, "cloud");
			}

			cv::Mat tmp;
			unsigned short min=0, max = 2048;
			uMinMax((unsigned short*)depth.data, depth.rows*depth.cols, min, max);
			depth.convertTo(tmp, CV_8UC1, 255.0/max);

			cv::imshow("Video", rgb); // show frame
			cv::imshow("Depth", tmp);
		}
		else
		{
			// stereo
			cv::imshow("Left", rgb); // show frame
			cv::imshow("Right", depth);

			if(rgb.cols == depth.cols && rgb.rows == depth.rows && fx && fy)
			{
				if(depth.channels() == 3)
				{
					cv::cvtColor(depth, depth, CV_BGR2GRAY);
				}
				pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = rtabmap::util3d::cloudFromStereoImages(rgb, depth, cx, cy, fx, fy);
				cloud = rtabmap::util3d::transformPointCloud(cloud, t);
				viewer.showCloud(cloud, "cloud");
			}
		}

		int c = cv::waitKey(10); // wait 10 ms or for key stroke
		if(c == 27)
			break; // if ESC, break and quit

		rgb = cv::Mat();
		depth = cv::Mat();
		camera->takeImage(rgb, depth, fx, fy, cx, cy);
	}
	cv::destroyWindow("Video");
	cv::destroyWindow("Depth");
	delete camera;
	return 0;
}
Example #12
SensorData DBReader::getNextData()
{
	SensorData data;
	if(_dbDriver)
	{
		float frameRate = _frameRate;
		if(frameRate>0.0f)
		{
			int sleepTime = (1000.0f/frameRate - 1000.0f*_timer.getElapsedTime());
			if(sleepTime > 2)
			{
				uSleep(sleepTime-2);
			}

			// Add precision at the cost of a small overhead
			while(_timer.getElapsedTime() < 1.0/double(frameRate)-0.000001)
			{
				//
			}

			double slept = _timer.getElapsedTime();
			_timer.start();
			UDEBUG("slept=%fs vs target=%fs", slept, 1.0/double(frameRate));
		}

		if(!this->isKilled() && _currentId != _ids.end())
		{
			cv::Mat imageBytes;
			cv::Mat depthBytes;
			cv::Mat laserScanBytes;
			int mapId;
			float fx,fy,cx,cy;
			Transform localTransform, pose;
			float rotVariance = 1.0f;
			float transVariance = 1.0f;
			std::vector<unsigned char> userData;
			_dbDriver->getNodeData(*_currentId, imageBytes, depthBytes, laserScanBytes, fx, fy, cx, cy, localTransform);

			// info
			int weight;
			std::string label;
			double stamp;
			_dbDriver->getNodeInfo(*_currentId, pose, mapId, weight, label, stamp, userData);

			if(!_odometryIgnored)
			{
				std::map<int, Link> links;
				_dbDriver->loadLinks(*_currentId, links, Link::kNeighbor);
				if(links.size())
				{
					// assume the first is the backward neighbor, take its variance
					rotVariance = links.begin()->second.rotVariance();
					transVariance = links.begin()->second.transVariance();
				}
			}
			else
			{
				pose.setNull();
			}

			int seq = *_currentId;
			++_currentId;
			if(imageBytes.empty())
			{
				UWARN("No image loaded from the database for id=%d!", *_currentId);
			}

			rtabmap::CompressionThread ctImage(imageBytes, true);
			rtabmap::CompressionThread ctDepth(depthBytes, true);
			rtabmap::CompressionThread ctLaserScan(laserScanBytes, false);
			ctImage.start();
			ctDepth.start();
			ctLaserScan.start();
			ctImage.join();
			ctDepth.join();
			ctLaserScan.join();
			data = SensorData(
					ctLaserScan.getUncompressedData(),
					ctImage.getUncompressedData(),
					ctDepth.getUncompressedData(),
					fx,fy,cx,cy,
					localTransform,
					pose,
					rotVariance,
					transVariance,
					seq,
					stamp,
					userData);
			UDEBUG("Laser=%d RGB/Left=%d Depth=%d Right=%d",
					data.laserScan().empty()?0:1,
					data.image().empty()?0:1,
					data.depth().empty()?0:1,
					data.rightImage().empty()?0:1);
		}
	}
	else
	{
		UERROR("Not initialized...");
	}
	return data;
}
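The rate limiting at the top of getNextData() sleeps for most of the frame period, then spin-waits the last ~2 ms to absorb scheduler wake-up jitter. A minimal standalone sketch of the same pattern, using std::chrono instead of UTimer (the function and variable names here are illustrative, not part of RTAB-Map):

#include <chrono>
#include <thread>

// Sleep-then-spin throttle: a coarse sleep covers most of the period and a
// short busy-wait absorbs the scheduler's wake-up jitter. frameRate must be > 0.
void waitForNextFrame(std::chrono::steady_clock::time_point & lastFrame, double frameRate)
{
	using namespace std::chrono;
	const auto period = duration_cast<steady_clock::duration>(duration<double>(1.0/frameRate));
	const auto target = lastFrame + period;
	const auto margin = milliseconds(2); // leave ~2 ms for the spin phase
	if(target - steady_clock::now() > margin)
	{
		std::this_thread::sleep_until(target - margin);
	}
	while(steady_clock::now() < target) {} // spin for the remainder
	lastFrame = steady_clock::now();
}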
Example #13
void CameraThread::postUpdate(SensorData * dataPtr, CameraInfo * info) const
{
	UASSERT(dataPtr!=0);
	SensorData & data = *dataPtr;
	if(_colorOnly && !data.depthRaw().empty())
	{
		data.setDepthOrRightRaw(cv::Mat());
	}

	if(_distortionModel && !data.depthRaw().empty())
	{
		UTimer timer;
		if(_distortionModel->getWidth() == data.depthRaw().cols &&
		   _distortionModel->getHeight() == data.depthRaw().rows	)
		{
			cv::Mat depth = data.depthRaw().clone();// make sure we are not modifying data in cached signatures.
			_distortionModel->undistort(depth);
			data.setDepthOrRightRaw(depth);
		}
		else
		{
			UERROR("Distortion model size is %dx%d but dpeth image is %dx%d!",
					_distortionModel->getWidth(), _distortionModel->getHeight(),
					data.depthRaw().cols, data.depthRaw().rows);
		}
		if(info) info->timeUndistortDepth = timer.ticks();
	}

	if(_bilateralFiltering && !data.depthRaw().empty())
	{
		UTimer timer;
		data.setDepthOrRightRaw(util2d::fastBilateralFiltering(data.depthRaw(), _bilateralSigmaS, _bilateralSigmaR));
		if(info) info->timeBilateralFiltering = timer.ticks();
	}

	if(_imageDecimation>1 && !data.imageRaw().empty())
	{
		UDEBUG("");
		UTimer timer;
		if(!data.depthRaw().empty() &&
		   !(data.depthRaw().rows % _imageDecimation == 0 && data.depthRaw().cols % _imageDecimation == 0))
		{
			UERROR("Decimation of depth images should be exact (decimation=%d, size=(%d,%d))! "
				   "Images won't be resized.", _imageDecimation, data.depthRaw().cols, data.depthRaw().rows);
		}
		else
		{
			data.setImageRaw(util2d::decimate(data.imageRaw(), _imageDecimation));
			data.setDepthOrRightRaw(util2d::decimate(data.depthOrRightRaw(), _imageDecimation));
			std::vector<CameraModel> models = data.cameraModels();
			for(unsigned int i=0; i<models.size(); ++i)
			{
				if(models[i].isValidForProjection())
				{
					models[i] = models[i].scaled(1.0/double(_imageDecimation));
				}
			}
			data.setCameraModels(models);
			StereoCameraModel stereoModel = data.stereoCameraModel();
			if(stereoModel.isValidForProjection())
			{
				stereoModel.scale(1.0/double(_imageDecimation));
				data.setStereoCameraModel(stereoModel);
			}
		}
		if(info) info->timeImageDecimation = timer.ticks();
	}
	if(_mirroring && !data.imageRaw().empty() && data.cameraModels().size() == 1)
	{
		UDEBUG("");
		UTimer timer;
		cv::Mat tmpRgb;
		cv::flip(data.imageRaw(), tmpRgb, 1);
		data.setImageRaw(tmpRgb);
		UASSERT_MSG(data.cameraModels().size() <= 1 && !data.stereoCameraModel().isValidForProjection(), "Only single RGBD cameras are supported for mirroring.");
		if(data.cameraModels().size() && data.cameraModels()[0].cx())
		{
			CameraModel tmpModel(
					data.cameraModels()[0].fx(),
					data.cameraModels()[0].fy(),
					float(data.imageRaw().cols) - data.cameraModels()[0].cx(),
					data.cameraModels()[0].cy(),
					data.cameraModels()[0].localTransform());
			data.setCameraModel(tmpModel);
		}
		if(!data.depthRaw().empty())
		{
			cv::Mat tmpDepth;
			cv::flip(data.depthRaw(), tmpDepth, 1);
			data.setDepthOrRightRaw(tmpDepth);
		}
		if(info) info->timeMirroring = timer.ticks();
	}
	if(_stereoToDepth && !data.imageRaw().empty() && data.stereoCameraModel().isValidForProjection() && !data.rightRaw().empty())
	{
		UDEBUG("");
		UTimer timer;
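		// Compute a dense disparity map from the rectified stereo pair, then convert it to depth (Z = baseline*fx/disparity).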
		cv::Mat depth = util2d::depthFromDisparity(
				_stereoDense->computeDisparity(data.imageRaw(), data.rightRaw()),
				data.stereoCameraModel().left().fx(),
				data.stereoCameraModel().baseline());
		// set Tx for stereo bundle adjustment (when used)
		CameraModel model = CameraModel(
				data.stereoCameraModel().left().fx(),
				data.stereoCameraModel().left().fy(),
				data.stereoCameraModel().left().cx(),
				data.stereoCameraModel().left().cy(),
				data.stereoCameraModel().localTransform(),
				-data.stereoCameraModel().baseline()*data.stereoCameraModel().left().fx());
		data.setCameraModel(model);
		data.setDepthOrRightRaw(depth);
		data.setStereoCameraModel(StereoCameraModel());
		if(info) info->timeDisparity = timer.ticks();
	}
	if(_scanFromDepth &&
		data.cameraModels().size() &&
		data.cameraModels().at(0).isValidForProjection() &&
		!data.depthRaw().empty())
	{
		UDEBUG("");
		if(data.laserScanRaw().empty())
		{
			UASSERT(_scanDecimation >= 1);
			UTimer timer;
			pcl::IndicesPtr validIndices(new std::vector<int>);
			pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = util3d::cloudFromSensorData(
					data,
					_scanDecimation,
					_scanMaxDepth,
					_scanMinDepth,
					validIndices.get());
			float maxPoints = (data.depthRaw().rows/_scanDecimation)*(data.depthRaw().cols/_scanDecimation);
			cv::Mat scan;
			const Transform & baseToScan = data.cameraModels()[0].localTransform();
			if(validIndices->size())
			{
				if(_scanVoxelSize>0.0f)
				{
					cloud = util3d::voxelize(cloud, validIndices, _scanVoxelSize);
					float ratio = float(cloud->size()) / float(validIndices->size());
					maxPoints = ratio * maxPoints;
				}
				else if(!cloud->is_dense)
				{
					pcl::PointCloud<pcl::PointXYZ>::Ptr denseCloud(new pcl::PointCloud<pcl::PointXYZ>);
					pcl::copyPointCloud(*cloud, *validIndices, *denseCloud);
					cloud = denseCloud;
				}

				if(cloud->size())
				{
					if(_scanNormalsK>0)
					{
						Eigen::Vector3f viewPoint(baseToScan.x(), baseToScan.y(), baseToScan.z());
						pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals(cloud, _scanNormalsK, viewPoint);
						pcl::PointCloud<pcl::PointNormal>::Ptr cloudNormals(new pcl::PointCloud<pcl::PointNormal>);
						pcl::concatenateFields(*cloud, *normals, *cloudNormals);
						scan = util3d::laserScanFromPointCloud(*cloudNormals, baseToScan.inverse());
					}
					else
					{
						scan = util3d::laserScanFromPointCloud(*cloud, baseToScan.inverse());
					}
				}
			}
			data.setLaserScanRaw(scan, LaserScanInfo((int)maxPoints, _scanMaxDepth, baseToScan));
			if(info) info->timeScanFromDepth = timer.ticks();
		}
		else
		{
			UWARN("Option to create laser scan from depth image is enabled, but "
				  "there is already a laser scan in the captured sensor data. Scan from "
				  "depth will not be created.");
		}
	}
}
Example #14
bool Camera::start()
{
	//freopen("out.txt","w",stdout);
	if(!capture_.isOpened() && images_.empty() && cameraTcpServer_ == 0)
	{
		if(Settings::getCamera_6useTcpCamera())
		{
			cameraTcpServer_ = new CameraTcpServer(Settings::getCamera_8port(), this);
			if(!cameraTcpServer_->isListening())
			{
				UWARN("CameraTCP: Cannot listen to port %d", cameraTcpServer_->getPort());
				delete cameraTcpServer_;
				cameraTcpServer_ = 0;
			}
			else
			{
				UINFO("CameraTCP: listening to port %d (IP=%s)",
						cameraTcpServer_->getPort(),
						cameraTcpServer_->getHostAddress().toString().toStdString().c_str());
			}
		}
		else
		{
			QString path = Settings::getCamera_5mediaPath();
			if(UDirectory::exists(path.toStdString()))
			{
				//Images directory
				QString ext = Settings::getGeneral_imageFormats();
				ext.remove('*');
				ext.remove('.');
				UDirectory dir(path.toStdString(), ext.toStdString()); // this will load fileNames matching the extensions (in natural order)
				const std::list<std::string> & fileNames = dir.getFileNames();
				currentImageIndex_ = 0;
				images_.clear();
				// Modify to have full path
				for(std::list<std::string>::const_iterator iter = fileNames.begin(); iter!=fileNames.end(); ++iter)
				{
					images_.append(path.toStdString() + UDirectory::separator() + *iter);
				}
				UINFO("Camera: Reading %d images from directory \"%s\"...", (int)images_.size(), path.toStdString().c_str());
				if(images_.isEmpty())
				{
					UWARN("Camera: Directory \"%s\" is empty (no images matching the \"%s\" extensions). "
						   "If you want to disable loading automatically this directory, "
						   "clear the Camera/mediaPath parameter. By default, webcam will be used instead of the directory.",
						   path.toStdString().c_str(),
						   ext.toStdString().c_str());
				}
			}
			else if(!path.isEmpty())
			{
				//Video file
				capture_.open(path.toStdString().c_str());
				if(!capture_.isOpened())
				{
					UWARN("Camera: Cannot open file \"%s\". If you want to disable loading "
						  "automatically this video file, clear the Camera/mediaPath parameter. "
						  "By default, webcam will be used instead of the file.", path.toStdString().c_str());
				}
				else
				{
					UINFO("Camera: Reading from video file \"%s\"...", path.toStdString().c_str());
				}
			}
			if(!capture_.isOpened() && images_.empty())
			{
				//set camera device
				capture_.open(Settings::getCamera_1deviceId());
				if(Settings::getCamera_2imageWidth() && Settings::getCamera_3imageHeight())
				{
					capture_.set(CV_CAP_PROP_FRAME_WIDTH, double(Settings::getCamera_2imageWidth()));
					capture_.set(CV_CAP_PROP_FRAME_HEIGHT, double(Settings::getCamera_3imageHeight()));
				}
				UINFO("Camera: Reading from camera device %d...", Settings::getCamera_1deviceId());
			}
		}
	}
	if(!capture_.isOpened() && images_.empty() && cameraTcpServer_ == 0)
	{
		UERROR("Camera: Failed to open a capture object!");
		return false;
	}

	startTimer();
	return true;
}
Example #15
int main(int argc, char * argv[])
{
	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kInfo);
	//ULogger::setPrintTime(false);
	//ULogger::setPrintWhere(false);

	int driver = 0;
	std::string stereoSavePath;
	float rate = 0.0f;
	std::string fourcc = "MJPG";
	if(argc < 2)
	{
		showUsage();
	}
	else
	{
		for(int i=1; i<argc; ++i)
		{
			if(strcmp(argv[i], "-rate") == 0)
			{
				++i;
				if(i < argc)
				{
					rate = uStr2Float(argv[i]);
					if(rate < 0.0f)
					{
						showUsage();
					}
				}
				else
				{
					showUsage();
				}
				continue;
			}
			if(strcmp(argv[i], "-save_stereo") == 0)
			{
				++i;
				if(i < argc)
				{
					stereoSavePath = argv[i];
				}
				else
				{
					showUsage();
				}
				continue;
			}
			if(strcmp(argv[i], "-fourcc") == 0)
			{
				++i;
				if(i < argc)
				{
					fourcc = argv[i];
					if(fourcc.size() != 4)
					{
						UERROR("fourcc should be 4 characters.");
						showUsage();
					}
				}
				else
				{
					showUsage();
				}
				continue;
			}
			if(strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-help") == 0)
			{
				showUsage();
			}
			else if(i < argc-1)
			{
				printf("Unrecognized option \"%s\"\n", argv[i]);
				showUsage();
			}

			// last
			driver = atoi(argv[i]);
			if(driver < 0 || driver > 8)
			{
				UERROR("driver should be between 0 and 8.");
				showUsage();
			}
		}
	}
	UINFO("Using driver %d", driver);

	rtabmap::Camera * camera = 0;
	if(driver < 6)
	{
		if(!stereoSavePath.empty())
		{
			UWARN("-save_stereo option cannot be used with RGB-D drivers.");
			stereoSavePath.clear();
		}

		if(driver == 0)
		{
			camera = new rtabmap::CameraOpenni();
		}
		else if(driver == 1)
		{
			if(!rtabmap::CameraOpenNI2::available())
			{
				UERROR("Not built with OpenNI2 support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNI2();
		}
		else if(driver == 2)
		{
			if(!rtabmap::CameraFreenect::available())
			{
				UERROR("Not built with Freenect support...");
				exit(-1);
			}
			camera = new rtabmap::CameraFreenect();
		}
		else if(driver == 3)
		{
			if(!rtabmap::CameraOpenNICV::available())
			{
				UERROR("Not built with OpenNI from OpenCV support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNICV(false);
		}
		else if(driver == 4)
		{
			if(!rtabmap::CameraOpenNICV::available())
			{
				UERROR("Not built with OpenNI from OpenCV support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNICV(true);
		}
		else if(driver == 5)
		{
			if(!rtabmap::CameraFreenect2::available())
			{
				UERROR("Not built with Freenect2 support...");
				exit(-1);
			}
			camera = new rtabmap::CameraFreenect2(0, rtabmap::CameraFreenect2::kTypeColor2DepthSD);
		}
	}
	else if(driver == 6)
	{
		if(!rtabmap::CameraStereoDC1394::available())
		{
			UERROR("Not built with DC1394 support...");
			exit(-1);
		}
		camera = new rtabmap::CameraStereoDC1394();
	}
	else if(driver == 7)
	{
		if(!rtabmap::CameraStereoFlyCapture2::available())
		{
			UERROR("Not built with FlyCapture2/Triclops support...");
			exit(-1);
		}
		camera = new rtabmap::CameraStereoFlyCapture2();
	}
	else if(driver == 8)
	{
		if(!rtabmap::CameraStereoZed::available())
		{
			UERROR("Not built with ZED sdk support...");
			exit(-1);
		}
		camera = new rtabmap::CameraStereoZed(0);
	}
	else
	{
		UFATAL("");
	}

	if(!camera->init())
	{
		printf("Camera init failed! Please select another driver (see \"--help\").\n");
		delete camera;
		exit(1);
	}

	rtabmap::SensorData data = camera->takeImage();
	if(data.imageRaw().cols != data.depthOrRightRaw().cols || data.imageRaw().rows != data.depthOrRightRaw().rows)
	{
		UWARN("RGB (%d/%d) and depth (%d/%d) frames are not the same size! The registered cloud cannot be shown.",
				data.imageRaw().cols, data.imageRaw().rows, data.depthOrRightRaw().cols, data.depthOrRightRaw().rows);
	}
	pcl::visualization::CloudViewer * viewer = 0;
	if(!data.stereoCameraModel().isValidForProjection() && (data.cameraModels().size() == 0 || !data.cameraModels()[0].isValidForProjection()))
	{
		UWARN("Camera not calibrated! The registered cloud cannot be shown.");
	}
	else
	{
		viewer = new pcl::visualization::CloudViewer("cloud");
	}
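	// Rotate the cloud 180 degrees around X (optical frame: x right, y down, z forward) so it appears upright in the viewer.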
	rtabmap::Transform t(1, 0, 0, 0,
						 0, -1, 0, 0,
						 0, 0, -1, 0);

	cv::VideoWriter videoWriter;
	UDirectory dir;
	if(!stereoSavePath.empty() &&
	   !data.imageRaw().empty() &&
	   !data.rightRaw().empty())
	{
		if(UFile::getExtension(stereoSavePath).compare("avi") == 0)
		{
			if(data.imageRaw().size() == data.rightRaw().size())
			{
				if(rate <= 0)
				{
					UERROR("You should set the input rate when saving stereo images to a video file.");
					showUsage();
				}
				cv::Size targetSize = data.imageRaw().size();
				targetSize.width *= 2;
				UASSERT(fourcc.size() == 4);
				videoWriter.open(
						stereoSavePath,
						CV_FOURCC(fourcc.at(0), fourcc.at(1), fourcc.at(2), fourcc.at(3)),
						rate,
						targetSize,
						data.imageRaw().channels() == 3);
			}
			else
			{
				UERROR("Images not the same size, cannot save stereo images to the video file.");
			}
		}
		else if(UDirectory::exists(stereoSavePath))
		{
			UDirectory::makeDir(stereoSavePath+"/"+"left");
			UDirectory::makeDir(stereoSavePath+"/"+"right");
		}
		else
		{
			UERROR("Directory \"%s\" doesn't exist.", stereoSavePath.c_str());
			stereoSavePath.clear();
		}
	}

	// to catch the ctrl-c
	signal(SIGABRT, &sighandler);
	signal(SIGTERM, &sighandler);
	signal(SIGINT, &sighandler);

	int id=1;
	while(!data.imageRaw().empty() && (viewer==0 || !viewer->wasStopped()) && running)
	{
		cv::Mat rgb = data.imageRaw();
		if(!data.depthRaw().empty() && (data.depthRaw().type() == CV_16UC1 || data.depthRaw().type() == CV_32FC1))
		{
			// depth
			cv::Mat depth = data.depthRaw();
			if(depth.type() == CV_32FC1)
			{
				depth = rtabmap::util2d::cvtDepthFromFloat(depth);
			}

			if(rgb.cols == depth.cols && rgb.rows == depth.rows &&
					data.cameraModels().size() &&
					data.cameraModels()[0].isValidForProjection())
			{
				pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = rtabmap::util3d::cloudFromDepthRGB(
						rgb, depth,
						data.cameraModels()[0]);
				cloud = rtabmap::util3d::transformPointCloud(cloud, t);
				if(viewer)
					viewer->showCloud(cloud, "cloud");
			}
			else if(!depth.empty() &&
					data.cameraModels().size() &&
					data.cameraModels()[0].isValidForProjection())
			{
				pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = rtabmap::util3d::cloudFromDepth(
						depth,
						data.cameraModels()[0]);
				cloud = rtabmap::util3d::transformPointCloud(cloud, t);
				viewer->showCloud(cloud, "cloud");
			}

			cv::Mat tmp;
			unsigned short min=0, max = 2048;
			uMinMax((unsigned short*)depth.data, depth.rows*depth.cols, min, max);
			depth.convertTo(tmp, CV_8UC1, 255.0/max);

			cv::imshow("Video", rgb); // show frame
			cv::imshow("Depth", tmp);
		}
		else if(!data.rightRaw().empty())
		{
			// stereo
			cv::Mat right = data.rightRaw();
			cv::imshow("Left", rgb); // show frame
			cv::imshow("Right", right);

			if(rgb.cols == right.cols && rgb.rows == right.rows && data.stereoCameraModel().isValidForProjection())
			{
				if(right.channels() == 3)
				{
					cv::cvtColor(right, right, CV_BGR2GRAY);
				}
				pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = rtabmap::util3d::cloudFromStereoImages(
						rgb, right,
						data.stereoCameraModel());
				cloud = rtabmap::util3d::transformPointCloud(cloud, t);
				if(viewer)
					viewer->showCloud(cloud, "cloud");
			}
		}

		int c = cv::waitKey(10); // wait 10 ms or for key stroke
		if(c == 27)
			break; // if ESC, break and quit

		if(videoWriter.isOpened())
		{
			cv::Mat left = data.imageRaw();
			cv::Mat right = data.rightRaw();
			if(left.size() == right.size())
			{
				cv::Size targetSize = left.size();
				targetSize.width *= 2;
				cv::Mat targetImage(targetSize, left.type());
				if(right.type() != left.type())
				{
					cv::Mat tmp;
					cv::cvtColor(right, tmp, left.channels()==3?CV_GRAY2BGR:CV_BGR2GRAY);
					right = tmp;
				}
				UASSERT(left.type() == right.type());

				cv::Mat roiA(targetImage, cv::Rect( 0, 0, left.size().width, left.size().height ));
				left.copyTo(roiA);
				cv::Mat roiB(targetImage, cv::Rect(left.size().width, 0, left.size().width, left.size().height));
				right.copyTo(roiB);

				videoWriter.write(targetImage);
				printf("Saved frame %d to \"%s\"\n", id, stereoSavePath.c_str());
			}
			else
			{
				UERROR("Left and right images are not the same size!?");
			}
		}
		else if(!stereoSavePath.empty())
		{
			cv::imwrite(stereoSavePath+"/"+"left/"+uNumber2Str(id) + ".jpg", data.imageRaw());
			cv::imwrite(stereoSavePath+"/"+"right/"+uNumber2Str(id) + ".jpg", data.rightRaw());
			printf("Saved frames %d to \"%s/left\" and \"%s/right\" directories\n", id, stereoSavePath.c_str(), stereoSavePath.c_str());
		}
		++id;
		data = camera->takeImage();
	}
	printf("Closing...\n");
	if(viewer)
	{
		delete viewer;
	}
	cv::destroyWindow("Video");
	cv::destroyWindow("Depth");
	delete camera;
	return 0;
}
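The ROI copies above paste the left and right frames into one double-width image before writing it to the video. A shorter equivalent using cv::hconcat (a sketch of the same composition step, not the code used above):

#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>

// Compose a stereo pair side by side, first converting the right image to the
// left image's channel count, as the ROI-copy code above does.
cv::Mat composeSideBySide(const cv::Mat & left, cv::Mat right)
{
	CV_Assert(left.size() == right.size());
	if(right.type() != left.type())
	{
		cv::cvtColor(right, right, left.channels() == 3 ? cv::COLOR_GRAY2BGR : cv::COLOR_BGR2GRAY);
	}
	cv::Mat sideBySide;
	cv::hconcat(left, right, sideBySide);
	return sideBySide;
}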
Example #16
// OpenGL thread
int RTABMapApp::Render()
{
	// should be before clearSceneOnNextRender_ in case openDatabase is called
	std::list<rtabmap::Statistics> rtabmapEvents;
	{
		boost::mutex::scoped_lock  lock(rtabmapMutex_);
		rtabmapEvents = rtabmapEvents_;
		rtabmapEvents_.clear();
	}

	if(clearSceneOnNextRender_)
	{
		odomMutex_.lock();
		odomEvents_.clear();
		odomMutex_.unlock();

		poseMutex_.lock();
		poseEvents_.clear();
		poseMutex_.unlock();

		main_scene_.clear();
		clearSceneOnNextRender_ = false;
		createdMeshes_.clear();
		rawPoses_.clear();
		totalPoints_ = 0;
		totalPolygons_ = 0;
		lastDrawnCloudsCount_ = 0;
		renderingFPS_ = 0.0f;
	}

	// Process events
	rtabmap::Transform pose;
	{
		boost::mutex::scoped_lock lock(poseMutex_);
		if(poseEvents_.size())
		{
			pose = poseEvents_.back();
			poseEvents_.clear();
		}
	}
	if(!pose.isNull())
	{
		// update camera pose?
		main_scene_.SetCameraPose(pose);
	}

	bool notifyDataLoaded = false;
	if(rtabmapEvents.size())
	{
		LOGI("Process rtabmap events");

		// update buffered signatures
		std::map<int, rtabmap::SensorData> bufferedSensorData;
		if(!trajectoryMode_)
		{
			for(std::list<rtabmap::Statistics>::iterator iter=rtabmapEvents.begin(); iter!=rtabmapEvents.end(); ++iter)
			{
				for(std::map<int, rtabmap::Signature>::const_iterator jter=iter->getSignatures().begin(); jter!=iter->getSignatures().end(); ++jter)
				{
					if(!jter->second.sensorData().imageRaw().empty() &&
					   !jter->second.sensorData().depthRaw().empty())
					{
						uInsert(bufferedSensorData, std::make_pair(jter->first, jter->second.sensorData()));
						uInsert(rawPoses_, std::make_pair(jter->first, jter->second.getPose()));
					}
					else if(!jter->second.sensorData().imageCompressed().empty() &&
							!jter->second.sensorData().depthOrRightCompressed().empty())
					{
						// uncompress
						rtabmap::SensorData data = jter->second.sensorData();
						cv::Mat tmpA,tmpB;
						data.uncompressData(&tmpA, &tmpB);
						uInsert(bufferedSensorData, std::make_pair(jter->first, data));
						uInsert(rawPoses_, std::make_pair(jter->first, jter->second.getPose()));
						notifyDataLoaded = true;
					}
				}
			}
		}

		std::map<int, rtabmap::Transform> poses = rtabmapEvents.back().poses();

		// Transform pose in OpenGL world
		for(std::map<int, rtabmap::Transform>::iterator iter=poses.begin(); iter!=poses.end(); ++iter)
		{
			if(!graphOptimization_)
			{
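				// Graph optimization is disabled: fall back to the raw odometry pose stored for this node.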
				std::map<int, rtabmap::Transform>::iterator jter = rawPoses_.find(iter->first);
				if(jter != rawPoses_.end())
				{
					iter->second = opengl_world_T_rtabmap_world*jter->second;
				}
			}
			else
			{
				iter->second = opengl_world_T_rtabmap_world*iter->second;
			}
		}

		const std::multimap<int, rtabmap::Link> & links = rtabmapEvents.back().constraints();
		if(poses.size())
		{
			//update graph
			main_scene_.updateGraph(poses, links);

			// update clouds
			boost::mutex::scoped_lock  lock(meshesMutex_);
			std::set<std::string> strIds;
			for(std::map<int, rtabmap::Transform>::iterator iter=poses.begin(); iter!=poses.end(); ++iter)
			{
				int id = iter->first;
				if(!iter->second.isNull())
				{
					if(main_scene_.hasCloud(id))
					{
						//just update pose
						main_scene_.setCloudPose(id, iter->second);
						main_scene_.setCloudVisible(id, true);
						std::map<int, Mesh>::iterator meshIter = createdMeshes_.find(id);
						if(meshIter!=createdMeshes_.end())
						{
							meshIter->second.pose = iter->second;
						}
						else
						{
							UERROR("Not found mesh %d !?!?", id);
						}
					}
					else if(uContains(bufferedSensorData, id))
					{
						rtabmap::SensorData & data = bufferedSensorData.at(id);
						if(!data.imageRaw().empty() && !data.depthRaw().empty())
						{
							// Voxelize and filter depending on the previous cloud?
							pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud;
							pcl::IndicesPtr indices(new std::vector<int>);
							LOGI("Creating node cloud %d (image size=%dx%d)", id, data.imageRaw().cols, data.imageRaw().rows);
							cloud = rtabmap::util3d::cloudRGBFromSensorData(data, data.imageRaw().rows/data.depthRaw().rows, maxCloudDepth_, 0, indices.get());

							if(cloud->size() && indices->size())
							{
								UTimer time;

								// pcl::organizedFastMesh doesn't take indices, so set to NaN points we don't need to mesh
								pcl::PointCloud<pcl::PointXYZRGB>::Ptr output(new pcl::PointCloud<pcl::PointXYZRGB>);
								pcl::ExtractIndices<pcl::PointXYZRGB> filter;
								filter.setIndices(indices);
								filter.setKeepOrganized(true);
								filter.setInputCloud(cloud);
								filter.filter(*output);

								std::vector<pcl::Vertices> polygons = rtabmap::util3d::organizedFastMesh(output, meshAngleToleranceDeg_*M_PI/180.0, false, meshTrianglePix_);
								pcl::PointCloud<pcl::PointXYZRGB>::Ptr outputCloud(new pcl::PointCloud<pcl::PointXYZRGB>);
								std::vector<pcl::Vertices> outputPolygons;

								outputCloud = output;
								outputPolygons = polygons;

								LOGI("Creating mesh, %d polygons (%fs)", (int)outputPolygons.size(), time.ticks());

								if(outputCloud->size() && outputPolygons.size())
								{
									totalPolygons_ += outputPolygons.size();

									main_scene_.addCloud(id, outputCloud, outputPolygons, iter->second, data.imageRaw());


									// protect createdMeshes_ used also by exportMesh() method
									std::pair<std::map<int, Mesh>::iterator, bool> inserted = createdMeshes_.insert(std::make_pair(id, Mesh()));
									UASSERT(inserted.second);
									inserted.first->second.cloud = outputCloud;
									inserted.first->second.polygons = outputPolygons;
									inserted.first->second.pose = iter->second;
									inserted.first->second.texture = data.imageCompressed();
								}
								else
								{
									LOGE("Not mesh could be created for node %d", id);
								}
							}
							totalPoints_+=indices->size();
						}
					}
				}
			}
		}

		//filter poses?
		if(poses.size() > 2)
		{
			if(nodesFiltering_)
			{
				for(std::multimap<int, rtabmap::Link>::const_iterator iter=links.begin(); iter!=links.end(); ++iter)
				{
					if(iter->second.type() != rtabmap::Link::kNeighbor)
					{
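						// For loop-closure links, hide the older of the two linked nodes to avoid overlapping clouds.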
						int oldId = iter->second.to()>iter->second.from()?iter->second.from():iter->second.to();
						poses.erase(oldId);
					}
				}
			}
		}

		//update cloud visibility
		std::set<int> addedClouds = main_scene_.getAddedClouds();
		for(std::set<int>::const_iterator iter=addedClouds.begin();
			iter!=addedClouds.end();
			++iter)
		{
			if(*iter > 0 && poses.find(*iter) == poses.end())
			{
				main_scene_.setCloudVisible(*iter, false);
			}
		}
	}
	else
	{
		rtabmap::OdometryEvent event;
		bool set = false;
		{
			boost::mutex::scoped_lock  lock(odomMutex_);
			if(odomEvents_.size())
			{
				LOGI("Process odom events");
				event = odomEvents_.back();
				odomEvents_.clear();
				set = true;
			}
		}

		main_scene_.setCloudVisible(-1, odomCloudShown_ && !trajectoryMode_);

		//just process the last one
		if(set && !event.pose().isNull())
		{
			if(odomCloudShown_ && !trajectoryMode_)
			{
				if(!event.data().imageRaw().empty() && !event.data().depthRaw().empty())
				{
					pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud;
					cloud = rtabmap::util3d::cloudRGBFromSensorData(event.data(), event.data().imageRaw().rows/event.data().depthRaw().rows, maxCloudDepth_);
					if(cloud->size())
					{
						LOGI("Created odom cloud (rgb=%dx%d depth=%dx%d cloud=%dx%d)",
													event.data().imageRaw().cols, event.data().imageRaw().rows,
													event.data().depthRaw().cols, event.data().depthRaw().rows,
													(int)cloud->width, (int)cloud->height);
						std::vector<pcl::Vertices> polygons = rtabmap::util3d::organizedFastMesh(cloud, meshAngleToleranceDeg_*M_PI/180.0, false, meshTrianglePix_);
						main_scene_.addCloud(-1, cloud, polygons, opengl_world_T_rtabmap_world*event.pose(), event.data().imageRaw());
						main_scene_.setCloudVisible(-1, true);
					}
					else
					{
						LOGE("Generated cloud is empty!");
					}
				}
				else
				{
					LOGE("Odom data images are empty!");
				}
			}
		}
	}

	UTimer fpsTime;
	lastDrawnCloudsCount_ = main_scene_.Render();
	renderingFPS_ = 1.0/fpsTime.elapsed();

	if(rtabmapEvents.size())
	{
		// send statistics to GUI
		LOGI("Posting PostRenderEvent!");
		UEventsManager::post(new PostRenderEvent(rtabmapEvents.back()));
	}

	return notifyDataLoaded?1:0;
}
Example #17
SensorData DBReader::getNextData()
{
	SensorData data;
	if(_dbDriver)
	{
		float frameRate = _frameRate;
		if(frameRate>0.0f)
		{
			int sleepTime = (1000.0f/frameRate - 1000.0f*_timer.getElapsedTime());
			if(sleepTime > 2)
			{
				uSleep(sleepTime-2);
			}

			// Add precision at the cost of a small overhead
			while(_timer.getElapsedTime() < 1.0/double(frameRate)-0.000001)
			{
				//
			}

			double slept = _timer.getElapsedTime();
			_timer.start();
			UDEBUG("slept=%fs vs target=%fs", slept, 1.0/double(frameRate));
		}

		if(!this->isKilled() && _currentId != _ids.end())
		{
			cv::Mat imageBytes;
			cv::Mat depthBytes;
			cv::Mat laserScanBytes;
			int mapId;
			float fx,fy,cx,cy;
			Transform localTransform, pose;
			float variance = 1.0f;
			_dbDriver->getNodeData(*_currentId, imageBytes, depthBytes, laserScanBytes, fx, fy, cx, cy, localTransform);
			if(!_odometryIgnored)
			{
				_dbDriver->getPose(*_currentId, pose, mapId);
				std::map<int, Link> links;
				_dbDriver->loadLinks(*_currentId, links, Link::kNeighbor);
				if(links.size())
				{
					// assume the first is the backward neighbor, take its variance
					variance = links.begin()->second.variance();
				}
			}
			int seq = *_currentId;
			++_currentId;
			if(imageBytes.empty())
			{
				UWARN("No image loaded from the database for id=%d!", *_currentId);
			}

			util3d::CompressionThread ctImage(imageBytes, true);
			util3d::CompressionThread ctDepth(depthBytes, true);
			util3d::CompressionThread ctLaserScan(laserScanBytes, false);
			ctImage.start();
			ctDepth.start();
			ctLaserScan.start();
			ctImage.join();
			ctDepth.join();
			ctLaserScan.join();
			data = SensorData(
					ctLaserScan.getUncompressedData(),
					ctImage.getUncompressedData(),
					ctDepth.getUncompressedData(),
					fx,fy,cx,cy,
					localTransform,
					pose,
					variance,
					seq);
		}
	}
	else
	{
		UERROR("Not initialized...");
	}
	return data;
}
Example #18
bool RTABMapApp::exportMesh(const std::string & filePath)
{
	bool success = false;

	//Assemble the meshes
	if(UFile::getExtension(filePath).compare("obj") == 0)
	{
		pcl::TextureMesh textureMesh;
		std::vector<cv::Mat> textures;
		int totalPolygons = 0;
		pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr mergedClouds(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
		{
			boost::mutex::scoped_lock  lock(meshesMutex_);

			textureMesh.tex_materials.resize(createdMeshes_.size());
			textureMesh.tex_polygons.resize(createdMeshes_.size());
			textureMesh.tex_coordinates.resize(createdMeshes_.size());
			textures.resize(createdMeshes_.size());

			int polygonsStep = 0;
			int oi = 0;
			for(std::map<int, Mesh>::iterator iter=createdMeshes_.begin();
				iter!= createdMeshes_.end();
				++iter)
			{
				UASSERT(!iter->second.cloud->is_dense);

				if(!iter->second.texture.empty() &&
					iter->second.cloud->size() &&
					iter->second.polygons.size())
				{
					// OBJ format requires normals
					pcl::PointCloud<pcl::Normal>::Ptr normals = rtabmap::util3d::computeNormals(iter->second.cloud, 20);

					pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloudWithNormals(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
					pcl::concatenateFields(*iter->second.cloud, *normals, *cloudWithNormals);

					// create dense cloud
					pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr denseCloud(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
					std::vector<pcl::Vertices> densePolygons;
					std::map<int, int> newToOldIndices;
					newToOldIndices = rtabmap::util3d::filterNotUsedVerticesFromMesh(
							*cloudWithNormals,
							iter->second.polygons,
							*denseCloud,
							densePolygons);

					// polygons
					UASSERT(densePolygons.size());
					unsigned int polygonSize = densePolygons.front().vertices.size();
					textureMesh.tex_polygons[oi].resize(densePolygons.size());
					textureMesh.tex_coordinates[oi].resize(densePolygons.size() * polygonSize);
					for(unsigned int j=0; j<densePolygons.size(); ++j)
					{
						pcl::Vertices vertices = densePolygons[j];
						UASSERT(polygonSize == vertices.vertices.size());
						for(unsigned int k=0; k<vertices.vertices.size(); ++k)
						{
							//uv
							std::map<int, int>::iterator jter = newToOldIndices.find(vertices.vertices[k]);
							textureMesh.tex_coordinates[oi][j*vertices.vertices.size()+k] = Eigen::Vector2f(
									float(jter->second % iter->second.cloud->width) / float(iter->second.cloud->width),   // u
									float(iter->second.cloud->height - jter->second / iter->second.cloud->width) / float(iter->second.cloud->height)); // v

							vertices.vertices[k] += polygonsStep;
						}
						textureMesh.tex_polygons[oi][j] = vertices;

					}
					totalPolygons += densePolygons.size();
					polygonsStep += denseCloud->size();

					pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr transformedCloud = rtabmap::util3d::transformPointCloud(denseCloud, iter->second.pose);
					if(mergedClouds->size() == 0)
					{
						*mergedClouds = *transformedCloud;
					}
					else
					{
						*mergedClouds += *transformedCloud;
					}

					textures[oi] = iter->second.texture;
					textureMesh.tex_materials[oi].tex_illum = 1;
					textureMesh.tex_materials[oi].tex_name = uFormat("material_%d", iter->first);
					++oi;
				}
				else
				{
					UERROR("Texture not set for mesh %d", iter->first);
				}
			}
			textureMesh.tex_materials.resize(oi);
			textureMesh.tex_polygons.resize(oi);
			textures.resize(oi);

			if(textures.size())
			{
				pcl::toPCLPointCloud2(*mergedClouds, textureMesh.cloud);

				std::string textureDirectory = uSplit(filePath, '.').front();
				UINFO("Saving %d textures to %s.", textures.size(), textureDirectory.c_str());
				UDirectory::makeDir(textureDirectory);
				for(unsigned int i=0;i<textures.size(); ++i)
				{
					cv::Mat rawImage = rtabmap::uncompressImage(textures[i]);
					std::string texFile = textureDirectory+"/"+textureMesh.tex_materials[i].tex_name+".png";
					cv::imwrite(texFile, rawImage);

					UINFO("Saved %s (%d bytes).", texFile.c_str(), rawImage.total()*rawImage.channels());

					// relative path
					textureMesh.tex_materials[i].tex_file = uSplit(UFile::getName(filePath), '.').front()+"/"+textureMesh.tex_materials[i].tex_name+".png";
				}

				UINFO("Saving obj (%d vertices, %d polygons) to %s.", (int)mergedClouds->size(), totalPolygons, filePath.c_str());
				success = pcl::io::saveOBJFile(filePath, textureMesh) == 0;
				if(success)
				{
					UINFO("Saved obj to %s!", filePath.c_str());
				}
				else
				{
					UERROR("Failed saving obj to %s!", filePath.c_str());
				}
			}
		}
	}
	else
	{
		pcl::PointCloud<pcl::PointXYZRGB>::Ptr mergedClouds(new pcl::PointCloud<pcl::PointXYZRGB>);
		std::vector<pcl::Vertices> mergedPolygons;

		{
			boost::mutex::scoped_lock  lock(meshesMutex_);

			for(std::map<int, Mesh>::iterator iter=createdMeshes_.begin();
				iter!= createdMeshes_.end();
				++iter)
			{
				pcl::PointCloud<pcl::PointXYZRGB>::Ptr denseCloud(new pcl::PointCloud<pcl::PointXYZRGB>);
				std::vector<pcl::Vertices> densePolygons;

				rtabmap::util3d::filterNotUsedVerticesFromMesh(
						*iter->second.cloud,
						iter->second.polygons,
						*denseCloud,
						densePolygons);

				pcl::PointCloud<pcl::PointXYZRGB>::Ptr transformedCloud = rtabmap::util3d::transformPointCloud(denseCloud, iter->second.pose);
				if(mergedClouds->size() == 0)
				{
					*mergedClouds = *transformedCloud;
					mergedPolygons = densePolygons;
				}
				else
				{
					rtabmap::util3d::appendMesh(*mergedClouds, mergedPolygons, *transformedCloud, densePolygons);
				}
			}
		}

		if(mergedClouds->size() && mergedPolygons.size())
		{
			pcl::PolygonMesh mesh;
			pcl::toPCLPointCloud2(*mergedClouds, mesh.cloud);
			mesh.polygons = mergedPolygons;

			UINFO("Saving ply (%d vertices, %d polygons) to %s.", (int)mergedClouds->size(), (int)mergedPolygons.size(), filePath.c_str());
			success = pcl::io::savePLYFileBinary(filePath, mesh) == 0;
			if(success)
			{
				UINFO("Saved ply to %s!", filePath.c_str());
			}
			else
			{
				UERROR("Failed saving ply to %s!", filePath.c_str());
			}
		}
	}
	return success;
}
Example #19
/*********************************
RecordFichier function
Records a WAV file
*********************************/
void UAudioCaptureMic::mainLoop()
{
	if(!_sound)
	{
		UERROR("Recorder is not initialized.");
		this->kill();
		return;
	}
    FMOD_RESULT result;
    void *ptr1 = 0, *ptr2 = 0;
    int blockLength;
    unsigned int len1 = 0, len2 = 0;

    unsigned int recordPos = 0;

	result = UAudioSystem::getRecordPosition(_driver, &recordPos); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));
	if (recordPos != _lastRecordPos)
	{
		blockLength = (int)recordPos - (int)_lastRecordPos;
		if (blockLength < 0)
		{
			blockLength += _soundLength;
		}

		// * exinfo.numchannels * 2 = stereo 16bit.  1 sample = 4 bytes.
		// Lock the sound to get access to the raw data.
		FMOD_Sound_Lock(_sound, _lastRecordPos * channels() * bytesPerSample(), blockLength * channels() * bytesPerSample(), &ptr1, &ptr2, &len1, &len2);
		{
			if (ptr1 && len1)    //    Write it to disk.
			{
				if(_fp)
				{
					//write to file
					_dataLength += fwrite(ptr1, 1, len1, _fp);
				}

				// push back in the frames buffer
				pushBackSamples(ptr1, len1);
			}

			if (ptr2 && len2)    //    Write it to disk.
			{
				if(_fp)
				{
					//write to file
					_dataLength += fwrite(ptr2, 1, len2, _fp);
				}

				// push back in the frames buffer
				pushBackSamples(ptr2, len2);
			}
		}
		//Unlock the sound to allow FMOD to use it again.
		FMOD_Sound_Unlock(_sound, ptr1, ptr2, len1, len2);

		_lastRecordPos = recordPos;
	}

    UAudioSystem::update();

    uSleep(10);

    // If we are recording to a file, make sure to stop 
    // when the maximum file size is reached
    if (_fp && _maxFileSize != 0 && int(_dataLength) + frameLength()*bytesPerSample() > _maxFileSize)
    {
        UWARN("Recording max memory reached (%d Mb)... stopped", _maxFileSize/1000000);
        this->kill();
    }
}
Example #20
void RTABMapApp::handleEvent(UEvent * event)
{
	if(camera_ && camera_->isRunning())
	{
		// called from events manager thread, so protect the data
		if(event->getClassName().compare("OdometryEvent") == 0)
		{
			LOGI("Received OdometryEvent!");
			if(odomMutex_.try_lock())
			{
				odomEvents_.clear();
				if(camera_->isRunning())
				{
					odomEvents_.push_back(*((rtabmap::OdometryEvent*)(event)));
				}
				odomMutex_.unlock();
			}
		}
		if(status_.first == rtabmap::RtabmapEventInit::kInitialized &&
		   event->getClassName().compare("RtabmapEvent") == 0)
		{
			LOGI("Received RtabmapEvent!");
			if(camera_->isRunning())
			{
				boost::mutex::scoped_lock lock(rtabmapMutex_);
				rtabmapEvents_.push_back(((rtabmap::RtabmapEvent*)event)->getStats());
			}
		}
	}

	if(event->getClassName().compare("PoseEvent") == 0)
	{
		if(poseMutex_.try_lock())
		{
			poseEvents_.clear();
			poseEvents_.push_back(((rtabmap::PoseEvent*)event)->pose());
			poseMutex_.unlock();
		}
	}

	if(event->getClassName().compare("CameraTangoEvent") == 0)
	{
		rtabmap::CameraTangoEvent * tangoEvent = (rtabmap::CameraTangoEvent*)event;

		// Call JAVA callback with tango event msg
		bool success = false;
		if(jvm && RTABMapActivity)
		{
			JNIEnv *env = 0;
			jint rs = jvm->AttachCurrentThread(&env, NULL);
			if(rs == JNI_OK && env)
			{
				jclass clazz = env->GetObjectClass(RTABMapActivity);
				if(clazz)
				{
					jmethodID methodID = env->GetMethodID(clazz, "tangoEventCallback", "(ILjava/lang/String;Ljava/lang/String;)V" );
					if(methodID)
					{
						env->CallVoidMethod(RTABMapActivity, methodID,
								tangoEvent->type(),
								env->NewStringUTF(tangoEvent->key().c_str()),
								env->NewStringUTF(tangoEvent->value().c_str()));
						success = true;
					}
				}
			}
			jvm->DetachCurrentThread();
		}
		if(!success)
		{
			UERROR("Failed to call RTABMapActivity::tangoEventCallback");
		}
	}

	if(event->getClassName().compare("RtabmapEventInit") == 0)
	{
		LOGI("Received RtabmapEventInit!");
		status_.first = ((rtabmap::RtabmapEventInit*)event)->getStatus();
		status_.second = ((rtabmap::RtabmapEventInit*)event)->getInfo();

		if(status_.first == rtabmap::RtabmapEventInit::kClosed)
		{
			clearSceneOnNextRender_ = true;
		}

		// Call JAVA callback with init msg
		bool success = false;
		if(jvm && RTABMapActivity)
		{
			JNIEnv *env = 0;
			jint rs = jvm->AttachCurrentThread(&env, NULL);
			if(rs == JNI_OK && env)
			{
				jclass clazz = env->GetObjectClass(RTABMapActivity);
				if(clazz)
				{
					jmethodID methodID = env->GetMethodID(clazz, "rtabmapInitEventCallback", "(ILjava/lang/String;)V" );
					if(methodID)
					{
						env->CallVoidMethod(RTABMapActivity, methodID,
								status_.first,
								env->NewStringUTF(status_.second.c_str()));
						success = true;
					}
				}
			}
			jvm->DetachCurrentThread();
		}
		if(!success)
		{
			UERROR("Failed to call RTABMapActivity::rtabmapInitEventsCallback");
		}
	}

	if(event->getClassName().compare("PostRenderEvent") == 0)
	{
		LOGI("Received PostRenderEvent!");
		const rtabmap::Statistics & stats = ((PostRenderEvent*)event)->getStats();
		int nodes = (int)uValue(stats.data(), rtabmap::Statistics::kMemoryWorking_memory_size(), 0.0f) +
				uValue(stats.data(), rtabmap::Statistics::kMemoryShort_time_memory_size(), 0.0f);
		int words = (int)uValue(stats.data(), rtabmap::Statistics::kKeypointDictionary_size(), 0.0f);
		float updateTime = uValue(stats.data(), rtabmap::Statistics::kTimingTotal(), 0.0f);
		int loopClosureId = stats.loopClosureId()>0?stats.loopClosureId():stats.proximityDetectionId()>0?stats.proximityDetectionId():0;
		int highestHypId = (int)uValue(stats.data(), rtabmap::Statistics::kLoopHighest_hypothesis_id(), 0.0f);
		int databaseMemoryUsed = (int)uValue(stats.data(), rtabmap::Statistics::kMemoryDatabase_memory_used(), 0.0f);
		int inliers = (int)uValue(stats.data(), rtabmap::Statistics::kLoopVisual_inliers(), 0.0f);
		int rejected = (int)uValue(stats.data(), rtabmap::Statistics::kLoopRejectedHypothesis(), 0.0f);
		int featuresExtracted = stats.getSignatures().size()?stats.getSignatures().rbegin()->second.getWords().size():0;
		float hypothesis = uValue(stats.data(), rtabmap::Statistics::kLoopHighest_hypothesis_value(), 0.0f);

		// Call JAVA callback with some stats
		UINFO("Send statistics to GUI");
		bool success = false;
		if(jvm && RTABMapActivity)
		{
			JNIEnv *env = 0;
			jint rs = jvm->AttachCurrentThread(&env, NULL);
			if(rs == JNI_OK && env)
			{
				jclass clazz = env->GetObjectClass(RTABMapActivity);
				if(clazz)
				{
					jmethodID methodID = env->GetMethodID(clazz, "updateStatsCallback", "(IIIIFIIIIIFIFI)V" );
					if(methodID)
					{
						env->CallVoidMethod(RTABMapActivity, methodID,
								nodes,
								words,
								totalPoints_,
								totalPolygons_,
								updateTime,
								loopClosureId,
								highestHypId,
								databaseMemoryUsed,
								inliers,
								featuresExtracted,
								hypothesis,
								lastDrawnCloudsCount_,
								renderingFPS_,
								rejected);
						success = true;
					}
				}
			}
			jvm->DetachCurrentThread();
		}
		if(!success)
		{
			UERROR("Failed to call RTABMapActivity::updateStatsCallback");
		}
	}
}
Example #21
void CalibrationDialog::processImages(const cv::Mat & imageLeft, const cv::Mat & imageRight, const QString & cameraName)
{
	processingData_ = true;
	if(cameraName_.isEmpty())
	{
		cameraName_ = "0000";
		if(!cameraName.isEmpty())
		{
			cameraName_ = cameraName;
		}
	}
	if(ui_->label_serial->text().isEmpty())
	{
		ui_->label_serial->setText(cameraName_);

	}
	std::vector<cv::Mat> inputRawImages(2);
	if(ui_->checkBox_switchImages->isChecked())
	{
		inputRawImages[0] = imageRight;
		inputRawImages[1] = imageLeft;
	}
	else
	{
		inputRawImages[0] = imageLeft;
		inputRawImages[1] = imageRight;
	}

	std::vector<cv::Mat> images(2);
	images[0] = inputRawImages[0];
	images[1] = inputRawImages[1];
	imageSize_[0] = images[0].size();
	imageSize_[1] = images[1].size();

	bool boardFound[2] = {false};
	bool boardAccepted[2] = {false};
	bool readyToCalibrate[2] = {false};

	std::vector<std::vector<cv::Point2f> > pointBuf(2);

	bool depthDetected = false;
	for(int id=0; id<(stereo_?2:1); ++id)
	{
		cv::Mat viewGray;
		if(!images[id].empty())
		{
			if(images[id].type() == CV_16UC1)
			{
				depthDetected = true;
				//assume IR image: convert to gray scaled
				const float factor = 255.0f / float((maxIrs_[id] - minIrs_[id]));
				viewGray = cv::Mat(images[id].rows, images[id].cols, CV_8UC1);
				for(int i=0; i<images[id].rows; ++i)
				{
					for(int j=0; j<images[id].cols; ++j)
					{
						viewGray.at<unsigned char>(i, j) = (unsigned char)std::min(float(std::max(images[id].at<unsigned short>(i,j) - minIrs_[id], 0)) * factor, 255.0f);
					}
				}
				cvtColor(viewGray, images[id], cv::COLOR_GRAY2BGR); // convert to show detected points in color
			}
			else if(images[id].channels() == 3)
			{
				cvtColor(images[id], viewGray, cv::COLOR_BGR2GRAY);
			}
			else
			{
				viewGray = images[id];
				cvtColor(viewGray, images[id], cv::COLOR_GRAY2BGR); // convert to show detected points in color
			}
		}
		else
		{
			UERROR("Image %d is empty!! Should not!", id);
		}

		minIrs_[id] = 0;
		maxIrs_[id] = 0x7FFF;

		//Do it only if not yet calibrated
		if(!ui_->pushButton_save->isEnabled())
		{
			cv::Size boardSize(ui_->spinBox_boardWidth->value(), ui_->spinBox_boardHeight->value());
			if(!viewGray.empty())
			{
				int flags = CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_NORMALIZE_IMAGE;

				if(!viewGray.empty())
				{
					int maxScale = viewGray.cols < 640?2:1;
					for( int scale = 1; scale <= maxScale; scale++ )
					{
						cv::Mat timg;
						if( scale == 1 )
							timg = viewGray;
						else
							cv::resize(viewGray, timg, cv::Size(), scale, scale, CV_INTER_CUBIC);
						boardFound[id] = cv::findChessboardCorners(timg, boardSize, pointBuf[id], flags);
						if(boardFound[id])
						{
							if( scale > 1 )
							{
								cv::Mat cornersMat(pointBuf[id]);
								cornersMat *= 1./scale;
							}
							break;
						}
					}
				}
			}

			if(boardFound[id]) // If done with success,
			{
				// improve the found corners' coordinate accuracy for chessboard
				float minSquareDistance = -1.0f;
				for(unsigned int i=0; i<pointBuf[id].size()-1; ++i)
				{
					float d = cv::norm(pointBuf[id][i] - pointBuf[id][i+1]);
					if(minSquareDistance == -1.0f || minSquareDistance > d)
					{
						minSquareDistance = d;
					}
				}
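				// Use half the smallest distance between neighbouring corners as the sub-pixel search window, so adjacent corners' windows don't overlap.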
				float radius = minSquareDistance/2.0f +0.5f;
				cv::cornerSubPix( viewGray, pointBuf[id], cv::Size(radius, radius), cv::Size(-1,-1),
						cv::TermCriteria( CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1 ));

				// Draw the corners.
				cv::drawChessboardCorners(images[id], boardSize, cv::Mat(pointBuf[id]), boardFound[id]);

				std::vector<float> params(4,0);
				getParams(pointBuf[id], boardSize, imageSize_[id], params[0], params[1], params[2], params[3]);

				bool addSample = true;
				for(unsigned int i=0; i<imageParams_[id].size(); ++i)
				{
					if(fabs(params[0] - imageParams_[id][i].at(0)) < 0.1 && // x
						fabs(params[1] - imageParams_[id][i].at(1)) < 0.1 && // y
						fabs(params[2] - imageParams_[id][i].at(2)) < 0.05 && // size
						fabs(params[3] - imageParams_[id][i].at(3)) < 0.1) // skew
					{
						addSample = false;
					}
				}
				if(addSample)
				{
					boardAccepted[id] = true;

					imagePoints_[id].push_back(pointBuf[id]);
					imageParams_[id].push_back(params);

					UINFO("[%d] Added board, total=%d. (x=%f, y=%f, size=%f, skew=%f)", id, (int)imagePoints_[id].size(), params[0], params[1], params[2], params[3]);
				}

				// update statistics
				std::vector<float> xRange(2, imageParams_[id][0].at(0));
				std::vector<float> yRange(2, imageParams_[id][0].at(1));
				std::vector<float> sizeRange(2, imageParams_[id][0].at(2));
				std::vector<float> skewRange(2, imageParams_[id][0].at(3));
				for(unsigned int i=1; i<imageParams_[id].size(); ++i)
				{
					xRange[0] = imageParams_[id][i].at(0) < xRange[0] ? imageParams_[id][i].at(0) : xRange[0];
					xRange[1] = imageParams_[id][i].at(0) > xRange[1] ? imageParams_[id][i].at(0) : xRange[1];
					yRange[0] = imageParams_[id][i].at(1) < yRange[0] ? imageParams_[id][i].at(1) : yRange[0];
					yRange[1] = imageParams_[id][i].at(1) > yRange[1] ? imageParams_[id][i].at(1) : yRange[1];
					sizeRange[0] = imageParams_[id][i].at(2) < sizeRange[0] ? imageParams_[id][i].at(2) : sizeRange[0];
					sizeRange[1] = imageParams_[id][i].at(2) > sizeRange[1] ? imageParams_[id][i].at(2) : sizeRange[1];
					skewRange[0] = imageParams_[id][i].at(3) < skewRange[0] ? imageParams_[id][i].at(3) : skewRange[0];
					skewRange[1] = imageParams_[id][i].at(3) > skewRange[1] ? imageParams_[id][i].at(3) : skewRange[1];
				}
				//UINFO("Stats [%d]:", id);
				//UINFO("  Count = %d", (int)imagePoints_[id].size());
				//UINFO("  x =    [%f -> %f]", xRange[0], xRange[1]);
				//UINFO("  y =    [%f -> %f]", yRange[0], yRange[1]);
				//UINFO("  size = [%f -> %f]", sizeRange[0], sizeRange[1]);
				//UINFO("  skew = [%f -> %f]", skewRange[0], skewRange[1]);

				float xGood = xRange[1] - xRange[0];
				float yGood = yRange[1] - yRange[0];
				float sizeGood = sizeRange[1] - sizeRange[0];
				float skewGood = skewRange[1] - skewRange[0];

				if(id == 0)
				{
					ui_->progressBar_x->setValue(xGood*100);
					ui_->progressBar_y->setValue(yGood*100);
					ui_->progressBar_size->setValue(sizeGood*100);
					ui_->progressBar_skew->setValue(skewGood*100);
					if((int)imagePoints_[id].size() > ui_->progressBar_count->maximum())
					{
						ui_->progressBar_count->setMaximum((int)imagePoints_[id].size());
					}
					ui_->progressBar_count->setValue((int)imagePoints_[id].size());
				}
				else
				{
					ui_->progressBar_x_2->setValue(xGood*100);
					ui_->progressBar_y_2->setValue(yGood*100);
					ui_->progressBar_size_2->setValue(sizeGood*100);
					ui_->progressBar_skew_2->setValue(skewGood*100);

					if((int)imagePoints_[id].size() > ui_->progressBar_count_2->maximum())
					{
						ui_->progressBar_count_2->setMaximum((int)imagePoints_[id].size());
					}
					ui_->progressBar_count_2->setValue((int)imagePoints_[id].size());
				}

				if(imagePoints_[id].size() >= COUNT_MIN && xGood > 0.5 && yGood > 0.5 && sizeGood > 0.4 && skewGood > 0.5)
				{
					readyToCalibrate[id] = true;
				}

				//update IR values
				if(inputRawImages[id].type() == CV_16UC1)
				{
					//update min max IR if the chessboard was found
					minIrs_[id] = 0xFFFF;
					maxIrs_[id] = 0;
					for(size_t i = 0; i < pointBuf[id].size(); ++i)
					{
						const cv::Point2f &p = pointBuf[id][i];
						cv::Rect roi(std::max(0, (int)p.x - 3), std::max(0, (int)p.y - 3), 6, 6);

						roi.width = std::min(roi.width, inputRawImages[id].cols - roi.x);
						roi.height = std::min(roi.height, inputRawImages[id].rows - roi.y);

						//find minMax in the roi
						double min, max;
						cv::minMaxLoc(inputRawImages[id](roi), &min, &max);
						if(min < minIrs_[id])
						{
							minIrs_[id] = min;
						}
						if(max > maxIrs_[id])
						{
							maxIrs_[id] = max;
						}
					}
				}
			}
		}
	}
	ui_->label_baseline->setVisible(!depthDetected);
	ui_->label_baseline_name->setVisible(!depthDetected);

	if(stereo_ && ((boardAccepted[0] && boardFound[1]) || (boardAccepted[1] && boardFound[0])))
	{
		stereoImagePoints_[0].push_back(pointBuf[0]);
		stereoImagePoints_[1].push_back(pointBuf[1]);
		UINFO("Add stereo image points (size=%d)", (int)stereoImagePoints_[0].size());
	}

	if(!stereo_ && readyToCalibrate[0])
	{
		ui_->pushButton_calibrate->setEnabled(true);
	}
	else if(stereo_ && readyToCalibrate[0] && readyToCalibrate[1] && stereoImagePoints_[0].size())
	{
		ui_->pushButton_calibrate->setEnabled(true);
	}

	if(ui_->radioButton_rectified->isChecked())
	{
		if(models_[0].isValid())
		{
			images[0] = models_[0].rectifyImage(images[0]);
		}
		if(models_[1].isValid())
		{
			images[1] = models_[1].rectifyImage(images[1]);
		}
	}
	else if(ui_->radioButton_stereoRectified->isChecked() &&
			(stereoModel_.left().isValid() &&
			stereoModel_.right().isValid()&&
			(!ui_->label_baseline->isVisible() || stereoModel_.baseline() > 0.0)))
	{
		images[0] = stereoModel_.left().rectifyImage(images[0]);
		images[1] = stereoModel_.right().rectifyImage(images[1]);
	}

	if(ui_->checkBox_showHorizontalLines->isChecked())
	{
		for(int id=0; id<(stereo_?2:1); ++id)
		{
			int step = imageSize_[id].height/16;
			for(int i=step; i<imageSize_[id].height; i+=step)
			{
				cv::line(images[id], cv::Point(0, i), cv::Point(imageSize_[id].width, i), CV_RGB(0,255,0));
			}
		}
	}

	ui_->label_left->setText(tr("%1x%2").arg(images[0].cols).arg(images[0].rows));

	//show frame
	ui_->image_view->setImage(uCvMat2QImage(images[0]).mirrored(ui_->checkBox_mirror->isChecked(), false));
	if(stereo_)
	{
		ui_->label_right->setText(tr("%1x%2").arg(images[1].cols).arg(images[1].rows));
		ui_->image_view_2->setImage(uCvMat2QImage(images[1]).mirrored(ui_->checkBox_mirror->isChecked(), false));
	}
	processingData_ = false;
}
Example #22
void LoopClosureViewer::updateView(const Transform & transform, const ParametersMap & parameters)
{
	if(sA_.id()>0 && sB_.id()>0)
	{
		int decimation = 1;
		float maxDepth = 0;
		float minDepth = 0;

		if(!ui_->checkBox_rawCloud->isChecked())
		{
			decimation = decimation_;
			maxDepth = maxDepth_;
			minDepth = minDepth_;
		}

		UDEBUG("decimation = %d", decimation);
		UDEBUG("maxDepth = %f", maxDepth);
		UDEBUG("minDepth = %d", minDepth);

		Transform t;
		if(!transform.isNull())
		{
			transform_ = transform;
			t = transform;
		}
		else if(!transform_.isNull())
		{
			t = transform_;
		}
		else
		{
			t = sB_.getPose();
		}

		UDEBUG("t= %s", t.prettyPrint().c_str());
		ui_->label_transform->setText(QString("(%1)").arg(t.prettyPrint().c_str()));
		if(!t.isNull())
		{
			//cloud 3d
			pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloudA, cloudB;
			cloudA = util3d::cloudRGBFromSensorData(sA_.sensorData(), decimation, maxDepth, minDepth, 0, parameters);
			cloudB = util3d::cloudRGBFromSensorData(sB_.sensorData(), decimation, maxDepth, minDepth, 0, parameters);

			//cloud 2d
			pcl::PointCloud<pcl::PointXYZ>::Ptr scanA, scanB;
			scanA = util3d::laserScanToPointCloud(sA_.sensorData().laserScanRaw(), sA_.sensorData().laserScanRaw().localTransform());
			scanB = util3d::laserScanToPointCloud(sB_.sensorData().laserScanRaw(), sB_.sensorData().laserScanRaw().localTransform());

			ui_->label_idA->setText(QString("[%1 (%2) -> %3 (%4)]").arg(sB_.id()).arg(cloudB->size()).arg(sA_.id()).arg(cloudA->size()));

			if(cloudA->size())
			{
				ui_->cloudViewerTransform->addCloud("cloud0", cloudA);
			}
			if(cloudB->size())
			{
				cloudB = util3d::transformPointCloud(cloudB, t);
				ui_->cloudViewerTransform->addCloud("cloud1", cloudB);
			}
			if(scanA->size())
			{
				ui_->cloudViewerTransform->addCloud("scan0", scanA);
			}
			if(scanB->size())
			{
				scanB = util3d::transformPointCloud(scanB, t);
				ui_->cloudViewerTransform->addCloud("scan1", scanB);
			}
		}
		else
		{
			UERROR("loop transform is null !?!?");
			ui_->cloudViewerTransform->removeAllClouds();
		}
		ui_->cloudViewerTransform->update();
	}
}
Example #23
	void computeDescriptors( const cv::Mat& image,
		std::vector<cv::KeyPoint>& keypoints,
		cv::Mat& descriptors)
	{
		UERROR("GPUFAST:computeDescriptors() Should not be used!");
	}
Example #24
void DBReader::mainLoop()
{
	OdometryEvent odom = this->getNextData();
	if(odom.data().id())
	{
		std::string goalId;
		double previousStamp = odom.data().stamp();
		if(previousStamp == 0)
		{
			odom.data().setStamp(UTimer::now());
		}

		if(!_goalsIgnored &&
		   odom.data().userDataRaw().type() == CV_8SC1 &&
		   odom.data().userDataRaw().cols >= 7 && // including null str ending
		   odom.data().userDataRaw().rows == 1 &&
		   memcmp(odom.data().userDataRaw().data, "GOAL:", 5) == 0)
		{
			//GOAL format detected, remove it from the user data and send it as goal event
			std::string goalStr = (const char *)odom.data().userDataRaw().data;
			if(!goalStr.empty())
			{
				std::list<std::string> strs = uSplit(goalStr, ':');
				if(strs.size() == 2)
				{
					goalId = *strs.rbegin();
					odom.data().setUserData(cv::Mat());
				}
			}
		}
		if(!_odometryIgnored)
		{
			if(odom.pose().isNull())
			{
				UWARN("Reading the database: odometry is null! "
					  "Please set \"Ignore odometry = true\" if there is "
					  "no odometry in the database.");
			}
			this->post(new OdometryEvent(odom));
		}
		else
		{
			this->post(new CameraEvent(odom.data()));
		}

		if(!goalId.empty())
		{
			double delay = 0.0;
			if(!_ignoreGoalDelay && _currentId != _ids.end())
			{
				// get stamp for the next signature to compute the delay
				// that was used originally for planning
				int weight;
				std::string label;
				double stamp;
				int mapId;
				Transform pose;
				_dbDriver->getNodeInfo(*_currentId, pose, mapId, weight, label, stamp);
				if(previousStamp && stamp && stamp > previousStamp)
				{
					delay = stamp - previousStamp;
				}
			}

			if(delay > 0.0)
			{
				UWARN("Goal \"%s\" detected, posting it! Waiting %f seconds before sending next data...",
					   goalId.c_str(), delay);
			}
			else
			{
				UWARN("Goal \"%s\" detected, posting it!", goalId.c_str());
			}

			if(uIsInteger(goalId))
			{
				this->post(new RtabmapEventCmd(RtabmapEventCmd::kCmdGoal, atoi(goalId.c_str())));
			}
			else
			{
				this->post(new RtabmapEventCmd(RtabmapEventCmd::kCmdGoal, goalId));
			}

			if(delay > 0.0)
			{
				uSleep(delay*1000);
			}
		}

	}
	else if(!this->isKilled())
	{
		UINFO("no more images...");
		if(_paths.size() > 1)
		{
			_paths.pop_front();
			UWARN("Loading next database \"%s\"...", _paths.front().c_str());
			if(!this->init())
			{
				UERROR("Failed to initialize the next database \"%s\"", _paths.front().c_str());
				this->kill();
				this->post(new CameraEvent());
			}
		}
		else
		{
			this->kill();
			this->post(new CameraEvent());
		}

	}

}
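
mainLoop() recognizes a goal only when the node's user data is a single-row CV_8SC1 buffer that starts with "GOAL:" and is null-terminated (hence the cols >= 7 check: five header characters, at least one id character, and the terminator). A sketch of how such a buffer could be packed on the writing side, under those assumptions (makeGoalUserData is hypothetical, not part of DBReader):

#include <opencv2/core/core.hpp>
#include <cstring>
#include <string>

// Pack "GOAL:<id>" plus its null terminator into the 1xN CV_8SC1 layout
// that DBReader::mainLoop() checks for above.
cv::Mat makeGoalUserData(const std::string & goalId)
{
	std::string str = "GOAL:" + goalId;
	cv::Mat userData(1, (int)str.size() + 1, CV_8SC1); // +1 for '\0'
	memcpy(userData.data, str.c_str(), str.size() + 1);
	return userData;
}
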
Example #25
int main(int argc, char * argv[])
{
	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kInfo);

	int device = 0;
	std::string path;
	float rate = 0.0f;
	std::string calibrationFile;
	for(int i=1; i<argc; ++i)
	{
		if(strcmp(argv[i], "--rate") == 0)
		{
			++i;
			if(i < argc)
			{
				rate = uStr2Float(argv[i]);
				if(rate < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "--device") == 0)
		{
			++i;
			if(i < argc)
			{
				device = std::atoi(argv[i]);
				if(device < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "--path") == 0)
		{
			++i;
			if(i < argc)
			{
				path = argv[i];
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "--calibration") == 0)
		{
			++i;
			if(i < argc)
			{
				calibrationFile = argv[i];
			}
			else
			{
				showUsage();
			}
			continue;
		}

		printf("Unrecognized option : %s\n", argv[i]);
		showUsage();
	}

	if(path.empty())
	{
		UINFO("Using device %d", device);
	}
	else
	{
		UINFO("Using path %s", path.c_str());
	}

	rtabmap::Camera * camera = 0;
	rtabmap::DBReader * dbReader = 0;

	if(!path.empty())
	{
		if(UFile::exists(path))
		{
			if(UFile::getExtension(path).compare("db") == 0)
			{
				dbReader = new rtabmap::DBReader(path, rate);
			}
			else
			{
				camera = new rtabmap::CameraVideo(path, rate);
			}
		}
		else if(UDirectory::exists(path))
		{
			camera = new rtabmap::CameraImages(path, rate);
		}
		else
		{
			UERROR("Path not valid! \"%s\"", path.c_str());
			return -1;
		}
	}
	else
	{
		camera = new rtabmap::CameraVideo(device, rate);
	}

	if(camera)
	{
		if(!calibrationFile.empty())
		{
			UINFO("Set calibration: %s", calibrationFile.c_str());
		}
		if(!camera->init(UDirectory::getDir(calibrationFile), UFile::getName(calibrationFile)))
		{
			delete camera;
			UERROR("Cannot initialize the camera.");
			return -1;
		}
	}

	if(dbReader)
	{
		if(!dbReader->init())
		{
			delete dbReader;
			UERROR("Cannot initialize the camera.");
			return -1;
		}
	}

	cv::Mat rgb;
	rgb = camera?camera->takeImage().imageRaw():dbReader->getNextData().data().imageRaw();
	cv::namedWindow("Video", CV_WINDOW_AUTOSIZE); // create window
	while(!rgb.empty())
	{
		cv::imshow("Video", rgb); // show frame

		int c = cv::waitKey(10); // wait 10 ms or for key stroke
		if(c == 27)
			break; // if ESC, break and quit

		rgb = camera?camera->takeImage().imageRaw():dbReader->getNextData().data().imageRaw();
	}
	cv::destroyWindow("Video");
	if(camera)
	{
		delete camera;
	}
	if(dbReader)
	{
		delete dbReader;
	}
	return 0;
}
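
The frame-polling expression is written out twice, once before the loop and once at its end. As a refactoring sketch only (not the original code), a small lambda keeps a single definition of the camera-versus-database choice:

// Hypothetical refactoring: one place decides where the next frame comes from.
auto nextFrame = [&]() -> cv::Mat
{
	return camera ? camera->takeImage().imageRaw()
	              : dbReader->getNextData().data().imageRaw();
};

cv::Mat rgb = nextFrame();
while(!rgb.empty())
{
	cv::imshow("Video", rgb);
	if(cv::waitKey(10) == 27)
		break; // ESC quits
	rgb = nextFrame();
}
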
Example #26
OdometryEvent DBReader::getNextData()
{
	OdometryEvent odom;
	if(_dbDriver)
	{
		if(!this->isKilled() && _currentId != _ids.end())
		{
			int mapId;
			SensorData data;
			_dbDriver->getNodeData(*_currentId, data);

			// info
			Transform pose;
			int weight;
			std::string label;
			double stamp;
			_dbDriver->getNodeInfo(*_currentId, pose, mapId, weight, label, stamp);

			cv::Mat infMatrix = cv::Mat::eye(6,6,CV_64FC1);
			if(!_odometryIgnored)
			{
				std::map<int, Link> links;
				_dbDriver->loadLinks(*_currentId, links, Link::kNeighbor);
				if(links.size())
				{
					// assume the first is the backward neighbor, take its variance
					infMatrix = links.begin()->second.infMatrix();
				}
			}
			else
			{
				pose.setNull();
			}

			int seq = *_currentId;
			++_currentId;
			if(data.imageCompressed().empty())
			{
				UWARN("No image loaded from the database for id=%d!", *_currentId);
			}

			// Frame rate
			if(_frameRate < 0.0f)
			{
				if(stamp == 0)
				{
					UERROR("The option to use database stamps is set (framerate<0), but there are no stamps saved in the database! Aborting...");
					this->kill();
				}
				else if(_previousMapID == mapId && _previousStamp > 0)
				{
					int sleepTime = 1000.0*(stamp-_previousStamp) - 1000.0*_timer.getElapsedTime();
					if(sleepTime > 10000)
					{
						UWARN("Detected long delay (%d sec, stamps = %f vs %f). Waiting a maximum of 10 seconds.",
								sleepTime/1000, _previousStamp, stamp);
						sleepTime = 10000;
					}
					if(sleepTime > 2)
					{
						uSleep(sleepTime-2);
					}

					// Add precision at the cost of a small overhead
					while(_timer.getElapsedTime() < (stamp-_previousStamp)-0.000001)
					{
						//
					}

					double slept = _timer.getElapsedTime();
					_timer.start();
					UDEBUG("slept=%fs vs target=%fs", slept, stamp-_previousStamp);
				}
				_previousStamp = stamp;
				_previousMapID = mapId;
			}
			else if(_frameRate>0.0f)
			{
				int sleepTime = (1000.0f/_frameRate - 1000.0f*_timer.getElapsedTime());
				if(sleepTime > 2)
				{
					uSleep(sleepTime-2);
				}

				// Add precision at the cost of a small overhead
				while(_timer.getElapsedTime() < 1.0/double(_frameRate)-0.000001)
				{
					//
				}

				double slept = _timer.getElapsedTime();
				_timer.start();
				UDEBUG("slept=%fs vs target=%fs", slept, 1.0/double(_frameRate));
			}

			if(!this->isKilled())
			{
				data.uncompressData();
				data.setId(seq);
				data.setStamp(stamp);
				UDEBUG("Laser=%d RGB/Left=%d Depth/Right=%d, UserData=%d",
						data.laserScanRaw().empty()?0:1,
						data.imageRaw().empty()?0:1,
						data.depthOrRightRaw().empty()?0:1,
						data.userDataRaw().empty()?0:1);

				odom = OdometryEvent(data, pose, infMatrix.inv());
			}
		}
	}
	else
	{
		UERROR("Not initialized...");
	}
	return odom;
}
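
Both pacing branches in getNextData() follow the same pattern: uSleep() coarsely to about 2 ms short of the target period, then busy-wait on the timer for the last fraction of a millisecond, then restart the timer. Factored into a standalone sketch under those assumptions (waitForPeriod is a hypothetical helper; UTimer and uSleep are the utilities already used above):

// Coarse sleep plus a short busy-wait, trading a little CPU for
// sub-millisecond pacing accuracy.
void waitForPeriod(UTimer & timer, double periodSec)
{
	int sleepTime = int(1000.0 * (periodSec - timer.getElapsedTime()));
	if(sleepTime > 2)
	{
		uSleep(sleepTime - 2); // coarse sleep, leave ~2 ms of slack
	}
	while(timer.getElapsedTime() < periodSec - 0.000001)
	{
		// spin for the remaining fraction of a millisecond
	}
	timer.start(); // restart the timer for the next period
}
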
Example #27
// return not null transform if odometry is correctly computed
Transform OdometryDVO::computeTransform(
		SensorData & data,
		const Transform & guess,
		OdometryInfo * info)
{
	Transform t;

#ifdef RTABMAP_DVO
	UTimer timer;

	if(data.imageRaw().empty() ||
		data.imageRaw().rows != data.depthOrRightRaw().rows ||
		data.imageRaw().cols != data.depthOrRightRaw().cols)
	{
		UERROR("Not supported input!");
		return t;
	}

	if(!(data.cameraModels().size() == 1 && data.cameraModels()[0].isValidForReprojection()))
	{
		UERROR("Invalid camera model! Only single RGB-D camera supported by DVO. Try another odometry approach.");
		return t;
	}

	if(dvo_ == 0)
	{
		dvo::DenseTracker::Config cfg = dvo::DenseTracker::getDefaultConfig();

		dvo_ = new dvo::DenseTracker(cfg);
	}

	cv::Mat grey, grey_s16, depth_float;
	if(data.imageRaw().type() != CV_32FC1)
	{
		if(data.imageRaw().type() == CV_8UC3)
		{
			cv::cvtColor(data.imageRaw(), grey, CV_BGR2GRAY);
		}
		else
		{
			grey = data.imageRaw();
		}

		grey.convertTo(grey_s16, CV_32F);
	}
	else
	{
		grey_s16 = data.imageRaw();
	}

	// make sure all zeros are NAN
	if(data.depthRaw().type() == CV_32FC1)
	{
		depth_float = data.depthRaw();
		for(int i=0; i<depth_float.rows; ++i)
		{
			for(int j=0; j<depth_float.cols; ++j)
			{
				float & d = depth_float.at<float>(i,j);
				if(d == 0.0f)
				{
					d = NAN;
				}
			}
		}
	}
	else if(data.depthRaw().type() == CV_16UC1)
	{
		depth_float = cv::Mat(data.depthRaw().size(), CV_32FC1);
		for(int i=0; i<data.depthRaw().rows; ++i)
		{
			for(int j=0; j<data.depthRaw().cols; ++j)
			{
				float d = float(data.depthRaw().at<unsigned short>(i,j))/1000.0f;
				depth_float.at<float>(i, j) = d==0.0f?NAN:d;
			}
		}
	}
	else
	{
		UFATAL("Unknown depth format!");
	}

	if(camera_ == 0)
	{
		dvo::core::IntrinsicMatrix intrinsics = dvo::core::IntrinsicMatrix::create(
						data.cameraModels()[0].fx(),
						data.cameraModels()[0].fy(),
						data.cameraModels()[0].cx(),
						data.cameraModels()[0].cy());
		camera_ = new dvo::core::RgbdCameraPyramid(
				data.cameraModels()[0].imageWidth(),
				data.cameraModels()[0].imageHeight(),
				intrinsics);
	}

	dvo::core::RgbdImagePyramid * current = new dvo::core::RgbdImagePyramid(*camera_, grey_s16, depth_float);

	cv::Mat covariance;
	if(reference_ == 0)
	{
		reference_ = current;
		if(!lost_)
		{
			t.setIdentity();
		}
		covariance = cv::Mat::eye(6,6,CV_64FC1) * 9999.0;
	}
	else
	{
		dvo::DenseTracker::Result result;
		dvo_->match(*reference_, *current, result);
		t = Transform::fromEigen3d(result.Transformation);

		if(result.Information(0,0) > 0.0 && result.Information(0,0) != 1.0)
		{
			lost_ = false;
			cv::Mat information = cv::Mat::eye(6,6, CV_64FC1);
			memcpy(information.data, result.Information.data(), 36*sizeof(double));
			covariance = information.inv();
			covariance *= 100.0; // to be on the same scale as loop closure detection

			Transform currentMotion = t;
			t = motionFromKeyFrame_.inverse() * t;

			// TODO make parameters?
			if(currentMotion.getNorm() > 0.01 || currentMotion.getAngle() > 0.01)
			{
				if(info)
				{
					info->keyFrameAdded = true;
				}
				// new keyframe
				delete reference_;
				reference_ = current;
				motionFromKeyFrame_.setIdentity();
			}
			else
			{
				delete current;
				motionFromKeyFrame_ = currentMotion;
			}
		}
		else
		{
			lost_ = true;
			delete reference_;
			delete current;
			reference_ = 0; // this will make tracking restart from the next frame
			motionFromKeyFrame_.setIdentity();
			t.setNull();
			covariance = cv::Mat::eye(6,6,CV_64FC1) * 9999.0;
			UWARN("dvo failed to estimate motion, tracking will be reinitialized on next frame.");
		}
	}

	const Transform & localTransform = data.cameraModels()[0].localTransform();
	if(!t.isNull() && !t.isIdentity() && !localTransform.isIdentity() && !localTransform.isNull())
	{
		// from camera frame to base frame
		t = localTransform * t * localTransform.inverse();
	}

	if(info)
	{
		info->type = (int)kTypeDVO;
		info->covariance = covariance;
	}

	UINFO("Odom update time = %fs", timer.elapsed());

#else
	UERROR("RTAB-Map is not built with DVO support! Select another visual odometry approach.");
#endif
	return t;
}
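
The last step of computeTransform() conjugates the estimate with the camera's local transform: DVO computes motion in the optical (camera) frame, and T_base = L * T_cam * L^-1 re-expresses that same motion in the robot base frame. As a sketch of that one line (toBaseFrame is a hypothetical helper using the same Transform type):

// Re-express a motion estimated in the camera frame in the base frame by
// conjugating with the static camera-to-base transform, as done above.
Transform toBaseFrame(const Transform & cameraMotion, const Transform & localTransform)
{
	return localTransform * cameraMotion * localTransform.inverse();
}
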
Example #28
// return not null transform if odometry is correctly computed
Transform OdometryICP::computeTransform(const SensorData & data, OdometryInfo * info)
{
	UTimer timer;
	Transform output;

	bool hasConverged = false;
	double variance = 0;
	unsigned int minPoints = 100;
	if(!data.depth().empty())
	{
		if(data.depth().type() == CV_8UC1)
		{
			UERROR("ICP 3D cannot be done on stereo images!");
			return output;
		}

		pcl::PointCloud<pcl::PointXYZ>::Ptr newCloudXYZ = util3d::getICPReadyCloud(
						data.depth(),
						data.fx(),
						data.fy(),
						data.cx(),
						data.cy(),
						_decimation,
						this->getMaxDepth(),
						_voxelSize,
						_samples,
						data.localTransform());

		if(_pointToPlane)
		{
			pcl::PointCloud<pcl::PointNormal>::Ptr newCloud = util3d::computeNormals(newCloudXYZ);

			newCloud = util3d::removeNaNNormalsFromPointCloud<pcl::PointNormal>(newCloud);
			if(newCloudXYZ->size() != newCloud->size())
			{
				UWARN("removed nan normals...");
			}

			if(_previousCloudNormal->size() > minPoints && newCloud->size() > minPoints)
			{
				int correspondences = 0;
				Transform transform = util3d::icpPointToPlane(newCloud,
						_previousCloudNormal,
						_maxCorrespondenceDistance,
						_maxIterations,
						&hasConverged,
						&variance,
						&correspondences);

				// verify if there are enough correspondences
				float correspondencesRatio = float(correspondences)/float(_previousCloudNormal->size()>newCloud->size()?_previousCloudNormal->size():newCloud->size());

				if(!transform.isNull() && hasConverged &&
				   correspondencesRatio >= _correspondenceRatio)
				{
					output = transform;
					_previousCloudNormal = newCloud;
				}
				else
				{
					UWARN("Transform not valid (hasConverged=%s variance = %f)",
							hasConverged?"true":"false", variance);
				}
			}
			else if(newCloud->size() > minPoints)
			{
				output.setIdentity();
				_previousCloudNormal = newCloud;
			}
		}
		else
		{
			//point to point
			if(_previousCloud->size() > minPoints && newCloudXYZ->size() > minPoints)
			{
				int correspondences = 0;
				Transform transform = util3d::icp(newCloudXYZ,
						_previousCloud,
						_maxCorrespondenceDistance,
						_maxIterations,
						&hasConverged,
						&variance,
						&correspondences);

				// verify if there are enough correspondences
				float correspondencesRatio = float(correspondences)/float(_previousCloud->size()>newCloudXYZ->size()?_previousCloud->size():newCloudXYZ->size());

				if(!transform.isNull() && hasConverged &&
				   correspondencesRatio >= _correspondenceRatio)
				{
					output = transform;
					_previousCloud = newCloudXYZ;
				}
				else
				{
					UWARN("Transform not valid (hasConverged=%s variance = %f)",
							hasConverged?"true":"false", variance);
				}
			}
			else if(newCloudXYZ->size() > minPoints)
			{
				output.setIdentity();
				_previousCloud = newCloudXYZ;
			}
		}
	}
	else
	{
		UERROR("Depth is empty?!?");
	}

	if(info)
	{
		info->variance = variance;
	}

	UINFO("Odom update time = %fs hasConverged=%s variance=%f cloud=%d",
			timer.elapsed(),
			hasConverged?"true":"false",
			variance,
			(int)(_pointToPlane?_previousCloudNormal->size():_previousCloud->size()));

	return output;
}
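
Both ICP branches accept a result only when the solver converged and the number of correspondences covers a minimum fraction of the larger of the two clouds. The same test, extracted into a sketch (icpResultAccepted is a hypothetical helper, not part of OdometryICP):

// Accept an ICP result only if it converged and enough of the larger
// cloud found a match, mirroring the checks used twice above.
bool icpResultAccepted(const Transform & transform,
		bool hasConverged,
		int correspondences,
		size_t cloudASize,
		size_t cloudBSize,
		float minRatio)
{
	size_t larger = cloudASize > cloudBSize ? cloudASize : cloudBSize;
	float ratio = larger ? float(correspondences) / float(larger) : 0.0f;
	return !transform.isNull() && hasConverged && ratio >= minRatio;
}
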