Example #1
	virtual void OnEvent (const mrptEvent &e)
	{
		if (e.isOfType<mrptEventGLPreRender>())
		{
			//const mrptEventGLPreRender* ev = e.getAs<mrptEventGLPreRender>();
			//ev-> ...
		}
		else
		if (e.isOfType<mrptEventGLPostRender>())
		{
			//const mrptEventGLPostRender* ev = e.getAs<mrptEventGLPostRender>();

			// Show small message in the corner:
			mrpt::opengl::gl_utils::renderMessageBox(
				0.7f,  0.9f,  // x,y (in screen "ratios")
				0.29f, 0.09f, // width, height (in screen "ratios")
				"Press 'h' for help",
				0.02f  // text size
				);

			// Also showing help?
			if (showing_help || hiding_help)
			{
				static const double TRANSP_ANIMATION_TIME_SEC = 0.5;

				const double show_tim = tim_show_start.Tac();
				const double hide_tim = tim_show_end.Tac();

				const double transparency = hiding_help ?
					1.0-std::min(1.0,hide_tim/TRANSP_ANIMATION_TIME_SEC)
					:
					std::min(1.0,show_tim/TRANSP_ANIMATION_TIME_SEC);

				mrpt::opengl::gl_utils::renderMessageBox(
					0.05f,  0.05f,  // x,y (in screen "ratios")
					0.90f, 0.90f, // width, height (in screen "ratios")
					"These are the supported commands:\n"
					" - 'h': Toogle help view\n"
					" - '<-' and '->': Rotate camera\n"
					" - 'Alt+Enter': Toogle fullscreen\n"
					" - 'ESC': Quit",
					0.05f,  // text size
					mrpt::utils::TColor(190,190,190, 200*transparency),   // background
					mrpt::utils::TColor(0,0,0, 200*transparency),  // border
					mrpt::utils::TColor(200,0,0, 150*transparency), // text
					6.0f, // border width
					"serif", // text font
					mrpt::opengl::NICE // text style
					);

				if (hide_tim>TRANSP_ANIMATION_TIME_SEC && hiding_help)
					hiding_help = false;
			}

		}
	}
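A handler like the one above only receives mrptEventGLPreRender / mrptEventGLPostRender once it has been registered as an observer of the 3D window. The following is a minimal sketch of that wiring, assuming the MRPT 1.x API (mrpt::utils::CObserver::observeBegin() and mrpt::gui::CDisplayWindow3D); the class name, window caption, and main loop are illustrative and not part of the original example:

#include <mrpt/gui/CDisplayWindow3D.h>
#include <mrpt/system/threads.h>
#include <mrpt/utils/CObserver.h>
#include <mrpt/utils/mrptEvent.h>

class MyRenderObserver : public mrpt::utils::CObserver
{
protected:
	// Same kind of handler as in the example above:
	virtual void OnEvent(const mrpt::utils::mrptEvent &e)
	{
		// React to mrptEventGLPreRender / mrptEventGLPostRender here,
		// e.g. overlay text with mrpt::opengl::gl_utils::renderMessageBox().
	}
};

int main()
{
	mrpt::gui::CDisplayWindow3D win("Render-event demo", 640, 480);
	MyRenderObserver obs;
	obs.observeBegin(win);       // start listening to the window's events
	while (win.isOpen())
	{
		win.forceRepaint();      // triggers the pre/post-render events
		mrpt::system::sleep(20);
	}
	return 0;
}
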
Example #2
		/** This virtual function will be called upon receipt of any event, once we have started listening to any CObservable object. */
		virtual void OnEvent(const mrptEvent &e)
		{
			if (e.isOfType<mrpt::gui::mrptEventWindowChar>())
			{
				const mrpt::gui::mrptEventWindowChar * ev = e.getAs<mrpt::gui::mrptEventWindowChar>();
				bool rebuild_3d_obj = false;
				switch (ev->char_code)
				{
				case 'h':
				case 'H':
					if (!showing_help) {
						tim_show_start.Tic();
						showing_help = true;
					}
					else {
						tim_show_end.Tic();
						showing_help = false;
						hiding_help = true;
					}
					break;
				case 'q':
				case 'Q':
					request_to_quit=true;
					break;
				case 'i':
				case 'I':
					params["show_ID_labels"] = params["show_ID_labels"]!=0 ? 0:1;
					rebuild_3d_obj = true;
					break;
				case 'e':
				case 'E':
					params["show_edges"] = params["show_edges"]!=0 ? 0:1;
					rebuild_3d_obj = true;
					break;
				case 'c':
				case 'C':
					params["show_node_corners"] = params["show_node_corners"]!=0 ? 0:1;
					rebuild_3d_obj = true;
					break;
				case 's':
				case 'S':
					{
						static int cnt = 0;
						const std::string sFil = mrpt::format("dump_graph_%05i.3Dscene",++cnt);
						std::cout << "Dumping scene to file: " << sFil << std::endl;
						
						mrpt::opengl::COpenGLScene scene;
						scene.insert(m_new_3dobj);
						mrpt::utils::CFileGZOutputStream  f(sFil);
						f << scene;
					}
					break;
				case 'v':
				case 'V':
					params["show_edge_rel_poses"] = params["show_edge_rel_poses"]!=0 ? 0:1;
					rebuild_3d_obj = true;
					break;
				case '+':
					params["nodes_point_size"]+=1.0;
					rebuild_3d_obj = true;
					break;
				case '-':
					params["nodes_point_size"]=std::max(0.0, params["nodes_point_size"] - 1.0 );
					rebuild_3d_obj = true;
					break;
				case 'o':
				case 'O':
					params["nodes_corner_scale"]*=1.2;
					rebuild_3d_obj = true;
					break;
				case 'p':
				case 'P':
					params["nodes_corner_scale"]*=1.0/1.2;
					rebuild_3d_obj = true;
					break;
				case 'k':
				case 'K':
					params["nodes_edges_corner_scale"]*=1.2;
					rebuild_3d_obj = true;
					break;
				case 'l':
				case 'L':
					params["nodes_edges_corner_scale"]*=1.0/1.2;
					rebuild_3d_obj = true;
					break;
				};

				if (rebuild_3d_obj) 
				{
					m_new_3dobj = mrpt::opengl::graph_tools::graph_visualize(m_graph,params);
					request_to_refresh_3D_view = true;
				}
			}
			else
			if (e.isOfType<mrptEventGLPostRender>())
			{
				//const mrptEventGLPostRender* ev = e.getAs<mrptEventGLPostRender>();
				// Show small message in the corner:
				mrpt::opengl::gl_utils::renderMessageBox(
					0.8f,  0.94f,  // x,y (in screen "ratios")
					0.19f, 0.05f, // width, height (in screen "ratios")
					"Press 'h' for help",
					0.015f  // text size
					);

				// Also showing help?
				if (showing_help || hiding_help)
				{
					static const double TRANSP_ANIMATION_TIME_SEC = 0.5;

					const double show_tim = tim_show_start.Tac();
					const double hide_tim = tim_show_end.Tac();

					const double transparency = hiding_help ?
						1.0-std::min(1.0,hide_tim/TRANSP_ANIMATION_TIME_SEC)
						:
						std::min(1.0,show_tim/TRANSP_ANIMATION_TIME_SEC);

					mrpt::opengl::gl_utils::renderMessageBox(
						0.05f,  0.05f,  // x,y (in screen "ratios")
						0.50f, 0.50f, // width, height (in screen "ratios")
						MSG_HELP_WINDOW,
						0.02f,  // text size
						mrpt::utils::TColor(190,190,190, 200*transparency),   // background
						mrpt::utils::TColor(0,0,0, 200*transparency),  // border
						mrpt::utils::TColor(200,0,0, 150*transparency), // text
						5.0f, // border width
						"serif", // text font
						mrpt::opengl::NICE // text style
						);

					if (hide_tim>TRANSP_ANIMATION_TIME_SEC && hiding_help)
						hiding_help = false;
				}
			}

		}
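All of the key toggles above write into a params dictionary that is later handed to mrpt::opengl::graph_tools::graph_visualize(). Below is a minimal sketch of how such a map is typically initialized before the first call, assuming the MRPT 1.x API (mrpt::utils::TParametersDouble and an mrpt::graphs::CNetworkOfPoses2D graph); the helper name buildGraphVisualization and the initial values are illustrative, not the library defaults:

#include <mrpt/graphs/CNetworkOfPoses.h>
#include <mrpt/opengl/CSetOfObjects.h>
#include <mrpt/opengl/graph_tools.h>
#include <mrpt/utils/TParameters.h>

mrpt::opengl::CSetOfObjectsPtr buildGraphVisualization(
	const mrpt::graphs::CNetworkOfPoses2D &graph)
{
	mrpt::utils::TParametersDouble params;
	params["show_ID_labels"]           = 0;   // toggled with 'i' in the handler above
	params["show_edges"]               = 1;   // toggled with 'e'
	params["show_node_corners"]        = 1;   // toggled with 'c'
	params["show_edge_rel_poses"]      = 0;   // toggled with 'v'
	params["nodes_point_size"]         = 3.0; // changed with '+' / '-'
	params["nodes_corner_scale"]       = 0.7; // changed with 'o' / 'p'
	params["nodes_edges_corner_scale"] = 0.4; // changed with 'k' / 'l'

	// Build the 3D representation of the pose graph with these settings:
	return mrpt::opengl::graph_tools::graph_visualize(graph, params);
}
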
/**
 * This is where the localization is done... it is not really a callback in the offline version.
 */
void callback(const sensor_msgs::LaserScan &scan, tf::Transform odo_pose, tf::Transform state_pose)
{
	static int counter = 0;
	counter++;
	
	static tf::TransformListener tf_listener;
	
	double looptime = TT.Tac();
	TT.Tic();
	fprintf(stderr,"Lt( %.1lfms %.1lfHz seq:%d) -",looptime*1000,1.0/looptime,scan.header.seq);
	
	if(has_sensor_offset_set == false) return;
	
	double gx,gy,gyaw,x,y,yaw;
	
	///Get the ground truth
	gyaw = tf::getYaw(state_pose.getRotation());  
	gx = state_pose.getOrigin().x();
	gy = state_pose.getOrigin().y();
	
	///Get the odometry
	yaw = tf::getYaw(odo_pose.getRotation());  
	x = odo_pose.getOrigin().x();
	y = odo_pose.getOrigin().y();
		
	mrpt::utils::CTicTac	tictac;
	tictac.Tic();

	int N =(scan.angle_max - scan.angle_min)/scan.angle_increment; ///< number of scan lines
	
	/////
	/// Pose conversions
	////
	
	Eigen::Affine3d T = getAsAffine(x,y,yaw);
	Eigen::Affine3d Tgt = getAsAffine(gx,gy,gyaw);
	
	/**
	 * We are now using the information from the ground truth to initialize the filter
	 */ 
	if(isFirstLoad){
		fprintf(stderr,"Initializing to (%lf, %lf, %lf)\n",gx,gy,gyaw);
		///Initialize the filter with 1m^2 variance in position and 20deg in heading
		ndtmcl->initializeFilter(gx, gy,gyaw,1.0, 1.0, 20.0*M_PI/180.0, 450);
		Told = T;
		Todo = Tgt;
	}
	
	///Compute the differential motion between two time steps 
	Eigen::Affine3d Tmotion = Told.inverse() * T;
	Todo = Todo*Tmotion;
	Told = T;
	
	///Get the laser pose with respect to the base
	float dy =offy;
	float dx = offx;
	float alpha = atan2(dy,dx);
	float L = sqrt(dx*dx+dy*dy);
	
	///Laser pose in base frame
	float lpx = L * cos(alpha);
	float lpy = L * sin(alpha);
	float lpa = offa;
	
	
	
	///Laser scan to PointCloud transformed with respect to the base
	pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);	
	for (int j=0;j<N;j++){
		double r  = scan.ranges[j];
		if(r>=scan.range_min && r<scan.range_max && r>0.3 && r<20.0){
			double a  = scan.angle_min + j*scan.angle_increment;
			pcl::PointXYZ pt;
			pt.x = r*cos(a+lpa)+lpx;
			pt.y = r*sin(a+lpa)+lpy;
			pt.z = 0.1+0.02 * (double)rand()/(double)RAND_MAX;
			cloud->push_back(pt);
		}
	}
	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// Now we have the differential motion and the point cloud -- let's do MCL
	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	ndtmcl->updateAndPredict(Tmotion, *cloud); ///< does the prediction, update and resampling steps (see ndt_mcl.hpp)
	
	Eigen::Vector3d dm = ndtmcl->getMean(); ///< Maximum a posteriori estimate of the pose
	Eigen::Matrix3d cov = ndtmcl->pf.getDistributionVariances(); ///< Distribution variances
	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	double Time = tictac.Tac();
	fprintf(stderr,"Time elapsed %.1lfms (%lf %lf %lf) \n",Time*1000,dm[0],dm[1],dm[2]);
	isFirstLoad = false;
	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	
	// If visualization is desired
#ifdef USE_VISUALIZATION_DEBUG
	///The sensor pose in the global frame for visualization
	Eigen::Vector3d origin(dm[0] + L * cos(dm[2]+alpha),dm[1] + L * sin(dm[2]+alpha),0.1);

	Eigen::Affine3d ppos = getAsAffine(dm[0],dm[1],dm[2]);
	
	//lslgeneric::transformPointCloudInPlace(Tgt, *cloud);
	lslgeneric::transformPointCloudInPlace(ppos, *cloud);
	mrpt::opengl::COpenGLScenePtr &scene = win3D.get3DSceneAndLock();	
	win3D.setCameraPointingToPoint(gx,gy,1);
	if(counter%2000==0) gl_points->clear();
	scene->clear();
	scene->insert(plane);
	
	addMap2Scene(ndtmcl->map, origin, scene);
	addPoseCovariance(dm[0],dm[1],cov,scene);
	addScanToScene(scene, origin, cloud);
	addParticlesToWorld(ndtmcl->pf,Tgt.translation(),dm, Todo.translation());
	scene->insert(gl_points);
	scene->insert(gl_particles);
	win3D.unlockAccess3DScene();
	win3D.repaint();

	if (win3D.keyHit())
	{
		mrpt::gui::mrptKeyModifier kmods;
		int key = win3D.getPushedKey(&kmods);
	}
#endif
}
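
The callback above relies on a getAsAffine(x, y, yaw) helper that is not part of this listing. The sketch below shows what such a helper typically does (promote a planar pose to a 3D rigid transform rotating about the Z axis), using Eigen; this is an assumption about its behaviour, not the original implementation:

#include <Eigen/Geometry>

Eigen::Affine3d getAsAffine(double x, double y, double yaw)
{
	Eigen::Affine3d T = Eigen::Affine3d::Identity();
	T.translation() << x, y, 0.0;                                // planar position
	T.rotate(Eigen::AngleAxisd(yaw, Eigen::Vector3d::UnitZ()));  // heading about Z
	return T;
}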