bool MyReactiveInterface::senseObstacles(CSimplePointsMap &obstacles){
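	// Grab the latest laser scan and robot pose, rebuild the obstacle point
	// map from that scan, and hand a copy of the data to the plotting thread.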

	cout << "senseObstacles" << endl;

	CObservation2DRangeScan laserScan;
	CPose2D rPose;
	CPose3D rPose3D;

	getCurrentMeasures(laserScan,rPose);
	rPose3D=rPose;


	obstacles.insertionOptions.minDistBetweenLaserPoints=0.005f;
	obstacles.insertionOptions.also_interpolate=false;

	obstacles.clear();
	obstacles.insertObservation(&laserScan);

	// Update data for plot thread
	pthread_mutex_lock(&m_mutex);
	path.AddVertex(rPose.x(),rPose.y());
	laserCurrentMap.loadFromRangeScan(laserScan,&rPose3D);
	newScan=true;
	pthread_mutex_unlock(&m_mutex);

	refreshPlot();


	return true;

}
Example #2
/** Convert sensor_msgs/PointCloud -> mrpt::slam::CSimplePointsMap
  *
  * \return true on successful conversion, false on any error.
  */
bool point_cloud::ros2mrpt(
		const sensor_msgs::PointCloud &msg,
		CSimplePointsMap  &obj )
{
	const size_t N = msg.points.size();

	obj.clear();
	obj.reserve(N);
	for (size_t i=0;i<N;i++)
		obj.insertPoint(msg.points[i].x,msg.points[i].y,msg.points[i].z);

	return true;
}
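// --- Usage sketch (not part of the original source) --------------------------
// Shows how ros2mrpt() above might be called from a ROS subscriber callback.
// The topic name "/pointcloud_in" and the node wiring are illustrative
// assumptions; only sensor_msgs/PointCloud and CSimplePointsMap from the code
// above are relied upon.
void onPointCloud(const sensor_msgs::PointCloud::ConstPtr &msg)
{
	CSimplePointsMap localMap;
	if (point_cloud::ros2mrpt(*msg, localMap))
		ROS_INFO("Converted cloud with %u points",
			static_cast<unsigned int>(localMap.size()));
}
// In main(): ros::Subscriber sub = nh.subscribe("/pointcloud_in", 1, onPointCloud);
// ------------------------------------------------------------------------------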
Example #3
// Test function
void laserTest(){
  initLaser();

#if MRPT_HAS_WXWIDGETS
	CDisplayWindowPlots		win("SICK laser scanner");
#endif

	CSimplePointsMap receiveMap;
	//	CObservation2DRangeScan* obs = new CObservation2DRangeScan();
	CObservation* obs = new CObservation2DRangeScan();

  while(!mrpt::system::os::kbhit()){

	if(BREAK){
	  cout << index << " frame: press any key to continue" << endl;
	  //cin >>  temp;
	  getc(stdin);
	  fflush(stdin);
	}

	if(receiveDataFromSensor((CObservation2DRangeScan*)obs, SICK))
	{
	  ((CObservation2DRangeScan*)obs)->sensorPose = CPose3D(0,0,0);

	  receiveMap.clear();
	  receiveMap.insertionOptions.minDistBetweenLaserPoints	= 0;
	  receiveMap.insertObservation( obs );
	    
	}

#if MRPT_HAS_WXWIDGETS

	vector_float xs, ys, zs;
	receiveMap.getAllPoints(xs,ys,zs);

	win.plot(xs,ys,".r3","t1");
	win.axis_equal();

	//	win.axis_fit();
#endif
	
  }

  delete obs;
  sick.stop();


}
Example #4
void MyLaser::plot(CObservation2DRangeScan obs){

	if(simulateLaser){

		CSimplePointsMap map;

		map.insertionOptions.minDistBetweenLaserPoints=0;
		map.insertionOptions.also_interpolate=false;
		map.insertionOptions.isPlanarMap=true;
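		// (isPlanarMap: only points lying in the XY plane are inserted, keeping the map 2-D)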

		map.clear();
		map.insertObservation(&obs);


		vector_float	xs,ys,zs;
		map.getAllPoints(xs,ys,zs);


		float width(gridMap.getXMax()-gridMap.getXMin()),height(gridMap.getYMax()-gridMap.getYMin());

		win.image(mapImage,-x_center_pixel*res*1000,-y_center_pixel*res*1000,width*1000,height*1000);

		win.plot(xs*1000,ys*1000,".b3");

	}

}
	/** Callback: On recalc local map & publish it */
	void onDoPublish(const ros::TimerEvent& )
	{
		mrpt::utils::CTimeLoggerEntry tle(m_profiler,"onDoPublish");

		// Purge old observations & latch a local copy:
		TListObservations obs;
		{
			mrpt::utils::CTimeLoggerEntry tle(m_profiler,"onDoPublish.removingOld");
			m_hist_obs_mtx.lock();

			// Purge old obs:
			if (!m_hist_obs.empty())
			{
				const double last_time = m_hist_obs.rbegin()->first;
				TListObservations::iterator it_first_valid = m_hist_obs.lower_bound( last_time-m_time_window );
				const size_t nToRemove = std::distance( m_hist_obs.begin(), it_first_valid );
				ROS_DEBUG("[onDoPublish] Removing %u old entries, last_time=%lf",static_cast<unsigned int>(nToRemove), last_time);
				m_hist_obs.erase(m_hist_obs.begin(), it_first_valid);
			}
			// Local copy in this thread:
			obs = m_hist_obs;
			m_hist_obs_mtx.unlock();
		}

		ROS_DEBUG("Building local map with %u observations.",static_cast<unsigned int>(obs.size()));
		if (obs.empty())
			return;

		// Build local map(s):
		// -----------------------------------------------
		m_localmap_pts.clear();
		mrpt::poses::CPose3D curRobotPose;
		{
			mrpt::utils::CTimeLoggerEntry tle2(m_profiler,"onDoPublish.buildLocalMap");

			// Get the latest robot pose in the reference frame (typ: /odom -> /base_link)
			// so we can build the local map RELATIVE to it:
			try {
				tf::StampedTransform tx;
				m_tf_listener.lookupTransform(m_frameid_reference,m_frameid_robot, ros::Time(0), tx);
				mrpt_bridge::convert(tx,curRobotPose);
				ROS_DEBUG("[onDoPublish] Building local map relative to latest robot pose: %s", curRobotPose.asString().c_str() );
			}
			catch (tf::TransformException &ex) {
				ROS_ERROR("%s",ex.what());
				return;
			}

			// For each observation: compute relative robot pose & insert obs into map:
			for (TListObservations::const_iterator it=obs.begin();it!=obs.end();++it)
			{
				const TInfoPerTimeStep & ipt = it->second;

				// Relative pose in the past:
				mrpt::poses::CPose3D relPose(mrpt::poses::UNINITIALIZED_POSE);
				relPose.inverseComposeFrom( ipt.robot_pose, curRobotPose );

				// Insert obs:
				m_localmap_pts.insertObservationPtr(ipt.observation,&relPose);

			} // end for

		}

		// Publish them:
		if (m_pub_local_map_pointcloud.getNumSubscribers()>0)
		{
			sensor_msgs::PointCloudPtr msg_pts = sensor_msgs::PointCloudPtr( new sensor_msgs::PointCloud);
			msg_pts->header.frame_id = m_frameid_robot;
			msg_pts->header.stamp = ros::Time( obs.rbegin()->first );
			mrpt_bridge::point_cloud::mrpt2ros(m_localmap_pts,msg_pts->header, *msg_pts);
			m_pub_local_map_pointcloud.publish(msg_pts);
		}

		// Show gui:
		if (m_show_gui)
		{
			if (!m_gui_win)
			{
				m_gui_win = mrpt::gui::CDisplayWindow3D::Create("LocalObstaclesNode",800,600);
				mrpt::opengl::COpenGLScenePtr &scene = m_gui_win->get3DSceneAndLock();
				scene->insert( mrpt::opengl::CGridPlaneXY::Create() );
				scene->insert( mrpt::opengl::stock_objects::CornerXYZSimple(1.0,4.0) );

				mrpt::opengl::CSetOfObjectsPtr gl_obs = mrpt::opengl::CSetOfObjects::Create();
				gl_obs->setName("obstacles");
				scene->insert( gl_obs );

				mrpt::opengl::CPointCloudPtr gl_pts = mrpt::opengl::CPointCloud::Create();
				gl_pts->setName("points");
				gl_pts->setPointSize(2.0);
				gl_pts->setColor_u8( mrpt::utils::TColor(0x0000ff) );
				scene->insert( gl_pts );

				m_gui_win->unlockAccess3DScene();
			}

			mrpt::opengl::COpenGLScenePtr &scene = m_gui_win->get3DSceneAndLock();
			mrpt::opengl::CSetOfObjectsPtr gl_obs = mrpt::opengl::CSetOfObjectsPtr( scene->getByName("obstacles") );
			ROS_ASSERT(gl_obs.present());
			gl_obs->clear();

			mrpt::opengl::CPointCloudPtr gl_pts = mrpt::opengl::CPointCloudPtr( scene->getByName("points") );

			for (TListObservations::const_iterator it=obs.begin();it!=obs.end();++it)
			{
				const TInfoPerTimeStep & ipt = it->second;
				// Relative pose in the past:
				mrpt::poses::CPose3D relPose(mrpt::poses::UNINITIALIZED_POSE);
				relPose.inverseComposeFrom( ipt.robot_pose, curRobotPose );

				mrpt::opengl::CSetOfObjectsPtr gl_axis = mrpt::opengl::stock_objects::CornerXYZSimple(0.9,2.0);
				gl_axis->setPose(relPose);
				gl_obs->insert(gl_axis);
			} // end for

			gl_pts->loadFromPointsMap(&m_localmap_pts);


			m_gui_win->unlockAccess3DScene();
			m_gui_win->repaint();
		}


	} // onDoPublish
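	// Note on the relative poses used above (a side note, not from the original
	// node): CPose3D::inverseComposeFrom(A, B) computes this = -B (+) A, so
	//     relPose = curRobotPose^-1 (+) ipt.robot_pose
	// i.e. the past robot pose expressed in the CURRENT robot frame, which is
	// what is needed to insert old scans into a robot-centric local map.
	// For example, with a past pose at (1,0,0) and the current pose at the
	// origin rotated +90 deg about Z, the relative pose comes out as roughly
	// (0,-1,0) with yaw = -90 deg.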
/** The PF algorithm implementation for "optimal sampling" approximated with scan matching (Stachniss method)
  */
void  CLSLAM_RBPF_2DLASER::prediction_and_update_pfOptimalProposal(
	CLocalMetricHypothesis	*LMH,
	const mrpt::slam::CActionCollection	* actions,
	const mrpt::slam::CSensoryFrame		* sf,
	const bayes::CParticleFilter::TParticleFilterOptions &PF_options )
{
	MRPT_START

	CTicTac tictac;

	// Get the current robot pose estimation:
	TPoseID		currentPoseID = LMH->m_currentRobotPose;

	// ----------------------------------------------------------------------
	//   We can execute the optimal PF only when we have both an action and a
	//   valid observation from which to compute the likelihood:
	//   accumulate odometry/actions until a valid observation arrives, then
	//   process them together.
	// ----------------------------------------------------------------------
	bool  SFhasValidObservations = false;
	// A valid action?
	if (actions!=NULL)
	{
		CActionRobotMovement2DPtr act = actions->getBestMovementEstimation();	// Find a robot movement estimation:
		if (!act) THROW_EXCEPTION("Action list does not contain any CActionRobotMovement2D derived object!");

		if (!LMH->m_accumRobotMovementIsValid) // Reset accum.
		{
			act->poseChange->getMean( LMH->m_accumRobotMovement.rawOdometryIncrementReading );
			LMH->m_accumRobotMovement.motionModelConfiguration = act->motionModelConfiguration;
		}
		else
			LMH->m_accumRobotMovement.rawOdometryIncrementReading =
				LMH->m_accumRobotMovement.rawOdometryIncrementReading +
				act->poseChange->getMeanVal();

		LMH->m_accumRobotMovementIsValid = true;
	}

	if (sf!=NULL)
	{
		ASSERT_(LMH->m_particles.size()>0);
		SFhasValidObservations = (*LMH->m_particles.begin()).d->metricMaps.canComputeObservationsLikelihood( *sf );
	}

	// Do we have everything we need?
	if (!LMH->m_accumRobotMovementIsValid || !SFhasValidObservations)
		return; // Nothing we can do here...
	ASSERT_(sf!=NULL);
	ASSERT_(!PF_options.adaptiveSampleSize);

	// OK, we have all we need, let's start!

	// The odometry-based increment since last step is
	// in:   LMH->m_accumRobotMovement.rawOdometryIncrementReading
	const CPose2D initialPoseEstimation = LMH->m_accumRobotMovement.rawOdometryIncrementReading;
	LMH->m_accumRobotMovementIsValid = false; // To reset odometry at next iteration!


	// ----------------------------------------------------------------------
	//						1) FIXED SAMPLE SIZE VERSION
	// ----------------------------------------------------------------------

	// ICP used if "pfOptimalProposal_mapSelection" = 0 or 1
	CICP					icp;
	CICP::TReturnInfo	icpInfo;

	// ICP options
	// ------------------------------
	icp.options.maxIterations = 80;
	icp.options.thresholdDist = 0.50f;
	icp.options.thresholdAng = DEG2RAD( 20 );
	icp.options.smallestThresholdDist = 0.05f;
	icp.options.ALFA		  = 0.5f;
	icp.options.onlyClosestCorrespondences = true;
	icp.options.doRANSAC = false;

	// Build the map of points to align:
	CSimplePointsMap	localMapPoints;

	ASSERT_( LMH->m_particles[0].d->metricMaps.m_gridMaps.size() > 0);
	//float	minDistBetweenPointsInLocalMaps = 0.02f; //3.0f * m_particles[0].d->metricMaps.m_gridMaps[0]->getResolution();

	// Build local map:
	localMapPoints.clear();
	localMapPoints.insertionOptions.minDistBetweenLaserPoints =  0.02;
	sf->insertObservationsInto( &localMapPoints );

	// Process the particles
	const size_t M = LMH->m_particles.size();
	LMH->m_log_w_metric_history.resize(M);

	for (size_t i=0;i<M;i++)
	{
		CLocalMetricHypothesis::CParticleData  &part = LMH->m_particles[i];
		CPose3D *part_pose = LMH->getCurrentPose(i);

		if ( LMH->m_SFs.empty() )
		{
			// The first robot pose in the SLAM execution: All m_particles start
			// at the same point (this is the lowest bound of subsequent uncertainty):
			// New pose = old pose.
			// part_pose: Unmodified
		}
		else
		{
			// ICP and add noise:
			CPosePDFGaussian	icpEstimation;

			// Select map to use with ICP:
			CMetricMap *mapalign;

			if (!part.d->metricMaps.m_pointsMaps.empty())
				mapalign = part.d->metricMaps.m_pointsMaps[0].pointer();
			else if (!part.d->metricMaps.m_gridMaps.empty())
				mapalign = part.d->metricMaps.m_gridMaps[0].pointer();
			else
				THROW_EXCEPTION("There is no point or grid map. At least one needed for ICP.");

			// Use ICP to align to each particle's map:
			CPosePDFPtr alignEst = icp.Align(
				mapalign,
				&localMapPoints,
				initialPoseEstimation,
				NULL,
				&icpInfo);

			icpEstimation.copyFrom( *alignEst );

#if 0
			// HACK:
			CPose3D Ap = finalEstimatedPoseGauss.mean - ith_last_pose;
			double  Ap_dist = Ap.norm();
			finalEstimatedPoseGauss.cov.zeros();
			finalEstimatedPoseGauss.cov(0,0) = square( fabs(Ap_dist)*0.01 );
			finalEstimatedPoseGauss.cov(1,1) = square( fabs(Ap_dist)*0.01 );
			finalEstimatedPoseGauss.cov(2,2) = square( fabs(Ap.yaw())*0.02 );
#endif

			// Generate gaussian-distributed 2D-pose increments according to "finalEstimatedPoseGauss":
			// -------------------------------------------------------------------------------------------
			// Set the gaussian pose:
			CPose3DPDFGaussian finalEstimatedPoseGauss( icpEstimation );

			CPose3D noisy_increment;
			finalEstimatedPoseGauss.drawSingleSample(noisy_increment);

			CPose3D new_pose;
			new_pose.composeFrom(*part_pose,noisy_increment);

			CPose2D new_pose2d = CPose2D(new_pose);

			// Add the pose to the path:
			part.d->robotPoses[ LMH->m_currentRobotPose ] = new_pose;

			// Update the weight:
			// ---------------------------------------------------------------------------
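			// (log-domain update:  log w_i += PF_options.powFactor * log p(sf | particle i's map, new_pose2d))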
			const double log_lik =
				PF_options.powFactor *
				auxiliarComputeObservationLikelihood(
					PF_options,
					LMH,
					i,
					sf,
					&new_pose2d);

			part.log_w += log_lik;

			// Add to historic record of log_w weights:
			LMH->m_log_w_metric_history[i][currentPoseID] += log_lik;

		} // end else we can do ICP

	} // end for each particle i

	// Accumulate the log likelihood of this LMH as a whole:
	double out_max_log_w;
	LMH->normalizeWeights( &out_max_log_w );	// Normalize weights:
	LMH->m_log_w += out_max_log_w;

	printf("[CLSLAM_RBPF_2DLASER] Overall likelihood = %.2e\n",out_max_log_w);
	printf("[CLSLAM_RBPF_2DLASER] Done in %.03fms\n",1e3*tictac.Tac());

	MRPT_END
}
Example #7
// ------------------------------------------------------
//				MapBuilding_ICP
// ------------------------------------------------------
void MapBuilding_ICP()
{
	MRPT_TRY_START
	  

	CTicTac								tictac,tictacGlobal,tictac_JH;
	int									step = 0;
	std::string							str;
	CSensFrameProbSequence				finalMap;
	float								t_exec;
	COccupancyGridMap2D::TEntropyInfo	entropy;

	size_t						rawlogEntry = 0;
	CFileGZInputStream					rawlogFile( RAWLOG_FILE.c_str() );

	CFileGZOutputStream sensor_data;

	// ---------------------------------
	//		Constructor
	// ---------------------------------
	CMetricMapBuilderICP mapBuilder(
		&metricMapsOpts,
		insertionLinDistance,
		insertionAngDistance,
		&icpOptions
		);

	mapBuilder.ICP_options.matchAgainstTheGrid = matchAgainstTheGrid;

	// ---------------------------------
	//   CMetricMapBuilder::TOptions
	// ---------------------------------
	mapBuilder.options.verbose					= true;
	mapBuilder.options.enableMapUpdating		= true;
    mapBuilder.options.debugForceInsertion		= false;
	mapBuilder.options.insertImagesAlways		= false;

	// Prepare output directory:
	// --------------------------------
	deleteFilesInDirectory(OUT_DIR);
	createDirectory(OUT_DIR);

	// Open log files:
	// ----------------------------------
	CFileOutputStream  f_log(format("%s/log_times.txt",OUT_DIR));
	CFileOutputStream  f_path(format("%s/log_estimated_path.txt",OUT_DIR));
	CFileOutputStream  f_pathOdo(format("%s/log_odometry_path.txt",OUT_DIR));


	// Create 3D window if requested:
	CDisplayWindow3DPtr	win3D;
#if MRPT_HAS_WXWIDGETS
	if (SHOW_PROGRESS_3D_REAL_TIME)
	{
		win3D = CDisplayWindow3DPtr( new CDisplayWindow3D("ICP-SLAM @ MRPT C++ Library (C) 2004-2008", 600, 500) );
		win3D->setCameraZoom(20);
		win3D->setCameraAzimuthDeg(-45);
	}
#endif

	if(OBS_FROM_FILE == 0){
	  sensor_data.open("sensor_data.rawlog");
	  printf("Receive From Sensor\n");
	  initLaser();
	  printf("OK\n");
	}
	

	// ----------------------------------------------------------
	//						Map Building
	// ----------------------------------------------------------
	CActionCollectionPtr	action;
	CSensoryFramePtr		observations, temp_obs;
	CSensoryFramePtr obs_set;
	CPose2D					odoPose(0,0,0);

	CSimplePointsMap	oldMap, newMap;
	CICP					ICP;

	vector_float		accum_x, accum_y, accum_z;
	
	// ICP Setting
	ICP.options.ICP_algorithm = (TICPAlgorithm)ICP_method;

	ICP.options.maxIterations			= 40;
	ICP.options.thresholdAng =			0.15;
	ICP.options.thresholdDist			= 0.75f;
	ICP.options.ALFA					= 0.30f;
	ICP.options.smallestThresholdDist	= 0.10f;
	ICP.options.doRANSAC = false;
	ICP.options.dumpToConsole();
	//
	CObservationPtr obsSick = CObservationPtr(new CObservation2DRangeScan());
	CObservationPtr obsHokuyo = CObservationPtr(new CObservation2DRangeScan());
	CSimplePointsMap hokuyoMap;
	
	bool isFirstTime = true;
	bool loop = true;

    /*
		  cout << index << "frame, Input the any key to continue" << endl;
		  getc(stdin);
		  fflush(stdin);
    */

	tictacGlobal.Tic();
	while(loop)
	{
      /*
		if(BREAK){
		  cout << index << "frame, Input the any key to continue" << endl;
		  getc(stdin);
		  fflush(stdin);
		}else{
		  if(os::kbhit())
			loop = true;
		}
      */

        if(os::kbhit())
          loop = false;   // stop map building when a key is pressed
        

		if(DELAY) {
		  sleep(15);
		}
	  

		// Load action/observation pair from the rawlog:
		// --------------------------------------------------

		if(OBS_FROM_FILE == 1) {
		  if (! CRawlog::readActionObservationPair( rawlogFile, action, temp_obs, rawlogEntry) )
			break; // file EOF

		  obsSick = temp_obs->getObservationByIndex(0);
		  obsHokuyo = temp_obs->getObservationByIndex(1);
		  
		  observations = CSensoryFramePtr(new CSensoryFrame());		  
		  observations->insert((CObservationPtr)obsSick);

		  hokuyoMap.clear();
		  hokuyoMap.insertObservation(obsHokuyo.pointer());
		  
		}else{
		  rawlogEntry = rawlogEntry+2;

		  tictac.Tic();

		  obsSick = CObservationPtr(new CObservation2DRangeScan());
		  obsHokuyo = CObservationPtr(new CObservation2DRangeScan());

		  if(!receiveDataFromSensor((CObservation2DRangeScan*)obsSick.pointer(),SICK)){
			cout << " Error receiving sensor data" << endl;
			return;
		  }

		  if(!receiveDataFromSensor((CObservation2DRangeScan*)obsHokuyo.pointer(),HOKUYO)){
			cout << " Error receiving sensor data" << endl;

			return;
		  }
		  
		  cout << "Time to receive data : " << tictac.Tac()*1000.0f << endl;

		  obsSick->timestamp = mrpt::system::now();
		  obsSick->setSensorPose(CPose3D(0,0,0,DEG2RAD(0),DEG2RAD(0),DEG2RAD(0)));
		  ((CObservation2DRangeScan*)obsSick.pointer())->rightToLeft = true;
		  ((CObservation2DRangeScan*)obsSick.pointer())->stdError = 0.003f;

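		  // The Hokuyo is mounted 0.4 m above the base and pitched -90 deg, so
		  // its scan plane is vertical; those scans are accumulated further
		  // below into the 3-D "hokuyo_total" point cloud shown in the viewer.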
		  obsHokuyo->timestamp = mrpt::system::now();
		  obsHokuyo->setSensorPose(CPose3D(0,0,0.4,DEG2RAD(0),DEG2RAD(-90),DEG2RAD(0)));
		  ((CObservation2DRangeScan*)obsHokuyo.pointer())->rightToLeft = true;
		  ((CObservation2DRangeScan*)obsHokuyo.pointer())->stdError = 0.003f;
		  
		  cout << "rawlogEntry : " << rawlogEntry << endl;

		  CActionRobotMovement2D myAction;

		  newMap.clear();
		  obsSick.pointer()->insertObservationInto(&newMap);

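		  // Scan-matching odometry: align the new scan against the previous one
		  // with ICP and use the resulting pose PDF as the "action" (odometry
		  // substitute) fed to the map builder.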
		  if(!isFirstTime){

			static float					runningTime;
			static CICP::TReturnInfo		info;
			static CPose2D initial(0,0,0);

			CPosePDFPtr ICPPdf = ICP.AlignPDF(&oldMap, &newMap, initial, &runningTime, (void*)&info);
			CPose2D		estMean;
			CMatrixDouble33		estCov;
			ICPPdf->getCovarianceAndMean(estCov, estMean);
			printf("ICP run in %.02fms, %d iterations (%.02fms/iter), %.01f%% goodness\n -> ", runningTime*1000, info.nIterations, runningTime*1000.0f/info.nIterations, info.goodness*100 );
			cout << "ICP Odometry : " << ICPPdf->getMeanVal() << endl;

			myAction.estimationMethod = CActionRobotMovement2D::emScan2DMatching;
			myAction.poseChange = CPosePDFPtr( new CPosePDFGaussian(estMean, estCov));
		  }else{
			isFirstTime = false;
		  }

		  oldMap.clear();
		  oldMap.copyFrom(newMap);

		  observations = CSensoryFramePtr(new CSensoryFrame());
		  action = CActionCollectionPtr(new CActionCollection());		  

		  observations->insert((CObservationPtr)obsSick);

		  obs_set = CSensoryFramePtr(new CSensoryFrame());

		  obs_set->insert((CObservationPtr)obsSick);
		  obs_set->insert((CObservationPtr)obsHokuyo);
		  
		  action->insert(myAction);

		  sensor_data << action << obs_set;

		  hokuyoMap.clear();
		  hokuyoMap.insertObservation(obsHokuyo.pointer());

		}


		if (rawlogEntry>=rawlog_offset)
		{
			// Update odometry:
			{
				CActionRobotMovement2DPtr act= action->getBestMovementEstimation();
				if (act)
					odoPose = odoPose + act->poseChange->getMeanVal();
			}

			// Execute:
			// ----------------------------------------
			tictac.Tic();
				mapBuilder.processActionObservation( *action, *observations );
			t_exec = tictac.Tac();
			printf("Map building executed in %.03fms\n", 1000.0f*t_exec );


			// Info log:
			// -----------
			f_log.printf("%f %i\n",1000.0f*t_exec,mapBuilder.getCurrentlyBuiltMapSize() );

			const CMultiMetricMap* mostLikMap =  mapBuilder.getCurrentlyBuiltMetricMap();

			if (0==(step % LOG_FREQUENCY))
			{
				// Pose log:
				// -------------
				if (SAVE_POSE_LOG)
				{
					printf("Saving pose log information...");
					mapBuilder.getCurrentPoseEstimation()->saveToTextFile( format("%s/mapbuild_posepdf_%03u.txt",OUT_DIR,step) );
					printf("Ok\n");
				}
			}


			// Save a 3D scene view of the mapping process:
			if (0==(step % LOG_FREQUENCY) || (SAVE_3D_SCENE || win3D.present()))
			{
                CPose3D robotPose;
				mapBuilder.getCurrentPoseEstimation()->getMean(robotPose);

				COpenGLScenePtr		scene = COpenGLScene::Create();

                COpenGLViewportPtr view=scene->getViewport("main");
                ASSERT_(view);

                COpenGLViewportPtr view_map = scene->createViewport("mini-map");
                view_map->setBorderSize(2);
                view_map->setViewportPosition(0.01,0.01,0.35,0.35);
                view_map->setTransparent(false);

				{
					mrpt::opengl::CCamera &cam = view_map->getCamera();
					cam.setAzimuthDegrees(-90);
					cam.setElevationDegrees(90);
					cam.setPointingAt(robotPose);
					cam.setZoomDistance(20);
					cam.setOrthogonal();
				}

				// The ground:
				mrpt::opengl::CGridPlaneXYPtr groundPlane = mrpt::opengl::CGridPlaneXY::Create(-200,200,-200,200,0,5);
				groundPlane->setColor(0.4,0.4,0.4);
				view->insert( groundPlane );
				view_map->insert( CRenderizablePtr( groundPlane) ); // A copy

				// The camera pointing to the current robot pose:
				if (CAMERA_3DSCENE_FOLLOWS_ROBOT)
				{
				    scene->enableFollowCamera(true);

					mrpt::opengl::CCamera &cam = view_map->getCamera();
					cam.setAzimuthDegrees(-45);
					cam.setElevationDegrees(45);
					cam.setPointingAt(robotPose);
				}

				// The maps:
				{
					opengl::CSetOfObjectsPtr obj = opengl::CSetOfObjects::Create();
					mostLikMap->getAs3DObject( obj );
					view->insert(obj);

					// Only the point map:
					opengl::CSetOfObjectsPtr ptsMap = opengl::CSetOfObjects::Create();
					if (mostLikMap->m_pointsMaps.size())
					{
                        mostLikMap->m_pointsMaps[0]->getAs3DObject(ptsMap);
                        view_map->insert( ptsMap );
					}
				}

				// Draw the robot path:
				CPose3DPDFPtr posePDF =  mapBuilder.getCurrentPoseEstimation();
				CPose3D  curRobotPose;
				posePDF->getMean(curRobotPose);
				{
					opengl::CSetOfObjectsPtr obj = opengl::stock_objects::RobotPioneer();
					obj->setPose( curRobotPose );
					view->insert(obj);
				}
				{
					opengl::CSetOfObjectsPtr obj = opengl::stock_objects::RobotPioneer();
					obj->setPose( curRobotPose );
					view_map->insert( obj );
				}


				// Draw the accumulated Hokuyo data
				{
				  CRenderizablePtr hokuyoRender_t = scene->getByName("hokuyo_total");

				  if(!hokuyoRender_t){
					hokuyoRender_t = CPointCloud::Create();
					hokuyoRender_t->setName("hokuyo_total");
					hokuyoRender_t->setColor(0,0,1);
					hokuyoRender_t->setPose( CPose3D(0,0,0) );
					getAs<CPointCloud>(hokuyoRender_t)->setPointSize(3);
					scene->insert( hokuyoRender_t);					
				  }

				  for(size_t i =0 ; i < accum_x.size(); i++){
					getAs<CPointCloud>(hokuyoRender_t)->insertPoint(accum_x[i], accum_y[i], accum_z[i]);					
				  }
				  cout << "accum_x size : " << accum_x.size() << endl;


				}
				
				// Draw the current Hokuyo scan data
				{
				  // The scene is rebuilt every step, so no lookup of a previous "hokuyo_cur" is needed:
				  CRenderizablePtr hokuyoRender = CPointCloud::Create();
				  hokuyoRender->setName("hokuyo_cur");
				  hokuyoRender->setColor(0,1,0);
				  hokuyoRender->setPose( curRobotPose );
				  getAs<CPointCloud>(hokuyoRender)->setPointSize(0.1);
				  getAs<CPointCloud>(hokuyoRender)->loadFromPointsMap(&hokuyoMap);				  
				  scene->insert( hokuyoRender);

				  vector_float cur_x = getAs<CPointCloud>(hokuyoRender)->getArrayX();
				  vector_float cur_y = getAs<CPointCloud>(hokuyoRender)->getArrayY();
				  vector_float cur_z = getAs<CPointCloud>(hokuyoRender)->getArrayZ();


				  //				  cout << "current pose : " << curRobotPose << endl;				  
				  for(size_t i =0 ; i < cur_x.size(); i++){
					/*
					float rotate_x = cur_x[i]+ curRobotPose.y()*sin(curRobotPose.yaw()*3.14/180.0);
					float rotate_y = cur_y[i]+ curRobotPose.y()*cos(curRobotPose.yaw()*3.14/180.0);
					*/
					float rotate_x = curRobotPose.x() + cur_y[i]*sin(-curRobotPose.yaw());
					float rotate_y = curRobotPose.y() + cur_y[i]*cos(-curRobotPose.yaw());
					//					printf("cur_x, cur_y, cur_z : %f %f %f \n", rotate_x,rotate_y, cur_z[i]);

					accum_x.push_back(rotate_x);
					accum_y.push_back(rotate_y);
					accum_z.push_back(cur_z[i]);
				  }
				}

				// Save as file:
				if (0==(step % LOG_FREQUENCY) && SAVE_3D_SCENE)
				{
					CFileGZOutputStream	f( format( "%s/buildingmap_%05u.3Dscene",OUT_DIR,step ));
					f << *scene;
				}

				// Show 3D?
				if (win3D)
				{
					opengl::COpenGLScenePtr &ptrScene = win3D->get3DSceneAndLock();
					ptrScene = scene;

					win3D->unlockAccess3DScene();

					// Move camera:
					win3D->setCameraPointingToPoint( curRobotPose.x(),curRobotPose.y(),curRobotPose.z() );

					// Update:
					win3D->forceRepaint();

					sleep( SHOW_PROGRESS_3D_REAL_TIME_DELAY_MS );
				}
			}


			// Save the memory usage:
			// ------------------------------------------------------------------
			{
				printf("Saving memory usage...");
				unsigned long	memUsage = getMemoryUsage();
				FILE		*f=os::fopen( format("%s/log_MemoryUsage.txt",OUT_DIR).c_str() ,"at");
				if (f)
				{
					os::fprintf(f,"%u\t%lu\n",step,memUsage);
					os::fclose(f);
				}
				printf("Ok! (%.04f MB)\n", ((float)memUsage)/(1024*1024) );
			}

			// Save the robot estimated pose for each step:
			f_path.printf("%i %f %f %f\n",
				step,
				mapBuilder.getCurrentPoseEstimation()->getMeanVal().x(),
				mapBuilder.getCurrentPoseEstimation()->getMeanVal().y(),
				mapBuilder.getCurrentPoseEstimation()->getMeanVal().yaw() );


			f_pathOdo.printf("%i %f %f %f\n",step,odoPose.x(),odoPose.y(),odoPose.phi());

		} // end of if "rawlog_offset"...

		step++;
		printf("\n---------------- STEP %u | RAWLOG ENTRY %u ----------------\n",step, (unsigned)rawlogEntry);

		// Free memory:
		action.clear_unique();
		observations.clear_unique();
	};

	printf("\n---------------- END!! (total time: %.03f sec) ----------------\n",tictacGlobal.Tac());

	//	hokuyo.turnOff();	
	sick.stop();

	

	// Save map:
	mapBuilder.getCurrentlyBuiltMap(finalMap);

	str = format("%s/_finalmap_.simplemap",OUT_DIR);
	printf("Dumping final map in binary format to: %s\n", str.c_str() );
	mapBuilder.saveCurrentMapToFile(str);

	CMultiMetricMap  *finalPointsMap = mapBuilder.getCurrentlyBuiltMetricMap();
	str = format("%s/_finalmaps_.txt",OUT_DIR);
	printf("Dumping final metric maps to %s_XXX\n", str.c_str() );
	finalPointsMap->saveMetricMapRepresentationToFile( str );

	if (win3D)
		win3D->waitForKey();

	MRPT_TRY_END
}