void  exec_setPoseByLabel( mrpt::slam::CActionCollection *acts, mrpt::slam::CSensoryFrame *SF, int &changesCount  )
{
	if (SF)
	{
		for (CSensoryFrame::iterator it= SF->begin();it!=SF->end();++it)
		{
		    CObservationPtr obs = *it;

		    if ( obs->sensorLabel == labelToProcess)
		    {
				if (changeOnlyXYZ)
				{
					CPose3D   tmpPose;
					obs->getSensorPose(tmpPose);
					tmpPose.setFromValues(
						sensorPoseToSet.x,
						sensorPoseToSet.y,
						sensorPoseToSet.z,
						tmpPose.yaw(),
						tmpPose.pitch(),
						tmpPose.roll()
						);
					obs->setSensorPose( tmpPose );
				}
				else
				{
					obs->setSensorPose( sensorPoseToSet );
				}
				changesCount++;
		    }
		} // end for each obs.
	}
}
// ------------------------------------------------------------
//    Set the sensor pose
// ------------------------------------------------------------
void  exec_setPoseByIdx( mrpt::slam::CActionCollection *acts, mrpt::slam::CSensoryFrame *SF, int &changesCount  )
{
	if (SF)
		if (SF->size()>idxToProcess)
		{
		    CObservationPtr obs = SF->getObservationByIndex(idxToProcess);
		    if (changeOnlyXYZ)
		    {
                CPose3D   tmpPose;
                obs->getSensorPose(tmpPose);
                tmpPose.setFromValues(
                    sensorPoseToSet.x,
                    sensorPoseToSet.y,
                    sensorPoseToSet.z,
                    tmpPose.yaw(),
                    tmpPose.pitch(),
                    tmpPose.roll()
                    );
                obs->setSensorPose( tmpPose );
		    }
		    else
		    {
		        obs->setSensorPose( sensorPoseToSet );
		    }
			changesCount++;
		}
}
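
These two helpers depend on file-scope state (labelToProcess, idxToProcess, changeOnlyXYZ, sensorPoseToSet) that the surrounding tool is expected to set up. A minimal driver sketch, assuming those globals are already initialized and reusing CRawlog::readActionObservationPair() as the later examples do:

// Hypothetical driver: stream a rawlog and patch the sensor pose of every
// observation whose label matches "labelToProcess".
CFileGZInputStream   in("dataset.rawlog");   // assumed input file
CActionCollectionPtr acts;
CSensoryFramePtr     SF;
size_t rawlogEntry = 0;
int    changes     = 0;
while (CRawlog::readActionObservationPair(in, acts, SF, rawlogEntry))
	exec_setPoseByLabel(acts.pointer(), SF.pointer(), changes);
cout << changes << " observations updated." << endl;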
Example 3
// ------------------------------------------------------
//				Benchmark Point Maps
// ------------------------------------------------------
double pointmap_test_0(int a1, int a2)
{
	// test 0: insert scan
	// ----------------------------------------

	// prepare the laser scan:
	CObservation2DRangeScan	scan1;
	scan1.aperture = M_PIf;
	scan1.rightToLeft = true;
	scan1.validRange.resize( sizeof(SCAN_RANGES_1)/sizeof(SCAN_RANGES_1[0]) );
	scan1.scan.resize( sizeof(SCAN_RANGES_1)/sizeof(SCAN_RANGES_1[0]) );
	memcpy( &scan1.scan[0], SCAN_RANGES_1, sizeof(SCAN_RANGES_1) );
	memcpy( &scan1.validRange[0], SCAN_VALID_1, sizeof(SCAN_VALID_1) );


	CSimplePointsMap  pt_map;

	pt_map.insertionOptions.minDistBetweenLaserPoints = 0.03;
	CPose3D pose;

	CTicTac	 tictac;  // note: CTicTac starts timing at construction
	for (int n=0;n<a2;n++)
	{
		pt_map.clear();
		for (long i=0;i<a1;i++)
		{
			pose.setFromValues( pose.x()+0.04, pose.y()+0.08,0, pose.yaw()+0.02);
			pt_map.insertObservation(&scan1, &pose);
		}
	}
	return tictac.Tac()/a2;
}
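
A hypothetical invocation, assuming the benchmark harness passes a1 as the number of scans inserted per map and a2 as the number of repetitions to average over:

double t = pointmap_test_0(100 /* scans per map */, 10 /* repetitions */);
printf("Scan insertion: %f s per map build\n", t);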
Example 4
double pointmap_test_1(int a1, int a2)
{
	// test 1: insert scan
	// ----------------------------------------

	// prepare the laser scan:
	CObservation2DRangeScan	scan1;
	scan1.aperture = M_PIf;
	scan1.rightToLeft = true;
	scan1.validRange.resize( sizeof(SCAN_RANGES_1)/sizeof(SCAN_RANGES_1[0]) );
	scan1.scan.resize( sizeof(SCAN_RANGES_1)/sizeof(SCAN_RANGES_1[0]) );
	memcpy( &scan1.scan[0], SCAN_RANGES_1, sizeof(SCAN_RANGES_1) );
	memcpy( &scan1.validRange[0], SCAN_VALID_1, sizeof(SCAN_VALID_1) );


	CSimplePointsMap  pt_map;

	pt_map.insertionOptions.minDistBetweenLaserPoints = 0.03;
	CPose3D pose;

	CTicTac	 tictac;
	for (long i=0;i<a1;i++)
	{
		pose.setFromValues( pose.x()+0.04, pose.y()+0.08,0, pose.yaw()+0.02);

		pt_map.insertObservation(&scan1, &pose);
	}

	const unsigned N_REPS = 25;

	if (a2==0)
		return tictac.Tac();
	else if (a2==1)
	{ // 2d kd-tree
		float x,y, dist2;

		tictac.Tic();

		for (unsigned k=N_REPS;k!=0;--k)
			/*size_t idx = */ pt_map.kdTreeClosestPoint2D(5.0, 6.0, x,y, dist2);

		return tictac.Tac()/N_REPS;
	}
	else
	{ // 3d kd-tree
		float x,y,z, dist2;
		tictac.Tic();

		for (unsigned k=N_REPS;k!=0;--k)
			/*size_t idx =*/ pt_map.kdTreeClosestPoint3D(5.0, 6.0, 1.0, x,y,z, dist2);

		return tictac.Tac()/N_REPS;
	}
}
Example 5
	void test_to_from_2d(double x,double y, double phi)
	{
	    const CPose2D p2d = CPose2D(x,y,phi);
	    const CPose3D p3d = CPose3D(p2d);

	    const CPose2D p2d_bis = CPose2D(p3d);

        EXPECT_FLOAT_EQ( p2d.x(), p2d_bis.x() ) << "p2d: " << p2d << endl;
        EXPECT_FLOAT_EQ( p2d.y(), p2d_bis.y() ) << "p2d: " << p2d << endl;
        EXPECT_FLOAT_EQ( p2d.phi(), p2d_bis.phi() ) << "p2d: " << p2d << endl;

        EXPECT_FLOAT_EQ( p2d.phi(), p3d.yaw() )  << "p2d: " << p2d << endl;
	}
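
The helper uses Google Test's EXPECT_* macros; a sketch of how it might be exercised, assuming it is a method of a hypothetical test fixture named Pose3DTests:

TEST_F(Pose3DTests, ToFrom2D)
{
	test_to_from_2d( 0.0,  0.0,  0.0);
	test_to_from_2d( 1.0, -2.0,  0.3);
	test_to_from_2d(-3.0,  5.0, -2.0);
}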
Example 6
	void test_default_values(const CPose3D &p, const std::string & label)
	{
		EXPECT_EQ(p.x(),0);
		EXPECT_EQ(p.y(),0);
		EXPECT_EQ(p.z(),0);
		EXPECT_EQ(p.yaw(),0);
		EXPECT_EQ(p.pitch(),0);
		EXPECT_EQ(p.roll(),0);
		for (size_t i=0;i<4;i++)
			for (size_t j=0;j<4;j++)
				EXPECT_NEAR(p.getHomogeneousMatrixVal()(i,j), i==j ? 1.0 : 0.0, 1e-8 )
					<< "Failed for (i,j)=" << i << "," << j << endl
					<< "Matrix is: " << endl << p.getHomogeneousMatrixVal() << endl
					<< "case was: " << label << endl;
	}
Example 7
double pointmap_test_3(int a1, int a2)
{
	// test 3: query2D
	// ----------------------------------------

	// prepare the laser scan:
	CObservation2DRangeScan	scan1;
	scan1.aperture = M_PIf;
	scan1.rightToLeft = true;
	scan1.validRange.resize( sizeof(SCAN_RANGES_1)/sizeof(SCAN_RANGES_1[0]) );
	scan1.scan.resize( sizeof(SCAN_RANGES_1)/sizeof(SCAN_RANGES_1[0]) );
	memcpy( &scan1.scan[0], SCAN_RANGES_1, sizeof(SCAN_RANGES_1) );
	memcpy( &scan1.validRange[0], SCAN_VALID_1, sizeof(SCAN_VALID_1) );


	CSimplePointsMap  pt_map;

	pt_map.insertionOptions.minDistBetweenLaserPoints = 0.03;
	CPose3D pose;
	for (long i=0;i<a1;i++)
	{
		pose.setFromValues( pose.x()+0.04, pose.y()+0.08,0, pose.yaw()+0.02);
		pt_map.insertObservation(&scan1, &pose);
	}

	CTicTac	 tictac;
	float x0=-5;
	float y0=-4;
	float sq;
	float x,y;
	for (long i=0;i<a2;i++)
	{
		pt_map.kdTreeClosestPoint2D(x0,y0,x,y,sq);
		x0+=0.05;
		y0+=0.05;
		if (x0>20) x0=-10;
		if (y0>20) y0=-10;
	}

	return tictac.Tac()/a2;
}
Example 8
void ICPslamWrapper::publishMapPose()
{
  // get the currently built map
  metric_map_ = mapBuilder.getCurrentlyBuiltMetricMap();

  // publish map
  if (metric_map_->m_gridMaps.size())
  {
    nav_msgs::OccupancyGrid _msg;
    // if we have a new map for the current sensor, update it
    mrpt_bridge::convert(*metric_map_->m_gridMaps[0], _msg);
    pub_map_.publish(_msg);
    pub_metadata_.publish(_msg.info);
  }
  if (metric_map_->m_pointsMaps.size())
  {
    sensor_msgs::PointCloud _msg;
    std_msgs::Header header;
    header.stamp = ros::Time(0);
    header.frame_id = global_frame_id;
    // if we have a new map for the current sensor, update it
    mrpt_bridge::point_cloud::mrpt2ros(*metric_map_->m_pointsMaps[0], header, _msg);
    pub_point_cloud_.publish(_msg);
  }

  CPose3D robotPose;
  mapBuilder.getCurrentPoseEstimation()->getMean(robotPose);

  // publish pose
  // ("pose" is a geometry_msgs::PoseStamped member declared elsewhere in the class)
  pose.header.frame_id = global_frame_id;

  // the pose
  pose.pose.position.x = robotPose.x();
  pose.pose.position.y = robotPose.y();
  pose.pose.position.z = 0.0;
  pose.pose.orientation = tf::createQuaternionMsgFromYaw(robotPose.yaw());

  pub_pose_.publish(pose);
}
Example 9
/*---------------------------------------------------------------
					saveInterpolatedToTextFile
  ---------------------------------------------------------------*/
bool CPose3DInterpolator::saveInterpolatedToTextFile(const std::string &s, double period) const
{
	try
	{
		CFileOutputStream	f(s);

		if (m_path.empty()) return true;


		const TTimeStamp t_ini = m_path.begin()->first;
		const TTimeStamp t_end = m_path.rbegin()->first;

		TTimeStamp At = mrpt::system::secondsToTimestamp(period);

		//cout << "Interp: " << t_ini << " " << t_end << " " << At << endl;

		CPose3D	   p;
		bool       valid;

		for (TTimeStamp t=t_ini;t<=t_end;t+=At)
		{
			this->interpolate( t, p, valid );
			if (!valid) continue;

			int r = f.printf("%.06f %.06f %.06f %.06f %.06f %.06f %.06f\n",
				mrpt::system::timestampTotime_t(t),
				p.x(),p.y(),p.z(),
				p.yaw(),p.pitch(),p.roll());
			ASSERT_(r>0);
		}

		return true;
	}
	catch(...)
	{
		return false;
	}
}
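
A minimal usage sketch for the method above (the file name and sampling period are assumptions):

CPose3DInterpolator interp;
// ... populate "interp" with time-stamped poses from a SLAM run ...
if (!interp.saveInterpolatedToTextFile("path_10Hz.txt", 0.1 /* seconds */))
	cerr << "Could not write the interpolated path" << endl;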
Example 10
double pointmap_test_5(int a1, int a2)
{
	// test 5: boundingBox
	// ----------------------------------------

	// prepare the laser scan:
	CObservation2DRangeScan	scan1;
	scan1.aperture = M_PIf;
	scan1.rightToLeft = true;
	scan1.validRange.resize( sizeof(SCAN_RANGES_1)/sizeof(SCAN_RANGES_1[0]) );
	scan1.scan.resize( sizeof(SCAN_RANGES_1)/sizeof(SCAN_RANGES_1[0]) );
	memcpy( &scan1.scan[0], SCAN_RANGES_1, sizeof(SCAN_RANGES_1) );
	memcpy( &scan1.validRange[0], SCAN_VALID_1, sizeof(SCAN_VALID_1) );


	CSimplePointsMap  pt_map;

	pt_map.insertionOptions.minDistBetweenLaserPoints = 0.03;
	CPose3D pose;
	for (long i=0;i<a1;i++)
	{
		pose.setFromValues( pose.x()+0.04, pose.y()+0.08,0, pose.yaw()+0.02);
		pt_map.insertObservation(&scan1, &pose);
	}

	CTicTac	 tictac;

	float x0,x1, y0,y1, z0,z1;
	for (long i=0;i<a2;i++)
	{
		// Modify the map so the bounding box cache is invalidated:
		pt_map.setPoint(0, 0,0,0);

		pt_map.boundingBox(x0,x1, y0,y1, z0,z1);
	}

	return tictac.Tac()/a2;
}
Example 11
CPose3DPDFPtr CICP::ICP3D_Method_Classic(
		const mrpt::maps::CMetricMap		*m1,
		const mrpt::maps::CMetricMap		*mm2,
		const CPose3DPDFGaussian &initialEstimationPDF,
		TReturnInfo				&outInfo )
{
	MRPT_START

	// The result can be either a Gaussian or a SOG:
	CPose3DPDFPtr								resultPDF;
	CPose3DPDFGaussianPtr						gaussPdf;

	size_t									nCorrespondences=0;
	bool									keepApproaching;
	CPose3D									grossEst = initialEstimationPDF.mean;
	mrpt::utils::TMatchingPairList			correspondences,old_correspondences;
	CPose3D									lastMeanPose;

	// Ensure the maps are of the expected classes:
	ASSERT_(mm2->GetRuntimeClass()->derivedFrom(CLASS_ID(CPointsMap)));
	const CPointsMap		*m2 = (CPointsMap*)mm2;

	// Asserts:
	// -----------------
	ASSERT_( options.ALFA>0 && options.ALFA<1 );

	// The algorithm's auxiliary output info:
	// -------------------------------------------------
	outInfo.nIterations		= 0;
	outInfo.goodness		= 1;
	outInfo.quality			= 0;

	// The gaussian PDF to estimate:
	// ------------------------------------------------------
	gaussPdf = CPose3DPDFGaussian::Create();

	// First gross approximation:
	gaussPdf->mean = grossEst;

	// Initial thresholds:
	TMatchingParams matchParams;
	TMatchingExtraResults matchExtraResults;

	matchParams.maxDistForCorrespondence = options.thresholdDist;			// Distance threshold
	matchParams.maxAngularDistForCorrespondence = options.thresholdAng;	// Angular threshold
	matchParams.onlyKeepTheClosest = options.onlyClosestCorrespondences;
	matchParams.onlyUniqueRobust = options.onlyUniqueRobust;
	matchParams.decimation_other_map_points = options.corresponding_points_decimation;

	// Ensure maps are not empty!
	// ------------------------------------------------------
	if ( !m2->isEmpty() )
	{
		matchParams.offset_other_map_points = 0;

		// ------------------------------------------------------
		//					The ICP loop
		// ------------------------------------------------------
		do
		{
			matchParams.angularDistPivotPoint = TPoint3D(gaussPdf->mean.x(),gaussPdf->mean.y(),gaussPdf->mean.z());

			// ------------------------------------------------------
			//		Find the matching (for a points map)
			// ------------------------------------------------------
			m1->determineMatching3D(
				m2,						// The other map
				gaussPdf->mean,						// The other map pose
				correspondences,
				matchParams, matchExtraResults);

			nCorrespondences = correspondences.size();

			if ( !nCorrespondences )
			{
				// Nothing we can do !!
				keepApproaching = false;
			}
			else
			{
				// Compute the estimated pose, using Horn's method.
				// ----------------------------------------------------------------------
				mrpt::poses::CPose3DQuat estPoseQuat;
				double transf_scale;
				mrpt::tfest::se3_l2(correspondences, estPoseQuat, transf_scale, false /* don't force unit scale */ );
				gaussPdf->mean = estPoseQuat;

				// If matching has not changed, decrease the thresholds:
				// --------------------------------------------------------
				keepApproaching = true;
				if	(!(fabs(lastMeanPose.x()-gaussPdf->mean.x())>options.minAbsStep_trans ||
					fabs(lastMeanPose.y()-gaussPdf->mean.y())>options.minAbsStep_trans ||
					fabs(lastMeanPose.z()-gaussPdf->mean.z())>options.minAbsStep_trans ||
					fabs(math::wrapToPi(lastMeanPose.yaw()-gaussPdf->mean.yaw()))>options.minAbsStep_rot ||
					fabs(math::wrapToPi(lastMeanPose.pitch()-gaussPdf->mean.pitch()))>options.minAbsStep_rot ||
					fabs(math::wrapToPi(lastMeanPose.roll()-gaussPdf->mean.roll()))>options.minAbsStep_rot ))
				{
					matchParams.maxDistForCorrespondence		*= options.ALFA;
					matchParams.maxAngularDistForCorrespondence		*= options.ALFA;
					if (matchParams.maxDistForCorrespondence < options.smallestThresholdDist )
						keepApproaching = false;

					if (++matchParams.offset_other_map_points>=options.corresponding_points_decimation)
						matchParams.offset_other_map_points=0;
				}

				lastMeanPose = gaussPdf->mean;

			}	// end of "else, there are correspondences"

			// Next iteration:
			outInfo.nIterations++;

			if (outInfo.nIterations >= options.maxIterations && matchParams.maxDistForCorrespondence>options.smallestThresholdDist)
			{
				matchParams.maxDistForCorrespondence		*= options.ALFA;
			}

		} while	( (keepApproaching && outInfo.nIterations<options.maxIterations) ||
					(outInfo.nIterations >= options.maxIterations && matchParams.maxDistForCorrespondence>options.smallestThresholdDist) );


		// -------------------------------------------------
		//   Obtain the covariance matrix of the estimation
		// -------------------------------------------------
		if (!options.skip_cov_calculation && nCorrespondences)
		{
			// ...
		}

		//
		outInfo.goodness = matchExtraResults.correspondencesRatio;


	} // end of "if m2 is not empty"

	// Return the gaussian distribution:
	resultPDF = gaussPdf;

	return resultPDF;

	MRPT_END
}
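
The loop above anneals the correspondence gates: once the pose update falls below minAbsStep_trans/minAbsStep_rot, both thresholds are multiplied by ALFA until the distance gate drops below smallestThresholdDist. A stand-alone sketch of that schedule, using as assumptions the option values that appear later in this collection (0.75, 0.30, 0.10):

double       maxDist  = 0.75;   // options.thresholdDist
const double ALFA     = 0.30;   // options.ALFA, must lie in (0,1)
const double smallest = 0.10;   // options.smallestThresholdDist
int rounds = 0;
while (maxDist >= smallest)
{
	maxDist *= ALFA;            // shrink the gate each time ICP stalls at this scale
	++rounds;
}
printf("Distance gate closes after %d annealing rounds\n", rounds);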
Example 12
CPose2D::CPose2D(const CPose3D &p) : m_phi(p.yaw()),m_cossin_uptodate(false)
{
	m_coords[0] = p.x();
	m_coords[1] = p.y();
}
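
This constructor projects a 6D pose onto SE(2): z, pitch and roll are simply discarded. For example:

CPose3D p3(1.0, 2.0, 3.0, 0.5 /*yaw*/, 0.1 /*pitch*/, 0.2 /*roll*/);
CPose2D p2(p3);   // p2 == (x=1.0, y=2.0, phi=0.5); z, pitch and roll are dropped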
Example 13
int main(int argc, char ** argv)
{
    try
    {
        bool showHelp    = argc>1 && !os::_strcmp(argv[1],"--help");
        bool showVersion = argc>1 && !os::_strcmp(argv[1],"--version");

        printf(" simul-landmarks - Part of the MRPT\n");
        printf(" MRPT C++ Library: %s - BUILD DATE %s\n", MRPT_getVersion().c_str(), MRPT_getCompilationDate().c_str());

        if (showVersion)
            return 0;	// Program end

        // Process arguments:
        if (argc<2 || showHelp )
        {
            printf("Usage: %s <config_file.ini>\n\n",argv[0]);
            if (!showHelp)
            {
                mrpt::system::pause();
                return -1;
            }
            else	return 0;
        }

        string INI_FILENAME = std::string( argv[1] );
        ASSERT_FILE_EXISTS_(INI_FILENAME)

        CConfigFile		ini( INI_FILENAME );


        const int random_seed = ini.read_int("Params","random_seed",0);

        if (random_seed!=0)
            randomGenerator.randomize(random_seed);
        else	randomGenerator.randomize();

        // Set default values:
        unsigned int nLandmarks = 3;
        unsigned int nSteps = 100;
        std::string 	outFile("out.rawlog");
        std::string 	outDir("OUT");

        float min_x =-5;
        float max_x =5;
        float min_y =-5;
        float max_y =5;
        float min_z =0;
        float max_z =0;

        float odometryNoiseXY_std = 0.001f;
        float odometryNoisePhi_std_deg = 0.01f;
        float odometryNoisePitch_std_deg = 0.01f;
        float odometryNoiseRoll_std_deg = 0.01f;

        float minSensorDistance = 0;
        float maxSensorDistance = 10;
        float fieldOfView_deg= 180.0f;

        float sensorPose_x = 0;
        float sensorPose_y = 0;
        float sensorPose_z = 0;
        float sensorPose_yaw_deg = 0;
        float sensorPose_pitch_deg = 0;
        float sensorPose_roll_deg = 0;

        float stdRange = 0.01f;
        float stdYaw_deg = 0.1f;
        float stdPitch_deg = 0.1f;

        bool sensorDetectsIDs=true;

        bool  circularPath = true;
        bool  random6DPath = false;
        size_t squarePathLength=40;

        // Load params from INI:
        MRPT_LOAD_CONFIG_VAR(outFile,string,	ini,"Params");
        MRPT_LOAD_CONFIG_VAR(outDir,string,	ini,"Params");
        MRPT_LOAD_CONFIG_VAR(nSteps,int,	ini,"Params");
        MRPT_LOAD_CONFIG_VAR(circularPath,bool,	ini,"Params");
        MRPT_LOAD_CONFIG_VAR(random6DPath,bool,	ini,"Params");
        MRPT_LOAD_CONFIG_VAR(squarePathLength,int,ini,"Params");

        MRPT_LOAD_CONFIG_VAR(sensorDetectsIDs,bool, ini,"Params");

        MRPT_LOAD_CONFIG_VAR(odometryNoiseXY_std,float,	ini,"Params");
        MRPT_LOAD_CONFIG_VAR(odometryNoisePhi_std_deg,float, ini,"Params");
        MRPT_LOAD_CONFIG_VAR(odometryNoisePitch_std_deg,float, ini,"Params");
        MRPT_LOAD_CONFIG_VAR(odometryNoiseRoll_std_deg,float, ini,"Params");

        MRPT_LOAD_CONFIG_VAR(sensorPose_x,float,	ini,"Params");
        MRPT_LOAD_CONFIG_VAR(sensorPose_y,float,	ini,"Params");
        MRPT_LOAD_CONFIG_VAR(sensorPose_z,float,	ini,"Params");
        MRPT_LOAD_CONFIG_VAR(sensorPose_yaw_deg,float,	ini,"Params");
        MRPT_LOAD_CONFIG_VAR(sensorPose_pitch_deg,float,	ini,"Params");
        MRPT_LOAD_CONFIG_VAR(sensorPose_roll_deg,float,	ini,"Params");


        MRPT_LOAD_CONFIG_VAR(minSensorDistance,float,	ini,"Params");
        MRPT_LOAD_CONFIG_VAR(maxSensorDistance,float,	ini,"Params");
        MRPT_LOAD_CONFIG_VAR(fieldOfView_deg,float,	ini,"Params");

        bool show_in_3d = false;
        MRPT_LOAD_CONFIG_VAR(show_in_3d,bool,	ini,"Params");


        MRPT_LOAD_CONFIG_VAR(stdRange,float,	ini,"Params");
        MRPT_LOAD_CONFIG_VAR(stdYaw_deg,float,	ini,"Params");
        MRPT_LOAD_CONFIG_VAR(stdPitch_deg,float,	ini,"Params");


        float stdYaw = DEG2RAD(stdYaw_deg);
        float stdPitch = DEG2RAD(stdPitch_deg);

        float odometryNoisePhi_std = DEG2RAD(odometryNoisePhi_std_deg);
        float odometryNoisePitch_std = DEG2RAD(odometryNoisePitch_std_deg);
        float odometryNoiseRoll_std = DEG2RAD(odometryNoiseRoll_std_deg);

        float fieldOfView = DEG2RAD(fieldOfView_deg);

        CPose3D  sensorPoseOnRobot(
            sensorPose_x,
            sensorPose_y,
            sensorPose_z,
            DEG2RAD( sensorPose_yaw_deg ),
            DEG2RAD( sensorPose_pitch_deg ),
            DEG2RAD( sensorPose_roll_deg ) );

        // Create out dir:
        mrpt::system::createDirectory(outDir);

        // ---------------------------------------------
        // Create the point-beacons:
        // ---------------------------------------------
        printf("Creating landmark map...");
        mrpt::maps::CLandmarksMap    landmarkMap;
        int randomSetCount = 0;
        int uniqueIds = 1;

        // Process each of the "RANDOMSET_%i" found:
        do
        {
            string sectName = format("RANDOMSET_%i",++randomSetCount);

            nLandmarks = 0;
            MRPT_LOAD_CONFIG_VAR(nLandmarks,uint64_t,	ini,sectName);
            MRPT_LOAD_CONFIG_VAR(min_x,float,	ini,sectName);
            MRPT_LOAD_CONFIG_VAR(max_x,float,	ini,sectName);
            MRPT_LOAD_CONFIG_VAR(min_y,float,	ini,sectName);
            MRPT_LOAD_CONFIG_VAR(max_y,float,	ini,sectName);
            MRPT_LOAD_CONFIG_VAR(min_z,float,	ini,sectName);
            MRPT_LOAD_CONFIG_VAR(max_z,float,	ini,sectName);

            for (size_t i=0; i<nLandmarks; i++)
            {
                CLandmark   LM;
                CPointPDFGaussian   pt3D;

                // Random coordinates:
                pt3D.mean = CPoint3D(
                                randomGenerator.drawUniform(min_x,max_x),
                                randomGenerator.drawUniform(min_y,max_y),
                                randomGenerator.drawUniform(min_z,max_z) );

                // Add:
                LM.createOneFeature();
                LM.features[0]->type = featBeacon;
                LM.ID = uniqueIds++;
                LM.setPose( pt3D );

                landmarkMap.landmarks.push_back(LM);
            }

            if (nLandmarks) cout << nLandmarks << " landmarks generated for RANDOMSET_" << randomSetCount << endl;

        }
        while (nLandmarks);


        landmarkMap.saveToTextFile( format("%s/%s_ground_truth.txt",outDir.c_str(),outFile.c_str()) );
        printf("Done!\n");

        // ---------------------------------------------
        // Simulate:
        // ---------------------------------------------
        size_t nWarningsNoSight=0;

        CActionRobotMovement2D::TMotionModelOptions   opts;
        opts.modelSelection = CActionRobotMovement2D::mmGaussian;
        opts.gausianModel.a1=0;
        opts.gausianModel.a2=0;
        opts.gausianModel.a3=0;
        opts.gausianModel.a4=0;
        opts.gausianModel.minStdXY = odometryNoiseXY_std;
        opts.gausianModel.minStdPHI = odometryNoisePhi_std;

        // Output rawlog, gz-compressed.
        CFileGZOutputStream  fil( format("%s/%s",outDir.c_str(),outFile.c_str()));
        CPose3D         realPose;

        const size_t N_STEPS_STOP_AT_THE_BEGINNING = 4;

        CMatrixDouble  GT_path;

        for (size_t i=0; i<nSteps; i++)
        {
            cout << "Generating step " << i << "...\n";
            CSensoryFrame           SF;
            CActionCollection         acts;

            CPose3D	incPose3D;
            bool    incPose_is_3D = random6DPath;

            if (i>=N_STEPS_STOP_AT_THE_BEGINNING)
            {
                if (random6DPath)
                {   // 3D path
                    const double Ar = DEG2RAD(3);
                    TPose3D  Ap = TPose3D(0.20*cos(Ar),0.20*sin(Ar),0,Ar,0,0);
                    //Ap.z  += randomGenerator.drawGaussian1D(0,0.05);
                    Ap.yaw   += randomGenerator.drawGaussian1D(0,DEG2RAD(0.2));
                    Ap.pitch += randomGenerator.drawGaussian1D(0,DEG2RAD(2));
                    Ap.roll += randomGenerator.drawGaussian1D(0,DEG2RAD(4));

                    incPose3D = CPose3D(Ap);
                }
                else
                {   // 2D path:
                    if (circularPath)
                    {
                        // Circular path:
                        float Ar = DEG2RAD(5);
                        incPose3D = CPose3D(CPose2D(0.20f*cos(Ar),0.20f*sin(Ar),Ar));
                    }
                    else
                    {
                        // Square path:
                        if ( (i % squarePathLength) > (squarePathLength-5) )
                            incPose3D = CPose3D(CPose2D(0,0,DEG2RAD(90.0f/4)));
                        else		incPose3D = CPose3D(CPose2D(0.20f,0,0));
                    }
                }
            }
            else
            {
                // Robot is still at the beginning:
                incPose3D = CPose3D(0,0,0,0,0,0);
            }

            // Simulate observations:
            CObservationBearingRangePtr obs=CObservationBearingRange::Create();

            obs->minSensorDistance=minSensorDistance;
            obs->maxSensorDistance=maxSensorDistance;
            obs->fieldOfView_yaw = fieldOfView;
            obs->fieldOfView_pitch = fieldOfView;
            obs->sensorLocationOnRobot = sensorPoseOnRobot;

            landmarkMap.simulateRangeBearingReadings(
                realPose,
                sensorPoseOnRobot,
                *obs,
                sensorDetectsIDs, // whether to identify landmarks
                stdRange,
                stdYaw,
                stdPitch );

            // Keep the GT of the robot pose:
            GT_path.setSize(i+1,6);
            for (size_t k=0; k<6; k++)
                GT_path(i,k)=realPose[k];

            cout << obs->sensedData.size() << " landmarks in sight";

            if (obs->sensedData.empty()) nWarningsNoSight++;

            SF.push_back( obs );

            // Simulate odometry, from "incPose3D" with noise:
            if (!incPose_is_3D)
            {   // 2D odometry:
                CActionRobotMovement2D    act;
                CPose2D     incOdo( incPose3D );
                if (incPose3D.x()!=0 || incPose3D.y()!=0 || incPose3D.yaw()!=0)
                {
                    incOdo.x_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
                    incOdo.y_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
                    incOdo.phi_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoisePhi_std );
                }
                act.computeFromOdometry(incOdo, opts);
                acts.insert( act );
            }
            else
            {   // 3D odometry:
                CActionRobotMovement3D    act;
                act.estimationMethod	= CActionRobotMovement3D::emOdometry;

                CPose3D   noisyIncPose = incPose3D;

                if (incPose3D.x()!=0 || incPose3D.y()!=0 || incPose3D.yaw()!=0)
                {
                    noisyIncPose.x_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
                    noisyIncPose.y_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
                    noisyIncPose.z_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
                    noisyIncPose.setYawPitchRoll(
                        noisyIncPose.yaw()+ randomGenerator.drawGaussian1D_normalized() * odometryNoisePhi_std,
                        noisyIncPose.pitch()+ randomGenerator.drawGaussian1D_normalized() * odometryNoisePitch_std,
                        noisyIncPose.roll()+ randomGenerator.drawGaussian1D_normalized() * odometryNoiseRoll_std );
                }

                act.poseChange.mean = noisyIncPose;
                act.poseChange.cov.eye();

                act.poseChange.cov(0,0) =
                    act.poseChange.cov(1,1) =
                        act.poseChange.cov(2,2) = square(odometryNoiseXY_std);
                act.poseChange.cov(3,3) = square(odometryNoisePhi_std);
                act.poseChange.cov(4,4) = square(odometryNoisePitch_std);
                act.poseChange.cov(5,5) = square(odometryNoiseRoll_std);

                acts.insert( act );
            }

            // Save:
            fil << SF << acts;

            // Next pose:
            realPose = realPose + incPose3D;

            cout << endl;
        }

        // Save the ground truth for the robot poses as well:
        GT_path.saveToTextFile(
            format("%s/%s_ground_truth_robot_path.txt",outDir.c_str(),outFile.c_str()),
            MATRIX_FORMAT_FIXED,
            true,
            "% Columns are: x(m) y(m) z(m) yaw(rad) pitch(rad) roll(rad)\n");

        cout << "Data saved to directory: " << outDir << endl;

        if (nWarningsNoSight)
            cout << "WARNING: " << nWarningsNoSight << " observations contained zero landmarks in the sensor range." << endl;


        // Optionally, display in 3D:
        if (show_in_3d && size(GT_path,1)>1)
        {
#if MRPT_HAS_OPENGL_GLUT  && MRPT_HAS_WXWIDGETS
            mrpt::gui::CDisplayWindow3D		win("Final simulation",400,300);

            mrpt::opengl::COpenGLScenePtr &scene = win.get3DSceneAndLock();

            scene->insert( mrpt::opengl::CGridPlaneXY::Create( min_x-10,max_x+10,min_y-10,max_y+10,0 ));
            scene->insert( mrpt::opengl::stock_objects::CornerXYZ() );

            // Insert all landmarks:
            for (CLandmarksMap::TCustomSequenceLandmarks::const_iterator it=landmarkMap.landmarks.begin(); it!=landmarkMap.landmarks.end(); ++it)
            {
                mrpt::opengl::CSpherePtr lm = mrpt::opengl::CSphere::Create();
                lm->setColor(1,0,0);
                lm->setRadius(0.1);
                lm->setLocation( it->pose_mean );
                lm->setName( format("LM#%u",(unsigned) it->ID ) );
                //lm->enableShowName(true);
                scene->insert(lm);
            }

            // Insert all robot poses:
            const size_t N = size(GT_path,1);
            mrpt::opengl::CSetOfLinesPtr  pathLines = mrpt::opengl::CSetOfLines::Create();
            pathLines->setColor(0,0,1,0.5);
            pathLines->setLineWidth(3.0);
            pathLines->resize(N-1);

            for (size_t i=0; i<N-1; i++)
                pathLines->setLineByIndex(i, GT_path(i,0),GT_path(i,1),GT_path(i,2),  GT_path(i+1,0),GT_path(i+1,1),GT_path(i+1,2) );

            scene->insert(pathLines);

            for (size_t i=0; i<N; i++)
            {
                mrpt::opengl::CSetOfObjectsPtr  corner = mrpt::opengl::stock_objects::CornerXYZ();
                corner->setScale(0.2);
                corner->setPose(TPose3D(GT_path(i,0),GT_path(i,1),GT_path(i,2),GT_path(i,3),GT_path(i,4),GT_path(i,5)));
                scene->insert(corner);
            }

            win.unlockAccess3DScene();
            win.forceRepaint();

            cout << "Press any key or close the 3D window to exit." << endl;
            win.waitForKey();

#endif // MRPT_HAS_OPENGL_GLUT  && MRPT_HAS_WXWIDGETS
        }

    }
    catch (std::exception &e)
    {
        std::cout << e.what();
    }
    catch (...)
    {
        std::cout << "Untyped exception!";
    }
}
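
simul-landmarks is driven entirely by its INI file. A minimal hypothetical configuration, using only keys that the MRPT_LOAD_CONFIG_VAR calls above actually read:

; [Params] holds the global simulation options (values are illustrative).
[Params]
outFile      = out.rawlog
outDir       = OUT
nSteps       = 100
circularPath = 1
random6DPath = 0
show_in_3d   = 1
random_seed  = 0

; One section per landmark cluster; the RANDOMSET_%i scan stops at the
; first section whose nLandmarks is 0 or missing.
[RANDOMSET_1]
nLandmarks = 20
min_x = -5
max_x = 5
min_y = -5
max_y = 5
min_z = 0
max_z = 0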
Example 14
/*---------------------------------------------------------------
						getCovarianceAndMean
  ---------------------------------------------------------------*/
void CPose3DPDFParticles::getCovarianceAndMean(
	CMatrixDouble66& cov, CPose3D& mean) const
{
	MRPT_START

	getMean(mean);  // First! the mean value:

	// Now the covariance:
	cov.zeros();
	CVectorDouble vars;
	vars.assign(6, 0.0);  // The diagonal of the final covariance matrix

	// Elements off the diagonal of the covariance matrix:
	double std_xy = 0, std_xz = 0, std_xya = 0, std_xp = 0, std_xr = 0;
	double std_yz = 0, std_yya = 0, std_yp = 0, std_yr = 0;
	double std_zya = 0, std_zp = 0, std_zr = 0;
	double std_yap = 0, std_yar = 0;
	double std_pr = 0;

	// Mean values in [0, 2pi] range:
	double mean_yaw = mean.yaw();
	double mean_pitch = mean.pitch();
	double mean_roll = mean.roll();
	if (mean_yaw < 0) mean_yaw += M_2PI;
	if (mean_pitch < 0) mean_pitch += M_2PI;
	if (mean_roll < 0) mean_roll += M_2PI;

	// Enough information to estimate the covariance?
	if (m_particles.size() < 2) return;

	// Sum all weight values:
	double W = 0;
	for (CPose3DPDFParticles::CParticleList::const_iterator it =
			 m_particles.begin();
		 it != m_particles.end(); ++it)
		W += exp(it->log_w);

	ASSERT_(W > 0);

	// Compute covariance:
	for (CPose3DPDFParticles::CParticleList::const_iterator it =
			 m_particles.begin();
		 it != m_particles.end(); ++it)
	{
		double w = exp(it->log_w) / W;

		// Wrap angular errors to the [-pi, pi] range:
		double err_yaw = wrapToPi(fabs(it->d->yaw() - mean_yaw));
		double err_pitch = wrapToPi(fabs(it->d->pitch() - mean_pitch));
		double err_roll = wrapToPi(fabs(it->d->roll() - mean_roll));

		double err_x = it->d->x() - mean.x();
		double err_y = it->d->y() - mean.y();
		double err_z = it->d->z() - mean.z();

		vars[0] += square(err_x) * w;
		vars[1] += square(err_y) * w;
		vars[2] += square(err_z) * w;
		vars[3] += square(err_yaw) * w;
		vars[4] += square(err_pitch) * w;
		vars[5] += square(err_roll) * w;

		std_xy += err_x * err_y * w;
		std_xz += err_x * err_z * w;
		std_xya += err_x * err_yaw * w;
		std_xp += err_x * err_pitch * w;
		std_xr += err_x * err_roll * w;

		std_yz += err_y * err_z * w;
		std_yya += err_y * err_yaw * w;
		std_yp += err_y * err_pitch * w;
		std_yr += err_y * err_roll * w;

		std_zya += err_z * err_yaw * w;
		std_zp += err_z * err_pitch * w;
		std_zr += err_z * err_roll * w;

		std_yap += err_yaw * err_pitch * w;
		std_yar += err_yaw * err_roll * w;

		std_pr += err_pitch * err_roll * w;
	}  // end for it

	// Diagonal: weighted estimates of the variances:
	cov(0, 0) = vars[0];
	cov(1, 1) = vars[1];
	cov(2, 2) = vars[2];
	cov(3, 3) = vars[3];
	cov(4, 4) = vars[4];
	cov(5, 5) = vars[5];

	cov(1, 0) = cov(0, 1) = std_xy;
	cov(2, 0) = cov(0, 2) = std_xz;
	cov(3, 0) = cov(0, 3) = std_xya;
	cov(4, 0) = cov(0, 4) = std_xp;
	cov(5, 0) = cov(0, 5) = std_xr;

	cov(2, 1) = cov(1, 2) = std_yz;
	cov(3, 1) = cov(1, 3) = std_yya;
	cov(4, 1) = cov(1, 4) = std_yp;
	cov(5, 1) = cov(1, 5) = std_yr;

	cov(3, 2) = cov(2, 3) = std_zya;
	cov(4, 2) = cov(2, 4) = std_zp;
	cov(5, 2) = cov(2, 5) = std_zr;

	cov(4, 3) = cov(3, 4) = std_yap;
	cov(5, 3) = cov(3, 5) = std_yar;

	cov(5, 4) = cov(4, 5) = std_pr;

	MRPT_END
}
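
In formula form, the loop computes the weighted second moments of the particle errors about the mean, with angular errors wrapped to [-pi, pi]:

\Sigma_{ab} = \sum_i w_i \, e_{i,a} \, e_{i,b},
\qquad
w_i = \frac{e^{\log \omega_i}}{\sum_j e^{\log \omega_j}},
\qquad
e_i = \bigl(\Delta x_i,\; \Delta y_i,\; \Delta z_i,\; \mathrm{wrapToPi}(\Delta\psi_i),\; \mathrm{wrapToPi}(\Delta\theta_i),\; \mathrm{wrapToPi}(\Delta\phi_i)\bigr).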
Example 15
// ------------------------------------------------------
//				MapBuilding_ICP
// ------------------------------------------------------
void MapBuilding_ICP()
{
	MRPT_TRY_START

	CTicTac								tictac,tictacGlobal,tictac_JH;
	int									step = 0;
	std::string							str;
	CSensFrameProbSequence				finalMap;
	float								t_exec;
	COccupancyGridMap2D::TEntropyInfo	entropy;

	size_t						rawlogEntry = 0;
	CFileGZInputStream					rawlogFile( RAWLOG_FILE.c_str() );

	CFileGZOutputStream sensor_data;

	// ---------------------------------
	//		Constructor
	// ---------------------------------
	CMetricMapBuilderICP mapBuilder(
		&metricMapsOpts,
		insertionLinDistance,
		insertionAngDistance,
		&icpOptions
		);

	mapBuilder.ICP_options.matchAgainstTheGrid = matchAgainstTheGrid;

	// ---------------------------------
	//   CMetricMapBuilder::TOptions
	// ---------------------------------
	mapBuilder.options.verbose					= true;
	mapBuilder.options.enableMapUpdating		= true;
    mapBuilder.options.debugForceInsertion		= false;
	mapBuilder.options.insertImagesAlways		= false;

	// Prepare output directory:
	// --------------------------------
	deleteFilesInDirectory(OUT_DIR);
	createDirectory(OUT_DIR);

	// Open log files:
	// ----------------------------------
	CFileOutputStream  f_log(format("%s/log_times.txt",OUT_DIR));
	CFileOutputStream  f_path(format("%s/log_estimated_path.txt",OUT_DIR));
	CFileOutputStream  f_pathOdo(format("%s/log_odometry_path.txt",OUT_DIR));


	// Create 3D window if requested:
	CDisplayWindow3DPtr	win3D;
#if MRPT_HAS_WXWIDGETS
	if (SHOW_PROGRESS_3D_REAL_TIME)
	{
		win3D = CDisplayWindow3DPtr( new CDisplayWindow3D("ICP-SLAM @ MRPT C++ Library (C) 2004-2008", 600, 500) );
		win3D->setCameraZoom(20);
		win3D->setCameraAzimuthDeg(-45);
	}
#endif

	if(OBS_FROM_FILE == 0){
	  sensor_data.open("sensor_data.rawlog");
	  printf("Receive From Sensor\n");
	  initLaser();
	  printf("OK\n");
	}
	

	// ----------------------------------------------------------
	//						Map Building
	// ----------------------------------------------------------
	CActionCollectionPtr	action;
	CSensoryFramePtr		observations, temp_obs;
	CSensoryFramePtr obs_set;
	CPose2D					odoPose(0,0,0);

	CSimplePointsMap	oldMap, newMap;
	CICP					ICP;

	vector_float		accum_x, accum_y, accum_z;
	
	// ICP Setting
	ICP.options.ICP_algorithm = (TICPAlgorithm)ICP_method;

	ICP.options.maxIterations			= 40;
	ICP.options.thresholdAng =			0.15;
	ICP.options.thresholdDist			= 0.75f;
	ICP.options.ALFA					= 0.30f;
	ICP.options.smallestThresholdDist	= 0.10f;
	ICP.options.doRANSAC = false;
	ICP.options.dumpToConsole();
	//
	CObservationPtr obsSick = CObservationPtr(new CObservation2DRangeScan());
	CObservationPtr obsHokuyo = CObservationPtr(new CObservation2DRangeScan());
	CSimplePointsMap hokuyoMap;
	
	bool isFirstTime = true;
	bool loop = true;

    /*
		  cout << index << "frame, Input the any key to continue" << endl;
		  getc(stdin);
		  fflush(stdin);
    */

	tictacGlobal.Tic();
	while(loop)
	{
      /*
		if(BREAK){
		  cout << index << "frame, Input the any key to continue" << endl;
		  getc(stdin);
		  fflush(stdin);
		}else{
		  if(os::kbhit())
			loop = true;
		}
      */

        if (os::kbhit())
          loop = false;  // stop mapping on any key press
        

		if(DELAY) {
		  sleep(15);
		}
	  

		// Load action/observation pair from the rawlog:
		// --------------------------------------------------

		if(OBS_FROM_FILE == 1) {
		  if (! CRawlog::readActionObservationPair( rawlogFile, action, temp_obs, rawlogEntry) )
			break; // file EOF

		  obsSick = temp_obs->getObservationByIndex(0);
		  obsHokuyo = temp_obs->getObservationByIndex(1);
		  
		  observations = CSensoryFramePtr(new CSensoryFrame());		  
		  observations->insert((CObservationPtr)obsSick);

		  hokuyoMap.clear();
		  hokuyoMap.insertObservation(obsHokuyo.pointer());
		  
		}else{
		  rawlogEntry = rawlogEntry+2;

		  tictac.Tic();

		  obsSick = CObservationPtr(new CObservation2DRangeScan());
		  obsHokuyo = CObservationPtr(new CObservation2DRangeScan());

		  if(!receiveDataFromSensor((CObservation2DRangeScan*)obsSick.pointer(),SICK)){
			cout << " Error in receive sensor data" << endl;
			return;
		  }

		  if(!receiveDataFromSensor((CObservation2DRangeScan*)obsHokuyo.pointer(),HOKUYO)){
			cout << " Error in receive sensor data" << endl;

			return;
		  }
		  
		  cout << "Time to receive data : " << tictac.Tac()*1000.0f << endl;

		  obsSick->timestamp = mrpt::system::now();
		  obsSick->setSensorPose(CPose3D(0,0,0,DEG2RAD(0),DEG2RAD(0),DEG2RAD(0)));
		  ((CObservation2DRangeScan*)obsSick.pointer())->rightToLeft = true;
		  ((CObservation2DRangeScan*)obsSick.pointer())->stdError = 0.003f;

		  obsHokuyo->timestamp = mrpt::system::now();
		  obsHokuyo->setSensorPose(CPose3D(0,0,0.4,DEG2RAD(0),DEG2RAD(-90),DEG2RAD(0)));
		  ((CObservation2DRangeScan*)obsHokuyo.pointer())->rightToLeft = true;
		  ((CObservation2DRangeScan*)obsHokuyo.pointer())->stdError = 0.003f;
		  
		  cout << "rawlogEntry : " << rawlogEntry << endl;

		  CActionRobotMovement2D myAction;

		  newMap.clear();
		  obsSick.pointer()->insertObservationInto(&newMap);

		  if(!isFirstTime){

			static float					runningTime;
			static CICP::TReturnInfo		info;
			static CPose2D initial(0,0,0);

			CPosePDFPtr ICPPdf = ICP.AlignPDF(&oldMap, &newMap, initial, &runningTime, (void*)&info);
			CPose2D		estMean;
			CMatrixDouble33		estCov;
			ICPPdf->getCovarianceAndMean(estCov, estMean);
			printf("ICP run in %.02fms, %d iterations (%.02fms/iter), %.01f%% goodness\n -> ", runningTime*1000, info.nIterations, runningTime*1000.0f/info.nIterations, info.goodness*100 );
			cout << "ICP Odometry : " << ICPPdf->getMeanVal() << endl;

			myAction.estimationMethod = CActionRobotMovement2D::emScan2DMatching;
			myAction.poseChange = CPosePDFPtr( new CPosePDFGaussian(estMean, estCov));
		  }else{
			isFirstTime = false;
		  }

		  oldMap.clear();
		  oldMap.copyFrom(newMap);

		  observations = CSensoryFramePtr(new CSensoryFrame());
		  action = CActionCollectionPtr(new CActionCollection());		  

		  observations->insert((CObservationPtr)obsSick);

		  obs_set = CSensoryFramePtr(new CSensoryFrame());

		  obs_set->insert((CObservationPtr)obsSick);
		  obs_set->insert((CObservationPtr)obsHokuyo);
		  
		  action->insert(myAction);

		  sensor_data << action << obs_set;

		  hokuyoMap.clear();
		  hokuyoMap.insertObservation(obsHokuyo.pointer());

		}


		if (rawlogEntry>=rawlog_offset)
		{
			// Update odometry:
			{
				CActionRobotMovement2DPtr act= action->getBestMovementEstimation();
				if (act)
					odoPose = odoPose + act->poseChange->getMeanVal();
			}

			// Execute:
			// ----------------------------------------
			tictac.Tic();
				mapBuilder.processActionObservation( *action, *observations );
			t_exec = tictac.Tac();
			printf("Map building executed in %.03fms\n", 1000.0f*t_exec );


			// Info log:
			// -----------
			f_log.printf("%f %i\n",1000.0f*t_exec,mapBuilder.getCurrentlyBuiltMapSize() );

			const CMultiMetricMap* mostLikMap =  mapBuilder.getCurrentlyBuiltMetricMap();

			if (0==(step % LOG_FREQUENCY))
			{
				// Pose log:
				// -------------
				if (SAVE_POSE_LOG)
				{
					printf("Saving pose log information...");
					mapBuilder.getCurrentPoseEstimation()->saveToTextFile( format("%s/mapbuild_posepdf_%03u.txt",OUT_DIR,step) );
					printf("Ok\n");
				}
			}


			// Save a 3D scene view of the mapping process:
			if (0==(step % LOG_FREQUENCY) || (SAVE_3D_SCENE || win3D.present()))
			{
                CPose3D robotPose;
				mapBuilder.getCurrentPoseEstimation()->getMean(robotPose);

				COpenGLScenePtr		scene = COpenGLScene::Create();

                COpenGLViewportPtr view=scene->getViewport("main");
                ASSERT_(view);

                COpenGLViewportPtr view_map = scene->createViewport("mini-map");
                view_map->setBorderSize(2);
                view_map->setViewportPosition(0.01,0.01,0.35,0.35);
                view_map->setTransparent(false);

				{
					mrpt::opengl::CCamera &cam = view_map->getCamera();
					cam.setAzimuthDegrees(-90);
					cam.setElevationDegrees(90);
					cam.setPointingAt(robotPose);
					cam.setZoomDistance(20);
					cam.setOrthogonal();
				}

				// The ground:
				mrpt::opengl::CGridPlaneXYPtr groundPlane = mrpt::opengl::CGridPlaneXY::Create(-200,200,-200,200,0,5);
				groundPlane->setColor(0.4,0.4,0.4);
				view->insert( groundPlane );
				view_map->insert( CRenderizablePtr( groundPlane) ); // A copy

				// The camera pointing to the current robot pose:
				if (CAMERA_3DSCENE_FOLLOWS_ROBOT)
				{
				    scene->enableFollowCamera(true);

					mrpt::opengl::CCamera &cam = view_map->getCamera();
					cam.setAzimuthDegrees(-45);
					cam.setElevationDegrees(45);
					cam.setPointingAt(robotPose);
				}

				// The maps:
				{
					opengl::CSetOfObjectsPtr obj = opengl::CSetOfObjects::Create();
					mostLikMap->getAs3DObject( obj );
					view->insert(obj);

					// Only the point map:
					opengl::CSetOfObjectsPtr ptsMap = opengl::CSetOfObjects::Create();
					if (mostLikMap->m_pointsMaps.size())
					{
                        mostLikMap->m_pointsMaps[0]->getAs3DObject(ptsMap);
                        view_map->insert( ptsMap );
					}
				}

				// Draw the robot path:
				CPose3DPDFPtr posePDF =  mapBuilder.getCurrentPoseEstimation();
				CPose3D  curRobotPose;
				posePDF->getMean(curRobotPose);
				{
					opengl::CSetOfObjectsPtr obj = opengl::stock_objects::RobotPioneer();
					obj->setPose( curRobotPose );
					view->insert(obj);
				}
				{
					opengl::CSetOfObjectsPtr obj = opengl::stock_objects::RobotPioneer();
					obj->setPose( curRobotPose );
					view_map->insert( obj );
				}


				// Draw Hokuyo total data
				{
				  CRenderizablePtr hokuyoRender_t = scene->getByName("hokuyo_total");

				  if(!hokuyoRender_t){
					hokuyoRender_t = CPointCloud::Create();
					hokuyoRender_t->setName("hokuyo_total");
					hokuyoRender_t->setColor(0,0,1);
					hokuyoRender_t->setPose( CPose3D(0,0,0) );
					getAs<CPointCloud>(hokuyoRender_t)->setPointSize(3);
					scene->insert( hokuyoRender_t);					
				  }

				  for(size_t i =0 ; i < accum_x.size(); i++){
					getAs<CPointCloud>(hokuyoRender_t)->insertPoint(accum_x[i], accum_y[i], accum_z[i]);					
				  }
				  cout << "accum_x size : " << accum_x.size() << endl;


				}
				
				// Draw Hokuyo Current data plate
				{
				  CRenderizablePtr hokuyoRender = scene->getByName("hokuyo_cur");
				  hokuyoRender = CPointCloud::Create();
				  hokuyoRender->setName("hokuyo_cur");
				  hokuyoRender->setColor(0,1,0);
				  hokuyoRender->setPose( curRobotPose );
				  getAs<CPointCloud>(hokuyoRender)->setPointSize(0.1);
				  getAs<CPointCloud>(hokuyoRender)->loadFromPointsMap(&hokuyoMap);				  
				  scene->insert( hokuyoRender);

				  vector_float cur_x = getAs<CPointCloud>(hokuyoRender)->getArrayX();
				  vector_float cur_y = getAs<CPointCloud>(hokuyoRender)->getArrayY();
				  vector_float cur_z = getAs<CPointCloud>(hokuyoRender)->getArrayZ();


				  //				  cout << "current pose : " << curRobotPose << endl;				  
				  for(size_t i =0 ; i < cur_x.size(); i++){
					/*
					float rotate_x = cur_x[i]+ curRobotPose.y()*sin(curRobotPose.yaw()*3.14/180.0);
					float rotate_y = cur_y[i]+ curRobotPose.y()*cos(curRobotPose.yaw()*3.14/180.0);
					*/
					float rotate_x = curRobotPose.x() + cur_y[i]*sin(-curRobotPose.yaw());
					float rotate_y = curRobotPose.y() + cur_y[i]*cos(-curRobotPose.yaw());
					//					printf("cur_x, cur_y, cur_z : %f %f %f \n", rotate_x,rotate_y, cur_z[i]);

					accum_x.push_back(rotate_x);
					accum_y.push_back(rotate_y);
					accum_z.push_back(cur_z[i]);
				  }
				}

				// Save as file:
				if (0==(step % LOG_FREQUENCY) && SAVE_3D_SCENE)
				{
					CFileGZOutputStream	f( format( "%s/buildingmap_%05u.3Dscene",OUT_DIR,step ));
					f << *scene;
				}

				// Show 3D?
				if (win3D)
				{
					opengl::COpenGLScenePtr &ptrScene = win3D->get3DSceneAndLock();
					ptrScene = scene;

					win3D->unlockAccess3DScene();

					// Move camera:
					win3D->setCameraPointingToPoint( curRobotPose.x(),curRobotPose.y(),curRobotPose.z() );

					// Update:
					win3D->forceRepaint();

					sleep( SHOW_PROGRESS_3D_REAL_TIME_DELAY_MS );
				}
			}


			// Save the memory usage:
			// ------------------------------------------------------------------
			{
				printf("Saving memory usage...");
				unsigned long	memUsage = getMemoryUsage();
				FILE		*f=os::fopen( format("%s/log_MemoryUsage.txt",OUT_DIR).c_str() ,"at");
				if (f)
				{
					os::fprintf(f,"%u\t%lu\n",step,memUsage);
					os::fclose(f);
				}
				printf("Ok! (%.04fMb)\n", ((float)memUsage)/(1024*1024) );
			}

			// Save the robot estimated pose for each step:
			f_path.printf("%i %f %f %f\n",
				step,
				mapBuilder.getCurrentPoseEstimation()->getMeanVal().x(),
				mapBuilder.getCurrentPoseEstimation()->getMeanVal().y(),
				mapBuilder.getCurrentPoseEstimation()->getMeanVal().yaw() );


			f_pathOdo.printf("%i %f %f %f\n",step,odoPose.x(),odoPose.y(),odoPose.phi());

		} // end of if "rawlog_offset"...

		step++;
		printf("\n---------------- STEP %u | RAWLOG ENTRY %u ----------------\n",step, (unsigned)rawlogEntry);

		// Free memory:
		action.clear_unique();
		observations.clear_unique();
	};

	printf("\n---------------- END!! (total time: %.03f sec) ----------------\n",tictacGlobal.Tac());

	//	hokuyo.turnOff();	
	sick.stop();

	

	// Save map:
	mapBuilder.getCurrentlyBuiltMap(finalMap);

	str = format("%s/_finalmap_.simplemap",OUT_DIR);
	printf("Dumping final map in binary format to: %s\n", str.c_str() );
	mapBuilder.saveCurrentMapToFile(str);

	CMultiMetricMap  *finalPointsMap = mapBuilder.getCurrentlyBuiltMetricMap();
	str = format("%s/_finalmaps_.txt",OUT_DIR);
	printf("Dumping final metric maps to %s_XXX\n", str.c_str() );
	finalPointsMap->saveMetricMapRepresentationToFile( str );

	if (win3D)
		win3D->waitForKey();

	MRPT_TRY_END
}
Example 16
int main(int argc, char* argv[]) {
    if (argc < 4)
    {
        puts("Not enough arguments.");
        return -1;
    }

    ifstream laserLog, robotLog;
    string laserLine, robotLine;
    CConfigFile iniFile(argv[3]); // configurations file
    double accumX = 0.0, accumY = 0.0, accumPhi = 0.0;

    // pathfinding
    int resolution = 4;
    PathFinder pathFinder(resolution);
    deque<TPoint2D> path;


    // Load configurations
    CMetricMapBuilderICP icp_slam;
    icp_slam.ICP_options.loadFromConfigFile(iniFile, "MappingApplication");
    icp_slam.ICP_params.loadFromConfigFile(iniFile, "ICP");
    icp_slam.initialize();

    laserLog.open(argv[1]); // log of laser scan
    robotLog.open(argv[2]); // log of robot odometer

    sf::RenderWindow window(sf::VideoMode(800, 600), "bam!");
    sf::Texture texture;
    texture.create(800, 600);
    Matrix<sf::Color> pixels(600, 800);
    sf::Sprite sprite(texture);
    window.setVerticalSyncEnabled(true);
    bool paused = false;

    sf::Clock clock;
    unsigned frameCount = 0;
    while (laserLog.good() && window.isOpen()) {
        sf::Event event;
        while (window.pollEvent(event))
        {
            switch (event.type)
            {
            case sf::Event::Closed:
                window.close();
                break;
            }
        }

        double f;
        getline(laserLog, laserLine);
        getline(robotLog, robotLine);

        // Extract the laser scan info and convert it into a range scan observation to feed into icp-slam
        CObservation2DRangeScanPtr obs = CObservation2DRangeScan::Create();
        stringstream ssLaser(laserLine);
        while (ssLaser >> f)
        {
            obs->scan.push_back(f);
            obs->validRange.push_back(1);
        }
        icp_slam.processObservation(obs);

        // Extract the odometer values and convert it into an observation to feed into icp-slam
        stringstream ssRobot(robotLine);
        ssRobot >> f;
        accumX += f;
        ssRobot >> f;
        accumY += f;
        ssRobot >> f;
        accumPhi += f;
        // Need the ABSOLUTE odometer readings, meaning the accumulated values
        CPose2DPtr rawOdo(new CPose2D(accumX, accumY, accumPhi));
        CObservationOdometryPtr obs2 = CObservationOdometry::Create();
        obs2->odometry = *rawOdo;
        obs2->hasEncodersInfo = false;
        obs2->hasVelocities = false;
        icp_slam.processObservation(obs2);

        // Extract current estimates
        // NOTE: coordinate points are in METERS
        // First get the grid map
        CMultiMetricMap* curMapEst = icp_slam.getCurrentlyBuiltMetricMap();
        /* Grid representation of the current map.
         * Grid X Range: [0, getSizeX()]
         * Grid Y Range: [0, getSizeY()]
         * Convert from coordinate to grid loc: x2idx(float), y2idx(float)
         * Convert from grid loc to coordinate: idx2x(int), idx2y(int)
         * Use getCell(int x, int y) to tell if the cell is empty or not: a real
         * value in [0,1], the probability that the cell is empty.
         */
        COccupancyGridMap2DPtr gridMap = curMapEst->m_gridMaps[0];

        // Get the position estimates
        CPose3D curPosEst = icp_slam.getCurrentPoseEstimation()->getMeanVal();
        // (estimated) X,Y coordinates of the robot, and robot yaw angle (direction)
        double robx = curPosEst.x();
        double roby = curPosEst.y();
        double robphi = curPosEst.yaw();
        // Convert real coordinate to grid coordinate points
        int gridRobX = gridMap->x2idx(robx);
        int gridRobY = gridMap->y2idx(roby);


        cout << "robot location: " << gridRobX << ' ' << gridRobY << '\n';
        cout << "gridMap size: " << gridMap->getSizeX() << ' ' << gridMap->getSizeY() << '\n';

        // Perform path finding
        bool pathFound = true;
        pathFinder.update(*gridMap);
        pathFound = pathFinder.findPath(TPoint2D(gridRobX, gridRobY), TPoint2D(890, 500), path);
        printf("pathFound: %d\tpath length: %d\n", pathFound, path.size());


        // windows drawing
        window.clear(sf::Color::White);
        sf::View view;
        view.setSize(800, 600);
        view.setCenter(gridRobX, gridRobY);
        window.setView(view);

        // draw the grayscale probability map
        int yStart = max(0, gridRobY - 300);
        int yEnd = min((int)gridMap->getSizeY(), gridRobY + 300);
        int xStart = max(0, gridRobX - 400);
        int xEnd = min((int)gridMap->getSizeX(), gridRobX + 400);
        for (int y = yStart; y < yEnd; ++y)
        {
            for (int x = xStart; x < xEnd; ++x)
            {
                sf::Color &color = pixels(y-yStart, x-xStart);
                sf::Uint8 col = gridMap->getCell(x, y) * 255;
                color.r = col;
                color.g = col;
                color.b = col;
            }
        }
        texture.update((sf::Uint8*)pixels.getData());
        sprite.setPosition(xStart, yStart);
        window.draw(sprite);

        // draw the robot's position
        sf::CircleShape circle(5);
        circle.setPosition(gridRobX-resolution/2, gridRobY-resolution/2);
        circle.setOutlineColor(sf::Color::Red);
        circle.setFillColor(sf::Color::Red);
        window.draw(circle);

        // draw the path
        std::vector<sf::Vertex> verticies;
        verticies.resize(path.size() + 1);
        verticies[0].position.x = (gridRobX / resolution) * resolution + resolution/2;
        verticies[0].position.y = (gridRobY / resolution) * resolution + resolution/2;
        verticies[0].color = sf::Color::Blue;
        for (unsigned i = 0; i < path.size(); ++i)
        {
            verticies[i+1].position.x = path[i].x * resolution + resolution / 2;
            verticies[i+1].position.y = path[i].y * resolution + resolution / 2;
            verticies[i+1].color = sf::Color::Blue;
        }
        window.draw(&verticies[0], verticies.size(), sf::LinesStrip); 
        
        // draw the grid representation (only the occupied cells)
        /*
        sf::Color col = sf::Color::Yellow;
        col.a = 128;
        for (unsigned y = max(0, (gridRobY-400)/resolution); y < min<int>(pathFinder.occupancyGrid.height(), (gridRobY+400)/resolution); ++y)
            for (unsigned x = max(0, (gridRobX-400)/resolution); x < min<int>(pathFinder.occupancyGrid.width(), (gridRobX+400)/resolution); ++x)
            {
                if (!pathFinder.occupancyGrid(y, x)) continue;

                int xx = x * resolution;
                int yy = y * resolution;
                sf::RectangleShape rect;
                rect.setPosition(xx, yy);
                rect.setSize(sf::Vector2f(resolution, resolution));
                rect.setFillColor(col);
                window.draw(rect);
            }
            */

        window.display();

        frameCount++;
        if (clock.getElapsedTime().asSeconds() >= 1.0)
        {
            char timestr[16];
            sprintf(timestr, "%d fps", frameCount);
            window.setTitle(timestr);

            clock.restart();
            frameCount = 0;
        }
    }

    return 0;
}
Example 17
/*---------------------------------------------------------------
				pose3D = point3D + pose3D
  ---------------------------------------------------------------*/
CPose3D CPoint3D::operator+(const CPose3D& b) const
{
	return CPose3D(
		m_coords[0] + b.x(), m_coords[1] + b.y(), m_coords[2] + b.z(), b.yaw(),
		b.pitch(), b.roll());
}
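
Per the definition above, the result takes its translation from the component-wise sum and its orientation entirely from b. For example:

CPoint3D pt(1.0, 2.0, 3.0);
CPose3D  b(10.0, 0.0, 0.0, 0.5 /*yaw*/, 0.0, 0.0);
CPose3D  res = pt + b;   // res == (11, 2, 3, yaw=0.5, pitch=0, roll=0)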
Example 18
// ------------------------------------------------------
//				Test_Kinect
// ------------------------------------------------------
void Test_Kinect()
{
	// Launch grabbing thread:
	// --------------------------------------------------------
	TThreadParam thrPar;
	std::thread thHandle = std::thread(thread_grabbing, std::ref(thrPar));

	// Wait until data stream starts so we can say for sure the sensor has been
	// initialized OK:
	cout << "Waiting for sensor initialization...\n";
	do
	{
		CObservation3DRangeScan::Ptr possiblyNewObs =
			std::atomic_load(&thrPar.new_obs);
		if (possiblyNewObs && possiblyNewObs->timestamp != INVALID_TIMESTAMP)
			break;
		else
			std::this_thread::sleep_for(10ms);
	} while (!thrPar.quit);

	// Check error condition:
	if (thrPar.quit) return;

	// Feature tracking variables:
	CFeatureList trackedFeats;
	unsigned int step_num = 0;

	bool SHOW_FEAT_IDS = true;
	bool SHOW_RESPONSES = true;

	CGenericFeatureTrackerAutoPtr tracker;

	// "CFeatureTracker_KL" is by far the most robust implementation for now:
	tracker = CGenericFeatureTrackerAutoPtr(new CFeatureTracker_KL);
	tracker->enableTimeLogger(true);  // Do time profiling.

	// Set of parameters common to any tracker implementation:
	// To see all the existing params and documentation, see
	// mrpt::vision::CGenericFeatureTracker
	//  http://reference.mrpt.org/devel/structmrpt_1_1vision_1_1_c_generic_feature_tracker.html
	tracker->extra_params["add_new_features"] =
		1;  // track, AND ALSO, add new features
	tracker->extra_params["add_new_feat_min_separation"] = 25;
	tracker->extra_params["add_new_feat_max_features"] = 150;
	tracker->extra_params["add_new_feat_patch_size"] = 21;

	tracker->extra_params["minimum_KLT_response_to_add"] = 40;
	tracker->extra_params["check_KLT_response_every"] =
		5;  // Re-check the KLT-response to assure features are in good points.
	tracker->extra_params["minimum_KLT_response"] =
		25;  // Re-check the KLT-response to assure features are in good points.

	tracker->extra_params["update_patches_every"] = 0;  // Update patches

	// Specific params for "CFeatureTracker_KL"
	tracker->extra_params["window_width"] = 25;
	tracker->extra_params["window_height"] = 25;

	// Global points map:
	CColouredPointsMap globalPtsMap;
	globalPtsMap.colorScheme.scheme =
		CColouredPointsMap::cmFromIntensityImage;  // Take points color from
	// RGB+D observations
	// globalPtsMap.colorScheme.scheme =
	// CColouredPointsMap::cmFromHeightRelativeToSensorGray;

	// Create window and prepare OpenGL object in the scene:
	// --------------------------------------------------------
	mrpt::gui::CDisplayWindow3D win3D("kinect-3d-slam 3D view", 800, 600);

	win3D.setCameraAzimuthDeg(140);
	win3D.setCameraElevationDeg(20);
	win3D.setCameraZoom(8.0);
	win3D.setFOV(90);
	win3D.setCameraPointingToPoint(2.5, 0, 0);

	mrpt::opengl::CPointCloudColoured::Ptr gl_points =
		mrpt::make_aligned_shared<mrpt::opengl::CPointCloudColoured>();
	gl_points->setPointSize(2.5);

	mrpt::opengl::CSetOfObjects::Ptr gl_curFeats =
		mrpt::make_aligned_shared<mrpt::opengl::CSetOfObjects>();
	mrpt::opengl::CSetOfObjects::Ptr gl_keyframes =
		mrpt::make_aligned_shared<mrpt::opengl::CSetOfObjects>();

	mrpt::opengl::CPointCloudColoured::Ptr gl_points_map =
		mrpt::make_aligned_shared<mrpt::opengl::CPointCloudColoured>();
	gl_points_map->setPointSize(2.0);

	const double aspect_ratio =
		480.0 / 640.0;  // kinect.rows() / double( kinect.cols() );

	mrpt::opengl::CSetOfObjects::Ptr gl_cur_cam_corner =
		mrpt::opengl::stock_objects::CornerXYZSimple(0.4f, 4);

	opengl::COpenGLViewport::Ptr viewInt;
	{
		mrpt::opengl::COpenGLScene::Ptr& scene = win3D.get3DSceneAndLock();

		// Create the Opengl object for the point cloud:
		scene->insert(gl_points_map);
		scene->insert(gl_points);
		scene->insert(gl_curFeats);
		scene->insert(gl_keyframes);
		scene->insert(mrpt::make_aligned_shared<mrpt::opengl::CGridPlaneXY>());

		scene->insert(gl_cur_cam_corner);

		const int VW_WIDTH =
			350;  // Size of the viewport into the window, in pixel units.
		const int VW_HEIGHT = aspect_ratio * VW_WIDTH;

		// Create the Opengl objects for the planar images each in a separate
		// viewport:
		viewInt = scene->createViewport("view2d_int");
		viewInt->setViewportPosition(2, 2, VW_WIDTH, VW_HEIGHT);
		viewInt->setTransparent(true);

		win3D.unlockAccess3DScene();
		win3D.repaint();
	}

	CImage previous_image;

	map<TFeatureID, TPoint3D> lastVisibleFeats;
	std::vector<TPose3D>
		camera_key_frames_path;  // The 6D path of the Kinect camera.
	CPose3D
		currentCamPose_wrt_last;  // wrt last pose in "camera_key_frames_path"

	bool gl_keyframes_must_refresh =
		true;  // Need to update gl_keyframes from camera_key_frames_path??
	CObservation3DRangeScan::Ptr last_obs;
	string str_status, str_status2;

	while (win3D.isOpen() && !thrPar.quit)
	{
		CObservation3DRangeScan::Ptr possiblyNewObs =
			std::atomic_load(&thrPar.new_obs);
		if (possiblyNewObs && possiblyNewObs->timestamp != INVALID_TIMESTAMP &&
			(!last_obs || possiblyNewObs->timestamp != last_obs->timestamp))
		{
			// It IS a new observation:
			last_obs = possiblyNewObs;

			// Feature tracking -------------------------------------------
			ASSERT_(last_obs->hasIntensityImage);

			CImage theImg;  // The grabbed image:
			theImg = last_obs->intensityImage;

			// Do tracking:
			if (step_num > 1)  // we need "previous_image" to be valid.
			{
				tracker->trackFeatures(previous_image, theImg, trackedFeats);

				// Remove those now out of the image plane:
				CFeatureList::iterator itFeat = trackedFeats.begin();
				while (itFeat != trackedFeats.end())
				{
					const TFeatureTrackStatus status = (*itFeat)->track_status;
					bool eras =
						(status_TRACKED != status && status_IDLE != status);
					if (!eras)
					{
						// Also, check if it's too close to the image border:
						const float x = (*itFeat)->x;
						const float y = (*itFeat)->y;
						static const float MIN_DIST_MARGIN_TO_STOP_TRACKING =
							10;
						if (x < MIN_DIST_MARGIN_TO_STOP_TRACKING ||
							y < MIN_DIST_MARGIN_TO_STOP_TRACKING ||
							x > (last_obs->cameraParamsIntensity.ncols -
								 MIN_DIST_MARGIN_TO_STOP_TRACKING) ||
							y > (last_obs->cameraParamsIntensity.nrows -
								 MIN_DIST_MARGIN_TO_STOP_TRACKING))
						{
							eras = true;
						}
					}
					if (eras)  // Erase or keep?
						itFeat = trackedFeats.erase(itFeat);
					else
						++itFeat;
				}
			}

			// Create list of 3D features in space, wrt current camera pose:
			// --------------------------------------------------------------------
			map<TFeatureID, TPoint3D> curVisibleFeats;
			for (CFeatureList::iterator itFeat = trackedFeats.begin();
				 itFeat != trackedFeats.end(); ++itFeat)
			{
				// Pixel coordinates in the intensity image:
				const int int_x = (*itFeat)->x;
				const int int_y = (*itFeat)->y;

				// Convert to pixel coords in the range image:
				//  APPROXIMATION: Assume coordinates are equal (that's not
				//  exact!!)
				const int x = int_x;
				const int y = int_y;

				// Does this (x,y) have valid range data?
				const float d = last_obs->rangeImage(y, x);
				if (d > 0.05 && d < 10.0)
				{
					ASSERT_(
						size_t(
							last_obs->rangeImage.cols() *
							last_obs->rangeImage.rows()) ==
						last_obs->points3D_x.size());
					const size_t nPt = last_obs->rangeImage.cols() * y + x;
					curVisibleFeats[(*itFeat)->ID] = TPoint3D(
						last_obs->points3D_x[nPt], last_obs->points3D_y[nPt],
						last_obs->points3D_z[nPt]);
				}
			}

			// Load local points map from 3D points + color:
			CColouredPointsMap localPntsMap;
			localPntsMap.colorScheme.scheme =
				CColouredPointsMap::cmFromIntensityImage;
			localPntsMap.loadFromRangeScan(*last_obs);

			// Estimate our current camera pose from feature2feature matching:
			// --------------------------------------------------------------------
			if (!lastVisibleFeats.empty())
			{
				TMatchingPairList corrs;  // pairs of correspondences

				for (map<TFeatureID, TPoint3D>::const_iterator itCur =
						 curVisibleFeats.begin();
					 itCur != curVisibleFeats.end(); ++itCur)
				{
					map<TFeatureID, TPoint3D>::const_iterator itFound =
						lastVisibleFeats.find(itCur->first);
					if (itFound != lastVisibleFeats.end())
					{
						corrs.push_back(
							TMatchingPair(
								itFound->first, itCur->first, itFound->second.x,
								itFound->second.y, itFound->second.z,
								itCur->second.x, itCur->second.y,
								itCur->second.z));
					}
				}

				if (corrs.size() >= 3)
				{
					// Find matchings:
					mrpt::tfest::TSE3RobustParams params;
					params.ransac_minSetSize = 3;
					params.ransac_maxSetSizePct = 6.0 / corrs.size();

					mrpt::tfest::TSE3RobustResult results;
					bool register_ok = false;
					try
					{
						mrpt::tfest::se3_l2_robust(corrs, params, results);
						register_ok = true;
					}
					catch (std::exception&)
					{
						/* Cannot find a minimum number of matches, inconsistent
						 * parameters due to a very reduced number of matches, etc.
						 */
					}

					const CPose3D relativePose =
						CPose3D(results.transformation);

					str_status = mrpt::format(
						"%d corrs | inliers: %d | rel.pose: %s ",
						int(corrs.size()), int(results.inliers_idx.size()),
						relativePose.asString().c_str());
					str_status2 = string(
						results.inliers_idx.size() == 0
							? "LOST! Please, press 'r' to restart"
							: "");

					if (register_ok && std::abs(results.scale - 1.0) < 0.1)
					{
						// Seems a good match:
						if ((relativePose.norm() > KEYFRAMES_MIN_DISTANCE ||
							 std::abs(relativePose.yaw()) > KEYFRAMES_MIN_ANG ||
							 std::abs(relativePose.pitch()) >
								 KEYFRAMES_MIN_ANG ||
							 std::abs(relativePose.roll()) > KEYFRAMES_MIN_ANG))
						{
							// Accept this as a new key-frame pose ------------
							// Append new global pose of this key-frame:

							const CPose3D new_keyframe_global =
								CPose3D(*camera_key_frames_path.rbegin()) +
								relativePose;

							camera_key_frames_path.push_back(
								new_keyframe_global.asTPose());

							gl_keyframes_must_refresh = true;

							currentCamPose_wrt_last =
								CPose3D();  // It's (0,0,0) since the last
							// key-frame is the current pose!
							lastVisibleFeats = curVisibleFeats;

							cout << "Adding new key-frame: pose="
								 << new_keyframe_global << endl;

							// Update global map: append another map at a given
							// position:
							globalPtsMap.insertObservation(
								last_obs.get(), &new_keyframe_global);
							win3D.get3DSceneAndLock();
							gl_points_map->loadFromPointsMap(&globalPtsMap);
							win3D.unlockAccess3DScene();
						}
						else
						{
							currentCamPose_wrt_last = relativePose;
							// cout << "cur pose: " << currentCamPose_wrt_last
							// << endl;
						}
					}
				}
			}

			if (camera_key_frames_path.empty() || lastVisibleFeats.empty())
			{
				// First iteration:
				camera_key_frames_path.clear();
				camera_key_frames_path.push_back(TPose3D(0, 0, 0, 0, 0, 0));
				gl_keyframes_must_refresh = true;
				lastVisibleFeats = curVisibleFeats;

				// Update global map:
				globalPtsMap.clear();
				globalPtsMap.insertObservation(last_obs.get());

				win3D.get3DSceneAndLock();
				gl_points_map->loadFromPointsMap(&globalPtsMap);
				win3D.unlockAccess3DScene();
			}

			// Save the image for the next step:
			previous_image = theImg;

			// Draw marks on the RGB image:
			theImg.selectTextFont("10x20");
			{  // Tracked feats:
				theImg.drawFeatures(
					trackedFeats, TColor(0, 0, 255), SHOW_FEAT_IDS,
					SHOW_RESPONSES);
				theImg.textOut(
					3, 22,
					format("# feats: %u", (unsigned int)trackedFeats.size()),
					TColor(200, 20, 20));
			}

			// Update visualization ---------------------------------------

			// Show intensity image
			win3D.get3DSceneAndLock();
			viewInt->setImageView(theImg);
			win3D.unlockAccess3DScene();

			// Show 3D points & current visible feats, at the current camera 3D
			// pose "currentCamPose_wrt_last"
			// ---------------------------------------------------------------------
			if (last_obs->hasPoints3D)
			{
				const CPose3D curGlobalPose =
					CPose3D(*camera_key_frames_path.rbegin()) +
					currentCamPose_wrt_last;
				win3D.get3DSceneAndLock();
				// All 3D points:
				gl_points->loadFromPointsMap(&localPntsMap);
				gl_points->setPose(curGlobalPose);
				gl_cur_cam_corner->setPose(curGlobalPose);

				// Current visual landmarks:
				gl_curFeats->clear();
				for (map<TFeatureID, TPoint3D>::const_iterator it =
						 curVisibleFeats.begin();
					 it != curVisibleFeats.end(); ++it)
				{
					static double D = 0.02;
					mrpt::opengl::CBox::Ptr box =
						mrpt::make_aligned_shared<mrpt::opengl::CBox>(
							TPoint3D(-D, -D, -D), TPoint3D(D, D, D));
					box->setWireframe(true);
					box->setName(format("%d", int(it->first)));
					box->enableShowName(true);
					box->setLocation(it->second);
					gl_curFeats->insert(box);
				}
				gl_curFeats->setPose(curGlobalPose);

				win3D.unlockAccess3DScene();
				win3D.repaint();
			}

			win3D.get3DSceneAndLock();
			win3D.addTextMessage(
				-100, -20, format("%.02f Hz", thrPar.Hz), TColorf(0, 1, 1), 100,
				MRPT_GLUT_BITMAP_HELVETICA_18);
			win3D.unlockAccess3DScene();

			win3D.repaint();

			step_num++;

		}  // end update visualization:

		if (gl_keyframes_must_refresh)
		{
			gl_keyframes_must_refresh = false;
			// cout << "Updating gl_keyframes with " <<
			// camera_key_frames_path.size() << " frames.\n";
			win3D.get3DSceneAndLock();
			gl_keyframes->clear();
			for (size_t i = 0; i < camera_key_frames_path.size(); i++)
			{
				CSetOfObjects::Ptr obj =
					mrpt::opengl::stock_objects::CornerXYZSimple(0.3f, 3);
				obj->setPose(camera_key_frames_path[i]);
				gl_keyframes->insert(obj);
			}
			win3D.unlockAccess3DScene();
		}

		// Process possible keyboard commands:
		// --------------------------------------
		if (win3D.keyHit() && thrPar.pushed_key == 0)
		{
			const int key = tolower(win3D.getPushedKey());

			switch (key)
			{
				// Some of the keys are processed in this thread:
				case 'r':
					lastVisibleFeats.clear();
					camera_key_frames_path.clear();
					gl_keyframes_must_refresh = true;
					globalPtsMap.clear();
					win3D.get3DSceneAndLock();
					gl_points_map->loadFromPointsMap(&globalPtsMap);
					win3D.unlockAccess3DScene();

					break;
				case 's':
				{
					const std::string s = "point_cloud.txt";
					cout << "Dumping 3D point-cloud to: " << s << endl;
					globalPtsMap.save3D_to_text_file(s);
					break;
				}
				case 'o':
					win3D.setCameraZoom(win3D.getCameraZoom() * 1.2);
					win3D.repaint();
					break;
				case 'i':
					win3D.setCameraZoom(win3D.getCameraZoom() / 1.2);
					win3D.repaint();
					break;
				// ...and the rest in the kinect thread:
				default:
					thrPar.pushed_key = key;
					break;
			};
		}

		win3D.get3DSceneAndLock();
		win3D.addTextMessage(
			2, -30, format(
						"'s':save point cloud, 'r': reset, 'o'/'i': zoom "
						"out/in, mouse: orbit 3D, ESC: quit"),
			TColorf(1, 1, 1), 110, MRPT_GLUT_BITMAP_HELVETICA_12);
		win3D.addTextMessage(
			2, -50, str_status, TColorf(1, 1, 1), 111,
			MRPT_GLUT_BITMAP_HELVETICA_12);
		win3D.addTextMessage(
			2, -70, str_status2, TColorf(1, 1, 1), 112,
			MRPT_GLUT_BITMAP_HELVETICA_18);
		win3D.unlockAccess3DScene();

		std::this_thread::sleep_for(1ms);
	}

	cout << "Waiting for grabbing thread to exit...\n";
	thrPar.quit = true;
	thHandle.join();
	cout << "Bye!\n";
}
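// Hedged sketch (illustrative refactor, not part of the demo): the key-frame
// acceptance rule used above, factored out. KEYFRAMES_MIN_DISTANCE and
// KEYFRAMES_MIN_ANG are assumed to be the same globals used by Test_Kinect().
static bool isNewKeyframe(const mrpt::poses::CPose3D& relativePose)
{
	return relativePose.norm() > KEYFRAMES_MIN_DISTANCE ||
		   std::abs(relativePose.yaw()) > KEYFRAMES_MIN_ANG ||
		   std::abs(relativePose.pitch()) > KEYFRAMES_MIN_ANG ||
		   std::abs(relativePose.roll()) > KEYFRAMES_MIN_ANG;
}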
Example n. 19
/*---------------------------------------------------------------
					path_from_rtk_gps
 ---------------------------------------------------------------*/
void  mrpt::topography::path_from_rtk_gps(
	mrpt::poses::CPose3DInterpolator	&robot_path,
	const mrpt::slam::CRawlog			&rawlog,
	size_t 								first,
	size_t 								last,
	bool								isGUI,
	bool								disableGPSInterp,
	int									PATH_SMOOTH_FILTER ,
	TPathFromRTKInfo					*outInfo
	)
{
	MRPT_START

#if MRPT_HAS_WXWIDGETS
	// Use a smart pointer so we are safe against exceptions:
	stlplus::smart_ptr<wxBusyCursor>	waitCursorPtr;
	if (isGUI)
		waitCursorPtr = stlplus::smart_ptr<wxBusyCursor>( new  wxBusyCursor() );
#endif

    // Go: scan the rawlog for GPS observations:
    size_t      i;

    ASSERT_(first<=last);
    ASSERT_(last<=rawlog.size()-1);


	set<string>	lstGPSLabels;

    size_t count = 0;

	robot_path.clear();
	robot_path.setMaxTimeInterpolation(3.0);	// Max. seconds of GPS blackout not to interpolate.
	robot_path.setInterpolationMethod( CPose3DInterpolator::imSSLSLL );

	TPathFromRTKInfo	outInfoTemp;
	if (outInfo) *outInfo = outInfoTemp;

	map<string, map<TTimeStamp,TPoint3D> >	gps_paths;	// label -> (time -> 3D local coords)

    bool		abort = false;
	bool		ref_valid = false;

	// Load configuration block:
	CConfigFileMemory	memFil;
	rawlog.getCommentTextAsConfigFile(memFil);

	TGeodeticCoords ref(
		memFil.read_double("GPS_ORIGIN","lat_deg",0),
		memFil.read_double("GPS_ORIGIN","lon_deg",0),
		memFil.read_double("GPS_ORIGIN","height",0) );

	ref_valid = !ref.isClear();

	// Do we have info for the consistency test?
	const double std_0 = memFil.read_double("CONSISTENCY_TEST","std_0",0);
	bool  doConsistencyCheck = std_0>0;

	// Do we have the "reference uncertainty" matrix W^\star ??
	memFil.read_matrix("UNCERTAINTY","W_star",outInfoTemp.W_star);
	const bool doUncertaintyCovs = size(outInfoTemp.W_star,1)!=0;
	if (doUncertaintyCovs && (size(outInfoTemp.W_star,1)!=6 || size(outInfoTemp.W_star,2)!=6))
		THROW_EXCEPTION("ERROR: W_star matrix for uncertainty estimation is provided but it's not a 6x6 matrix.");

	// ------------------------------------------
	// Gather all the RTK GPS observations:
	// ------------------------------------------
#if MRPT_HAS_WXWIDGETS
	wxProgressDialog    *progDia=NULL;
	if (isGUI)
	{
		progDia = new wxProgressDialog(
			wxT("Building map"),
			wxT("Getting GPS observations..."),
			(int)(last-first+1), // range
			NULL, // parent
			wxPD_CAN_ABORT |
			wxPD_APP_MODAL |
			wxPD_SMOOTH |
			wxPD_AUTO_HIDE |
			wxPD_ELAPSED_TIME |
			wxPD_ESTIMATED_TIME |
			wxPD_REMAINING_TIME);
	}
#endif

	// The list with all time ordered gps's in valid RTK mode
	typedef std::map< mrpt::system::TTimeStamp, std::map<std::string,CObservationGPSPtr> > TListGPSs;
	TListGPSs	list_gps_obs;

	map<string,size_t>  GPS_RTK_reads;	// label-># of RTK readings
	map<string,TPoint3D> GPS_local_coords_on_vehicle;  // label -> local pose on the vehicle

    for (i=first;!abort && i<=last;i++)
    {
        switch ( rawlog.getType(i) )
		{
		default:
			break;

        case CRawlog::etObservation:
            {
                CObservationPtr o = rawlog.getAsObservation(i);

				if (o->GetRuntimeClass()==CLASS_ID(CObservationGPS))
				{
					CObservationGPSPtr obs = CObservationGPSPtr(o);

					if (obs->has_GGA_datum && obs->GGA_datum.fix_quality==4)
					{
						// Add to the list:
						list_gps_obs[obs->timestamp][obs->sensorLabel] = obs;

						lstGPSLabels.insert(obs->sensorLabel);
					}

					// Save to GPS paths:
					if (obs->has_GGA_datum && (obs->GGA_datum.fix_quality==4 || obs->GGA_datum.fix_quality==5))
					{
						GPS_RTK_reads[obs->sensorLabel]++;

						// map<string,TPoint3D> GPS_local_coords_on_vehicle;  // label -> local pose on the vehicle
						if (GPS_local_coords_on_vehicle.find(obs->sensorLabel)==GPS_local_coords_on_vehicle.end())
							GPS_local_coords_on_vehicle[obs->sensorLabel] = TPoint3D( obs->sensorPose );

						//map<string, map<TTimeStamp,TPoint3D> >	gps_paths;	// label -> (time -> 3D local coords)
						gps_paths[obs->sensorLabel][obs->timestamp] = TPoint3D(
							obs->GGA_datum.longitude_degrees,
							obs->GGA_datum.latitude_degrees,
							obs->GGA_datum.altitude_meters );
					}
                }
            }
            break;
        } // end switch type

		// Show progress:
        if ((count++ % 100)==0)
        {
#if MRPT_HAS_WXWIDGETS
        	if (progDia)
        	{
				if (!progDia->Update((int)(i-first)))
					abort = true;
				wxTheApp->Yield();
        	}
#endif
        }
    } // end for i

#if MRPT_HAS_WXWIDGETS
	if (progDia)
	{
		delete progDia;
		progDia=NULL;
	}
#endif

	// -----------------------------------------------------------
	// At this point we already have the sensor positions, thus
	//  we can estimate the covariance matrix D:
	//
	// TODO: Generalize equations for # of GPS > 3
	// -----------------------------------------------------------
	map< set<string>, double >  Ad_ij;  // InterGPS distances in 2D
	map< set<string>, double >  phi_ij; // Directions on XY of the lines between i-j
	map< string, size_t>  D_cov_indexes; // Sensor label-> index in the matrix (first=0, ...)
	map< size_t, string>  D_cov_rev_indexes; // Reverse of D_cov_indexes

	CMatrixDouble	  D_cov;   // square distances cov
	CMatrixDouble	  D_cov_1;   // square distances cov (inverse)
	vector_double	  D_mean;  // square distances mean

	if (doConsistencyCheck && GPS_local_coords_on_vehicle.size()==3)
	{
		unsigned int cnt = 0;
		for(map<string,TPoint3D>::iterator i=GPS_local_coords_on_vehicle.begin();i!=GPS_local_coords_on_vehicle.end();++i)
		{
			// Index tables:
			D_cov_indexes[i->first] = cnt;
			D_cov_rev_indexes[cnt] = i->first;
			cnt++;

			for(map<string,TPoint3D>::iterator j=i;j!=GPS_local_coords_on_vehicle.end();++j)
			{
				if (i!=j)
				{
					const TPoint3D  &pi = i->second;
					const TPoint3D  &pj = j->second;
					Ad_ij[ make_set(i->first,j->first) ]  = pi.distanceTo( pj );
					phi_ij[ make_set(i->first,j->first) ] = atan2( pj.y-pi.y, pj.x-pi.x );
				}
			}
		}
		ASSERT_( D_cov_indexes.size()==3  && D_cov_rev_indexes.size()==3 );

		D_cov.setSize( D_cov_indexes.size(), D_cov_indexes.size() );
		D_mean.resize( D_cov_indexes.size() );

		// See paper for the formulas!
		// TODO: generalize for N>3

		D_cov(0,0) = 2*square( Ad_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[1])] );
		D_cov(1,1) = 2*square( Ad_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[2])] );
		D_cov(2,2) = 2*square( Ad_ij[ make_set(D_cov_rev_indexes[1],D_cov_rev_indexes[2])] );

		D_cov(1,0) = Ad_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[1])] * Ad_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[2])]
					 * cos( phi_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[1])] - phi_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[2])] );
		D_cov(0,1) = D_cov(1,0);

		D_cov(2,0) =-Ad_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[1])] * Ad_ij[ make_set(D_cov_rev_indexes[1],D_cov_rev_indexes[2])]
					 * cos( phi_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[1])] - phi_ij[ make_set(D_cov_rev_indexes[1],D_cov_rev_indexes[2])] );
		D_cov(0,2) = D_cov(2,0);

		D_cov(2,1) = Ad_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[2])] * Ad_ij[ make_set(D_cov_rev_indexes[1],D_cov_rev_indexes[2])]
					 * cos( phi_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[2])] - phi_ij[ make_set(D_cov_rev_indexes[1],D_cov_rev_indexes[2])] );
		D_cov(1,2) = D_cov(2,1);

		D_cov *= 4*square(std_0);

		D_cov_1 = D_cov.inv();

		//cout << D_cov.inMatlabFormat() << endl;

		D_mean[0] = square( Ad_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[1])] );
		D_mean[1] = square( Ad_ij[ make_set(D_cov_rev_indexes[0],D_cov_rev_indexes[2])] );
		D_mean[2] = square( Ad_ij[ make_set(D_cov_rev_indexes[1],D_cov_rev_indexes[2])] );

	}
	else doConsistencyCheck = false;
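	// Hedged summary of the block above: D_mean holds the nominal squared
	// inter-antenna distances and D_cov their first-order covariance under an
	// isotropic per-antenna position noise of std_0. Each triplet of
	// simultaneous RTK fixes is later gated with the Mahalanobis distance
	//    mahaD = sqrt( (d2 - D_mean)^T * D_cov^-1 * (d2 - D_mean) )
	// where d2 are the measured squared inter-GPS distances (see below).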


	// ------------------------------------------
	// Estimate the 6D path from the gathered GPS observations:
	// ------------------------------------------
	int N_GPSs = list_gps_obs.size();

	if (N_GPSs)
	{
		// loop interpolate 1-out-of-5: this solves the issue with JAVAD GPSs
		//  that skip some readings at timestamps ending in .0 .2 .4 .6 .8
		if (list_gps_obs.size()>4)
		{
			TListGPSs::iterator F = list_gps_obs.begin();
			++F; ++F;
			TListGPSs::iterator E = list_gps_obs.end();
			--E; --E;

			for (TListGPSs::iterator i=F;i!=E;++i)
			{
				// Now check if we have 3 gps with the same time stamp:
				//const size_t N = i->second.size();
				std::map<std::string, CObservationGPSPtr> & GPS = i->second;

				// Check if any in "lstGPSLabels" is missing here:
				for (set<string>::iterator l=lstGPSLabels.begin();l!=lstGPSLabels.end();++l)
				{
					// For each GPS in the current timestamp:
					bool fnd = ( GPS.find(*l)!=GPS.end() );

					if (fnd) continue; // this one is present.

					// Ok, we have "*l" missing in the set "*i".
					// Try to interpolate from neighbors:
					TListGPSs::iterator	i_b1 = i; --i_b1;
					TListGPSs::iterator	i_a1 = i; ++i_a1;

					CObservationGPSPtr	GPS_b1, GPS_a1;

					if (i_b1->second.find( *l )!=i_b1->second.end())
						GPS_b1 = i_b1->second.find( *l )->second;

					if (i_a1->second.find( *l )!=i_a1->second.end())
						GPS_a1 = i_a1->second.find( *l )->second;

					if (!disableGPSInterp && GPS_a1 && GPS_b1)
					{
						if ( mrpt::system::timeDifference( GPS_b1->timestamp, GPS_a1->timestamp ) < 0.5 )
						{
							CObservationGPSPtr	new_gps = CObservationGPSPtr( new CObservationGPS(*GPS_a1) );
							new_gps->sensorLabel = *l;

							//cout << mrpt::system::timeLocalToString(GPS_b1->timestamp) << " " << mrpt::system::timeLocalToString(GPS_a1->timestamp) << " " << *l;
							//cout << endl;

							new_gps->GGA_datum.longitude_degrees = 0.5 * ( GPS_a1->GGA_datum.longitude_degrees + GPS_b1->GGA_datum.longitude_degrees );
							new_gps->GGA_datum.latitude_degrees  = 0.5 * ( GPS_a1->GGA_datum.latitude_degrees + GPS_b1->GGA_datum.latitude_degrees );
							new_gps->GGA_datum.altitude_meters   = 0.5 * ( GPS_a1->GGA_datum.altitude_meters + GPS_b1->GGA_datum.altitude_meters );

							new_gps->timestamp = (GPS_a1->timestamp + GPS_b1->timestamp) / 2;

							i->second[new_gps->sensorLabel] = new_gps;
						}
					}
				}
			} // end loop interpolate 1-out-of-5
		}



#if MRPT_HAS_WXWIDGETS
	wxProgressDialog    *progDia3=NULL;
	if (isGUI)
	{
		progDia3 = new wxProgressDialog(
			wxT("Building map"),
			wxT("Estimating 6D path..."),
			N_GPSs, // range
			NULL, // parent
			wxPD_CAN_ABORT |
			wxPD_APP_MODAL |
			wxPD_SMOOTH |
			wxPD_AUTO_HIDE |
			wxPD_ELAPSED_TIME |
			wxPD_ESTIMATED_TIME |
			wxPD_REMAINING_TIME);
	}
#endif

		int	idx_in_GPSs = 0;

		for (TListGPSs::iterator i=list_gps_obs.begin();i!=list_gps_obs.end();++i, idx_in_GPSs++)
		{
			// Now check if we have 3 gps with the same time stamp:
			if (i->second.size()>=3)
			{
				const size_t N = i->second.size();
				std::map<std::string, CObservationGPSPtr> & GPS = i->second;
				vector_double X(N),Y(N),Z(N); 	// Global XYZ coordinates
				std::map<string,size_t> XYZidxs;  // Sensor label -> indices in X Y Z

				if (!ref_valid)	// get the reference lat/lon, if it's not set from rawlog configuration block.
				{
					ref_valid 	= true;
					ref = GPS.begin()->second->GGA_datum.getAsStruct<TGeodeticCoords>();
				}

				// Compute the XYZ coordinates of all sensors:
				TMatchingPairList corrs;
				unsigned int k;
				std::map<std::string, CObservationGPSPtr>::iterator g_it;

				for (k=0,g_it=GPS.begin();g_it!=GPS.end();g_it++,k++)
				{
					TPoint3D P;
					mrpt::topography::geodeticToENU_WGS84(
						g_it->second->GGA_datum.getAsStruct<TGeodeticCoords>(),
						P,
						ref );

					// Correction of offsets:
					const string sect = string("OFFSET_")+g_it->second->sensorLabel;
					P.x += memFil.read_double( sect, "x", 0 );
					P.y += memFil.read_double( sect, "y", 0 );
					P.z += memFil.read_double( sect, "z", 0 );

					XYZidxs[g_it->second->sensorLabel] = k; // Save index correspondence

					// Create the correspondence:
					corrs.push_back( TMatchingPair(
						k,k, 	// Indices
						P.x,P.y,P.z, // "This"/Global coords
						g_it->second->sensorPose.x(),g_it->second->sensorPose.y(),g_it->second->sensorPose.z() // "other"/local coordinates
						));

					X[k] = P.x;
					Y[k] = P.y;
					Z[k] = P.z;
				}

				if (doConsistencyCheck && GPS.size()==3)
				{
					// XYZ[k] hold the k'th final coordinates of each GPS
					// GPS[k] are the CObservations:

					// Compute the inter-GPS square distances:
					vector_double iGPSdist2(3);

					// [0]: sq dist between: D_cov_rev_indexes[0],D_cov_rev_indexes[1]
					TPoint3D   P0( X[XYZidxs[D_cov_rev_indexes[0]]], Y[XYZidxs[D_cov_rev_indexes[0]]], Z[XYZidxs[D_cov_rev_indexes[0]]] );
					TPoint3D   P1( X[XYZidxs[D_cov_rev_indexes[1]]], Y[XYZidxs[D_cov_rev_indexes[1]]], Z[XYZidxs[D_cov_rev_indexes[1]]] );
					TPoint3D   P2( X[XYZidxs[D_cov_rev_indexes[2]]], Y[XYZidxs[D_cov_rev_indexes[2]]], Z[XYZidxs[D_cov_rev_indexes[2]]] );

					iGPSdist2[0] = P0.sqrDistanceTo(P1);
					iGPSdist2[1] = P0.sqrDistanceTo(P2);
					iGPSdist2[2] = P1.sqrDistanceTo(P2);

					double mahaD = mrpt::math::mahalanobisDistance( iGPSdist2, D_mean, D_cov_1 );
					outInfoTemp.mahalabis_quality_measure[i->first] = mahaD;

					//cout << "x: " << iGPSdist2 << " MU: " <<  D_mean << " -> " << mahaD  << endl;
				} // end consistency

				// Use a 6D matching method to estimate the location of the vehicle:
				CPose3D optimal_pose;
				double  optimal_scale;

				// "this" (reference map) -> GPS global coordinates
				// "other" -> GPS local coordinates on the vehicle
				mrpt::scanmatching::leastSquareErrorRigidTransformation6D( corrs,optimal_pose,optimal_scale, true ); // Force scale=1
				//cout << "optimal pose: " << optimal_pose << " " << optimal_scale << endl;

				// Final vehicle pose:
				CPose3D	&veh_pose= optimal_pose;


				// Add to the interpolator:
				MRPT_CHECK_NORMAL_NUMBER( veh_pose.x() );
				MRPT_CHECK_NORMAL_NUMBER( veh_pose.y() );
				MRPT_CHECK_NORMAL_NUMBER( veh_pose.z() );
				MRPT_CHECK_NORMAL_NUMBER( veh_pose.yaw() );
				MRPT_CHECK_NORMAL_NUMBER( veh_pose.pitch() );
				MRPT_CHECK_NORMAL_NUMBER( veh_pose.roll() );

				robot_path.insert( i->first, veh_pose );

				// If we have W_star, compute the pose uncertainty:
				if (doUncertaintyCovs)
				{
					CPose3DPDFGaussian	final_veh_uncert;
					final_veh_uncert.mean.setFromValues(0,0,0,0,0,0);
					final_veh_uncert.cov = outInfoTemp.W_star;

					// Rotate the covariance according to the real vehicle pose:
					final_veh_uncert.changeCoordinatesReference(veh_pose);

					outInfoTemp.vehicle_uncertainty[ i->first ] = final_veh_uncert.cov;
				}
			}

			// Show progress:
			if ((count++ % 100)==0)
			{
#if MRPT_HAS_WXWIDGETS
				if (progDia3)
				{
					if (!progDia3->Update(idx_in_GPSs))
						abort = true;
					wxTheApp->Yield();
				}
#endif
			}
		} // end for i

#if MRPT_HAS_WXWIDGETS
	if (progDia3)
	{
		delete progDia3;
		progDia3=NULL;
	}
#endif

		if (PATH_SMOOTH_FILTER>0 && robot_path.size()>1)
		{
			CPose3DInterpolator	filtered_robot_path = robot_path;

			// Do Angles smoother filter of the path:
			// ---------------------------------------------
			const double MAX_DIST_TO_FILTER = 4.0;

			for (CPose3DInterpolator::iterator i=robot_path.begin();i!=robot_path.end();++i)
			{
				CPose3D   p = i->second;

				vector_double pitchs, rolls;	// The elements to average

				pitchs.push_back(p.pitch());
				rolls.push_back(p.roll());

				CPose3DInterpolator::iterator q=i;
				for (int k=0;k<PATH_SMOOTH_FILTER && q!=robot_path.begin();k++)
				{
					--q;
					if (abs( mrpt::system::timeDifference(q->first,i->first))<MAX_DIST_TO_FILTER )
					{
						pitchs.push_back( q->second.pitch() );
						rolls.push_back( q->second.roll() );
					}
				}
				q=i;
				for (int k=0;k<PATH_SMOOTH_FILTER && q!=(--robot_path.end()) ;k++)
				{
					++q;
					if (abs( mrpt::system::timeDifference(q->first,i->first))<MAX_DIST_TO_FILTER )
					{
						pitchs.push_back( q->second.pitch() );
						rolls.push_back( q->second.roll() );
					}
				}

				p.setYawPitchRoll(p.yaw(), mrpt::math::averageWrap2Pi(pitchs), mrpt::math::averageWrap2Pi(rolls) );

				// save in filtered path:
				filtered_robot_path.insert( i->first, p );
			}
			// Replace:
			robot_path = filtered_robot_path;

		} // end PATH_SMOOTH_FILTER

	} // end step generate 6D path


	// Here we can set best_gps_path (the one with the max. number of RTK fixed/float readings):
	if (outInfo)
	{
		string bestLabel;
		size_t bestNum = 0;
		for (map<string,size_t>::iterator i=GPS_RTK_reads.begin();i!=GPS_RTK_reads.end();++i)
		{
			if (i->second>bestNum)
			{
				bestNum = i->second;
				bestLabel = i->first;
			}
		}
		outInfoTemp.best_gps_path = gps_paths[bestLabel];

		// and transform to XYZ:
		// Correction of offsets:
		const string sect = string("OFFSET_")+bestLabel;
		const double off_X = memFil.read_double( sect, "x", 0 );
		const double off_Y = memFil.read_double( sect, "y", 0 );
		const double off_Z = memFil.read_double( sect, "z", 0 );

		// map<TTimeStamp,TPoint3D> best_gps_path;		// time -> 3D local coords
		for (map<TTimeStamp,TPoint3D>::iterator i=outInfoTemp.best_gps_path.begin();i!=outInfoTemp.best_gps_path.end();++i)
		{
			TPoint3D P;
			mrpt::topography::geodeticToENU_WGS84(
				TGeodeticCoords(i->second.x,i->second.y,i->second.z),  // lat, lon, height
				P, // X Y Z
				ref );

			i->second.x = P.x + off_X;
			i->second.y = P.y + off_Y;
			i->second.z = P.z + off_Z;
		}
	} // end best_gps_path

	if (outInfo) *outInfo = outInfoTemp;


    MRPT_END
}
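// Hedged usage sketch (the rawlog file name and filter length are
// illustrative, not from the source above):
//
//   mrpt::slam::CRawlog rawlog;
//   rawlog.loadFromRawLogFile("dataset_rtk.rawlog");  // hypothetical file
//   mrpt::poses::CPose3DInterpolator robot_path;
//   TPathFromRTKInfo rtk_info;
//   mrpt::topography::path_from_rtk_gps(
//       robot_path, rawlog,
//       0, rawlog.size() - 1,  // first, last entries to process
//       false,                 // isGUI
//       false,                 // disableGPSInterp
//       2,                     // PATH_SMOOTH_FILTER (half-window, in samples)
//       &rtk_info);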
bool ICPslamWrapper::rawlogPlay()
{
  if (rawlog_play_ == false)
  {
    return false;
  }
  else
  {
    size_t rawlogEntry = 0;
#if MRPT_VERSION>=0x199
    // Requires <mrpt/serialization/CArchive.h>, which must be #included at
    // the top of the file (an #include cannot appear inside a function body).
    CFileGZInputStream rawlog_stream(rawlog_filename);
    auto rawlogFile = mrpt::serialization::archiveFrom(rawlog_stream);
#else
    CFileGZInputStream rawlogFile(rawlog_filename);
#endif
    CActionCollection::Ptr action;

    for (;;)
    {
      if (ros::ok())
      {
        if (!CRawlog::getActionObservationPairOrObservation(rawlogFile, action, observations, observation, rawlogEntry))
        {
          break;  // file EOF
        }
        isObsBasedRawlog = (bool)observation;
        // Execute:
        // ----------------------------------------
        tictac.Tic();
        if (isObsBasedRawlog)
          mapBuilder.processObservation(observation);
        else
          mapBuilder.processActionObservation(*action, *observations);
        t_exec = tictac.Tac();
        ROS_INFO("Map building executed in %.03fms", 1000.0f * t_exec);

        ros::Duration(rawlog_play_delay).sleep();

        metric_map_ = mapBuilder.getCurrentlyBuiltMetricMap();

        CPose3D robotPose;
        mapBuilder.getCurrentPoseEstimation()->getMean(robotPose);

        // publish map
        if (metric_map_->m_gridMaps.size())
        {
          nav_msgs::OccupancyGrid _msg;

          // if we have new map for current sensor update it
          mrpt_bridge::convert(*metric_map_->m_gridMaps[0], _msg);
          pub_map_.publish(_msg);
          pub_metadata_.publish(_msg.info);
        }

        if (metric_map_->m_pointsMaps.size())
        {
          sensor_msgs::PointCloud _msg;
          std_msgs::Header header;
          header.stamp = ros::Time(0);
          header.frame_id = global_frame_id;
          // if we have new map for current sensor update it
          mrpt_bridge::point_cloud::mrpt2ros(*metric_map_->m_pointsMaps[0], header, _msg);
          pub_point_cloud_.publish(_msg);
          // pub_metadata_.publish(_msg.info)
        }

        // publish pose
        // geometry_msgs::PoseStamped pose;
        pose.header.frame_id = global_frame_id;

        // the pose
        pose.pose.position.x = robotPose.x();
        pose.pose.position.y = robotPose.y();
        pose.pose.position.z = 0.0;
        pose.pose.orientation = tf::createQuaternionMsgFromYaw(robotPose.yaw());

        pub_pose_.publish(pose);
      }

      run3Dwindow();
      ros::spinOnce();
    }

    // if an mrpt_gui window exists, wait for a key press before closing it
    if (win3D_)
      win3D_->waitForKey();

    return true;
  }
}
Example n. 21
int main(int argc, char* argv[]) {
    if (argc != 3)
    {
        puts("Usage: <ICP_SLAM init file> <port>");
        return -1;
    }

    // connect to memdb
    db = db_connect(argv[2]);
    if (db == -1)
    {
        puts("control failed to connect to memdb.");
        return -1;
    }

   
    CConfigFile iniFile(argv[1]); // configurations file

    // Load configurations
    CMetricMapBuilderICP icp_slam;
    icp_slam.ICP_options.loadFromConfigFile(iniFile, "MappingApplication");
    icp_slam.ICP_params.loadFromConfigFile(iniFile, "ICP");
    icp_slam.initialize();

    // pathfinding
    int resolution = 8;
    PathFinder pathFinder(resolution);
    deque<TPoint2D> path;
    deque<PathCommand> pathCommands;

    // timing
    sf::Clock globalClock;
    double transitionHoverStartTime = 0.0;
    double startReadLidarTime = 0.0;
    double prevTime = 0.0;
    double dt = 0.0;

    // THE state of the main control
    State state = INIT_HOVER;
    State nextState;
    double delta_distance = 0.0;
    double delta_phi = 0.0;

    sf::RenderWindow window(sf::VideoMode(800, 600), "2:00am");
    window.setVerticalSyncEnabled(true);
    sf::Texture texture;
    texture.create(800, 600);
    Matrix<sf::Color> pixels(600, 800);
    sf::Sprite sprite(texture);
    sf::Clock fpsClock;
    int frameCount = 0;
    
    float prev_gyroz = 0.0f;  // previous yaw reading, for unwrapping
    deque<float> past_vx;
    deque<float> past_vy;

    globalClock.restart();

    while (window.isOpen()) { // TODO: What will be our terminal case? target location reached?
        sf::Event event;
        while (window.pollEvent(event))
        {
            switch (event.type)
            {
            case sf::Event::Closed:
                window.close();
                break;
            }
        }


        double curTime = globalClock.getElapsedTime().asSeconds();
        dt = curTime - prevTime;
        prevTime = curTime;

        // read the navadata from the ardrone
        char buffer[1024];
        while (db_tryget(db, "navdata", buffer, sizeof(buffer)) != -1)
        {
            unsigned drone_dt_u;
            float cur_gyroz;
            sscanf(buffer, "%u,%f,%f,%f,%f,%f,%f",
                   &drone_dt_u, &vx, &vy, &vz, &gyrox, &gyroy, &cur_gyroz);

            // compute the angle
            gyrox /= 1000;
            gyroy /= 1000;
            cur_gyroz /= 1000;
            cur_gyroz = fmod(cur_gyroz + 360, 360);

            gyroz += angle_diff(prev_gyroz, cur_gyroz);
            accumPhi = gyroz;
            prev_gyroz = cur_gyroz;

            // here goes the terrible part....
            double drone_dt = (double)drone_dt_u / 1e6;
            past_vx.push_back(vx);
            past_vy.push_back(vy);
            if (past_vx.size() > 19)
            {
                past_vx.pop_front();
                past_vy.pop_front();
            }

            CObservationOdometryPtr obs = CObservationOdometry::Create();
            obs->odometry = CPose2D(accumX/1000.0f, accumY/1000.0f, 0.0f);
            obs->hasEncodersInfo = false;
            obs->hasVelocities = false;
//            icp_slam.processObservation(obs);
        }
        accumX += median(past_vx) * dt;
        accumY += median(past_vy) * dt;
        accumDist = sqrt(pow(accumX, 2.0f) + pow(accumY, 2.0f));
        if (fabs(accumX) > fabs(accumY) && accumX < 0)
            accumDist = -accumDist;
        if (fabs(accumY) > fabs(accumX) && accumY < 0)
            accumDist = -accumDist;


        if (state == INIT_HOVER)
        {
            if (globalClock.getElapsedTime().asSeconds() >= 10.0)
            {
                initialize_feedback();
                state = READ_LIDAR;
                gyroz = accumPhi = 0.0f;
                startReadLidarTime = curTime;
            }
            else
                continue;
        }
        

        // get the map object and the grid representation of the map
        CMultiMetricMap* curMapEst = icp_slam.getCurrentlyBuiltMetricMap();
        COccupancyGridMap2DPtr gridMap = curMapEst->m_gridMaps[0];

        // Get the position estimates
        CPose3D curPosEst = icp_slam.getCurrentPoseEstimation()->getMeanVal();
        // (estimated) X,Y coordinates of the robot, and robot yaw angle (direction)
        double robx = curPosEst.x();
        double roby = curPosEst.y();
        double robphi = curPosEst.yaw();
        // Convert real coordinate to grid coordinate points
        int gridRobX = gridMap->x2idx(robx);
        int gridRobY = gridMap->y2idx(roby);


        // state machine actions
        if (state == READ_LIDAR)
        {
            // Extract the ultrasonic readings and convert them into a 2D range
            // scan observation to feed into icp-slam. Two fields must be set:
            // 1.) scan: a vector of floats with the measured distances, one element per degree
            // 2.) validRange: a vector of flags where 1 means the reading is good and 0 means it's bad (and won't be used)

            while (db_tryget(db, "ultrasonic", buffer, sizeof(buffer)) != -1)
            {
                CObservation2DRangeScanPtr obs = CObservation2DRangeScan::Create();
                obs->aperture = M_PI * 2;
                obs->scan.resize(360, 0);
                obs->validRange.resize(360, 0);

                float distances[4];
                sscanf(buffer, "%f,%f,%f,%f", distances, distances+1, distances+2, distances+3);
                printf("Ultra: %f,%f,%f,%f\n", *distances, *(distances+1), *(distances+2), *(distances+3));

                for (int k = 0; k < 4; ++k)
                {
                    int sensorIndex = k * 90;
                    int startIndex = sensorIndex - 20 + 180;
                    int endIndex = sensorIndex + 20 + 180;
                    for (int i = startIndex; i < endIndex; ++i)
                    {
                        int j = (i + 360) % 360;
                        if (distances[k] >= 0)
                        {
                            obs->scan[j] = distances[k];
                            obs->validRange[j] = 1;
                        }
                        else
                        {
                            obs->scan[j] = distances[k];
                            obs->validRange[j] = 0;
                        }
                    }
                }

                icp_slam.processObservation(obs);
            }

            if (curTime - startReadLidarTime >= 5.0)
                state = PLAN;
        }


        else if (state == PLAN)
        {
            // Perform path finding
            pathFinder.update(*gridMap);
            bool pathFound = true;
            pathFound = pathFinder.findPath(TPoint2D(gridRobX, gridRobY), TPoint2D(gridRobX+40, gridRobY), path);
            printf("pathFound: %d\tpath length: %lu\n", pathFound, path.size());
/*            path.push_back(TPoint2D(gridRobX, gridRobY));
            path.push_back(TPoint2D(gridRobX+1000, gridRobY));
            path.push_back(TPoint2D(gridRobX+1000, gridRobY+1000));
            path.push_back(TPoint2D(gridRobX, gridRobY+1000));
            path.push_back(TPoint2D(gridRobX, gridRobY));
            */
            float prevAngle = 0.0f;
            puts("PATH!!!!");
            for (size_t i = 1; i < path.size(); i++)
            {
                float dx = path[i].x - path[i-1].x;
                float dy = path[i].y - path[i-1].y;
                printf("dx: %f, dy: %f\n", dx, dy);
                float angle = (float)fmod(atan2(dy, dx)*57.2957795 + 360, 360);
                PathCommand command;
                command.delta_phi = angle_diff(prevAngle, angle);
                command.delta_distance = sqrt(dx*dx + dy*dy) * 0.4 * 1000;
                pathCommands.push_back(command);
                prevAngle = angle;
            }

            // (pathCommands has path.size()-1 entries; index it directly)
            for (size_t i = 0; i < pathCommands.size(); ++i)
                printf("%f, %f\n", pathCommands[i].delta_phi, pathCommands[i].delta_distance);

            delta_phi = pathCommands[0].delta_phi;
            delta_distance = pathCommands[0].delta_distance;
            pathCommands.pop_front();
            initialize_feedback();

            state = TURNING;
        }
        
        else if (state == TURNING)
        {
            float k = do_feedback_turn(delta_phi);
            printf("delta_phi: %f, turning k: %f\n", delta_phi, k);
            if (k != 0.0f)
                turn(k);
            else
            {
                // Pass through a short stabilizing hover before moving
                // forward, mirroring the MOVE_FORWARD -> TURNING transition:
                nextState = MOVE_FORWARD;
                state = TRANSITION_HOVER;
                transitionHoverStartTime = curTime;
            }
        }

        else if (state == MOVE_FORWARD)
        {
            float k = do_feedback_forward(delta_distance);
	    printf("delta_distance: %f, forward k: %f\n", delta_distance, k);
            if (k != 0.0f)
                moveForward(k);
            else if (pathCommands.size() == 0)
                state = LAND;
            else
            {
                delta_phi = pathCommands[0].delta_phi;
                delta_distance = pathCommands[0].delta_distance;
                pathCommands.pop_front();
                initialize_feedback();
				
                nextState = TURNING;
                state = TRANSITION_HOVER;
                transitionHoverStartTime = curTime;
            }
        }
        
        else if (state == HOVER)
        {
            hover();
        }

        else if (state == TRANSITION_HOVER)
        {
            hover();
            if (curTime - transitionHoverStartTime > 2.0)
            {
                state = nextState;
                initialize_feedback();
            }
        }

        else if (state == LAND)
        {
            land();
        }
 
        // windows drawing
        window.clear(sf::Color::White);
        sf::View view;
        view.setSize(800, 600);
        view.setCenter(gridRobX, gridRobY);
        window.setView(view);

        // draw the grayscale probability map
        int yStart = max(0, gridRobY - 300);
        int yEnd = min((int)gridMap->getSizeY(), gridRobY + 300);
        int xStart = max(0, gridRobX - 400);
        int xEnd = min((int)gridMap->getSizeX(), gridRobX + 400);
        for (int y = yStart; y < yEnd; ++y)
        {
            for (int x = xStart; x < xEnd; ++x)
            {
                sf::Color &color = pixels(y-yStart, x-xStart);
                sf::Uint8 col = gridMap->getCell(x, y) * 255;
                color.r = col;
                color.g = col;
                color.b = col;
            }
        }
        texture.update((sf::Uint8*)pixels.getData());
        sprite.setPosition(xStart, yStart);
        window.draw(sprite);

        // draw the robot's position
        sf::CircleShape circle(5);
        circle.setPosition(gridRobX-resolution/2, gridRobY-resolution/2);
        circle.setOutlineColor(sf::Color::Red);
        circle.setFillColor(sf::Color::Red);
        window.draw(circle);

        // draw the path
        std::vector<sf::Vertex> verticies;
        verticies.resize(path.size() + 1);
        verticies[0].position.x = (gridRobX / resolution) * resolution + resolution/2;
        verticies[0].position.y = (gridRobY / resolution) * resolution + resolution/2;
        verticies[0].color = sf::Color::Blue;
        for (unsigned i = 0; i < path.size(); ++i)
        {
            verticies[i+1].position.x = path[i].x * resolution + resolution / 2;
            verticies[i+1].position.y = path[i].y * resolution + resolution / 2;
            verticies[i+1].color = sf::Color::Blue;
        }
        window.draw(&verticies[0], verticies.size(), sf::LinesStrip); 
        
        // draw the grid representation (only the occupied cells)
        sf::Color col = sf::Color::Red;
        col.a = 128;
        for (unsigned y = 0; y < pathFinder.occupancyGrid.height(); ++y)
        {
            for (unsigned x = 0; x < pathFinder.occupancyGrid.width(); ++x)
            {
                if (!pathFinder.occupancyGrid(y, x)) continue;
                sf::RectangleShape rect;
                rect.setPosition(x * resolution, y * resolution);
                rect.setSize(sf::Vector2f(resolution, resolution));
                rect.setFillColor(col);
                window.draw(rect);
            }
        }


        window.display();

        
        frameCount++;
        if (fpsClock.getElapsedTime().asSeconds() >= 1.0f)
        {
            char windowTitle[32];
            sprintf(windowTitle, "%d fps", frameCount);
            window.setTitle(windowTitle);

            fpsClock.restart();
            frameCount = 0;
        }
    }

    puts("exiting?!");

    return 0;
}
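// Hedged sketches of two helpers assumed by the control loop above (their
// real definitions live elsewhere in this project):

#include <algorithm>
#include <cmath>
#include <deque>

// Smallest signed difference between two headings given in [0,360) degrees,
// wrapped to [-180,180):
static float angle_diff(float from, float to)
{
    return fmodf(to - from + 540.0f, 360.0f) - 180.0f;
}

// Median of the recent-velocity window; sorts a copy so the deque order used
// by pop_front() is preserved:
static float median(std::deque<float> v)
{
    if (v.empty()) return 0.0f;
    std::sort(v.begin(), v.end());
    const size_t n = v.size();
    return (n % 2) ? v[n / 2] : 0.5f * (v[n / 2 - 1] + v[n / 2]);
}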
/*---------------------------------------------------------------

					CLSLAM_RBPF_2DLASER

	Implements a 2D local SLAM method based on a RBPF
	    over an occupancy grid map. A part of HMT-SLAM.

\param LMH   The local metric hypothesis which must be updated by this SLAM algorithm.
\param act   The action to process (or NULL).
\param sf    The observations to process (or NULL).

 WE ALREADY HAVE CONTROL OVER THE CRITICAL SECTION OF THE LMHs!

--------------------------------------------------------------- */
void CLSLAM_RBPF_2DLASER::processOneLMH(
	CLocalMetricHypothesis	*LMH,
	const CActionCollectionPtr &actions,
	const CSensoryFramePtr     &sf )
{
	MRPT_START

	// Get the current robot pose estimation:
	TPoseID		currentPoseID = LMH->m_currentRobotPose;

	// If this is the first iteration, just create a new robot pose at the origin:
	if (currentPoseID == POSEID_INVALID )
	{
		currentPoseID = CHMTSLAM::generatePoseID();
		LMH->m_currentRobotPose = currentPoseID; // Change it in the LMH

		// Create a new robot pose:
		CPose3D	initPose(0,0,0);

		ASSERT_( LMH->m_particles.size()>0 );
		for (CLocalMetricHypothesis::CParticleList::iterator  it=LMH->m_particles.begin();it!=LMH->m_particles.end();++it)
			it->d->robotPoses[ currentPoseID ] = initPose;

		ASSERT_( m_parent->m_map.nodeCount()==1 );

		m_parent->m_map_cs.enter();
		CHMHMapNodePtr firstArea = m_parent->m_map.getFirstNode();
		ASSERT_(firstArea);
		LMH->m_nodeIDmemberships[currentPoseID] = firstArea->getID();

		// Set annotation for the reference pose:
		firstArea->m_annotations.setElemental( NODE_ANNOTATION_REF_POSEID,  currentPoseID , LMH->m_ID);
		m_parent->m_map_cs.leave();
	}

	bool	insertNewRobotPose = false;
	if (sf)
	{
		if ( LMH->m_nodeIDmemberships.size()<2 )  // If there is just 1 node (the current robot pose), then there is no observation in the map yet!
		{	// Update map if this is the first observation!
			insertNewRobotPose = true;
		}
		else
		{
			// Check minimum distance from current robot pose to those in the neighborhood:
			map< TPoseID, CPose3D >				lstRobotPoses;
			LMH->getMeans( lstRobotPoses );

			CPose3D	  *currentRobotPose = & lstRobotPoses[currentPoseID];
			float		minDistLin    = 1e6f;
			float		minDistAng    = 1e6f;

			//printf("Poses in graph:\n");
			for (map< TPoseID, CPose3D >::iterator	it = lstRobotPoses.begin();it!=lstRobotPoses.end();++it)
			{
				if (it->first != currentPoseID )
				{
					float linDist = it->second.distanceTo( *currentRobotPose );
					float angDist = fabs(math::wrapToPi( it->second.yaw() - currentRobotPose->yaw() ));

					minDistLin = min( minDistLin, linDist );

					if ( linDist < m_parent->m_options.SLAM_MIN_DIST_BETWEEN_OBS  )
						minDistAng = min(minDistAng, angDist);
				}
			}

			// time to insert a new node??
			insertNewRobotPose = (minDistLin>m_parent->m_options.SLAM_MIN_DIST_BETWEEN_OBS) || ( minDistAng > m_parent->m_options.SLAM_MIN_HEADING_BETWEEN_OBS );
		}

	} // end if there are SF

	// Save data in members so PF callback "prediction_and_update_pfXXXX" have access to them:
	m_insertNewRobotPose   = insertNewRobotPose;

	// ------------------------------------------------
	//  Execute RBPF method:
	// 	1) PROCESS ACTION
	// 	2) PROCESS OBSERVATIONS
	// ------------------------------------------------
	CParticleFilter		pf;
	pf.m_options = m_parent->m_options.pf_options;
	pf.executeOn( *LMH, actions.pointer(), sf.pointer() );

	// 3) The appearance observation: update the log likelihood
	// ...


	// -----------------------------------------------------------
	//			4) UPDATE THE MAP
	// -----------------------------------------------------------
	if (insertNewRobotPose)
	{
		m_parent->printf_debug("[CLSLAM_RBPF_2DLASER] Adding new pose...\n");

		//	Leave the up-to-now "current pose" in the map, insert the SF in it, and...
		// ----------------------------------------------------------------------------
		TPoseID newCurrentPoseID = CHMTSLAM::generatePoseID();

		//	...and create a new "current pose" making a copy of the previous one:
		//     and insert the observations into the metric maps:
		// ----------------------------------------------------------------------------
		for (CLocalMetricHypothesis::CParticleList::iterator partIt = LMH->m_particles.begin();
			  partIt!=LMH->m_particles.end();
			  partIt++)
		{
			const CPose3D  *curRobotPose = &partIt->d->robotPoses[currentPoseID];
			partIt->d->robotPoses[newCurrentPoseID]= *curRobotPose;
			sf->insertObservationsInto( &partIt->d->metricMaps, curRobotPose );
		}

		// Add node membership: for now, a copy of the current pose:
		LMH->m_nodeIDmemberships[newCurrentPoseID] = LMH->m_nodeIDmemberships[currentPoseID];


		// Now, insert the SF in the just added robot pose:
		// -----------------------------------------------------
		LMH->m_SFs[ currentPoseID ] = *sf;

		// Sets the new poseID as current robot pose:
		// ----------------------------------------------------
		TPoseID		 newlyAddedPose = currentPoseID;
		currentPoseID = LMH->m_currentRobotPose = newCurrentPoseID;

		// Mark the "newlyAddedPose" as pending to reconsider in the graph-cut method
		//  (Done in the main loop @ LSLAM thread)
		// --------------------------------------------------------------------------------
		LMH->m_posesPendingAddPartitioner.push_back( newlyAddedPose );

		m_parent->printf_debug("[CLSLAM_RBPF_2DLASER] Added pose %i.\n", (int)newlyAddedPose);


		// Notice LC detectors:
		// ------------------------------
		{
			synch::CCriticalSectionLocker	lock( &m_parent->m_topLCdets_cs );

			for (std::deque<CTopLCDetectorBase*>::iterator it=m_parent->m_topLCdets.begin();it!=m_parent->m_topLCdets.end();++it)
				(*it)->OnNewPose( newlyAddedPose, sf.pointer() );
		}


	} // end of insertNewRobotPose


	MRPT_END
}
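// Note on the criterion above: a new pose node is inserted whenever
//    minDistLin > SLAM_MIN_DIST_BETWEEN_OBS   or
//    minDistAng > SLAM_MIN_HEADING_BETWEEN_OBS,
// i.e. when the robot has moved or turned far enough away from every pose
// already present in the local metric hypothesis.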
/** The PF algorithm implementation for "optimal sampling" approximated with scan matching (Stachniss method)
  */
void  CLSLAM_RBPF_2DLASER::prediction_and_update_pfOptimalProposal(
	CLocalMetricHypothesis	*LMH,
	const mrpt::slam::CActionCollection	* actions,
	const mrpt::slam::CSensoryFrame		* sf,
	const bayes::CParticleFilter::TParticleFilterOptions &PF_options )
{
	MRPT_START

	CTicTac tictac;

	// Get the current robot pose estimation:
	TPoseID		currentPoseID = LMH->m_currentRobotPose;

	// ----------------------------------------------------------------------
	//	  We can execute optimal PF only when we have both, an action, and
	//     a valid observation from which to compute the likelihood:
	//   Accumulate odometry/actions until we have a valid observation, then
	//    process them simultaneously.
	// ----------------------------------------------------------------------
	bool  SFhasValidObservations = false;
	// A valid action?
	if (actions!=NULL)
	{
		CActionRobotMovement2DPtr act = actions->getBestMovementEstimation();	// Find a robot movement estimation:
		if (!act) THROW_EXCEPTION("Action list does not contain any CActionRobotMovement2D derived object!");

		if (!LMH->m_accumRobotMovementIsValid) // Reset accum.
		{
			act->poseChange->getMean( LMH->m_accumRobotMovement.rawOdometryIncrementReading );
			LMH->m_accumRobotMovement.motionModelConfiguration = act->motionModelConfiguration;
		}
		else
			LMH->m_accumRobotMovement.rawOdometryIncrementReading =
				LMH->m_accumRobotMovement.rawOdometryIncrementReading +
				act->poseChange->getMeanVal();

		LMH->m_accumRobotMovementIsValid = true;
	}

	if (sf!=NULL)
	{
		ASSERT_(LMH->m_particles.size()>0);
		SFhasValidObservations = (*LMH->m_particles.begin()).d->metricMaps.canComputeObservationsLikelihood( *sf );
	}

	// All the needed things?
	if (!LMH->m_accumRobotMovementIsValid || !SFhasValidObservations)
		return; // Nothing we can do here...
	ASSERT_(sf!=NULL);
	ASSERT_(!PF_options.adaptiveSampleSize);

	// OK, we have all we need, let's start!

	// The odometry-based increment since last step is
	// in:   LMH->m_accumRobotMovement.rawOdometryIncrementReading
	const CPose2D initialPoseEstimation = LMH->m_accumRobotMovement.rawOdometryIncrementReading;
	LMH->m_accumRobotMovementIsValid = false; // To reset odometry at next iteration!


	// ----------------------------------------------------------------------
	//						1) FIXED SAMPLE SIZE VERSION
	// ----------------------------------------------------------------------

	// ICP used if "pfOptimalProposal_mapSelection" = 0 or 1
	CICP					icp;
	CICP::TReturnInfo	icpInfo;

	// ICP options
	// ------------------------------
	icp.options.maxIterations = 80;
	icp.options.thresholdDist = 0.50f;
	icp.options.thresholdAng = DEG2RAD( 20 );
	icp.options.smallestThresholdDist = 0.05f;
	icp.options.ALFA		  = 0.5f;
	icp.options.onlyClosestCorrespondences = true;
	icp.options.doRANSAC = false;

	// Build the map of points to align:
	CSimplePointsMap	localMapPoints;

	ASSERT_( LMH->m_particles[0].d->metricMaps.m_gridMaps.size() > 0);
	//float	minDistBetweenPointsInLocalMaps = 0.02f; //3.0f * m_particles[0].d->metricMaps.m_gridMaps[0]->getResolution();

	// Build local map:
	localMapPoints.clear();
	localMapPoints.insertionOptions.minDistBetweenLaserPoints =  0.02;
	sf->insertObservationsInto( &localMapPoints );

	// Process the particles
	const size_t M = LMH->m_particles.size();
	LMH->m_log_w_metric_history.resize(M);

	for (size_t i=0;i<M;i++)
	{
		CLocalMetricHypothesis::CParticleData  &part = LMH->m_particles[i];
		CPose3D *part_pose = LMH->getCurrentPose(i);

		if ( LMH->m_SFs.empty() )
		{
			// The first robot pose in the SLAM execution: All m_particles start
			// at the same point (this is the lowest bound of subsequent uncertainty):
			// New pose = old pose.
			// part_pose: Unmodified
		}
		else
		{
			// ICP and add noise:
			CPosePDFGaussian	icpEstimation;

			// Select map to use with ICP:
			CMetricMap *mapalign;

			if (!part.d->metricMaps.m_pointsMaps.empty())
				mapalign = part.d->metricMaps.m_pointsMaps[0].pointer();
			else if (!part.d->metricMaps.m_gridMaps.empty())
				mapalign = part.d->metricMaps.m_gridMaps[0].pointer();
			else
				THROW_EXCEPTION("There is no point or grid map. At least one needed for ICP.");

			// Use ICP to align to each particle's map:
			CPosePDFPtr alignEst = icp.Align(
				mapalign,
				&localMapPoints,
				initialPoseEstimation,
				NULL,
				&icpInfo);

			icpEstimation.copyFrom( *alignEst );

#if 0
			// HACK:
			CPose3D Ap = finalEstimatedPoseGauss.mean - ith_last_pose;
			double  Ap_dist = Ap.norm();
			finalEstimatedPoseGauss.cov.zeros();
			finalEstimatedPoseGauss.cov(0,0) = square( fabs(Ap_dist)*0.01 );
			finalEstimatedPoseGauss.cov(1,1) = square( fabs(Ap_dist)*0.01 );
			finalEstimatedPoseGauss.cov(2,2) = square( fabs(Ap.yaw())*0.02 );
#endif

			// Generate gaussian-distributed 2D-pose increments according to "finalEstimatedPoseGauss":
			// -------------------------------------------------------------------------------------------
			// Set the gaussian pose:
			CPose3DPDFGaussian finalEstimatedPoseGauss( icpEstimation );

			CPose3D noisy_increment;
			finalEstimatedPoseGauss.drawSingleSample(noisy_increment);
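			// (Each particle draws an independent increment from the ICP
			//  posterior, so the registration uncertainty plays the role of
			//  the proposal/process noise.)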

			CPose3D new_pose;
			new_pose.composeFrom(*part_pose,noisy_increment);

			CPose2D new_pose2d = CPose2D(new_pose);

			// Add the pose to the path:
			part.d->robotPoses[ LMH->m_currentRobotPose ] = new_pose;

			// Update the weight:
			// ---------------------------------------------------------------------------
			const double log_lik =
				PF_options.powFactor *
				auxiliarComputeObservationLikelihood(
					PF_options,
					LMH,
					i,
					sf,
					&new_pose2d);

			part.log_w += log_lik;
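			// (powFactor <= 1 tempers the likelihood, which mitigates particle
			//  depletion when the observation model is very peaked.)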

			// Add to historic record of log_w weights:
			LMH->m_log_w_metric_history[i][currentPoseID] += log_lik;

		} // end else we can do ICP

	} // end for each particle i

	// Accumulate the log likelihood of this LMH as a whole:
	double out_max_log_w;
	LMH->normalizeWeights( &out_max_log_w );	// Normalize weights:
	LMH->m_log_w += out_max_log_w;

	printf("[CLSLAM_RBPF_2DLASER] Overall likelihood = %.2e\n",out_max_log_w);
	printf("[CLSLAM_RBPF_2DLASER] Done in %.03fms\n",1e3*tictac.Tac());

	MRPT_END
}
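
/* ---------------------------------------------------------------------------
   A minimal, self-contained sketch of the "ICP as proposal" step used above:
   align a local point map against a reference map, moment-match the result to
   a Gaussian, and draw one noisy pose sample per particle. The function name
   and the reduced option set are illustrative assumptions; the Align() call
   mirrors the one in the filter above (MRPT 1.x API).
--------------------------------------------------------------------------- */
#include <mrpt/slam.h>	// assumed MRPT 1.x umbrella header

using namespace mrpt::poses;
using namespace mrpt::slam;

CPose2D icpProposalSample(
	CMetricMap       *referenceMap,		// a particle's point or grid map
	CSimplePointsMap &localMapPoints,	// built from the current sensory frame
	const CPose2D    &initialGuess )	// e.g. the accumulated odometry
{
	CICP icp;
	icp.options.maxIterations         = 80;		// same settings as above
	icp.options.thresholdDist         = 0.50f;
	icp.options.smallestThresholdDist = 0.05f;

	CICP::TReturnInfo info;
	CPosePDFPtr alignEst = icp.Align(
		referenceMap, &localMapPoints, initialGuess, NULL, &info );

	CPosePDFGaussian est;
	est.copyFrom( *alignEst );		// moment-match to a single Gaussian

	CPose2D sample;
	est.drawSingleSample( sample );	// one independent draw per particle
	return sample;
}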
Esempio n. 24
0
// ------------------------------------------------------
//                  TestLaser2Imgs
// ------------------------------------------------------
void TestLaser2Imgs()
{
	 // Set your rawlog file name
	if (!mrpt::system::fileExists(RAWLOG_FILE))
		THROW_EXCEPTION_CUSTOM_MSG1("Rawlog file does not exist: %s",RAWLOG_FILE.c_str());

	CActionCollectionPtr	action;
	CSensoryFramePtr		observations;
	size_t					rawlogEntry		= 0;
	//bool					end 			= false;
	CDisplayWindow			wind;

	// Set relative path for externally-stored images in rawlogs:
	string	rawlog_images_path = extractFileDirectory( RAWLOG_FILE );
	rawlog_images_path+="/Images";
	CImage::IMAGES_PATH_BASE = rawlog_images_path;		// Set it.

	CFileGZInputStream		rawlogFile( RAWLOG_FILE );


	for (;;)
	{
		if (os::kbhit())
		{
			char c = os::getch();
			if (c==27)
				break;
		}

		// Load action/observation pair from the rawlog:
		// --------------------------------------------------
		cout << "Reading act/oct pair from rawlog..." << endl;
		if (! CRawlog::readActionObservationPair( rawlogFile, action, observations, rawlogEntry) )
			break; // file EOF

		// CAMERA............
		// Get CObservationStereoImages
		CObservationStereoImagesPtr sImgs;
		CObservationImagePtr Img;

		sImgs = observations->getObservationByClass<CObservationStereoImages>();
		if (!sImgs)
		{
			Img = observations->getObservationByClass<CObservationImage>();
			if(!Img)
				continue;
		}

		CPose3D cameraPose;			// Camera pose (B)
		CMatrixDouble33 K;			// Calibration matrix (K)

		sImgs ? sImgs->getSensorPose( cameraPose ) : Img->getSensorPose( cameraPose );
		K = sImgs ? sImgs->leftCamera.intrinsicParams : Img->cameraParams.intrinsicParams;
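		// (K is the usual 3x3 intrinsic matrix: [fx 0 cx; 0 fy cy; 0 0 1].)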

		// LASER.............
		// Get CObservation2DRangeScan
		CObservation2DRangeScanPtr laserScan = observations->getObservationByClass<CObservation2DRangeScan>();
		if (!laserScan) continue;

		// Get Laser Pose (A) (CPose3D)
		CPose3D laserPose;
		laserScan->getSensorPose( laserPose );

		if (fabs(laserPose.yaw())>DEG2RAD(90)) continue; // Only front lasers

		// Get 3D Point relative to the Laser coordinate Frame (P1) (CPoint3D)
		CPoint3D point;
		CSimplePointsMap mapa;
		mapa.insertionOptions.minDistBetweenLaserPoints = 0;
		observations->insertObservationsInto( &mapa );		// <- The map contains the pose of the points (P1)

		// Get the points out of the map:
		vector<float>			X, Y, Z;
		vector<float>::iterator	itX, itY, itZ;
		mapa.getAllPoints(X,Y,Z);

		unsigned int imgW = sImgs? sImgs->imageLeft.getWidth() : Img->image.getWidth();
		unsigned int imgH = sImgs? sImgs->imageLeft.getHeight() : Img->image.getHeight();

		//unsigned int			idx = 0;
		vector<float>			imgX,imgY;
		vector<float>::iterator	itImgX,itImgY;
		imgX.resize( X.size() );
		imgY.resize( Y.size() );

		CImage image;
		image = sImgs ? sImgs->imageLeft : Img->image;

		// Get pixels in the image:
		// Pimg = (kx,ky,k)^T = K(I|0)*pCamera
		// Main loop
		for( itX = X.begin(), itY = Y.begin(), itZ = Z.begin(), itImgX = imgX.begin(), itImgY = imgY.begin();
			 itX != X.end();
			 itX++, itY++, itZ++, itImgX++, itImgY++)
		{
			// Coordinates Transformation
			CPoint3D pLaser(*itX,*itY,*itZ);
			CPoint3D pCamera(pLaser-cameraPose);
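			// (In MRPT, "point - pose" is the inverse composition, i.e. pCamera
			//  is pLaser expressed in the camera coordinate frame.)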

			if( pCamera.z() > 0 )
			{
				*itImgX = - K(0,0)*((pCamera.x())/(pCamera.z())) + K(0,2);
				*itImgY = - K(1,1)*((pCamera.y())/(pCamera.z())) + K(1,2);

				if( *itImgX > 0 && *itImgX < imgW && *itImgY > 0 && *itImgY < imgH )
					image.filledRectangle(*itImgX-1,*itImgY-1,*itImgX+1,*itImgY+1,TColor(255,0,0));
			} // end if
		} // end for

		action.clear_unique();
		observations.clear_unique();

		wind.showImage(image);

		mrpt::system::sleep(50);
	} // end for

	mrpt::system::pause();
}
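
/* ---------------------------------------------------------------------------
   A minimal sketch of the pinhole projection used in the loop above:
   p_img = K * p_cam followed by the perspective divide. The loop above negates
   u and v, which depends on that dataset's image/camera axis convention; this
   helper shows the plain textbook form. The function name and signature are
   illustrative assumptions, not an MRPT API.
--------------------------------------------------------------------------- */
#include <mrpt/math/CMatrixFixedNumeric.h>	// assumed MRPT 1.x header for CMatrixDouble33

// Returns false when the point lies behind the camera (z <= 0).
bool projectPoint(
	const mrpt::math::CMatrixDouble33 &K,	// intrinsic calibration matrix
	double x, double y, double z,			// point in the camera frame
	float &u, float &v )					// resulting pixel coordinates
{
	if (z <= 0) return false;				// behind the image plane
	u = static_cast<float>( K(0,0)*(x/z) + K(0,2) );	// u = fx*x/z + cx
	v = static_cast<float>( K(1,1)*(y/z) + K(1,2) );	// v = fy*y/z + cy
	return true;
}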
Esempio n. 25
0
/*---------------------------------------------------------------
				   getAs3DScene

	Returns a 3D representation of the current robot pose, all
	the poses in the auxiliary graph, and each of the areas
	they belong to.
  ---------------------------------------------------------------*/
void CLocalMetricHypothesis::getAs3DScene( opengl::CSetOfObjectsPtr &objs ) const
{
	objs->clear();

	// -------------------------------------------
	// Draw a grid on the ground:
	// -------------------------------------------
	{
		opengl::CGridPlaneXYPtr obj = opengl::CGridPlaneXY::Create(-100,100,-100,100,0,5);
		obj->setColor(0.4,0.4,0.4);

		objs->insert(obj);  // it will free the memory
	}

	// ---------------------------------------------------------
	// Draw each of the robot poses as 2D/3D ellipsoids
	//  For the current pose, draw the robot itself as well.
	// ---------------------------------------------------------
	std::map< TPoseID, CPose3DPDFParticles > lstPoses;
	std::map< TPoseID, CPose3DPDFParticles >::iterator	it;
	getPathParticles( lstPoses );

	// -----------------------------------------------
	// Draw a 3D coordinate-frame corner for each neighboring area
	// -----------------------------------------------
	{
		CCriticalSectionLocker	locker( &m_parent->m_map_cs );

		for ( TNodeIDSet::const_iterator n=m_neighbors.begin();n!=m_neighbors.end();++n)
		{
			const CHMHMapNodePtr node = m_parent->m_map.getNodeByID( *n );
			ASSERT_(node);
			TPoseID poseID_origin;
			CPose3D originPose;
			if (node->m_annotations.getElemental( NODE_ANNOTATION_REF_POSEID,poseID_origin, m_ID))
			{
				lstPoses[ poseID_origin ].getMean(originPose);

				opengl::CSetOfObjectsPtr corner = stock_objects::CornerXYZ();
				corner->setPose(originPose);
				objs->insert( corner );
			}
		}
	} // end of lock on map_cs

	// Add a camera following the robot:
	// -----------------------------------------
	const CPose3D meanCurPose = lstPoses[ m_currentRobotPose ].getMeanVal();
	{
		opengl::CCameraPtr cam = opengl::CCamera::Create();
		cam->setZoomDistance(85);
		cam->setAzimuthDegrees(45 + RAD2DEG(meanCurPose.yaw()));
		cam->setElevationDegrees(45);
		cam->setPointingAt(meanCurPose);
		objs->insert(cam);
	}



	map<CHMHMapNode::TNodeID,CPoint3D>      areas_mean;  // Store the mean pose of each area
	map<CHMHMapNode::TNodeID,int>			areas_howmany;

	for (it=lstPoses.begin(); it!=lstPoses.end();it++)
	{
		opengl::CEllipsoidPtr ellip = opengl::CEllipsoid::Create();
		// Color depending on being into the current area:
		if ( m_nodeIDmemberships.find(it->first)->second == m_nodeIDmemberships.find(m_currentRobotPose)->second )
			ellip->setColor(0,0,1);
		else
			ellip->setColor(1,0,0);

		const CPose3DPDFParticles  *pdfParts = &it->second;
		CPose3DPDFGaussian   pdf;
		pdf.copyFrom( *pdfParts );

		// Mean:
		ellip->setLocation(pdf.mean.x(), pdf.mean.y(), pdf.mean.z()+0.005);  // To be above the ground gridmap...

		// Cov:
		CMatrixDouble C = CMatrixDouble(pdf.cov); // 6x6 cov.
		C.setSize(3,3);
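		// (Keeping the top-left block is valid because the state ordering is
		//  [x y z yaw pitch roll], so rows/cols 0..2 hold the position part.)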

		// Is it effectively a 2D pose?
		if ( pdf.mean.pitch() == 0 && pdf.mean.roll() == 0 && pdf.cov(2,2) < 1e-4f )
			C.setSize(2,2);

		ellip->setCovMatrix(C);

		ellip->enableDrawSolid3D(false);

		// Name:
		ellip->setName( format("P%u", (unsigned int) it->first ) );
		ellip->enableShowName();

		// Add it:
		objs->insert( ellip );

		// Add an arrow for the mean direction also:
		{
			mrpt::opengl::CArrowPtr obj = mrpt::opengl::CArrow::Create(
				0,0,0,
				0.20,0,0,
				0.25f,0.005f,0.02f);
			obj->setColor(1,0,0);
			obj->setPose(pdf.mean);
			objs->insert( obj );
		}



		// Is this the current robot pose?
		if (it->first == m_currentRobotPose )
		{
			// Draw robot:
			opengl::CSetOfObjectsPtr robot = stock_objects::RobotPioneer();
			robot->setPose(pdf.mean);

			// Add it:
			objs->insert( robot );
		}
		else // The current robot pose does not count
		{
			// Compute the area means:
			std::map<TPoseID,CHMHMapNode::TNodeID>::const_iterator itAreaId = m_nodeIDmemberships.find( it->first );
			ASSERT_( itAreaId != m_nodeIDmemberships.end() );
			CHMHMapNode::TNodeID  areaId = itAreaId->second;
			areas_howmany[ areaId  ]++;
			areas_mean[ areaId  ].x_incr( pdf.mean.x() );
			areas_mean[ areaId  ].y_incr( pdf.mean.y() );
			areas_mean[ areaId  ].z_incr( pdf.mean.z() );
		}
	} // end for it

	// Average area means:
	map<CHMHMapNode::TNodeID,CPoint3D>::iterator itMeans;
	map<CHMHMapNode::TNodeID,int>::iterator      itHowMany;
	ASSERT_( areas_howmany.size() == areas_mean.size() );
	for ( itMeans = areas_mean.begin(), itHowMany = areas_howmany.begin(); itMeans!=areas_mean.end(); itMeans++, itHowMany++ )
	{
		ASSERT_( itHowMany->second >0);

		float f = 1.0f / itHowMany->second;
		itMeans->second *= f;
	}


	// -------------------------------------------------------------------
	// Draw lines between robot poses & their corresponding area sphere
	// -------------------------------------------------------------------
	for (it=lstPoses.begin(); it!=lstPoses.end();it++)
	{
		if (it->first != m_currentRobotPose )
		{
			CPoint3D  areaPnt( areas_mean[ m_nodeIDmemberships.find( it->first )->second ] );
			areaPnt.z_incr( m_parent->m_options.VIEW3D_AREA_SPHERES_HEIGHT );

			const CPose3DPDFParticles  *pdfParts = &it->second;
			CPose3DPDFGaussian   pdf;
			pdf.copyFrom( *pdfParts );

			opengl::CSimpleLinePtr line = opengl::CSimpleLine::Create();
			line->setColor(0.8,0.8,0.8, 0.3);
			line->setLineWidth(2);

			line->setLineCoords(
				pdf.mean.x(), pdf.mean.y(), pdf.mean.z(),
				areaPnt.x(), areaPnt.y(), areaPnt.z() );
			objs->insert( line );
		}
	} // end for it

	// -------------------------------------------------------------------
	// Draw lines for links between robot poses
	// -------------------------------------------------------------------
//	for (it=m_robotPoses.begin(); it!=m_robotPoses.end();it++)
/*	for (it=lstPoses.begin(); it!=lstPoses.end();it++)
	{
		const CPose3DPDFParticles  *myPdfParts = &it->second;
		CPose3DPDFGaussian   myPdf;
		myPdf.copyFrom( *myPdfParts );

		std::map<TPoseID,TInterRobotPosesInfo>::const_iterator itLink;
		for (itLink=it->second.m_links.begin();itLink!=it->second.m_links.end();itLink++)
		{
			if (itLink->second.SSO>0.7)
			{
				CRobotPosesAuxiliaryGraph::const_iterator hisIt = m_robotPoses.find( itLink->first );
				ASSERT_( hisIt !=m_robotPoses.end() );

				const CPose3DPDFGaussian  *hisPdf = & hisIt->second.m_pose;

				opengl::CSimpleLinePtr line = opengl::CSimpleLine::Create();
				line->m_color_R = 0.2f;
				line->m_color_G = 0.8f;
				line->m_color_B = 0.2f;
				line->m_color_A = 0.3f;
				line->m_lineWidth = 3.0f;

				line->m_x0 = myPdf->mean.x;
				line->m_y0 = myPdf->mean.y;
				line->m_z0 = myPdf->mean.z;

				line->m_x1 = hisPdf->mean.x;
				line->m_y1 = hisPdf->mean.y;
				line->m_z1 = hisPdf->mean.z;

				objs->insert( line );
			}
		}
	} // end for it
*/

	// ---------------------------------------------------------
	// Draw each of the areas in the neighborhood:
	// ---------------------------------------------------------
	{
		CCriticalSectionLocker	locker( &m_parent->m_map_cs ); //To access nodes' labels.

		for ( itMeans = areas_mean.begin(); itMeans!=areas_mean.end(); itMeans++ )
		{
			opengl::CSpherePtr sphere = opengl::CSphere::Create();

			if (itMeans->first == m_nodeIDmemberships.find( m_currentRobotPose)->second )
			{   // Color of current area
				sphere->setColor(0.1,0.1,0.7);
			}
			else
			{   // Color of other areas
				sphere->setColor(0.7,0,0);
			}

			sphere->setLocation(itMeans->second.x(), itMeans->second.y(), itMeans->second.z() + m_parent->m_options.VIEW3D_AREA_SPHERES_HEIGHT);

			sphere->setRadius( m_parent->m_options.VIEW3D_AREA_SPHERES_RADIUS );

			// Add it:
			objs->insert( sphere );

			// And text label:
			opengl::CTextPtr txt = opengl::CText::Create();
			txt->setColor(1,1,1);

			const CHMHMapNodePtr node = m_parent->m_map.getNodeByID( itMeans->first );
			ASSERT_(node);

			txt->setLocation( itMeans->second.x(), itMeans->second.y(), itMeans->second.z() + m_parent->m_options.VIEW3D_AREA_SPHERES_HEIGHT );

//			txt->m_str = node->m_label;
			txt->setString( format("%li",(long int)node->getID()) );

			objs->insert( txt );
		}
	} // end of lock on map_cs

	// ---------------------------------------------------------
	// Draw links between areas:
	// ---------------------------------------------------------
	{
		CCriticalSectionLocker	locker( &m_parent->m_map_cs );

		for ( itMeans = areas_mean.begin(); itMeans!=areas_mean.end(); itMeans++ )
		{
			CHMHMapNode::TNodeID  srcAreaID = itMeans->first;
			const CHMHMapNodePtr srcArea = m_parent->m_map.getNodeByID( srcAreaID );
			ASSERT_(srcArea);

			TArcList		lstArcs;
			srcArea->getArcs(lstArcs);
			for (TArcList::const_iterator a=lstArcs.begin();a!=lstArcs.end();++a)
			{
				// Only arcs leaving this area (the target must also be inside the LMH neighborhood):
				if ( (*a)->getNodeFrom() == srcAreaID )
				{
					map<CHMHMapNode::TNodeID,CPoint3D>::const_iterator trgAreaPoseIt = areas_mean.find( (*a)->getNodeTo() );
					if ( trgAreaPoseIt != areas_mean.end() )
					{
						// Yes, target node of the arc is in the LMH: Draw it:
						opengl::CSimpleLinePtr line = opengl::CSimpleLine::Create();
						line->setColor(0.8,0.8,0);
						line->setLineWidth(3);

						line->setLineCoords(
							areas_mean[srcAreaID].x(), areas_mean[srcAreaID].y(), areas_mean[srcAreaID].z() + m_parent->m_options.VIEW3D_AREA_SPHERES_HEIGHT,
							trgAreaPoseIt->second.x(), trgAreaPoseIt->second.y(), trgAreaPoseIt->second.z() + m_parent->m_options.VIEW3D_AREA_SPHERES_HEIGHT );

						objs->insert( line );
					}
				}
			} // end for each arc
		} // end for each area

	} // end of lock on map_cs


}
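
/* ---------------------------------------------------------------------------
   What pdf.copyFrom(particles) does above, in essence: moment-matching a
   weighted sample set to a Gaussian. A simplified 1-D sketch under that
   assumption (the real MRPT code handles 6-D poses and angle wrapping):
--------------------------------------------------------------------------- */
#include <algorithm>
#include <cmath>
#include <vector>

// Fit mean and variance to samples carrying log-weights.
void momentMatch1D( const std::vector<double> &samples,
					const std::vector<double> &log_w,
					double &mean, double &var )
{
	mean = var = 0;
	if (samples.empty() || samples.size() != log_w.size()) return;

	// Normalize the weights safely: subtract the max log-weight before exp().
	double maxLw = log_w[0];
	for (size_t i=1;i<log_w.size();i++) maxLw = std::max(maxLw, log_w[i]);

	std::vector<double> w(log_w.size());
	double sumW = 0;
	for (size_t i=0;i<log_w.size();i++) { w[i] = std::exp(log_w[i]-maxLw); sumW += w[i]; }

	for (size_t i=0;i<samples.size();i++) mean += w[i]*samples[i];
	mean /= sumW;

	for (size_t i=0;i<samples.size();i++) var += w[i]*(samples[i]-mean)*(samples[i]-mean);
	var /= sumW;
}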