Example #1
int DiscretePolicy::confSample(real* Qs, real* vQs) {
    static NormalDistribution gaussian;
    static LaplacianDistribution laplacian;
    static UniformDistribution uniform;

    for (int a=0; a<n_actions; a++) {
        //eval[a] = Qs[a] + urandom(-1.0,1.0)*vQs[a];
        switch(confidence_distribution) {
        case SINGULAR:
            sample[a] = Qs[a];
            break;
        case BOUNDED:
            uniform.setMean(Qs[a]);
            uniform.setVariance(vQs[a]);
            sample[a] = uniform.generate();
            break;
        case GAUSSIAN:
            gaussian.setMean(Qs[a]);
            gaussian.setVariance(vQs[a]);
            sample[a] = gaussian.generate();
            break;
        case LAPLACIAN:
            laplacian.setMean(Qs[a]);
            laplacian.setVariance(vQs[a]);
            // mean was already set above; adding Qs[a] again would double-count it
            sample[a] = laplacian.generate();
            break;
        default:
            Serror ("Unknown distribution ID:%d\n", confidence_distribution);
            break;
        }
    }
    return argMax(sample);
}
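The switch above implements Thompson-style action selection: each Q-value Qs[a] is treated as the centre of a confidence distribution with spread vQs[a], one value is drawn per action, and the argmax of the draws picks the action. A minimal self-contained sketch of the GAUSSIAN branch, using only the standard library (the names here are illustrative, not the DiscretePolicy API):

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <random>
#include <vector>

// Illustrative stand-in for the GAUSSIAN branch of confSample: draw one
// value per action from N(Qs[a], vQs[a]) and return the index of the
// largest draw.
int confSampleGaussian(const std::vector<double>& Qs,
                       const std::vector<double>& vQs,
                       std::mt19937& rng)
{
    std::vector<double> sample(Qs.size());
    for (std::size_t a = 0; a < Qs.size(); ++a) {
        // std::normal_distribution takes a standard deviation, so take
        // the square root of the variance vQs[a]
        std::normal_distribution<double> gaussian(Qs[a], std::sqrt(vQs[a]));
        sample[a] = gaussian(rng);
    }
    return static_cast<int>(std::distance(
        sample.begin(), std::max_element(sample.begin(), sample.end())));
}

As vQs[a] shrinks with growing confidence, the draws concentrate on the greedy action, which is the exploration/exploitation trade-off this sampling encodes.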
Example #2
TEST_F(AnalysisFixture, NormalDistribution) {

  //Check default constructor
  NormalDistribution defaultDistribution;
  EXPECT_EQ(defaultDistribution.mean(),0.0);
  EXPECT_EQ(defaultDistribution.standardDeviation(),1.0);
  EXPECT_EQ(defaultDistribution.lowerBound(),boost::none);

  //Check normal constructor
  NormalDistribution distribution(2.0,3.0);
  distribution.setLowerBound(0.0);
  distribution.setUpperBound(4.0);
  EXPECT_EQ(distribution.mean(),2.0);
  EXPECT_EQ(distribution.standardDeviation(),3.0);
  EXPECT_EQ(distribution.lowerBound(),0.0);
  EXPECT_EQ(distribution.upperBound(),4.0);
  
}
Example #3
TEST_F(AnalysisFixture,UncertaintyDescription_ConstructionCopyingAndCasting) {
  NormalDistribution stdNormal;
  {
    UncertaintyDescription baseCopy = stdNormal; // copies impl
  } // calls destructor
  EXPECT_DOUBLE_EQ(0.0,stdNormal.mean());
  EXPECT_DOUBLE_EQ(1.0,stdNormal.standardDeviation());
  EXPECT_FALSE(stdNormal.lowerBound());
  EXPECT_FALSE(stdNormal.upperBound());

  {
    UncertaintyDescription baseCopy(stdNormal);
    NormalDistribution stdNormalCopy = baseCopy.cast<NormalDistribution>();
    EXPECT_EQ(UncertaintyDescriptionType(UncertaintyDescriptionType::normal_uncertain),baseCopy.type());
  }

  {
    GenericUncertaintyDescription genericCopy = stdNormal.cast<GenericUncertaintyDescription>();
    EXPECT_EQ(2u,genericCopy.attributes().size());
  }

}
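The braces in this test check that destroying a base-class copy leaves the original untouched, and that cast<T>() only succeeds when the wrapped type matches the target. A rough sketch of that type-erased wrapper pattern, with hypothetical names that only mimic the OpenStudio classes:

#include <memory>
#include <stdexcept>
#include <string>

// Hypothetical stand-ins for the wrapper/cast pattern the test exercises;
// this is not the OpenStudio implementation.
struct DescriptionImpl {
    std::string type;   // type tag checked by cast<T>()
    double mean = 0.0;
    double stdDev = 1.0;
};

class Description {
public:
    explicit Description(std::shared_ptr<DescriptionImpl> impl)
        : m_impl(std::move(impl)) {}
    // copying clones the impl, so destroying a copy leaves the source intact
    Description(const Description& other)
        : m_impl(std::make_shared<DescriptionImpl>(*other.m_impl)) {}
    std::string type() const { return m_impl->type; }
    // re-wrap the impl as T, but only if the stored type tag matches
    template <class T>
    T cast() const {
        if (m_impl->type != T::typeName())
            throw std::runtime_error("bad UncertaintyDescription cast");
        return T(m_impl);
    }
protected:
    std::shared_ptr<DescriptionImpl> m_impl;
};

class NormalDescription : public Description {
public:
    NormalDescription()
        : Description(std::make_shared<DescriptionImpl>(
              DescriptionImpl{"normal_uncertain"})) {}
    explicit NormalDescription(std::shared_ptr<DescriptionImpl> impl)
        : Description(std::move(impl)) {}
    static std::string typeName() { return "normal_uncertain"; }
    double mean() const { return m_impl->mean; }
    double standardDeviation() const { return m_impl->stdDev; }
};

In this sketch cast<T>() shares the impl while plain copies clone it; the actual OpenStudio semantics may differ, but the behavior the test asserts (safe destruction of a copy, type-checked casting) holds either way.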
Example #4
ICPRegistrationTools::CC_ICP_RESULT ICPRegistrationTools::RegisterClouds(GenericIndexedCloudPersist* _modelCloud,
																			GenericIndexedCloudPersist* _dataCloud,
																			PointProjectionTools::Transformation& transform,
																			CC_ICP_CONVERGENCE_TYPE convType,
																			double minErrorDecrease,
																			unsigned nbMaxIterations,
																			double& finalError,
																			GenericProgressCallback* progressCb/*=0*/,
																			bool filterOutFarthestPoints/*=false*/,
																			unsigned samplingLimit/*=20000*/,
																			ScalarField* modelWeights/*=0*/,
																			ScalarField* dataWeights/*=0*/)
{
    assert(_modelCloud && _dataCloud);

    finalError = -1.0;

	//MODEL CLOUD (reference, won't move)
	GenericIndexedCloudPersist* modelCloud=_modelCloud;
	ScalarField* _modelWeights=modelWeights;
	{
		if (_modelCloud->size()>samplingLimit) //shall we resample the clouds? (speed increase)
		{
			ReferenceCloud* subModelCloud = CloudSamplingTools::subsampleCloudRandomly(_modelCloud,samplingLimit);
			if (subModelCloud && modelWeights)
			{
				_modelWeights = new ScalarField("ResampledModelWeights",modelWeights->isPositive());
				unsigned realCount = subModelCloud->size();
				if (_modelWeights->reserve(realCount))
				{
					for (unsigned i=0;i<realCount;++i)
						_modelWeights->addElement(modelWeights->getValue(subModelCloud->getPointGlobalIndex(i)));
					_modelWeights->computeMinAndMax();
				}
				else
				{
					//not enough memory
					delete subModelCloud;
					subModelCloud=0;
				}
			}
			modelCloud = subModelCloud;
		}
		if (!modelCloud) //something bad happened
		{
			//release the resampled weights if we own them
			if (_modelWeights && _modelWeights != modelWeights)
				_modelWeights->release();
			return ICP_ERROR_NOT_ENOUGH_MEMORY;
		}
	}

	//DATA CLOUD (will move)
	ReferenceCloud* dataCloud=0;
	ScalarField* _dataWeights=dataWeights;
	SimpleCloud* rotatedDataCloud=0; //temporary structure (rotated vertices)
	{
		if (_dataCloud->size()>samplingLimit) //shall we resample the clouds? (speed increase)
		{
			dataCloud = CloudSamplingTools::subsampleCloudRandomly(_dataCloud,samplingLimit);
			if (dataCloud && dataWeights)
			{
				_dataWeights = new ScalarField("ResampledDataWeights",dataWeights->isPositive());
				unsigned realCount = dataCloud->size();
				if (_dataWeights->reserve(realCount))
				{
					for (unsigned i=0;i<realCount;++i)
						_dataWeights->addElement(dataWeights->getValue(dataCloud->getPointGlobalIndex(i)));
					_dataWeights->computeMinAndMax();
				}
				else
				{
					//not enough memory
					delete dataCloud;
					dataCloud=0;
				}
			}
		}
		else
		{
			//create a 'fake' reference cloud with all points
			dataCloud = new ReferenceCloud(_dataCloud);
			if (dataCloud->reserve(_dataCloud->size()))
			{
				dataCloud->addPointIndex(0,_dataCloud->size());
			}
			else //not enough memory
			{
				delete dataCloud;
				dataCloud=0;
			}
		}

		if (!dataCloud || !dataCloud->enableScalarField()) //something bad happened
		{
			if (dataCloud)
				delete dataCloud;
			if (modelCloud && modelCloud != _modelCloud)
				delete modelCloud;
			if (_modelWeights && _modelWeights!=modelWeights)
				_modelWeights->release();
			return ICP_ERROR_NOT_ENOUGH_MEMORY;
		}
	}

	//Closest Point Set (see ICP algorithm)
	ReferenceCloud* CPSet = new ReferenceCloud(modelCloud);
	ScalarField* CPSetWeights = _modelWeights ? new ScalarField("CPSetWeights",_modelWeights->isPositive()) : 0;

	//algorithm result
	CC_ICP_RESULT result = ICP_NOTHING_TO_DO;
	unsigned iteration = 0;
	double error = 0.0;

    //we compute the initial distance between the two clouds (and the CPSet by the way)
    dataCloud->forEach(ScalarFieldTools::razDistsToHiddenValue);
	DistanceComputationTools::Cloud2CloudDistanceComputationParams params;
	params.CPSet = CPSet;
	if (DistanceComputationTools::computeHausdorffDistance(dataCloud,modelCloud,params,progressCb)>=0)
	{
		//12/11/2008 - A.BEY: ICP guarantees only the decrease of the squared distances sum (not the distances sum)
		error = DistanceComputationTools::computeMeanSquareDist(dataCloud);
	}
	else
	{
	    //if an error occurred during distances computation...
		error = -1.0;
		result = ICP_ERROR_DIST_COMPUTATION;
	}

	if (error > 0.0)
	{
#ifdef _DEBUG
		FILE* fp = fopen("registration_trace_log.txt","wt");
		if (fp)
			fprintf(fp,"Initial error: %f\n",error);
#endif

		double lastError=error,initialErrorDelta=0.0,errorDelta=0.0;
		result = ICP_APPLY_TRANSFO; //as soon as we do at least one iteration, we'll have to apply a transformation

		while (true)
		{
			++iteration;

			//regarding the progress bar
			if (progressCb && iteration>1) //on the first iteration, we do... nothing
			{
				char buffer[256];
				//then on the second iteration, we init/show it
				if (iteration==2)
				{
					initialErrorDelta = errorDelta;

					progressCb->reset();
					progressCb->setMethodTitle("Clouds registration");
					sprintf(buffer,"Initial mean square error = %f\n",lastError);
					progressCb->setInfo(buffer);
					progressCb->start();
				}
				else //and after we update it continuously
				{
                    sprintf(buffer,"Mean square error = %f [%f]\n",error,-errorDelta);
					progressCb->setInfo(buffer);
					progressCb->update((float)((initialErrorDelta-errorDelta)/(initialErrorDelta-minErrorDecrease)*100.0));
				}

				if (progressCb->isCancelRequested())
                    break;
			}

			//shall we remove points with distance above a given threshold?
			if (filterOutFarthestPoints)
			{
				NormalDistribution N;
				N.computeParameters(dataCloud,false);
				if (N.isValid())
				{
					DistanceType mu,sigma2;
					N.getParameters(mu,sigma2);

					ReferenceCloud* c = new ReferenceCloud(dataCloud->getAssociatedCloud());
					ReferenceCloud* newCPSet = new ReferenceCloud(CPSet->getAssociatedCloud()); //we must also update the CPSet!
					ScalarField* newdataWeights = (_dataWeights ? new ScalarField("ResampledDataWeights",_dataWeights->isPositive()) : 0);

					unsigned n=dataCloud->size();
					if (!c->reserve(n) || !newCPSet->reserve(n) || (newdataWeights && !newdataWeights->reserve(n)))
					{
						//not enough memory
						delete c;
						delete newCPSet;
						if (newdataWeights)
							newdataWeights->release();
						result = ICP_ERROR_REGISTRATION_STEP;
						break;
					}

					//we keep only the points with "not too high" distances
					DistanceType maxDist = mu+3.0f*sqrt(sigma2);
					unsigned realSize=0;
					for (unsigned i=0;i<n;++i)
					{
						unsigned index = dataCloud->getPointGlobalIndex(i);
						if (dataCloud->getAssociatedCloud()->getPointScalarValue(index)<maxDist)
						{
							c->addPointIndex(index);
							newCPSet->addPointIndex(CPSet->getPointGlobalIndex(i));
							if (newdataWeights)
								newdataWeights->addElement(_dataWeights->getValue(index));
							++realSize;
						}
					}

					//resize should be ok as we have called reserve first
					c->resize(realSize); //should always be ok as counter<n
					newCPSet->resize(realSize); //idem
					if (newdataWeights)
						newdataWeights->resize(realSize); //idem

					//replace old structures by new ones
					delete CPSet;
					CPSet=newCPSet;
					delete dataCloud;
					dataCloud=c;
					if (_dataWeights)
					{
						_dataWeights->release();
						_dataWeights = newdataWeights;
					}
				}
			}

			//update CPSet weights (if any)
			if (_modelWeights)
			{
				unsigned count=CPSet->size();
				assert(CPSetWeights);
				if (CPSetWeights->currentSize()!=count)
				{
					if (!CPSetWeights->resize(count))
					{
						result = ICP_ERROR_REGISTRATION_STEP;
						break;
					}
				}
				for (unsigned i=0;i<count;++i)
					CPSetWeights->setValue(i,_modelWeights->getValue(CPSet->getPointGlobalIndex(i)));
				CPSetWeights->computeMinAndMax();
			}

            PointProjectionTools::Transformation currentTrans;
			//if registration procedure fails
            if (!RegistrationTools::RegistrationProcedure(dataCloud, CPSet, currentTrans, _dataWeights, _modelWeights))
			{
				result = ICP_ERROR_REGISTRATION_STEP;
				break;
			}

			//get rotated data cloud
			if (!rotatedDataCloud || filterOutFarthestPoints)
			{
				//we create a new structure, with rotated points
				SimpleCloud* newDataCloud = PointProjectionTools::applyTransformation(dataCloud,currentTrans);
				if (!newDataCloud)
				{
					//not enough memory
					result = ICP_ERROR_REGISTRATION_STEP;
					break;
				}
				//update dataCloud
				if (rotatedDataCloud)
					delete rotatedDataCloud;
				rotatedDataCloud = newDataCloud;
				delete dataCloud;
				dataCloud = new ReferenceCloud(rotatedDataCloud);
				if (!dataCloud->reserve(rotatedDataCloud->size()))
				{
					//not enough memory
					result = ICP_ERROR_REGISTRATION_STEP;
					break;
				}
				dataCloud->addPointIndex(0,rotatedDataCloud->size());
			}
			else
			{
				//we simply have to rotate the existing temporary cloud
				rotatedDataCloud->applyTransformation(currentTrans);
			}

			//compute (new) distances to model
			params.CPSet = CPSet;
			if (DistanceComputationTools::computeHausdorffDistance(dataCloud,modelCloud,params)<0)
			{
                //an error occurred during distances computation...
				result = ICP_ERROR_REGISTRATION_STEP;
				break;
			}

			lastError = error;
            //12/11/2008 - A.BEY: ICP guarantees only the decrease of the squared distances sum (not the distances sum)
			error = DistanceComputationTools::computeMeanSquareDist(dataCloud);
			finalError = (error>0 ? sqrt(error) : error);

#ifdef _DEBUG
			if (fp)
			    fprintf(fp,"Iteration #%i --> error: %f\n",iteration,error);
#endif

            //error update
			errorDelta = lastError-error;

            //is it better?
            if (errorDelta > 0.0)
            {
			    //we update global transformation matrix
			    if (currentTrans.R.isValid())
			    {
			        if (transform.R.isValid())
                        transform.R = currentTrans.R * transform.R;
                    else
                        transform.R = currentTrans.R;
                    transform.T = currentTrans.R * transform.T;
			    }
                transform.T += currentTrans.T;
            }

            //stop criterion
            if ((errorDelta < 0.0) || //error increase
                (convType == MAX_ERROR_CONVERGENCE && errorDelta < minErrorDecrease) || //convergence reached
                (convType == MAX_ITER_CONVERGENCE && iteration > nbMaxIterations)) //max iteration reached
			{
				break;
			}

		}

		if (progressCb)
            progressCb->stop();

#ifdef _DEBUG
		if (fp)
		{
			fclose(fp);
			fp=0;
		}
#endif
	}

	if (CPSet)
		delete CPSet;
	CPSet=0;
	if (CPSetWeights)
		CPSetWeights->release();

	//release memory
	if (modelCloud && modelCloud != _modelCloud)
        delete modelCloud;
	if (_modelWeights && _modelWeights!=modelWeights)
		_modelWeights->release();
	if (dataCloud)
		delete dataCloud;
	if (_dataWeights && _dataWeights!=dataWeights)
		_dataWeights->release();
	if (rotatedDataCloud)
		delete rotatedDataCloud;

	return result;
}
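One detail worth calling out in the loop above is how each iteration's rigid transform is folded into the global one whenever the step reduced the error: R <- R_step * R and T <- R_step * T + T_step, so (transform.R, transform.T) always maps the original data cloud onto the model. A bare-bones sketch of that composition, with plain arrays standing in for the library's matrix and vector types:

#include <array>

// Plain 3x3 / 3-vector stand-ins for the library's types.
using Vec3 = std::array<double, 3>;
using Mat3 = std::array<std::array<double, 3>, 3>;

Mat3 mul(const Mat3& A, const Mat3& B)
{
    Mat3 C{};
    for (int i = 0; i < 3; ++i)
        for (int j = 0; j < 3; ++j)
            for (int k = 0; k < 3; ++k)
                C[i][j] += A[i][k] * B[k][j];
    return C;
}

Vec3 mul(const Mat3& A, const Vec3& v)
{
    Vec3 r{};
    for (int i = 0; i < 3; ++i)
        for (int k = 0; k < 3; ++k)
            r[i] += A[i][k] * v[k];
    return r;
}

// fold one iteration's rigid transform (Rs, Ts) into the global (Rg, Tg),
// mirroring the update in the registration loop above
void accumulate(Mat3& Rg, Vec3& Tg, const Mat3& Rs, const Vec3& Ts)
{
    Rg = mul(Rs, Rg);
    Tg = mul(Rs, Tg);
    for (int i = 0; i < 3; ++i)
        Tg[i] += Ts[i];
}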
Example #5
void TestNormalDistribution::testClone()
{
   NormalDistribution distSigmaWithNumDeviations( *mean, *stdDev, numDevs );
   NormalDistribution distSigmaWithoutNumDeviations( *mean, *stdDev );
   NormalDistribution distBoundsWithoutNumDeviations( lb, ub );
   NormalDistribution distBoundsWithNumDeviations( lb, ub, numDevs );

   // call clone and test results
   NormalDistribution* rtn = (NormalDistribution*)distSigmaWithNumDeviations.clone();

   _test( rtn->lowerBound() == distSigmaWithNumDeviations.lowerBound() );
   _test( rtn->upperBound() == distSigmaWithNumDeviations.upperBound() );
   _test( rtn->mean() == distSigmaWithNumDeviations.mean() );
   _test( rtn->stdDev() == distSigmaWithNumDeviations.stdDev() );
   _test( rtn->typeName() == distSigmaWithNumDeviations.typeName() );

   // clean up memory
   delete rtn;

   // call clone and test results
   rtn = (NormalDistribution*)distSigmaWithoutNumDeviations.clone();

   _test( rtn->lowerBound() == distSigmaWithoutNumDeviations.lowerBound() );
   _test( rtn->upperBound() == distSigmaWithoutNumDeviations.upperBound() );
   _test( rtn->mean() == distSigmaWithoutNumDeviations.mean() );
   _test( rtn->stdDev() == distSigmaWithoutNumDeviations.stdDev() );
   _test( rtn->typeName() == distSigmaWithoutNumDeviations.typeName() );

   // clean up memory
   delete rtn;

   // call clone and test results
   rtn = (NormalDistribution*)distBoundsWithoutNumDeviations.clone();

   _test( rtn->lowerBound() == distBoundsWithoutNumDeviations.lowerBound() );
   _test( rtn->upperBound() == distBoundsWithoutNumDeviations.upperBound() );
   _test( rtn->mean() == distBoundsWithoutNumDeviations.mean() );
   _test( rtn->stdDev() == distBoundsWithoutNumDeviations.stdDev() );
   _test( rtn->typeName() == distBoundsWithoutNumDeviations.typeName() );

   // clean up memory
   delete rtn;

   // call clone and test results
   rtn = (NormalDistribution*)distBoundsWithNumDeviations.clone();

   _test( rtn->lowerBound() == distBoundsWithNumDeviations.lowerBound() );
   _test( rtn->upperBound() == distBoundsWithNumDeviations.upperBound() );
   _test( rtn->mean() == distBoundsWithNumDeviations.mean() );
   _test( rtn->stdDev() == distBoundsWithNumDeviations.stdDev() );
   _test( rtn->typeName() == distBoundsWithNumDeviations.typeName() );

   // clean up memory
   delete rtn;
}
Example #6
double Pulsar::GaussianBaseline::get_variance_correction (double sigma)
{
  NormalDistribution normal;
  return 1.0 / normal.moment2 (-sigma, sigma);
}
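If moment2(-sigma, sigma) is the second moment of a unit normal restricted to [-sigma, sigma] (an assumption; the Pulsar source defines it), then this correction rescales a variance estimated only from samples clipped at +/-sigma back to the full-distribution value. A small self-contained sketch using the closed form of that integral:

#include <cmath>
#include <cstdio>

// Second moment of a unit normal over [-s, s], assuming this is what
// Pulsar's normal.moment2(-s, s) computes; in closed form,
//   integral_{-s}^{s} x^2 phi(x) dx
//     = erf(s/sqrt(2)) - s*sqrt(2/pi)*exp(-s*s/2)
double unit_normal_moment2(double s)
{
    const double pi = 4.0 * std::atan(1.0);
    return std::erf(s / std::sqrt(2.0))
         - s * std::sqrt(2.0 / pi) * std::exp(-s * s / 2.0);
}

int main()
{
    // e.g. clipping at 1 sigma keeps ~68% of the mass but only ~20% of the
    // variance, so the correction factor comes out at roughly 5
    double sigma = 1.0;
    std::printf("variance correction at sigma = %g: %f\n",
                sigma, 1.0 / unit_normal_moment2(sigma));
    return 0;
}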
Example #7
int main ()
{
  double a = 1.6;
  
  NormalDistribution normal;

  if (normal.density (a) <= normal.density (a+1)) {
    fprintf (stderr, "probability is not decreasing\n");
    return -1;
  }

  if (fabs(normal.density (a) - normal.density (-a)) > 1e-12) {
    fprintf (stderr, "probability is not symmetric\n");
    fprintf (stderr, "a=%lf; prob(a)=%lf; prob(-a)=%lf\n", a,
	     normal.density (a), normal.density (-a));
    return -1;
  }

  if (normal.cumulative_distribution (a) >= normal.cumulative_distribution (a+1)) {
    fprintf (stderr, "cummulative is not increasing\n");
    return -1;
  }

  if (fabs(normal.cumulative_distribution (a) + normal.cumulative_distribution (-a) -1) > 1e-12) {
    fprintf (stderr, "cummulative is not anti-symmetric about 0.5\n");
    fprintf (stderr, "a=%lf; cummulative(a)=%lf; cummulative(-a)=%lf\n", a,
	     normal.cumulative_distribution (a), normal.cumulative_distribution (-a));
    return -1;
  }

  if (normal.cumulative_distribution (0) != 0.5) {
    fprintf (stderr, "cummulative(0) != 0.5\n");
    return -1;
  }

  fprintf (stderr, "all normal distribution functions pass tests\n");

  return 0;
}