Example 1
bool AutoSegmentationTools::frontPropagationBasedSegmentation(GenericIndexedCloudPersist* theCloud,
                                                                bool signedSF,
                                                                DistanceType minSeedDist,
                                                                uchar octreeLevel,
                                                                ReferenceCloudContainer& theSegmentedLists,
                                                                GenericProgressCallback* progressCb,
                                                                DgmOctree* _theOctree,
                                                                bool applyGaussianFilter,
                                                                float alpha)
{
	if (!theCloud)
        return false;
	unsigned numberOfPoints = theCloud->size();
	if (numberOfPoints<1)
        return false;

	//compute the octree
	DgmOctree* theOctree = _theOctree;
	if (!theOctree)
	{
		theOctree = new DgmOctree(theCloud);
		if (theOctree->build(progressCb)<1)
		{
			delete theOctree;
			return false;
		}
	}

	ScalarField* theDists = new ScalarField("distances",true);
	if (!theDists->reserve(numberOfPoints))
	{
		theDists->release();
		if (!_theOctree)
			delete theOctree;
		return false;
	}
	//backup the current scalar field (the gradient computation will overwrite it)
	theCloud->placeIteratorAtBegining();
	for (unsigned k=0; k<numberOfPoints; ++k)
		theDists->addElement(theCloud->getPointScalarValue(k));

	//compute the gradient (this will overwrite the cloud's scalar field)
	if (ScalarFieldTools::computeScalarFieldGradient(theCloud,signedSF,true,true,progressCb,theOctree)<0)
	{
		theDists->release();
		if (!_theOctree)
			delete theOctree;
		return false;
	}

	//and smooth the result
	if (applyGaussianFilter)
	{
		uchar level = theOctree->findBestLevelForAGivenPopulationPerCell(NUMBER_OF_POINTS_FOR_GRADIENT_COMPUTATION);
		float cellSize = theOctree->getCellSize(level);
        ScalarFieldTools::applyScalarFieldGaussianFilter(cellSize*0.33f,theCloud,signedSF,-1,progressCb,theOctree);
	}

	DistanceType maxDist;
	unsigned seedPoints = 0;
	unsigned numberOfSegmentedLists = 0;

	//the propagation will be done with the Fast Marching algorithm
	FastMarchingForPropagation* fm = new FastMarchingForPropagation();

	fm->setJumpCoef(50.0);
	fm->setDetectionThreshold(alpha);

	int result = fm->init(theCloud,theOctree,octreeLevel);
	int octreeLength = OCTREE_LENGTH(octreeLevel)-1;

	if (result<0)
	{
		theDists->release();
		if (!_theOctree)
			delete theOctree;
		delete fm;
		return false;
	}

	if (progressCb)
	{
		progressCb->reset();
		progressCb->setMethodTitle("FM Propagation");
		char buffer[256];
		sprintf(buffer,"Octree level: %i\nNumber of points: %i",octreeLevel,numberOfPoints);
		progressCb->setInfo(buffer);
		progressCb->start();
	}

	unsigned maxDistIndex=0,begin = 0;
	CCVector3 startPoint;

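	//Overall strategy of the loop below: repeatedly pick the point whose original scalar
	//value (backed up in 'theDists') is the largest one above 'minSeedDist' among the points
	//whose current scalar value is still valid, seed the Fast Marching front in the octree
	//cell that contains it, propagate over the gradient field, and extract the reached points
	//as a new segment; stop when no remaining point exceeds 'minSeedDist'.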
	while (true)
	{
		maxDist = HIDDEN_VALUE;

		//look for the first point whose original scalar value is greater than or equal to 'minSeedDist'
		while (begin<numberOfPoints)
		{
			const unsigned index = begin++;
			const CCVector3 *thePoint = theCloud->getPoint(index);
			const DistanceType& theDistance = theDists->getValue(index);

			//the current scalar value must be valid (non negative) and the original value high enough
			if ((theCloud->getPointScalarValue(index)>=0.0)&&(theDistance >= minSeedDist))
			{
				maxDist = theDistance;
				startPoint = *thePoint;
				maxDistIndex = index;
				break;
			}
		}

		//no more points with a high enough value!
		if (maxDist<minSeedDist)
            break;

		//finish looking for the maximum
		for (unsigned i=begin;i<numberOfPoints;++i)
		{
			const CCVector3 *thePoint = theCloud->getPoint(i);
			const DistanceType& theDistance = theDists->getValue(i);

			if ((theCloud->getPointScalarValue(i)>=0.0)&&(theDistance > maxDist))
			{
				maxDist = theDistance;
				startPoint = *thePoint;
				maxDistIndex = i;
			}
		}

		//start the propagation from the point with the maximum value
		//propagateFromPoint(aList,_GradientNorms,maxDistIndex,octreeLevel,_gui);

		int pos[3];
		theOctree->getTheCellPosWhichIncludesThePoint(&startPoint,pos,octreeLevel);
		//clipping (important!)
		pos[0] = std::min(octreeLength,pos[0]);
		pos[1] = std::min(octreeLength,pos[1]);
		pos[2] = std::min(octreeLength,pos[2]);
		fm->setSeedCell(pos);
		++seedPoints;

		int result = fm->propagate();

		//if the propagation went well
		if (result>=0)
		{
			//terminate it (i.e. extract the corresponding points)
			ReferenceCloud* newCloud = fm->extractPropagatedPoints();

			//if the extraction succeeded, add the new list to the set of segmented lists
			if (newCloud)
			{
				theSegmentedLists.push_back(newCloud);
				++numberOfSegmentedLists;
			}

			if (progressCb)
                progressCb->update(float(numberOfSegmentedLists % 100));

			fm->endPropagation();

			//break;
		}

		if (maxDistIndex == begin)
            ++begin;
	}

	if (progressCb)
		progressCb->stop();

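	//restore the original scalar field values (they were overwritten by the gradient computation)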
	for (unsigned i=0;i<numberOfPoints;++i)
		theCloud->setPointScalarValue(i,theDists->getValue(i));

	delete fm;
	theDists->release();
	theDists=0;

	if (!_theOctree)
		delete theOctree;

	return true;
}
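
A minimal usage sketch for the function above (assumptions: the relevant CCLib headers are included, 'cloud' is an already-filled cloud implementing GenericIndexedCloudPersist with an active scalar field, ReferenceCloudContainer behaves like a vector of ReferenceCloud pointers, and all numeric parameter values are illustrative only):

bool segmentBySteepness(GenericIndexedCloudPersist* cloud)
{
	ReferenceCloudContainer segments; //one ReferenceCloud* per extracted segment

	bool ok = AutoSegmentationTools::frontPropagationBasedSegmentation(
					cloud,
					true,	//signedSF: the active scalar field is signed
					0.05f,	//minSeedDist: minimum scalar value for a propagation seed
					7,		//octreeLevel: octree level used by the Fast Marching grid
					segments,
					0,		//progressCb: no progress callback
					0,		//_theOctree: let the function build its own octree
					true,	//applyGaussianFilter: smooth the gradient field first
					0.1f);	//alpha: detection threshold of the propagation

	//the function transfers ownership of the extracted segments to the caller
	for (size_t i = 0; i < segments.size(); ++i)
		delete segments[i];

	return ok;
}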
Example 2
ICPRegistrationTools::CC_ICP_RESULT ICPRegistrationTools::RegisterClouds(GenericIndexedCloudPersist* _modelCloud,
																			GenericIndexedCloudPersist* _dataCloud,
																			PointProjectionTools::Transformation& transform,
																			CC_ICP_CONVERGENCE_TYPE convType,
																			double minErrorDecrease,
																			unsigned nbMaxIterations,
																			double& finalError,
																			GenericProgressCallback* progressCb/*=0*/,
																			bool filterOutFarthestPoints/*=false*/,
																			unsigned samplingLimit/*=20000*/,
																			ScalarField* modelWeights/*=0*/,
																			ScalarField* dataWeights/*=0*/)
{
    assert(_modelCloud && _dataCloud);

    finalError = -1.0;

	//MODEL CLOUD (reference, won't move)
	GenericIndexedCloudPersist* modelCloud=_modelCloud;
	ScalarField* _modelWeights=modelWeights;
	{
		if (_modelCloud->size()>samplingLimit) //shall we resample the clouds? (speed increase)
		{
			ReferenceCloud* subModelCloud = CloudSamplingTools::subsampleCloudRandomly(_modelCloud,samplingLimit);
			if (subModelCloud && modelWeights)
			{
				_modelWeights = new ScalarField("ResampledModelWeights",modelWeights->isPositive());
				unsigned realCount = subModelCloud->size();
				if (_modelWeights->reserve(realCount))
				{
					for (unsigned i=0;i<realCount;++i)
						_modelWeights->addElement(modelWeights->getValue(subModelCloud->getPointGlobalIndex(i)));
					_modelWeights->computeMinAndMax();
				}
				else
				{
					//not enough memory
					_modelWeights->release();
					_modelWeights = 0;
					delete subModelCloud;
					subModelCloud = 0;
				}
			}
			modelCloud = subModelCloud;
		}
		if (!modelCloud) //something bad happened
			return ICP_ERROR_NOT_ENOUGH_MEMORY;
	}

	//DATA CLOUD (will move)
	ReferenceCloud* dataCloud=0;
	ScalarField* _dataWeights=dataWeights;
	SimpleCloud* rotatedDataCloud=0; //temporary structure (rotated vertices)
	{
		if (_dataCloud->size()>samplingLimit) //shall we resample the clouds? (speed increase)
		{
			dataCloud = CloudSamplingTools::subsampleCloudRandomly(_dataCloud,samplingLimit);
			if (dataCloud && dataWeights)
			{
				_dataWeights = new ScalarField("ResampledDataWeights",dataWeights->isPositive());
				unsigned realCount = dataCloud->size();
				if (_dataWeights->reserve(realCount))
				{
					for (unsigned i=0;i<realCount;++i)
						_dataWeights->addElement(dataWeights->getValue(dataCloud->getPointGlobalIndex(i)));
					_dataWeights->computeMinAndMax();
				}
				else
				{
					//not enough memory
					_dataWeights->release();
					_dataWeights = 0;
					delete dataCloud;
					dataCloud = 0;
				}
			}
		}
		else
		{
			//create a 'fake' reference cloud with all points
			dataCloud = new ReferenceCloud(_dataCloud);
			if (dataCloud->reserve(_dataCloud->size()))
			{
				dataCloud->addPointIndex(0,_dataCloud->size());
			}
			else //not enough memory
			{
				delete dataCloud;
				dataCloud=0;
			}
		}

		if (!dataCloud || !dataCloud->enableScalarField()) //something bad happened
		{
			if (dataCloud)
				delete dataCloud;
			if (modelCloud && modelCloud != _modelCloud)
				delete modelCloud;
			if (_modelWeights && _modelWeights!=modelWeights)
				_modelWeights->release();
			return ICP_ERROR_NOT_ENOUGH_MEMORY;
		}
	}

	//Closest Point Set (see ICP algorithm)
	ReferenceCloud* CPSet = new ReferenceCloud(modelCloud);
	ScalarField* CPSetWeights = _modelWeights ? new ScalarField("CPSetWeights",_modelWeights->isPositive()) : 0;

	//algorithm result
	CC_ICP_RESULT result = ICP_NOTHING_TO_DO;
	unsigned iteration = 0;
	double error = 0.0;

    //we compute the initial distance between the two clouds (and the CPSet by the way)
    dataCloud->forEach(ScalarFieldTools::razDistsToHiddenValue);
	DistanceComputationTools::Cloud2CloudDistanceComputationParams params;
	params.CPSet = CPSet;
	if (DistanceComputationTools::computeHausdorffDistance(dataCloud,modelCloud,params,progressCb)>=0)
	{
		//12/11/2008 - A.BEY: ICP guarantees only the decrease of the squared distances sum (not the distances sum)
		error = DistanceComputationTools::computeMeanSquareDist(dataCloud);
	}
	else
	{
	    //if an error occurred during the distance computation...
		error = -1.0;
		result = ICP_ERROR_DIST_COMPUTATION;
	}

	if (error > 0.0)
	{
#ifdef _DEBUG
		FILE* fp = fopen("registration_trace_log.txt","wt");
		if (fp)
			fprintf(fp,"Initial error: %f\n",error);
#endif

		double lastError=error,initialErrorDelta=0.0,errorDelta=0.0;
		result = ICP_APPLY_TRANSFO; //as soon as we do at least one iteration, we'll have to apply a transformation

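		//Each pass of the loop below is one ICP iteration: optionally discard the farthest
		//points (beyond mu + 3*sigma of the current distance distribution), update the CPSet
		//weights, estimate the best transformation between the data cloud and its closest-point
		//set, apply it, then recompute the distances to the model and test the stop criterion.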
		while (true)
		{
			++iteration;

			//regarding the progress bar
			if (progressCb && iteration>1) //on the first iteration, we do... nothing
			{
				char buffer[256];
				//then on the second iteration, we init/show it
				if (iteration==2)
				{
					initialErrorDelta = errorDelta;

					progressCb->reset();
					progressCb->setMethodTitle("Clouds registration");
					sprintf(buffer,"Initial mean square error = %f\n",lastError);
					progressCb->setInfo(buffer);
					progressCb->start();
				}
				else //and afterwards we simply keep updating it
				{
                    sprintf(buffer,"Mean square error = %f [%f]\n",error,-errorDelta);
					progressCb->setInfo(buffer);
					progressCb->update((float)((initialErrorDelta-errorDelta)/(initialErrorDelta-minErrorDecrease)*100.0));
				}

				if (progressCb->isCancelRequested())
                    break;
			}

			//shall we remove points with distance above a given threshold?
			if (filterOutFarthestPoints)
			{
				NormalDistribution N;
				N.computeParameters(dataCloud,false);
				if (N.isValid())
				{
					DistanceType mu,sigma2;
					N.getParameters(mu,sigma2);

					ReferenceCloud* c = new ReferenceCloud(dataCloud->getAssociatedCloud());
					ReferenceCloud* newCPSet = new ReferenceCloud(CPSet->getAssociatedCloud()); //we must also update the CPSet!
					ScalarField* newdataWeights = (_dataWeights ? new ScalarField("ResampledDataWeights",_dataWeights->isPositive()) : 0);
				//unsigned realCount = dataCloud->size();
				//if (_dataWeights->reserve(realCount))
				//{
				//	for (unsigned i=0;i<realCount;++i)
				//		_dataWeights->addElement(dataWeights->getValue(dataCloud->getPointGlobalIndex(i)));
				//	_dataWeights->computeMinAndMax();
				//}
				//else
				//{
				//	//not enough memory
				//	delete dataCloud;
				//	dataCloud=0;
				//}

					unsigned n=dataCloud->size();
					if (!c->reserve(n) || !newCPSet->reserve(n) || (newdataWeights && !newdataWeights->reserve(n)))
					{
						//not enough memory
						delete c;
						delete newCPSet;
						if (newdataWeights)
							newdataWeights->release();
						result = ICP_ERROR_REGISTRATION_STEP;
						break;
					}

					//we keep only the points with "not too high" distances
					DistanceType maxDist = mu+3.0f*sqrt(sigma2);
					unsigned realSize=0;
					for (unsigned i=0;i<n;++i)
					{
						unsigned index = dataCloud->getPointGlobalIndex(i);
						if (dataCloud->getAssociatedCloud()->getPointScalarValue(index)<maxDist)
						{
							c->addPointIndex(index);
							newCPSet->addPointIndex(CPSet->getPointGlobalIndex(i));
							if (newdataWeights)
								newdataWeights->addElement(_dataWeights->getValue(index));
							++realSize;
						}
					}

					//resize should be ok as we have called reserve first
					c->resize(realSize); //should always be ok as counter<n
					newCPSet->resize(realSize); //idem
					if (newdataWeights)
						newdataWeights->resize(realSize); //idem

					//replace old structures by new ones
					delete CPSet;
					CPSet=newCPSet;
					delete dataCloud;
					dataCloud=c;
					if (_dataWeights)
					{
						_dataWeights->release();
						_dataWeights = newdataWeights;
					}
				}
			}

			//update CPSet weights (if any)
			if (_modelWeights)
			{
				unsigned count=CPSet->size();
				assert(CPSetWeights);
				if (CPSetWeights->currentSize()!=count)
				{
					if (!CPSetWeights->resize(count))
					{
						result = ICP_ERROR_REGISTRATION_STEP;
						break;
					}
				}
				for (unsigned i=0;i<count;++i)
					CPSetWeights->setValue(i,_modelWeights->getValue(CPSet->getPointGlobalIndex(i)));
				CPSetWeights->computeMinAndMax();
			}

            PointProjectionTools::Transformation currentTrans;
			//if registration procedure fails
            if (!RegistrationTools::RegistrationProcedure(dataCloud, CPSet, currentTrans, _dataWeights, _modelWeights))
			{
				result = ICP_ERROR_REGISTRATION_STEP;
				break;
			}

			//get rotated data cloud
			if (!rotatedDataCloud || filterOutFarthestPoints)
			{
				//we create a new structure, with rotated points
				SimpleCloud* newDataCloud = PointProjectionTools::applyTransformation(dataCloud,currentTrans);
				if (!newDataCloud)
				{
					//not enough memory
					result = ICP_ERROR_REGISTRATION_STEP;
					break;
				}
				//update dataCloud
				if (rotatedDataCloud)
					delete rotatedDataCloud;
				rotatedDataCloud = newDataCloud;
				delete dataCloud;
				dataCloud = new ReferenceCloud(rotatedDataCloud);
				if (!dataCloud->reserve(rotatedDataCloud->size()))
				{
				//not enough memory
					result = ICP_ERROR_REGISTRATION_STEP;
					break;
				}
				dataCloud->addPointIndex(0,rotatedDataCloud->size());
			}
			else
			{
				//we simply have to rotate the existing temporary cloud
				rotatedDataCloud->applyTransformation(currentTrans);
			}

			//compute (new) distances to model
			params.CPSet = CPSet;
			if (DistanceComputationTools::computeHausdorffDistance(dataCloud,modelCloud,params)<0)
			{
				//an error occurred during the distance computation...
				result = ICP_ERROR_REGISTRATION_STEP;
				break;
			}

			lastError = error;
            //12/11/2008 - A.BEY: ICP guarantees only the decrease of the squared distances sum (not the distances sum)
			error = DistanceComputationTools::computeMeanSquareDist(dataCloud);
			finalError = (error>0 ? sqrt(error) : error);

#ifdef _DEBUG
			if (fp)
			    fprintf(fp,"Iteration #%i --> error: %f\n",iteration,error);
#endif

            //error update
			errorDelta = lastError-error;

            //is it better?
            if (errorDelta > 0.0)
            {
			    //we update global transformation matrix
			    if (currentTrans.R.isValid())
			    {
			        if (transform.R.isValid())
                        transform.R = currentTrans.R * transform.R;
                    else
                        transform.R = currentTrans.R;
                    transform.T = currentTrans.R * transform.T;
			    }
                transform.T += currentTrans.T;
            }

            //stop criterion
            if ((errorDelta < 0.0) || //error increase
                (convType == MAX_ERROR_CONVERGENCE && errorDelta < minErrorDecrease) || //convergence reached
                (convType == MAX_ITER_CONVERGENCE && iteration > nbMaxIterations)) //max iteration reached
			{
				break;
			}

		}

		if (progressCb)
            progressCb->stop();

#ifdef _DEBUG
		if (fp)
		{
			fclose(fp);
			fp=0;
		}
#endif
	}

	if (CPSet)
		delete CPSet;
	CPSet=0;
	if (CPSetWeights)
		CPSetWeights->release();

	//release memory
	if (modelCloud && modelCloud != _modelCloud)
        delete modelCloud;
	if (_modelWeights && _modelWeights!=modelWeights)
		_modelWeights->release();
	if (dataCloud)
		delete dataCloud;
	if (_dataWeights && _dataWeights!=dataWeights)
		_dataWeights->release();
	if (rotatedDataCloud)
		delete rotatedDataCloud;

	return result;
}
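
A minimal usage sketch for RegisterClouds (assumptions: the relevant CCLib headers are included, 'model' and 'data' are already-filled clouds, the convergence and result enums live in the ICPRegistrationTools scope as the qualified return type suggests, and the threshold values are illustrative only):

ICPRegistrationTools::CC_ICP_RESULT alignClouds(GenericIndexedCloudPersist* model,
												GenericIndexedCloudPersist* data,
												PointProjectionTools::Transformation& transform)
{
	double finalError = -1.0;

	//stop as soon as the mean square error decreases by less than 1e-6 between two iterations
	ICPRegistrationTools::CC_ICP_RESULT result =
		ICPRegistrationTools::RegisterClouds(	model,
												data,
												transform,
												ICPRegistrationTools::MAX_ERROR_CONVERGENCE,
												1.0e-6,	//minErrorDecrease
												0,		//nbMaxIterations (ignored with this convergence type)
												finalError);

	if (result == ICPRegistrationTools::ICP_APPLY_TRANSFO)
	{
		//transform.R / transform.T now hold the accumulated rotation and translation that
		//register 'data' onto 'model', and finalError holds the resulting RMS distance
	}

	return result;
}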
Example 3
int ScalarFieldTools::computeScalarFieldGradient(	GenericIndexedCloudPersist* theCloud,
													PointCoordinateType radius,
													bool euclideanDistances,
													bool sameInAndOutScalarField/*=false*/,
													GenericProgressCallback* progressCb/*=0*/,
													DgmOctree* theCloudOctree/*=0*/)
{
	if (!theCloud)
        return -1;

	DgmOctree* theOctree = theCloudOctree;
	if (!theOctree)
	{
		theOctree = new DgmOctree(theCloud);
		if (theOctree->build(progressCb)<1)
		{
			delete theOctree;
			return -2;
		}
	}

	unsigned char octreeLevel = 0;
	if (radius <= 0)
	{
		octreeLevel = theOctree->findBestLevelForAGivenPopulationPerCell(AVERAGE_NUMBER_OF_POINTS_FOR_GRADIENT_COMPUTATION);
		radius = theOctree->getCellSize(octreeLevel);
	}
	else
	{
		octreeLevel = theOctree->findBestLevelForAGivenNeighbourhoodSizeExtraction(radius);
	}

	ScalarField* theGradientNorms = new ScalarField("gradient norms");
	ScalarField* _theGradientNorms = 0;

	//'IN' and 'OUT' scalar fields are the same
	if (sameInAndOutScalarField)
	{
		if (!theGradientNorms->reserve(theCloud->size())) //not enough memory
		{
			if (!theCloudOctree)
				delete theOctree;
			theGradientNorms->release();
			return -3;
		}
		_theGradientNorms = theGradientNorms;
	}
	else
	//'IN' and 'OUT' scalar fields are different (default mode)
	{
		//initialize the distances (the 'IN' scalar field, in write access) to receive the gradient norms
		if (!theCloud->enableScalarField())
		{
			if (!theCloudOctree)
				delete theOctree;
			theGradientNorms->release();
			return -4;
		}
	}

	//structure containing the additional parameters
	void* additionalParameters[3] = {	static_cast<void*>(&euclideanDistances),
										static_cast<void*>(&radius),
										static_cast<void*>(_theGradientNorms)
	};

	int result = 0;

	if (theOctree->executeFunctionForAllCellsAtLevel(	octreeLevel,
														computeMeanGradientOnPatch,
														additionalParameters,
														true,
														progressCb,
														"Gradient Computation") == 0)
	{
		//something went wrong
		result = -5;
	}

	if (!theCloudOctree)
        delete theOctree;

	theGradientNorms->release();
	theGradientNorms=0;

    return result;
}
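
A minimal usage sketch for computeScalarFieldGradient (assumptions: the relevant CCLib headers are included, 'cloud' already carries the scalar field whose gradient is wanted, and a non-positive radius lets the function derive one from the octree cell size, as in the code above):

int computeGradientNorms(GenericIndexedCloudPersist* cloud)
{
	//a non-positive radius makes the function pick the neighbourhood size itself
	//(the cell size of an automatically chosen octree level)
	PointCoordinateType radius = 0;

	int result = ScalarFieldTools::computeScalarFieldGradient(	cloud,
																radius,
																true);	//euclideanDistances

	//on success (result == 0) the gradient norms have been written into the cloud's
	//'IN' scalar field; negative codes match the ones returned by the function above
	//(-1 null cloud, -2 octree build failure, -3/-4 memory/scalar field failure,
	//-5 cell-wise processing failure)
	return result;
}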