bool ChunkedPointCloud::enableScalarField()
{
	ScalarField* currentInScalarField = getCurrentInScalarField();

	if (!currentInScalarField)
	{
		//if we get here, it means that either the caller has forgotten to create
		//(and assign) a scalar field to the cloud, or that we are in a compatibility
		//mode with the old/basic behaviour: a unique SF for everything (input/output)

		//we look for an already existing "default" scalar field
		m_currentInScalarFieldIndex = getScalarFieldIndexByName("Default");
		if (m_currentInScalarFieldIndex < 0)
		{
			//if there is none, we create it
			m_currentInScalarFieldIndex = addScalarField("Default");
			if (m_currentInScalarFieldIndex < 0) //something went wrong
				return false;
		}

		currentInScalarField = getCurrentInScalarField();
		assert(currentInScalarField);
	}

	//if there's no output scalar field either, we set this new scalar field as output as well
	if (!getCurrentOutScalarField())
		m_currentOutScalarFieldIndex = m_currentInScalarFieldIndex;

	return currentInScalarField->resize(m_points->capacity());
}
int ChunkedPointCloud::addScalarField(const char* uniqueName)
{
	//we don't accept two SFs with the same name!
	if (getScalarFieldIndexByName(uniqueName) >= 0)
		return -1;

	//create the requested scalar field
	ScalarField* sf = new ScalarField(uniqueName);
	if (!sf || (size() && !sf->resize(size())))
	{
		//not enough memory!
		if (sf)
			sf->release();
		return -1;
	}

	try
	{
		//we don't want 'm_scalarFields' to grow by 50% each time! (default behavior of std::vector::push_back)
		m_scalarFields.resize(m_scalarFields.size()+1);
	}
	catch (const std::bad_alloc&) //out of memory
	{
		sf->release();
		return -1;
	}

	m_scalarFields.back() = sf;
	sf->link();

	return (int)m_scalarFields.size()-1;
}
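//A minimal usage sketch (illustration only, not part of the library): how a caller
//would typically combine addScalarField() and enableScalarField(). The cloud instance,
//the "Intensity" field name and the use of setCurrentInScalarField() are assumptions
//made for this example, not something mandated by the code above.
static bool exampleScalarFieldSetup(ChunkedPointCloud& cloud)
{
	int sfIdx = cloud.addScalarField("Intensity"); //returns -1 on a name clash or memory failure
	if (sfIdx < 0)
		return false;

	cloud.setCurrentInScalarField(sfIdx); //make it the current 'input' scalar field
	if (!cloud.enableScalarField()) //resizes the SF to the cloud capacity
		return false;

	//values can then be written with cloud.setPointScalarValue(i,value)
	return true;
}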
bool AutoSegmentationTools::frontPropagationBasedSegmentation(	GenericIndexedCloudPersist* theCloud,
																ScalarType minSeedDist,
																uchar octreeLevel,
																ReferenceCloudContainer& theSegmentedLists,
																GenericProgressCallback* progressCb,
																DgmOctree* inputOctree,
																bool applyGaussianFilter,
																float alpha)
{
	unsigned numberOfPoints = (theCloud ? theCloud->size() : 0);
	if (numberOfPoints == 0)
		return false;

	//compute the octree if none was provided
	DgmOctree* theOctree = inputOctree;
	if (!theOctree)
	{
		theOctree = new DgmOctree(theCloud);
		if (theOctree->build(progressCb) < 1)
		{
			delete theOctree;
			return false;
		}
	}

	//compute the gradient (this overwrites the distance field)
	if (ScalarFieldTools::computeScalarFieldGradient(theCloud,true,true,progressCb,theOctree) < 0)
	{
		if (!inputOctree)
			delete theOctree;
		return false;
	}

	//then smooth the result
	if (applyGaussianFilter)
	{
		uchar level = theOctree->findBestLevelForAGivenPopulationPerCell(NUMBER_OF_POINTS_FOR_GRADIENT_COMPUTATION);
		PointCoordinateType cellSize = theOctree->getCellSize(level);
		ScalarFieldTools::applyScalarFieldGaussianFilter(static_cast<float>(cellSize/3),theCloud,-1,progressCb,theOctree);
	}

	unsigned seedPoints = 0;
	unsigned numberOfSegmentedLists = 0;

	//the propagation itself is done with Fast Marching
	FastMarchingForPropagation* fm = new FastMarchingForPropagation();
	fm->setJumpCoef(50.0);
	fm->setDetectionThreshold(alpha);

	int result = fm->init(theCloud,theOctree,octreeLevel);
	int octreeLength = OCTREE_LENGTH(octreeLevel)-1;

	if (result < 0)
	{
		if (!inputOctree)
			delete theOctree;
		delete fm;
		return false;
	}

	if (progressCb)
	{
		progressCb->reset();
		progressCb->setMethodTitle("FM Propagation");
		char buffer[256];
		sprintf(buffer,"Octree level: %i\nNumber of points: %u",octreeLevel,numberOfPoints);
		progressCb->setInfo(buffer);
		progressCb->start();
	}

	ScalarField* theDists = new ScalarField("distances");
	{
		ScalarType d = theCloud->getPointScalarValue(0);
		if (!theDists->resize(numberOfPoints,true,d))
		{
			//not enough memory
			theDists->release();
			delete fm;
			if (!inputOctree)
				delete theOctree;
			return false;
		}
	}

	unsigned maxDistIndex = 0, begin = 0;
	CCVector3 startPoint;

	while (true)
	{
		ScalarType maxDist = NAN_VALUE;

		//we look for the first distance greater than or equal to "minSeedDist"
		while (begin < numberOfPoints)
		{
			const CCVector3 *thePoint = theCloud->getPoint(begin);
			const ScalarType& theDistance = theDists->getValue(begin);
			++begin;

			//FIXME DGM: what happens if SF is negative?!
			if (theCloud->getPointScalarValue(begin) >= 0 && theDistance >= minSeedDist)
			{
				maxDist = theDistance;
				startPoint = *thePoint;
				maxDistIndex = begin;
				break;
			}
		}

		//no more points with a sufficiently big distance!
		if (maxDist < minSeedDist)
			break;

		//we finish looking for the maximum
		for (unsigned i=begin; i<numberOfPoints; ++i)
		{
			const CCVector3 *thePoint = theCloud->getPoint(i);
			const ScalarType& theDistance = theDists->getValue(i);

			if (theCloud->getPointScalarValue(i) >= 0 && theDistance > maxDist)
			{
				maxDist = theDistance;
				startPoint = *thePoint;
				maxDistIndex = i;
			}
		}

		int pos[3];
		theOctree->getTheCellPosWhichIncludesThePoint(&startPoint,pos,octreeLevel);
		//clipping (important!)
		pos[0] = std::min(octreeLength,pos[0]);
		pos[1] = std::min(octreeLength,pos[1]);
		pos[2] = std::min(octreeLength,pos[2]);
		fm->setSeedCell(pos);
		++seedPoints;

		int result = fm->propagate();

		//if the propagation was successful
		if (result >= 0)
		{
			//we extract the corresponding points
			ReferenceCloud* newCloud = new ReferenceCloud(theCloud);
			if (fm->extractPropagatedPoints(newCloud) && newCloud->size() != 0)
			{
				theSegmentedLists.push_back(newCloud);
				++numberOfSegmentedLists;
			}
			else
			{
				//not enough memory?!
				delete newCloud;
				newCloud = 0;
			}

			if (progressCb)
				progressCb->update(float(numberOfSegmentedLists % 100));

			fm->cleanLastPropagation();

			//break;
		}

		if (maxDistIndex == begin)
			++begin;
	}

	if (progressCb)
		progressCb->stop();

	for (unsigned i=0; i<numberOfPoints; ++i)
		theCloud->setPointScalarValue(i,theDists->getValue(i));

	delete fm;
	fm = 0;

	theDists->release();
	theDists = 0;

	if (!inputOctree)
		delete theOctree;

	return true;
}
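//A minimal usage sketch (illustration only, not part of the library): running the
//front-propagation segmentation on a cloud whose active scalar field already holds
//the values to segment. The seed threshold (0.1), octree level (7) and alpha (0.01f)
//are arbitrary example values, and ReferenceCloudContainer is assumed to behave like
//a std::vector of ReferenceCloud pointers owned by the caller.
static bool exampleFrontPropagation(GenericIndexedCloudPersist* cloud)
{
	ReferenceCloudContainer segments;
	if (!AutoSegmentationTools::frontPropagationBasedSegmentation(cloud, static_cast<ScalarType>(0.1), 7, segments, 0, 0, true, 0.01f))
		return false;

	//...use the extracted subsets, then release them...
	while (!segments.empty())
	{
		delete segments.back();
		segments.pop_back();
	}

	return true;
}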
ICPRegistrationTools::CC_ICP_RESULT ICPRegistrationTools::RegisterClouds(	GenericIndexedCloudPersist* _modelCloud,
																			GenericIndexedCloudPersist* _dataCloud,
																			PointProjectionTools::Transformation& transform,
																			CC_ICP_CONVERGENCE_TYPE convType,
																			double minErrorDecrease,
																			unsigned nbMaxIterations,
																			double& finalError,
																			GenericProgressCallback* progressCb/*=0*/,
																			bool filterOutFarthestPoints/*=false*/,
																			unsigned samplingLimit/*=20000*/,
																			ScalarField* modelWeights/*=0*/,
																			ScalarField* dataWeights/*=0*/)
{
	assert(_modelCloud && _dataCloud);

	finalError = -1.0;

	//MODEL CLOUD (reference, won't move)
	GenericIndexedCloudPersist* modelCloud = _modelCloud;
	ScalarField* _modelWeights = modelWeights;
	{
		if (_modelCloud->size() > samplingLimit) //shall we resample the clouds? (speed increase)
		{
			ReferenceCloud* subModelCloud = CloudSamplingTools::subsampleCloudRandomly(_modelCloud,samplingLimit);
			if (subModelCloud && modelWeights)
			{
				_modelWeights = new ScalarField("ResampledModelWeights",modelWeights->isPositive());
				unsigned realCount = subModelCloud->size();
				if (_modelWeights->reserve(realCount))
				{
					for (unsigned i=0; i<realCount; ++i)
						_modelWeights->addElement(modelWeights->getValue(subModelCloud->getPointGlobalIndex(i)));
					_modelWeights->computeMinAndMax();
				}
				else
				{
					//not enough memory
					delete subModelCloud;
					subModelCloud = 0;
				}
			}
			modelCloud = subModelCloud;
		}
		if (!modelCloud) //something bad happened
			return ICP_ERROR_NOT_ENOUGH_MEMORY;
	}

	//DATA CLOUD (will move)
	ReferenceCloud* dataCloud = 0;
	ScalarField* _dataWeights = dataWeights;
	SimpleCloud* rotatedDataCloud = 0; //temporary structure (rotated vertices)
	{
		if (_dataCloud->size() > samplingLimit) //shall we resample the clouds? (speed increase)
		{
			dataCloud = CloudSamplingTools::subsampleCloudRandomly(_dataCloud,samplingLimit);
			if (dataCloud && dataWeights)
			{
				_dataWeights = new ScalarField("ResampledDataWeights",dataWeights->isPositive());
				unsigned realCount = dataCloud->size();
				if (_dataWeights->reserve(realCount))
				{
					for (unsigned i=0; i<realCount; ++i)
						_dataWeights->addElement(dataWeights->getValue(dataCloud->getPointGlobalIndex(i)));
					_dataWeights->computeMinAndMax();
				}
				else
				{
					//not enough memory
					delete dataCloud;
					dataCloud = 0;
				}
			}
		}
		else
		{
			//create a 'fake' reference cloud with all points
			dataCloud = new ReferenceCloud(_dataCloud);
			if (dataCloud->reserve(_dataCloud->size()))
			{
				dataCloud->addPointIndex(0,_dataCloud->size());
			}
			else //not enough memory
			{
				delete dataCloud;
				dataCloud = 0;
			}
		}

		if (!dataCloud || !dataCloud->enableScalarField()) //something bad happened
		{
			if (dataCloud)
				delete dataCloud;
			if (modelCloud && modelCloud != _modelCloud)
				delete modelCloud;
			if (_modelWeights && _modelWeights != modelWeights)
				_modelWeights->release();
			return ICP_ERROR_NOT_ENOUGH_MEMORY;
		}
	}

	//Closest Point Set (see ICP algorithm)
	ReferenceCloud* CPSet = new ReferenceCloud(modelCloud);
	ScalarField* CPSetWeights = _modelWeights ? new ScalarField("CPSetWeights",_modelWeights->isPositive()) : 0;

	//algorithm result
	CC_ICP_RESULT result = ICP_NOTHING_TO_DO;
	unsigned iteration = 0;
	double error = 0.0;

	//we compute the initial distance between the two clouds (and the CPSet by the way)
	dataCloud->forEach(ScalarFieldTools::razDistsToHiddenValue);
	DistanceComputationTools::Cloud2CloudDistanceComputationParams params;
	params.CPSet = CPSet;
	if (DistanceComputationTools::computeHausdorffDistance(dataCloud,modelCloud,params,progressCb) >= 0)
	{
		//12/11/2008 - A.BEY: ICP guarantees only the decrease of the squared distances sum (not the distances sum)
		error = DistanceComputationTools::computeMeanSquareDist(dataCloud);
	}
	else
	{
		//an error occurred during the distance computation...
		error = -1.0;
		result = ICP_ERROR_DIST_COMPUTATION;
	}

	if (error > 0.0)
	{
#ifdef _DEBUG
		FILE* fp = fopen("registration_trace_log.txt","wt");
		if (fp)
			fprintf(fp,"Initial error: %f\n",error);
#endif

		double lastError = error, initialErrorDelta = 0.0, errorDelta = 0.0;
		result = ICP_APPLY_TRANSFO; //as soon as we do at least one iteration, we'll have to apply a transformation

		while (true)
		{
			++iteration;

			//regarding the progress bar
			if (progressCb && iteration > 1) //on the first iteration, we do... nothing
			{
				char buffer[256];
				//then on the second iteration, we init/show it
				if (iteration == 2)
				{
					initialErrorDelta = errorDelta;

					progressCb->reset();
					progressCb->setMethodTitle("Clouds registration");
					sprintf(buffer,"Initial mean square error = %f\n",lastError);
					progressCb->setInfo(buffer);
					progressCb->start();
				}
				else //and afterwards we update it continuously
				{
					sprintf(buffer,"Mean square error = %f [%f]\n",error,-errorDelta);
					progressCb->setInfo(buffer);
					progressCb->update((float)((initialErrorDelta-errorDelta)/(initialErrorDelta-minErrorDecrease)*100.0));
				}

				if (progressCb->isCancelRequested())
					break;
			}

			//shall we remove points with a distance above a given threshold?
			if (filterOutFarthestPoints)
			{
				NormalDistribution N;
				N.computeParameters(dataCloud,false);
				if (N.isValid())
				{
					DistanceType mu,sigma2;
					N.getParameters(mu,sigma2);

					ReferenceCloud* c = new ReferenceCloud(dataCloud->getAssociatedCloud());
					ReferenceCloud* newCPSet = new ReferenceCloud(CPSet->getAssociatedCloud()); //we must also update the CPSet!
					ScalarField* newdataWeights = (_dataWeights ? new ScalarField("ResampledDataWeights",_dataWeights->isPositive()) : 0);
					//unsigned realCount = dataCloud->size();
					//if (_dataWeights->reserve(realCount))
					//{
					//	for (unsigned i=0;i<realCount;++i)
					//		_dataWeights->addElement(dataWeights->getValue(dataCloud->getPointGlobalIndex(i)));
					//	_dataWeights->computeMinAndMax();
					//}
					//else
					//{
					//	//not enough memory
					//	delete dataCloud;
					//	dataCloud=0;
					//}

					unsigned n = dataCloud->size();
					if (!c->reserve(n) || !newCPSet->reserve(n) || (newdataWeights && !newdataWeights->reserve(n)))
					{
						//not enough memory
						delete c;
						delete newCPSet;
						if (newdataWeights)
							newdataWeights->release();
						result = ICP_ERROR_REGISTRATION_STEP;
						break;
					}

					//we keep only the points with "not too high" distances
					DistanceType maxDist = mu + 3.0f*sqrt(sigma2);
					unsigned realSize = 0;
					for (unsigned i=0; i<n; ++i)
					{
						unsigned index = dataCloud->getPointGlobalIndex(i);
						if (dataCloud->getAssociatedCloud()->getPointScalarValue(index) < maxDist)
						{
							c->addPointIndex(index);
							newCPSet->addPointIndex(CPSet->getPointGlobalIndex(i));
							if (newdataWeights)
								newdataWeights->addElement(_dataWeights->getValue(index));
							++realSize;
						}
					}

					//resize should be ok as we have called reserve first
					c->resize(realSize); //should always be ok as realSize <= n
					newCPSet->resize(realSize); //idem
					if (newdataWeights)
						newdataWeights->resize(realSize); //idem

					//replace the old structures by the new ones
					delete CPSet;
					CPSet = newCPSet;
					delete dataCloud;
					dataCloud = c;
					if (_dataWeights)
					{
						_dataWeights->release();
						_dataWeights = newdataWeights;
					}
				}
			}

			//update the CPSet weights (if any)
			if (_modelWeights)
			{
				unsigned count = CPSet->size();
				assert(CPSetWeights);
				if (CPSetWeights->currentSize() != count)
				{
					if (!CPSetWeights->resize(count))
					{
						result = ICP_ERROR_REGISTRATION_STEP;
						break;
					}
				}
				for (unsigned i=0; i<count; ++i)
					CPSetWeights->setValue(i,_modelWeights->getValue(CPSet->getPointGlobalIndex(i)));
				CPSetWeights->computeMinAndMax();
			}

			PointProjectionTools::Transformation currentTrans;
			//if the registration procedure fails
			if (!RegistrationTools::RegistrationProcedure(dataCloud, CPSet, currentTrans, _dataWeights, _modelWeights))
			{
				result = ICP_ERROR_REGISTRATION_STEP;
				break;
			}

			//get the rotated data cloud
			if (!rotatedDataCloud || filterOutFarthestPoints)
			{
				//we create a new structure, with rotated points
				SimpleCloud* newDataCloud = PointProjectionTools::applyTransformation(dataCloud,currentTrans);
				if (!newDataCloud)
				{
					//not enough memory
					result = ICP_ERROR_REGISTRATION_STEP;
					break;
				}

				//update dataCloud
				if (rotatedDataCloud)
					delete rotatedDataCloud;
				rotatedDataCloud = newDataCloud;
				delete dataCloud;
				dataCloud = new ReferenceCloud(rotatedDataCloud);
				if (!dataCloud->reserve(rotatedDataCloud->size()))
				{
					//not enough memory
					result = ICP_ERROR_REGISTRATION_STEP;
					break;
				}
				dataCloud->addPointIndex(0,rotatedDataCloud->size());
			}
			else
			{
				//we simply have to rotate the existing temporary cloud
				rotatedDataCloud->applyTransformation(currentTrans);
			}

			//compute the (new) distances to the model
			params.CPSet = CPSet;
			if (DistanceComputationTools::computeHausdorffDistance(dataCloud,modelCloud,params) < 0)
			{
				//an error occurred during the distance computation...
				result = ICP_ERROR_REGISTRATION_STEP;
				break;
			}

			lastError = error;
			//12/11/2008 - A.BEY: ICP guarantees only the decrease of the squared distances sum (not the distances sum)
			error = DistanceComputationTools::computeMeanSquareDist(dataCloud);
			finalError = (error > 0 ? sqrt(error) : error);

#ifdef _DEBUG
			if (fp)
				fprintf(fp,"Iteration #%i --> error: %f\n",iteration,error);
#endif

			//error update
			errorDelta = lastError - error;

			//is it better?
			if (errorDelta > 0.0)
			{
				//we update the global transformation matrix
				if (currentTrans.R.isValid())
				{
					if (transform.R.isValid())
						transform.R = currentTrans.R * transform.R;
					else
						transform.R = currentTrans.R;
					transform.T = currentTrans.R * transform.T;
				}
				transform.T += currentTrans.T;
			}

			//stop criterion
			if (	(errorDelta < 0.0) || //error increase
					(convType == MAX_ERROR_CONVERGENCE && errorDelta < minErrorDecrease) || //convergence reached
					(convType == MAX_ITER_CONVERGENCE && iteration > nbMaxIterations)) //max iteration count reached
			{
				break;
			}
		}

		if (progressCb)
			progressCb->stop();

#ifdef _DEBUG
		if (fp)
		{
			fclose(fp);
			fp = 0;
		}
#endif
	}

	if (CPSet)
		delete CPSet;
	CPSet = 0;
	if (CPSetWeights)
		CPSetWeights->release();

	//release memory
	if (modelCloud && modelCloud != _modelCloud)
		delete modelCloud;
	if (_modelWeights && _modelWeights != modelWeights)
		_modelWeights->release();
	if (dataCloud)
		delete dataCloud;
	if (_dataWeights && _dataWeights != dataWeights)
		_dataWeights->release();
	if (rotatedDataCloud)
		delete rotatedDataCloud;

	return result;
}
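//A minimal usage sketch (illustration only, not part of the library): a typical
//RegisterClouds() call using error-based convergence and no weights. 'model' and
//'data' are hypothetical clouds, and the convergence threshold (1e-5) is an arbitrary
//example value; the resulting rigid transformation maps 'data' onto 'model'.
static bool exampleICPRegistration(GenericIndexedCloudPersist* model, GenericIndexedCloudPersist* data)
{
	PointProjectionTools::Transformation transform;
	double finalRMS = 0.0;

	ICPRegistrationTools::CC_ICP_RESULT result = ICPRegistrationTools::RegisterClouds(	model,
																						data,
																						transform,
																						ICPRegistrationTools::MAX_ERROR_CONVERGENCE,
																						1.0e-5,	//minimum error decrease between iterations
																						0,		//max iteration count (unused with error-based convergence)
																						finalRMS);

	//ICP_APPLY_TRANSFO means at least one iteration produced a transformation to apply
	return (result == ICPRegistrationTools::ICP_APPLY_TRANSFO);
}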