//Extracts the subset of points whose associated scalar value falls inside [minDist, maxDist]
//Returns 0 if an error occurred (invalid input or not enough memory)
ReferenceCloud* ManualSegmentationTools::segment(GenericIndexedCloudPersist* aCloud, ScalarType minDist, ScalarType maxDist)
{
	if (!aCloud)
	{
		assert(false);
		return 0;
	}

	ReferenceCloud* result = new ReferenceCloud(aCloud);

	//scan the whole cloud and keep the points whose scalar value is inside the interval
	const unsigned count = aCloud->size();
	for (unsigned i=0; i<count; ++i)
	{
		const ScalarType sfValue = aCloud->getPointScalarValue(i);
		if (sfValue >= minDist && sfValue <= maxDist)
		{
			if (!result->addPointIndex(i))
			{
				//not enough memory
				delete result;
				result = 0;
				break;
			}
		}
	}

	return result;
}
//Per-cell subsampling callback: keeps a single point per octree cell, chosen
//either randomly or as the point closest to the cell center
bool CloudSamplingTools::subsampleCellAtLevel(const DgmOctree::octreeCell& cell, void** additionalParameters)
{
	//unpack the additional parameters
	ReferenceCloud* cloud = (ReferenceCloud*)additionalParameters[0];
	SUBSAMPLING_CELL_METHOD subsamplingMethod = *((SUBSAMPLING_CELL_METHOD*)additionalParameters[1]);

	unsigned pointsCount = cell.points->size();
	unsigned chosenIndex = 0;

	if (subsamplingMethod == RANDOM_POINT)
	{
		//any point of the cell will do
		chosenIndex = (static_cast<unsigned>(rand()) % pointsCount);
	}
	else // subsamplingMethod == NEAREST_POINT_TO_CELL_CENTER
	{
		//compute the cell center
		PointCoordinateType center[3];
		cell.parentOctree->computeCellCenter(cell.truncatedCode,cell.level,center,true);

		//look for the point closest to the center
		ScalarType minDist = CCVector3::vdistance2(cell.points->getPoint(0)->u,center);
		for (unsigned i=1; i<pointsCount; ++i)
		{
			ScalarType dist = CCVector3::vdistance2(cell.points->getPoint(i)->u,center);
			if (dist < minDist)
			{
				chosenIndex = i;
				minDist = dist;
			}
		}
	}

	//add the selected point (via its global index) to the output cloud
	return cloud->addPointIndex(cell.points->getPointGlobalIndex(chosenIndex));
}
//Extracts all the points reached by the last propagation (i.e. the points
//inside the 'active' cells) and resets their scalar value so that they are
//ignored by subsequent propagations.
//Returns 0 on error (uninitialized structure or not enough memory).
ReferenceCloud* FastMarchingForPropagation::extractPropagatedPoints()
{
	if (!m_initialized || !m_octree || m_gridLevel > DgmOctree::MAX_OCTREE_LEVEL)
		return 0;

	ReferenceCloud* Zk = new ReferenceCloud(m_octree->associatedCloud());

	for (unsigned i=0; i<m_activeCells.size(); ++i)
	{
		PropagationCell* aCell = (PropagationCell*)m_theGrid[m_activeCells[i]];
		ReferenceCloud* Yk = m_octree->getPointsInCell(aCell->cellCode,m_gridLevel,true);

		//we accumulate points over all the active cells, so the reserved capacity
		//must account for the points already inserted in Zk (otherwise the reserve
		//call becomes a no-op after the first cell and 'addPointIndex' below could fail!)
		if (!Zk->reserve(Zk->size() + Yk->size())) //not enough memory
		{
			delete Zk;
			return 0;
		}

		Yk->placeIteratorAtBegining();
		for (unsigned k=0; k<Yk->size(); ++k)
		{
			Zk->addPointIndex(Yk->getCurrentPointGlobalIndex()); //can't fail (see above)
			//reset the point's scalar value (gradient norm) so that it won't be
			//taken into account by the next propagation
			Yk->setCurrentPointScalarValue(NAN_VALUE);
			Yk->forwardIterator();
		}
	}

	return Zk;
}
//Randomly subsamples the input cloud down to 'newNumberOfPoints' points.
//Returns 0 if an error occurred (not enough memory or process canceled).
ReferenceCloud* CloudSamplingTools::subsampleCloudRandomly(GenericIndexedCloudPersist* inputCloud, unsigned newNumberOfPoints, GenericProgressCallback* progressCb/*=0*/)
{
	assert(inputCloud);
	unsigned inputCount = inputCloud->size();

	//start with a reference cloud that contains all the input points
	ReferenceCloud* sampledCloud = new ReferenceCloud(inputCloud);
	if (!sampledCloud->addPointIndex(0,inputCount))
	{
		//not enough memory
		delete sampledCloud;
		return 0;
	}

	//already at (or below) the requested size? nothing to remove
	if (inputCount <= newNumberOfPoints)
	{
		return sampledCloud;
	}

	unsigned pointsToRemove = inputCount - newNumberOfPoints;

	std::random_device rd;  // non-deterministic generator
	std::mt19937 gen(rd()); // to seed mersenne twister.

	NormalizedProgress* normProgress = 0;
	if (progressCb)
	{
		progressCb->setInfo("Random subsampling");
		normProgress = new NormalizedProgress(progressCb, pointsToRemove);
		progressCb->reset();
		progressCb->start();
	}

	//removing a point = swapping it with the current last one, then shrinking the cloud.
	//We remove "inputCloud.size() - newNumberOfPoints" points this way (much simpler).
	unsigned lastPointIndex = inputCount - 1;
	for (unsigned i=0; i<pointsToRemove; ++i, --lastPointIndex)
	{
		std::uniform_int_distribution<unsigned> dist(0, lastPointIndex);
		sampledCloud->swap(dist(gen), lastPointIndex);

		if (normProgress && !normProgress->oneStep())
		{
			//process canceled by the user
			delete normProgress;
			delete sampledCloud;
			return 0;
		}
	}

	sampledCloud->resize(newNumberOfPoints); //always smaller, so it should be ok!

	delete normProgress; //deleting a null pointer is safe

	return sampledCloud;
}
//Randomly subsamples 'theCloud' down to 'newNumberOfPoints' points (legacy, rand()-based version).
//Returns 0 if an error occurred (not enough memory or process canceled).
ReferenceCloud* CloudSamplingTools::subsampleCloudRandomly(GenericIndexedCloudPersist* theCloud, unsigned newNumberOfPoints, GenericProgressCallback* progressCb/*=0*/)
{
	assert(theCloud);
	unsigned count = theCloud->size();

	//first, put all the input points in a reference cloud
	ReferenceCloud* result = new ReferenceCloud(theCloud);
	if (!result->addPointIndex(0,count))
	{
		//not enough memory
		delete result;
		return 0;
	}

	//less points than requested? nothing to do
	if (count <= newNumberOfPoints)
	{
		return result;
	}

	unsigned pointsToRemove = count - newNumberOfPoints;

	NormalizedProgress* normProgress = 0;
	if (progressCb)
	{
		progressCb->setInfo("Random subsampling");
		normProgress = new NormalizedProgress(progressCb,pointsToRemove);
		progressCb->reset();
		progressCb->start();
	}

	//removing a point = swapping it with the current last one, then shrinking
	//the cloud ("theCloud.size() - newNumberOfPoints" removals - much simpler)
	unsigned lastPointIndex = count - 1;
	for (unsigned i=0; i<pointsToRemove; ++i)
	{
		unsigned index = (unsigned)floor((float)rand()/(float)RAND_MAX * (float)lastPointIndex);
		result->swap(index,lastPointIndex);
		--lastPointIndex;

		if (normProgress && !normProgress->oneStep())
		{
			//process canceled by the user
			delete normProgress;
			delete result;
			return 0;
		}
	}

	result->resize(newNumberOfPoints); //always smaller, so it should be ok!

	if (normProgress)
		delete normProgress;

	return result;
}
bool TrueKdTree::build( double maxError, DistanceComputationTools::ERROR_MEASURES errorMeasure/*=DistanceComputationTools::RMS*/, unsigned maxPointCountPerCell/*=0*/, GenericProgressCallback* progressCb/*=0*/) { if (!m_associatedCloud) return false; //tree already computed! (call clear before) if (m_root) return false; unsigned count = m_associatedCloud->size(); if (count == 0) //no point, no node! { return false; } //structures used to sort the points along the 3 dimensions try { s_sortedCoordsForSplit.resize(count); } catch(std::bad_alloc) { //not enough memory! return false; } //initial 'subset' to start recursion ReferenceCloud* subset = new ReferenceCloud(m_associatedCloud); if (!subset->addPointIndex(0,count)) { //not enough memory delete subset; return false; } InitProgress(progressCb,count); //launch recursive process m_maxError = maxError; m_maxPointCountPerCell = maxPointCountPerCell; m_errorMeasure = errorMeasure; m_root = split(subset); //clear static structure s_sortedCoordsForSplit.clear(); return (m_root != 0); }
//Subsamples a cloud by keeping a single point per octree cell at the given level.
//Returns 0 if an error occurred (not enough memory, etc.).
ReferenceCloud* CloudSamplingTools::subsampleCloudWithOctreeAtLevel(GenericIndexedCloudPersist* inputCloud, unsigned char octreeLevel, SUBSAMPLING_CELL_METHOD subsamplingMethod, GenericProgressCallback* progressCb/*=0*/, DgmOctree* inputOctree/*=0*/)
{
	assert(inputCloud);

	//compute the octree ourselves if the caller didn't provide one
	DgmOctree* octree = inputOctree;
	if (!octree)
	{
		octree = new DgmOctree(inputCloud);
		if (octree->build(progressCb) < 1)
		{
			delete octree;
			return 0;
		}
	}

	//one output point per cell at the requested subdivision level
	ReferenceCloud* cloud = new ReferenceCloud(inputCloud);
	unsigned cellCount = octree->getCellNumber(octreeLevel);
	if (!cloud->reserve(cellCount))
	{
		//not enough memory
		if (!inputOctree)
			delete octree;
		delete cloud;
		return 0;
	}

	//additional parameters for the per-cell function
	void* additionalParameters[2] = {	reinterpret_cast<void*>(cloud),
										reinterpret_cast<void*>(&subsamplingMethod) };

	if (octree->executeFunctionForAllCellsAtLevel(	octreeLevel,
													&subsampleCellAtLevel,
													additionalParameters,
													false, //the process is so simple that MT is slower!
													progressCb,
													"Cloud Subsampling") == 0)
	{
		//something went wrong
		delete cloud;
		cloud = 0;
	}

	if (!inputOctree)
		delete octree;

	return cloud;
}
//Subsamples a cloud by keeping one point per octree cell at the given level
//(legacy interface). Returns 0 if an error occurred.
ReferenceCloud* CloudSamplingTools::subsampleCloudWithOctreeAtLevel(GenericIndexedCloudPersist* theCloud, uchar octreeLevel, SUBSAMPLING_CELL_METHOD subsamplingMethod, GenericProgressCallback* progressCb/*=0*/, DgmOctree* _theOctree/*=0*/)
{
	assert(theCloud);

	//build the octree ourselves if the caller didn't provide one
	DgmOctree* theOctree = _theOctree;
	if (!theOctree)
	{
		theOctree = new DgmOctree(theCloud);
		if (theOctree->build(progressCb) < 1)
		{
			delete theOctree;
			return 0;
		}
	}

	ReferenceCloud* cloud = new ReferenceCloud(theCloud);
	unsigned nCells = theOctree->getCellNumber(octreeLevel);
	if (!cloud->reserve(nCells))
	{
		//not enough memory
		if (!_theOctree)
			delete theOctree;
		delete cloud;
		return 0;
	}

	//additional parameters for the cell-wise subsampling function
	void* additionalParameters[2] = { (void*)cloud, (void*)&subsamplingMethod };

	//single-threaded on purpose: the process is so simple that MT is slower!
	if (theOctree->executeFunctionForAllCellsAtLevel(	octreeLevel,
														&subsampleCellAtLevel,
														additionalParameters,
														progressCb,
														"Cloud Subsampling") == 0)
	{
		//something went wrong
		delete cloud;
		cloud = 0;
	}

	if (!_theOctree)
		delete theOctree;

	return cloud;
}
//Per-cell subsampling callback (progress-aware version): selects one point of
//the cell (randomly, or the one closest to the cell center) and appends it to
//the output cloud. Returns false if the process was canceled or if the point
//could not be added (not enough memory).
bool CloudSamplingTools::subsampleCellAtLevel(	const DgmOctree::octreeCell& cell,
												void** additionalParameters,
												NormalizedProgress* nProgress/*=0*/)
{
	//unpack the additional parameters
	ReferenceCloud* cloud = static_cast<ReferenceCloud*>(additionalParameters[0]);
	SUBSAMPLING_CELL_METHOD subsamplingMethod = *static_cast<SUBSAMPLING_CELL_METHOD*>(additionalParameters[1]);

	unsigned pointsCount = cell.points->size();
	unsigned bestIndex = 0;

	if (subsamplingMethod == RANDOM_POINT)
	{
		//any point of the cell will do
		bestIndex = (static_cast<unsigned>(rand()) % pointsCount);

		if (nProgress && !nProgress->steps(pointsCount))
		{
			return false;
		}
	}
	else // subsamplingMethod == NEAREST_POINT_TO_CELL_CENTER
	{
		//compute the cell center
		CCVector3 center;
		cell.parentOctree->computeCellCenter(cell.truncatedCode,cell.level,center,true);

		//look for the point closest to the center (squared distances are enough)
		PointCoordinateType bestSquareDist = (*cell.points->getPoint(0) - center).norm2();
		for (unsigned i=1; i<pointsCount; ++i)
		{
			PointCoordinateType d = (*cell.points->getPoint(i) - center).norm2();
			if (d < bestSquareDist)
			{
				bestSquareDist = d;
				bestIndex = i;
			}

			if (nProgress && !nProgress->oneStep())
			{
				return false;
			}
		}
	}

	return cloud->addPointIndex(cell.points->getPointGlobalIndex(bestIndex));
}
bool FastMarchingForPropagation::setPropagationTimingsAsDistances() { if (!m_initialized || !m_octree || m_gridLevel > DgmOctree::MAX_OCTREE_LEVEL) return false; for (unsigned i=0;i<m_activeCells.size();++i) { PropagationCell* aCell = (PropagationCell*)m_theGrid[m_activeCells[i]]; ReferenceCloud* Yk = m_octree->getPointsInCell(aCell->cellCode,m_gridLevel,true); Yk->placeIteratorAtBegining(); for (unsigned k=0;k<Yk->size();++k) { Yk->setCurrentPointScalarValue(aCell->T); Yk->forwardIterator(); } //Yk->clear(); //inutile } return true; }
//Extracts the subset of points that fall inside (or outside, depending on
//'keepInside') a 2D polyline, optionally after projection in screen space
//with 'viewMat'. Returns 0 if an error occurred (not enough memory).
ReferenceCloud* ManualSegmentationTools::segment(GenericIndexedCloudPersist* aCloud, const Polyline* poly, bool keepInside, const float* viewMat)
{
	assert(poly && aCloud);

	//optional screen-space projection matrix
	CCLib::SquareMatrix* trans = (viewMat ? new CCLib::SquareMatrix(viewMat) : 0);

	ReferenceCloud* result = new ReferenceCloud(aCloud);

	//test each point of the cloud against the polyline
	unsigned count = aCloud->size();
	for (unsigned i=0; i<count; ++i)
	{
		CCVector3 P;
		aCloud->getPoint(i,P);

		//we project the point in screen space first if necessary
		if (trans)
		{
			P = (*trans) * P;
		}

		bool pointInside = isPointInsidePoly(CCVector2(P.x,P.y),poly);
		//keep the point if its 'insideness' matches the requested side
		if (pointInside == keepInside)
		{
			if (!result->addPointIndex(i))
			{
				//not enough memory
				delete result;
				result = 0;
				break;
			}
		}
	}

	if (trans)
		delete trans;

	return result;
}
//Noise filter applied on a single octree cell: for each point of the cell, a
//least-squares plane is fit on its neighbors (spherical or kNN neighborhood)
//and the point is kept only if its distance to that plane is below a threshold
//(either an absolute error, or 'nSigma' times the std. dev. of the neighbors'
//distances to the plane). Kept points are appended to the output cloud.
//Returns false if the process was canceled by the user.
bool CloudSamplingTools::applyNoiseFilterAtLevel(	const DgmOctree::octreeCell& cell,
													void** additionalParameters,
													NormalizedProgress* nProgress/*=0*/)
{
	//unpack the additional parameters
	ReferenceCloud* cloud = static_cast<ReferenceCloud*>(additionalParameters[0]);
	PointCoordinateType kernelRadius = *static_cast<PointCoordinateType*>(additionalParameters[1]);
	double nSigma = *static_cast<double*>(additionalParameters[2]);
	bool removeIsolatedPoints = *static_cast<bool*>(additionalParameters[3]);
	bool useKnn = *static_cast<bool*>(additionalParameters[4]);
	int knn = *static_cast<int*>(additionalParameters[5]);
	bool useAbsoluteError = *static_cast<bool*>(additionalParameters[6]);
	double absoluteError = *static_cast<double*>(additionalParameters[7]);

	//structure for nearest neighbors search
	DgmOctree::NearestNeighboursSphericalSearchStruct nNSS;
	nNSS.level = cell.level;
	nNSS.prepare(kernelRadius,cell.parentOctree->getCellSize(nNSS.level));
	if (useKnn)
	{
		nNSS.minNumberOfNeighbors = knn;
	}
	cell.parentOctree->getCellPos(cell.truncatedCode,cell.level,nNSS.cellPos,true);
	cell.parentOctree->computeCellCenter(nNSS.cellPos,cell.level,nNSS.cellCenter);

	unsigned n = cell.points->size(); //number of points in the current cell

	//for each point in the cell
	for (unsigned i=0; i<n; ++i)
	{
		cell.points->getPoint(i,nNSS.queryPoint);

		//look for neighbors (either inside a sphere or the k nearest ones)
		//warning: there may be more points at the end of nNSS.pointsInNeighbourhood than the actual nearest neighbors (neighborCount)!
		unsigned neighborCount = 0;
		if (useKnn)
			neighborCount = cell.parentOctree->findNearestNeighborsStartingFromCell(nNSS);
		else
			neighborCount = cell.parentOctree->findNeighborsInASphereStartingFromCell(nNSS,kernelRadius,false);

		if (neighborCount > 3) //we want 3 points or more (other than the point itself!)
		{
			//find the query point in the nearest neighbors set and place it at the end
			const unsigned globalIndex = cell.points->getPointGlobalIndex(i);
			unsigned localIndex = 0;
			while (localIndex < neighborCount && nNSS.pointsInNeighbourhood[localIndex].pointIndex != globalIndex)
				++localIndex;
			//the query point should be in the nearest neighbors set!
			assert(localIndex < neighborCount);
			if (localIndex+1 < neighborCount) //no need to swap with another point if it's already at the end!
			{
				std::swap(nNSS.pointsInNeighbourhood[localIndex],nNSS.pointsInNeighbourhood[neighborCount-1]);
			}

			unsigned realNeighborCount = neighborCount-1;
			DgmOctreeReferenceCloud neighboursCloud(&nNSS.pointsInNeighbourhood,realNeighborCount); //we don't take the query point into account!
			Neighbourhood Z(&neighboursCloud);

			//fit a least-squares plane on the neighbors (without the query point)
			const PointCoordinateType* lsPlane = Z.getLSPlane();
			if (lsPlane)
			{
				double maxD = absoluteError;
				if (!useAbsoluteError)
				{
					//compute the std. dev. to this plane
					double sum_d = 0;
					double sum_d2 = 0;
					for (unsigned j=0; j<realNeighborCount; ++j)
					{
						const CCVector3* P = neighboursCloud.getPoint(j);
						double d = CCLib::DistanceComputationTools::computePoint2PlaneDistance(P,lsPlane);
						sum_d += d;
						sum_d2 += d*d;
					}
					double stddev = sqrt(fabs(sum_d2*realNeighborCount - sum_d*sum_d))/realNeighborCount;
					maxD = stddev * nSigma;
				}

				//distance from the query point to the plane
				double d = fabs(CCLib::DistanceComputationTools::computePoint2PlaneDistance(&nNSS.queryPoint,lsPlane));

				//keep the point only if it lies close enough to the fitted plane
				if (d <= maxD)
					cloud->addPointIndex(globalIndex);
			}
			else
			{
				//TODO: ???
			}
		}
		else
		{
			//not enough points to fit a plane AND compute distances to it
			if (!removeIsolatedPoints)
			{
				//we keep the point
				unsigned globalIndex = cell.points->getPointGlobalIndex(i);
				cloud->addPointIndex(globalIndex);
			}
		}

		if (nProgress && !nProgress->oneStep())
		{
			//process canceled by the user
			return false;
		}
	}

	return true;
}
bool AutoSegmentationTools::frontPropagationBasedSegmentation(GenericIndexedCloudPersist* theCloud, ScalarType minSeedDist, uchar octreeLevel, ReferenceCloudContainer& theSegmentedLists, GenericProgressCallback* progressCb, DgmOctree* inputOctree, bool applyGaussianFilter, float alpha) { unsigned numberOfPoints = (theCloud ? theCloud->size() : 0); if (numberOfPoints == 0) return false; //compute octree if none was provided DgmOctree* theOctree = inputOctree; if (!theOctree) { theOctree = new DgmOctree(theCloud); if (theOctree->build(progressCb)<1) { delete theOctree; return false; } } //on calcule le gradient (va ecraser le champ des distances) if (ScalarFieldTools::computeScalarFieldGradient(theCloud,true,true,progressCb,theOctree) < 0) { if (!inputOctree) delete theOctree; return false; } //et on lisse le resultat if (applyGaussianFilter) { uchar level = theOctree->findBestLevelForAGivenPopulationPerCell(NUMBER_OF_POINTS_FOR_GRADIENT_COMPUTATION); PointCoordinateType cellSize = theOctree->getCellSize(level); ScalarFieldTools::applyScalarFieldGaussianFilter(static_cast<float>(cellSize/3),theCloud,-1,progressCb,theOctree); } unsigned seedPoints = 0; unsigned numberOfSegmentedLists = 0; //on va faire la propagation avec le FastMarching(); FastMarchingForPropagation* fm = new FastMarchingForPropagation(); fm->setJumpCoef(50.0); fm->setDetectionThreshold(alpha); int result = fm->init(theCloud,theOctree,octreeLevel); int octreeLength = OCTREE_LENGTH(octreeLevel)-1; if (result<0) { if (!inputOctree) delete theOctree; delete fm; return false; } if (progressCb) { progressCb->reset(); progressCb->setMethodTitle("FM Propagation"); char buffer[256]; sprintf(buffer,"Octree level: %i\nNumber of points: %u",octreeLevel,numberOfPoints); progressCb->setInfo(buffer); progressCb->start(); } ScalarField* theDists = new ScalarField("distances"); { ScalarType d = theCloud->getPointScalarValue(0); if (!theDists->resize(numberOfPoints,true,d)) { if (!inputOctree) delete theOctree; 
return false; } } unsigned maxDistIndex = 0, begin = 0; CCVector3 startPoint; while (true) { ScalarType maxDist = NAN_VALUE; //on cherche la premiere distance superieure ou egale a "minSeedDist" while (begin<numberOfPoints) { const CCVector3 *thePoint = theCloud->getPoint(begin); const ScalarType& theDistance = theDists->getValue(begin); ++begin; //FIXME DGM: what happens if SF is negative?! if (theCloud->getPointScalarValue(begin) >= 0 && theDistance >= minSeedDist) { maxDist = theDistance; startPoint = *thePoint; maxDistIndex = begin; break; } } //il n'y a plus de point avec des distances suffisamment grandes ! if (maxDist<minSeedDist) break; //on finit la recherche du max for (unsigned i=begin; i<numberOfPoints; ++i) { const CCVector3 *thePoint = theCloud->getPoint(i); const ScalarType& theDistance = theDists->getValue(i); if ((theCloud->getPointScalarValue(i)>=0.0)&&(theDistance > maxDist)) { maxDist = theDistance; startPoint = *thePoint; maxDistIndex = i; } } int pos[3]; theOctree->getTheCellPosWhichIncludesThePoint(&startPoint,pos,octreeLevel); //clipping (important!) pos[0] = std::min(octreeLength,pos[0]); pos[1] = std::min(octreeLength,pos[1]); pos[2] = std::min(octreeLength,pos[2]); fm->setSeedCell(pos); ++seedPoints; int result = fm->propagate(); //if the propagation was successful if (result >= 0) { //we extract the corresponding points ReferenceCloud* newCloud = new ReferenceCloud(theCloud); if (fm->extractPropagatedPoints(newCloud) && newCloud->size() != 0) { theSegmentedLists.push_back(newCloud); ++numberOfSegmentedLists; } else { //not enough memory?! 
delete newCloud; newCloud = 0; } if (progressCb) progressCb->update(float(numberOfSegmentedLists % 100)); fm->cleanLastPropagation(); //break; } if (maxDistIndex == begin) ++begin; } if (progressCb) progressCb->stop(); for (unsigned i=0; i<numberOfPoints; ++i) theCloud->setPointScalarValue(i,theDists->getValue(i)); delete fm; fm = 0; theDists->release(); theDists = 0; if (!inputOctree) delete theOctree; return true; }
//Spatially resamples a cloud: selects a subset of points such that no two kept
//points are closer than 'minDistance'. The distance can optionally be modulated
//by the active scalar field (modParams: radius = a * sf + b).
//Returns 0 if an error occurred (not enough memory, canceled process, etc.).
ReferenceCloud* CloudSamplingTools::resampleCloudSpatially(	GenericIndexedCloudPersist* inputCloud,
															PointCoordinateType minDistance,
															const SFModulationParams& modParams,
															DgmOctree* inputOctree/*=0*/,
															GenericProgressCallback* progressCb/*=0*/)
{
	assert(inputCloud);
	unsigned cloudSize = inputCloud->size();

	//compute the octree if none was provided
	DgmOctree* octree = inputOctree;
	if (!octree)
	{
		octree = new DgmOctree(inputCloud);
		if (octree->build() < static_cast<int>(cloudSize))
		{
			delete octree;
			return 0;
		}
	}
	assert(octree && octree->associatedCloud() == inputCloud);

	//output cloud
	ReferenceCloud* sampledCloud = new ReferenceCloud(inputCloud);
	const unsigned c_reserveStep = 65536; //capacity is grown by chunks to avoid a huge upfront allocation
	if (!sampledCloud->reserve(std::min(cloudSize,c_reserveStep)))
	{
		//not enough memory
		if (!inputOctree)
			delete octree;
		return 0;
	}

	//per-point markers: non-zero = still candidate, 0 = discarded (too close to a kept point)
	GenericChunkedArray<1,char>* markers = new GenericChunkedArray<1,char>(); //DGM: upgraded from vector, as this can be quite huge!
	if (!markers->resize(cloudSize,true,1)) //true by default
	{
		//not enough memory
		markers->release();
		if (!inputOctree)
			delete octree;
		delete sampledCloud;
		return 0;
	}

	//best octree level (there may be several of them if we use parameter modulation)
	std::vector<unsigned char> bestOctreeLevel;
	bool modParamsEnabled = modParams.enabled;
	ScalarType sfMin = 0, sfMax = 0;
	try
	{
		if (modParams.enabled)
		{
			//compute min and max sf values
			ScalarFieldTools::computeScalarFieldExtremas(inputCloud,sfMin,sfMax);

			if (!ScalarField::ValidValue(sfMin))
			{
				//all SF values are NAN?!
				modParamsEnabled = false;
			}
			else
			{
				//compute min and max 'best' levels
				//(the search radius is modulated as 'a * sf + b')
				PointCoordinateType dist0 = static_cast<PointCoordinateType>(sfMin * modParams.a + modParams.b);
				PointCoordinateType dist1 = static_cast<PointCoordinateType>(sfMax * modParams.a + modParams.b);
				unsigned char level0 = octree->findBestLevelForAGivenNeighbourhoodSizeExtraction(dist0);
				unsigned char level1 = octree->findBestLevelForAGivenNeighbourhoodSizeExtraction(dist1);

				bestOctreeLevel.push_back(level0);
				if (level1 != level0)
				{
					//add intermediate levels if necessary
					size_t levelCount = (level1 < level0 ? level0-level1 : level1-level0) + 1;
					assert(levelCount != 0);

					for (size_t i=1; i<levelCount-1; ++i) //we already know level0 and level1!
					{
						ScalarType sfVal = sfMin + i*((sfMax-sfMin)/levelCount);
						PointCoordinateType dist = static_cast<PointCoordinateType>(sfVal * modParams.a + modParams.b);
						unsigned char level = octree->findBestLevelForAGivenNeighbourhoodSizeExtraction(dist);
						bestOctreeLevel.push_back(level);
					}
				}
				bestOctreeLevel.push_back(level1);
			}
		}
		else
		{
			//no modulation: a single level is enough
			unsigned char defaultLevel = octree->findBestLevelForAGivenNeighbourhoodSizeExtraction(minDistance);
			bestOctreeLevel.push_back(defaultLevel);
		}
	}
	catch (const std::bad_alloc&)
	{
		//not enough memory
		markers->release();
		if (!inputOctree)
		{
			delete octree;
		}
		delete sampledCloud;
		return 0;
	}

	//progress notification
	NormalizedProgress normProgress(progressCb, cloudSize);
	if (progressCb)
	{
		if (progressCb->textCanBeEdited())
		{
			progressCb->setMethodTitle("Spatial resampling");
			char buffer[256];
			sprintf(buffer, "Points: %u\nMin dist.: %f", cloudSize, minDistance);
			progressCb->setInfo(buffer);
		}
		progressCb->update(0);
		progressCb->start();
	}

	//for each point in the cloud that is still 'marked', we look
	//for its neighbors and remove their own marks
	markers->placeIteratorAtBegining();
	bool error = false;
	//default octree level
	assert(!bestOctreeLevel.empty());
	unsigned char octreeLevel = bestOctreeLevel.front();
	//default distance between points
	PointCoordinateType minDistBetweenPoints = minDistance;
	for (unsigned i=0; i<cloudSize; i++, markers->forwardIterator())
	{
		//no mark? we skip this point
		if (markers->getCurrentValue() != 0)
		{
			//init neighbor search structure
			const CCVector3* P = inputCloud->getPoint(i);

			//parameters modulation
			if (modParamsEnabled)
			{
				ScalarType sfVal = inputCloud->getPointScalarValue(i);
				if (ScalarField::ValidValue(sfVal))
				{
					//modulate minDistance
					minDistBetweenPoints = static_cast<PointCoordinateType>(sfVal * modParams.a + modParams.b);
					//get (approximate) best level
					//NOTE(review): if sfMax == sfMin this divides by zero - presumably
					//modulation is only meaningful with a non-constant SF; to be confirmed
					size_t levelIndex = static_cast<size_t>(bestOctreeLevel.size() * (sfVal / (sfMax-sfMin)));
					if (levelIndex == bestOctreeLevel.size())
						--levelIndex;
					octreeLevel = bestOctreeLevel[levelIndex];
				}
				else
				{
					//invalid SF value: fall back to the default parameters
					minDistBetweenPoints = minDistance;
					octreeLevel = bestOctreeLevel.front();
				}
			}

			//look for neighbors and 'de-mark' them
			{
				DgmOctree::NeighboursSet neighbours;
				octree->getPointsInSphericalNeighbourhood(*P,minDistBetweenPoints,neighbours,octreeLevel);
				for (DgmOctree::NeighboursSet::iterator it = neighbours.begin(); it != neighbours.end(); ++it)
					if (it->pointIndex != i)
						markers->setValue(it->pointIndex,0);
			}

			//At this stage, the ith point is the only one marked in a radius of <minDistance>.
			//Therefore it will necessarily be in the final cloud!
			if (sampledCloud->size() == sampledCloud->capacity() && !sampledCloud->reserve(sampledCloud->capacity() + c_reserveStep))
			{
				//not enough memory
				error = true;
				break;
			}
			if (!sampledCloud->addPointIndex(i))
			{
				//not enough memory
				error = true;
				break;
			}
		}

		//progress indicator
		if (progressCb && !normProgress.oneStep())
		{
			//cancel process
			error = true;
			break;
		}
	}

	//remove unnecessarily allocated memory
	if (!error)
	{
		if (sampledCloud->capacity() > sampledCloud->size())
			sampledCloud->resize(sampledCloud->size());
	}
	else
	{
		delete sampledCloud;
		sampledCloud = 0;
	}

	if (progressCb)
	{
		progressCb->stop();
	}

	if (!inputOctree)
	{
		//locally computed octree
		delete octree;
		octree = 0;
	}

	markers->release();
	markers = 0;

	return sampledCloud;
}
//Recursively splits a subset of points into a KD-tree structure.
//Warning: this method takes ownership of 'subset' in all cases (it is either
//handed over to a Leaf node or deleted).
//Returns the root of the (sub)tree, or 0 if an error occurred.
TrueKdTree::BaseNode* TrueKdTree::split(ReferenceCloud* subset)
{
	assert(subset); //subset will always be taken care of by this method

	unsigned count = subset->size();

	//fit a LS plane on the subset (it will be associated with the cell if it becomes a leaf)
	const PointCoordinateType* planeEquation = Neighbourhood(subset).getLSQPlane();
	if (!planeEquation)
	{
		//an error occurred during LS plane computation?!
		delete subset;
		return 0;
	}

	//we always split sets larger the a given size (but we can't skip cells with less than 6 points!)
	ScalarType error = -1;
	if (count < m_maxPointCountPerCell || m_maxPointCountPerCell < 6)
	{
		//the plane normal is expected to be normalized
		assert(fabs(CCVector3(planeEquation).norm2() - 1.0) < 1.0e-6);
		error = (count != 3 ? DistanceComputationTools::ComputeCloud2PlaneDistance(subset, planeEquation, m_errorMeasure) : 0);

		//if we have less than 6 points, then the subdivision would produce a subset with less than 3 points
		//(and we can't fit a plane on less than 3 points!)
		bool isLeaf = (count < 6 || error <= m_maxError);
		if (isLeaf)
		{
			UpdateProgress(count);
			//the Leaf class takes ownership of the subset!
			return new Leaf(subset,planeEquation,error);
		}
	}

	/*** proceed with a 'standard' binary partition ***/

	//cell limits (dimensions)
	CCVector3 dims;
	{
		CCVector3 bbMin,bbMax;
		subset->getBoundingBox(bbMin.u,bbMax.u);
		dims = bbMax - bbMin;
	}

	//find the largest dimension (the split axis)
	uint8_t splitDim = X_DIM;
	if (dims.y > dims.x)
		splitDim = Y_DIM;
	if (dims.z > dims.u[splitDim])
		splitDim = Z_DIM;

	//find the median by sorting the points coordinates along the split axis
	assert(s_sortedCoordsForSplit.size() >= static_cast<size_t>(count));
	for (unsigned i=0; i<count; ++i)
	{
		const CCVector3* P = subset->getPoint(i);
		s_sortedCoordsForSplit[i] = P->u[splitDim];
	}
	std::sort(s_sortedCoordsForSplit.begin(),s_sortedCoordsForSplit.begin()+count);

	unsigned splitCount = count/2;
	assert(splitCount >= 3); //count >= 6 (see above)

	//we must check that the split value is the 'first one'
	//(otherwise the 'strictly less than' partition below could produce an empty subset)
	if (s_sortedCoordsForSplit[splitCount-1] == s_sortedCoordsForSplit[splitCount])
	{
		if (s_sortedCoordsForSplit[2] != s_sortedCoordsForSplit[splitCount]) //can we go backward?
		{
			while (/*splitCount>0 &&*/ s_sortedCoordsForSplit[splitCount-1] == s_sortedCoordsForSplit[splitCount])
			{
				assert(splitCount > 3);
				--splitCount;
			}
		}
		else if (s_sortedCoordsForSplit[count-3] != s_sortedCoordsForSplit[splitCount]) //can we go forward?
		{
			do
			{
				++splitCount;
				assert(splitCount < count-3);
			}
			while (/*splitCount+1<count &&*/ s_sortedCoordsForSplit[splitCount] == s_sortedCoordsForSplit[splitCount-1]);
		}
		else //in fact we can't split this cell!
		{
			UpdateProgress(count);
			if (error < 0)
				error = (count != 3 ? DistanceComputationTools::ComputeCloud2PlaneDistance(subset, planeEquation, m_errorMeasure) : 0);
			//the Leaf class takes ownership of the subset!
			return new Leaf(subset,planeEquation,error);
		}
	}

	PointCoordinateType splitCoord = s_sortedCoordsForSplit[splitCount]; //count > 3 --> splitCount >= 2

	ReferenceCloud* leftSubset = new ReferenceCloud(subset->getAssociatedCloud());
	ReferenceCloud* rightSubset = new ReferenceCloud(subset->getAssociatedCloud());
	if (!leftSubset->reserve(splitCount) || !rightSubset->reserve(count-splitCount))
	{
		//not enough memory!
		delete leftSubset;
		delete rightSubset;
		delete subset;
		return 0;
	}

	//fill subsets (strictly below the split value goes left, the rest goes right)
	for (unsigned i=0; i<count; ++i)
	{
		const CCVector3* P = subset->getPoint(i);
		if (P->u[splitDim] < splitCoord)
		{
			leftSubset->addPointIndex(subset->getPointGlobalIndex(i));
		}
		else
		{
			rightSubset->addPointIndex(subset->getPointGlobalIndex(i));
		}
	}

	//release some memory before the next incursion!
	delete subset;
	subset = 0;

	//process subsets (if any)
	BaseNode* leftChild = split(leftSubset);
	if (!leftChild)
	{
		delete rightSubset;
		return 0;
	}

	BaseNode* rightChild = split(rightSubset);
	if (!rightChild)
	{
		delete leftChild;
		return 0;
	}

	//standard (internal) node
	Node* node = new Node;
	{
		node->leftChild = leftChild;
		leftChild->parent = node;
		node->rightChild = rightChild;
		rightChild->parent = node;
		node->splitDim = splitDim;
		node->splitValue = splitCoord;
	}
	return node;
}
//Statistical Outlier Removal (SOR) filter: removes the points whose mean
//distance to their 'knn' nearest neighbors exceeds the global average distance
//plus 'nSigma' times the standard deviation.
//Returns 0 if an error occurred (invalid input, not enough memory, etc.).
ReferenceCloud* CloudSamplingTools::sorFilter(	GenericIndexedCloudPersist* inputCloud,
												int knn/*=6*/,
												double nSigma/*=1.0*/,
												DgmOctree* inputOctree/*=0*/,
												GenericProgressCallback* progressCb/*=0*/)
{
	//invalid input
	if (!inputCloud || knn <= 0 || inputCloud->size() <= static_cast<unsigned>(knn))
	{
		assert(false);
		return 0;
	}

	//compute the octree if necessary
	DgmOctree* octree = inputOctree;
	if (!octree)
	{
		octree = new DgmOctree(inputCloud);
		if (octree->build(progressCb) < 1)
		{
			delete octree;
			return 0;
		}
	}

	//output
	ReferenceCloud* filteredCloud = 0;

	do //fake loop for easy break
	{
		unsigned pointCount = inputCloud->size();

		//per-point mean distance to the neighbors
		std::vector<PointCoordinateType> meanDistances;
		try
		{
			meanDistances.resize(pointCount,0);
		}
		catch (const std::bad_alloc&)
		{
			//not enough memory
			break;
		}

		double avgDist = 0, stdDev = 0;

		//1st step: compute the average distance to the neighbors
		{
			//additional parameters
			void* additionalParameters[] = {	reinterpret_cast<void*>(&knn),
												reinterpret_cast<void*>(&meanDistances) };

			unsigned char octreeLevel = octree->findBestLevelForAGivenPopulationPerCell(knn);

			if (octree->executeFunctionForAllCellsAtLevel(	octreeLevel,
															&applySORFilterAtLevel,
															additionalParameters,
															true,
															progressCb,
															"SOR filter") == 0)
			{
				//something went wrong
				break;
			}

			//deduce the average distance and std. dev.
			double sumDist = 0;
			double sumSquareDist = 0;
			for (unsigned i=0; i<pointCount; ++i)
			{
				sumDist += meanDistances[i];
				sumSquareDist += meanDistances[i]*meanDistances[i];
			}
			avgDist = sumDist / pointCount;
			stdDev = sqrt(fabs(sumSquareDist / pointCount - avgDist*avgDist));
		}

		//2nd step: remove the farthest points
		{
			//deduce the max distance
			double maxDist = avgDist + nSigma * stdDev;

			filteredCloud = new ReferenceCloud(inputCloud);
			if (!filteredCloud->reserve(pointCount))
			{
				//not enough memory
				delete filteredCloud;
				filteredCloud = 0;
				break;
			}

			//keep only the points that are close enough to their neighbors
			for (unsigned i=0; i<pointCount; ++i)
			{
				if (meanDistances[i] <= maxDist)
				{
					filteredCloud->addPointIndex(i);
				}
			}

			filteredCloud->resize(filteredCloud->size());
		}
	}
	while (false);

	if (!inputOctree)
	{
		delete octree;
		octree = 0;
	}

	return filteredCloud;
}
//Iterative Closest Point (ICP) registration of '_dataCloud' (moving) onto
//'_modelCloud' (fixed reference).
//	- transform: in/out global transformation, updated each improving iteration
//	- convType/minErrorDecrease/nbMaxIterations: stop criteria
//	- finalError: RMS error of the last iteration (-1.0 if never computed)
//	- filterOutFarthestPoints: if true, drops points beyond mu+3*sigma each iteration
//	- samplingLimit: both clouds are randomly subsampled above this size
//	- modelWeights/dataWeights: optional per-point weights (resampled along with the clouds)
//Returns ICP_APPLY_TRANSFO on success, an ICP_ERROR_* code otherwise.
//All intermediate structures (subsampled clouds, CPSet, weights, rotated cloud)
//are heap-allocated and released in the common epilogue at the bottom.
ICPRegistrationTools::CC_ICP_RESULT ICPRegistrationTools::RegisterClouds(GenericIndexedCloudPersist* _modelCloud,
																		GenericIndexedCloudPersist* _dataCloud,
																		PointProjectionTools::Transformation& transform,
																		CC_ICP_CONVERGENCE_TYPE convType,
																		double minErrorDecrease,
																		unsigned nbMaxIterations,
																		double& finalError,
																		GenericProgressCallback* progressCb/*=0*/,
																		bool filterOutFarthestPoints/*=false*/,
																		unsigned samplingLimit/*=20000*/,
																		ScalarField* modelWeights/*=0*/,
																		ScalarField* dataWeights/*=0*/)
{
	assert(_modelCloud && _dataCloud);

	finalError = -1.0;

	//MODEL CLOUD (reference, won't move)
	GenericIndexedCloudPersist* modelCloud=_modelCloud;
	ScalarField* _modelWeights=modelWeights;
	{
		if (_modelCloud->size()>samplingLimit) //shall we resample the clouds? (speed increase)
		{
			ReferenceCloud* subModelCloud = CloudSamplingTools::subsampleCloudRandomly(_modelCloud,samplingLimit);
			if (subModelCloud && modelWeights)
			{
				//extract the weights that correspond to the subsampled points
				_modelWeights = new ScalarField("ResampledModelWeights",modelWeights->isPositive());
				unsigned realCount = subModelCloud->size();
				if (_modelWeights->reserve(realCount))
				{
					for (unsigned i=0;i<realCount;++i)
						_modelWeights->addElement(modelWeights->getValue(subModelCloud->getPointGlobalIndex(i)));
					_modelWeights->computeMinAndMax();
				}
				else
				{
					//not enough memory
					//NOTE(review): the freshly allocated _modelWeights is not
					//released on this path before the early return below — looks
					//like a leak; confirm against ScalarField ownership rules
					delete subModelCloud;
					subModelCloud=0;
				}
			}
			modelCloud = subModelCloud;
		}
		if (!modelCloud) //something bad happened
			return ICP_ERROR_NOT_ENOUGH_MEMORY;
	}

	//DATA CLOUD (will move)
	ReferenceCloud* dataCloud=0;
	ScalarField* _dataWeights=dataWeights;
	SimpleCloud* rotatedDataCloud=0; //temporary structure (rotated vertices)
	{
		if (_dataCloud->size()>samplingLimit) //shall we resample the clouds? (speed increase)
		{
			dataCloud = CloudSamplingTools::subsampleCloudRandomly(_dataCloud,samplingLimit);
			if (dataCloud && dataWeights)
			{
				//extract the weights that correspond to the subsampled points
				_dataWeights = new ScalarField("ResampledDataWeights",dataWeights->isPositive());
				unsigned realCount = dataCloud->size();
				if (_dataWeights->reserve(realCount))
				{
					for (unsigned i=0;i<realCount;++i)
						_dataWeights->addElement(dataWeights->getValue(dataCloud->getPointGlobalIndex(i)));
					_dataWeights->computeMinAndMax();
				}
				else
				{
					//not enough memory
					delete dataCloud;
					dataCloud=0;
				}
			}
		}
		else
		{
			//create a 'fake' reference cloud with all points
			dataCloud = new ReferenceCloud(_dataCloud);
			if (dataCloud->reserve(_dataCloud->size()))
			{
				dataCloud->addPointIndex(0,_dataCloud->size());
			}
			else //not enough memory
			{
				delete dataCloud;
				dataCloud=0;
			}
		}

		//the data cloud also needs a scalar field (to store the distances)
		if (!dataCloud || !dataCloud->enableScalarField()) //something bad happened
		{
			//NOTE(review): a freshly allocated _dataWeights (resampling branch
			//above) is not released on this exit path — looks like a leak; verify
			if (dataCloud)
				delete dataCloud;
			if (modelCloud && modelCloud != _modelCloud)
				delete modelCloud;
			if (_modelWeights && _modelWeights!=modelWeights)
				_modelWeights->release();
			return ICP_ERROR_NOT_ENOUGH_MEMORY;
		}
	}

	//Closest Point Set (see ICP algorithm)
	ReferenceCloud* CPSet = new ReferenceCloud(modelCloud);
	ScalarField* CPSetWeights = _modelWeights ? new ScalarField("CPSetWeights",_modelWeights->isPositive()) : 0;

	//algorithm result
	CC_ICP_RESULT result = ICP_NOTHING_TO_DO;
	unsigned iteration = 0;
	double error = 0.0;

	//we compute the initial distance between the two clouds (and the CPSet by the way)
	dataCloud->forEach(ScalarFieldTools::razDistsToHiddenValue);
	DistanceComputationTools::Cloud2CloudDistanceComputationParams params;
	params.CPSet = CPSet;
	if (DistanceComputationTools::computeHausdorffDistance(dataCloud,modelCloud,params,progressCb)>=0)
	{
		//12/11/2008 - A.BEY: ICP guarantees only the decrease of the squared distances sum (not the distances sum)
		error = DistanceComputationTools::computeMeanSquareDist(dataCloud);
	}
	else
	{
		//if an error occured during distances computation...
		error = -1.0;
		result = ICP_ERROR_DIST_COMPUTATION;
	}

	//main ICP loop (only entered if the initial distance computation succeeded)
	if (error > 0.0)
	{
#ifdef _DEBUG
		//debug-only trace of the per-iteration error
		FILE* fp = fopen("registration_trace_log.txt","wt");
		if (fp)
			fprintf(fp,"Initial error: %f\n",error);
#endif

		double lastError=error,initialErrorDelta=0.0,errorDelta=0.0;
		result = ICP_APPLY_TRANSFO; //as soon as we do at least one iteration, we'll have to apply a transformation

		while (true)
		{
			++iteration;

			//regarding the progress bar
			if (progressCb && iteration>1) //on the first iteration, we do... nothing
			{
				char buffer[256];
				//then on the second iteration, we init/show it
				if (iteration==2)
				{
					initialErrorDelta = errorDelta;
					progressCb->reset();
					progressCb->setMethodTitle("Clouds registration");
					sprintf(buffer,"Initial mean square error = %f\n",lastError);
					progressCb->setInfo(buffer);
					progressCb->start();
				}
				else //and after we update it continuously
				{
					sprintf(buffer,"Mean square error = %f [%f]\n",error,-errorDelta);
					progressCb->setInfo(buffer);
					//progress = fraction of the error decrease already achieved
					progressCb->update((float)((initialErrorDelta-errorDelta)/(initialErrorDelta-minErrorDecrease)*100.0));
				}

				if (progressCb->isCancelRequested())
					break;
			}

			//shall we remove points with distance above a given threshold?
			if (filterOutFarthestPoints)
			{
				//fit a normal distribution on the current distances
				NormalDistribution N;
				N.computeParameters(dataCloud,false);
				if (N.isValid())
				{
					DistanceType mu,sigma2;
					N.getParameters(mu,sigma2);

					//replacement structures (filtered data cloud + matching CPSet/weights)
					ReferenceCloud* c = new ReferenceCloud(dataCloud->getAssociatedCloud());
					ReferenceCloud* newCPSet = new ReferenceCloud(CPSet->getAssociatedCloud()); //we must also update the CPSet!
					ScalarField* newdataWeights = (_dataWeights ? new ScalarField("ResampledDataWeights",_dataWeights->isPositive()) : 0);

					//unsigned realCount = dataCloud->size();
					//if (_dataWeights->reserve(realCount))
					//{
					//	for (unsigned i=0;i<realCount;++i)
					//		_dataWeights->addElement(dataWeights->getValue(dataCloud->getPointGlobalIndex(i)));
					//	_dataWeights->computeMinAndMax();
					//}
					//else
					//{
					//	//not enough memory
					//	delete dataCloud;
					//	dataCloud=0;
					//}

					unsigned n=dataCloud->size();
					if (!c->reserve(n) || !newCPSet->reserve(n) || (newdataWeights && !newdataWeights->reserve(n)))
					{
						//not enough memory
						delete c;
						delete newCPSet;
						if (newdataWeights)
							newdataWeights->release();
						result = ICP_ERROR_REGISTRATION_STEP;
						break;
					}

					//we keep only the points with "not too high" distances
					DistanceType maxDist = mu+3.0f*sqrt(sigma2);
					unsigned realSize=0;
					for (unsigned i=0;i<n;++i)
					{
						unsigned index = dataCloud->getPointGlobalIndex(i);
						if (dataCloud->getAssociatedCloud()->getPointScalarValue(index)<maxDist)
						{
							c->addPointIndex(index);
							newCPSet->addPointIndex(CPSet->getPointGlobalIndex(i));
							if (newdataWeights)
								newdataWeights->addElement(_dataWeights->getValue(index));
							++realSize;
						}
					}

					//resize should be ok as we have called reserve first
					c->resize(realSize); //should always be ok as counter<n
					newCPSet->resize(realSize); //idem
					if (newdataWeights)
						newdataWeights->resize(realSize); //idem

					//replace old structures by new ones
					delete CPSet;
					CPSet=newCPSet;
					delete dataCloud;
					dataCloud=c;
					if (_dataWeights)
					{
						_dataWeights->release();
						_dataWeights = newdataWeights;
					}
				}
			}

			//update CPSet weights (if any)
			if (_modelWeights)
			{
				unsigned count=CPSet->size();
				assert(CPSetWeights);
				if (CPSetWeights->currentSize()!=count)
				{
					if (!CPSetWeights->resize(count))
					{
						result = ICP_ERROR_REGISTRATION_STEP;
						break;
					}
				}
				//each CPSet point inherits the weight of its model counterpart
				for (unsigned i=0;i<count;++i)
					CPSetWeights->setValue(i,_modelWeights->getValue(CPSet->getPointGlobalIndex(i)));
				CPSetWeights->computeMinAndMax();
			}

			//compute the best rigid transformation for this iteration
			PointProjectionTools::Transformation currentTrans;
			//if registration procedure fails
			if (!RegistrationTools::RegistrationProcedure(dataCloud, CPSet, currentTrans, _dataWeights, _modelWeights))
			{
				result = ICP_ERROR_REGISTRATION_STEP;
				break;
			}

			//get rotated data cloud
			if (!rotatedDataCloud || filterOutFarthestPoints)
			{
				//we create a new structure, with rotated points
				SimpleCloud* newDataCloud = PointProjectionTools::applyTransformation(dataCloud,currentTrans);
				if (!newDataCloud)
				{
					//not enough memory
					result = ICP_ERROR_REGISTRATION_STEP;
					break;
				}
				//update dataCloud
				if (rotatedDataCloud)
					delete rotatedDataCloud;
				rotatedDataCloud = newDataCloud;
				delete dataCloud;
				//dataCloud becomes a full-range view on the rotated copy
				dataCloud = new ReferenceCloud(rotatedDataCloud);
				if (!dataCloud->reserve(rotatedDataCloud->size()))
				{
					//not enough memory
					result = ICP_ERROR_REGISTRATION_STEP;
					break;
				}
				dataCloud->addPointIndex(0,rotatedDataCloud->size());
			}
			else
			{
				//we simply have to rotate the existing temporary cloud
				rotatedDataCloud->applyTransformation(currentTrans);
			}

			//compute (new) distances to model
			params.CPSet = CPSet;
			if (DistanceComputationTools::computeHausdorffDistance(dataCloud,modelCloud,params)<0)
			{
				//an error occured during distances computation...
				result = ICP_ERROR_REGISTRATION_STEP;
				break;
			}

			lastError = error;
			//12/11/2008 - A.BEY: ICP guarantees only the decrease of the squared distances sum (not the distances sum)
			error = DistanceComputationTools::computeMeanSquareDist(dataCloud);
			finalError = (error>0 ? sqrt(error) : error);

#ifdef _DEBUG
			if (fp)
				fprintf(fp,"Iteration #%i --> error: %f\n",iteration,error);
#endif

			//error update
			errorDelta = lastError-error;

			//is it better?
			if (errorDelta > 0.0)
			{
				//we update global transformation matrix
				if (currentTrans.R.isValid())
				{
					if (transform.R.isValid())
						transform.R = currentTrans.R * transform.R;
					else
						transform.R = currentTrans.R;
					transform.T = currentTrans.R * transform.T;
				}
				transform.T += currentTrans.T;
			}

			//stop criterion
			if ((errorDelta < 0.0) || //error increase
				(convType == MAX_ERROR_CONVERGENCE && errorDelta < minErrorDecrease) || //convergence reached
				(convType == MAX_ITER_CONVERGENCE && iteration > nbMaxIterations)) //max iteration reached
			{
				break;
			}
		}

		if (progressCb)
			progressCb->stop();

#ifdef _DEBUG
		if (fp)
		{
			fclose(fp);
			fp=0;
		}
#endif
	}

	//common epilogue: release every temporary structure we may own
	if (CPSet)
		delete CPSet;
	CPSet=0;
	if (CPSetWeights)
		CPSetWeights->release();

	//release memory
	if (modelCloud && modelCloud != _modelCloud)
		delete modelCloud;
	if (_modelWeights && _modelWeights!=modelWeights)
		_modelWeights->release();
	if (dataCloud)
		delete dataCloud;
	if (_dataWeights && _dataWeights!=dataWeights)
		_dataWeights->release();
	if (rotatedDataCloud)
		delete rotatedDataCloud;

	return result;
}
//Spatial resampling: selects a subset of 'theCloud' such that no two selected
//points lie closer than 'minDistance' from each other (greedy scan; each kept
//point 'de-marks' all of its neighbors inside the minDistance sphere).
//Returns a new ReferenceCloud (caller takes ownership) or 0 on failure/cancel.
ReferenceCloud* CloudSamplingTools::resampleCloudSpatially(	GenericIndexedCloudPersist* theCloud,
															PointCoordinateType minDistance,
															DgmOctree* theOctree/*=0*/,
															GenericProgressCallback* progressCb/*=0*/)
{
	assert(theCloud);
	unsigned cloudSize = theCloud->size();

	//use the caller's octree if any, otherwise build a temporary one
	DgmOctree* _theOctree=theOctree;
	if (!_theOctree)
	{
		_theOctree = new DgmOctree(theCloud);
		if (_theOctree->build()<(int)cloudSize)
		{
			delete _theOctree;
			return 0;
		}
	}

	ReferenceCloud* sampledCloud = new ReferenceCloud(theCloud);
	if (!sampledCloud->reserve(cloudSize))
	{
		//not enough memory
		delete sampledCloud; //fix: was previously leaked on this path
		if (!theOctree)
			delete _theOctree;
		return 0;
	}

	//one 'still candidate' flag per point
	GenericChunkedArray<1,bool>* markers = new GenericChunkedArray<1,bool>(); //DGM: upgraded from vector, as this can be quite huge!
	if (!markers->resize(cloudSize,true,true))
	{
		//not enough memory
		markers->release();
		if (!theOctree)
			delete _theOctree;
		delete sampledCloud;
		return 0;
	}

	//progress notification
	NormalizedProgress* normProgress=0;
	if (progressCb)
	{
		progressCb->setInfo("Spatial resampling");
		normProgress = new NormalizedProgress(progressCb,cloudSize);
		progressCb->reset();
		progressCb->start();
	}

	//for each point in the cloud that is still 'marked', we look
	//for its neighbors and remove their own marks
	DgmOctree::NearestNeighboursSphericalSearchStruct nss;
	nss.level = _theOctree->findBestLevelForAGivenNeighbourhoodSizeExtraction(minDistance);
	markers->placeIteratorAtBegining();
	for (unsigned i=0; i<cloudSize; i++, markers->forwardIterator())
	{
		//progress indicator
		if (normProgress && !normProgress->oneStep())
		{
			//cancel process
			delete sampledCloud;
			sampledCloud = 0;
			break;
		}

		//no mark? we skip this point
		if (!markers->getCurrentValue())
			continue;

		//init neighbor search structure
		theCloud->getPoint(i,nss.queryPoint);
		bool inbounds = false;
		_theOctree->getTheCellPosWhichIncludesThePoint(&nss.queryPoint, nss.cellPos, nss.level, inbounds);
		nss.truncatedCellCode = (inbounds ? _theOctree->generateTruncatedCellCode(nss.cellPos, nss.level) : DgmOctree::INVALID_CELL_CODE);
		_theOctree->computeCellCenter(nss.cellPos, nss.level, nss.cellCenter);

		//add the points that lie in the same cell (faster)
		{
			ReferenceCloud* Y = _theOctree->getPointsInCell(nss.truncatedCellCode, nss.level, true);
			unsigned count = Y->size();
			try
			{
				nss.pointsInNeighbourhood.resize(count);
			}
			catch (const std::bad_alloc&) //out of memory (fix: catch by const reference, not by value)
			{
				//stop process
				delete sampledCloud;
				sampledCloud = 0;
				break;
			}

			unsigned realCount = 0;
			DgmOctree::NeighboursSet::iterator it = nss.pointsInNeighbourhood.begin();
			for (unsigned j=0; j<count; ++j)
			{
				unsigned index = Y->getPointGlobalIndex(j);
				if (index != i && markers->getValue(index)) //no need to add the point itself and those already flagged off
				{
					it->point = Y->getPointPersistentPtr(j);
					it->pointIndex = index;
					++it;
					++realCount;
				}
			}
			nss.pointsInNeighbourhood.resize(realCount); //should be ok as realCount<=count
			nss.alreadyVisitedNeighbourhoodSize = 1;
		}
#ifdef TEST_CELLS_FOR_SPHERICAL_NN
		nss.pointsInSphericalNeighbourhood.clear();
#endif
		nss.prepare(minDistance,_theOctree->getCellSize(nss.level));

		//look for neighbors and 'de-mark' them
		{
			unsigned nbNeighbors = _theOctree->findNeighborsInASphereStartingFromCell(nss, minDistance, false);
			DgmOctree::NeighboursSet::iterator it = nss.pointsInNeighbourhood.begin();
			for (unsigned j=0; j<nbNeighbors; ++j, ++it)
				if (it->pointIndex != i)
					markers->setValue(it->pointIndex,false);
		}

		//At this stage, the ith point is the only one marked in a radius of <minDistance>.
		//Therefore it will necessarily be in the final cloud!
		if (!sampledCloud->addPointIndex(i)) //not enough memory
		{
			//stop process
			delete sampledCloud;
			sampledCloud = 0;
			break;
		}
	}

	//shared cleanup (reached on success, cancel or failure)
	if (normProgress)
	{
		delete normProgress;
		normProgress = 0;
	}

	if (!theOctree)
		delete _theOctree;

	markers->release();

	return sampledCloud;
}
//Noise filter: for each point, looks at its neighborhood (either the 'knn'
//nearest neighbors, or all points inside a sphere of radius 'kernelRadius')
//and keeps the point if it is not too far from the locally fitted model
//(absolute error, or nSigma times the std. dev.). Isolated points are
//optionally removed. Returns a new ReferenceCloud (caller takes ownership)
//or 0 on failure.
ReferenceCloud* CloudSamplingTools::noiseFilter(	GenericIndexedCloudPersist* inputCloud,
													PointCoordinateType kernelRadius,
													double nSigma,
													bool removeIsolatedPoints/*=false*/,
													bool useKnn/*=false*/,
													int knn/*=6*/,
													bool useAbsoluteError/*=true*/,
													double absoluteError/*=0.0*/,
													DgmOctree* inputOctree/*=0*/,
													GenericProgressCallback* progressCb/*=0*/)
{
	//invalid input (note: the radius is only validated when it's actually used)
	if (!inputCloud || inputCloud->size() < 2 || (useKnn && knn <= 0) || (!useKnn && kernelRadius <= 0))
	{
		assert(false);
		return 0;
	}

	//use the caller's octree if any, otherwise build a temporary one
	DgmOctree* octree = inputOctree;
	if (!octree)
	{
		octree = new DgmOctree(inputCloud);
		if (octree->build(progressCb) < 1)
		{
			delete octree;
			return 0;
		}
	}

	ReferenceCloud* filteredCloud = new ReferenceCloud(inputCloud);

	unsigned pointCount = inputCloud->size();
	if (!filteredCloud->reserve(pointCount))
	{
		//not enough memory
		if (!inputOctree)
			delete octree;
		delete filteredCloud;
		return 0;
	}

	//additional parameters
	void* additionalParameters[] = {	reinterpret_cast<void*>(filteredCloud),
										reinterpret_cast<void*>(&kernelRadius),
										reinterpret_cast<void*>(&nSigma),
										reinterpret_cast<void*>(&removeIsolatedPoints),
										reinterpret_cast<void*>(&useKnn),
										reinterpret_cast<void*>(&knn),
										reinterpret_cast<void*>(&useAbsoluteError),
										reinterpret_cast<void*>(&absoluteError)
	};

	//FIX: the two branches were swapped. In knn mode the best octree level
	//depends on the expected population per cell (and kernelRadius may even be
	//<= 0, as it is not validated in that mode); in spherical mode it depends
	//on the neighborhood radius. This mirrors the (correct) logic used by the
	//other sampling methods in this file (see sorFilter/resampleCloudSpatially).
	unsigned char octreeLevel = 0;
	if (useKnn)
		octreeLevel = octree->findBestLevelForAGivenPopulationPerCell(knn);
	else
		octreeLevel = octree->findBestLevelForAGivenNeighbourhoodSizeExtraction(kernelRadius);

	if (octree->executeFunctionForAllCellsAtLevel(	octreeLevel,
													&applyNoiseFilterAtLevel,
													additionalParameters,
													true,
													progressCb,
													"Noise filter" ) == 0)
	{
		//something went wrong
		delete filteredCloud;
		filteredCloud = 0;
	}

	//delete the octree only if we built it ourselves
	if (!inputOctree)
	{
		delete octree;
		octree = 0;
	}

	if (filteredCloud)
	{
		//give the unused (reserved) memory back
		filteredCloud->resize(filteredCloud->size());
	}

	return filteredCloud;
}