std::vector<float> getComboItemAsStdFloatVector(ComboItemDescriptor desc, const ccPointCloud* cloud)
{
	unsigned n = cloud->size();
	std::vector<float> v;
	v.resize(n); //resize already allocates and value-initializes n elements (no extra reserve needed)

	if (desc.type == ComboItemDescriptor::COORDINATE)
	{
		//copy one coordinate (X, Y or Z) per point
		CCVector3 point;
		for (unsigned i = 0; i < n; i++)
		{
			cloud->getPoint(i, point);
			v[i] = point[desc.index_in_cloud];
		}
	}
	else if (desc.type == ComboItemDescriptor::SCALAR)
	{
		//copy the values of the selected scalar field
		CCLib::ScalarField* field = cloud->getScalarField(desc.index_in_cloud);
		assert(field);
		for (unsigned i = 0; i < n; i++)
			v[i] = field->getValue(i);
	}

	return v;
}
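//A minimal usage sketch (illustrative only, not part of the original file; assumes ComboItemDescriptor
//is default-constructible and exposes the 'type' and 'index_in_cloud' members used above): extract the
//Z coordinate of every point as a flat float vector.
static std::vector<float> getHeightsExample(const ccPointCloud* cloud)
{
	ComboItemDescriptor desc;
	desc.type = ComboItemDescriptor::COORDINATE;
	desc.index_in_cloud = 2; //0 = X, 1 = Y, 2 = Z
	return getComboItemAsStdFloatVector(desc, cloud);
}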
void copyScalarFields(const ccPointCloud* inCloud, ccPointCloud* outCloud, pcl::PointIndicesPtr& in2outMapping, bool overwrite = true)
{
	unsigned n_out = outCloud->size();
	assert(in2outMapping->indices.size() == n_out);

	unsigned n_scalars = inCloud->getNumberOfScalarFields();
	for (unsigned i = 0; i < n_scalars; ++i)
	{
		CCLib::ScalarField* field = inCloud->getScalarField(i);
		const char* name = field->getName();

		//we need to verify no scalar field with the same name exists in the output cloud
		int id = outCloud->getScalarFieldIndexByName(name);
		if (id >= 0) //a scalar field with the same name exists
		{
			if (overwrite)
				outCloud->deleteScalarField(id);
			else
				continue; //skip this field only (don't abort the whole copy)
		}

		ccScalarField* new_field = new ccScalarField;
		new_field->setName(name);
		//resize (not reserve) the scalar field to the outcloud size, so that setValue can be used below
		if (!new_field->resize(n_out))
		{
			//not enough memory
			new_field->release();
			continue;
		}

		//now perform point to point copy
		for (unsigned j = 0; j < n_out; ++j)
		{
			new_field->setValue(j, field->getValue(in2outMapping->indices.at(j)));
		}

		//recompute stats
		new_field->computeMinAndMax();

		//now put the scalar field back into the outCloud
		outCloud->addScalarField(new_field);
	}
}
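//A minimal usage sketch (illustrative only, not part of the original file): after a PCL-based filter
//has produced 'outCloud' from 'inCloud', 'mapping->indices[j]' must hold the index of the input point
//that generated output point j; copyScalarFields then carries every scalar field over.
static void copyAllScalarFieldsExample(const ccPointCloud* inCloud, ccPointCloud* outCloud, pcl::PointIndicesPtr& mapping)
{
	assert(mapping && mapping->indices.size() == outCloud->size());
	copyScalarFields(inCloud, outCloud, mapping, /*overwrite=*/true);
}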
CC_FILE_ERROR BinFilter::LoadFileV1(QFile& in, ccHObject& container, unsigned nbScansTotal, const LoadParameters& parameters)
{
	ccLog::Print("[BIN] Version 1.0");

	if (nbScansTotal > 99)
	{
		if (QMessageBox::question(0, QString("Oups"), QString("Hum, do you really expect %1 point clouds?").arg(nbScansTotal), QMessageBox::Yes, QMessageBox::No) == QMessageBox::No)
			return CC_FERR_WRONG_FILE_TYPE;
	}
	else if (nbScansTotal == 0)
	{
		return CC_FERR_NO_LOAD;
	}

	ccProgressDialog pdlg(true, parameters.parentWidget);
	pdlg.setMethodTitle(QObject::tr("Open Bin file (old style)"));

	for (unsigned k=0; k<nbScansTotal; k++)
	{
		HeaderFlags header;
		unsigned nbOfPoints = 0;
		if (ReadEntityHeader(in, nbOfPoints, header) < 0)
		{
			return CC_FERR_READING;
		}

		//Console::print("[BinFilter::loadModelFromBinaryFile] Entity %i : %i points, color=%i, norms=%i, dists=%i\n",k,nbOfPoints,color,norms,distances);

		if (nbOfPoints == 0)
		{
			//Console::print("[BinFilter::loadModelFromBinaryFile] nothing to do!\n");
			continue;
		}

		//progress for this cloud
		CCLib::NormalizedProgress nprogress(&pdlg, nbOfPoints);
		if (parameters.alwaysDisplayLoadDialog)
		{
			pdlg.reset();
			pdlg.setInfo(QObject::tr("cloud %1/%2 (%3 points)").arg(k + 1).arg(nbScansTotal).arg(nbOfPoints));
			pdlg.start();
			QApplication::processEvents();
		}

		//Cloud name
		char cloudName[256] = "unnamed";
		if (header.name)
		{
			for (int i=0; i<256; ++i)
			{
				if (in.read(cloudName+i,1) < 0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the cloud name!\n");
					return CC_FERR_READING;
				}
				if (cloudName[i] == 0)
				{
					break;
				}
			}
			//we force the end of the name in case it is too long!
			cloudName[255] = 0;
		}
		else
		{
			sprintf(cloudName,"unnamed - Cloud #%u",k);
		}

		//Scalar field name
		char sfName[1024] = "unnamed";
		if (header.sfName)
		{
			for (int i=0; i<1024; ++i)
			{
				if (in.read(sfName+i,1) < 0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the scalar field name!\n");
					return CC_FERR_READING;
				}
				if (sfName[i] == 0)
					break;
			}
			//we force the end of the name in case it is too long!
			sfName[1023] = 0;
		}
		else
		{
			strcpy(sfName,"Loaded scalar field");
		}

		//Creation
		ccPointCloud* loadedCloud = new ccPointCloud(cloudName);
		if (!loadedCloud)
			return CC_FERR_NOT_ENOUGH_MEMORY;

		unsigned fileChunkPos = 0;
		unsigned fileChunkSize = std::min(nbOfPoints,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);

		loadedCloud->reserveThePointsTable(fileChunkSize);
		if (header.colors)
		{
			loadedCloud->reserveTheRGBTable();
			loadedCloud->showColors(true);
		}
		if (header.normals)
		{
			loadedCloud->reserveTheNormsTable();
			loadedCloud->showNormals(true);
		}
		if (header.scalarField)
			loadedCloud->enableScalarField();

		unsigned lineRead = 0;
		int parts = 0;

		const ScalarType FORMER_HIDDEN_POINTS = (ScalarType)-1.0;

		//read the file
		for (unsigned i=0; i<nbOfPoints; ++i)
		{
			if (lineRead == fileChunkPos+fileChunkSize)
			{
				if (header.scalarField)
					loadedCloud->getCurrentInScalarField()->computeMinAndMax();

				container.addChild(loadedCloud);
				fileChunkPos = lineRead;
				fileChunkSize = std::min(nbOfPoints-lineRead,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
				char partName[64];
				++parts;
				sprintf(partName,"%s.part_%i",cloudName,parts);
				loadedCloud = new ccPointCloud(partName);
				loadedCloud->reserveThePointsTable(fileChunkSize);

				if (header.colors)
				{
					loadedCloud->reserveTheRGBTable();
					loadedCloud->showColors(true);
				}
				if (header.normals)
				{
					loadedCloud->reserveTheNormsTable();
					loadedCloud->showNormals(true);
				}
				if (header.scalarField)
					loadedCloud->enableScalarField();
			}

			float Pf[3];
			if (in.read((char*)Pf,sizeof(float)*3) < 0)
			{
				//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the %ith entity point !\n",k);
				return CC_FERR_READING;
			}
			loadedCloud->addPoint(CCVector3::fromArray(Pf));

			if (header.colors)
			{
				unsigned char C[3];
				if (in.read((char*)C,sizeof(ColorCompType)*3) < 0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the %ith entity colors !\n",k);
					return CC_FERR_READING;
				}
				loadedCloud->addRGBColor(C);
			}

			if (header.normals)
			{
				CCVector3 N;
				if (in.read((char*)N.u,sizeof(float)*3) < 0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the %ith entity norms !\n",k);
					return CC_FERR_READING;
				}
				loadedCloud->addNorm(N);
			}

			if (header.scalarField)
			{
				double D;
				if (in.read((char*)&D,sizeof(double)) < 0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the %ith entity distance!\n",k);
					return CC_FERR_READING;
				}
				ScalarType d = static_cast<ScalarType>(D);
				loadedCloud->setPointScalarValue(i,d);
			}

			lineRead++;

			if (parameters.alwaysDisplayLoadDialog && !nprogress.oneStep())
			{
				loadedCloud->resize(i+1-fileChunkPos);
				k = nbScansTotal;
				i = nbOfPoints;
			}
		}

		if (parameters.alwaysDisplayLoadDialog)
		{
			pdlg.stop();
			QApplication::processEvents();
		}

		if (header.scalarField)
		{
			CCLib::ScalarField* sf = loadedCloud->getCurrentInScalarField();
			assert(sf);
			sf->setName(sfName);

			//replace HIDDEN_VALUES by NAN_VALUES
			for (unsigned i=0; i<sf->currentSize(); ++i)
			{
				if (sf->getValue(i) == FORMER_HIDDEN_POINTS)
					sf->setValue(i,NAN_VALUE);
			}
			sf->computeMinAndMax();

			loadedCloud->setCurrentDisplayedScalarField(loadedCloud->getCurrentInScalarFieldIndex());
			loadedCloud->showSF(true);
		}

		container.addChild(loadedCloud);
	}

	return CC_FERR_NO_ERROR;
}
bool cc2Point5DimEditor::RasterGrid::fillWith(	ccGenericPointCloud* cloud,
												unsigned char projectionDimension,
												cc2Point5DimEditor::ProjectionType projectionType,
												bool interpolateEmptyCells,
												cc2Point5DimEditor::ProjectionType sfInterpolation/*=INVALID_PROJECTION_TYPE*/,
												ccProgressDialog* progressDialog/*=0*/)
{
	if (!cloud)
	{
		assert(false);
		return false;
	}

	//current parameters
	unsigned gridTotalSize = width * height;

	//vertical dimension
	const unsigned char Z = projectionDimension;
	assert(Z <= 2); //Z is unsigned, so only the upper bound needs checking
	const unsigned char X = Z == 2 ? 0 : Z + 1;
	const unsigned char Y = X == 2 ? 0 : X + 1;

	//do we need to interpolate scalar fields?
	ccPointCloud* pc = cloud->isA(CC_TYPES::POINT_CLOUD) ? static_cast<ccPointCloud*>(cloud) : 0;
	bool interpolateSF = (sfInterpolation != INVALID_PROJECTION_TYPE);
	interpolateSF &= (pc && pc->hasScalarFields());
	if (interpolateSF)
	{
		unsigned sfCount = pc->getNumberOfScalarFields();

		bool memoryError = false;
		size_t previousCount = scalarFields.size();
		if (sfCount > previousCount)
		{
			try
			{
				scalarFields.resize(sfCount,0);
			}
			catch (const std::bad_alloc&)
			{
				//not enough memory
				memoryError = true;
			}
		}

		for (size_t i=previousCount; i<sfCount; ++i)
		{
			assert(scalarFields[i] == 0);
			scalarFields[i] = new (std::nothrow) double[gridTotalSize]; //nothrow, so that the null test below is meaningful
			if (!scalarFields[i])
			{
				//not enough memory
				memoryError = true;
				break;
			}
		}

		if (memoryError)
		{
			ccLog::Warning(QString("[Rasterize] Failed to allocate memory for scalar fields!"));
		}
	}

	//filling the grid
	unsigned pointCount = cloud->size();

	double gridMaxX = gridStep * width;
	double gridMaxY = gridStep * height;

	if (progressDialog)
	{
		progressDialog->setMethodTitle("Grid generation");
		progressDialog->setInfo(qPrintable(QString("Points: %1\nCells: %2 x %3").arg(pointCount).arg(width).arg(height)));
		progressDialog->start();
		progressDialog->show();
		QCoreApplication::processEvents();
	}
	CCLib::NormalizedProgress nProgress(progressDialog,pointCount);

	for (unsigned n=0; n<pointCount; ++n)
	{
		const CCVector3* P = cloud->getPoint(n);

		CCVector3d relativePos = CCVector3d::fromArray(P->u) - minCorner;
		int i = static_cast<int>(relativePos.u[X]/gridStep);
		int j = static_cast<int>(relativePos.u[Y]/gridStep);

		//specific case: if we fall exactly on the max corner of the grid box
		if (i == static_cast<int>(width) && relativePos.u[X] == gridMaxX)
			--i;
		if (j == static_cast<int>(height) && relativePos.u[Y] == gridMaxY)
			--j;

		//we skip points outside the box!
		if (	i < 0 || i >= static_cast<int>(width)
			||	j < 0 || j >= static_cast<int>(height) )
			continue;

		assert(i >= 0 && j >= 0);

		RasterCell* aCell = data[j]+i;
		unsigned& pointsInCell = aCell->nbPoints;
		if (pointsInCell)
		{
			if (P->u[Z] < aCell->minHeight)
			{
				aCell->minHeight = P->u[Z];
				if (projectionType == PROJ_MINIMUM_VALUE)
					aCell->pointIndex = n;
			}
			else if (P->u[Z] > aCell->maxHeight)
			{
				aCell->maxHeight = P->u[Z];
				if (projectionType == PROJ_MAXIMUM_VALUE)
					aCell->pointIndex = n;
			}
		}
		else
		{
			aCell->minHeight = aCell->maxHeight = P->u[Z];
			aCell->pointIndex = n;
		}

		//sum the point heights (and squared heights, for the std. dev. computation below)
		aCell->avgHeight += P->u[Z];
		aCell->stdDevHeight += static_cast<double>(P->u[Z])*P->u[Z];

		//scalar fields
		if (interpolateSF)
		{
			int pos = j*static_cast<int>(width)+i; //pos in 2D SF grid(s)
			assert(pos < static_cast<int>(gridTotalSize));
			for (size_t k=0; k<scalarFields.size(); ++k)
			{
				if (scalarFields[k])
				{
					CCLib::ScalarField* sf = pc->getScalarField(static_cast<unsigned>(k));
					assert(sf);

					ScalarType sfValue = sf->getValue(n);
					ScalarType formerValue = static_cast<ScalarType>(scalarFields[k][pos]);

					if (pointsInCell && ccScalarField::ValidValue(formerValue))
					{
						if (ccScalarField::ValidValue(sfValue))
						{
							switch (sfInterpolation)
							{
							case PROJ_MINIMUM_VALUE:
								//keep the minimum value
								scalarFields[k][pos] = std::min<double>(formerValue,sfValue);
								break;
							case PROJ_AVERAGE_VALUE:
								//we sum all values (we will divide them later)
								scalarFields[k][pos] += sfValue;
								break;
							case PROJ_MAXIMUM_VALUE:
								//keep the maximum value
								scalarFields[k][pos] = std::max<double>(formerValue,sfValue);
								break;
							default:
								assert(false);
								break;
							}
						}
					}
					else
					{
						//for the first (valid) point, we simply have to store its SF value (in any case)
						scalarFields[k][pos] = sfValue;
					}
				}
			}
		}

		pointsInCell++;

		if (!nProgress.oneStep())
		{
			//process cancelled by user
			return false;
		}
	}

	//update SF grids for 'average' cases
	if (sfInterpolation == PROJ_AVERAGE_VALUE)
	{
		for (size_t k=0; k<scalarFields.size(); ++k)
		{
			if (scalarFields[k])
			{
				double* _gridSF = scalarFields[k];
				for (unsigned j=0; j<height; ++j)
				{
					RasterCell* cell = data[j];
					for (unsigned i=0; i<width; ++i,++cell,++_gridSF)
					{
						if (cell->nbPoints > 1)
						{
							ScalarType s = static_cast<ScalarType>(*_gridSF);
							if (ccScalarField::ValidValue(s)) //valid SF value
							{
								*_gridSF /= cell->nbPoints;
							}
						}
					}
				}
			}
		}
	}

	//update the main grid (average height and std.dev. computation + current 'height' value)
	{
		for (unsigned j=0; j<height; ++j)
		{
			RasterCell* cell = data[j];
			for (unsigned i=0; i<width; ++i,++cell)
			{
				if (cell->nbPoints > 1)
				{
					cell->avgHeight /= cell->nbPoints;
					cell->stdDevHeight = sqrt(fabs(cell->stdDevHeight/cell->nbPoints - cell->avgHeight*cell->avgHeight));
				}
				else
				{
					cell->stdDevHeight = 0;
				}

				if (cell->nbPoints != 0)
				{
					//set the right 'height' value
					switch (projectionType)
					{
					case PROJ_MINIMUM_VALUE:
						cell->h = cell->minHeight;
						break;
					case PROJ_AVERAGE_VALUE:
						cell->h = cell->avgHeight;
						break;
					case PROJ_MAXIMUM_VALUE:
						cell->h = cell->maxHeight;
						break;
					default:
						assert(false);
						break;
					}
				}
			}
		}
	}

	//compute the number of non empty cells
	nonEmptyCellCount = 0;
	{
		for (unsigned i=0; i<height; ++i)
			for (unsigned j=0; j<width; ++j)
				if (data[i][j].nbPoints)
					++nonEmptyCellCount;
	}

	//specific case: interpolate the empty cells
	if (interpolateEmptyCells)
	{
		std::vector<CCVector2> the2DPoints;
		if (nonEmptyCellCount < 3)
		{
			ccLog::Warning("[Rasterize] Not enough non-empty cells to interpolate!");
		}
		else if (nonEmptyCellCount < width * height) //otherwise it's useless!
		{
			try
			{
				the2DPoints.resize(nonEmptyCellCount);
			}
			catch (const std::bad_alloc&)
			{
				//out of memory
				ccLog::Warning("[Rasterize] Not enough memory to interpolate empty cells!");
			}
		}

		//fill 2D vector with non-empty cell indexes
		if (!the2DPoints.empty())
		{
			unsigned index = 0;
			for (unsigned j=0; j<height; ++j)
			{
				const RasterCell* cell = data[j];
				for (unsigned i=0; i<width; ++i, ++cell)
				{
					if (cell->nbPoints)
					{
						//we only use the non-empty cells to interpolate
						the2DPoints[index++] = CCVector2(static_cast<PointCoordinateType>(i),static_cast<PointCoordinateType>(j));
					}
				}
			}
			assert(index == nonEmptyCellCount);

			//mesh the '2D' points
			CCLib::Delaunay2dMesh delaunayMesh;
			char errorStr[1024];
			if (delaunayMesh.buildMesh(the2DPoints,0,errorStr))
			{
				unsigned triNum = delaunayMesh.size();
				//now we are going to 'project' all triangles on the grid
				delaunayMesh.placeIteratorAtBegining();
				for (unsigned k=0; k<triNum; ++k)
				{
					const CCLib::VerticesIndexes* tsi = delaunayMesh.getNextTriangleVertIndexes();

					//get the triangle bounding box (in grid coordinates)
					int P[3][2];
					int xMin = 0, yMin = 0, xMax = 0, yMax = 0;
					{
						for (unsigned j=0; j<3; ++j)
						{
							const CCVector2& P2D = the2DPoints[tsi->i[j]];
							P[j][0] = static_cast<int>(P2D.x);
							P[j][1] = static_cast<int>(P2D.y);
						}
						xMin = std::min(std::min(P[0][0],P[1][0]),P[2][0]);
						yMin = std::min(std::min(P[0][1],P[1][1]),P[2][1]);
						xMax = std::max(std::max(P[0][0],P[1][0]),P[2][0]);
						yMax = std::max(std::max(P[0][1],P[1][1]),P[2][1]);
					}

					//now scan the cells
					{
						//pre-computation for barycentric coordinates
						const double& valA = data[ P[0][1] ][ P[0][0] ].h;
						const double& valB = data[ P[1][1] ][ P[1][0] ].h;
						const double& valC = data[ P[2][1] ][ P[2][0] ].h;

						int det = (P[1][1]-P[2][1])*(P[0][0]-P[2][0]) + (P[2][0]-P[1][0])*(P[0][1]-P[2][1]);

						for (int j=yMin; j<=yMax; ++j)
						{
							RasterCell* cell = data[static_cast<unsigned>(j)];

							for (int i=xMin; i<=xMax; ++i)
							{
								//if the cell is empty
								if (!cell[i].nbPoints)
								{
									//we test if it's included or not in the current triangle
									//Point Inclusion in Polygon Test (inspired from W. Randolph Franklin - WRF)
									bool inside = false;
									for (int ti=0; ti<3; ++ti)
									{
										const int* P1 = P[ti];
										const int* P2 = P[(ti+1)%3];
										if ((P2[1] <= j && j < P1[1]) || (P1[1] <= j && j < P2[1]))
										{
											int t = (i-P2[0])*(P1[1]-P2[1])-(P1[0]-P2[0])*(j-P2[1]);
											if (P1[1] < P2[1])
												t = -t;
											if (t < 0)
												inside = !inside;
										}
									}
									//can we interpolate?
									if (inside)
									{
										double l1 = static_cast<double>((P[1][1]-P[2][1])*(i-P[2][0])+(P[2][0]-P[1][0])*(j-P[2][1]))/det;
										double l2 = static_cast<double>((P[2][1]-P[0][1])*(i-P[2][0])+(P[0][0]-P[2][0])*(j-P[2][1]))/det;
										double l3 = 1.0-l1-l2;

										cell[i].h = l1 * valA + l2 * valB + l3 * valC;
										assert(cell[i].h == cell[i].h);

										//interpolate SFs as well!
										for (size_t sfIndex=0; sfIndex<scalarFields.size(); ++sfIndex)
										{
											if (scalarFields[sfIndex])
											{
												double* gridSF = scalarFields[sfIndex];
												const double& sfValA = gridSF[ P[0][0] + P[0][1]*width ];
												const double& sfValB = gridSF[ P[1][0] + P[1][1]*width ];
												const double& sfValC = gridSF[ P[2][0] + P[2][1]*width ];
												gridSF[i + j*width] = l1 * sfValA + l2 * sfValB + l3 * sfValC;
											}
										}
									}
								}
							}
						}
					}
				}
			}
			else
			{
				ccLog::Warning(QString("[Rasterize] Empty cells interpolation failed: Triangle lib. said '%1'").arg(errorStr));
			}
		}
	}

	//computation of the average and extreme height values in the grid
	{
		minHeight = 0;
		maxHeight = 0;
		meanHeight = 0;
		validCellCount = 0;

		for (unsigned i=0; i<height; ++i)
		{
			for (unsigned j=0; j<width; ++j)
			{
				if (data[i][j].h == data[i][j].h) //valid height (NaN-safe self-comparison)
				{
					double h = data[i][j].h;

					if (validCellCount)
					{
						if (h < minHeight)
							minHeight = h;
						else if (h > maxHeight)
							maxHeight = h;

						meanHeight += h;
					}
					else
					{
						//first valid cell
						meanHeight = minHeight = maxHeight = h;
					}
					++validCellCount;
				}
			}
		}

		if (validCellCount)
			meanHeight /= validCellCount;
	}

	setValid(true);

	return true;
}
CC_FILE_ERROR LASFilter::saveToFile(ccHObject* entity, const char* filename)
{
	if (!entity || !filename)
		return CC_FERR_BAD_ARGUMENT;

	ccHObject::Container clouds;
	if (entity->isKindOf(CC_POINT_CLOUD))
		clouds.push_back(entity);
	else
		entity->filterChildren(clouds, true, CC_POINT_CLOUD);

	if (clouds.empty())
	{
		ccConsole::Error("No point cloud in input selection!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}
	else if (clouds.size()>1)
	{
		ccConsole::Error("Can't save more than one cloud per LAS file!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}

	//the cloud to save
	ccGenericPointCloud* theCloud = static_cast<ccGenericPointCloud*>(clouds[0]);
	unsigned numberOfPoints = theCloud->size();
	if (numberOfPoints == 0)
	{
		ccConsole::Error("Cloud is empty!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}

	//colors
	bool hasColor = theCloud->hasColors();

	//additional fields (as scalar fields)
	CCLib::ScalarField* classifSF = 0;
	CCLib::ScalarField* intensitySF = 0;
	CCLib::ScalarField* timeSF = 0;
	CCLib::ScalarField* returnNumberSF = 0;

	if (theCloud->isA(CC_POINT_CLOUD))
	{
		ccPointCloud* pc = static_cast<ccPointCloud*>(theCloud);

		//Classification
		{
			int sfIdx = pc->getScalarFieldIndexByName(CC_LAS_CLASSIFICATION_FIELD_NAME);
			if (sfIdx >= 0)
			{
				classifSF = pc->getScalarField(sfIdx);
				assert(classifSF);
				if ((int)classifSF->getMin() < 0 || (int)classifSF->getMax() > 255) //out of unsigned char bounds?
				{
					ccConsole::Warning("[LASFilter] Found a 'Classification' scalar field, but its values are outside the LAS specification (0-255)...");
					classifSF = 0;
				}
			}
		} //Classification end

		//Intensity (as a scalar field)
		{
			int sfIdx = pc->getScalarFieldIndexByName(CC_SCAN_INTENSITY_FIELD_NAME);
			if (sfIdx >= 0)
			{
				intensitySF = pc->getScalarField(sfIdx);
				assert(intensitySF);
				if ((int)intensitySF->getMin() < 0 || (int)intensitySF->getMax() > 65535) //out of unsigned short bounds?
				{
					ccConsole::Warning("[LASFilter] Found an 'Intensity' scalar field, but its values are outside the LAS specification (0-65535)...");
					intensitySF = 0;
				}
			}
		} //Intensity end

		//Time (as a scalar field)
		{
			int sfIdx = pc->getScalarFieldIndexByName(CC_SCAN_TIME_FIELD_NAME);
			if (sfIdx >= 0)
			{
				timeSF = pc->getScalarField(sfIdx);
				assert(timeSF);
			}
		} //Time end

		//Return number (as a scalar field)
		{
			int sfIdx = pc->getScalarFieldIndexByName(CC_SCAN_RETURN_INDEX_FIELD_NAME);
			if (sfIdx >= 0)
			{
				returnNumberSF = pc->getScalarField(sfIdx);
				assert(returnNumberSF);
				if ((int)returnNumberSF->getMin() < 0 || (int)returnNumberSF->getMax() > 7) //out of 3-bit bounds?
				{
					ccConsole::Warning("[LASFilter] Found a 'Return number' scalar field, but its values are outside the LAS specification (0-7)...");
					returnNumberSF = 0;
				}
			}
		} //Return number end
	}

	//open binary file for writing
	std::ofstream ofs;
	ofs.open(filename, std::ios::out | std::ios::binary);
	if (ofs.fail())
		return CC_FERR_WRITING;

	const double* shift = theCloud->getOriginalShift();

	liblas::Writer* writer = 0;
	try
	{
		liblas::Header header;

		//LAZ support based on extension!
		if (QFileInfo(filename).suffix().toUpper() == "LAZ")
		{
			header.SetCompressed(true);
		}

		//header.SetDataFormatId(liblas::ePointFormat3);
		ccBBox bBox = theCloud->getBB();
		if (bBox.isValid())
		{
			header.SetMin(-shift[0]+(double)bBox.minCorner().x,-shift[1]+(double)bBox.minCorner().y,-shift[2]+(double)bBox.minCorner().z);
			header.SetMax(-shift[0]+(double)bBox.maxCorner().x,-shift[1]+(double)bBox.maxCorner().y,-shift[2]+(double)bBox.maxCorner().z);
			CCVector3 diag = bBox.getDiagVec();

			//Set offset & scale, as points will be stored as boost::int32_t values
			//int_value = (double_value-offset)/scale
			header.SetOffset(-shift[0]+(double)bBox.minCorner().x,-shift[1]+(double)bBox.minCorner().y,-shift[2]+(double)bBox.minCorner().z);
			header.SetScale(1.0e-9*std::max<double>(diag.x,ZERO_TOLERANCE), //result must fit in 32 bits!
							1.0e-9*std::max<double>(diag.y,ZERO_TOLERANCE),
							1.0e-9*std::max<double>(diag.z,ZERO_TOLERANCE));
		}
		header.SetPointRecordsCount(numberOfPoints);
		//header.SetDataFormatId(Header::ePointFormat1);

		writer = new liblas::Writer(ofs, header);
	}
	catch (...)
	{
		return CC_FERR_WRITING;
	}

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	CCLib::NormalizedProgress nprogress(&pdlg,numberOfPoints);
	pdlg.setMethodTitle("Save LAS file");
	char buffer[256];
	sprintf(buffer,"Points: %u",numberOfPoints);
	pdlg.setInfo(buffer);
	pdlg.start();

	//liblas::Point point(boost::shared_ptr<liblas::Header>(new liblas::Header(writer->GetHeader())));
	liblas::Point point(&writer->GetHeader());

	for (unsigned i=0; i<numberOfPoints; i++)
	{
		const CCVector3* P = theCloud->getPoint(i);
		{
			double x = -shift[0]+(double)P->x;
			double y = -shift[1]+(double)P->y;
			double z = -shift[2]+(double)P->z;
			point.SetCoordinates(x, y, z);
		}

		if (hasColor)
		{
			const colorType* rgb = theCloud->getPointColor(i);
			point.SetColor(liblas::Color(rgb[0]<<8,rgb[1]<<8,rgb[2]<<8)); //DGM: LAS colors are stored on 16 bits!
		}

		if (classifSF)
		{
			liblas::Classification classif;
			classif.SetClass((boost::uint32_t)classifSF->getValue(i));
			point.SetClassification(classif);
		}

		if (intensitySF)
		{
			point.SetIntensity((boost::uint16_t)intensitySF->getValue(i));
		}

		if (timeSF)
		{
			point.SetTime((double)timeSF->getValue(i));
		}

		if (returnNumberSF)
		{
			point.SetReturnNumber((boost::uint16_t)returnNumberSF->getValue(i));
			point.SetNumberOfReturns((boost::uint16_t)returnNumberSF->getMax());
		}

		writer->WritePoint(point);

		if (!nprogress.oneStep())
			break;
	}

	delete writer;
	//ofs.close();

	return CC_FERR_NO_ERROR;
}
CC_FILE_ERROR VTKFilter::saveToFile(ccHObject* entity, QString filename, SaveParameters& parameters)
{
	if (!entity || filename.isEmpty())
		return CC_FERR_BAD_ARGUMENT;

	//look for either a cloud or a mesh
	ccMesh* mesh = ccHObjectCaster::ToMesh(entity);
	unsigned triCount = 0;
	ccGenericPointCloud* vertices = 0;
	if (mesh)
	{
		//input entity is a mesh
		triCount = mesh->size();
		if (triCount == 0)
		{
			ccLog::Warning("[VTK] Input mesh has no triangle?!");
			return CC_FERR_NO_SAVE;
		}
		vertices = mesh->getAssociatedCloud();
	}
	else
	{
		//no mesh? maybe the input entity is a cloud?
		vertices = ccHObjectCaster::ToGenericPointCloud(entity);
	}

	//in any case, we must have a valid 'vertices' entity now
	if (!vertices)
	{
		ccLog::Warning("[VTK] No point cloud nor mesh in input selection!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}
	unsigned ptsCount = vertices->size();
	if (!ptsCount)
	{
		ccLog::Warning("[VTK] No point/vertex to save?!");
		return CC_FERR_NO_SAVE;
	}

	//open ASCII file for writing
	QFile file(filename);
	if (!file.open(QIODevice::WriteOnly | QIODevice::Text))
		return CC_FERR_WRITING;
	QTextStream outFile(&file);
	outFile.setRealNumberPrecision(sizeof(PointCoordinateType) == 4 ? 8 : 12);

	//write header
	outFile << "# vtk DataFile Version 3.0" << endl;
	outFile << "vtk output" << endl;
	outFile << "ASCII" << endl;
	outFile << "DATASET " << (mesh ? "POLYDATA" : "UNSTRUCTURED_GRID") << endl;

	//data type
	QString floatType = (sizeof(PointCoordinateType) == 4 ? "float" : "double");

	/*** what shall we save now? ***/

	// write the points
	{
		outFile << "POINTS " << ptsCount << " " << floatType << endl;
		for (unsigned i=0; i<ptsCount; ++i)
		{
			const CCVector3* P = vertices->getPoint(i);
			CCVector3d Pglobal = vertices->toGlobal3d<PointCoordinateType>(*P);
			outFile << Pglobal.x << " " << Pglobal.y << " " << Pglobal.z << endl;
		}
	}

	// write triangles
	if (mesh)
	{
		outFile << "POLYGONS " << triCount << " " << 4*triCount << endl;
		mesh->placeIteratorAtBegining();
		for (unsigned i=0; i<triCount; ++i)
		{
			const CCLib::VerticesIndexes* tsi = mesh->getNextTriangleVertIndexes(); //DGM: getNextTriangleVertIndexes is faster for mesh groups!
			outFile << "3 " << tsi->i1 << " " << tsi->i2 << " " << tsi->i3 << endl;
		}
	}
	else
	{
		// write cell data
		outFile << "CELLS " << ptsCount << " " << 2*ptsCount << endl;
		for (unsigned i=0; i<ptsCount; ++i)
			outFile << "1 " << i << endl;

		outFile << "CELL_TYPES " << ptsCount << endl;
		for (unsigned i=0; i<ptsCount; ++i)
			outFile << "1 " << endl;
	}

	outFile << "POINT_DATA " << ptsCount << endl;

	// write normals
	if (vertices->hasNormals())
	{
		outFile << "NORMALS Normals "<< floatType << endl;
		for (unsigned i=0; i<ptsCount; ++i)
		{
			const CCVector3& N = vertices->getPointNormal(i);
			outFile << N.x << " " << N.y << " " << N.z << endl;
		}
	}

	// write colors
	if (vertices->hasColors())
	{
		outFile << "COLOR_SCALARS RGB 3" << endl;
		for (unsigned i=0; i<ptsCount; ++i)
		{
			const colorType* C = vertices->getPointColor(i);
			outFile << static_cast<float>(C[0])/ccColor::MAX << " " << static_cast<float>(C[1])/ccColor::MAX << " " << static_cast<float>(C[2])/ccColor::MAX << endl;
		}
	}

	// write scalar field(s)?
	if (vertices->isA(CC_TYPES::POINT_CLOUD))
	{
		ccPointCloud* pointCloud = static_cast<ccPointCloud*>(vertices);
		unsigned sfCount = pointCloud->getNumberOfScalarFields();
		for (unsigned i=0; i<sfCount; ++i)
		{
			CCLib::ScalarField* sf = pointCloud->getScalarField(i);
			outFile << "SCALARS " << QString(sf->getName()).replace(" ","_") << (sizeof(ScalarType)==4 ? " float" : " double") << " 1" << endl;
			outFile << "LOOKUP_TABLE default" << endl;
			for (unsigned j=0; j<ptsCount; ++j)
				outFile << sf->getValue(j) << endl;
		}
	}
	else //virtual point cloud, we only have access to its currently displayed scalar field
	{
		if (vertices->hasScalarFields())
		{
			outFile << "SCALARS ScalarField" << (sizeof(ScalarType)==4 ? " float" : " double") << " 1" << endl;
			outFile << "LOOKUP_TABLE default" << endl;
			for (unsigned j=0; j<ptsCount; ++j)
				outFile << vertices->getPointDisplayedDistance(j) << endl;
		}
	}

	file.close();

	return CC_FERR_NO_ERROR;
}
CC_FILE_ERROR VTKFilter::saveToFile(ccHObject* entity, const char* filename)
{
	if (!entity || !filename)
		return CC_FERR_BAD_ARGUMENT;

	//look for either a cloud or a mesh
	ccHObject::Container clouds,meshes;
	if (entity->isA(CC_TYPES::POINT_CLOUD))
		clouds.push_back(entity);
	else if (entity->isKindOf(CC_TYPES::MESH))
		meshes.push_back(entity);
	else //group?
	{
		for (unsigned i=0; i<entity->getChildrenNumber(); ++i)
		{
			ccHObject* child = entity->getChild(i);
			if (child->isKindOf(CC_TYPES::POINT_CLOUD))
				clouds.push_back(child);
			else if (child->isKindOf(CC_TYPES::MESH))
				meshes.push_back(child);
		}
	}

	if (clouds.empty() && meshes.empty())
	{
		ccLog::Error("No point cloud nor mesh in input selection!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}
	else if (clouds.size()+meshes.size() > 1)
	{
		ccLog::Error("Can't save more than one entity per VTK file!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}

	//the cloud to save
	ccGenericPointCloud* vertices = 0;
	ccMesh* mesh = 0;
	unsigned triCount = 0;
	if (!clouds.empty()) //1 cloud, no mesh
	{
		vertices = ccHObjectCaster::ToGenericPointCloud(clouds[0]);
	}
	else //1 mesh, with vertices as cloud
	{
		mesh = static_cast<ccMesh*>(meshes[0]);
		triCount = mesh->size();
		if (triCount == 0)
		{
			ccLog::Error("Mesh has no triangle?!");
			return CC_FERR_NO_SAVE;
		}
		vertices = mesh->getAssociatedCloud();
	}

	assert(vertices);
	unsigned ptsCount = vertices->size();
	if (!ptsCount)
	{
		ccLog::Error("No point/vertex to save?!");
		return CC_FERR_NO_SAVE;
	}

	//open ASCII file for writing
	QFile file(filename);
	if (!file.open(QIODevice::WriteOnly | QIODevice::Text))
		return CC_FERR_WRITING;
	QTextStream outFile(&file);
	outFile.setRealNumberPrecision(sizeof(PointCoordinateType) == 4 ? 8 : 12);

	//write header
	outFile << "# vtk DataFile Version 3.0" << endl;
	outFile << "vtk output" << endl;
	outFile << "ASCII" << endl;
	outFile << "DATASET " << (mesh ? "POLYDATA" : "UNSTRUCTURED_GRID") << endl;

	//data type
	QString floatType = (sizeof(PointCoordinateType) == 4 ? "float" : "double");

	/*** what shall we save now? ***/

	// write the points
	{
		outFile << "POINTS " << ptsCount << " " << floatType << endl;
		for (unsigned i=0; i<ptsCount; ++i)
		{
			const CCVector3* P = vertices->getPoint(i);
			CCVector3d Pglobal = vertices->toGlobal3d<PointCoordinateType>(*P);
			outFile << Pglobal.x << " " << Pglobal.y << " " << Pglobal.z << endl;
		}
	}

	// write triangles
	if (mesh)
	{
		outFile << "POLYGONS " << triCount << " " << 4*triCount << endl;
		mesh->placeIteratorAtBegining();
		for (unsigned i=0; i<triCount; ++i)
		{
			const CCLib::TriangleSummitsIndexes* tsi = mesh->getNextTriangleIndexes(); //DGM: getNextTriangleIndexes is faster for mesh groups!
			outFile << "3 " << tsi->i1 << " " << tsi->i2 << " " << tsi->i3 << endl;
		}
	}
	else
	{
		// write cell data
		outFile << "CELLS " << ptsCount << " " << 2*ptsCount << endl;
		for (unsigned i=0; i<ptsCount; ++i)
			outFile << "1 " << i << endl;

		outFile << "CELL_TYPES " << ptsCount << endl;
		for (unsigned i=0; i<ptsCount; ++i)
			outFile << "1 " << endl;
	}

	outFile << "POINT_DATA " << ptsCount << endl;

	// write normals
	if (vertices->hasNormals())
	{
		outFile << "NORMALS Normals "<< floatType << endl;
		for (unsigned i=0; i<ptsCount; ++i)
		{
			const CCVector3& N = vertices->getPointNormal(i);
			outFile << N.x << " " << N.y << " " << N.z << endl;
		}
	}

	// write colors
	if (vertices->hasColors())
	{
		outFile << "COLOR_SCALARS RGB 3" << endl;
		for (unsigned i=0; i<ptsCount; ++i)
		{
			const colorType* C = vertices->getPointColor(i);
			outFile << (float)C[0]/(float)MAX_COLOR_COMP << " " << (float)C[1]/(float)MAX_COLOR_COMP << " " << (float)C[2]/(float)MAX_COLOR_COMP << endl;
		}
	}

	// write scalar field(s)?
	if (vertices->isA(CC_TYPES::POINT_CLOUD))
	{
		ccPointCloud* pointCloud = static_cast<ccPointCloud*>(vertices);
		unsigned sfCount = pointCloud->getNumberOfScalarFields();
		for (unsigned i=0; i<sfCount; ++i)
		{
			CCLib::ScalarField* sf = pointCloud->getScalarField(i);
			outFile << "SCALARS " << QString(sf->getName()).replace(" ","_") << (sizeof(ScalarType)==4 ? " float" : " double") << " 1" << endl;
			outFile << "LOOKUP_TABLE default" << endl;
			for (unsigned j=0; j<ptsCount; ++j)
				outFile << sf->getValue(j) << endl;
		}
	}
	else //virtual point cloud, we only have access to its currently displayed scalar field
	{
		if (vertices->hasScalarFields())
		{
			outFile << "SCALARS ScalarField" << (sizeof(ScalarType)==4 ? " float" : " double") << " 1" << endl;
			outFile << "LOOKUP_TABLE default" << endl;
			for (unsigned j=0; j<ptsCount; ++j)
				outFile << vertices->getPointDisplayedDistance(j) << endl;
		}
	}

	file.close();

	return CC_FERR_NO_ERROR;
}
CC_FILE_ERROR PVFilter::saveToFile(ccHObject* entity, const char* filename)
{
	if (!entity || !filename)
		return CC_FERR_BAD_ARGUMENT;

	ccHObject::Container clouds;
	if (entity->isKindOf(CC_POINT_CLOUD))
		clouds.push_back(entity);
	else
		entity->filterChildren(clouds, true, CC_POINT_CLOUD);

	if (clouds.empty())
	{
		ccConsole::Error("No point cloud in input selection!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}
	else if (clouds.size() > 1)
	{
		ccConsole::Error("Can't save more than one cloud per PV file!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}

	//the cloud to save
	ccGenericPointCloud* theCloud = static_cast<ccGenericPointCloud*>(clouds[0]);

	//and its scalar field
	CCLib::ScalarField* sf = 0;
	if (theCloud->isA(CC_POINT_CLOUD))
		sf = static_cast<ccPointCloud*>(theCloud)->getCurrentDisplayedScalarField();
	if (!sf)
		ccConsole::Warning("No displayed scalar field! Values will all be 0!\n");

	unsigned numberOfPoints = theCloud->size();
	if (numberOfPoints == 0)
	{
		ccConsole::Error("Cloud is empty!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}

	//open binary file for writing
	FILE* theFile = fopen(filename , "wb");
	if (!theFile)
		return CC_FERR_WRITING;

	//Has the cloud been recentered?
	const double* shift = theCloud->getOriginalShift();
	if (fabs(shift[0])+fabs(shift[1])+fabs(shift[2]) > 0.0) //check all three shift components
		ccConsole::Warning(QString("[PVFilter::save] Can't recenter cloud %1 on PV file save!").arg(theCloud->getName()));

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	CCLib::NormalizedProgress nprogress(&pdlg,numberOfPoints);
	pdlg.setMethodTitle("Save PV file");
	char buffer[256];
	sprintf(buffer,"Points: %u",numberOfPoints);
	pdlg.setInfo(buffer);
	pdlg.start();

	float wBuff[3];
	float val = 0.0f;

	for (unsigned i=0; i<numberOfPoints; i++)
	{
		//conversion to float
		const CCVector3* P = theCloud->getPoint(i);
		wBuff[0] = float(P->x);
		wBuff[1] = float(P->y);
		wBuff[2] = float(P->z);

		//fwrite returns the number of items actually written (it is never negative)
		if (fwrite(wBuff,sizeof(float),3,theFile) < 3)
		{
			fclose(theFile);
			return CC_FERR_WRITING;
		}

		if (sf)
			val = (float)sf->getValue(i);

		if (fwrite(&val,sizeof(float),1,theFile) < 1)
		{
			fclose(theFile);
			return CC_FERR_WRITING;
		}

		if (!nprogress.oneStep())
			break;
	}

	fclose(theFile);

	return CC_FERR_NO_ERROR;
}
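//A minimal reading sketch for the record layout produced by PVFilter::saveToFile above (illustrative
//only; 'PVPoint' and 'readPVExample' are hypothetical helpers, not part of the original file).
//Each record is 4 raw binary floats written back to back: x, y, z, then the (displayed) scalar value.
struct PVPoint { float x, y, z, value; };

static bool readPVExample(const char* filename, std::vector<PVPoint>& points)
{
	FILE* fp = fopen(filename, "rb");
	if (!fp)
		return false;

	PVPoint p;
	//x, y, z and value are contiguous floats in the struct, so one fread per record suffices
	while (fread(&p.x, sizeof(float), 4, fp) == 4)
		points.push_back(p);

	fclose(fp);
	return true;
}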