//Hands every child over to 'newParent', preserving (or forcing) the
//'father dependent' state of each, then clears the child list at once.
void ccHObject::transferChildren(ccHObject& newParent, bool forceFatherDependent/*=false*/)
{
	for (Container::iterator childIt = m_children.begin(); childIt != m_children.end(); ++childIt)
	{
		ccHObject* child = *childIt;

		//break the dependency link with the old parent (this object)
		const bool dependent = child->getFlagState(CC_FATHER_DEPENDANT) || forceFatherDependent;
		if (dependent)
			child->setFlagState(CC_FATHER_DEPENDANT,false);

		//re-create it on the new parent's side
		newParent.addChild(child,dependent);
	}

	m_children.clear();
}
//Loads a DXF file into 'container' (only when built with CC_DXF_SUPPORT).
//'filename' is the input path; the shift-related parameters are unused here
//but kept for interface compatibility with the other file filters.
//Returns CC_FERR_NO_LOAD when nothing was imported.
CC_FILE_ERROR DxfFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, double* coordinatesShift/*=0*/)
{
#ifdef CC_DXF_SUPPORT
	DxfImporter importer(&container);

	//'filename' is already a plain C string: no need to round-trip it through
	//QString/qPrintable (qPrintable expects a QString as input)
	if (!DL_Dxf().in(filename, &importer))
	{
		return CC_FERR_READING;
	}
#else
	ccLog::Error("[DXF] Not supported in this version!");
#endif

	//success is measured by whether anything was actually added to 'container'
	return container.getChildrenNumber() == 0 ? CC_FERR_NO_LOAD : CC_FERR_NO_ERROR;
}
//Loads a 2D image file as a ccImage entity and adds it to 'container'.
//Returns CC_FERR_CONSOLE_ERROR if the image can't be decoded by Qt.
CC_FILE_ERROR ImageFileFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters)
{
	QImage qImage;
	if (!qImage.load(filename))
	{
		//fixed: the '%1' placeholder was missing its closing quote in the message
		ccLog::Warning(QString("[IMAGE] Failed to load image '%1'").arg(filename));
		return CC_FERR_CONSOLE_ERROR;
	}

	//create the corresponding ccImage entity (named after the file)
	ccImage* image = new ccImage(qImage,QFileInfo(filename).baseName());
	container.addChild(image);

	return CC_FERR_NO_ERROR;
}
//Moves 'child' from this object to 'newParent', preserving the dependency
//flags that existed in both directions between the child and its old parent.
void ccHObject::transferChild(ccHObject* child, ccHObject& newParent)
{
	assert(child);

	//memorize both dependency directions before breaking the link
	int flagsChildToParent = child->getDependencyFlagsWith(this);
	int flagsParentToChild = getDependencyFlagsWith(child);

	//detachChild also removes any dependency between 'this' and 'child'
	detachChild(child);

	//re-create the link (and both dependencies) on the new parent's side
	newParent.addChild(child,flagsParentToChild);
	child->addDependency(&newParent,flagsChildToParent);

	//after a successful transfer, either the parent is 'newParent' or a null pointer
	assert(child->getParent() == &newParent || child->getParent() == 0);
}
void ccHObject::transferChild(unsigned index, ccHObject& newParent) { ccHObject* child = getChild(index); if (!child) { assert(false); return; } //remove link from old parent bool fatherDependent = child->getFlagState(CC_FATHER_DEPENDANT); if (fatherDependent) child->setFlagState(CC_FATHER_DEPENDANT,false); removeChild(index); newParent.addChild(child,fatherDependent); }
//Loads an X3D file into 'container' via the XIOT library.
//Returns CC_FERR_READING on a parse error, CC_FERR_NO_LOAD if the file
//produced no entity, CC_FERR_NO_ERROR otherwise.
CC_FILE_ERROR X3DFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters)
{
	XIOT::X3DLoader loader;
	X3DXIOTNodeHandler handler(&container);
	loader.setNodeHandler(&handler);

	try
	{
		loader.load(qPrintable(filename));
	}
	catch (const XIOT::X3DParseException& e) //catch by const reference (C++ best practice)
	{
		ccLog::Error("[X3DFilter] Error: '%s' (line: %i, col.: %i)\n",e.getMessage().c_str(),e.getLineNumber(),e.getColumnNumber());
		return CC_FERR_READING;
	}

	//nothing was imported?
	if (container.getChildrenNumber() == 0)
		return CC_FERR_NO_LOAD;

	return CC_FERR_NO_ERROR;
}
//=====================================transferChild===============================================================// void ccHObject::transferChild(ccHObject* child, ccHObject& newParent) { assert(child); //remove link from old parent //从原先的父亲物体中删除依赖关系 int childDependencyFlags = child->getDependencyFlagsWith(this); //获取子物体对父物体的依赖关系 int parentDependencyFlags = getDependencyFlagsWith(child); //获取父亲物体对子物体的依赖关系 //automatically removes any dependency with this object //只是删除与该物体的依赖关系//两两之间 detachChild(child); //新的父物体添加孩子物体 //依赖关系也同时转移 newParent.addChild(child,parentDependencyFlags); //子物体添加依赖关系 child->addDependency(&newParent,childDependencyFlags); //after a successful transfer, either the parent is 'newParent' or a null pointer assert(child->getParent() == &newParent || child->getParent() == 0); }
//Moves every child (along with its dependency flags, in both directions)
//over to 'newParent', then clears the child list.
//NOTE: 'forceFatherDependent' is kept for interface compatibility but is
//not used by this dependency-flag based implementation (as in the original).
void ccHObject::transferChildren(ccHObject& newParent, bool forceFatherDependent/*=false*/)
{
	for (Container::iterator childIt = m_children.begin(); childIt != m_children.end(); ++childIt)
	{
		ccHObject* child = *childIt;

		//capture the dependency flags in both directions before breaking the link
		int flagsChildToFather = child->getDependencyFlagsWith(this);
		int flagsFatherToChild = getDependencyFlagsWith(child);

		//we don't call 'detachChild' here, so both dependencies must be removed explicitly
		removeDependencyWith(child);
		child->removeDependencyWith(this);

		//re-create the link (and both dependencies) on the new parent's side
		newParent.addChild(child,flagsFatherToChild);
		child->addDependency(&newParent,flagsChildToFather);

		//after a successful transfer, either the parent is 'newParent' or a null pointer
		assert(child->getParent() == &newParent || child->getParent() == 0);
	}

	m_children.clear();
}
//Loads a BIN (V2.x) file from the already-opened file 'in' into 'container'.
//After deserializing the whole tree, it re-links inter-entity references
//(mesh vertices/materials/normals/tex-coords, polyline vertices, label points),
//then remaps unique IDs so they don't collide with pre-existing entities.
//Returns CC_FERR_BROKEN_DEPENDENCY_ERROR for recoverable missing links,
//CC_FERR_MALFORMED_FILE / CC_FERR_READING / CC_FERR_CONSOLE_ERROR on failure.
CC_FILE_ERROR BinFilter::loadFileV2(QFile& in, ccHObject& container)
{
	assert(in.isOpen());

	uint32_t binVersion = 20;
	if (in.read((char*)&binVersion,4)<0)
		return CC_FERR_READING;

	if (binVersion<20) //should be superior to 2.0!
		return CC_FERR_MALFORMED_FILE;

	ccLog::Print(QString("[BIN] Version %1.%2").arg(binVersion/10).arg(binVersion%10));

	ccProgressDialog pdlg(true);
	pdlg.setMethodTitle(qPrintable(QString("Open Bin file (V%1.%2)").arg(binVersion/10).arg(binVersion%10)));

	//we keep track of the last unique ID before load
	unsigned lastUniqueIDBeforeLoad = ccObject::GetLastUniqueID();

	//we read first entity type
	unsigned classID=0;
	if (!ccObject::ReadClassIDFromFile(classID, in, binVersion))
		return CC_FERR_CONSOLE_ERROR;

	ccHObject* root = ccHObject::New(classID);
	if (!root)
		return CC_FERR_MALFORMED_FILE;

	if (!root->fromFile(in,binVersion))
	{
		//DGM: can't delete it, too dangerous (bad pointers ;)
		//delete root;
		return CC_FERR_CONSOLE_ERROR;
	}

	CC_FILE_ERROR result = CC_FERR_NO_ERROR;

	//re-link objects (serialized links are stored as unique IDs cast to pointers)
	ccHObject::Container toCheck;
	toCheck.push_back(root);
	while (!toCheck.empty())
	{
		ccHObject* currentObject = toCheck.back();
		toCheck.pop_back();
		assert(currentObject);

		//we check objects that have links to other entities (meshes, polylines, etc.)
		if (currentObject->isKindOf(CC_MESH))
		{
			ccGenericMesh* mesh = static_cast<ccGenericMesh*>(currentObject);

			//vertices
			//special case: if the parent is a mesh group, then the job has already be done once and for all!
			if (!mesh->getParent() || !mesh->getParent()->isA(CC_MESH_GROUP))
			{
				intptr_t cloudID = (intptr_t)mesh->getAssociatedCloud();
				if (cloudID>0)
				{
					ccHObject* cloud = root->find(cloudID);
					if (cloud && cloud->isKindOf(CC_POINT_CLOUD))
						mesh->setAssociatedCloud(static_cast<ccGenericPointCloud*>(cloud));
					else
					{
						//we have a problem here ;)
						mesh->setAssociatedCloud(0);
						if (mesh->getMaterialSet())
							mesh->setMaterialSet(0,false);
						//DGM: can't delete it, too dangerous (bad pointers ;)
						//delete root;
						ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find vertices (ID=%1) for mesh '%2' in the file!").arg(cloudID).arg(mesh->getName()));
						return CC_FERR_MALFORMED_FILE;
					}
				}
			}

			//materials
			intptr_t matSetID = (intptr_t)mesh->getMaterialSet();
			if (matSetID>0)
			{
				ccHObject* materials = root->find(matSetID);
				if (materials && materials->isA(CC_MATERIAL_SET))
					mesh->setMaterialSet(static_cast<ccMaterialSet*>(materials),false);
				else
				{
					//we have a (less severe) problem here ;)
					mesh->setMaterialSet(0,false);
					mesh->showMaterials(false);
					ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find shared materials set (ID=%1) for mesh '%2' in the file!").arg(matSetID).arg(mesh->getName()));
					result = CC_FERR_BROKEN_DEPENDENCY_ERROR;
				}
			}

			//per-triangle normals
			intptr_t triNormsTableID = (intptr_t)mesh->getTriNormsTable();
			if (triNormsTableID>0)
			{
				ccHObject* triNormsTable = root->find(triNormsTableID);
				if (triNormsTable && triNormsTable->isA(CC_NORMAL_INDEXES_ARRAY))
					mesh->setTriNormsTable(static_cast<NormsIndexesTableType*>(triNormsTable),false);
				else
				{
					//we have a (less severe) problem here ;)
					mesh->setTriNormsTable(0,false);
					mesh->showTriNorms(false);
					//fixed: this warning used to report 'matSetID' instead of the normals table ID
					ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find shared normals (ID=%1) for mesh '%2' in the file!").arg(triNormsTableID).arg(mesh->getName()));
					result = CC_FERR_BROKEN_DEPENDENCY_ERROR;
				}
			}

			//per-triangle texture coordinates
			intptr_t texCoordArrayID = (intptr_t)mesh->getTexCoordinatesTable();
			if (texCoordArrayID>0)
			{
				ccHObject* texCoordsTable = root->find(texCoordArrayID);
				if (texCoordsTable && texCoordsTable->isA(CC_TEX_COORDS_ARRAY))
					mesh->setTexCoordinatesTable(static_cast<TextureCoordsContainer*>(texCoordsTable),false);
				else
				{
					//we have a (less severe) problem here ;)
					mesh->setTexCoordinatesTable(0,false);
					//fixed: this warning used to report 'matSetID' instead of the texture coordinates array ID
					ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find shared texture coordinates (ID=%1) for mesh '%2' in the file!").arg(texCoordArrayID).arg(mesh->getName()));
					result = CC_FERR_BROKEN_DEPENDENCY_ERROR;
				}
			}
		}
		else if (currentObject->isKindOf(CC_POLY_LINE))
		{
			ccPolyline* poly = static_cast<ccPolyline*>(currentObject);
			intptr_t cloudID = (intptr_t)poly->getAssociatedCloud();
			ccHObject* cloud = root->find(cloudID);
			if (cloud && cloud->isKindOf(CC_POINT_CLOUD))
				poly->setAssociatedCloud(static_cast<ccGenericPointCloud*>(cloud));
			else
			{
				//we have a problem here ;)
				poly->setAssociatedCloud(0);
				//DGM: can't delete it, too dangerous (bad pointers ;)
				//delete root;
				ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find vertices (ID=%1) for polyline '%2' in the file!").arg(cloudID).arg(poly->getName()));
				return CC_FERR_MALFORMED_FILE;
			}
		}
		else if (currentObject->isA(CC_2D_LABEL))
		{
			cc2DLabel* label = static_cast<cc2DLabel*>(currentObject);
			std::vector<cc2DLabel::PickedPoint> correctedPickedPoints;

			//we must check all label 'points'!
			for (unsigned i=0;i<label->size();++i)
			{
				const cc2DLabel::PickedPoint& pp = label->getPoint(i);
				intptr_t cloudID = (intptr_t)pp.cloud;
				ccHObject* cloud = root->find(cloudID);
				if (cloud && cloud->isKindOf(CC_POINT_CLOUD))
				{
					ccGenericPointCloud* genCloud = static_cast<ccGenericPointCloud*>(cloud);
					assert(genCloud->size()>pp.index);
					correctedPickedPoints.push_back(cc2DLabel::PickedPoint(genCloud,pp.index));
				}
				else
				{
					//we have a problem here ;)
					ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find cloud (ID=%1) associated to label '%2' in the file!").arg(cloudID).arg(label->getName()));
					if (label->getParent())
						label->getParent()->removeChild(label);
					if (!label->getFlagState(CC_FATHER_DEPENDANT))
					{
						//DGM: can't delete it, too dangerous (bad pointers ;)
						//delete label;
					}
					label=0;
					break;
				}
			}

			if (label) //correct label data
			{
				assert(correctedPickedPoints.size() == label->size());
				bool visible = label->isVisible();
				QString originalName(label->getRawName());
				label->clear();
				for (unsigned i=0;i<correctedPickedPoints.size();++i)
					label->addPoint(correctedPickedPoints[i].cloud,correctedPickedPoints[i].index);
				label->setVisible(visible);
				label->setName(originalName);
			}
		}

		for (unsigned i=0;i<currentObject->getChildrenNumber();++i)
			toCheck.push_back(currentObject->getChild(i));
	}

	//update 'unique IDs' so they don't collide with entities loaded before this file
	toCheck.push_back(root);
	while (!toCheck.empty())
	{
		ccHObject* currentObject = toCheck.back();
		toCheck.pop_back();
		currentObject->setUniqueID(lastUniqueIDBeforeLoad+currentObject->getUniqueID());
		for (unsigned i=0;i<currentObject->getChildrenNumber();++i)
			toCheck.push_back(currentObject->getChild(i));
	}

	if (root->isA(CC_HIERARCHY_OBJECT))
	{
		//transfer children to container
		root->transferChildren(container,true);
	}
	else
	{
		container.addChild(root);
	}

	return result;
}
//Loads an ASCII VTK file (POLYDATA or UNSTRUCTURED_GRID datasets) into 'container'.
//Parses the file keyword by keyword: POINTS, POLYGONS/TRIANGLE_STRIPS, NORMALS,
//COLOR_SCALARS, SCALARS, LOOKUP_TABLE/VECTORS and POINT_DATA; unhandled sections
//are skipped line by line. Produces either a mesh (with its vertex cloud as a
//child) or a standalone point cloud. Binary VTK files are rejected.
//'coordinatesShiftEnabled'/'coordinatesShift' carry the global-shift state
//across multiple file loads (in/out parameters).
CC_FILE_ERROR VTKFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, CCVector3d* coordinatesShift/*=0*/)
{
	//open ASCII file for reading
	QFile file(filename);
	if (!file.open(QIODevice::ReadOnly | QIODevice::Text))
		return CC_FERR_READING;

	QTextStream inFile(&file);

	//read header (must start with the VTK magic line)
	QString nextline = inFile.readLine();
	if (!nextline.startsWith("# vtk"))
		return CC_FERR_MALFORMED_FILE;

	//second line is a free comment: echo it to the log
	nextline = inFile.readLine();
	ccLog::Print(QString("[VTK] ")+nextline);

	ccMesh* mesh = 0;
	ccPointCloud* vertices = 0;
	std::vector<int> indexes; //global so as to avoid unnecessary mem. allocations
	QString lastSfName;
	bool acceptLookupTables = true;

	QString fileType = inFile.readLine().toUpper();
	if (fileType.startsWith("BINARY"))
	{
		//binary not supported yet!
		ccLog::Error("VTK binary format not supported yet!");
		return CC_FERR_WRONG_FILE_TYPE;
	}
	else if (fileType.startsWith("ASCII"))
	{
		//allow blank lines
		QString dataType;
		if (!GetNextNonEmptyLine(inFile,dataType))
			return CC_FERR_MALFORMED_FILE;
		if (!dataType.startsWith("DATASET"))
			return CC_FERR_MALFORMED_FILE;
		dataType.remove(0,8);
		if (dataType.startsWith("POLYDATA"))
		{
			//POLYDATA: we expect a mesh + its vertices
			vertices = new ccPointCloud("vertices");
			mesh = new ccMesh(vertices);
		}
		else if (dataType.startsWith("UNSTRUCTURED_GRID"))
		{
			//UNSTRUCTURED_GRID: points only
			vertices = new ccPointCloud("unnamed - VTK unstructured grid");
		}
		else
		{
			ccLog::Error(QString("VTK entity '%1' is not supported!").arg(dataType));
			return CC_FERR_WRONG_FILE_TYPE;
		}
	}

	//loop on keywords/data
	CC_FILE_ERROR error = CC_FERR_NO_ERROR;
	CCVector3d Pshift(0,0,0);
	while (error == CC_FERR_NO_ERROR)
	{
		if (!GetNextNonEmptyLine(inFile,nextline))
			break; //end of file

		assert(!nextline.isEmpty());

		if (nextline.startsWith("POINTS"))
		{
			QStringList parts = nextline.split(" ",QString::SkipEmptyParts);
			if (parts.size() != 3)
			{
				error=CC_FERR_MALFORMED_FILE;
				break;
			}

			bool ok = false;
			unsigned ptsCount = parts[1].toInt(&ok);
			if (!ok)
			{
				error = CC_FERR_MALFORMED_FILE;
				break;
			}

			//QString dataFormat = parts[3].toUpper();
			//char buffer[8];
			//unsigned char datSize = 4;
			//if (dataFormat == "DOUBLE")
			//{
			//	datSize = 8;
			//}
			//else if (dataFormat != "FLOAT")
			//{
			//	ccLog::Error(QString("Non floating point data (%1) is not supported!").arg(dataFormat));
			//	error = CC_FERR_WRONG_FILE_TYPE;
			//	break;
			//}

			if (!vertices->reserve(ptsCount))
			{
				error = CC_FERR_NOT_ENOUGH_MEMORY;
				break;
			}

			for (unsigned i=0; i<ptsCount; ++i)
			{
				nextline = inFile.readLine();
				parts = nextline.split(" ",QString::SkipEmptyParts);
				if (parts.size() != 3)
				{
					error = CC_FERR_MALFORMED_FILE;
					break;
				}

				double Pd[3] = {0,0,0};
				for (unsigned char j=0; j<3; ++j)
				{
					Pd[j] = parts[j].toDouble(&ok);
					if (!ok)
					{
						//NOTE(review): printf-style call with a Qt '%1' placeholder — 'i' is never substituted;
						//should presumably be qPrintable(QString(...).arg(i)) — confirm against ccLog::Warning's signature
						ccLog::Warning("[VTK] Element #%1 of POINTS data is corrupted!",i);
						error = CC_FERR_MALFORMED_FILE;
						break;
					}
				}

				//first point: check for 'big' coordinates
				if (i == 0)
				{
					bool shiftAlreadyEnabled = (coordinatesShiftEnabled && *coordinatesShiftEnabled && coordinatesShift);
					if (shiftAlreadyEnabled)
						Pshift = *coordinatesShift;
					bool applyAll = false;
					//only useful if the coordinates are stored on less than 8 bytes (i.e. floats)
					if (sizeof(PointCoordinateType) < 8 && ccCoordinatesShiftManager::Handle(Pd,0,alwaysDisplayLoadDialog,shiftAlreadyEnabled,Pshift,0,applyAll))
					{
						vertices->setGlobalShift(Pshift);
						ccLog::Warning("[VTKFilter::loadFile] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z);
						//we save coordinates shift information
						if (applyAll && coordinatesShiftEnabled && coordinatesShift)
						{
							*coordinatesShiftEnabled = true;
							*coordinatesShift = Pshift;
						}
					}
				}

				vertices->addPoint(CCVector3( static_cast<PointCoordinateType>(Pd[0] + Pshift.x), static_cast<PointCoordinateType>(Pd[1] + Pshift.y), static_cast<PointCoordinateType>(Pd[2] + Pshift.z)) );
			}
			//end POINTS
		}
		else if (nextline.startsWith("POLYGONS") || nextline.startsWith("TRIANGLE_STRIPS"))
		{
			QStringList parts = nextline.split(" ",QString::SkipEmptyParts);
			if (parts.size() != 3)
			{
				error = CC_FERR_MALFORMED_FILE;
				break;
			}

			//current type name (i.e. POLYGONS or TRIANGLE_STRIPS)
			QString typeName = parts[0];
			bool isPolygon = (typeName == "POLYGONS");

			bool ok = false;
			unsigned elemCount = parts[1].toUInt(&ok);
			if (!ok)
			{
				error = CC_FERR_MALFORMED_FILE;
				break;
			}

			unsigned totalElements = parts[2].toUInt(&ok);
			if (!ok)
			{
				error = CC_FERR_MALFORMED_FILE;
				break;
			}

			assert(mesh);
			if (!mesh)
			{
				ccLog::Warning(QString("[VTK] We found %1 data while file is not composed of POLYDATA!").arg(typeName));
				mesh = new ccMesh(vertices); //however, we can still try to load it?
			}

			for (unsigned i=0; i<elemCount; ++i)
			{
				nextline = inFile.readLine();
				parts = nextline.split(" ",QString::SkipEmptyParts);
				if (parts.empty())
				{
					error = CC_FERR_MALFORMED_FILE;
					break;
				}

				//first value of each element is its vertex count
				unsigned vertCount = parts[0].toUInt(&ok);
				if (!ok || static_cast<int>(vertCount) >= parts.size())
				{
					error = CC_FERR_MALFORMED_FILE;
					break;
				}
				else if (vertCount < 3)
				{
					ccLog::Warning(QString("[VTK] Element #%1 of %2 data is corrupted! (not enough indexes)").arg(i).arg(typeName));
				}

				if (isPolygon && (vertCount != 3 && vertCount != 4)) //quads are easy to handle as well!
				{
					ccLog::Warning(QString("[VTK] POLYGON element #%1 has an unhandled size (> 4 vertices)").arg(i));
					continue;
				}

				//reserve mem. to store indexes
				if (indexes.size() < vertCount)
				{
					try
					{
						indexes.resize(vertCount);
					}
					catch (std::bad_alloc) //NOTE(review): caught by value; 'catch (const std::bad_alloc&)' is the idiomatic form
					{
						error = CC_FERR_NOT_ENOUGH_MEMORY;
						break;
					}
				}

				//decode indexes
				for (unsigned j=0; j<vertCount; ++j)
				{
					indexes[j] = parts[j+1].toUInt(&ok);
					if (!ok)
					{
						ccLog::Warning(QString("[VTK] Element #%1 of %2 data is corrupted! (invalid index value)").arg(i).arg(typeName));
						error = CC_FERR_MALFORMED_FILE;
						break;
					}
				}

				//add the triangles
				{
					assert(vertCount > 2);
					unsigned triCount = vertCount-2;
					if (mesh->size() + triCount > mesh->maxSize())
					{
						if (!mesh->reserve(mesh->size()+triCount+256)) //take some advance to avoid too many allocations
						{
							error = CC_FERR_NOT_ENOUGH_MEMORY;
							break;
						}
					}

					if (isPolygon)
					{
						//triangle or quad
						mesh->addTriangle(indexes[0],indexes[1],indexes[2]);
						if (vertCount == 4)
							mesh->addTriangle(indexes[0],indexes[2],indexes[3]);
					}
					else
					{
						//triangle strip
						for (unsigned j=0; j<triCount; ++j)
							mesh->addTriangle(indexes[j],indexes[j+1],indexes[j+2]);
					}
				}
			}

			//shrink the mesh if we over-reserved above
			if (mesh->size() != 0 && mesh->size() < mesh->maxSize())
			{
				mesh->resize(mesh->size());
			}
			//end POLYGONS or TRIANGLE_STRIPS
		}
		else if (nextline.startsWith("NORMALS"))
		{
			unsigned ptsCount = vertices->size();
			if (vertices->size() == 0)
			{
				error = CC_FERR_MALFORMED_FILE;
				break;
			}
			else
			{
				bool loadNormals = vertices->reserveTheNormsTable();
				if (!loadNormals)
					ccLog::Warning("[VTK] Not enough memory to load normals!");
				for (unsigned i=0; i<ptsCount; ++i)
				{
					//lines must be consumed even if normals can't be stored
					nextline = inFile.readLine();
					if (loadNormals)
					{
						QStringList parts = nextline.split(" ",QString::SkipEmptyParts);
						if (parts.size() != 3)
						{
							error = CC_FERR_MALFORMED_FILE;
							break;
						}
						CCVector3 N;
						for (unsigned char j=0; j<3; ++j)
						{
							bool ok;
							N.u[j] = (PointCoordinateType)parts[j].toDouble(&ok);
							if (!ok)
							{
								//NOTE(review): same Qt-placeholder/printf mismatch as in the POINTS section
								ccLog::Warning("[VTK] Element #%1 of NORMALS data is corrupted!",i);
								error = CC_FERR_MALFORMED_FILE;
								break;
							}
						}
						vertices->addNorm(N);
					}
				}
			}
			//end NORMALS
		}
		else if (nextline.startsWith("COLOR_SCALARS"))
		{
			unsigned ptsCount = vertices->size();
			if (vertices->size() == 0)
			{
				error = CC_FERR_MALFORMED_FILE;
				break;
			}
			else
			{
				bool loadRGBColors = vertices->reserveTheRGBTable();
				if (!loadRGBColors)
					ccLog::Warning("[VTK] Not enough memory to load RGB colors!");
				for (unsigned i=0; i<ptsCount; ++i)
				{
					//lines must be consumed even if colors can't be stored
					nextline = inFile.readLine();
					if (loadRGBColors)
					{
						QStringList parts = nextline.split(" ",QString::SkipEmptyParts);
						if (parts.size() != 3)
						{
							error = CC_FERR_MALFORMED_FILE;
							break;
						}
						colorType rgb[3];
						for (unsigned char j=0; j<3; ++j)
						{
							bool ok;
							//components are normalized in [0,1] in the file
							rgb[j] = (colorType)(parts[j].toDouble(&ok) * (double)MAX_COLOR_COMP);
							if (!ok)
							{
								//NOTE(review): same Qt-placeholder/printf mismatch as in the POINTS section
								ccLog::Warning("[VTK] Element #%1 of COLOR_SCALARS data is corrupted!",i);
								error = CC_FERR_MALFORMED_FILE;
								break;
							}
						}
						vertices->addRGBColor(rgb);
					}
				}
			}
			//end COLOR_SCALARS
		}
		else if (nextline.startsWith("SCALARS"))
		{
			//SCALARS only declares the field name; values come with the next LOOKUP_TABLE
			QStringList parts = nextline.split(" ",QString::SkipEmptyParts);
			lastSfName = "ScalarField";
			if (parts.size() > 1)
				lastSfName = parts[1].replace("_"," ");

			//SF already exists? make the name unique by appending the SF count
			if (vertices->getScalarFieldIndexByName(qPrintable(lastSfName)) >= 0)
				lastSfName += QString(" (%1)").arg(vertices->getNumberOfScalarFields());
			//end of SCALARS
		}
		else if (nextline.startsWith("LOOKUP_TABLE") || nextline.startsWith("VECTORS"))
		{
			unsigned ptsCount = vertices->size();
			QStringList parts = nextline.split(" ",QString::SkipEmptyParts);
			QString itemName = parts[0];
			if (parts.size() > 2)
			{
				bool ok = false;
				int valCount = parts[2].toUInt(&ok);
				if (ok)
					ptsCount = valCount;
			}

			//only create a scalar field if the value count matches the point count
			bool createSF = (vertices->size() == ptsCount && vertices->size() != 0);
			if (acceptLookupTables && !createSF)
			{
				ccLog::Warning(QString("[VTK] field %1 has not the right number of points (will be ignored)").arg(itemName));
			}
			createSF &= acceptLookupTables;
			if (createSF && lastSfName.isNull())
			{
				ccLog::Warning(QString("[VTK] field %1 has no name (will be ignored)").arg(itemName));
				createSF = false;
			}

			//create scalar field?
			int newSFIndex = createSF ? vertices->addScalarField(qPrintable(lastSfName)) : -1;
			CCLib::ScalarField* sf = newSFIndex >= 0 ? vertices->getScalarField(newSFIndex) : 0;

			lastSfName.clear(); //name is "consumed"

			for (unsigned i=0; i<ptsCount; ++i)
			{
				nextline = inFile.readLine();
				if (sf) //otherwise we simply skip the line
				{
					QStringList parts = nextline.split(" ",QString::SkipEmptyParts);
					if (parts.size() != 1)
					{
						//get rid of the scalar field :(
						vertices->deleteScalarField(newSFIndex);
						sf = 0;
						if (i == 0)
						{
							//multi-component field: not importable as a scalar field, but not an error
							ccLog::Warning(QString("[VTK] %1 field with more than one element can't be imported as scalar fields!").arg(itemName));
						}
						else
						{
							error = CC_FERR_MALFORMED_FILE;
							break;
						}
					}
					else
					{
						bool ok;
						ScalarType d = static_cast<ScalarType>(nextline.toDouble(&ok));
						sf->setValue(i, ok ? d : NAN_VALUE);
					}
				}
			}

			if (sf)
			{
				sf->computeMinAndMax();
				vertices->setCurrentDisplayedScalarField(newSFIndex);
				vertices->showSF(true);
			}
			//end of SCALARS
		}
		else if (nextline.startsWith("POINT_DATA"))
		{
			//check that the number of 'point_data' match the number of points
			QStringList parts = nextline.split(" ",QString::SkipEmptyParts);
			acceptLookupTables = false;
			if (parts.size() > 1)
			{
				bool ok;
				unsigned dataCount = parts[1].toUInt(&ok);
				if (ok && vertices && dataCount == vertices->size())
				{
					acceptLookupTables = true;
				}
			}
			if (!acceptLookupTables)
			{
				ccLog::Warning("[VTK] The number of 'POINT_DATA' doesn't match the number of loaded points... lookup tables will be ignored");
			}
		}
		else //unhandled property (CELLS, CELL_TYPES, etc.)
		{
			QStringList parts = nextline.split(" ",QString::SkipEmptyParts);
			if (parts.size() < 2)
			{
				ccLog::Warning(QString("[VTK] Unhandled element: %1").arg(parts[0]));
				error = CC_FERR_MALFORMED_FILE;
				break;
			}

			bool ok;
			unsigned elements = parts[1].toUInt(&ok);
			if (!ok)
			{
				error = CC_FERR_MALFORMED_FILE;
				break;
			}

			//skip the whole section
			for (unsigned i=0; i<elements; ++i)
			{
				inFile.readLine(); //ignore
			}
			//end unhandled property
		}

		if (error != CC_FERR_NO_ERROR)
			break;
	}

	if (error != CC_FERR_NO_ERROR)
	{
		if (mesh)
			delete mesh;
		if (vertices)
			delete vertices;
		//NOTE(review): the specific error code (e.g. CC_FERR_NOT_ENOUGH_MEMORY) is discarded here;
		//'return error;' would be more informative for the caller
		return CC_FERR_MALFORMED_FILE;
	}

	file.close();

	//an empty mesh is useless: drop it and keep the cloud only
	if (mesh && mesh->size() == 0)
	{
		delete mesh;
		mesh = 0;
	}

	if (vertices->size() == 0)
	{
		delete vertices;
		return CC_FERR_NO_LOAD;
	}

	if (mesh)
	{
		container.addChild(mesh);
		mesh->setVisible(true);
		mesh->addChild(vertices);
		vertices->setVisible(false);
		vertices->setEnabled(false);
		vertices->setName("Vertices");
		vertices->setLocked(true); //DGM: no need to lock it as it is only used by one mesh!
		//DGM: normals can be per-vertex or per-triangle so it's better to let the user do it himself later
		//Moreover it's not always good idea if the user doesn't want normals (especially in ccViewer!)
		//if (!mesh->hasNormals())
		//	mesh->computeNormals();
		ccLog::Warning("[VTK] Mesh has no normal! You can manually compute them (select it then call \"Edit > Normals > Compute\")");
		mesh->showNormals(mesh->hasNormals());
		if (vertices->hasScalarFields())
		{
			vertices->setCurrentDisplayedScalarField(0);
			mesh->showSF(true);
		}
		if (vertices->hasColors())
			mesh->showColors(true);
	}
	else
	{
		container.addChild(vertices);
		vertices->setVisible(true);
		if (vertices->hasNormals())
			vertices->showNormals(true);
		if (vertices->hasScalarFields())
		{
			vertices->setCurrentDisplayedScalarField(0);
			vertices->showSF(true);
		}
		if (vertices->hasColors())
			vertices->showColors(true);
	}

	return CC_FERR_NO_ERROR;
}
//Loads a LAS/LAZ file into 'container' via the libLAS reader.
//Points are streamed one by one; if the file exceeds CC_MAX_NUMBER_OF_POINTS_PER_CLOUD,
//it is split into multiple cloud "chunks". Optional per-point attributes
//(RGB color, classification, GPS time, intensity, return number) are only
//materialized as scalar fields / color tables once a non-trivial value is
//actually encountered, so constant fields are silently dropped.
//'coordinatesShiftEnabled'/'coordinatesShift' carry the global-shift state
//across multiple file loads (in/out parameters).
CC_FILE_ERROR LASFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, double* coordinatesShift/*=0*/)
{
	//opening file
	std::ifstream ifs;
	ifs.open(filename, std::ios::in | std::ios::binary);
	if (ifs.fail())
		return CC_FERR_READING;

	liblas::Reader* reader = 0;
	unsigned nbOfPoints = 0;
	std::vector<std::string> dimensions;

	try
	{
		//using factory for automatic and transparent handling of compressed/uncompressed files
		reader = new liblas::Reader(liblas::ReaderFactory().CreateWithStream(ifs));
		liblas::Header const& header = reader->GetHeader();
#ifdef _DEBUG
		//ccConsole::Print("[LAS FILE] %s - signature: %s",filename,header.GetFileSignature().c_str());
#endif
		//get fields present in file
		dimensions = header.GetSchema().GetDimensionNames();
		//and of course the number of points
		nbOfPoints = header.GetPointRecordsCount();
	}
	catch (...)
	{
		delete reader;
		ifs.close();
		return CC_FERR_READING;
	}

	if (nbOfPoints==0)
	{
		//strange file ;)
		delete reader;
		ifs.close();
		return CC_FERR_NO_LOAD;
	}

	//detect which of the (handled) optional dimensions are present in the file
	liblas::Color rgbColorMask; //(0,0,0) on construction
	bool hasClassif = false;
	bool hasIntensity = false;
	bool hasTime = false;
	bool hasReturnNumber = false;
	for (unsigned k=0;k<dimensions.size();++k)
	{
		QString dim = QString(dimensions[k].c_str()).toUpper();
		bool handled = true;
		if (dim == "RED")
			rgbColorMask.SetRed(~0);
		else if (dim == "BLUE")
			rgbColorMask.SetBlue(~0);
		else if (dim == "GREEN")
			rgbColorMask.SetGreen(~0);
		else if (dim == "CLASSIFICATION")
			hasClassif = true;
		else if (dim == "TIME")
			hasTime = true;
		else if (dim == "INTENSITY")
			hasIntensity = true;
		else if (dim == "RETURN NUMBER")
			hasReturnNumber = true;
		else if (dim != "X" && dim != "Y" && dim != "Z")
			handled = false;
		ccConsole::Print(QString("[LAS FILE] Found dimension '%1' (%2)").arg(dimensions[k].c_str()).arg(handled ? "handled" : "not handled"));
	}
	bool hasColor = (rgbColorMask[0] || rgbColorMask[1] || rgbColorMask[2]);

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	CCLib::NormalizedProgress nprogress(&pdlg,nbOfPoints);
	pdlg.setMethodTitle("Open LAS file");
	pdlg.setInfo(qPrintable(QString("Points: %1").arg(nbOfPoints)));
	pdlg.start();

	//number of points read from the beginning of the current cloud part
	unsigned pointsRead = 0;
	double Pshift[3] = {0.0,0.0,0.0};

	//by default we read color as 8 bits integers and we will change this to 16 bits if it's not (16 bits is the standard!)
	unsigned char colorCompBitDec = 0;
	colorType rgb[3] = {0,0,0};

	ccPointCloud* loadedCloud = 0;

	//optional scalar fields, plus the first value seen for each (used to detect constant fields)
	ccScalarField* classifSF = 0;
	uint8_t firstClassifValue = 0;
	ccScalarField* timeSF = 0;
	double firstTime = 0.0;
	ccScalarField* intensitySF = 0;
	uint16_t firstIntensity = 0;
	ccScalarField* returnNumberSF = 0;
	uint16_t firstReturnNumber = 0;

	//if the file is too big, we will chunk it in multiple parts
	unsigned int fileChunkPos = 0;
	unsigned int fileChunkSize = 0;

	while (true)
	{
		//if we reach the end of the file, or the max. cloud size limit (in which case we create a new chunk)
		bool newPointAvailable = (nprogress.oneStep() && reader->ReadNextPoint());

		if (!newPointAvailable || pointsRead == fileChunkPos+fileChunkSize)
		{
			//finalize the current chunk (if any) before starting the next one
			if (loadedCloud)
			{
				if (loadedCloud->size())
				{
					bool thisChunkHasColors = loadedCloud->hasColors();
					loadedCloud->showColors(thisChunkHasColors);
					if (hasColor && !thisChunkHasColors)
						ccLog::Warning("[LAS FILE] Color field was all black! We ignored it...");

					if (hasClassif)
					{
						if (classifSF)
						{
							classifSF->computeMinAndMax();
							int cMin = (int)classifSF->getMin();
							int cMax = (int)classifSF->getMax();
							classifSF->setColorRampSteps(cMax-cMin);
							//classifSF->setMinSaturation(cMin);
							int sfIndex = loadedCloud->addScalarField(classifSF);
							if (!loadedCloud->hasDisplayedScalarField())
							{
								loadedCloud->setCurrentDisplayedScalarField(sfIndex);
								loadedCloud->showSF(!thisChunkHasColors);
							}
						}
						else
						{
							ccLog::Warning(QString("[LAS FILE] All classification values were the same (%1)! We ignored them...").arg(firstClassifValue));
						}
					}

					if (hasIntensity)
					{
						if (intensitySF)
						{
							intensitySF->computeMinAndMax();
							intensitySF->setColorScale(ccColorScalesManager::GetDefaultScale(ccColorScalesManager::GREY));
							int sfIndex = loadedCloud->addScalarField(intensitySF);
							if (!loadedCloud->hasDisplayedScalarField())
							{
								loadedCloud->setCurrentDisplayedScalarField(sfIndex);
								loadedCloud->showSF(!thisChunkHasColors);
							}
						}
						else
						{
							ccLog::Warning(QString("[LAS FILE] All intensities were the same (%1)! We ignored them...").arg(firstIntensity));
						}
					}

					if (hasTime)
					{
						if (timeSF)
						{
							timeSF->computeMinAndMax();
							int sfIndex = loadedCloud->addScalarField(timeSF);
							if (!loadedCloud->hasDisplayedScalarField())
							{
								loadedCloud->setCurrentDisplayedScalarField(sfIndex);
								loadedCloud->showSF(!thisChunkHasColors);
							}
						}
						else
						{
							ccLog::Warning(QString("[LAS FILE] All timestamps were the same (%1)! We ignored them...").arg(firstTime));
						}
					}

					if (hasReturnNumber)
					{
						if (returnNumberSF)
						{
							returnNumberSF->computeMinAndMax();
							int rMin = (int)returnNumberSF->getMin();
							int rMax = (int)returnNumberSF->getMax();
							returnNumberSF->setColorRampSteps(rMax-rMin);
							int sfIndex = loadedCloud->addScalarField(returnNumberSF);
							if (!loadedCloud->hasDisplayedScalarField())
							{
								loadedCloud->setCurrentDisplayedScalarField(sfIndex);
								loadedCloud->showSF(!thisChunkHasColors);
							}
						}
						else
						{
							ccLog::Warning(QString("[LAS FILE] All return numbers were the same (%1)! We ignored them...").arg(firstReturnNumber));
						}
					}

					//if we have reserved too much memory
					if (loadedCloud->size() < loadedCloud->capacity())
						loadedCloud->resize(loadedCloud->size());

					//name the chunk, appending an index when there is more than one
					QString chunkName("unnamed - Cloud");
					unsigned n = container.getChildrenNumber();
					if (n!=0) //if we have more than one cloud, we append an index
					{
						if (n==1) //we must also update the first one!
							container.getChild(0)->setName(chunkName+QString(" #1"));
						chunkName += QString(" #%1").arg(n+1);
					}
					loadedCloud->setName(chunkName);
					container.addChild(loadedCloud);
					loadedCloud=0;
				}
				else
				{
					//empty cloud?!
					delete loadedCloud;
					loadedCloud=0;
				}

				//release the (shared) scalar fields — the cloud keeps its own reference if added
				if (classifSF)
					classifSF->release();
				classifSF=0;
				if (intensitySF)
					intensitySF->release();
				intensitySF=0;
				if (returnNumberSF)
					returnNumberSF->release();
				returnNumberSF=0;
				if (timeSF)
					timeSF->release();
				timeSF=0;
			}

			if (!newPointAvailable)
				break; //end of the file (or cancel requested)

			//otherwise, we must create a new cloud
			fileChunkPos = pointsRead;
			fileChunkSize = std::min(nbOfPoints-pointsRead,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
			loadedCloud = new ccPointCloud();
			if (!loadedCloud->reserveThePointsTable(fileChunkSize))
			{
				ccLog::Warning("[LASFilter::loadFile] Not enough memory!");
				delete loadedCloud;
				delete reader;
				ifs.close();
				return CC_FERR_NOT_ENOUGH_MEMORY;
			}
			loadedCloud->setOriginalShift(Pshift[0],Pshift[1],Pshift[2]);

			//DGM: from now on, we only enable scalar fields when we detect a valid value!
			if (hasClassif)
			{
				assert(!classifSF);
				firstClassifValue = 0;
			}
			if (hasTime)
			{
				assert(!timeSF);
				firstTime = 0.0;
			}
			if (hasIntensity)
			{
				assert(!intensitySF);
				firstIntensity=0;
			}
			if (hasReturnNumber)
			{
				assert(!returnNumberSF);
				firstReturnNumber = 0;
			}
		}

		assert(newPointAvailable);
		const liblas::Point& p = reader->GetPoint();

		//first point: check for 'big' coordinates
		if (pointsRead==0)
		{
			double P[3]={p.GetX(),p.GetY(),p.GetZ()};
			bool shiftAlreadyEnabled = (coordinatesShiftEnabled && *coordinatesShiftEnabled && coordinatesShift);
			if (shiftAlreadyEnabled)
				memcpy(Pshift,coordinatesShift,sizeof(double)*3);
			bool applyAll=false;
			if (ccCoordinatesShiftManager::Handle(P,0,alwaysDisplayLoadDialog,shiftAlreadyEnabled,Pshift,0,applyAll))
			{
				loadedCloud->setOriginalShift(Pshift[0],Pshift[1],Pshift[2]);
				ccConsole::Warning("[LASFilter::loadFile] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)",Pshift[0],Pshift[1],Pshift[2]);
				//we save coordinates shift information
				if (applyAll && coordinatesShiftEnabled && coordinatesShift)
				{
					*coordinatesShiftEnabled = true;
					coordinatesShift[0] = Pshift[0];
					coordinatesShift[1] = Pshift[1];
					coordinatesShift[2] = Pshift[2];
				}
			}
		}

		CCVector3 P(p.GetX()+Pshift[0],p.GetY()+Pshift[1],p.GetZ()+Pshift[2]);
		loadedCloud->addPoint(P);

		//color field
		if (hasColor)
		{
			//Warning: LAS colors are stored on 16 bits!
			liblas::Color col = p.GetColor();
			col[0] &= rgbColorMask[0];
			col[1] &= rgbColorMask[1];
			col[2] &= rgbColorMask[2];

			//if we don't have reserved a color field yet, we check first that color is not black
			bool pushColor = true;
			if (!loadedCloud->hasColors())
			{
				//if the color is not black, we are sure it's a valid color field!
				if (col[0] || col[1] || col[2])
				{
					if (loadedCloud->reserveTheRGBTable())
					{
						//we must set the color (black) of all the previously skipped points
						for (unsigned i=0;i<loadedCloud->size()-1;++i)
							loadedCloud->addRGBColor(ccColor::black);
					}
					else
					{
						ccConsole::Warning("[LAS FILE] Not enough memory: color field will be ignored!");
						hasColor = false; //no need to retry with the other chunks anyway
						pushColor = false;
					}
				}
				else //otherwise we ignore it for the moment (we'll add it later if necessary)
				{
					pushColor = false;
				}
			}

			//do we need to push this color?
			if (pushColor)
			{
				//we test if the color components are on 16 bits (standard) or only on 8 bits (it happens ;)
				if (colorCompBitDec==0)
				{
					if ( (col[0] & 0xFF00) || (col[1] & 0xFF00) || (col[2] & 0xFF00))
					{
						//the color components are on 16 bits!
						ccLog::Print("[LAS FILE] Color components are coded on 16 bits");
						colorCompBitDec = 8;
						//we fix all the previously read colors
						for (unsigned i=0;i<loadedCloud->size()-1;++i)
							loadedCloud->setPointColor(i,ccColor::black); //255 >> 8 = 0!
					}
				}
				rgb[0]=(colorType)(col[0]>>colorCompBitDec);
				rgb[1]=(colorType)(col[1]>>colorCompBitDec);
				rgb[2]=(colorType)(col[2]>>colorCompBitDec);
				loadedCloud->addRGBColor(rgb);
			}
		}

		if (hasClassif)
		{
			uint8_t intValue = p.GetClassification().GetClass();
			if (classifSF)
			{
				classifSF->addElement(intValue);
			}
			else
			{
				//first point? we track its value
				if (loadedCloud->size()==1)
				{
					firstClassifValue = intValue;
				}

				if (intValue != firstClassifValue || firstClassifValue > 1) //0 = Created, never classified, 1 = Unclassified
				{
					classifSF = new ccScalarField(CC_LAS_CLASSIFICATION_FIELD_NAME);
					if (classifSF->reserve(fileChunkSize))
					{
						classifSF->link();
						//we must set the classification value (firstClassifValue) of all the previously skipped points
						for (unsigned i=0;i<loadedCloud->size()-1;++i)
							classifSF->addElement(firstClassifValue);
						classifSF->addElement(intValue);
					}
					else
					{
						//NOTE(review): typo 'classificaiton' in the user-facing message (left as-is here)
						ccConsole::Warning("[LAS FILE] Not enough memory: classificaiton field will be ignored!");
						hasClassif = false; //no need to retry with the other chunks anyway
						classifSF->release();
						classifSF=0;
					}
				}
			}
		}

		if (hasTime)
		{
			double timeValue = p.GetTime();
			if (timeSF)
			{
				timeSF->addElement(timeValue);
			}
			else
			{
				//first point? we track its value
				if (loadedCloud->size()==1)
				{
					firstTime = timeValue;
				}
				else if (timeValue != firstTime)
				{
					timeSF = new ccScalarField(CC_SCAN_TIME_FIELD_NAME);
					if (timeSF->reserve(fileChunkSize))
					{
						timeSF->link();
						//we must set the timestamp value (firstTime) of all the previously skipped points
						for (unsigned i=0;i<loadedCloud->size()-1;++i)
							timeSF->addElement(firstTime);
						timeSF->addElement(timeValue);
					}
					else
					{
						ccConsole::Warning("[LAS FILE] Not enough memory: 'time' field will be ignored!");
						hasTime = false; //no need to retry with the other chunks anyway
						timeSF->release();
						timeSF=0;
					}
				}
			}
		}

		if (hasIntensity)
		{
			uint16_t intValue = p.GetIntensity();
			if (intensitySF)
			{
				intensitySF->addElement(intValue);
			}
			else
			{
				//first point? we track its value
				if (loadedCloud->size()==1)
				{
					firstIntensity = intValue;
				}

				if (intValue != firstIntensity || (firstIntensity != 0 && firstIntensity != 65535))
				{
					intensitySF = new ccScalarField(CC_SCAN_INTENSITY_FIELD_NAME);
					if (intensitySF->reserve(fileChunkSize))
					{
						intensitySF->link();
						//we must set the intensity (firstIntensity) of all the previously skipped points
						for (unsigned i=0;i<loadedCloud->size()-1;++i)
							intensitySF->addElement(firstIntensity);
						intensitySF->addElement(intValue);
					}
					else
					{
						ccConsole::Warning("[LAS FILE] Not enough memory: intensity field will be ignored!");
						hasIntensity = false; //no need to retry with the other chunks anyway
						intensitySF->release();
						intensitySF=0;
					}
				}
			}
		}

		if (hasReturnNumber)
		{
			uint16_t intValue = p.GetReturnNumber();
			if (returnNumberSF)
			{
				returnNumberSF->addElement(intValue);
			}
			else
			{
				//first point? we track its value
				if (loadedCloud->size()==1)
				{
					firstReturnNumber = intValue;
				}

				if (intValue != firstReturnNumber)
				{
					returnNumberSF = new ccScalarField(CC_SCAN_RETURN_INDEX_FIELD_NAME);
					if (returnNumberSF->reserve(fileChunkSize))
					{
						returnNumberSF->link();
						//we must set the return index (firstReturnNumber) of all the previously skipped points
						for (unsigned i=0;i<loadedCloud->size()-1;++i)
							returnNumberSF->addElement(firstReturnNumber);
						returnNumberSF->addElement(intValue);
					}
					else
					{
						ccConsole::Warning("[LAS FILE] Not enough memory: return number field will be ignored!");
						hasReturnNumber = false; //no need to retry with the other chunks anyway
						returnNumberSF->release();
						returnNumberSF=0;
					}
				}
			}
		}

		++pointsRead;
	}

	if (reader)
		delete reader;
	reader=0;
	ifs.close();

	return CC_FERR_NO_ERROR;
}
//! Loads a PN file (raw binary: 6 floats per point = XYZ coordinates + XYZ normal).
/** The number of points is deduced from the file size. Clouds bigger than
	CC_MAX_NUMBER_OF_POINTS_PER_CLOUD are split into multiple chunks, each added
	to 'container' as a separate ccPointCloud.
	\param filename file to load
	\param container output container (receives one child cloud per chunk)
	\param alwaysDisplayLoadDialog unused here (kept for interface consistency with the other filters)
	\param coordinatesShiftEnabled unused here
	\param coordinatesShift unused here
	\return error code (CC_FERR_NO_ERROR on success)
**/
CC_FILE_ERROR PNFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, double* coordinatesShift/*=0*/)
{
	//opening file (binary mode; closed automatically by QFile's destructor on every return path)
	QFile in(filename);
	if (!in.open(QIODevice::ReadOnly))
		return CC_FERR_READING;

	//we deduce the points number from the file size
	qint64 fileSize = in.size();
	qint64 singlePointSize = 6*sizeof(float); //(x,y,z) + (nx,ny,nz)
	//check that size is ok
	if (fileSize == 0)
		return CC_FERR_NO_LOAD;
	if ((fileSize % singlePointSize) != 0)
		return CC_FERR_MALFORMED_FILE;
	unsigned numberOfPoints = (unsigned)(fileSize / singlePointSize);

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	CCLib::NormalizedProgress nprogress(&pdlg,numberOfPoints);
	pdlg.setMethodTitle("Open PN file");
	pdlg.setInfo(qPrintable(QString("Points: %1").arg(numberOfPoints)));
	pdlg.start();

	ccPointCloud* loadedCloud = 0;
	//if the file is too big, it will be chunked in multiple parts
	unsigned chunkIndex = 0;
	unsigned fileChunkPos = 0;
	unsigned fileChunkSize = 0;
	//number of points read for the current cloud part
	unsigned pointsRead = 0;
	CC_FILE_ERROR result = CC_FERR_NO_ERROR;

	//size (in bytes) of one coordinate or normal triplet
	const qint64 tripletSize = 3*sizeof(float);

	for (unsigned i=0; i<numberOfPoints; i++)
	{
		//if we reach the max. cloud size limit, we create a new chunk
		if (pointsRead == fileChunkPos+fileChunkSize)
		{
			//store the previous chunk (if any) in the output container
			if (loadedCloud)
				container.addChild(loadedCloud);
			fileChunkPos = pointsRead;
			fileChunkSize = std::min<unsigned>(numberOfPoints-pointsRead,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
			loadedCloud = new ccPointCloud(QString("unnamed - Cloud #%1").arg(++chunkIndex));
			if (!loadedCloud || !loadedCloud->reserveThePointsTable(fileChunkSize) || !loadedCloud->reserveTheNormsTable())
			{
				result = CC_FERR_NOT_ENOUGH_MEMORY;
				if (loadedCloud)
					delete loadedCloud;
				loadedCloud = 0;
				break;
			}
			loadedCloud->showNormals(true);
		}

		//we read the 3 coordinates of the point
		//FIX: require a FULL triplet (the former '>= 0' test silently accepted
		//truncated reads, leaving garbage in the buffer)
		float rBuff[3];
		if (in.read((char*)rBuff,tripletSize) == tripletSize)
		{
			//conversion to CCVector3
			CCVector3 P(	(PointCoordinateType)rBuff[0],
							(PointCoordinateType)rBuff[1],
							(PointCoordinateType)rBuff[2] );
			loadedCloud->addPoint(P);
		}
		else
		{
			result = CC_FERR_READING;
			break;
		}

		//then the 3 components of the normal vector (same full-read requirement)
		if (in.read((char*)rBuff,tripletSize) == tripletSize)
		{
			//conversion to PointCoordinateType[3]
			PointCoordinateType N[3] = {	(PointCoordinateType)rBuff[0],
											(PointCoordinateType)rBuff[1],
											(PointCoordinateType)rBuff[2] };
			loadedCloud->addNorm(N);
		}
		else
		{
			//add fake normal for consistency then break
			loadedCloud->addNorm(s_defaultNorm);
			result = CC_FERR_READING;
			break;
		}

		++pointsRead;

		if (!nprogress.oneStep())
		{
			result = CC_FERR_CANCELED_BY_USER;
			break;
		}
	}

	in.close();

	if (loadedCloud)
	{
		//shrink the last chunk to the number of points actually read
		if (loadedCloud->size() < loadedCloud->capacity())
			loadedCloud->resize(loadedCloud->size());
		container.addChild(loadedCloud);
	}

	return result;
}
CC_FILE_ERROR AsciiFilter::loadCloudFromFormatedAsciiFile( const QString& filename, ccHObject& container, const AsciiOpenDlg::Sequence& openSequence, char separator, unsigned approximateNumberOfLines, qint64 fileSize, unsigned maxCloudSize, unsigned skipLines, LoadParameters& parameters) { //we may have to "slice" clouds when opening them if they are too big! maxCloudSize = std::min(maxCloudSize,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD); unsigned cloudChunkSize = std::min(maxCloudSize,approximateNumberOfLines); unsigned cloudChunkPos = 0; unsigned chunkRank = 1; //we initialize the loading accelerator structure and point cloud int maxPartIndex = -1; cloudAttributesDescriptor cloudDesc = prepareCloud(openSequence, cloudChunkSize, maxPartIndex, separator, chunkRank); if (!cloudDesc.cloud) return CC_FERR_NOT_ENOUGH_MEMORY; //we re-open the file (ASCII mode) QFile file(filename); if (!file.open(QFile::ReadOnly)) { //we clear already initialized data clearStructure(cloudDesc); return CC_FERR_READING; } QTextStream stream(&file); //we skip lines as defined on input { for (unsigned i=0; i<skipLines; ++i) { stream.readLine(); } } //progress indicator ccProgressDialog pdlg(true); CCLib::NormalizedProgress nprogress(&pdlg,approximateNumberOfLines); pdlg.setMethodTitle(qPrintable(QString("Open ASCII file [%1]").arg(filename))); pdlg.setInfo(qPrintable(QString("Approximate number of points: %1").arg(approximateNumberOfLines))); pdlg.start(); //buffers ScalarType D = 0; CCVector3d P(0,0,0); CCVector3d Pshift(0,0,0); CCVector3 N(0,0,0); ccColor::Rgb col; //other useful variables unsigned linesRead = 0; unsigned pointsRead = 0; CC_FILE_ERROR result = CC_FERR_NO_ERROR; //main process unsigned nextLimit = /*cloudChunkPos+*/cloudChunkSize; QString currentLine = stream.readLine(); while (!currentLine.isNull()) { ++linesRead; //comment if (currentLine.startsWith("//")) { currentLine = stream.readLine(); continue; } if (currentLine.size() == 0) { ccLog::Warning("[AsciiFilter::Load] Line %i is 
corrupted (empty)!",linesRead); currentLine = stream.readLine(); continue; } //if we have reached the max. number of points per cloud if (pointsRead == nextLimit) { ccLog::PrintDebug("[ASCII] Point %i -> end of chunk (%i points)",pointsRead,cloudChunkSize); //we re-evaluate the average line size { double averageLineSize = static_cast<double>(file.pos())/(pointsRead+skipLines); double newNbOfLinesApproximation = std::max(1.0, static_cast<double>(fileSize)/averageLineSize - static_cast<double>(skipLines)); //if approximation is smaller than actual one, we add 2% by default if (newNbOfLinesApproximation <= pointsRead) { newNbOfLinesApproximation = std::max(static_cast<double>(cloudChunkPos+cloudChunkSize)+1.0,static_cast<double>(pointsRead) * 1.02); } approximateNumberOfLines = static_cast<unsigned>(ceil(newNbOfLinesApproximation)); ccLog::PrintDebug("[ASCII] New approximate nb of lines: %i",approximateNumberOfLines); } //we try to resize actual clouds if (cloudChunkSize < maxCloudSize || approximateNumberOfLines-cloudChunkPos <= maxCloudSize) { ccLog::PrintDebug("[ASCII] We choose to enlarge existing clouds"); cloudChunkSize = std::min(maxCloudSize,approximateNumberOfLines-cloudChunkPos); if (!cloudDesc.cloud->reserve(cloudChunkSize)) { ccLog::Error("Not enough memory! Process stopped ..."); result = CC_FERR_NOT_ENOUGH_MEMORY; break; } } else //otherwise we have to create new clouds { ccLog::PrintDebug("[ASCII] We choose to instantiate new clouds"); //we store (and resize) actual cloud if (!cloudDesc.cloud->resize(cloudChunkSize)) ccLog::Warning("Memory reallocation failed ... 
some memory may have been wasted ..."); if (!cloudDesc.scalarFields.empty()) { for (unsigned k=0; k<cloudDesc.scalarFields.size(); ++k) cloudDesc.scalarFields[k]->computeMinAndMax(); cloudDesc.cloud->setCurrentDisplayedScalarField(0); cloudDesc.cloud->showSF(true); } //we add this cloud to the output container container.addChild(cloudDesc.cloud); cloudDesc.reset(); //and create new one cloudChunkPos = pointsRead; cloudChunkSize = std::min(maxCloudSize,approximateNumberOfLines-cloudChunkPos); cloudDesc = prepareCloud(openSequence, cloudChunkSize, maxPartIndex, separator, ++chunkRank); if (!cloudDesc.cloud) { ccLog::Error("Not enough memory! Process stopped ..."); break; } cloudDesc.cloud->setGlobalShift(Pshift); } //we update the progress info nprogress.scale(approximateNumberOfLines,100,true); pdlg.setInfo(qPrintable(QString("Approximate number of points: %1").arg(approximateNumberOfLines))); nextLimit = cloudChunkPos+cloudChunkSize; } //we split current line QStringList parts = currentLine.split(separator,QString::SkipEmptyParts); int nParts = parts.size(); if (nParts > maxPartIndex) { //(X,Y,Z) if (cloudDesc.xCoordIndex >= 0) P.x = parts[cloudDesc.xCoordIndex].toDouble(); if (cloudDesc.yCoordIndex >= 0) P.y = parts[cloudDesc.yCoordIndex].toDouble(); if (cloudDesc.zCoordIndex >= 0) P.z = parts[cloudDesc.zCoordIndex].toDouble(); //first point: check for 'big' coordinates if (pointsRead == 0) { if (HandleGlobalShift(P,Pshift,parameters)) { cloudDesc.cloud->setGlobalShift(Pshift); ccLog::Warning("[ASCIIFilter::loadFile] Cloud has been recentered! 
Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z); } } //add point cloudDesc.cloud->addPoint(CCVector3::fromArray((P+Pshift).u)); //Normal vector if (cloudDesc.hasNorms) { if (cloudDesc.xNormIndex >= 0) N.x = static_cast<PointCoordinateType>(parts[cloudDesc.xNormIndex].toDouble()); if (cloudDesc.yNormIndex >= 0) N.y = static_cast<PointCoordinateType>(parts[cloudDesc.yNormIndex].toDouble()); if (cloudDesc.zNormIndex >= 0) N.z = static_cast<PointCoordinateType>(parts[cloudDesc.zNormIndex].toDouble()); cloudDesc.cloud->addNorm(N); } //Colors if (cloudDesc.hasRGBColors) { if (cloudDesc.iRgbaIndex >= 0) { const uint32_t rgb = parts[cloudDesc.iRgbaIndex].toInt(); col.r = ((rgb >> 16) & 0x0000ff); col.g = ((rgb >> 8 ) & 0x0000ff); col.b = ((rgb ) & 0x0000ff); } else if (cloudDesc.fRgbaIndex >= 0) { const float rgbf = parts[cloudDesc.fRgbaIndex].toFloat(); const uint32_t rgb = (uint32_t)(*((uint32_t*)&rgbf)); col.r = ((rgb >> 16) & 0x0000ff); col.g = ((rgb >> 8 ) & 0x0000ff); col.b = ((rgb ) & 0x0000ff); } else { if (cloudDesc.redIndex >= 0) { float multiplier = cloudDesc.hasFloatRGBColors[0] ? static_cast<float>(ccColor::MAX) : 1.0f; col.r = static_cast<ColorCompType>(parts[cloudDesc.redIndex].toFloat() * multiplier); } if (cloudDesc.greenIndex >= 0) { float multiplier = cloudDesc.hasFloatRGBColors[1] ? static_cast<float>(ccColor::MAX) : 1.0f; col.g = static_cast<ColorCompType>(parts[cloudDesc.greenIndex].toFloat() * multiplier); } if (cloudDesc.blueIndex >= 0) { float multiplier = cloudDesc.hasFloatRGBColors[2] ? static_cast<float>(ccColor::MAX) : 1.0f; col.b = static_cast<ColorCompType>(parts[cloudDesc.blueIndex].toFloat() * multiplier); } } cloudDesc.cloud->addRGBColor(col.rgb); } else if (cloudDesc.greyIndex >= 0)
//! Loads an 'old style' (version 1.0) BIN file.
/** Reads 'nbScansTotal' entities sequentially from 'in'. Each entity has a
	header (flags + point count), optional names, then per-point records
	(coordinates + optional color/normal/scalar value). Oversized entities are
	split into multiple clouds ("<name>.part_N").
	\param in input file (already open, positioned after the global header)
	\param container output container (receives the loaded clouds)
	\param nbScansTotal declared number of entities in the file
	\param parameters load options (progress dialog, parent widget, etc.)
	\return error code (CC_FERR_NO_ERROR on success)
**/
CC_FILE_ERROR BinFilter::LoadFileV1(QFile& in, ccHObject& container, unsigned nbScansTotal, const LoadParameters& parameters)
{
	ccLog::Print("[BIN] Version 1.0");

	//sanity check: an absurdly high entity count probably means a corrupted/wrong file
	if (nbScansTotal > 99)
	{
		if (QMessageBox::question(0, QString("Oups"), QString("Hum, do you really expect %1 point clouds?").arg(nbScansTotal), QMessageBox::Yes, QMessageBox::No) == QMessageBox::No)
			return CC_FERR_WRONG_FILE_TYPE;
	}
	else if (nbScansTotal == 0)
	{
		return CC_FERR_NO_LOAD;
	}

	ccProgressDialog pdlg(true, parameters.parentWidget);
	pdlg.setMethodTitle(QObject::tr("Open Bin file (old style)"));

	for (unsigned k=0; k<nbScansTotal; k++)
	{
		HeaderFlags header;
		unsigned nbOfPoints = 0;
		if (ReadEntityHeader(in, nbOfPoints, header) < 0)
		{
			return CC_FERR_READING;
		}

		//Console::print("[BinFilter::loadModelFromBinaryFile] Entity %i : %i points, color=%i, norms=%i, dists=%i\n",k,nbOfPoints,color,norms,distances);

		//empty entity: nothing to do, skip to the next one
		if (nbOfPoints == 0)
		{
			//Console::print("[BinFilter::loadModelFromBinaryFile] rien a faire !\n");
			continue;
		}

		//progress for this cloud
		CCLib::NormalizedProgress nprogress(&pdlg, nbOfPoints);
		if (parameters.alwaysDisplayLoadDialog)
		{
			pdlg.reset();
			pdlg.setInfo(QObject::tr("cloud %1/%2 (%3 points)").arg(k + 1).arg(nbScansTotal).arg(nbOfPoints));
			pdlg.start();
			QApplication::processEvents();
		}

		//Cloud name (read byte by byte up to the first null character)
		char cloudName[256] = "unnamed";
		if (header.name)
		{
			for (int i=0; i<256; ++i)
			{
				if (in.read(cloudName+i,1) < 0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the cloud name!\n");
					return CC_FERR_READING;
				}
				if (cloudName[i] == 0)
				{
					break;
				}
			}
			//we force the end of the name in case it is too long!
			cloudName[255] = 0;
		}
		else
		{
			sprintf(cloudName,"unnamed - Cloud #%u",k);
		}

		//Scalar field name (same byte-by-byte, null-terminated scheme)
		char sfName[1024] = "unnamed";
		if (header.sfName)
		{
			for (int i=0; i<1024; ++i)
			{
				if (in.read(sfName+i,1) < 0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the cloud name!\n");
					return CC_FERR_READING;
				}
				if (sfName[i] == 0)
					break;
			}
			//we force the end of the name in case it is too long!
			sfName[1023] = 0;
		}
		else
		{
			strcpy(sfName,"Loaded scalar field");
		}

		//Creation
		ccPointCloud* loadedCloud = new ccPointCloud(cloudName);
		if (!loadedCloud)
			return CC_FERR_NOT_ENOUGH_MEMORY;

		//chunking state: entities bigger than CC_MAX_NUMBER_OF_POINTS_PER_CLOUD
		//are split into several clouds
		unsigned fileChunkPos = 0;
		unsigned fileChunkSize = std::min(nbOfPoints,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);

		loadedCloud->reserveThePointsTable(fileChunkSize);
		if (header.colors)
		{
			loadedCloud->reserveTheRGBTable();
			loadedCloud->showColors(true);
		}
		if (header.normals)
		{
			loadedCloud->reserveTheNormsTable();
			loadedCloud->showNormals(true);
		}
		if (header.scalarField)
			loadedCloud->enableScalarField();

		unsigned lineRead = 0;
		int parts = 0; //number of extra 'part' clouds created so far

		//legacy sentinel value that used to mark hidden points (remapped to NaN below)
		const ScalarType FORMER_HIDDEN_POINTS = (ScalarType)-1.0;

		//read the file
		for (unsigned i=0; i<nbOfPoints; ++i)
		{
			//current chunk is full: store it and start a new 'part' cloud
			if (lineRead == fileChunkPos+fileChunkSize)
			{
				if (header.scalarField)
					loadedCloud->getCurrentInScalarField()->computeMinAndMax();
				container.addChild(loadedCloud);
				fileChunkPos = lineRead;
				fileChunkSize = std::min(nbOfPoints-lineRead,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
				char partName[64];
				++parts;
				sprintf(partName,"%s.part_%i",cloudName,parts);
				loadedCloud = new ccPointCloud(partName);
				loadedCloud->reserveThePointsTable(fileChunkSize);
				if (header.colors)
				{
					loadedCloud->reserveTheRGBTable();
					loadedCloud->showColors(true);
				}
				if (header.normals)
				{
					loadedCloud->reserveTheNormsTable();
					loadedCloud->showNormals(true);
				}
				if (header.scalarField)
					loadedCloud->enableScalarField();
			}

			//point coordinates (3 floats)
			float Pf[3];
			if (in.read((char*)Pf,sizeof(float)*3) < 0)
			{
				//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the %ith entity point !\n",k);
				return CC_FERR_READING;
			}
			loadedCloud->addPoint(CCVector3::fromArray(Pf));

			//optional color (3 bytes)
			if (header.colors)
			{
				unsigned char C[3];
				if (in.read((char*)C,sizeof(ColorCompType)*3) < 0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the %ith entity colors !\n",k);
					return CC_FERR_READING;
				}
				loadedCloud->addRGBColor(C);
			}

			//optional normal (3 floats)
			if (header.normals)
			{
				CCVector3 N;
				if (in.read((char*)N.u,sizeof(float)*3) < 0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the %ith entity norms !\n",k);
					return CC_FERR_READING;
				}
				loadedCloud->addNorm(N);
			}

			//optional scalar value (stored as a double, converted to ScalarType)
			if (header.scalarField)
			{
				double D;
				if (in.read((char*)&D,sizeof(double)) < 0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the %ith entity distance!\n",k);
					return CC_FERR_READING;
				}
				ScalarType d = static_cast<ScalarType>(D);
				loadedCloud->setPointScalarValue(i,d);
			}

			lineRead++;

			//user cancellation: shrink the current cloud to what was actually read
			//and force both loops to terminate by saturating their counters
			if (parameters.alwaysDisplayLoadDialog && !nprogress.oneStep())
			{
				loadedCloud->resize(i+1-fileChunkPos);
				k=nbScansTotal;
				i=nbOfPoints;
			}
		}

		if (parameters.alwaysDisplayLoadDialog)
		{
			pdlg.stop();
			QApplication::processEvents();
		}

		if (header.scalarField)
		{
			CCLib::ScalarField* sf = loadedCloud->getCurrentInScalarField();
			assert(sf);
			sf->setName(sfName);

			//replace HIDDEN_VALUES by NAN_VALUES
			for (unsigned i=0; i<sf->currentSize(); ++i)
			{
				if (sf->getValue(i) == FORMER_HIDDEN_POINTS)
					sf->setValue(i,NAN_VALUE);
			}
			sf->computeMinAndMax();

			loadedCloud->setCurrentDisplayedScalarField(loadedCloud->getCurrentInScalarFieldIndex());
			loadedCloud->showSF(true);
		}

		container.addChild(loadedCloud);
	}

	return CC_FERR_NO_ERROR;
}
//! Loads an ICM file (a small ASCII descriptor referencing a cloud/mesh file
//! and a calibrated-images descriptor, both relative to the ICM file's path).
/** \param filename ICM file to load
	\param container output container (receives the loaded entities)
	\param parameters load options, forwarded to the sub-file loader
	\return error code (CC_FERR_NO_ERROR on success)
**/
CC_FILE_ERROR IcmFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters)
{
	//open the file (text mode)
	FILE *fp = fopen(qPrintable(filename), "rt");
	if (!fp)
		return CC_FERR_READING;

	//line buffer
	char line[MAX_ASCII_FILE_LINE_LENGTH];

	//read the header (magic string)
	if (!fgets(line, MAX_ASCII_FILE_LINE_LENGTH , fp))
	{
		fclose(fp);
		return CC_FERR_READING;
	}
	if (strncmp(line,"#CC_ICM_FILE",12)!=0)
	{
		fclose(fp);
		return CC_FERR_WRONG_FILE_TYPE;
	}

	//extract the relative path (referenced files live next to the ICM file)
	QString path = QFileInfo(filename).absolutePath();

	//name of the associated cloud/mesh file
	char cloudFileName[MAX_ASCII_FILE_LINE_LENGTH];
	if (!fgets(line, MAX_ASCII_FILE_LINE_LENGTH , fp))
	{
		fclose(fp);
		return CC_FERR_READING;
	}
	if (strncmp(line,"FILE_NAME=",10)!=0)
	{
		fclose(fp);
		return CC_FERR_WRONG_FILE_TYPE;
	}
	//safe: the destination buffer is as large as the line buffer itself
	sscanf(line,"FILE_NAME=%s",cloudFileName);

	//type (extension) of the associated file
	char subFileType[12];
	if (!fgets(line, MAX_ASCII_FILE_LINE_LENGTH , fp))
	{
		fclose(fp);
		return CC_FERR_READING;
	}
	if (strncmp(line,"FILE_TYPE=",10)!=0)
	{
		fclose(fp);
		return CC_FERR_WRONG_FILE_TYPE;
	}
	//FIX: bound the conversion to 11 chars + null terminator; an unbounded %s
	//could overflow the 12-byte 'subFileType' buffer on a malformed file
	sscanf(line,"FILE_TYPE=%11s",subFileType);

	FileIOFilter::Shared filter = FileIOFilter::FindBestFilterForExtension(subFileType);
	if (!filter)
	{
		ccLog::Warning(QString("[ICM] No I/O filter found for loading file '%1' (type = '%2')").arg(cloudFileName).arg(subFileType));
		fclose(fp);
		return CC_FERR_UNKNOWN_FILE;
	}

	//load the corresponding file (potentially containing several clouds)
	CC_FILE_ERROR result = CC_FERR_NO_ERROR;
	ccHObject* entities = FileIOFilter::LoadFromFile(QString("%0/%1").arg(path).arg(cloudFileName), parameters, filter, result);
	if (!entities)
	{
		fclose(fp);
		return CC_FERR_READING;
	}
	container.addChild(entities);

	//load the calibrated images
	if (!fgets(line, MAX_ASCII_FILE_LINE_LENGTH , fp))
	{
		ccLog::Error("[ICM] Read error (IMAGES_DESCRIPTOR)! No image loaded");
		fclose(fp);
		return CC_FERR_READING;
	}
	else
	{
		if (strncmp(line,"IMAGES_DESCRIPTOR=",18)!=0)
		{
			fclose(fp);
			return CC_FERR_WRONG_FILE_TYPE;
		}
		char imagesDescriptorFileName[MAX_ASCII_FILE_LINE_LENGTH];
		//safe: destination buffer matches the line buffer size
		sscanf(line,"IMAGES_DESCRIPTOR=%s",imagesDescriptorFileName);
		int n = LoadCalibratedImages(entities,path,imagesDescriptorFileName,entities->getBB_recursive());
		ccLog::Print("[ICM] %i image(s) loaded ...",n);
	}

	fclose(fp);
	return CC_FERR_NO_ERROR;
}
CC_FILE_ERROR VTKFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters) { //open ASCII file for reading QFile file(filename); if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) return CC_FERR_READING; QTextStream inFile(&file); //read header QString nextline = inFile.readLine(); if (!nextline.startsWith("# vtk")) return CC_FERR_MALFORMED_FILE; //comment nextline = inFile.readLine(); ccLog::Print(QString("[VTK] ")+nextline); ccMesh* mesh = 0; ccPointCloud* vertices = 0; std::vector<int> indexes; //global so as to avoid unnecessary mem. allocations QString lastSfName; bool acceptLookupTables = true; unsigned lastDataSize = 0; QString fileType = inFile.readLine().toUpper(); if (fileType.startsWith("BINARY")) { //binary not supported yet! ccLog::Error("VTK binary format not supported yet!"); return CC_FERR_WRONG_FILE_TYPE; } else if (fileType.startsWith("ASCII")) { //allow blank lines QString dataType; if (!GetNextNonEmptyLine(inFile,dataType)) return CC_FERR_MALFORMED_FILE; if (!dataType.startsWith("DATASET")) return CC_FERR_MALFORMED_FILE; dataType.remove(0,8); if (dataType.startsWith("POLYDATA")) { vertices = new ccPointCloud("vertices"); mesh = new ccMesh(vertices); } else if (dataType.startsWith("UNSTRUCTURED_GRID")) { vertices = new ccPointCloud("unnamed - VTK unstructured grid"); } else { ccLog::Error(QString("VTK entity '%1' is not supported!").arg(dataType)); return CC_FERR_WRONG_FILE_TYPE; } } //loop on keywords/data CC_FILE_ERROR error = CC_FERR_NO_ERROR; CCVector3d Pshift(0,0,0); bool skipReadLine = false; while (error == CC_FERR_NO_ERROR) { if (!skipReadLine && !GetNextNonEmptyLine(inFile,nextline)) break; //end of file skipReadLine = false; assert(!nextline.isEmpty()); if (nextline.startsWith("POINTS")) { QStringList parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.size() != 3) { error=CC_FERR_MALFORMED_FILE; break; } bool ok = false; unsigned ptsCount = parts[1].toInt(&ok); if (!ok) { error = 
CC_FERR_MALFORMED_FILE; break; } //QString dataFormat = parts[3].toUpper(); //char buffer[8]; //unsigned char datSize = 4; //if (dataFormat == "DOUBLE") //{ // datSize = 8; //} //else if (dataFormat != "FLOAT") //{ // ccLog::Error(QString("Non floating point data (%1) is not supported!").arg(dataFormat)); // error = CC_FERR_WRONG_FILE_TYPE; // break; //} if (!vertices->reserve(ptsCount)) { error = CC_FERR_NOT_ENOUGH_MEMORY; break; } //warning: multiple points can be stored on a single line! unsigned iPt = 0; CCVector3d Pd(0,0,0); unsigned coordIndex = 0; while (iPt < ptsCount) { nextline = inFile.readLine(); parts = nextline.split(" ",QString::SkipEmptyParts); for (int i=0; i<parts.size(); ++i) { Pd.u[coordIndex] = parts[i].toDouble(&ok); if (!ok) { ccLog::Warning("[VTK] Element #%1 of POINTS data is corrupted!",iPt); error = CC_FERR_MALFORMED_FILE; iPt = ptsCount; break; } if (coordIndex == 2) { //first point: check for 'big' coordinates if (iPt == 0) { if (HandleGlobalShift(Pd,Pshift,parameters)) { vertices->setGlobalShift(Pshift); ccLog::Warning("[VTKFilter::loadFile] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z); } } CCVector3 P = CCVector3::fromArray((Pd + Pshift).u); vertices->addPoint(P); coordIndex = 0; ++iPt; } else { ++coordIndex; } } } //end POINTS } else if (nextline.startsWith("POLYGONS") || nextline.startsWith("TRIANGLE_STRIPS")) { QStringList parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.size() != 3) { error = CC_FERR_MALFORMED_FILE; break; } //current type name (i.e. 
POLYGONS or TRIANGLE_STRIPS) QString typeName = parts[0]; bool isPolygon = (typeName == "POLYGONS"); bool ok = false; unsigned elemCount = parts[1].toUInt(&ok); if (!ok) { error = CC_FERR_MALFORMED_FILE; break; } // unsigned totalElements = parts[2].toUInt(&ok); if (!ok) { error = CC_FERR_MALFORMED_FILE; break; } assert(mesh); if (!mesh) { ccLog::Warning(QString("[VTK] We found %1 data while file is not composed of POLYDATA!").arg(typeName)); mesh = new ccMesh(vertices); //however, we can still try to load it? } for (unsigned i=0; i<elemCount; ++i) { nextline = inFile.readLine(); parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.empty()) { error = CC_FERR_MALFORMED_FILE; break; } unsigned vertCount = parts[0].toUInt(&ok); if (!ok || static_cast<int>(vertCount) >= parts.size()) { error = CC_FERR_MALFORMED_FILE; break; } else if (vertCount < 3) { ccLog::Warning(QString("[VTK] Element #%1 of %2 data is corrupted! (not enough indexes)").arg(i).arg(typeName)); } if (isPolygon && (vertCount != 3 && vertCount != 4)) //quads are easy to handle as well! { ccLog::Warning(QString("[VTK] POLYGON element #%1 has an unhandled size (> 4 vertices)").arg(i)); continue; } //reserve mem to. store indexes if (indexes.size() < vertCount) { try { indexes.resize(vertCount); } catch (const std::bad_alloc&) { error = CC_FERR_NOT_ENOUGH_MEMORY; break; } } //decode indexes for (unsigned j=0; j<vertCount; ++j) { indexes[j] = parts[j+1].toUInt(&ok); if (!ok) { ccLog::Warning(QString("[VTK] Element #%1 of %2 data is corrupted! 
(invalid index value)").arg(i).arg(typeName)); error = CC_FERR_MALFORMED_FILE; break; } } //add the triangles { assert(vertCount > 2); unsigned triCount = vertCount-2; if (mesh->size() + triCount > mesh->maxSize()) { if (!mesh->reserve(mesh->size()+triCount+256)) //take some advance to avoid too many allocations { error = CC_FERR_NOT_ENOUGH_MEMORY; break; } } if (isPolygon) { //triangle or quad mesh->addTriangle(indexes[0],indexes[1],indexes[2]); if (vertCount == 4) mesh->addTriangle(indexes[0],indexes[2],indexes[3]); } else { //triangle strip for (unsigned j=0; j<triCount; ++j) mesh->addTriangle(indexes[j],indexes[j+1],indexes[j+2]); } } } if (mesh->size() != 0 && mesh->size() < mesh->maxSize()) { mesh->resize(mesh->size()); } //end POLYGONS or TRIANGLE_STRIPS } else if (nextline.startsWith("NORMALS")) { if (lastDataSize == 0) lastDataSize = vertices->size(); if (lastDataSize == 0) { error = CC_FERR_MALFORMED_FILE; break; } bool loadNormals = false; if (lastDataSize == vertices->size()) { if (!vertices->reserveTheNormsTable()) ccLog::Warning("[VTK] Not enough memory to load normals!"); else loadNormals = true; } //warning: multiple normals can be stored on a single line! 
unsigned iNorm = 0; CCVector3 N; unsigned coordIndex = 0; while (iNorm < lastDataSize) { nextline = inFile.readLine(); QStringList parts = nextline.split(" ",QString::SkipEmptyParts); for (int i=0; i<parts.size(); ++i) { bool ok; N.u[coordIndex] = static_cast<PointCoordinateType>(parts[i].toDouble(&ok)); if (!ok) { ccLog::Warning("[VTK] Element #%1 of NORMALS data is corrupted!",iNorm); error = CC_FERR_MALFORMED_FILE; iNorm = lastDataSize; break; } if (coordIndex == 2) { if (loadNormals) vertices->addNorm(N); coordIndex = 0; ++iNorm; } else { ++coordIndex; } } } lastDataSize = 0; //lastDataSize is consumed //end NORMALS } else if (nextline.startsWith("COLOR_SCALARS")) { if (lastDataSize == 0) lastDataSize = vertices->size(); if (lastDataSize == 0) { error = CC_FERR_MALFORMED_FILE; break; } bool loadRGBColors = vertices->reserveTheRGBTable(); if (!loadRGBColors) ccLog::Warning("[VTK] Not enough memory to load RGB colors!"); //warning: multiple colors can be stored on a single line! unsigned iCol = 0; colorType rgb[3]; unsigned coordIndex = 0; while (iCol < lastDataSize) { nextline = inFile.readLine(); QStringList parts = nextline.split(" ",QString::SkipEmptyParts); for (int i=0; i<parts.size(); ++i) { bool ok; rgb[coordIndex] = static_cast<colorType>(parts[i].toDouble(&ok) * ccColor::MAX); if (!ok) { ccLog::Warning("[VTK] Element #%1 of COLOR_SCALARS data is corrupted!",iCol); error = CC_FERR_MALFORMED_FILE; iCol = lastDataSize; break; } if (coordIndex == 2) { if (loadRGBColors) vertices->addRGBColor(rgb); coordIndex = 0; ++iCol; } else { ++coordIndex; } } } lastDataSize = 0; //lastDataSize is consumed //end COLOR_SCALARS } else if (nextline.startsWith("SCALARS")) { QStringList parts = nextline.split(" ",QString::SkipEmptyParts); lastSfName = "ScalarField"; if (parts.size() > 1) lastSfName = parts[1].replace("_"," "); //SF already exists? 
if (vertices->getScalarFieldIndexByName(qPrintable(lastSfName)) >= 0) lastSfName += QString(" (%1)").arg(vertices->getNumberOfScalarFields()); //end of SCALARS } else if (nextline.startsWith("LOOKUP_TABLE") || nextline.startsWith("VECTORS")) { bool expected = (lastDataSize != 0); assert(!acceptLookupTables || expected); //i.e. lastDataSize shouldn't be 0 for 'accepted' lookup tables QStringList parts = nextline.split(" ",QString::SkipEmptyParts); QString itemName = parts[0]; if (parts.size() > 2) { bool ok = false; int valCount = parts[2].toUInt(&ok); if (ok) lastDataSize = valCount; } else if (!expected) { ccLog::Warning(QString("[VTK] field %1 has no size?!").arg(itemName)); error = CC_FERR_MALFORMED_FILE; break; } bool createSF = (vertices->size() == lastDataSize && vertices->size() != 0); if (acceptLookupTables && !createSF) { ccLog::Warning(QString("[VTK] field %1 has not the right number of points (will be ignored)").arg(itemName)); } createSF &= (acceptLookupTables || expected); if (createSF && lastSfName.isNull()) { ccLog::Warning(QString("[VTK] field %1 has no name (will be ignored)").arg(itemName)); createSF = false; } else if (!expected) { ccLog::Warning(QString("[VTK] field %1 was not expected (will be ignored)").arg(itemName)); } //create scalar field? ccScalarField* sf = 0; if (createSF) { sf = new ccScalarField(qPrintable(lastSfName)); if (!sf->reserve(lastDataSize)) { ccLog::Warning(QString("[VTK] Not enough memory to load scalar field' %1' (will be ignored)").arg(lastSfName)); sf->release(); sf = 0; } } lastSfName.clear(); //name is "consumed" //warning: multiple colors can be stored on a single line! 
unsigned iScal = 0; while (iScal < lastDataSize) { nextline = inFile.readLine(); QStringList parts = nextline.split(" ",QString::SkipEmptyParts); if (expected) { for (int i=0; i<parts.size(); ++i) { bool ok; ScalarType d = static_cast<ScalarType>(parts[i].toDouble(&ok)); if (!ok) { ccLog::Warning("[VTK] Element #%1 of LOOKUP_TABLE/VECTORS data is corrupted!",iScal); error = CC_FERR_MALFORMED_FILE; if (sf) { sf->release(); sf = 0; } iScal = lastDataSize; break; } if (sf) sf->addElement(d); ++iScal; } } else { //hard to guess the right format, but an unexpected field seem to always be //organized as 'one element per line' ++iScal; } } lastDataSize = 0; //lastDataSize is "consumed" acceptLookupTables = false; if (sf) { sf->computeMinAndMax(); int newSFIndex = vertices->addScalarField(sf); if (newSFIndex == 0) vertices->setCurrentDisplayedScalarField(newSFIndex); vertices->showSF(true); } //end of SCALARS } else if (nextline.startsWith("POINT_DATA")) { //check that the number of 'point_data' match the number of points QStringList parts = nextline.split(" ",QString::SkipEmptyParts); acceptLookupTables = false; if (parts.size() > 1) { bool ok; lastDataSize = parts[1].toUInt(&ok); acceptLookupTables = ok && vertices; } } else if (nextline.startsWith("FIELD")) { QStringList parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.size() < 2) { error = CC_FERR_MALFORMED_FILE; break; } bool ok; unsigned elements = parts[2].toUInt(&ok); if (!ok) { error = CC_FERR_MALFORMED_FILE; break; } elements *= 2; //we don't know how to handle those properly but at least //we know that for FIELD elements, there's 2 lines per element... for (unsigned i=0; i<elements; ++i) { inFile.readLine(); //ignore } } else //unhandled property (CELLS, CELL_TYPES, etc.) 
{ QStringList parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.size() < 2) { ccLog::Warning(QString("[VTK] Unhandled element: %1").arg(parts[0])); error = CC_FERR_MALFORMED_FILE; break; } bool ok; unsigned elements = parts[1].toUInt(&ok); if (!ok) { error = CC_FERR_MALFORMED_FILE; break; } if (nextline.startsWith("CELL_DATA")) { //read next line (in case we actually know how to read it! if (!GetNextNonEmptyLine(inFile,nextline)) { error = CC_FERR_MALFORMED_FILE; break; } skipReadLine = true; if ( nextline.startsWith("SCALARS") || nextline.startsWith("NORMALS") || nextline.startsWith("COLOR_SCALARS")) { lastDataSize = elements; acceptLookupTables = false; //this property is for triangles! continue; } } //we'll try to blindly skip the elements... for (unsigned i=0; i<elements; ++i) { inFile.readLine(); //ignore } //end unhandled property } if (error != CC_FERR_NO_ERROR) break; } file.close(); if (vertices && vertices->size() == 0) { delete vertices; vertices = 0; if (error == CC_FERR_NO_ERROR) error = CC_FERR_NO_LOAD; } if (mesh && (mesh->size() == 0 || vertices == 0)) { delete mesh; mesh = 0; if (error == CC_FERR_NO_ERROR) error = CC_FERR_NO_LOAD; } if (mesh) { container.addChild(mesh); mesh->setVisible(true); mesh->addChild(vertices); vertices->setEnabled(false); vertices->setName("Vertices"); vertices->setLocked(true); //DGM: no need to lock it as it is only used by one mesh! //DGM: normals can be per-vertex or per-triangle so it's better to let the user do it himself later //Moreover it's not always good idea if the user doesn't want normals (especially in ccViewer!) if (!mesh->hasNormals()) { // mesh->computeNormals(); ccLog::Warning("[VTK] Mesh has no normal! 
You can manually compute them (select it then call \"Edit > Normals > Compute\")"); } mesh->showNormals(mesh->hasNormals()); if (vertices->hasScalarFields()) { vertices->setCurrentDisplayedScalarField(0); mesh->showSF(true); } if (vertices->hasColors()) mesh->showColors(true); } else if (vertices) { container.addChild(vertices); vertices->setVisible(true); if (vertices->hasNormals()) vertices->showNormals(true); if (vertices->hasScalarFields()) { vertices->setCurrentDisplayedScalarField(0); vertices->showSF(true); } if (vertices->hasColors()) vertices->showColors(true); } return error; }
//! Loads a LAS/LAZ file into 'container'.
/** Streams points with libLAS, splitting the input into multiple clouds ("chunks")
    of at most CC_MAX_NUMBER_OF_POINTS_PER_CLOUD points each. Colors and scalar
    fields are allocated lazily: storage is only reserved once a non-default value
    is actually encountered, and all previously skipped points are then back-filled.
    \param filename path of the file to load
    \param container parent entity that receives one ccPointCloud per chunk
    \param alwaysDisplayLoadDialog whether the field-selection and shift dialogs may be shown
    \param coordinatesShiftEnabled in/out: whether a global coordinate shift is already active
    \param coordinatesShift in/out: the global coordinate shift vector (only read/written if enabled)
    \return error code (CC_FERR_NO_ERROR on success)
**/
CC_FILE_ERROR LASFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, CCVector3d* coordinatesShift/*=0*/)
{
    //opening file
    std::ifstream ifs;
    ifs.open(filename, std::ios::in | std::ios::binary);
    if (ifs.fail())
        return CC_FERR_READING;

    liblas::Reader* reader = 0;
    unsigned nbOfPoints = 0;
    std::vector<std::string> dimensions;
    try
    {
        reader = new liblas::Reader(liblas::ReaderFactory().CreateWithStream(ifs));
        //using factory for automatic and transparent
        //handling of compressed/uncompressed files
        liblas::Header const& header = reader->GetHeader();
        ccLog::PrintDebug(QString("[LAS FILE] %1 - signature: %2").arg(filename).arg(header.GetFileSignature().c_str()));

        //get fields present in file
        dimensions = header.GetSchema().GetDimensionNames();
        //and of course the number of points
        nbOfPoints = header.GetPointRecordsCount();
    }
    catch (...)
    {
        //libLAS throws on malformed/unreadable headers
        delete reader;
        ifs.close();
        return CC_FERR_READING;
    }

    if (nbOfPoints==0)
    {
        //strange file ;)
        delete reader;
        ifs.close();
        return CC_FERR_NO_LOAD;
    }

    //dialog to choose the fields to load
    //(the dialog instance is kept alive in a static shared pointer so that
    //the user's previous choices are remembered across calls)
    if (!s_lasOpenDlg)
        s_lasOpenDlg = QSharedPointer<LASOpenDlg>(new LASOpenDlg());
    s_lasOpenDlg->setDimensions(dimensions);
    if (alwaysDisplayLoadDialog && !s_lasOpenDlg->autoSkipMode() && !s_lasOpenDlg->exec())
    {
        delete reader;
        ifs.close();
        return CC_FERR_CANCELED_BY_USER;
    }
    bool ignoreDefaultFields = s_lasOpenDlg->ignoreDefaultFieldsCheckBox->isChecked();

    //RGB color
    //a per-component mask (all-ones or zero) encodes which color channels the user wants
    liblas::Color rgbColorMask; //(0,0,0) on construction
    if (s_lasOpenDlg->doLoad(LAS_RED))
        rgbColorMask.SetRed(~0);
    if (s_lasOpenDlg->doLoad(LAS_GREEN))
        rgbColorMask.SetGreen(~0);
    if (s_lasOpenDlg->doLoad(LAS_BLUE))
        rgbColorMask.SetBlue(~0);
    bool loadColor = (rgbColorMask[0] || rgbColorMask[1] || rgbColorMask[2]);

    //progress dialog
    ccProgressDialog pdlg(true); //cancel available
    CCLib::NormalizedProgress nprogress(&pdlg,nbOfPoints);
    pdlg.setMethodTitle("Open LAS file");
    pdlg.setInfo(qPrintable(QString("Points: %1").arg(nbOfPoints)));
    pdlg.start();

    //number of points read from the begining of the current cloud part
    unsigned pointsRead = 0;
    CCVector3d Pshift(0,0,0);

    //by default we read color as 8 bits integers and we will change this to 16 bits if it's not (16 bits is the standard!)
    unsigned char colorCompBitDec = 0;
    colorType rgb[3] = {0,0,0};

    ccPointCloud* loadedCloud = 0;
    //scalar fields requested by the user; each entry's 'sf' stays null until a
    //non-default value is met (lazy allocation, see below)
    std::vector<LasField> fieldsToLoad;

    //if the file is too big, we will chunck it in multiple parts
    unsigned int fileChunkPos = 0;
    unsigned int fileChunkSize = 0;

    while (true)
    {
        //if we reach the end of the file, or the max. cloud size limit (in which case we cerate a new chunk)
        //note: nprogress.oneStep() returns false if the user pressed 'cancel'
        bool newPointAvailable = (nprogress.oneStep() && reader->ReadNextPoint());

        if (!newPointAvailable || pointsRead == fileChunkPos+fileChunkSize)
        {
            //finalize the current chunk (if any) before starting a new one
            if (loadedCloud)
            {
                if (loadedCloud->size())
                {
                    bool thisChunkHasColors = loadedCloud->hasColors();
                    loadedCloud->showColors(thisChunkHasColors);
                    if (loadColor && !thisChunkHasColors)
                        ccLog::Warning("[LAS FILE] Color field was all black! We ignored it...");

                    //attach every scalar field that was actually populated
                    while (!fieldsToLoad.empty())
                    {
                        LasField& field = fieldsToLoad.back();
                        if (field.sf)
                        {
                            field.sf->computeMinAndMax();

                            if (field.type == LAS_CLASSIFICATION
                                || field.type == LAS_RETURN_NUMBER
                                || field.type == LAS_NUMBER_OF_RETURNS)
                            {
                                //integer-valued fields: one color ramp step per value (capped at 256)
                                int cMin = (int)field.sf->getMin();
                                int cMax = (int)field.sf->getMax();
                                field.sf->setColorRampSteps(std::min<int>(cMax-cMin+1,256));
                                //classifSF->setMinSaturation(cMin);
                            }
                            else if (field.type == LAS_INTENSITY)
                            {
                                field.sf->setColorScale(ccColorScalesManager::GetDefaultScale(ccColorScalesManager::GREY));
                            }

                            int sfIndex = loadedCloud->addScalarField(field.sf);
                            if (!loadedCloud->hasDisplayedScalarField())
                            {
                                loadedCloud->setCurrentDisplayedScalarField(sfIndex);
                                //don't let the SF hide real colors if this chunk has them
                                loadedCloud->showSF(!thisChunkHasColors);
                            }
                            //drop our own reference (the cloud holds its own now)
                            field.sf->release();
                            field.sf=0;
                        }
                        else
                        {
                            //sf was never allocated: every value equalled the default one
                            ccLog::Warning(QString("[LAS FILE] All '%1' values were the same (%2)! We ignored them...").arg(LAS_FIELD_NAMES[field.type]).arg(field.firstValue));
                        }
                        fieldsToLoad.pop_back();
                    }

                    //if we have reserved too much memory
                    if (loadedCloud->size() < loadedCloud->capacity())
                        loadedCloud->resize(loadedCloud->size());

                    QString chunkName("unnamed - Cloud");
                    unsigned n = container.getChildrenNumber();
                    if (n!=0) //if we have more than one cloud, we append an index
                    {
                        if (n==1) //we must also update the first one!
                            container.getChild(0)->setName(chunkName+QString(" #1"));
                        chunkName += QString(" #%1").arg(n+1);
                    }
                    loadedCloud->setName(chunkName);

                    container.addChild(loadedCloud);
                    loadedCloud=0;
                }
                else
                {
                    //empty cloud?!
                    delete loadedCloud;
                    loadedCloud=0;
                }
            }

            if (!newPointAvailable)
                break; //end of the file (or cancel requested)

            //otherwise, we must create a new cloud
            fileChunkPos = pointsRead;
            fileChunkSize = std::min(nbOfPoints-pointsRead,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
            loadedCloud = new ccPointCloud();
            if (!loadedCloud->reserveThePointsTable(fileChunkSize))
            {
                ccLog::Warning("[LASFilter::loadFile] Not enough memory!");
                delete loadedCloud;
                delete reader;
                ifs.close();
                return CC_FERR_NOT_ENOUGH_MEMORY;
            }
            loadedCloud->setGlobalShift(Pshift);

            //DGM: from now on, we only enable scalar fields when we detect a valid value!
            if (s_lasOpenDlg->doLoad(LAS_CLASSIFICATION))
                fieldsToLoad.push_back(LasField(LAS_CLASSIFICATION,0,0,255)); //unsigned char: between 0 and 255
            if (s_lasOpenDlg->doLoad(LAS_CLASSIF_VALUE))
                fieldsToLoad.push_back(LasField(LAS_CLASSIF_VALUE,0,0,31)); //5 bits: between 0 and 31
            if (s_lasOpenDlg->doLoad(LAS_CLASSIF_SYNTHETIC))
                fieldsToLoad.push_back(LasField(LAS_CLASSIF_SYNTHETIC,0,0,1)); //1 bit: 0 or 1
            if (s_lasOpenDlg->doLoad(LAS_CLASSIF_KEYPOINT))
                fieldsToLoad.push_back(LasField(LAS_CLASSIF_KEYPOINT,0,0,1)); //1 bit: 0 or 1
            if (s_lasOpenDlg->doLoad(LAS_CLASSIF_WITHHELD))
                fieldsToLoad.push_back(LasField(LAS_CLASSIF_WITHHELD,0,0,1)); //1 bit: 0 or 1
            if (s_lasOpenDlg->doLoad(LAS_INTENSITY))
                fieldsToLoad.push_back(LasField(LAS_INTENSITY,0,0,65535)); //16 bits: between 0 and 65535
            if (s_lasOpenDlg->doLoad(LAS_TIME))
                fieldsToLoad.push_back(LasField(LAS_TIME,0,0,-1.0)); //8 bytes (double)
            if (s_lasOpenDlg->doLoad(LAS_RETURN_NUMBER))
                fieldsToLoad.push_back(LasField(LAS_RETURN_NUMBER,1,1,7)); //3 bits: between 1 and 7
            if (s_lasOpenDlg->doLoad(LAS_NUMBER_OF_RETURNS))
                fieldsToLoad.push_back(LasField(LAS_NUMBER_OF_RETURNS,1,1,7)); //3 bits: between 1 and 7
            if (s_lasOpenDlg->doLoad(LAS_SCAN_DIRECTION))
                fieldsToLoad.push_back(LasField(LAS_SCAN_DIRECTION,0,0,1)); //1 bit: 0 or 1
            if (s_lasOpenDlg->doLoad(LAS_FLIGHT_LINE_EDGE))
                fieldsToLoad.push_back(LasField(LAS_FLIGHT_LINE_EDGE,0,0,1)); //1 bit: 0 or 1
            if (s_lasOpenDlg->doLoad(LAS_SCAN_ANGLE_RANK))
                fieldsToLoad.push_back(LasField(LAS_SCAN_ANGLE_RANK,0,-90,90)); //signed char: between -90 and +90
            if (s_lasOpenDlg->doLoad(LAS_USER_DATA))
                fieldsToLoad.push_back(LasField(LAS_USER_DATA,0,0,255)); //unsigned char: between 0 and 255
            if (s_lasOpenDlg->doLoad(LAS_POINT_SOURCE_ID))
                fieldsToLoad.push_back(LasField(LAS_POINT_SOURCE_ID,0,0,65535)); //16 bits: between 0 and 65535
        }

        assert(newPointAvailable);
        const liblas::Point& p = reader->GetPoint();

        //first point: check for 'big' coordinates
        if (pointsRead == 0)
        {
            CCVector3d P( p.GetX(),p.GetY(),p.GetZ() );
            bool shiftAlreadyEnabled = (coordinatesShiftEnabled && *coordinatesShiftEnabled && coordinatesShift);
            if (shiftAlreadyEnabled)
                Pshift = *coordinatesShift;
            bool applyAll = false;
            //only relevant with single-precision coordinates (double ones never lose precision)
            if (    sizeof(PointCoordinateType) < 8
                &&  ccCoordinatesShiftManager::Handle(P.u,0,alwaysDisplayLoadDialog,shiftAlreadyEnabled,Pshift,0,applyAll))
            {
                loadedCloud->setGlobalShift(Pshift);
                ccLog::Warning("[LASFilter::loadFile] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z);

                //we save coordinates shift information
                if (applyAll && coordinatesShiftEnabled && coordinatesShift)
                {
                    *coordinatesShiftEnabled = true;
                    *coordinatesShift = Pshift;
                }
            }
        }

        CCVector3 P(static_cast<PointCoordinateType>(p.GetX()+Pshift.x),
                    static_cast<PointCoordinateType>(p.GetY()+Pshift.y),
                    static_cast<PointCoordinateType>(p.GetZ()+Pshift.z));
        loadedCloud->addPoint(P);

        //color field
        if (loadColor)
        {
            //Warning: LAS colors are stored on 16 bits!
            liblas::Color col = p.GetColor();
            //mask out the channels the user didn't ask for
            col[0] &= rgbColorMask[0];
            col[1] &= rgbColorMask[1];
            col[2] &= rgbColorMask[2];

            //if we don't have reserved a color field yet, we check first that color is not black
            bool pushColor = true;
            if (!loadedCloud->hasColors())
            {
                //if the color is not black, we are sure it's a valid color field!
                if (col[0] || col[1] || col[2])
                {
                    if (loadedCloud->reserveTheRGBTable())
                    {
                        //we must set the color (black) of all the precedently skipped points
                        //(the current point was just added, hence 'size()-1')
                        for (unsigned i=0;i<loadedCloud->size()-1;++i)
                            loadedCloud->addRGBColor(ccColor::black);
                    }
                    else
                    {
                        ccLog::Warning("[LAS FILE] Not enough memory: color field will be ignored!");
                        loadColor = false; //no need to retry with the other chunks anyway
                        pushColor = false;
                    }
                }
                else //otherwise we ignore it for the moment (we'll add it later if necessary)
                {
                    pushColor = false;
                }
            }

            //do we need to push this color?
            if (pushColor)
            {
                //we test if the color components are on 16 bits (standard) or only on 8 bits (it happens ;)
                if (colorCompBitDec==0)
                {
                    if (    (col[0] & 0xFF00)
                        ||  (col[1] & 0xFF00)
                        ||  (col[2] & 0xFF00))
                    {
                        //the color components are on 16 bits!
                        ccLog::Print("[LAS FILE] Color components are coded on 16 bits");
                        colorCompBitDec = 8;
                        //we fix all the precedently read colors
                        for (unsigned i=0;i<loadedCloud->size()-1;++i)
                            loadedCloud->setPointColor(i,ccColor::black); //255 >> 8 = 0!
                    }
                }

                rgb[0]=(colorType)(col[0]>>colorCompBitDec);
                rgb[1]=(colorType)(col[1]>>colorCompBitDec);
                rgb[2]=(colorType)(col[2]>>colorCompBitDec);

                loadedCloud->addRGBColor(rgb);
            }
        }

        //additional fields
        for (std::vector<LasField>::iterator it = fieldsToLoad.begin(); it != fieldsToLoad.end(); ++it)
        {
            double value = 0.0;
            switch (it->type)
            {
            case LAS_X:
            case LAS_Y:
            case LAS_Z:
                assert(false); //coordinates are handled above, never as scalar fields
                break;
            case LAS_INTENSITY:
                value = (double)p.GetIntensity();
                break;
            case LAS_RETURN_NUMBER:
                value = (double)p.GetReturnNumber();
                break;
            case LAS_NUMBER_OF_RETURNS:
                value = (double)p.GetNumberOfReturns();
                break;
            case LAS_SCAN_DIRECTION:
                value = (double)p.GetScanDirection();
                break;
            case LAS_FLIGHT_LINE_EDGE:
                value = (double)p.GetFlightLineEdge();
                break;
            case LAS_CLASSIFICATION:
                value = (double)p.GetClassification().GetClass();
                break;
            case LAS_SCAN_ANGLE_RANK:
                value = (double)p.GetScanAngleRank();
                break;
            case LAS_USER_DATA:
                value = (double)p.GetUserData();
                break;
            case LAS_POINT_SOURCE_ID:
                value = (double)p.GetPointSourceID();
                break;
            case LAS_RED:
            case LAS_GREEN:
            case LAS_BLUE:
                assert(false); //color channels are handled above, never as scalar fields
                break;
            case LAS_TIME:
                value = p.GetTime();
                break;
            //NOTE(review): the masks below assume GetClass() returns the whole
            //classification byte (flags included); if liblas' GetClass() only returns
            //the 5-bit class value these flag fields would always read 0 - confirm
            //against the liblas Classification API
            case LAS_CLASSIF_VALUE:
                value = (double)(p.GetClassification().GetClass() & 31); //5 bits
                break;
            case LAS_CLASSIF_SYNTHETIC:
                value = (double)(p.GetClassification().GetClass() & 32); //bit #6
                break;
            case LAS_CLASSIF_KEYPOINT:
                value = (double)(p.GetClassification().GetClass() & 64); //bit #7
                break;
            case LAS_CLASSIF_WITHHELD:
                value = (double)(p.GetClassification().GetClass() & 128); //bit #8
                break;
            case LAS_INVALID:
            default:
                assert(false);
                break;
            }

            if (it->sf)
            {
                //field already allocated: simply append the value
                ScalarType s = static_cast<ScalarType>(value);
                it->sf->addElement(s);
            }
            else
            {
                //first point? we track its value
                if (loadedCloud->size() == 1)
                {
                    it->firstValue = value;
                }

                //lazy allocation: create the SF only once a value differs from the
                //first/default one (unless the user disabled this optimization)
                if (!ignoreDefaultFields || value != it->firstValue || it->firstValue != it->defaultValue)
                {
                    it->sf = new ccScalarField(it->getName());
                    if (it->sf->reserve(fileChunkSize))
                    {
                        it->sf->link();
                        //we must set the value (firstClassifValue) of all the precedently skipped points
                        ScalarType firstS = static_cast<ScalarType>(it->firstValue);
                        for (unsigned i=0; i<loadedCloud->size()-1; ++i)
                            it->sf->addElement(firstS);
                        ScalarType s = static_cast<ScalarType>(value);
                        it->sf->addElement(s);
                    }
                    else
                    {
                        ccLog::Warning(QString("[LAS FILE] Not enough memory: '%1' field will be ignored!").arg(LAS_FIELD_NAMES[it->type]));
                        it->sf->release();
                        it->sf = 0;
                    }
                }
            }
        }

        ++pointsRead;
    }

    if (reader)
        delete reader;
    reader=0;
    ifs.close();

    return CC_FERR_NO_ERROR;
}
CC_FILE_ERROR PlyFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, double* coordinatesShift/*=0*/) { //reset statics! s_triCount = 0; s_unsupportedPolygonType = false; s_scalarCount=0; s_IntensityCount=0; s_ColorCount=0; s_NormalCount=0; s_PointCount=0; s_PointDataCorrupted=false; s_AlwaysDisplayLoadDialog=alwaysDisplayLoadDialog; s_ShiftApplyAll=false; s_ShiftAlreadyEnabled = (coordinatesShiftEnabled && *coordinatesShiftEnabled && coordinatesShift); if (s_ShiftAlreadyEnabled) memcpy(s_Pshift,coordinatesShift,sizeof(double)*3); else memset(s_Pshift,0,sizeof(double)*3); /****************/ /*** Header ***/ /****************/ //open a PLY file for reading p_ply ply = ply_open(filename,NULL, 0, NULL); if (!ply) return CC_FERR_READING; ccConsole::PrintDebug("[PLY] Opening file '%s' ...",filename); if (!ply_read_header(ply)) { ply_close(ply); return CC_FERR_WRONG_FILE_TYPE; } //storage mode: little/big endian e_ply_storage_mode storage_mode; get_plystorage_mode(ply,&storage_mode); /******************/ /*** Comments ***/ /******************/ //display comments const char* lastComment = NULL; while ((lastComment = ply_get_next_comment(ply, lastComment))) ccConsole::Print("[PLY][Comment] %s",lastComment); /*******************************/ /*** Elements & properties ***/ /*******************************/ //Point-based elements (points, colors, normals, etc.) std::vector<plyElement> pointElements; //Mesh-based elements (vertices, etc.) std::vector<plyElement> meshElements; //Point-based element properties (coordinates, color components, etc.) std::vector<plyProperty> stdProperties; //Mesh-based element properties (vertex indexes, etc.) 
std::vector<plyProperty> listProperties; unsigned i=0; //last read element plyElement lastElement; lastElement.elem = 0; while ((lastElement.elem = ply_get_next_element(ply, lastElement.elem))) { //we get next element info ply_get_element_info(lastElement.elem, &lastElement.elementName, &lastElement.elementInstances); if (lastElement.elementInstances == 0) { ccConsole::Warning("[PLY] Element '%s' was ignored as it has 0 instance!",lastElement.elementName); continue; } lastElement.properties.clear(); lastElement.propertiesCount=0; lastElement.isList=false; //printf("Element: %s\n",lastElement.elementName); //last read property plyProperty lastProperty; lastProperty.prop = 0; lastProperty.elemIndex = 0; while ((lastProperty.prop = ply_get_next_property(lastElement.elem,lastProperty.prop))) { //we get next property info ply_get_property_info(lastProperty.prop, &lastProperty.propName, &lastProperty.type, &lastProperty.length_type, &lastProperty.value_type); //printf("\tProperty: %s (%s)\n",lastProperty.propName,e_ply_type_names[lastProperty.type]); if (lastProperty.type == 16) //PLY_LIST lastElement.isList = true; lastElement.properties.push_back(lastProperty); ++lastElement.propertiesCount; } //if we have a "mesh-like" element if (lastElement.isList) { //we store its properties in 'listProperties' for (i=0;i<lastElement.properties.size();++i) { plyProperty& prop = lastElement.properties[i]; prop.elemIndex = meshElements.size(); //we only keep track of lists (we can't handle per triangle scalars) if (prop.type == 16) listProperties.push_back(prop); else { ccConsole::Warning("[PLY] Unhandled property: [%s:%s] (%s)", lastElement.elementName, prop.propName, e_ply_type_names[prop.type]); } } meshElements.push_back(lastElement); } else //else if we have a "point-like" element { //we store its properties in 'stdProperties' for (i=0;i<lastElement.properties.size();++i) { plyProperty& prop = lastElement.properties[i]; prop.elemIndex = pointElements.size(); 
stdProperties.push_back(prop); } pointElements.push_back(lastElement); } } //We need some points at least! if (pointElements.empty()) { ply_close(ply); return CC_FERR_NO_LOAD; } /**********************/ /*** Objects info ***/ /**********************/ const char* lastObjInfo = NULL; while ((lastObjInfo = ply_get_next_obj_info(ply, lastObjInfo))) ccConsole::Print("[PLY][Info] %s",lastObjInfo); /****************/ /*** Dialog ***/ /****************/ //properties indexes (0=unassigned) static const unsigned nStdProp=11; int stdPropIndexes[nStdProp]={0,0,0,0,0,0,0,0,0,0,0}; int& xIndex = stdPropIndexes[0]; int& yIndex = stdPropIndexes[1]; int& zIndex = stdPropIndexes[2]; int& nxIndex = stdPropIndexes[3]; int& nyIndex = stdPropIndexes[4]; int& nzIndex = stdPropIndexes[5]; int& rIndex = stdPropIndexes[6]; int& gIndex = stdPropIndexes[7]; int& bIndex = stdPropIndexes[8]; int& iIndex = stdPropIndexes[9]; int& sfIndex = stdPropIndexes[10]; static const unsigned nListProp=1; int listPropIndexes[nListProp]={0}; int& facesIndex = listPropIndexes[0]; //Combo box items for standard properties (coordinates, color components, etc.) 
QStringList stdPropsText; stdPropsText << QString("None"); for (i=1; i<=stdProperties.size(); ++i) { plyProperty& pp = stdProperties[i-1]; QString itemText = QString("%1 - %2 [%3]").arg(pointElements[pp.elemIndex].elementName).arg(pp.propName).arg(e_ply_type_names[pp.type]); assert(pp.type!=16); //we don't want any PLY_LIST here stdPropsText << itemText; QString elementName = QString(pointElements[pp.elemIndex].elementName).toUpper(); QString propName = QString(pp.propName).toUpper(); if (nxIndex == 0 && (propName.contains("NX") || (elementName.contains("NORM") && propName.endsWith("X")))) nxIndex = i; else if (nyIndex == 0 && (propName.contains("NY") || (elementName.contains("NORM") && propName.endsWith("Y")))) nyIndex = i; else if (nzIndex == 0 && (propName.contains("NZ") || (elementName.contains("NORM") && propName.endsWith("Z")))) nzIndex = i; else if (rIndex == 0 && (propName.contains("RED") || (elementName.contains("COL") && propName.endsWith("R")))) rIndex = i; else if (gIndex == 0 && (propName.contains("GREEN") || (elementName.contains("COL") && propName.endsWith("G")))) gIndex = i; else if (bIndex == 0 && (propName.contains("BLUE") || (elementName.contains("COL") && propName.endsWith("B")))) bIndex = i; else if (iIndex == 0 && (propName.contains("INTENSITY") || propName.contains("GRAY") || propName.contains("GREY") || (elementName.contains("COL") && propName.endsWith("I")))) iIndex = i; else if (elementName.contains("VERT") || elementName.contains("POINT")) { if (sfIndex == 0 && propName.contains("SCAL")) sfIndex = i; else if (xIndex == 0 && propName.endsWith("X")) xIndex = i; else if (yIndex == 0 && propName.endsWith("Y")) yIndex = i; else if (zIndex == 0 && propName.endsWith("Z")) zIndex = i; } else if (sfIndex == 0 && (propName.contains("SCAL") || propName.contains("VAL"))) sfIndex = i; } //Combo box items for list properties (vertex indexes, etc.) 
QStringList listPropsText; listPropsText << QString("None"); for (i=0; i<listProperties.size(); ++i) { plyProperty& pp = listProperties[i]; QString itemText = QString("%0 - %1 [%2]").arg(meshElements[pp.elemIndex].elementName).arg(pp.propName).arg(e_ply_type_names[pp.type]); assert(pp.type==16); //we only want PLY_LIST here listPropsText << itemText; QString elementName = QString(meshElements[pp.elemIndex].elementName).toUpper(); QString propName = QString(pp.propName).toUpper(); if (facesIndex == 0 && (elementName.contains("FACE") || elementName.contains("TRI")) && propName.contains("IND")) facesIndex = i+1; } //combo-box max visible items int stdPropsCount = stdPropsText.count(); int listPropsCount = listPropsText.count(); //we need at least 2 coordinates! if (stdPropsCount<2) { return CC_FERR_BAD_ENTITY_TYPE; } else if (stdPropsCount<4 && !alwaysDisplayLoadDialog) { //brute force heuristic xIndex = 1; yIndex = 2; zIndex = (stdPropsCount>3 ? 3 : 0); facesIndex = (listPropsCount>1 ? 1 : 0); } else { //we count all assigned properties int assignedStdProperties = 0; for (i=0;i<nStdProp;++i) if (stdPropIndexes[i]>0) ++assignedStdProperties; int assignedListProperties = 0; for (i=0;i<nListProp;++i) if (listPropIndexes[i]>0) ++assignedListProperties; if (alwaysDisplayLoadDialog || stdPropsCount > assignedStdProperties+1 || //+1 because of the first item in the combo box ('none') listPropsCount > assignedListProperties+1) //+1 because of the first item in the combo box ('none') { PlyOpenDlg pod/*(MainWindow::TheInstance())*/; pod.plyTypeEdit->setText(e_ply_storage_mode_names[storage_mode]); pod.elementsEdit->setText(QString::number(pointElements.size())); pod.propertiesEdit->setText(QString::number(listProperties.size()+stdProperties.size())); //we fill every combo box pod.xComboBox->addItems(stdPropsText); pod.xComboBox->setCurrentIndex(xIndex); pod.xComboBox->setMaxVisibleItems(stdPropsCount); pod.yComboBox->addItems(stdPropsText); 
pod.yComboBox->setCurrentIndex(yIndex); pod.yComboBox->setMaxVisibleItems(stdPropsCount); pod.zComboBox->addItems(stdPropsText); pod.zComboBox->setCurrentIndex(zIndex); pod.zComboBox->setMaxVisibleItems(stdPropsCount); pod.rComboBox->addItems(stdPropsText); pod.rComboBox->setCurrentIndex(rIndex); pod.rComboBox->setMaxVisibleItems(stdPropsCount); pod.gComboBox->addItems(stdPropsText); pod.gComboBox->setCurrentIndex(gIndex); pod.gComboBox->setMaxVisibleItems(stdPropsCount); pod.bComboBox->addItems(stdPropsText); pod.bComboBox->setCurrentIndex(bIndex); pod.bComboBox->setMaxVisibleItems(stdPropsCount); pod.iComboBox->addItems(stdPropsText); pod.iComboBox->setCurrentIndex(iIndex); pod.iComboBox->setMaxVisibleItems(stdPropsCount); pod.sfComboBox->addItems(stdPropsText); pod.sfComboBox->setCurrentIndex(sfIndex); pod.sfComboBox->setMaxVisibleItems(stdPropsCount); pod.nxComboBox->addItems(stdPropsText); pod.nxComboBox->setCurrentIndex(nxIndex); pod.nxComboBox->setMaxVisibleItems(stdPropsCount); pod.nyComboBox->addItems(stdPropsText); pod.nyComboBox->setCurrentIndex(nyIndex); pod.nyComboBox->setMaxVisibleItems(stdPropsCount); pod.nzComboBox->addItems(stdPropsText); pod.nzComboBox->setCurrentIndex(nzIndex); pod.nzComboBox->setMaxVisibleItems(stdPropsCount); pod.facesComboBox->addItems(listPropsText); pod.facesComboBox->setCurrentIndex(facesIndex); pod.facesComboBox->setMaxVisibleItems(listPropsCount); //We execute dialog if (!pod.exec()) { ply_close(ply); return CC_FERR_CANCELED_BY_USER; } //Force events processing (to hide dialog) QCoreApplication::processEvents(); xIndex = pod.xComboBox->currentIndex(); yIndex = pod.yComboBox->currentIndex(); zIndex = pod.zComboBox->currentIndex(); nxIndex = pod.nxComboBox->currentIndex(); nyIndex = pod.nyComboBox->currentIndex(); nzIndex = pod.nzComboBox->currentIndex(); rIndex = pod.rComboBox->currentIndex(); gIndex = pod.gComboBox->currentIndex(); bIndex = pod.bComboBox->currentIndex(); iIndex = pod.iComboBox->currentIndex(); facesIndex 
= pod.facesComboBox->currentIndex(); sfIndex = pod.sfComboBox->currentIndex(); } } /*************************/ /*** Callbacks setup ***/ /*************************/ //Main point cloud ccPointCloud* cloud = new ccPointCloud("unnamed - Cloud"); /* POINTS (X,Y,Z) */ unsigned numberOfPoints=0; assert(xIndex != yIndex && xIndex != zIndex && yIndex != zIndex); //POINTS (X) if (xIndex>0) { long flags = ELEM_POS_0; //X coordinate if (xIndex > yIndex && xIndex > zIndex) flags |= ELEM_EOL; plyProperty& pp = stdProperties[xIndex-1]; ply_set_read_cb(ply, pointElements[pp.elemIndex].elementName, pp.propName, vertex_cb, cloud, flags); numberOfPoints = pointElements[pp.elemIndex].elementInstances; } //POINTS (Y) if (yIndex>0) { long flags = ELEM_POS_1; //Y coordinate if (yIndex > xIndex && yIndex > zIndex) flags |= ELEM_EOL; plyProperty& pp = stdProperties[yIndex-1]; ply_set_read_cb(ply, pointElements[pp.elemIndex].elementName, pp.propName, vertex_cb, cloud, flags); if (numberOfPoints > 0) { if ((long)numberOfPoints != pointElements[pp.elemIndex].elementInstances) { ccConsole::Warning("[PLY] Bad/uncompatible assignation of point properties!"); delete cloud; ply_close(ply); return CC_FERR_BAD_ENTITY_TYPE; } } else numberOfPoints = pointElements[pp.elemIndex].elementInstances; } //POINTS (Z) if (zIndex>0) { long flags = ELEM_POS_2; //Z coordinate if (zIndex > xIndex && zIndex > yIndex) flags |= ELEM_EOL; plyProperty& pp = stdProperties[zIndex-1]; ply_set_read_cb(ply, pointElements[pp.elemIndex].elementName, pp.propName, vertex_cb, cloud, flags); if (numberOfPoints > 0) { if ((long)numberOfPoints != pointElements[pp.elemIndex].elementInstances) { ccConsole::Warning("[PLY] Bad/uncompatible assignation of point properties!"); delete cloud; ply_close(ply); return CC_FERR_BAD_ENTITY_TYPE; } } else numberOfPoints = pointElements[pp.elemIndex].elementInstances; } if (numberOfPoints == 0 || !cloud->reserveThePointsTable(numberOfPoints)) { delete cloud; ply_close(ply); return 
CC_FERR_NOT_ENOUGH_MEMORY; } /* NORMALS (X,Y,Z) */ unsigned numberOfNormals=0; assert(nxIndex == 0 || (nxIndex != nyIndex && nxIndex != nzIndex)); assert(nyIndex == 0 || (nyIndex != nxIndex && nyIndex != nzIndex)); assert(nzIndex == 0 || (nzIndex != nxIndex && nzIndex != nyIndex)); //NORMALS (X) if (nxIndex>0) { long flags = ELEM_POS_0; //Nx if (nxIndex > nyIndex && nxIndex > nzIndex) flags |= ELEM_EOL; plyProperty& pp = stdProperties[nxIndex-1]; ply_set_read_cb(ply, pointElements[pp.elemIndex].elementName, pp.propName, normal_cb, cloud, flags); numberOfNormals = pointElements[pp.elemIndex].elementInstances; } //NORMALS (Y) if (nyIndex>0) { long flags = ELEM_POS_1; //Ny if (nyIndex > nxIndex && nyIndex > nzIndex) flags |= ELEM_EOL; plyProperty& pp = stdProperties[nyIndex-1]; ply_set_read_cb(ply, pointElements[pp.elemIndex].elementName, pp.propName, normal_cb, cloud, flags); numberOfNormals = ccMax(numberOfNormals, (unsigned)pointElements[pp.elemIndex].elementInstances); } //NORMALS (Z) if (nzIndex>0) { long flags = ELEM_POS_2; //Nz if (nzIndex > nxIndex && nzIndex > nyIndex) flags |= ELEM_EOL; plyProperty& pp = stdProperties[nzIndex-1]; ply_set_read_cb(ply, pointElements[pp.elemIndex].elementName, pp.propName, normal_cb, cloud, flags); numberOfNormals = ccMax(numberOfNormals, (unsigned)pointElements[pp.elemIndex].elementInstances); } //We check that the number of normals corresponds to the number of points if (numberOfNormals > 0) { if (numberOfPoints != numberOfNormals) { ccConsole::Warning("[PLY] The number of normals doesn't match the number of points!"); delete cloud; ply_close(ply); return CC_FERR_BAD_ENTITY_TYPE; } if (!cloud->reserveTheNormsTable()) { delete cloud; ply_close(ply); return CC_FERR_NOT_ENOUGH_MEMORY; } cloud->showNormals(true); } /* COLORS (R,G,B) */ unsigned numberOfColors=0; assert(rIndex == 0 || (rIndex != gIndex && rIndex != bIndex)); assert(gIndex == 0 || (gIndex != rIndex && gIndex != bIndex)); assert(bIndex == 0 || (bIndex != rIndex && 
bIndex != gIndex)); if (rIndex>0) { long flags = ELEM_POS_0; //R if (rIndex > gIndex && rIndex > bIndex) flags |= ELEM_EOL; plyProperty& pp = stdProperties[rIndex-1]; ply_set_read_cb(ply, pointElements[pp.elemIndex].elementName, pp.propName, rgb_cb, cloud, flags); numberOfColors = pointElements[pp.elemIndex].elementInstances; } if (gIndex>0) { long flags = ELEM_POS_1; //G if (gIndex > rIndex && gIndex > bIndex) flags |= ELEM_EOL; plyProperty& pp = stdProperties[gIndex-1]; ply_set_read_cb(ply, pointElements[pp.elemIndex].elementName, pp.propName, rgb_cb, cloud, flags); numberOfColors = ccMax(numberOfColors, (unsigned)pointElements[pp.elemIndex].elementInstances); } if (bIndex>0) { long flags = ELEM_POS_2; //B if (bIndex > rIndex && bIndex > gIndex) flags |= ELEM_EOL; plyProperty& pp = stdProperties[bIndex-1]; ply_set_read_cb(ply, pointElements[pp.elemIndex].elementName, pp.propName, rgb_cb, cloud, flags); numberOfColors = ccMax(numberOfColors, (unsigned)pointElements[pp.elemIndex].elementInstances); } /* Intensity (I) */ //INTENSITE (G) if (iIndex>0) { if (numberOfColors>0) { ccConsole::Error("Can't import colors AND intensity (intensities will be ignored)!"); ccConsole::Warning("[PLY] intensities will be ignored"); } else { plyProperty pp = stdProperties[iIndex-1]; ply_set_read_cb(ply, pointElements[pp.elemIndex].elementName, pp.propName, grey_cb, cloud, 0); numberOfColors = pointElements[pp.elemIndex].elementInstances; } } //We check that the number of colors corresponds to the number of points if (numberOfColors > 0) { if (numberOfPoints != numberOfColors) { ccConsole::Warning("The number of colors doesn't match the number of points!"); delete cloud; ply_close(ply); return CC_FERR_BAD_ENTITY_TYPE; } if (!cloud->reserveTheRGBTable()) { delete cloud; ply_close(ply); return CC_FERR_NOT_ENOUGH_MEMORY; } cloud->showColors(true); } /* SCALAR FIELD (SF) */ unsigned numberOfScalars=0; if (sfIndex>0) { plyProperty& pp = stdProperties[sfIndex-1]; numberOfScalars = 
pointElements[pp.elemIndex].elementInstances; //does the number of scalars matches the number of points? if (numberOfPoints != numberOfScalars) { ccConsole::Error("The number of scalars doesn't match the number of points (they will be ignored)!"); ccConsole::Warning("[PLY] Scalar field ignored!"); numberOfScalars = 0; } else if (!cloud->enableScalarField()) { ccConsole::Error("Not enough memory to load scalar field (they will be ignored)!"); ccConsole::Warning("[PLY] Scalar field ignored!"); numberOfScalars = 0; } else { CCLib::ScalarField* sf = cloud->getCurrentInScalarField(); if (sf) { QString qPropName(pp.propName); if (qPropName.startsWith("scalar_") && qPropName.length()>7) { //remove the 'scalar_' prefix added when saving SF with CC! qPropName = qPropName.mid(7).replace('_',' '); sf->setName(qPrintable(qPropName)); } else { sf->setName(pp.propName); } } ply_set_read_cb(ply, pointElements[pp.elemIndex].elementName, pp.propName, scalar_cb, cloud, 1); } cloud->showSF(true); } /* MESH FACETS (TRI) */ ccMesh* mesh = 0; unsigned numberOfFacets=0; if (facesIndex>0) { plyProperty& pp = listProperties[facesIndex-1]; assert(pp.type==16); //we only accept PLY_LIST here! 
mesh = new ccMesh(cloud); numberOfFacets = meshElements[pp.elemIndex].elementInstances; if (!mesh->reserve(numberOfFacets)) { ccConsole::Error("Not enough memory to load facets (they will be ignored)!"); ccConsole::Warning("[PLY] Mesh ignored!"); delete mesh; mesh = 0; numberOfFacets = 0; } else { ply_set_read_cb(ply, meshElements[pp.elemIndex].elementName, pp.propName, face_cb, mesh, 0); } } QProgressDialog progressDlg(QString("Loading in progress..."),0,0,0,0,Qt::Popup); progressDlg.setMinimumDuration(0); progressDlg.setModal(true); progressDlg.show(); QApplication::processEvents(); int success = ply_read(ply); progressDlg.close(); ply_close(ply); if (success<1) { if (mesh) delete mesh; delete cloud; return CC_FERR_READING; } //we check mesh if (mesh && mesh->size()==0) { if (s_unsupportedPolygonType) ccConsole::Error("Mesh is not triangular! (unsupported)"); else ccConsole::Error("Mesh is empty!"); delete mesh; mesh=0; } //we save coordinates shift information if (s_ShiftApplyAll && coordinatesShiftEnabled && coordinatesShift) { *coordinatesShiftEnabled = true; coordinatesShift[0] = s_Pshift[0]; coordinatesShift[1] = s_Pshift[1]; coordinatesShift[2] = s_Pshift[2]; } //we update scalar field CCLib::ScalarField* sf = cloud->getCurrentInScalarField(); if (sf) { sf->setPositive(!s_negSF); sf->computeMinAndMax(); int sfIdx = cloud->getCurrentInScalarFieldIndex(); cloud->setCurrentDisplayedScalarField(sfIdx); cloud->showSF(sfIdx>=0); } if (mesh) { assert(s_triCount > 0); //check number of loaded facets against 'theoretical' number if (s_triCount<numberOfFacets) { mesh->resize(s_triCount); ccConsole::Warning("[PLY] Missing vertex indexes!"); } //check that vertex indices start at 0 unsigned minVertIndex=numberOfPoints,maxVertIndex=0; for (unsigned i=0;i<s_triCount;++i) { const CCLib::TriangleSummitsIndexes* tri = mesh->getTriangleIndexes(i); if (tri->i1 < minVertIndex) minVertIndex = tri->i1; else if (tri->i1 > maxVertIndex) maxVertIndex = tri->i1; if (tri->i2 < 
minVertIndex) minVertIndex = tri->i2; else if (tri->i2 > maxVertIndex) maxVertIndex = tri->i2; if (tri->i3 < minVertIndex) minVertIndex = tri->i3; else if (tri->i3 > maxVertIndex) maxVertIndex = tri->i3; } if (maxVertIndex>=numberOfPoints) { if (maxVertIndex == numberOfPoints && minVertIndex > 0) { ccLog::Warning("[PLY] Vertex indices seem to be shifted (+1)! We will try to 'unshift' indices (otherwise file is corrupted...)"); for (unsigned i=0;i<s_triCount;++i) { CCLib::TriangleSummitsIndexes* tri = mesh->getTriangleIndexes(i); --tri->i1; --tri->i2; --tri->i3; } } else //file is definitely corrupted! { ccLog::Warning("[PLY] Invalid vertex indices!"); delete mesh; delete cloud; return CC_FERR_MALFORMED_FILE; } } mesh->addChild(cloud); cloud->setEnabled(false); cloud->setName("Vertices"); //cloud->setLocked(true); //DGM: no need to lock it as it is only used by one mesh! if (cloud->hasColors()) mesh->showColors(true); if (cloud->hasDisplayedScalarField()) mesh->showSF(true); if (cloud->hasNormals()) mesh->showNormals(true); else mesh->computeNormals(); container.addChild(mesh); } else { container.addChild(cloud); } return CC_FERR_NO_ERROR; }
//! Loads a PV file: a raw binary stream of 4-float records (X, Y, Z, scalar).
/** The number of points is deduced from the file size (which must be a
	multiple of 16 bytes). Big files are split into multiple cloud 'chunks'
	of at most CC_MAX_NUMBER_OF_POINTS_PER_CLOUD points, each added to
	'container' as soon as it is full.
	\param filename file to load
	\param container output group (one ccPointCloud child per chunk)
	\param parameters load parameters (unused here, kept for interface consistency)
	\return error code (CC_FERR_NO_ERROR on success)
**/
CC_FILE_ERROR PVFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters)
{
	//open the binary file
	QFile in(filename);
	if (!in.open(QIODevice::ReadOnly))
		return CC_FERR_READING;

	//we deduce the number of points from the file size
	//(each point record is 4 floats: X, Y, Z and a scalar value)
	qint64 fileSize = in.size();
	qint64 singlePointSize = 4*sizeof(float);
	//check that size is ok
	if (fileSize == 0)
		return CC_FERR_NO_LOAD;
	if ((fileSize % singlePointSize) != 0)
		return CC_FERR_MALFORMED_FILE;
	unsigned numberOfPoints = static_cast<unsigned>(fileSize / singlePointSize);

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	CCLib::NormalizedProgress nprogress(&pdlg,numberOfPoints);
	pdlg.setMethodTitle("Open PV file");
	pdlg.setInfo(qPrintable(QString("Points: %1").arg(numberOfPoints)));
	pdlg.start();

	ccPointCloud* loadedCloud = 0;
	//if the file is too big, it will be chunked in multiple parts
	unsigned chunkIndex = 0;
	unsigned fileChunkPos = 0;
	unsigned fileChunkSize = 0;
	//total number of points read so far (over ALL chunks)
	unsigned pointsRead = 0;
	CC_FILE_ERROR result = CC_FERR_NO_ERROR;

	//expected record part sizes (to detect short reads)
	const qint64 coordsSize = static_cast<qint64>(3*sizeof(float));
	const qint64 scalarSize = static_cast<qint64>(sizeof(float));

	for (unsigned i=0; i<numberOfPoints; i++)
	{
		//if we reach the max. cloud size limit, we create a new chunk
		if (pointsRead == fileChunkPos+fileChunkSize)
		{
			//flush the current (full) chunk to the container first
			if (loadedCloud)
			{
				int sfIdx = loadedCloud->getCurrentInScalarFieldIndex();
				if (sfIdx>=0)
				{
					CCLib::ScalarField* sf = loadedCloud->getScalarField(sfIdx);
					sf->computeMinAndMax();
					loadedCloud->setCurrentDisplayedScalarField(sfIdx);
					loadedCloud->showSF(true);
				}
				container.addChild(loadedCloud);
			}
			fileChunkPos = pointsRead;
			fileChunkSize = std::min<unsigned>(numberOfPoints-pointsRead,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
			loadedCloud = new ccPointCloud(QString("unnamed - Cloud #%1").arg(++chunkIndex));
			if (!loadedCloud || !loadedCloud->reserveThePointsTable(fileChunkSize) || !loadedCloud->enableScalarField())
			{
				result = CC_FERR_NOT_ENOUGH_MEMORY;
				if (loadedCloud)
					delete loadedCloud;
				loadedCloud = 0;
				break;
			}
		}

		//we read the 3 coordinates of the point
		//FIX: check the exact number of bytes read - QIODevice::read() may
		//return a short count (e.g. 0 at EOF) without reporting an error,
		//and the previous '>= 0' test silently accepted such truncated reads
		float rBuff[3];
		if (in.read((char*)rBuff,coordsSize) == coordsSize)
		{
			//conversion to CCVector3
			CCVector3 P(	(PointCoordinateType)rBuff[0],
							(PointCoordinateType)rBuff[1],
							(PointCoordinateType)rBuff[2]);
			loadedCloud->addPoint(P);
		}
		else
		{
			result = CC_FERR_READING;
			break;
		}

		//then the scalar value
		//FIX: the scalar index must be relative to the CURRENT chunk
		//(the former code used the file-global 'pointsRead' index, which is
		//out of bounds for every chunk after the first one)
		if (in.read((char*)rBuff,scalarSize) == scalarSize)
		{
			loadedCloud->setPointScalarValue(pointsRead-fileChunkPos,(ScalarType)rBuff[0]);
		}
		else
		{
			//add fake scalar value for consistency then break
			loadedCloud->setPointScalarValue(pointsRead-fileChunkPos,0);
			result = CC_FERR_READING;
			break;
		}

		++pointsRead;

		if (!nprogress.oneStep())
		{
			result = CC_FERR_CANCELED_BY_USER;
			break;
		}
	}

	in.close();

	//don't forget the last (potentially partial) chunk
	if (loadedCloud)
	{
		//shrink to the number of points actually loaded
		if (loadedCloud->size() < loadedCloud->capacity())
			loadedCloud->resize(loadedCloud->size());
		int sfIdx = loadedCloud->getCurrentInScalarFieldIndex();
		if (sfIdx>=0)
		{
			CCLib::ScalarField* sf = loadedCloud->getScalarField(sfIdx);
			sf->computeMinAndMax();
			loadedCloud->setCurrentDisplayedScalarField(sfIdx);
			loadedCloud->showSF(true);
		}
		container.addChild(loadedCloud);
	}

	return result;
}
//! Loads a SOI file (Soisic/Mensi ASCII scans).
/** The header is scanned for '#NP#' (total point count) and '#NS#' (scan
	count) markers until '#CC#' or EOF is reached. Then, for each scan, the
	'#pt#' marker gives the per-scan point count, followed by ASCII lines of
	"X Y Z intensity". One ccPointCloud is created per (non-empty) scan and
	added to 'container'.
	\param filename file to load
	\param container output group (one cloud child per scan)
	\param parameters load parameters (unused here)
	\return error code (CC_FERR_NO_ERROR on success)
**/
CC_FILE_ERROR SoiFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters)
{
	//open the file
	FILE *fp = fopen(qPrintable(filename), "rt");
	if (!fp)
		return CC_FERR_READING;

	//fixed-size line buffer (NOTE(review): the code writes into the string's
	//internal buffer through a cast of c_str() - works in practice but is
	//not sanctioned by the standard; &line[0] would be the safe form)
	std::string line;
	line.resize(MAX_ASCII_FILE_LINE_LENGTH);
	unsigned nbScansTotal = 0;
	unsigned nbPointsTotal = 0;

	//we read the first line
	char* eof = fgets ((char*)line.c_str(), MAX_ASCII_FILE_LINE_LENGTH , fp);
	char* pEnd;

	//header: read until the '#CC#' end-of-header marker (or EOF)
	while ((strcmp((char*)line.substr(0,4).c_str(),"#CC#") != 0)&&(eof != NULL))
	{
		if (strcmp(line.substr(0,4).c_str(),"#NP#")==0)
		{
			//total number of points (text after the 4-char marker)
			std::string numPoints (line,4,line.size()-4);
			nbPointsTotal=strtol(numPoints.c_str(),&pEnd,0);
			//ccLog::Print("[SoiFilter::loadFile] Total number of points: %i",nbPointsTotal);
		}
		else if (strcmp(line.substr(0,4).c_str(),"#NS#")==0)
		{
			//total number of scans
			std::string numScans (line,4,line.size()-4);
			nbScansTotal=strtol(numScans.c_str(),&pEnd,0);
			//ccLog::Print("[SoiFilter::loadFile] Total number of scans: %i",nbScansTotal);
		}
		eof = fgets ((char*)line.c_str(), MAX_ASCII_FILE_LINE_LENGTH , fp);
	}

	//both counts are mandatory
	if ((nbScansTotal == 0)||(nbPointsTotal == 0))
	{
		ccLog::Warning("[SoiFilter::loadFile] No points or scans defined in this file!");
		fclose(fp);
		return CC_FERR_NO_LOAD;
	}

	//Progress dialog
	ccProgressDialog pdlg(false); //cancel is not supported
	pdlg.setMethodTitle("Open SOI file");
	char buffer[256];
	sprintf(buffer,"%u scans / %u points\n",nbScansTotal,nbPointsTotal);
	CCLib::NormalizedProgress nprogress(&pdlg,nbPointsTotal);
	pdlg.setInfo(buffer);
	pdlg.start();

	//Scan by scan
	for (unsigned k=0; k<nbScansTotal; k++)
	{
		//NOTE: this 'eof' deliberately shadows the header-loop variable
		char* eof = fgets ((char*)line.c_str(), MAX_ASCII_FILE_LINE_LENGTH , fp);

		//we only look for points (we ignore the rest)
		while ((strcmp(line.substr(0,4).c_str(),"#pt#")!=0)&&(eof != NULL))
			eof = fgets ((char*)line.c_str(), MAX_ASCII_FILE_LINE_LENGTH , fp);

		unsigned nbOfPoints = 0;
		if (strcmp(line.substr(0,4).c_str(),"#pt#")==0)
		{
			//number of points in this scan
			std::string numPoints(line,4,line.size()-4);
			nbOfPoints = strtol(numPoints.c_str(),&pEnd,0);
			//ccLog::Print("[SoiFilter::loadFile] Scan %i - points: %i",k+1,nbOfPoints);
		}
		else
		{
			ccLog::Warning("[SoiFilter::loadFile] Can't find marker '#pt#'!");
			fclose(fp);
			return CC_FERR_WRONG_FILE_TYPE;
		}

		//skip empty scans
		if (nbOfPoints == 0)
		{
			ccLog::Warning("[SoiFilter::loadFile] Scan #%i is empty!",k);
			continue;
		}

		//create the point cloud for this scan
		QString name = QString("unnamed - Scan #%1").arg(k);
		ccPointCloud* loadedCloud = new ccPointCloud(name);
		if ( !loadedCloud->reserveThePointsTable(nbOfPoints) || !loadedCloud->reserveTheRGBTable() )
		{
			fclose(fp);
			delete loadedCloud;
			return CC_FERR_NOT_ENOUGH_MEMORY;
		}
		loadedCloud->showColors(true);

		//we can read points now
		//NOTE(review): fscanf's return value is not checked - a malformed
		//line would leave P/c unchanged and silently desynchronize parsing
		for (unsigned i=0; i<nbOfPoints; i++)
		{
			float P[3];
			int c = 0;
			fscanf(fp,"%f %f %f %i",P,P+1,P+2,&c);
			loadedCloud->addPoint(CCVector3::fromArray(P));
			//intensity is mapped to a grey color (scaling factor uncertain)
			loadedCloud->addGreyColor(static_cast<colorType>(c<<3)); //<<2 ? <<3 ? we lack some info. here ...
			nprogress.oneStep();
		}

		container.addChild(loadedCloud);
	}

	fclose(fp);

	return CC_FERR_NO_ERROR;
}
CC_FILE_ERROR OFFFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters) { //try to open file QFile fp(filename); if (!fp.open(QIODevice::ReadOnly | QIODevice::Text)) return CC_FERR_READING; QTextStream stream(&fp); QString currentLine = stream.readLine(); if (!currentLine.toUpper().startsWith("OFF")) return CC_FERR_MALFORMED_FILE; //check if the number of vertices/faces/etc. are on the first line (yes it happens :( ) QStringList tokens = currentLine.split(QRegExp("\\s+"),QString::SkipEmptyParts); if (tokens.size() == 4) { tokens.removeAt(0); } else { currentLine = GetNextLine(stream); //end of file already?! if (currentLine.isNull()) return CC_FERR_MALFORMED_FILE; //read the number of vertices/faces tokens = currentLine.split(QRegExp("\\s+"),QString::SkipEmptyParts); if (tokens.size() < 2/*3*/) //should be 3 but we only use the 2 firsts... return CC_FERR_MALFORMED_FILE; } bool ok = false; unsigned vertCount = tokens[0].toUInt(&ok); if (!ok) return CC_FERR_MALFORMED_FILE; unsigned triCount = tokens[1].toUInt(&ok); if (!ok) return CC_FERR_MALFORMED_FILE; //create cloud and reserve some memory ccPointCloud* vertices = new ccPointCloud("vertices"); if (!vertices->reserve(vertCount)) { delete vertices; return CC_FERR_NOT_ENOUGH_MEMORY; } //read vertices { CCVector3d Pshift(0,0,0); for (unsigned i=0; i<vertCount; ++i) { currentLine = GetNextLine(stream); tokens = currentLine.split(QRegExp("\\s+"),QString::SkipEmptyParts); if (tokens.size() < 3) { delete vertices; return CC_FERR_MALFORMED_FILE; } //read vertex CCVector3d Pd(0,0,0); { bool vertexIsOk = false; Pd.x = tokens[0].toDouble(&vertexIsOk); if (vertexIsOk) { Pd.y = tokens[1].toDouble(&vertexIsOk); if (vertexIsOk) Pd.z = tokens[2].toDouble(&vertexIsOk); } if (!vertexIsOk) { delete vertices; return CC_FERR_MALFORMED_FILE; } } //first point: check for 'big' coordinates if (i == 0) { if (HandleGlobalShift(Pd,Pshift,parameters)) { vertices->setGlobalShift(Pshift); ccLog::Warning("[OFF] 
Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z); } } CCVector3 P = CCVector3::fromArray((Pd + Pshift).u); vertices->addPoint(P); } } ccMesh* mesh = new ccMesh(vertices); mesh->addChild(vertices); if (!mesh->reserve(triCount)) { delete mesh; return CC_FERR_NOT_ENOUGH_MEMORY; } //load triangles { bool ignoredPolygons = false; for (unsigned i=0; i<triCount; ++i) { currentLine = GetNextLine(stream); tokens = currentLine.split(QRegExp("\\s+"),QString::SkipEmptyParts); if (tokens.size() < 3) { delete mesh; return CC_FERR_MALFORMED_FILE; } unsigned polyVertCount = tokens[0].toUInt(&ok); if (!ok || static_cast<int>(polyVertCount) >= tokens.size()) { delete mesh; return CC_FERR_MALFORMED_FILE; } if (polyVertCount == 3 || polyVertCount == 4) { //decode indexes unsigned indexes[4]; for (unsigned j=0; j<polyVertCount; ++j) { indexes[j] = tokens[j+1].toUInt(&ok); if (!ok) { delete mesh; return CC_FERR_MALFORMED_FILE; } } //reserve memory if necessary unsigned polyTriCount = polyVertCount-2; if (mesh->size() + polyTriCount > mesh->capacity()) { if (!mesh->reserve(mesh->size() + polyTriCount + 256)) //use some margin to avoid too many allocations { delete mesh; return CC_FERR_NOT_ENOUGH_MEMORY; } } //triangle or quad only mesh->addTriangle(indexes[0],indexes[1],indexes[2]); if (polyVertCount == 4) mesh->addTriangle(indexes[0],indexes[2],indexes[3]); } else { ignoredPolygons = true; } } if (ignoredPolygons) { ccLog::Warning("[OFF] Some polygons with an unhandled size (i.e. > 4) were ignored!"); } } if (mesh->size() == 0) { ccLog::Warning("[OFF] Failed to load any polygon!"); mesh->detachChild(vertices); delete mesh; mesh = 0; container.addChild(vertices); vertices->setEnabled(true); } else { mesh->shrinkToFit(); //DGM: normals can be per-vertex or per-triangle so it's better to let the user do it himself later //Moreover it's not always good idea if the user doesn't want normals (especially in ccViewer!) 
//if (mesh->computeNormals()) // mesh->showNormals(true); //else // ccLog::Warning("[OFF] Failed to compute per-vertex normals..."); ccLog::Warning("[OFF] Mesh has no normal! You can manually compute them (select it then call \"Edit > Normals > Compute\")"); vertices->setEnabled(false); //vertices->setLocked(true); //DGM: no need to lock it as it is only used by one mesh! container.addChild(mesh); } return CC_FERR_NO_ERROR; }
//! Loads a BIN file (format version >= 2.0).
/** Overall flow:
	1) read and check the 4-byte version number;
	2) deserialize the whole entity tree from the stream ('root');
	3) re-link objects: during deserialization, dependency pointers
	   (associated cloud/mesh, material sets, shared normals, texture
	   coordinates, label picked-points, facet sub-parts...) hold the
	   dependee's unique ID cast into the pointer member - this pass
	   resolves each ID back to the actual object via root->find();
	4) shift all unique IDs above the pre-load range to avoid collisions;
	5) move the loaded entities into 'container', and keep any entity whose
	   owner turned out to be corrupted in a disabled 'Orphans' group.
	\param in opened input file
	\param container output group
	\param flags deserialization flags (coordinate/scalar precision)
	\return error code (CC_FERR_NO_ERROR, or CC_FERR_BROKEN_DEPENDENCY_ERROR
			if some shared structures could not be re-linked)
**/
CC_FILE_ERROR BinFilter::LoadFileV2(QFile& in, ccHObject& container, int flags)
{
	assert(in.isOpen());

	uint32_t binVersion = 20;
	//NOTE(review): a short read (return value < 4 but >= 0) is not detected here
	if (in.read((char*)&binVersion,4) < 0)
		return CC_FERR_READING;

	if (binVersion < 20) //should be superior to 2.0!
		return CC_FERR_MALFORMED_FILE;

	QString coordsFormat = (flags & ccSerializableObject::DF_POINT_COORDS_64_BITS ? "double" : "float");
	QString scalarFormat = (flags & ccSerializableObject::DF_SCALAR_VAL_32_BITS ? "float" : "double");
	ccLog::Print(QString("[BIN] Version %1.%2 (coords: %3 / scalar: %4)").arg(binVersion/10).arg(binVersion%10).arg(coordsFormat).arg(scalarFormat));

	//we keep track of the last unique ID before load
	//(used in step 4 to re-map the IDs read from the file)
	unsigned lastUniqueIDBeforeLoad = ccObject::GetLastUniqueID();

	//we read first entity type
	unsigned classID = 0;
	if (!ccObject::ReadClassIDFromFile(classID, in, static_cast<short>(binVersion)))
		return CC_FERR_CONSOLE_ERROR;

	ccHObject* root = ccHObject::New(classID);
	if (!root)
		return CC_FERR_MALFORMED_FILE;

	//deserialize the whole tree (root + all its children)
	if (!root->fromFile(in,static_cast<short>(binVersion),flags))
	{
		//DGM: can't delete it, too dangerous (bad pointers ;)
		//delete root;
		return CC_FERR_CONSOLE_ERROR;
	}

	CC_FILE_ERROR result = CC_FERR_NO_ERROR;

	//re-link objects (and check errors)
	bool checkErrors = true;
	//receptacle for shared structures whose owner turned out to be corrupted
	ccHObject* orphans = new ccHObject("Orphans (CORRUPTED FILE)");;
	//depth-first traversal of the loaded tree
	ccHObject::Container toCheck;
	toCheck.push_back(root);
	while (!toCheck.empty())
	{
		ccHObject* currentObject = toCheck.back();
		toCheck.pop_back();
		assert(currentObject);

		//we check objects that have links to other entities (meshes, polylines, etc.)
		if (currentObject->isKindOf(CC_MESH))
		{
			//specific case: mesh groups are deprecated!
			if (currentObject->isA(CC_MESH_GROUP))
			{
				//TODO
				ccLog::Warning(QString("Mesh groups are deprecated! Entity %1 should be ignored...").arg(currentObject->getName()));
			}
			else if (currentObject->isA(CC_SUB_MESH))
			{
				ccSubMesh* subMesh = ccHObjectCaster::ToSubMesh(currentObject);

				//normally, the associated mesh should be the sub-mesh's parent!
				//however we have its ID so we will look for it just to be sure
				intptr_t meshID = (intptr_t)subMesh->getAssociatedMesh();
				if (meshID > 0)
				{
					ccHObject* mesh = root->find(static_cast<int>(meshID));
					if (mesh && mesh->isA(CC_MESH))
					{
						subMesh->setAssociatedMesh(ccHObjectCaster::ToMesh(mesh));
					}
					else
					{
						//we have a problem here ;)
						//normally, the associated mesh should be the sub-mesh's parent!
						if (subMesh->getParent() && subMesh->getParent()->isA(CC_MESH))
						{
							subMesh->setAssociatedMesh(ccHObjectCaster::ToMesh(subMesh->getParent()));
						}
						else
						{
							//no way to recover the link: abort the load
							subMesh->setAssociatedMesh(0);
							//DGM: can't delete it, too dangerous (bad pointers ;)
							//delete subMesh;
							ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find associated mesh (ID=%1) for sub-mesh '%2' in the file!").arg(meshID).arg(subMesh->getName()));
							return CC_FERR_MALFORMED_FILE;
						}
					}
				}
			}
			else if (currentObject->isA(CC_MESH) || currentObject->isKindOf(CC_PRIMITIVE)) //CC_MESH or CC_PRIMITIVE!
			{
				ccMesh* mesh = ccHObjectCaster::ToMesh(currentObject);
				assert(mesh);

				//vertices (mandatory: abort on failure)
				intptr_t cloudID = (intptr_t)mesh->getAssociatedCloud();
				if (cloudID > 0)
				{
					ccHObject* cloud = root->find(static_cast<int>(cloudID));
					if (cloud && cloud->isKindOf(CC_POINT_CLOUD))
					{
						mesh->setAssociatedCloud(ccHObjectCaster::ToGenericPointCloud(cloud));
					}
					else
					{
						//we have a problem here ;)
						mesh->setAssociatedCloud(0);
						if (mesh->getMaterialSet())
							mesh->setMaterialSet(0,false);
						//DGM: can't delete it, too dangerous (bad pointers ;)
						//delete mesh;
						ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find vertices (ID=%1) for mesh '%2' in the file!").arg(cloudID).arg(mesh->getName()));
						return CC_FERR_MALFORMED_FILE;
					}
				}

				//materials (optional: broken link only degrades the result)
				ccHObject* materials = 0;
				intptr_t matSetID = (intptr_t)mesh->getMaterialSet();
				if (matSetID > 0)
				{
					materials = root->find(static_cast<int>(matSetID));
					if (materials && materials->isA(CC_MATERIAL_SET))
						mesh->setMaterialSet(static_cast<ccMaterialSet*>(materials),false);
					else
					{
						//we have a (less severe) problem here ;)
						mesh->setMaterialSet(0,false);
						mesh->showMaterials(false);
						ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find shared materials set (ID=%1) for mesh '%2' in the file!").arg(matSetID).arg(mesh->getName()));
						result = CC_FERR_BROKEN_DEPENDENCY_ERROR;

						//add it to the 'orphans' set
						if (materials)
							orphans->addChild(materials);
						materials = 0;
					}
				}
				//per-triangle normals (optional)
				ccHObject* triNormsTable = 0;
				intptr_t triNormsTableID = (intptr_t)mesh->getTriNormsTable();
				if (triNormsTableID > 0)
				{
					triNormsTable = root->find(static_cast<int>(triNormsTableID));
					if (triNormsTable && triNormsTable->isA(CC_NORMAL_INDEXES_ARRAY))
					{
						mesh->setTriNormsTable(static_cast<NormsIndexesTableType*>(triNormsTable),false);
					}
					else
					{
						//we have a (less severe) problem here ;)
						mesh->setTriNormsTable(0,false);
						mesh->showTriNorms(false);
						ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find shared normals (ID=%1) for mesh '%2' in the file!").arg(triNormsTableID).arg(mesh->getName()));
						result = CC_FERR_BROKEN_DEPENDENCY_ERROR;

						//add it to the 'orphans' set
						if (triNormsTable)
							orphans->addChild(triNormsTable);
						triNormsTable = 0;
					}
				}
				//per-triangle texture coordinates (optional)
				ccHObject* texCoordsTable = 0;
				intptr_t texCoordArrayID = (intptr_t)mesh->getTexCoordinatesTable();
				if (texCoordArrayID > 0)
				{
					texCoordsTable = root->find(static_cast<int>(texCoordArrayID));
					if (texCoordsTable && texCoordsTable->isA(CC_TEX_COORDS_ARRAY))
						mesh->setTexCoordinatesTable(static_cast<TextureCoordsContainer*>(texCoordsTable),false);
					else
					{
						//we have a (less severe) problem here ;)
						mesh->setTexCoordinatesTable(0,false);
						ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find shared texture coordinates (ID=%1) for mesh '%2' in the file!").arg(texCoordArrayID).arg(mesh->getName()));
						result = CC_FERR_BROKEN_DEPENDENCY_ERROR;

						//add it to the 'orphans' set
						if (texCoordsTable)
							orphans->addChild(texCoordsTable);
						texCoordsTable = 0;
					}
				}

				//sanity check: all triangle indices must fit in the vertex cloud
				if (checkErrors)
				{
					ccGenericPointCloud* pc = mesh->getAssociatedCloud();
					unsigned faceCount = mesh->size();
					unsigned vertCount = pc->size();
					for (unsigned i=0; i<faceCount; ++i)
					{
						const CCLib::TriangleSummitsIndexes* tri = mesh->getTriangleIndexes(i);
						if (	tri->i1 >= vertCount
							||	tri->i2 >= vertCount
							||	tri->i3 >= vertCount )
						{
							ccLog::Warning(QString("[BinFilter::loadFileV2] File is corrupted: missing vertices for mesh '%1'!").arg(mesh->getName()));

							//the mesh is dropped but its shared structures are
							//kept in the 'orphans' set (renamed after the mesh)
							//add cloud to the 'orphans' set
							pc->setName(mesh->getName() + QString(".") + pc->getName());
							orphans->addChild(pc);
							if (texCoordsTable)
							{
								texCoordsTable->setName(mesh->getName() + QString(".") + texCoordsTable->getName());
								orphans->addChild(texCoordsTable);
							}
							if (triNormsTable)
							{
								triNormsTable->setName(mesh->getName() + QString(".") + triNormsTable->getName());
								orphans->addChild(triNormsTable);
							}
							if (materials)
							{
								materials->setName(mesh->getName() + QString(".") + materials->getName());
								orphans->addChild(materials);
							}
							//delete corrupted mesh
							mesh->setMaterialSet(0,false);
							mesh->setTriNormsTable(0,false);
							mesh->setTexCoordinatesTable(0,false);
							if (mesh->getParent())
								mesh->getParent()->removeChild(mesh);
							mesh = 0;

							break;
						}
					}
				}
			}
		}
		else if (currentObject->isKindOf(CC_POLY_LINE))
		{
			//polylines: re-link the associated vertex cloud (mandatory)
			ccPolyline* poly = static_cast<ccPolyline*>(currentObject);
			intptr_t cloudID = (intptr_t)poly->getAssociatedCloud();
			ccHObject* cloud = root->find(static_cast<int>(cloudID));
			if (cloud && cloud->isKindOf(CC_POINT_CLOUD))
				poly->setAssociatedCloud(ccHObjectCaster::ToGenericPointCloud(cloud));
			else
			{
				//we have a problem here ;)
				poly->setAssociatedCloud(0);
				//DGM: can't delete it, too dangerous (bad pointers ;)
				//delete root;
				ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find vertices (ID=%1) for polyline '%2' in the file!").arg(cloudID).arg(poly->getName()));
				return CC_FERR_MALFORMED_FILE;
			}
		}
		else if (currentObject->isA(CC_2D_LABEL))
		{
			//2D labels: each picked point stores its cloud as an ID
			cc2DLabel* label = static_cast<cc2DLabel*>(currentObject);
			std::vector<cc2DLabel::PickedPoint> correctedPickedPoints;

			//we must check all label 'points'!
			for (unsigned i=0;i<label->size();++i)
			{
				const cc2DLabel::PickedPoint& pp = label->getPoint(i);
				intptr_t cloudID = (intptr_t)pp.cloud;
				ccHObject* cloud = root->find(static_cast<int>(cloudID));
				if (cloud && cloud->isKindOf(CC_POINT_CLOUD))
				{
					ccGenericPointCloud* genCloud = ccHObjectCaster::ToGenericPointCloud(cloud);
					assert(genCloud->size()>pp.index);
					correctedPickedPoints.push_back(cc2DLabel::PickedPoint(genCloud,pp.index));
				}
				else
				{
					//we have a problem here ;) --> drop the whole label
					ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find cloud (ID=%1) associated to label '%2' in the file!").arg(cloudID).arg(label->getName()));
					if (label->getParent())
						label->getParent()->removeChild(label);
					if (!label->getFlagState(CC_FATHER_DEPENDENT))
					{
						//DGM: can't delete it, too dangerous (bad pointers ;)
						//delete label;
					}
					label=0;
					break;
				}
			}

			if (label) //correct label data
			{
				//rebuild the label with the resolved cloud pointers,
				//preserving its visibility and (raw) name
				assert(correctedPickedPoints.size() == label->size());
				bool visible = label->isVisible();
				QString originalName(label->getRawName());
				label->clear();
				for (unsigned i=0;i<correctedPickedPoints.size();++i)
					label->addPoint(correctedPickedPoints[i].cloud,correctedPickedPoints[i].index);
				label->setVisible(visible);
				label->setName(originalName);
			}
		}
		else if (currentObject->isA(CC_FACET))
		{
			//facets: re-link the four optional sub-parts (all non-fatal)
			ccFacet* facet = ccHObjectCaster::ToFacet(currentObject);

			//origin points
			{
				intptr_t cloudID = (intptr_t)facet->getOriginPoints();
				if (cloudID > 0)
				{
					ccHObject* cloud = root->find(static_cast<int>(cloudID));
					if (cloud && cloud->isA(CC_POINT_CLOUD))
						facet->setOriginPoints(ccHObjectCaster::ToPointCloud(cloud));
					else
					{
						//we have a problem here ;)
						facet->setOriginPoints(0);
						ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find origin points (ID=%1) for facet '%2' in the file!").arg(cloudID).arg(facet->getName()));
					}
				}
			}
			//contour points
			{
				intptr_t cloudID = (intptr_t)facet->getContourVertices();
				if (cloudID > 0)
				{
					ccHObject* cloud = root->find(static_cast<int>(cloudID));
					if (cloud && cloud->isA(CC_POINT_CLOUD))
						facet->setContourVertices(ccHObjectCaster::ToPointCloud(cloud));
					else
					{
						//we have a problem here ;)
						facet->setContourVertices(0);
						ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find contour points (ID=%1) for facet '%2' in the file!").arg(cloudID).arg(facet->getName()));
					}
				}
			}
			//contour polyline
			{
				intptr_t polyID = (intptr_t)facet->getContour();
				if (polyID > 0)
				{
					ccHObject* poly = root->find(static_cast<int>(polyID));
					if (poly && poly->isA(CC_POLY_LINE))
						facet->setContour(ccHObjectCaster::ToPolyline(poly));
					else
					{
						//we have a problem here ;)
						//NOTE(review): resets the contour VERTICES, not the
						//contour polyline - looks like a copy/paste slip; confirm
						facet->setContourVertices(0);
						ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find contour polyline (ID=%1) for facet '%2' in the file!").arg(polyID).arg(facet->getName()));
					}
				}
			}
			//polygon mesh
			{
				intptr_t polyID = (intptr_t)facet->getPolygon();
				if (polyID > 0)
				{
					ccHObject* poly = root->find(static_cast<int>(polyID));
					if (poly && poly->isA(CC_MESH))
					{
						facet->setPolygon(ccHObjectCaster::ToMesh(poly));
					}
					else
					{
						//we have a problem here ;)
						//NOTE(review): same as above - resets the contour
						//vertices instead of the polygon; confirm intent
						facet->setContourVertices(0);
						ccLog::Warning(QString("[BinFilter::loadFileV2] Couldn't find polygon mesh (ID=%1) for facet '%2' in the file!").arg(polyID).arg(facet->getName()));
					}
				}
			}
		}

		//continue the traversal with this object's children
		if (currentObject)
			for (unsigned i=0;i<currentObject->getChildrenNumber();++i)
				toCheck.push_back(currentObject->getChild(i));
	}

	//update 'unique IDs'
	//(offset every loaded ID by the last pre-load ID so that the loaded
	//entities can't collide with the ones already in memory)
	toCheck.push_back(root);
	while (!toCheck.empty())
	{
		ccHObject* currentObject = toCheck.back();
		toCheck.pop_back();

		currentObject->setUniqueID(lastUniqueIDBeforeLoad+currentObject->getUniqueID());

		for (unsigned i=0;i<currentObject->getChildrenNumber();++i)
			toCheck.push_back(currentObject->getChild(i));
	}

	if (root->isA(CC_HIERARCHY_OBJECT))
	{
		//transfer children to container
		root->transferChildren(container,true);
	}
	else
	{
		container.addChild(root);
	}

	//orphans: only keep the group if it actually received something
	if (orphans)
	{
		if (orphans->getChildrenNumber() != 0)
		{
			orphans->setEnabled(false);
			container.addChild(orphans);
		}
		else
		{
			delete orphans;
			orphans = 0;
		}
	}

	return result;
}
//! Loads a SOI file (legacy const char* interface).
/** Same format as the QString overload: '#NP#'/'#NS#' header markers up to
	'#CC#', then one '#pt#' marker per scan followed by "X Y Z intensity"
	ASCII lines. One ccPointCloud is created per scan.
	\param filename file to load
	\param container output group (one cloud child per scan)
	\param alwaysDisplayLoadDialog unused here
	\param coordinatesShiftEnabled unused here
	\param coordinatesShift unused here
	\return error code (CC_FERR_NO_ERROR on success)
**/
CC_FILE_ERROR SoiFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, double* coordinatesShift/*=0*/)
{
	//open the file
	FILE *fp = fopen(filename, "rt");
	if (!fp)
		return CC_FERR_READING;

	//fixed-size line buffer (NOTE(review): writing through a cast of
	//c_str() is not sanctioned by the standard; &line[0] would be safe)
	std::string line;
	line.resize(MAX_ASCII_FILE_LINE_LENGTH);
	unsigned nbScansTotal = 0;
	unsigned nbPointsTotal = 0;

	//we read the first line
	char* eof = fgets ((char*)line.c_str(), MAX_ASCII_FILE_LINE_LENGTH , fp);
	char* pEnd;

	//header: read until the '#CC#' end-of-header marker (or EOF)
	while ((strcmp((char*)line.substr(0,4).c_str(),"#CC#") != 0)&&(eof != NULL))
	{
		if (strcmp(line.substr(0,4).c_str(),"#NP#")==0)
		{
			//total number of points (text after the 4-char marker)
			std::string numPoints (line,4,line.size()-4);
			nbPointsTotal=strtol(numPoints.c_str(),&pEnd,0);
			//ccConsole::Print("[SoiFilter::loadFile] Total number of points: %i\n",nbPointsTotal);
		}
		else if (strcmp(line.substr(0,4).c_str(),"#NS#")==0)
		{
			//total number of scans
			std::string numScans (line,4,line.size()-4);
			nbScansTotal=strtol(numScans.c_str(),&pEnd,0);
			//ccConsole::Print("[SoiFilter::loadFile] Total number of scans: %i\n",nbScansTotal);
		}
		eof = fgets ((char*)line.c_str(), MAX_ASCII_FILE_LINE_LENGTH , fp);
	}

	//both counts are mandatory
	if ((nbScansTotal == 0)||(nbPointsTotal == 0))
	{
		ccConsole::Warning("[SoiFilter::loadFile] No points or scans defined in this file!");
		fclose(fp);
		return CC_FERR_NO_LOAD;
	}

	//Progress dialog
	ccProgressDialog pdlg(false); //cancel is not supported
	pdlg.setMethodTitle("Open SOI file");
	char buffer[256];
	sprintf(buffer,"%i scans / %i points\n",nbScansTotal,nbPointsTotal);
	CCLib::NormalizedProgress nprogress(&pdlg,nbPointsTotal);
	pdlg.setInfo(buffer);
	pdlg.start();

	//Scan by scan
	for (unsigned k=0;k<nbScansTotal;k++)
	{
		//NOTE: this 'eof' deliberately shadows the header-loop variable
		char* eof = fgets ((char*)line.c_str(), MAX_ASCII_FILE_LINE_LENGTH , fp);

		//we only look for points (we ignore the rest)
		while ((strcmp(line.substr(0,4).c_str(),"#pt#")!=0)&&(eof != NULL))
			eof = fgets ((char*)line.c_str(), MAX_ASCII_FILE_LINE_LENGTH , fp);

		unsigned nbOfPoints = 0;
		if (strcmp(line.substr(0,4).c_str(),"#pt#")==0)
		{
			//number of points in this scan
			std::string numPoints(line,4,line.size()-4);
			nbOfPoints=strtol(numPoints.c_str(),&pEnd,0);
			//ccConsole::Print("[SoiFilter::loadFile] Scan %i - points: %i\n",k+1,nbOfPoints);
		}
		else
		{
			ccConsole::Warning("[SoiFilter::loadFile] Can't find marker '#pt#'!\n");
			fclose(fp);
			return CC_FERR_WRONG_FILE_TYPE;
		}

		//create the point cloud for this scan
		char name[64];
		sprintf(name,"unnamed - Scan #%i",k);
		ccPointCloud* loadedCloud = new ccPointCloud(name);
		if (!loadedCloud)
		{
			fclose(fp);
			return CC_FERR_NOT_ENOUGH_MEMORY;
		}

		if (nbOfPoints>0)
		{
			//NOTE(review): unlike the newer overload, the return values of
			//these reserve calls are ignored - an allocation failure here
			//would make the addPoint loop below overflow
			loadedCloud->reserveThePointsTable(nbOfPoints);
			loadedCloud->reserveTheRGBTable();
			loadedCloud->showColors(true);
		}
		else
		{
			//NOTE(review): 'loadedCloud' is leaked on this path
			ccConsole::Warning("[SoiFilter::loadFile] Scan #%i is empty!\n",k);
			continue;
		}

		CCVector3 P;
		int c = 0;

		//we can read points now
		//NOTE(review): fscanf's return value is not checked - a malformed
		//line would leave P/c unchanged and silently desynchronize parsing
		for (unsigned i=0;i<nbOfPoints;i++)
		{
			fscanf(fp,"%f %f %f %i",&P.x,&P.y,&P.z,&c);
			loadedCloud->addPoint(P);
			//intensity is mapped to a grey color (scaling factor uncertain)
			loadedCloud->addGreyColor(colorType(c<<3)); //<<2 ? <<3 ? we lack some info. here ...
			nprogress.oneStep();
		}

		container.addChild(loadedCloud);
	}

	fclose(fp);

	return CC_FERR_NO_ERROR;
}
CC_FILE_ERROR RasterGridFilter::loadFile(QString filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, CCVector3d* coordinatesShift/*=0*/) { GDALAllRegister(); ccLog::PrintDebug("(GDAL drivers: %i)", GetGDALDriverManager()->GetDriverCount()); GDALDataset* poDataset = static_cast<GDALDataset*>(GDALOpen( qPrintable(filename), GA_ReadOnly )); if( poDataset != NULL ) { ccLog::Print(QString("Raster file: '%1'").arg(filename)); ccLog::Print( "Driver: %s/%s", poDataset->GetDriver()->GetDescription(), poDataset->GetDriver()->GetMetadataItem( GDAL_DMD_LONGNAME ) ); int rasterCount = poDataset->GetRasterCount(); int rasterX = poDataset->GetRasterXSize(); int rasterY = poDataset->GetRasterYSize(); ccLog::Print( "Size is %dx%dx%d", rasterX, rasterY, rasterCount ); ccPointCloud* pc = new ccPointCloud(); if (!pc->reserve(static_cast<unsigned>(rasterX * rasterY))) { delete pc; return CC_FERR_NOT_ENOUGH_MEMORY; } if( poDataset->GetProjectionRef() != NULL ) ccLog::Print( "Projection is `%s'", poDataset->GetProjectionRef() ); double adfGeoTransform[6] = { 0, //top left x 1, //w-e pixel resolution (can be negative) 0, //0 0, //top left y 0, //0 1 //n-s pixel resolution (can be negative) }; if( poDataset->GetGeoTransform( adfGeoTransform ) == CE_None ) { ccLog::Print( "Origin = (%.6f,%.6f)", adfGeoTransform[0], adfGeoTransform[3] ); ccLog::Print( "Pixel Size = (%.6f,%.6f)", adfGeoTransform[1], adfGeoTransform[5] ); } if (adfGeoTransform[1] == 0 || adfGeoTransform[5] == 0) { ccLog::Warning("Invalid pixel size! 
Forcing it to (1,1)"); adfGeoTransform[1] = adfGeoTransform[5] = 1; } CCVector3d origin( adfGeoTransform[0], adfGeoTransform[3], 0.0 ); CCVector3d Pshift(0,0,0); //check for 'big' coordinates { bool shiftAlreadyEnabled = (coordinatesShiftEnabled && *coordinatesShiftEnabled && coordinatesShift); if (shiftAlreadyEnabled) Pshift = *coordinatesShift; bool applyAll = false; if ( sizeof(PointCoordinateType) < 8 && ccCoordinatesShiftManager::Handle(origin,0,alwaysDisplayLoadDialog,shiftAlreadyEnabled,Pshift,0,&applyAll)) { pc->setGlobalShift(Pshift); ccLog::Warning("[RasterFilter::loadFile] Raster has been recentered! Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z); //we save coordinates shift information if (applyAll && coordinatesShiftEnabled && coordinatesShift) { *coordinatesShiftEnabled = true; *coordinatesShift = Pshift; } } } //create blank raster 'grid' { double z = 0.0 /*+ Pshift.z*/; for (int j=0; j<rasterY; ++j) { double y = adfGeoTransform[3] + static_cast<double>(j) * adfGeoTransform[5] + Pshift.y; CCVector3 P( 0, static_cast<PointCoordinateType>(y), static_cast<PointCoordinateType>(z)); for (int i=0; i<rasterX; ++i) { double x = adfGeoTransform[0] + static_cast<double>(i) * adfGeoTransform[1] + Pshift.x; P.x = static_cast<PointCoordinateType>(x); pc->addPoint(P); } } QVariant xVar = QVariant::fromValue<int>(rasterX); QVariant yVar = QVariant::fromValue<int>(rasterY); pc->setMetaData("raster_width",xVar); pc->setMetaData("raster_height",yVar); } //fetch raster bands bool zRasterProcessed = false; unsigned zInvalid = 0; double zMinMax[2] = {0, 0}; for (int i=1; i<=rasterCount; ++i) { ccLog::Print( "Reading band #%i", i); GDALRasterBand* poBand = poDataset->GetRasterBand(i); GDALColorInterp colorInterp = poBand->GetColorInterpretation(); GDALDataType bandType = poBand->GetRasterDataType(); int nBlockXSize, nBlockYSize; poBand->GetBlockSize( &nBlockXSize, &nBlockYSize ); ccLog::Print( "Block=%dx%d Type=%s, ColorInterp=%s", nBlockXSize, nBlockYSize, 
GDALGetDataTypeName(poBand->GetRasterDataType()), GDALGetColorInterpretationName(colorInterp) ); //fetching raster scan-line int nXSize = poBand->GetXSize(); int nYSize = poBand->GetYSize(); assert(nXSize == rasterX); assert(nYSize == rasterY); int bGotMin, bGotMax; double adfMinMax[2] = {0, 0}; adfMinMax[0] = poBand->GetMinimum( &bGotMin ); adfMinMax[1] = poBand->GetMaximum( &bGotMax ); if (!bGotMin || !bGotMax ) //DGM FIXME: if the file is corrupted (e.g. ASCII ArcGrid with missing rows) this method will enter in a infinite loop! GDALComputeRasterMinMax((GDALRasterBandH)poBand, TRUE, adfMinMax); ccLog::Print( "Min=%.3fd, Max=%.3f", adfMinMax[0], adfMinMax[1] ); GDALColorTable* colTable = poBand->GetColorTable(); if( colTable != NULL ) printf( "Band has a color table with %d entries", colTable->GetColorEntryCount() ); if( poBand->GetOverviewCount() > 0 ) printf( "Band has %d overviews", poBand->GetOverviewCount() ); if (colorInterp == GCI_Undefined && !zRasterProcessed/*&& !colTable*/) //probably heights? 
{ zRasterProcessed = true; zMinMax[0] = adfMinMax[0]; zMinMax[1] = adfMinMax[1]; double* scanline = (double*) CPLMalloc(sizeof(double)*nXSize); //double* scanline = new double[nXSize]; memset(scanline,0,sizeof(double)*nXSize); for (int j=0; j<nYSize; ++j) { if (poBand->RasterIO( GF_Read, /*xOffset=*/0, /*yOffset=*/j, /*xSize=*/nXSize, /*ySize=*/1, /*buffer=*/scanline, /*bufferSizeX=*/nXSize, /*bufferSizeY=*/1, /*bufferType=*/GDT_Float64, /*x_offset=*/0, /*y_offset=*/0 ) != CE_None) { delete pc; CPLFree(scanline); GDALClose(poDataset); return CC_FERR_READING; } for (int k=0; k<nXSize; ++k) { double z = static_cast<double>(scanline[k]) + Pshift[2]; unsigned pointIndex = static_cast<unsigned>(k + j * rasterX); if (pointIndex <= pc->size()) { if (z < zMinMax[0] || z > zMinMax[1]) { z = zMinMax[0] - 1.0; ++zInvalid; } const_cast<CCVector3*>(pc->getPoint(pointIndex))->z = static_cast<PointCoordinateType>(z); } } } //update bounding-box pc->invalidateBoundingBox(); if (scanline) CPLFree(scanline); scanline = 0; } else //colors { bool isRGB = false; bool isScalar = false; bool isPalette = false; switch(colorInterp) { case GCI_Undefined: isScalar = true; break; case GCI_PaletteIndex: isPalette = true; break; case GCI_RedBand: case GCI_GreenBand: case GCI_BlueBand: isRGB = true; break; case GCI_AlphaBand: if (adfMinMax[0] != adfMinMax[1]) isScalar = true; else ccLog::Warning(QString("Alpha band ignored as it has a unique value (%1)").arg(adfMinMax[0])); break; default: isScalar = true; break; } if (isRGB || isPalette) { //first check that a palette exists if the band is a palette index if (isPalette && !colTable) { ccLog::Warning(QString("Band is declared as a '%1' but no palette is associated!").arg(GDALGetColorInterpretationName(colorInterp))); isPalette = false; } else { //instantiate memory for RBG colors if necessary if (!pc->hasColors() && !pc->setRGBColor(MAX_COLOR_COMP,MAX_COLOR_COMP,MAX_COLOR_COMP)) { ccLog::Warning(QString("Failed to instantiate memory for storing 
color band '%1'!").arg(GDALGetColorInterpretationName(colorInterp))); } else { assert(bandType <= GDT_Int32); int* colIndexes = (int*) CPLMalloc(sizeof(int)*nXSize); //double* scanline = new double[nXSize]; memset(colIndexes,0,sizeof(int)*nXSize); for (int j=0; j<nYSize; ++j) { if (poBand->RasterIO( GF_Read, /*xOffset=*/0, /*yOffset=*/j, /*xSize=*/nXSize, /*ySize=*/1, /*buffer=*/colIndexes, /*bufferSizeX=*/nXSize, /*bufferSizeY=*/1, /*bufferType=*/GDT_Int32, /*x_offset=*/0, /*y_offset=*/0 ) != CE_None) { CPLFree(colIndexes); delete pc; return CC_FERR_READING; } for (int k=0; k<nXSize; ++k) { unsigned pointIndex = static_cast<unsigned>(k + j * rasterX); if (pointIndex <= pc->size()) { colorType* C = const_cast<colorType*>(pc->getPointColor(pointIndex)); switch(colorInterp) { case GCI_PaletteIndex: assert(colTable); { GDALColorEntry col; colTable->GetColorEntryAsRGB(colIndexes[k],&col); C[0] = static_cast<colorType>(col.c1 & MAX_COLOR_COMP); C[1] = static_cast<colorType>(col.c2 & MAX_COLOR_COMP); C[2] = static_cast<colorType>(col.c3 & MAX_COLOR_COMP); } break; case GCI_RedBand: C[0] = static_cast<colorType>(colIndexes[k] & MAX_COLOR_COMP); break; case GCI_GreenBand: C[1] = static_cast<colorType>(colIndexes[k] & MAX_COLOR_COMP); break; case GCI_BlueBand: C[2] = static_cast<colorType>(colIndexes[k] & MAX_COLOR_COMP); break; default: assert(false); break; } } } } if (colIndexes) CPLFree(colIndexes); colIndexes = 0; pc->showColors(true); } } } else if (isScalar) { ccScalarField* sf = new ccScalarField(GDALGetColorInterpretationName(colorInterp)); if (!sf->resize(pc->size(),true,NAN_VALUE)) { ccLog::Warning(QString("Failed to instantiate memory for storing '%1' as a scalar field!").arg(sf->getName())); sf->release(); sf = 0; } else { double* colValues = (double*) CPLMalloc(sizeof(double)*nXSize); //double* scanline = new double[nXSize]; memset(colValues,0,sizeof(double)*nXSize); for (int j=0; j<nYSize; ++j) { if (poBand->RasterIO( GF_Read, /*xOffset=*/0, /*yOffset=*/j, 
/*xSize=*/nXSize, /*ySize=*/1, /*buffer=*/colValues, /*bufferSizeX=*/nXSize, /*bufferSizeY=*/1, /*bufferType=*/GDT_Float64, /*x_offset=*/0, /*y_offset=*/0 ) != CE_None) { CPLFree(colValues); delete pc; return CC_FERR_READING; } for (int k=0; k<nXSize; ++k) { unsigned pointIndex = static_cast<unsigned>(k + j * rasterX); if (pointIndex <= pc->size()) { ScalarType s = static_cast<ScalarType>(colValues[k]); sf->setValue(pointIndex,s); } } } if (colValues) CPLFree(colValues); colValues = 0; sf->computeMinAndMax(); pc->addScalarField(sf); if (pc->getNumberOfScalarFields() == 1) pc->setCurrentDisplayedScalarField(0); pc->showSF(true); } } } } if (pc) { if (!zRasterProcessed) { ccLog::Warning("Raster has no height (Z) information: you can convert one of its scalar fields to Z with 'Edit > Scalar Fields > Set SF as coordinate(s)'"); } else if (zInvalid != 0 && zInvalid < pc->size()) { //shall we remove the points with invalid heights? if (QMessageBox::question(0,"Remove NaN points?","This raster has pixels with invalid heights. Shall we remove them?",QMessageBox::Yes, QMessageBox::No) == QMessageBox::Yes) { CCLib::ReferenceCloud validPoints(pc); unsigned count = pc->size(); bool error = true; if (validPoints.reserve(count-zInvalid)) { for (unsigned i=0; i<count; ++i) { if (pc->getPoint(i)->z >= zMinMax[0]) validPoints.addPointIndex(i); } if (validPoints.size() > 0) { validPoints.resize(validPoints.size()); ccPointCloud* newPC = pc->partialClone(&validPoints); if (newPC) { delete pc; pc = newPC; error = false; } } else { assert(false); } } if (error) { ccLog::Error("Not enough memory to remove the points with invalid heights!"); } } } container.addChild(pc); } GDALClose(poDataset); } else { return CC_FERR_UNKNOWN_FILE; } return CC_FERR_NO_ERROR; }
//! Loads a CloudCompare 'POV' (point-of-view) meta-file.
/** A POV file is an ASCII header followed by a list of sub-files (one per
	point of view); each sub-file is loaded with the best matching I/O filter
	and a ccGBLSensor is attached to every resulting cloud.
	\param filename POV file path
	\param container output group (loaded clouds are added as children)
	\param parameters generic loading parameters (forwarded to sub-file loaders)
	\return error code (CC_FERR_NO_ERROR on success)
**/
CC_FILE_ERROR PovFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters)
{
	assert(!filename.isEmpty());

	//opening file
	FILE* fp = fopen(qPrintable(filename), "rt");
	if (!fp)
		return CC_FERR_READING;

	//read buffer
	char line[MAX_ASCII_FILE_LINE_LENGTH];

	//header: first line must be the magic tag
	if (!fgets(line, MAX_ASCII_FILE_LINE_LENGTH, fp))
	{
		fclose(fp);
		return CC_FERR_READING;
	}
	if (strcmp(line,"#CC_POVS_FILE\n")!=0)
	{
		fclose(fp);
		return CC_FERR_READING;
	}

	//sensor rotation order (textual tag, old and new spellings accepted)
	char sensorType[256];
	if (fscanf(fp,"SENSOR_TYPE = %s\n",sensorType) < 0)
	{
		fclose(fp);
		return CC_FERR_READING;
	}

	ccGBLSensor::ROTATION_ORDER rotationOrder;
	if (	strcmp(sensorType,CC_SENSOR_ROTATION_ORDER_NAMES[ccGBLSensor::YAW_THEN_PITCH]) == 0
		||	strcmp(sensorType,CC_SENSOR_ROTATION_ORDER_OLD_NAMES[ccGBLSensor::YAW_THEN_PITCH]) == 0)
	{
		rotationOrder = ccGBLSensor::YAW_THEN_PITCH;
	}
	else if (	strcmp(sensorType,CC_SENSOR_ROTATION_ORDER_NAMES[ccGBLSensor::PITCH_THEN_YAW]) == 0
			||	strcmp(sensorType,CC_SENSOR_ROTATION_ORDER_OLD_NAMES[ccGBLSensor::PITCH_THEN_YAW]) == 0)
	{
		rotationOrder = ccGBLSensor::PITCH_THEN_YAW;
	}
	else
	{
		ccLog::Warning("[PovFilter::loadFile] Unhandled rotation order description! (%s)",sensorType);
		fclose(fp);
		return CC_FERR_READING;
	}

	float base = 0.0f;
	char unitsType[3]; //units: ignored in this version
	//remaining header lines: sensor base, units, then the end-of-header tag
	if (	fscanf(fp,"SENSOR_BASE = %f\n",&base) < 0
		||	fscanf(fp,"UNITS = %s\n",unitsType) < 0
		||	!fgets(line, MAX_ASCII_FILE_LINE_LENGTH, fp)
		||	strcmp(line,"#END_HEADER\n") != 0 )
	{
		fclose(fp);
		return CC_FERR_READING;
	}

	ccLog::Print("[PovFilter::loadFile] POV FILE [Type %s - base=%f - unit: %s]",sensorType,base,unitsType);

	//extract the relative path (sub-files are resolved against it)
	QString path = QFileInfo(filename).absolutePath();

	char subFileName[256];
	char subFileType[12];

	//each '#P...' section describes one point of view
	while (fgets(line, MAX_ASCII_FILE_LINE_LENGTH, fp))
	{
		if ((line[0]=='#')&&(line[1]=='P'))
		{
			ccLog::Print(QString(line).trimmed());
			//'F <name>': sub-file name
			if (fscanf(fp,"F %s\n",subFileName) < 0)
			{
				ccLog::PrintDebug("[PovFilter::loadFile] Read error (F) !");
				fclose(fp);
				return CC_FERR_READING;
			}
			//'T <type>': sub-file type (file extension)
			if (fscanf(fp,"T %s\n",subFileType) < 0)
			{
				ccLog::PrintDebug("[PovFilter::loadFile] Read error (T) !");
				fclose(fp);
				return CC_FERR_READING;
			}

			//load the sub-file (potentially several entities) for the current point of view
			FileIOFilter::Shared filter = FileIOFilter::FindBestFilterForExtension(subFileType);
			if (!filter)
			{
				ccLog::Warning(QString("[POV] No I/O filter found for loading file '%1' (type = '%2')").arg(subFileName).arg(subFileType));
				fclose(fp);
				return CC_FERR_UNKNOWN_FILE;
			}

			CC_FILE_ERROR result = CC_FERR_NO_ERROR;
			ccHObject* entities = FileIOFilter::LoadFromFile(QString("%0/%1").arg(path).arg(subFileName), parameters, filter, result);
			if (entities)
			{
				//read the sensor description for this point of view:
				//C = center, X/Y/Z = rotation matrix columns, A = angular steps
				ccGLMatrix rot;
				rot.toIdentity();
				CCVector3 sensorCenter(0,0,0);
				float dPhi = 1.0f, dTheta = 1.0f;

				while (fgets(line, MAX_ASCII_FILE_LINE_LENGTH, fp))
				{
					if (line[0]=='#') //start of the next section
						break;
					else if (line[0]=='C')
					{
						float C[3];
						sscanf(line,"C %f %f %f\n",C,C+1,C+2);
						sensorCenter = CCVector3::fromArray(C);
					}
					else if (line[0]=='X' || line[0]=='Y' || line[0]=='Z')
					{
						float V[3];
						sscanf(line+2,"%f %f %f\n",V,V+1,V+2);
						//'X'/'Y'/'Z' (ASCII 88/89/90) map to matrix columns 0/1/2
						unsigned char col = static_cast<unsigned char>(line[0])-88;
						float* mat = rot.data();
						mat[col+0] = V[0];
						mat[col+4] = V[1];
						mat[col+8] = V[2];
					}
					else if (line[0]=='A')
					{
						sscanf(line,"A %f %f\n",&dTheta,&dPhi);
					}
				}

				//collect the point clouds among the loaded entities
				ccHObject::Container clouds;
				if (entities->isKindOf(CC_TYPES::POINT_CLOUD))
				{
					clouds.push_back(entities);
				}
				else
				{
					entities->filterChildren(clouds,true,CC_TYPES::POINT_CLOUD);
					entities->detatchAllChildren();
					delete entities;
				}
				entities = 0;

				for (size_t i=0; i<clouds.size(); ++i)
				{
					ccGenericPointCloud* theCloud = ccHObjectCaster::ToGenericPointCloud(clouds[i]);
					ccGBLSensor* gls = new ccGBLSensor(rotationOrder);

					//DGM: the base simply corresponds to a shift of the center along the X axis!
					//NOTE(review): this subtraction is inside the per-cloud loop, so with
					//several clouds per POV the shift accumulates — verify intent
					sensorCenter.x -= base;
					//DGM: sensor center is now integrated in rigid transformation (= inverse of former rotation matrix + center as translation)
					ccGLMatrix trans = rot.inverse();
					trans.setTranslation(sensorCenter);
					gls->setRigidTransformation(trans);
					gls->setYawStep(dTheta);
					gls->setPitchStep(dPhi);
					gls->setVisible(true);
					gls->setEnabled(false);

					if (gls->computeAutoParameters(theCloud))
					{
						theCloud->addChild(gls);
					}
					else
					{
						ccLog::Warning(QString("[PovFilter::loadFile] failed to create sensor on cloud #%1 (%2)").arg(i).arg(theCloud->getName()));
						delete gls;
						gls = 0;
					}

					//theCloud->setName(subFileName);
					container.addChild(theCloud);
				}
			}
			else
			{
				if (result == CC_FERR_CANCELED_BY_USER)
				{
					break; //user cancelled: stop reading further POVs
				}
				else
				{
					ccLog::Print("[PovFilter::loadFile] File (%s) not found or empty!", subFileName);
				}
			}
		}
	}

	fclose(fp);
	return CC_FERR_NO_ERROR;
}
//! Loads an FBX file via the Autodesk FBX SDK and extracts all mesh nodes.
/** Walks the scene graph iteratively (explicit stack), converts every eMesh
	attribute with FromFbxMesh, applies the node's global transform and adds
	the resulting ccMesh to 'container'. Non-mesh attributes are ignored.
	\param filename FBX file path
	\param container output group
	\param alwaysDisplayLoadDialog forwarded to FromFbxMesh (Global Shift dialog)
	\param coordinatesShiftEnabled [in/out] optional global shift state
	\param coordinatesShift [in/out] optional global shift vector
	\return CC_FERR_NO_LOAD if nothing was added to 'container', else CC_FERR_NO_ERROR
**/
CC_FILE_ERROR FBXFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, CCVector3d* coordinatesShift/*=0*/)
{
	// Initialize the SDK manager. This object handles memory management.
	FbxManager* lSdkManager = FbxManager::Create();
	// Create the IO settings object.
	FbxIOSettings *ios = FbxIOSettings::Create(lSdkManager, IOSROOT);
	lSdkManager->SetIOSettings(ios);

	// Import options determine what kind of data is to be imported.
	// True is the default, but here we'll set some to true explicitly, and others to false.
	//(*(lSdkManager->GetIOSettings())).SetBoolProp(IMP_FBX_MATERIAL, true);
	//(*(lSdkManager->GetIOSettings())).SetBoolProp(IMP_FBX_TEXTURE, true);

	// Create an importer using the SDK manager.
	FbxImporter* lImporter = FbxImporter::Create(lSdkManager,"");

	// NOTE(review): 'result' is set on failure but the final return is based on
	// the container's child count only — confirm this is intended
	CC_FILE_ERROR result = CC_FERR_NO_ERROR;

	// Use the first argument as the filename for the importer.
	if (!lImporter->Initialize(filename, -1, lSdkManager->GetIOSettings()))
	{
		ccLog::Warning(QString("[FBX] Error: %1").arg(lImporter->GetStatus().GetErrorString()));
		result = CC_FERR_READING;
	}
	else
	{
		// Create a new scene so that it can be populated by the imported file.
		FbxScene* lScene = FbxScene::Create(lSdkManager,"myScene");

		// Import the contents of the file into the scene.
		if (lImporter->Import(lScene))
		{
			// Print the nodes of the scene and their attributes recursively.
			// Note that we are not printing the root node because it should
			// not contain any attributes.
			FbxNode* lRootNode = lScene->GetRootNode();
			std::vector<FbxNode*> nodes;
			nodes.push_back(lRootNode);

			//depth-first traversal with an explicit stack
			while (!nodes.empty())
			{
				FbxNode* lNode = nodes.back();
				nodes.pop_back();

				const char* nodeName = lNode->GetName();
#ifdef _DEBUG
				ccLog::Print(QString("Node: %1 - %2 properties").arg(nodeName).arg(lNode->GetNodeAttributeCount()));
#endif
				// scan the node's attributes.
				for(int i=0; i<lNode->GetNodeAttributeCount(); i++)
				{
					FbxNodeAttribute* pAttribute = lNode->GetNodeAttributeByIndex(i);
					FbxNodeAttribute::EType type = pAttribute->GetAttributeType();
#ifdef _DEBUG
					ccLog::Print(QString("\tProp. #%1").arg(GetAttributeTypeName(type)));
#endif
					switch(type)
					{
					case FbxNodeAttribute::eMesh:
						{
							ccMesh* mesh = FromFbxMesh(static_cast<FbxMesh*>(pAttribute),alwaysDisplayLoadDialog,coordinatesShiftEnabled,coordinatesShift);
							if (mesh)
							{
								//apply transformation (FBX column-major 4x4 copied to ccGLMatrix)
								FbxAMatrix& transform = lNode->EvaluateGlobalTransform();
								ccGLMatrix mat;
								float* data = mat.data();
								for (int c=0; c<4; ++c)
								{
									FbxVector4 C = transform.GetColumn(c);
									*data++ = static_cast<float>(C[0]);
									*data++ = static_cast<float>(C[1]);
									*data++ = static_cast<float>(C[2]);
									*data++ = static_cast<float>(C[3]);
								}
								mesh->applyGLTransformation_recursive(&mat);

								if (mesh->getName().isEmpty())
									mesh->setName(nodeName);

								container.addChild(mesh);
							}
						}
						break;

					case FbxNodeAttribute::eUnknown:
					case FbxNodeAttribute::eNull:
					case FbxNodeAttribute::eMarker:
					case FbxNodeAttribute::eSkeleton:
					case FbxNodeAttribute::eNurbs:
					case FbxNodeAttribute::ePatch:
					case FbxNodeAttribute::eCamera:
					case FbxNodeAttribute::eCameraStereo:
					case FbxNodeAttribute::eCameraSwitcher:
					case FbxNodeAttribute::eLight:
					case FbxNodeAttribute::eOpticalReference:
					case FbxNodeAttribute::eOpticalMarker:
					case FbxNodeAttribute::eNurbsCurve:
					case FbxNodeAttribute::eTrimNurbsSurface:
					case FbxNodeAttribute::eBoundary:
					case FbxNodeAttribute::eNurbsSurface:
					case FbxNodeAttribute::eShape:
					case FbxNodeAttribute::eLODGroup:
					case FbxNodeAttribute::eSubDiv:
					default:
						//not handled yet
						break;
					}
				}

				// Recursively add the children.
				for(int j=0; j<lNode->GetChildCount(); j++)
				{
					nodes.push_back(lNode->GetChild(j));
				}
			}
		}
	}

	// The file is imported, so get rid of the importer.
	lImporter->Destroy();
	// Destroy the SDK manager and all the other objects it was handling.
	lSdkManager->Destroy();

	return container.getChildrenNumber() == 0 ? CC_FERR_NO_LOAD : CC_FERR_NO_ERROR;
}
//! Loads a V2 CloudCompare BIN stream and re-links all inter-entity dependencies.
/** During serialization, links between entities (mesh->vertices, polyline->cloud,
	sensor->buffer, etc.) are stored as unique IDs cast into the pointer slots.
	After deserializing the whole tree, this routine walks it and replaces each
	ID-as-pointer with the actual object found via FindRobust(). Broken links
	are either fatal (CC_FERR_MALFORMED_FILE) or downgraded to
	CC_FERR_BROKEN_DEPENDENCY_ERROR with the dangling parts collected in an
	'Orphans' group. Finally, unique IDs are de-duplicated and shifted above the
	IDs that existed before the load.
	\param in open file positioned right after the V2 header
	\param container output group
	\param flags deserialization flags (coordinate/scalar precision, etc.)
	\return error code
**/
CC_FILE_ERROR BinFilter::LoadFileV2(QFile& in, ccHObject& container, int flags)
{
	assert(in.isOpen());

	uint32_t binVersion = 20;
	if (in.read((char*)&binVersion,4) < 0)
		return CC_FERR_READING;

	if (binVersion < 20) //should be superior to 2.0!
		return CC_FERR_MALFORMED_FILE;

	QString coordsFormat = ( (flags & ccSerializableObject::DF_POINT_COORDS_64_BITS) ? "double" : "float");
	QString scalarFormat = ( (flags & ccSerializableObject::DF_SCALAR_VAL_32_BITS) ? "float" : "double");
	ccLog::Print(QString("[BIN] Version %1.%2 (coords: %3 / scalar: %4)").arg(binVersion/10).arg(binVersion%10).arg(coordsFormat).arg(scalarFormat));

	//we keep track of the last unique ID before load
	unsigned lastUniqueIDBeforeLoad = ccObject::GetLastUniqueID();

	//we read first entity type
	CC_CLASS_ENUM classID = ccObject::ReadClassIDFromFile(in, static_cast<short>(binVersion));
	if (classID == CC_TYPES::OBJECT)
		return CC_FERR_CONSOLE_ERROR;

	ccHObject* root = ccHObject::New(classID);
	if (!root)
		return CC_FERR_MALFORMED_FILE;

	if (classID == CC_TYPES::CUSTOM_H_OBJECT)
	{
		//custom objects need a two-pass load: peek at the metadata to find the
		//plugin factory, then rewind and deserialize with the right class
		// store seeking position
		size_t original_pos = in.pos();
		// we need to load it as plain ccCustomHobject
		root->fromFileNoChildren(in, static_cast<short>(binVersion), flags); // this will load it
		in.seek(original_pos); // reseek back the file

		QString classId = root->getMetaData("class_name").toString();
		QString pluginId = root->getMetaData("plugin_name").toString();

		// try to get a new object from external factories
		ccHObject* new_child = ccHObject::New(pluginId, classId);
		if (new_child) // found a plugin that can deserialize it
			root = new_child;
		else
			return CC_FERR_FILE_WAS_WRITTEN_BY_UNKNOWN_PLUGIN;
	}

	if (!root->fromFile(in,static_cast<short>(binVersion),flags))
	{
		//DGM: can't delete it, too dangerous (bad pointers ;)
		//delete root;
		return CC_FERR_CONSOLE_ERROR;
	}

	CC_FILE_ERROR result = CC_FERR_NO_ERROR;

	//re-link objects (and check errors)
	bool checkErrors = true;
	ccHObject* orphans = new ccHObject("Orphans (CORRUPTED FILE)");;
	ccHObject::Container toCheck;
	toCheck.push_back(root);
	while (!toCheck.empty())
	{
		ccHObject* currentObject = toCheck.back();
		toCheck.pop_back();

		assert(currentObject);

		//we check objects that have links to other entities (meshes, polylines, etc.)
		if (currentObject->isKindOf(CC_TYPES::MESH))
		{
			//specific case: mesh groups are deprecated!
			if (currentObject->isA(CC_TYPES::MESH_GROUP))
			{
				//TODO
				ccLog::Warning(QString("[BIN] Mesh groups are deprecated! Entity %1 should be ignored...").arg(currentObject->getName()));
			}
			else if (currentObject->isA(CC_TYPES::SUB_MESH))
			{
				ccSubMesh* subMesh = ccHObjectCaster::ToSubMesh(currentObject);

				//normally, the associated mesh should be the sub-mesh's parent!
				//however we have its ID so we will look for it just to be sure
				intptr_t meshID = (intptr_t)subMesh->getAssociatedMesh();
				if (meshID > 0)
				{
					ccHObject* mesh = FindRobust(root,subMesh,static_cast<unsigned>(meshID),CC_TYPES::MESH);
					if (mesh)
					{
						subMesh->setAssociatedMesh(ccHObjectCaster::ToMesh(mesh),false); //'false' because previous mesh is not null (= real mesh ID)!!!
					}
					else
					{
						//we have a problem here ;)
						//normally, the associated mesh should be the sub-mesh's parent!
						if (subMesh->getParent() && subMesh->getParent()->isA(CC_TYPES::MESH))
						{
							subMesh->setAssociatedMesh(ccHObjectCaster::ToMesh(subMesh->getParent()),false); //'false' because previous mesh is not null (= real mesh ID)!!!
						}
						else
						{
							subMesh->setAssociatedMesh(0,false); //'false' because previous mesh is not null (= real mesh ID)!!!
							//DGM: can't delete it, too dangerous (bad pointers ;)
							//delete subMesh;
							ccLog::Warning(QString("[BIN] Couldn't find associated mesh (ID=%1) for sub-mesh '%2' in the file!").arg(meshID).arg(subMesh->getName()));
							return CC_FERR_MALFORMED_FILE;
						}
					}
				}
			}
			else if (currentObject->isA(CC_TYPES::MESH) || currentObject->isKindOf(CC_TYPES::PRIMITIVE)) //CC_TYPES::MESH or CC_TYPES::PRIMITIVE!
			{
				ccMesh* mesh = ccHObjectCaster::ToMesh(currentObject);
				assert(mesh);

				//vertices (mandatory: a mesh without vertices is detached)
				intptr_t cloudID = (intptr_t)mesh->getAssociatedCloud();
				if (cloudID > 0)
				{
					ccHObject* cloud = FindRobust(root,mesh,static_cast<unsigned>(cloudID),CC_TYPES::POINT_CLOUD);
					if (cloud)
					{
						mesh->setAssociatedCloud(ccHObjectCaster::ToGenericPointCloud(cloud));
					}
					else
					{
						//we have a problem here ;)
						mesh->setAssociatedCloud(0);
						if (mesh->getMaterialSet())
							mesh->setMaterialSet(0,false);
						//DGM: can't delete it, too dangerous (bad pointers ;)
						//delete mesh;
						if (mesh->getParent())
						{
							mesh->getParent()->removeDependencyWith(mesh);
							mesh->getParent()->removeChild(mesh);
						}
						ccLog::Warning(QString("[BIN] Couldn't find vertices (ID=%1) for mesh '%2' in the file!").arg(cloudID).arg(mesh->getName()));
						mesh = 0;
						//return CC_FERR_MALFORMED_FILE;
					}
				}

				if (mesh)
				{
					//materials (optional, shared)
					ccHObject* materials = 0;
					intptr_t matSetID = (intptr_t)mesh->getMaterialSet();
					if (matSetID > 0)
					{
						materials = FindRobust(root,mesh,static_cast<unsigned>(matSetID),CC_TYPES::MATERIAL_SET);
						if (materials)
						{
							mesh->setMaterialSet(static_cast<ccMaterialSet*>(materials),false);
						}
						else
						{
							//we have a (less severe) problem here ;)
							mesh->setMaterialSet(0,false);
							mesh->showMaterials(false);
							ccLog::Warning(QString("[BIN] Couldn't find shared materials set (ID=%1) for mesh '%2' in the file!").arg(matSetID).arg(mesh->getName()));
							result = CC_FERR_BROKEN_DEPENDENCY_ERROR;

							//add it to the 'orphans' set
							//NOTE(review): 'materials' is necessarily null in this branch, so
							//this addChild can never fire — verify intent
							if (materials)
								orphans->addChild(materials);
							materials = 0;
						}
					}
					//per-triangle normals (optional, shared)
					ccHObject* triNormsTable = 0;
					intptr_t triNormsTableID = (intptr_t)mesh->getTriNormsTable();
					if (triNormsTableID > 0)
					{
						triNormsTable = FindRobust(root,mesh,static_cast<unsigned>(triNormsTableID),CC_TYPES::NORMAL_INDEXES_ARRAY);
						if (triNormsTable)
						{
							mesh->setTriNormsTable(static_cast<NormsIndexesTableType*>(triNormsTable),false);
						}
						else
						{
							//we have a (less severe) problem here ;)
							mesh->setTriNormsTable(0,false);
							mesh->showTriNorms(false);
							ccLog::Warning(QString("[BIN] Couldn't find shared normals (ID=%1) for mesh '%2' in the file!").arg(triNormsTableID).arg(mesh->getName()));
							result = CC_FERR_BROKEN_DEPENDENCY_ERROR;

							//add it to the 'orphans' set
							if (triNormsTable)
								orphans->addChild(triNormsTable);
							triNormsTable = 0;
						}
					}
					//per-triangle texture coordinates (optional, shared)
					ccHObject* texCoordsTable = 0;
					intptr_t texCoordArrayID = (intptr_t)mesh->getTexCoordinatesTable();
					if (texCoordArrayID > 0)
					{
						texCoordsTable = FindRobust(root,mesh,static_cast<unsigned>(texCoordArrayID),CC_TYPES::TEX_COORDS_ARRAY);
						if (texCoordsTable)
						{
							mesh->setTexCoordinatesTable(static_cast<TextureCoordsContainer*>(texCoordsTable),false);
						}
						else
						{
							//we have a (less severe) problem here ;)
							mesh->setTexCoordinatesTable(0,false);
							ccLog::Warning(QString("[BIN] Couldn't find shared texture coordinates (ID=%1) for mesh '%2' in the file!").arg(texCoordArrayID).arg(mesh->getName()));
							result = CC_FERR_BROKEN_DEPENDENCY_ERROR;

							//add it to the 'orphans' set
							if (texCoordsTable)
								orphans->addChild(texCoordsTable);
							texCoordsTable = 0;
						}
					}

					if (checkErrors)
					{
						//sanity check: every triangle must reference valid vertices
						ccGenericPointCloud* pc = mesh->getAssociatedCloud();
						unsigned faceCount = mesh->size();
						unsigned vertCount = pc->size();
						for (unsigned i=0; i<faceCount; ++i)
						{
							const CCLib::VerticesIndexes* tri = mesh->getTriangleVertIndexes(i);
							if (	tri->i1 >= vertCount
								||	tri->i2 >= vertCount
								||	tri->i3 >= vertCount )
							{
								ccLog::Warning(QString("[BIN] File is corrupted: missing vertices for mesh '%1'!").arg(mesh->getName()));

								//add cloud to the 'orphans' set
								pc->setName(mesh->getName() + QString(".") + pc->getName());
								orphans->addChild(pc);
								if (texCoordsTable)
								{
									texCoordsTable->setName(mesh->getName() + QString(".") + texCoordsTable->getName());
									orphans->addChild(texCoordsTable);
								}
								if (triNormsTable)
								{
									triNormsTable->setName(mesh->getName() + QString(".") + triNormsTable->getName());
									orphans->addChild(triNormsTable);
								}
								if (materials)
								{
									materials->setName(mesh->getName() + QString(".") + materials->getName());
									orphans->addChild(materials);
								}

								//delete corrupted mesh
								mesh->setMaterialSet(0,false);
								mesh->setTriNormsTable(0,false);
								mesh->setTexCoordinatesTable(0,false);
								if (mesh->getParent())
									mesh->getParent()->removeChild(mesh);
								mesh = 0;

								break;
							}
						}
					}
				}
			}
		}
		else if (currentObject->isKindOf(CC_TYPES::POLY_LINE))
		{
			//polyline: re-link its support cloud (mandatory)
			ccPolyline* poly = ccHObjectCaster::ToPolyline(currentObject);
			intptr_t cloudID = (intptr_t)poly->getAssociatedCloud();
			ccHObject* cloud = FindRobust(root,poly,static_cast<unsigned>(cloudID),CC_TYPES::POINT_CLOUD);
			if (cloud)
			{
				poly->setAssociatedCloud(ccHObjectCaster::ToGenericPointCloud(cloud));
			}
			else
			{
				//we have a problem here ;)
				poly->setAssociatedCloud(0);
				//DGM: can't delete it, too dangerous (bad pointers ;)
				//delete root;
				ccLog::Warning(QString("[BIN] Couldn't find vertices (ID=%1) for polyline '%2' in the file!").arg(cloudID).arg(poly->getName()));
				return CC_FERR_MALFORMED_FILE;
			}
		}
		else if (currentObject->isKindOf(CC_TYPES::SENSOR))
		{
			//sensor: re-link its (optional) positions buffer
			ccSensor* sensor = ccHObjectCaster::ToSensor(currentObject);
			intptr_t bufferID = (intptr_t)sensor->getPositions();
			if (bufferID > 0)
			{
				ccHObject* buffer = FindRobust(root,sensor,static_cast<unsigned>(bufferID),CC_TYPES::TRANS_BUFFER);
				if (buffer)
				{
					sensor->setPositions(ccHObjectCaster::ToTransBuffer(buffer));
				}
				else
				{
					//we have a problem here ;)
					sensor->setPositions(0);
					//DGM: can't delete it, too dangerous (bad pointers ;)
					//delete root;
					ccLog::Warning(QString("[BIN] Couldn't find trans. buffer (ID=%1) for sensor '%2' in the file!").arg(bufferID).arg(sensor->getName()));
					//positions are optional, so we can simply set them to NULL and go ahead, we do not need to return.
					//return CC_FERR_MALFORMED_FILE;
				}
			}
		}
		else if (currentObject->isA(CC_TYPES::LABEL_2D))
		{
			//2D label: re-link every picked point's cloud
			cc2DLabel* label = ccHObjectCaster::To2DLabel(currentObject);
			std::vector<cc2DLabel::PickedPoint> correctedPickedPoints;
			//we must check all label 'points'!
			for (unsigned i=0; i<label->size(); ++i)
			{
				const cc2DLabel::PickedPoint& pp = label->getPoint(i);
				intptr_t cloudID = (intptr_t)pp.cloud;
				ccHObject* cloud = FindRobust(root,label,static_cast<unsigned>(cloudID),CC_TYPES::POINT_CLOUD);
				if (cloud)
				{
					ccGenericPointCloud* genCloud = ccHObjectCaster::ToGenericPointCloud(cloud);
					assert(genCloud->size()>pp.index);
					correctedPickedPoints.push_back(cc2DLabel::PickedPoint(genCloud,pp.index));
				}
				else
				{
					//we have a problem here ;)
					ccLog::Warning(QString("[BIN] Couldn't find cloud (ID=%1) associated to label '%2' in the file!").arg(cloudID).arg(label->getName()));
					if (label->getParent())
						label->getParent()->removeChild(label);
					//DGM: can't delete it, too dangerous (bad pointers ;)
					//delete label;
					label = 0;
					break;
				}
			}

			if (label) //correct label data
			{
				assert(correctedPickedPoints.size() == label->size());
				bool visible = label->isVisible();
				QString originalName(label->getRawName());
				label->clear(true);
				for (unsigned i=0; i<correctedPickedPoints.size(); ++i)
					label->addPoint(correctedPickedPoints[i].cloud,correctedPickedPoints[i].index);
				label->setVisible(visible);
				label->setName(originalName);
			}
		}
		else if (currentObject->isA(CC_TYPES::FACET))
		{
			//facet: re-link its four (optional) components
			ccFacet* facet = ccHObjectCaster::ToFacet(currentObject);

			//origin points
			{
				intptr_t cloudID = (intptr_t)facet->getOriginPoints();
				if (cloudID > 0)
				{
					ccHObject* cloud = FindRobust(root,facet,static_cast<unsigned>(cloudID),CC_TYPES::POINT_CLOUD);
					if (cloud && cloud->isA(CC_TYPES::POINT_CLOUD))
					{
						facet->setOriginPoints(ccHObjectCaster::ToPointCloud(cloud));
					}
					else
					{
						//we have a problem here ;)
						facet->setOriginPoints(0);
						ccLog::Warning(QString("[BIN] Couldn't find origin points (ID=%1) for facet '%2' in the file!").arg(cloudID).arg(facet->getName()));
					}
				}
			}
			//contour points
			{
				intptr_t cloudID = (intptr_t)facet->getContourVertices();
				if (cloudID > 0)
				{
					ccHObject* cloud = FindRobust(root,facet,static_cast<unsigned>(cloudID),CC_TYPES::POINT_CLOUD);
					if (cloud)
					{
						facet->setContourVertices(ccHObjectCaster::ToPointCloud(cloud));
					}
					else
					{
						//we have a problem here ;)
						facet->setContourVertices(0);
						ccLog::Warning(QString("[BIN] Couldn't find contour points (ID=%1) for facet '%2' in the file!").arg(cloudID).arg(facet->getName()));
					}
				}
			}
			//contour polyline
			{
				intptr_t polyID = (intptr_t)facet->getContour();
				if (polyID > 0)
				{
					ccHObject* poly = FindRobust(root,facet,static_cast<unsigned>(polyID),CC_TYPES::POLY_LINE);
					if (poly)
					{
						facet->setContour(ccHObjectCaster::ToPolyline(poly));
					}
					else
					{
						//we have a problem here ;)
						//NOTE(review): this resets the contour VERTICES, not the contour
						//polyline (still holding an ID-as-pointer) — looks like a copy/paste
						//slip, should probably be setContour(0); verify
						facet->setContourVertices(0);
						ccLog::Warning(QString("[BIN] Couldn't find contour polyline (ID=%1) for facet '%2' in the file!").arg(polyID).arg(facet->getName()));
					}
				}
			}
			//polygon mesh
			{
				intptr_t polyID = (intptr_t)facet->getPolygon();
				if (polyID > 0)
				{
					ccHObject* poly = FindRobust(root,facet,static_cast<unsigned>(polyID),CC_TYPES::MESH);
					if (poly)
					{
						facet->setPolygon(ccHObjectCaster::ToMesh(poly));
					}
					else
					{
						//we have a problem here ;)
						facet->setPolygon(0);
						ccLog::Warning(QString("[BIN] Couldn't find polygon mesh (ID=%1) for facet '%2' in the file!").arg(polyID).arg(facet->getName()));
					}
				}
			}
		}
		else if (currentObject->isKindOf(CC_TYPES::IMAGE))
		{
			//image: re-link its (optional) camera sensor
			ccImage* image = ccHObjectCaster::ToImage(currentObject);
			intptr_t sensorID = (intptr_t)image->getAssociatedSensor();
			if (sensorID > 0)
			{
				ccHObject* sensor = FindRobust(root,image,static_cast<unsigned>(sensorID),CC_TYPES::CAMERA_SENSOR);
				if (sensor)
				{
					image->setAssociatedSensor(ccHObjectCaster::ToCameraSensor(sensor));
				}
				else
				{
					//we have a problem here ;)
					image->setAssociatedSensor(0);
					//DGM: can't delete it, too dangerous (bad pointers ;)
					//delete root;
					ccLog::Warning(QString("[BIN] Couldn't find camera sensor (ID=%1) for image '%2' in the file!").arg(sensorID).arg(image->getName()));
					//return CC_FERR_MALFORMED_FILE;
				}
			}
		}

		if (currentObject)
			for (unsigned i=0; i<currentObject->getChildrenNumber() ;++i)
				toCheck.push_back(currentObject->getChild(i));
	}

	//check for unique IDs duplicate (yes it happens :-( )
	{
		std::unordered_set<unsigned> uniqueIDs;
		unsigned maxUniqueID = root->findMaxUniqueID_recursive();
		assert(toCheck.empty());
		toCheck.push_back(root);
		while (!toCheck.empty())
		{
			ccHObject* currentObject = toCheck.back();
			toCheck.pop_back();

			assert(currentObject);

			//check that the ID is not already used (strangely it happens!)
			unsigned uniqueID = currentObject->getUniqueID();
			if (uniqueIDs.find(uniqueID) != uniqueIDs.end())
			{
				ccLog::Warning(QString("[BIN] Duplicate 'unique ID' found! (ID = %1)").arg(uniqueID));
				currentObject->setUniqueID(++maxUniqueID);
			}
			else
			{
				uniqueIDs.insert(uniqueID);
			}

			for (unsigned i=0; i<currentObject->getChildrenNumber() ;++i)
			{
				toCheck.push_back(currentObject->getChild(i));
			}
		}
	}

	//update 'unique IDs' (shift above the IDs that existed before the load)
	toCheck.push_back(root);
	while (!toCheck.empty())
	{
		ccHObject* currentObject = toCheck.back();
		toCheck.pop_back();

		currentObject->setUniqueID(lastUniqueIDBeforeLoad+currentObject->getUniqueID());

		for (unsigned i=0; i<currentObject->getChildrenNumber(); ++i)
			toCheck.push_back(currentObject->getChild(i));
	}

	if (root->isA(CC_TYPES::HIERARCHY_OBJECT))
	{
		//transfer children to container
		root->transferChildren(container,true);
		delete root;
		root = 0;
	}
	else
	{
		container.addChild(root);
	}

	//orphans (only kept if non-empty; displayed disabled as a warning to the user)
	if (orphans)
	{
		if (orphans->getChildrenNumber() != 0)
		{
			orphans->setEnabled(false);
			container.addChild(orphans);
		}
		else
		{
			delete orphans;
			orphans = 0;
		}
	}

	return result;
}
//! Loads an ICM meta-file: a point cloud plus a set of calibrated images.
/** The ICM file is a small ASCII header referencing a cloud file (loaded with
	the filter matching its declared type) and an images descriptor file
	(handled by loadCalibratedImages).
	\param filename ICM file path
	\param container output group (the loaded entities are added as children)
	\param alwaysDisplayLoadDialog unused here (kept for the common interface)
	\param coordinatesShiftEnabled unused here
	\param coordinatesShift unused here
	\return error code (CC_FERR_NO_ERROR on success)
**/
CC_FILE_ERROR IcmFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, double* coordinatesShift/*=0*/)
{
	//open the file
	FILE *fp = fopen(filename, "rt");
	if (!fp)
		return CC_FERR_READING;

	//read buffer
	char line[MAX_ASCII_FILE_LINE_LENGTH];

	//read the header (must start with the magic tag)
	if (!fgets(line, MAX_ASCII_FILE_LINE_LENGTH , fp))
	{
		fclose(fp);
		return CC_FERR_READING;
	}

	if (strncmp(line,"#CC_ICM_FILE",12)!=0)
	{
		fclose(fp);
		return CC_FERR_WRONG_FILE_TYPE;
	}

	//extract the relative path (referenced files are resolved against it)
	QString path = QFileInfo(filename).absolutePath();

	//'FILE_NAME=...': the associated cloud file
	char cloudFileName[MAX_ASCII_FILE_LINE_LENGTH];
	if (!fgets(line, MAX_ASCII_FILE_LINE_LENGTH , fp))
	{
		fclose(fp);
		return CC_FERR_READING;
	}
	if (strncmp(line,"FILE_NAME=",10)!=0)
	{
		fclose(fp);
		return CC_FERR_WRONG_FILE_TYPE;
	}
	sscanf(line,"FILE_NAME=%s",cloudFileName);

	//'FILE_TYPE=...': the cloud file format
	char subFileType[12];
	if (!fgets(line, MAX_ASCII_FILE_LINE_LENGTH , fp))
	{
		fclose(fp);
		return CC_FERR_READING;
	}
	if (strncmp(line,"FILE_TYPE=",10)!=0)
	{
		fclose(fp);
		return CC_FERR_WRONG_FILE_TYPE;
	}
	sscanf(line,"FILE_TYPE=%s",subFileType);

	CC_FILE_TYPES fType = FileIOFilter::StringToFileFormat(subFileType);

	//load the referenced file (potentially several entities)
	ccHObject* entities = FileIOFilter::LoadFromFile(qPrintable(QString("%0/%1").arg(path).arg(cloudFileName)),fType);
	if (!entities)
	{
		fclose(fp);
		return CC_FERR_READING;
	}

	container.addChild(entities);

	//load the calibrated images
	char imagesDescriptorFileName[MAX_ASCII_FILE_LINE_LENGTH];
	if (!fgets(line, MAX_ASCII_FILE_LINE_LENGTH , fp))
	{
		ccConsole::Error("[IcmFilter::loadModelFromIcmFile] Read error (IMAGES_DESCRIPTOR)! No image loaded");
		fclose(fp);
		return CC_FERR_READING;
	}
	else
	{
		if (strncmp(line,"IMAGES_DESCRIPTOR=",18)!=0)
		{
			fclose(fp);
			return CC_FERR_WRONG_FILE_TYPE;
		}
		sscanf(line,"IMAGES_DESCRIPTOR=%s",imagesDescriptorFileName);
		int n = loadCalibratedImages(entities,path,imagesDescriptorFileName);
		ccConsole::Print("[IcmFilter::loadModelFromIcmFile] %i image(s) loaded ...",n);
	}

	fclose(fp);
	return CC_FERR_NO_ERROR;
}
//! Loads a (potentially multi-scan) PTX file: for each scan, reads the grid
//! dimensions, the sensor and cloud 4x4 transformations, then the grid cells
//! (points + intensity [+ RGB]). Each non-empty scan becomes one ccPointCloud
//! child of 'container', with an optional ccGBLSensor and scan-grid structure.
/** \param filename PTX file path
	\param container output container (one child cloud per non-empty scan)
	\param parameters generic loading parameters (global shift handling, parent widget, auto-normals flag)
	\return error code (CC_FERR_NO_LOAD if nothing could be read)
**/
CC_FILE_ERROR PTXFilter::loadFile(	QString filename,
									ccHObject& container,
									LoadParameters& parameters)
{
	//open ASCII file for reading
	QFile file(filename);
	if (!file.open(QIODevice::ReadOnly | QIODevice::Text))
	{
		return CC_FERR_READING;
	}

	QTextStream inFile(&file);

	//global shift applied to the transformation matrices / to the raw points
	CCVector3d PshiftTrans(0,0,0);
	CCVector3d PshiftCloud(0,0,0);

	CC_FILE_ERROR result = CC_FERR_NO_LOAD;
	//intensity range accumulated over ALL scans (used at the end for SF saturation)
	ScalarType minIntensity = 0;
	ScalarType maxIntensity = 0;

	//progress dialog
	ccProgressDialog pdlg(true, parameters.parentWidget);
	pdlg.setMethodTitle(QObject::tr("Loading PTX file"));
	pdlg.setAutoClose(false);

	//progress dialog (for normals computation)
	ccProgressDialog normalsProgressDlg(true, parameters.parentWidget);
	normalsProgressDlg.setAutoClose(false);

	//loop over the scans: keeps going as long as the previous one loaded cleanly
	for (unsigned cloudIndex = 0; result == CC_FERR_NO_ERROR || result == CC_FERR_NO_LOAD; cloudIndex++)
	{
		unsigned width = 0, height = 0;
		ccGLMatrixd sensorTransD, cloudTransD;

		//read header
		{
			QString line = inFile.readLine();
			if (line.isNull() && container.getChildrenNumber() != 0) //end of file?
				break;

			//read the width (number of columns) and the height (number of rows) on the two first lines
			//(DGM: we transpose the matrix right away)
			bool ok;
			height = line.toUInt(&ok);
			if (!ok)
				return CC_FERR_MALFORMED_FILE;
			line = inFile.readLine();
			width = line.toUInt(&ok);
			if (!ok)
				return CC_FERR_MALFORMED_FILE;

			ccLog::Print(QString("[PTX] Scan #%1 - grid size: %2 x %3").arg(cloudIndex+1).arg(height).arg(width));

			//read sensor transformation matrix (4 lines of 3 values: translation then X/Y/Z axes)
			for (int i=0; i<4; ++i)
			{
				line = inFile.readLine();
				QStringList tokens = line.split(" ",QString::SkipEmptyParts);
				if (tokens.size() != 3)
					return CC_FERR_MALFORMED_FILE;

				double* colDest = 0;
				if (i == 0)
				{
					//Translation
					colDest = sensorTransD.getTranslation();
				}
				else
				{
					//X, Y and Z axis
					colDest = sensorTransD.getColumn(i-1);
				}

				for (int j=0; j<3; ++j)
				{
					assert(colDest);
					colDest[j] = tokens[j].toDouble(&ok);
					if (!ok)
						return CC_FERR_MALFORMED_FILE;
				}
			}
			//make the transform a little bit cleaner (necessary as it's read from ASCII!)
			CleanMatrix(sensorTransD);

			//read cloud transformation matrix (4 lines of 4 values, column by column)
			for (int i=0; i<4; ++i)
			{
				line = inFile.readLine();
				QStringList tokens = line.split(" ",QString::SkipEmptyParts);
				if (tokens.size() != 4)
					return CC_FERR_MALFORMED_FILE;

				double* col = cloudTransD.getColumn(i);
				for (int j=0; j<4; ++j)
				{
					col[j] = tokens[j].toDouble(&ok);
					if (!ok)
						return CC_FERR_MALFORMED_FILE;
				}
			}
			//make the transform a little bit cleaner (necessary as it's read from ASCII!)
			CleanMatrix(cloudTransD);

			//handle Global Shift directly on the first cloud's translation!
			if (cloudIndex == 0)
			{
				if (HandleGlobalShift(cloudTransD.getTranslationAsVec3D(),PshiftTrans,parameters))
				{
					//NOTE(review): "has be" typo in this user-visible message ("has been")
					ccLog::Warning("[PTXFilter::loadFile] Cloud has be recentered! Translation: (%.2f,%.2f,%.2f)",PshiftTrans.x,PshiftTrans.y,PshiftTrans.z);
				}
			}

			//'remove' global shift from the sensor and cloud transformation matrices
			cloudTransD.setTranslation(cloudTransD.getTranslationAsVec3D() + PshiftTrans);
			sensorTransD.setTranslation(sensorTransD.getTranslationAsVec3D() + PshiftTrans);
		}

		//now we can read the grid cells
		ccPointCloud* cloud = new ccPointCloud();
		if (container.getChildrenNumber() == 0)
		{
			cloud->setName("unnamed - Cloud");
		}
		else
		{
			if (container.getChildrenNumber() == 1)
				container.getChild(0)->setName("unnamed - Cloud 1"); //update previous cloud name!

			cloud->setName(QString("unnamed - Cloud %1").arg(container.getChildrenNumber()+1));
		}

		unsigned gridSize = width * height;
		if (!cloud->reserve(gridSize))
		{
			result = CC_FERR_NOT_ENOUGH_MEMORY;
			delete cloud;
			cloud = 0;
			break;
		}

		//set global shift
		cloud->setGlobalShift(PshiftTrans);

		//intensities (optional: we keep loading points even if this allocation fails)
		ccScalarField* intensitySF = new ccScalarField(CC_PTX_INTENSITY_FIELD_NAME);
		if (!intensitySF->reserve(static_cast<unsigned>(gridSize)))
		{
			ccLog::Warning("[PTX] Not enough memory to load intensities!");
			intensitySF->release();
			intensitySF = 0;
		}

		//grid structure (scan-grid: maps each cell to its point index, if any)
		ccPointCloud::Grid::Shared grid(new ccPointCloud::Grid);
		grid->w = width;
		grid->h = height;
		bool hasIndexGrid = true;
		try
		{
			grid->indexes.resize(gridSize,-1); //-1 means no cell/point
		}
		catch (const std::bad_alloc&)
		{
			ccLog::Warning("[PTX] Not enough memory to load the grid structure");
			hasIndexGrid = false;
		}

		//read points
		{
			CCLib::NormalizedProgress nprogress(&pdlg, gridSize);
			pdlg.setInfo(qPrintable(QString("Number of cells: %1").arg(gridSize)));
			pdlg.start();

			bool firstPoint = true;
			bool hasColors = false;
			bool loadColors = false;
			bool loadGridColors = false;
			size_t gridIndex = 0;

			for (unsigned j=0; j<height; ++j)
			{
				for (unsigned i=0; i<width; ++i, ++gridIndex)
				{
					QString line = inFile.readLine();
					QStringList tokens = line.split(" ",QString::SkipEmptyParts);

					//the very first cell tells us whether the scan has RGB data (7 tokens) or not (4)
					if (firstPoint)
					{
						hasColors = (tokens.size() == 7);
						if (hasColors)
						{
							loadColors = cloud->reserveTheRGBTable();
							if (!loadColors)
							{
								ccLog::Warning("[PTX] Not enough memory to load RGB colors!");
							}
							else if (hasIndexGrid)
							{
								//we also load the colors into the grid (as invalid/missing points can have colors!)
								try
								{
									grid->colors.resize(gridSize, ccColor::Rgb(0, 0, 0));
									loadGridColors = true;
								}
								catch (const std::bad_alloc&)
								{
									ccLog::Warning("[PTX] Not enough memory to load the grid colors");
								}
							}
						}
					}

					if ((hasColors && tokens.size() != 7) || (!hasColors && tokens.size() != 4))
					{
						result = CC_FERR_MALFORMED_FILE;
						//early stop (j = height exits the outer loop as well)
						j = height;
						break;
					}

					//x, y, z, intensity
					double values[4];
					for (int v=0; v<4; ++v)
					{
						bool ok;
						values[v] = tokens[v].toDouble(&ok);
						if (!ok)
						{
							result = CC_FERR_MALFORMED_FILE;
							//early stop
							j = height;
							break;
						}
					}

					//we skip "empty" cells (PTX convention: all-zero coordinates = no return)
					bool pointIsValid = (CCVector3d::fromArray(values).norm2() != 0);
					if (pointIsValid)
					{
						const double* Pd = values;
						//first point: check for 'big' coordinates
						if (firstPoint)
						{
							if (cloudIndex == 0 && !cloud->isShifted()) //in case the trans. matrix was ok!
							{
								CCVector3d P(Pd);
								if (HandleGlobalShift(P,PshiftCloud,parameters))
								{
									cloud->setGlobalShift(PshiftCloud);
									ccLog::Warning("[PTXFilter::loadFile] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)",PshiftCloud.x,PshiftCloud.y,PshiftCloud.z);
								}
							}
							firstPoint = false;
						}

						//update index grid
						if (hasIndexGrid)
						{
							grid->indexes[gridIndex] = static_cast<int>(cloud->size()); // = index (default value = -1, means no point)
						}

						//add point
						cloud->addPoint(CCVector3(	static_cast<PointCoordinateType>(Pd[0] + PshiftCloud.x),
													static_cast<PointCoordinateType>(Pd[1] + PshiftCloud.y),
													static_cast<PointCoordinateType>(Pd[2] + PshiftCloud.z)) );

						//add intensity
						if (intensitySF)
						{
							intensitySF->addElement(static_cast<ScalarType>(values[3]));
						}
					}

					//color
					if (loadColors && (pointIsValid || loadGridColors))
					{
						ccColor::Rgb color;
						for (int c=0; c<3; ++c)
						{
							bool ok;
							unsigned temp = tokens[4+c].toUInt(&ok);
							ok &= (temp <= 255);
							if (ok)
							{
								color.rgb[c] = static_cast<unsigned char>(temp);
							}
							else
							{
								result = CC_FERR_MALFORMED_FILE;
								//early stop
								j = height;
								break;
							}
						}

						if (pointIsValid)
						{
							cloud->addRGBColor(color.rgb);
						}
						if (loadGridColors)
						{
							assert(!grid->colors.empty());
							grid->colors[gridIndex] = color;
						}
					}

					if (!nprogress.oneStep())
					{
						result = CC_FERR_CANCELED_BY_USER;
						break;
					}
				}
			}
		}

		//is there at least one valid point in this grid?
		if (cloud->size() == 0)
		{
			delete cloud;
			cloud = 0;
			if (intensitySF)
				intensitySF->release();

			ccLog::Warning(QString("[PTX] Scan #%1 is empty?!").arg(cloudIndex+1));
		}
		else
		{
			if (result == CC_FERR_NO_LOAD)
				result = CC_FERR_NO_ERROR; //to make clear that we have loaded at least something!

			//shrink the cloud to its actual (valid) point count
			cloud->resize(cloud->size());
			if (intensitySF)
			{
				assert(intensitySF->currentSize() == cloud->size());
				intensitySF->resize(cloud->size());
				intensitySF->computeMinAndMax();
				int intensitySFIndex = cloud->addScalarField(intensitySF);

				//keep track of the min and max intensity
				if (container.getChildrenNumber() == 0)
				{
					minIntensity = intensitySF->getMin();
					maxIntensity = intensitySF->getMax();
				}
				else
				{
					minIntensity = std::min(minIntensity,intensitySF->getMin());
					maxIntensity = std::max(maxIntensity,intensitySF->getMax());
				}

				cloud->showSF(true);
				cloud->setCurrentDisplayedScalarField(intensitySFIndex);
			}

			ccGBLSensor* sensor = 0;
			if (hasIndexGrid && result != CC_FERR_CANCELED_BY_USER)
			{
				//determine best sensor parameters (mainly yaw and pitch steps)
				ccGLMatrix cloudToSensorTrans((sensorTransD.inverse() * cloudTransD).data());
				sensor = ccGriddedTools::ComputeBestSensor(cloud, grid, &cloudToSensorTrans);
			}

			//we apply the transformation
			ccGLMatrix cloudTrans(cloudTransD.data());
			cloud->applyGLTransformation_recursive(&cloudTrans);

			if (sensor)
			{
				ccGLMatrix sensorTrans(sensorTransD.data());
				sensor->setRigidTransformation(sensorTrans); //after cloud->applyGLTransformation_recursive!
				cloud->addChild(sensor);
			}

			//scan grid
			if (hasIndexGrid)
			{
				grid->validCount = static_cast<unsigned>(cloud->size());
				grid->minValidIndex = 0;
				grid->maxValidIndex = grid->validCount-1;
				grid->sensorPosition = sensorTransD;
				cloud->addGrid(grid);

				//by default we don't compute normals without asking the user
				if (parameters.autoComputeNormals)
				{
					cloud->computeNormalsWithGrids(LS, 2, true, &normalsProgressDlg);
				}
			}

			cloud->setVisible(true);
			cloud->showColors(cloud->hasColors());
			cloud->showNormals(cloud->hasNormals());

			container.addChild(cloud);

#ifdef QT_DEBUG
			//break;
#endif
		}
	}

	//update scalar fields saturation (globally!)
	{
		//PTX intensities are expected to be normalized in [0;1]; pick the grey scale accordingly
		bool validIntensityRange = true;
		if (minIntensity < 0 || maxIntensity > 1.0)
		{
			ccLog::Warning("[PTX] Intensity values are invalid (they should all fall in [0 ; 1])");
			validIntensityRange = false;
		}

		for (unsigned i=0; i<container.getChildrenNumber(); ++i)
		{
			ccHObject* obj = container.getChild(i);
			assert(obj && obj->isA(CC_TYPES::POINT_CLOUD));
			CCLib::ScalarField* sf = static_cast<ccPointCloud*>(obj)->getScalarField(0);
			if (sf)
			{
				ccScalarField* ccSF = static_cast<ccScalarField*>(sf);
				ccSF->setColorScale(ccColorScalesManager::GetDefaultScale(validIntensityRange ? ccColorScalesManager::ABS_NORM_GREY : ccColorScalesManager::GREY));
				ccSF->setSaturationStart(0/*minIntensity*/);
				ccSF->setSaturationStop(maxIntensity);
			}
		}
	}

	return result;
}
//! Loads a Wavefront OBJ file in a single pass: vertices ('v'), texture
//! coordinates ('vt'), normals ('vn'), faces ('f', fan-tesselated), polylines
//! ('l'), groups ('g'/'o' --> sub-meshes) and materials ('mtllib'/'usemtl').
/** \param filename OBJ file path
	\param container output container (receives the mesh, or the bare vertex cloud if no valid face was found)
	\param parameters generic loading parameters (global shift handling)
	\return error code (CC_FERR_NO_ERROR on success)
**/
CC_FILE_ERROR ObjFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters)
{
	ccLog::Print(QString("[OBJ] ") + filename);

	//open file
	QFile file(filename);
	if (!file.open(QFile::ReadOnly))
		return CC_FERR_READING;
	QTextStream stream(&file);

	//current vertex shift
	CCVector3d Pshift(0,0,0);

	//vertices
	ccPointCloud* vertices = new ccPointCloud("vertices");
	int pointsRead = 0;

	//facets
	unsigned int facesRead = 0;
	unsigned int totalFacesRead = 0;
	int maxVertexIndex = -1;

	//base mesh
	ccMesh* baseMesh = new ccMesh(vertices);
	baseMesh->setName(QFileInfo(filename).baseName());
	//we need some space already reserved!
	//NOTE(review): "engouh" typo in the message below; also this early return
	//leaks 'vertices' and 'baseMesh' (they are not deleted before returning)
	if (!baseMesh->reserve(128))
	{
		ccLog::Error("Not engouh memory!");
		return CC_FERR_NOT_ENOUGH_MEMORY;
	}

	//groups (starting index + name)
	std::vector<std::pair<unsigned,QString> > groups;

	//materials (ref-counted shared structures: link() now, release() at the end)
	ccMaterialSet* materials = 0;
	bool hasMaterial = false;
	int currentMaterial = -1;
	bool currentMaterialDefined = false;
	bool materialsLoadFailed = true;

	//texture coordinates
	TextureCoordsContainer* texCoords = 0;
	bool hasTexCoords = false;
	int texCoordsRead = 0;
	int maxTexCoordIndex = -1;

	//normals
	NormsIndexesTableType* normals = 0;
	int normsRead = 0;
	bool normalsPerFacet = false;
	int maxTriNormIndex = -1;

	//progress dialog
	ccProgressDialog pDlg(true);
	pDlg.setMethodTitle("OBJ file");
	pDlg.setInfo("Loading in progress...");
	pDlg.setRange(0,static_cast<int>(file.size()));
	pDlg.show();
	QApplication::processEvents();

	//common warnings that can appear multiple time (we avoid to send too many messages to the console!)
	enum OBJ_WARNINGS {	INVALID_NORMALS		= 0,
						INVALID_INDEX		= 1,
						NOT_ENOUGH_MEMORY	= 2,
						INVALID_LINE		= 3,
						CANCELLED_BY_USER	= 4,
	};
	bool objWarnings[5] = { false, false, false, false, false };
	bool error = false;

	try
	{
		unsigned lineCount = 0;
		unsigned polyCount = 0;
		QString currentLine = stream.readLine();
		while (!currentLine.isNull())
		{
			//refresh the progress dialog (and test for cancellation) every 2048 lines
			if ((++lineCount % 2048) == 0)
			{
				if (pDlg.wasCanceled())
				{
					error = true;
					objWarnings[CANCELLED_BY_USER] = true;
					break;
				}
				pDlg.setValue(static_cast<int>(file.pos()));
				QApplication::processEvents();
			}

			QStringList tokens = QString(currentLine).split(QRegExp("\\s+"),QString::SkipEmptyParts);

			//skip comments & empty lines
			if( tokens.empty() || tokens.front().startsWith('/',Qt::CaseInsensitive) || tokens.front().startsWith('#',Qt::CaseInsensitive) )
			{
				currentLine = stream.readLine();
				continue;
			}

			/*** new vertex ***/
			if (tokens.front() == "v")
			{
				//reserve more memory if necessary
				if (vertices->size() == vertices->capacity())
				{
					if (!vertices->reserve(vertices->capacity()+MAX_NUMBER_OF_ELEMENTS_PER_CHUNK))
					{
						objWarnings[NOT_ENOUGH_MEMORY] = true;
						error = true;
						break;
					}
				}

				//malformed line?
				if (tokens.size() < 4)
				{
					objWarnings[INVALID_LINE] = true;
					error = true;
					break;
				}

				CCVector3d Pd( tokens[1].toDouble(), tokens[2].toDouble(), tokens[3].toDouble() );

				//first point: check for 'big' coordinates
				if (pointsRead == 0)
				{
					if (HandleGlobalShift(Pd,Pshift,parameters))
					{
						vertices->setGlobalShift(Pshift);
						ccLog::Warning("[OBJ] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z);
					}
				}

				//shifted point
				CCVector3 P = CCVector3::fromArray((Pd + Pshift).u);
				vertices->addPoint(P);
				++pointsRead;
			}
			/*** new vertex texture coordinates ***/
			else if (tokens.front() == "vt")
			{
				//create and reserve memory for tex. coords container if necessary
				if (!texCoords)
				{
					texCoords = new TextureCoordsContainer();
					texCoords->link();
				}
				if (texCoords->currentSize() == texCoords->capacity())
				{
					if (!texCoords->reserve(texCoords->capacity() + MAX_NUMBER_OF_ELEMENTS_PER_CHUNK))
					{
						objWarnings[NOT_ENOUGH_MEMORY] = true;
						error = true;
						break;
					}
				}

				//malformed line?
				if (tokens.size() < 2)
				{
					objWarnings[INVALID_LINE] = true;
					error = true;
					break;
				}

				//NOTE(review): odd self-assignment inside the aggregate initializer
				//('T[0] = ...' is an assignment expression whose value then initializes
				//T[0] again); should simply be 'float T[2] = { tokens[1].toFloat(), 0 };'
				float T[2] = { T[0] = tokens[1].toFloat(), 0 };

				if (tokens.size() > 2) //OBJ specification allows for only one value!!!
				{
					T[1] = tokens[2].toFloat();
				}

				texCoords->addElement(T);
				++texCoordsRead;
			}
			/*** new vertex normal ***/
			else if (tokens.front() == "vn") //--> in fact it can also be a facet normal!!!
			{
				//create and reserve memory for normals container if necessary
				if (!normals)
				{
					normals = new NormsIndexesTableType;
					normals->link();
				}
				if (normals->currentSize() == normals->capacity())
				{
					if (!normals->reserve(normals->capacity() + MAX_NUMBER_OF_ELEMENTS_PER_CHUNK))
					{
						objWarnings[NOT_ENOUGH_MEMORY] = true;
						error = true;
						break;
					}
				}

				//malformed line?
				if (tokens.size() < 4)
				{
					objWarnings[INVALID_LINE] = true;
					error = true;
					break;
				}

				CCVector3 N(static_cast<PointCoordinateType>(tokens[1].toDouble()),
							static_cast<PointCoordinateType>(tokens[2].toDouble()),
							static_cast<PointCoordinateType>(tokens[3].toDouble()));

				//normals are expected to be unit vectors; re-normalize if not (with a warning)
				if (fabs(N.norm2() - 1.0) > 0.005)
				{
					objWarnings[INVALID_NORMALS] = true;
					N.normalize();
				}
				CompressedNormType nIndex = ccNormalVectors::GetNormIndex(N.u);

				normals->addElement(nIndex); //we don't know yet if it's per-vertex or per-triangle normal...
				++normsRead;
			}
			/*** new group ***/
			else if (tokens.front() == "g" || tokens.front() == "o")
			{
				//update new group index
				facesRead = 0;
				//get the group name
				QString groupName = (tokens.size() > 1 && !tokens[1].isEmpty() ? tokens[1] : "default");
				for (int i=2; i<tokens.size(); ++i) //multiple parts?
					groupName.append(QString(" ")+tokens[i]);

				//push previous group descriptor (if none was pushed)
				if (groups.empty() && totalFacesRead > 0)
					groups.push_back(std::pair<unsigned,QString>(0,"default"));

				//push new group descriptor
				if (!groups.empty() && groups.back().first == totalFacesRead)
					groups.back().second = groupName; //simply replace the group name if the previous group was empty!
				else
					groups.push_back(std::pair<unsigned,QString>(totalFacesRead,groupName));

				polyCount = 0; //restart polyline count at 0!
			}
			/*** new face ***/
			else if (tokens.front().startsWith('f'))
			{
				//malformed line?
				if (tokens.size() < 4)
				{
					objWarnings[INVALID_LINE] = true;
					currentLine = stream.readLine();
					continue;
					//error = true;
					//break;
				}

				//read the face elements (singleton, pair or triplet)
				std::vector<facetElement> currentFace;
				{
					for (int i=1; i<tokens.size(); ++i)
					{
						//each vertex spec is v, v/vt, v//vn or v/vt/vn
						QStringList vertexTokens = tokens[i].split('/');
						if (vertexTokens.size() == 0 || vertexTokens[0].isEmpty())
						{
							objWarnings[INVALID_LINE] = true;
							error = true;
							break;
						}
						else
						{
							//new vertex
							facetElement fe; //(0,0,0) by default
							fe.vIndex = vertexTokens[0].toInt();
							if (vertexTokens.size() > 1 && !vertexTokens[1].isEmpty())
								fe.tcIndex = vertexTokens[1].toInt();
							if (vertexTokens.size() > 2 && !vertexTokens[2].isEmpty())
								fe.nIndex = vertexTokens[2].toInt();

							currentFace.push_back(fe);
						}
					}
				}

				if (error)
					break;

				if (currentFace.size() < 3)
				{
					//NOTE(review): '%1' is a Qt placeholder but this logger overload is
					//printf-style, so 'lineCount' is never substituted into the message
					ccLog::Warning("[OBJ] Malformed file: polygon on line %1 has less than 3 vertices!",lineCount);
					error = true;
					break;
				}

				//first vertex
				std::vector<facetElement>::iterator A = currentFace.begin();

				//the very first vertex of the group tells us about the whole sequence
				if (facesRead == 0)
				{
					//we have a tex. coord index as second vertex element!
					if (!hasTexCoords && A->tcIndex != 0 && !materialsLoadFailed)
					{
						if (!baseMesh->reservePerTriangleTexCoordIndexes())
						{
							objWarnings[NOT_ENOUGH_MEMORY] = true;
							error = true;
							break;
						}
						//back-fill previously read triangles with 'no tex coord' markers
						for (unsigned int i=0; i<totalFacesRead; ++i)
							baseMesh->addTriangleTexCoordIndexes(-1, -1, -1);

						hasTexCoords = true;
					}

					//we have a normal index as third vertex element!
					if (!normalsPerFacet && A->nIndex != 0)
					{
						//so the normals are 'per-facet'
						if (!baseMesh->reservePerTriangleNormalIndexes())
						{
							objWarnings[NOT_ENOUGH_MEMORY] = true;
							error = true;
							break;
						}
						//back-fill previously read triangles with 'no normal' markers
						for (unsigned int i=0; i<totalFacesRead; ++i)
							baseMesh->addTriangleNormalIndexes(-1, -1, -1);
						normalsPerFacet = true;
					}
				}

				//we process all vertices accordingly (indexes are converted from OBJ's
				//1-based/negative-relative convention to absolute 0-based ones)
				for (std::vector<facetElement>::iterator it = currentFace.begin() ; it!=currentFace.end(); ++it)
				{
					facetElement& vertex = *it;

					//vertex index
					{
						if (!vertex.updatePointIndex(pointsRead))
						{
							objWarnings[INVALID_INDEX] = true;
							error = true;
							break;
						}
						if (vertex.vIndex > maxVertexIndex)
							maxVertexIndex = vertex.vIndex;
					}
					//should we have a tex. coord index as second vertex element?
					if (hasTexCoords && currentMaterialDefined)
					{
						if (!vertex.updateTexCoordIndex(texCoordsRead))
						{
							objWarnings[INVALID_INDEX] = true;
							error = true;
							break;
						}
						if (vertex.tcIndex > maxTexCoordIndex)
							maxTexCoordIndex = vertex.tcIndex;
					}

					//should we have a normal index as third vertex element?
					if (normalsPerFacet)
					{
						if (!vertex.updateNormalIndex(normsRead))
						{
							objWarnings[INVALID_INDEX] = true;
							error = true;
							break;
						}
						if (vertex.nIndex > maxTriNormIndex)
							maxTriNormIndex = vertex.nIndex;
					}
				}

				//don't forget material (common for all vertices)
				if (currentMaterialDefined && !materialsLoadFailed)
				{
					if (!hasMaterial)
					{
						if (!baseMesh->reservePerTriangleMtlIndexes())
						{
							objWarnings[NOT_ENOUGH_MEMORY] = true;
							error = true;
							break;
						}
						//back-fill previously read triangles with 'no material' markers
						for (unsigned int i=0; i<totalFacesRead; ++i)
							baseMesh->addTriangleMtlIndex(-1);

						hasMaterial = true;
					}
				}

				if (error)
					break;

				//Now, let's tesselate the whole polygon
				//FIXME: yeah, we do very ulgy tesselation here!
				std::vector<facetElement>::const_iterator B = A+1;
				std::vector<facetElement>::const_iterator C = B+1;
				for ( ; C != currentFace.end(); ++B,++C)
				{
					//need more space?
					if (baseMesh->size() == baseMesh->capacity())
					{
						if (!baseMesh->reserve(baseMesh->size()+128))
						{
							objWarnings[NOT_ENOUGH_MEMORY] = true;
							error = true;
							break;
						}
					}

					//push new triangle (fan around the first vertex 'A')
					baseMesh->addTriangle(A->vIndex, B->vIndex, C->vIndex);
					++facesRead;
					++totalFacesRead;

					if (hasMaterial)
						baseMesh->addTriangleMtlIndex(currentMaterial);

					if (hasTexCoords)
						baseMesh->addTriangleTexCoordIndexes(A->tcIndex, B->tcIndex, C->tcIndex);

					if (normalsPerFacet)
						baseMesh->addTriangleNormalIndexes(A->nIndex, B->nIndex, C->nIndex);
				}
			}
			/*** polyline ***/
			else if (tokens.front().startsWith('l'))
			{
				//malformed line?
				if (tokens.size() < 3)
				{
					objWarnings[INVALID_LINE] = true;
					currentLine = stream.readLine();
					continue;
				}

				//read the face elements (singleton, pair or triplet)
				ccPolyline* polyline = new ccPolyline(vertices);
				if (!polyline->reserve(static_cast<unsigned>(tokens.size()-1)))
				{
					//not enough memory
					objWarnings[NOT_ENOUGH_MEMORY] = true;
					delete polyline;
					polyline = 0;
					currentLine = stream.readLine();
					continue;
				}

				for (int i=1; i<tokens.size(); ++i)
				{
					//get next polyline's vertex index
					QStringList vertexTokens = tokens[i].split('/');
					if (vertexTokens.size() == 0 || vertexTokens[0].isEmpty())
					{
						objWarnings[INVALID_LINE] = true;
						error = true;
						break;
					}
					else
					{
						int index = vertexTokens[0].toInt(); //we ignore normal index (if any!)
						if (!UpdatePointIndex(index,pointsRead))
						{
							objWarnings[INVALID_INDEX] = true;
							error = true;
							break;
						}

						polyline->addPointIndex(index);
					}
				}

				if (error)
				{
					delete polyline;
					polyline = 0;
					break;
				}

				polyline->setVisible(true);
				QString name = groups.empty() ? QString("Line") : groups.back().second+QString(".line");
				polyline->setName(QString("%1 %2").arg(name).arg(++polyCount));
				vertices->addChild(polyline);
			}
			/*** material ***/
			else if (tokens.front() == "usemtl") //see 'MTL file' below
			{
				if (materials) //otherwise we have failed to load MTL file!!!
				{
					QString mtlName = currentLine.mid(7).trimmed();
					//DGM: in case there's space characters in the material name, we must read it again from the original line buffer
					//QString mtlName = (tokens.size() > 1 && !tokens[1].isEmpty() ? tokens[1] : "");
					currentMaterial = (!mtlName.isEmpty() ? materials->findMaterialByName(mtlName) : -1);
					currentMaterialDefined = true;
				}
			}
			/*** material file (MTL) ***/
			else if (tokens.front() == "mtllib")
			{
				//malformed line?
				if (tokens.size() < 2 || tokens[1].isEmpty())
				{
					objWarnings[INVALID_LINE] = true;
				}
				else
				{
					//we build the whole MTL filename + path
					//DGM: in case there's space characters in the filename, we must read it again from the original line buffer
					//QString mtlFilename = tokens[1];
					QString mtlFilename = currentLine.mid(7).trimmed();
					ccLog::Print(QString("[OBJ] Material file: ")+mtlFilename);
					QString mtlPath = QFileInfo(filename).canonicalPath();
					//we try to load it
					if (!materials)
					{
						materials = new ccMaterialSet("materials");
						materials->link();
					}

					size_t oldSize = materials->size();
					QStringList errors;
					if (ccMaterialSet::ParseMTL(mtlPath,mtlFilename,*materials,errors))
					{
						//NOTE(review): '%i' with a size_t argument is a printf format
						//mismatch on LP64 platforms
						ccLog::Print("[OBJ] %i materials loaded",materials->size()-oldSize);
						materialsLoadFailed = false;
					}
					else
					{
						ccLog::Error(QString("[OBJ] Failed to load material file! (should be in '%1')").arg(mtlPath+'/'+QString(mtlFilename)));
						materialsLoadFailed = true;
					}

					if (!errors.empty())
					{
						for (int i=0; i<errors.size(); ++i)
							ccLog::Warning(QString("[OBJ::Load::MTL parser] ")+errors[i]);
					}
					if (materials->empty())
					{
						materials->release();
						materials=0;
						materialsLoadFailed = true;
					}
				}
			}
			///*** shading group ***/
			//else if (tokens.front() == "s")
			//{
			//	//ignored!
			//}

			if (error)
				break;

			currentLine = stream.readLine();
		}
	}
	catch (const std::bad_alloc&)
	{
		//not enough memory
		objWarnings[NOT_ENOUGH_MEMORY] = true;
		error = true;
	}

	file.close();

	//1st check
	if (!error && pointsRead == 0)
	{
		//of course if there's no vertex, that's the end of the story ...
		ccLog::Warning("[OBJ] Malformed file: no vertex in file!");
		error = true;
	}

	if (!error)
	{
		ccLog::Print("[OBJ] %i points, %u faces",pointsRead,totalFacesRead);
		if (texCoordsRead > 0 || normsRead > 0)
			ccLog::Print("[OBJ] %i tex. coords, %i normals",texCoordsRead,normsRead);

		//do some cleaning
		vertices->shrinkToFit();
		if (normals)
			normals->shrinkToFit();
		if (texCoords)
			texCoords->shrinkToFit();
		if (baseMesh->size() == 0)
		{
			delete baseMesh;
			baseMesh = 0;
		}
		else
		{
			baseMesh->shrinkToFit();
		}

		//consistency check: no index may exceed the number of elements actually read
		if (	maxVertexIndex >= pointsRead
			||	maxTexCoordIndex >= texCoordsRead
			||	maxTriNormIndex >= normsRead)
		{
			//hum, we've got a problem here
			ccLog::Warning("[OBJ] Malformed file: indexes go higher than the number of elements! (v=%i/tc=%i/n=%i)",maxVertexIndex,maxTexCoordIndex,maxTriNormIndex);
			if (maxVertexIndex >= pointsRead)
			{
				error = true;
			}
			else
			{
				objWarnings[INVALID_INDEX] = true;
				if (maxTexCoordIndex >= texCoordsRead)
				{
					//NOTE(review): 'materials' may be null here (tex coords can exist
					//without a successfully loaded MTL) - potential null dereference
					texCoords->release();
					texCoords = 0;
					materials->release();
					materials = 0;
				}
				if (maxTriNormIndex >= normsRead)
				{
					normals->release();
					normals = 0;
				}
			}
		}

		if (!error && baseMesh)
		{
			//associate the shared structures with the mesh
			if (normals && normalsPerFacet)
			{
				baseMesh->setTriNormsTable(normals);
				baseMesh->showTriNorms(true);
			}
			if (materials)
			{
				baseMesh->setMaterialSet(materials);
				baseMesh->showMaterials(true);
			}
			if (texCoords)
			{
				if (materials)
				{
					baseMesh->setTexCoordinatesTable(texCoords);
				}
				else
				{
					ccLog::Warning("[OBJ] Texture coordinates were defined but no material could be loaded!");
				}
			}

			//normals: if the obj file doesn't provide any, should we compute them?
			if (!normals)
			{
				//DGM: normals can be per-vertex or per-triangle so it's better to let the user do it himself later
				//Moreover it's not always good idea if the user doesn't want normals (especially in ccViewer!)
				//if (!materials && !baseMesh->hasColors()) //yes if no material is available!
				//{
				//	ccLog::Print("[OBJ] Mesh has no normal! We will compute them automatically");
				//	baseMesh->computeNormals();
				//	baseMesh->showNormals(true);
				//}
				//else
				{
					ccLog::Warning("[OBJ] Mesh has no normal! You can manually compute them (select it then call \"Edit > Normals > Compute\")");
				}
			}

			//create sub-meshes if necessary
			//NOTE(review): '%i' with a size_t argument (groups.size()) is a printf
			//format mismatch on LP64 platforms
			ccLog::Print("[OBJ] 1 mesh loaded - %i group(s)", groups.size());
			if (groups.size() > 1)
			{
				for (size_t i=0; i<groups.size(); ++i)
				{
					const QString& groupName = groups[i].second;
					unsigned startIndex = groups[i].first;
					unsigned endIndex = (i+1 == groups.size() ? baseMesh->size() : groups[i+1].first);

					if (startIndex == endIndex)
					{
						continue;
					}

					ccSubMesh* subTri = new ccSubMesh(baseMesh);
					if (subTri->reserve(endIndex-startIndex))
					{
						subTri->addTriangleIndex(startIndex,endIndex);
						subTri->setName(groupName);
						subTri->showMaterials(baseMesh->materialsShown());
						subTri->showNormals(baseMesh->normalsShown());
						subTri->showTriNorms(baseMesh->triNormsShown());
						//subTri->showColors(baseMesh->colorsShown());
						//subTri->showWired(baseMesh->isShownAsWire());
						baseMesh->addChild(subTri);
					}
					else
					{
						delete subTri;
						subTri = 0;
						objWarnings[NOT_ENOUGH_MEMORY] = true;
					}
				}
				baseMesh->setVisible(false);
				vertices->setLocked(true);
			}

			baseMesh->addChild(vertices);
			//DGM: we can't deactive the vertices if it has children! (such as polyline)
			if (vertices->getChildrenNumber() != 0)
				vertices->setVisible(false);
			else
				vertices->setEnabled(false);

			container.addChild(baseMesh);
		}

		if (!baseMesh && vertices->size() != 0)
		{
			//no (valid) mesh!
			container.addChild(vertices);

			//we hide the vertices if the entity has children (probably polylines!)
			if (vertices->getChildrenNumber() != 0)
			{
				vertices->setVisible(false);
			}
		}

		//special case: normals held by cloud!
		if (normals && !normalsPerFacet)
		{
			if (normsRead == pointsRead) //must be 'per-vertex' normals
			{
				vertices->setNormsTable(normals);
				if (baseMesh)
					baseMesh->showNormals(true);
			}
			else
			{
				ccLog::Warning("File contains normals which seem to be neither per-vertex nor per-face!!! We had to ignore them...");
			}
		}
	}

	if (error)
	{
		if (baseMesh)
			delete baseMesh;
		if (vertices)
			delete vertices;
	}

	//release shared structures (the mesh has taken its own reference where needed)
	if (normals)
	{
		normals->release();
		normals = 0;
	}
	if (texCoords)
	{
		texCoords->release();
		texCoords = 0;
	}
	if (materials)
	{
		materials->release();
		materials = 0;
	}

	pDlg.close();

	//potential warnings
	if (objWarnings[INVALID_NORMALS])
		ccLog::Warning("[OBJ] Some normals in file were invalid. You should re-compute them (select entity, then \"Edit > Normals > Compute\")");
	if (objWarnings[INVALID_INDEX])
		ccLog::Warning("[OBJ] File is malformed! Check indexes...");
	if (objWarnings[NOT_ENOUGH_MEMORY])
		ccLog::Warning("[OBJ] Not enough memory!");
	if (objWarnings[INVALID_LINE])
		ccLog::Warning("[OBJ] File is malformed! Missing data.");

	if (error)
	{
		if (objWarnings[NOT_ENOUGH_MEMORY])
			return CC_FERR_NOT_ENOUGH_MEMORY;
		else if (objWarnings[CANCELLED_BY_USER])
			return CC_FERR_CANCELED_BY_USER;
		else
			return CC_FERR_MALFORMED_FILE;
	}
	else
	{
		return CC_FERR_NO_ERROR;
	}
}