bool ccPointPairRegistrationDlg::addReferencePoint(CCVector3d& Pin, ccHObject* entity/*=0*/, bool shifted/*=true*/) { assert(entity == 0 || entity == m_reference.entity); ccGenericPointCloud* cloud = entity ? ccHObjectCaster::ToGenericPointCloud(entity) : 0; //first point? if (m_refPoints.size() == 0) { if (entity) //picked point { //simply copy the cloud global shift/scale if (cloud) { m_refPoints.setGlobalScale(cloud->getGlobalScale()); m_refPoints.setGlobalShift(cloud->getGlobalShift()); } } else //virtual point { m_refPoints.setGlobalScale(1.0); m_refPoints.setGlobalShift(0,0,0); if (!shifted) { //test that the input point has not too big coordinates bool shiftEnabled = false; CCVector3d Pshift(0,0,0); double scale = 1.0; //we use the aligned shift by default (if any) ccGenericPointCloud* alignedCloud = m_aligned.entity ? ccHObjectCaster::ToGenericPointCloud(m_aligned.entity) : 0; if (alignedCloud && alignedCloud->isShifted()) { Pshift = alignedCloud->getGlobalShift(); scale = alignedCloud->getGlobalScale(); shiftEnabled = true; } if (ccGlobalShiftManager::Handle(Pin,0,ccGlobalShiftManager::DIALOG_IF_NECESSARY,shiftEnabled,Pshift,&scale)) { m_refPoints.setGlobalShift(Pshift); m_refPoints.setGlobalScale(scale); } } } } PointCoordinateType sphereRadius = -PC_ONE; if (!convertToSphereCenter(Pin,entity,sphereRadius)) return false; //transform the input point in the 'global world' by default if (shifted && cloud) { Pin = cloud->toGlobal3d<double>(Pin); } //check that we don't duplicate points for (unsigned i=0; i<m_refPoints.size(); ++i) { //express the 'Pi' point in the current global coordinate system CCVector3d Pi = m_refPoints.toGlobal3d<PointCoordinateType>(*m_refPoints.getPoint(i)); if ((Pi-Pin).norm() < ZERO_TOLERANCE) { ccLog::Error("Point already picked or too close to an already selected one!"); return false; } } //add point to the 'reference' set unsigned newPointIndex = m_refPoints.size(); if (newPointIndex == m_refPoints.capacity() && !m_refPoints.reserve(newPointIndex+1)) { ccLog::Error("Not enough memory?!"); return false; } //shift point to the local coordinate system before pushing it CCVector3 P = m_refPoints.toLocal3pc<double>(Pin); m_refPoints.addPoint(P); QString pointName = QString("R%1").arg(newPointIndex); //add corresponding row in table addPointToTable(refPointsTableWidget,newPointIndex,Pin,pointName); //eventually add a label (or a sphere) if (sphereRadius <= 0) { cc2DLabel* label = CreateLabel(&m_refPoints,newPointIndex,pointName,m_associatedWin); m_refPoints.addChild(label); } else { ccGLMatrix trans; trans.setTranslation(Pin); ccSphere* sphere = new ccSphere(sphereRadius,&trans,pointName); sphere->showNameIn3D(true); sphere->setTempColor(ccColor::yellow,true); m_refPoints.addChild(sphere); } if (m_associatedWin) { m_associatedWin->redraw(); } onPointCountChanged(); return true; }
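// --- Illustrative sketch (not CloudCompare's actual API) --------------------
// The function above stores picked points in *local* coordinates but compares
// and displays them in *global* coordinates. A minimal model of that round
// trip, assuming the convention local = (global + shift) * scale (and thus
// global = local / scale - shift); all names below are hypothetical.
#include <cstdio>
struct Vec3d { double x, y, z; };
struct ShiftedPointSet
{
	Vec3d  shift {0.0, 0.0, 0.0};
	double scale {1.0};
	Vec3d toLocal (const Vec3d& g) const { return { (g.x + shift.x) * scale, (g.y + shift.y) * scale, (g.z + shift.z) * scale }; }
	Vec3d toGlobal(const Vec3d& l) const { return { l.x / scale - shift.x, l.y / scale - shift.y, l.z / scale - shift.z }; }
};
// Usage: large georeferenced coordinates stay exact in double precision while
// the stored (local) values remain small enough for 32-bit floats.
static void shiftRoundTripExample()
{
	ShiftedPointSet set; set.shift = { -450000.0, -5400000.0, 0.0 };
	Vec3d global { 450123.45, 5400678.90, 87.60 };
	Vec3d local  = set.toLocal(global);
	Vec3d back   = set.toGlobal(local);
	std::printf("local=(%.2f,%.2f,%.2f) back=(%.2f,%.2f,%.2f)\n", local.x, local.y, local.z, back.x, back.y, back.z);
}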
//converts a FBX mesh to a CC mesh static ccMesh* FromFbxMesh(FbxMesh* fbxMesh, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, CCVector3d* coordinatesShift/*=0*/) { if (!fbxMesh) return 0; int polyCount = fbxMesh->GetPolygonCount(); //fbxMesh->GetLayer( unsigned triCount = 0; unsigned polyVertCount = 0; //different from vertCount (vertices can be counted multiple times here!) //as we can't load all polygons (yet ;) we first check whether we can load any at all! { unsigned skipped = 0; for (int i=0; i<polyCount; ++i) { int pSize = fbxMesh->GetPolygonSize(i); if (pSize == 3) { ++triCount; polyVertCount += 3; } else if (pSize == 4) { triCount += 2; polyVertCount += 4; } else { ++skipped; } } if (triCount == 0) { ccLog::Warning(QString("[FBX] No triangle or quad found in mesh '%1'! (polygons with more than 4 vertices are not supported for the moment)").arg(fbxMesh->GetName())); return 0; } else if (skipped != 0) { ccLog::Warning(QString("[FBX] %2 polygon(s) in mesh '%1' were ignored (polygons with more than 4 vertices are not supported for the moment)").arg(fbxMesh->GetName()).arg(skipped)); } } int vertCount = fbxMesh->GetControlPointsCount(); if (vertCount <= 0) { ccLog::Warning(QString("[FBX] Mesh '%1' has no vertex or no polygon?!").arg(fbxMesh->GetName())); return 0; } ccPointCloud* vertices = new ccPointCloud("vertices"); ccMesh* mesh = new ccMesh(vertices); mesh->setName(fbxMesh->GetName()); mesh->addChild(vertices); vertices->setEnabled(false); if (!mesh->reserve(static_cast<unsigned>(triCount)) || !vertices->reserve(vertCount)) { ccLog::Warning(QString("[FBX] Not enough memory to load mesh '%1'!").arg(fbxMesh->GetName())); delete mesh; return 0; } //colors { for (int l=0; l<fbxMesh->GetElementVertexColorCount(); l++) { FbxGeometryElementVertexColor* vertColor = fbxMesh->GetElementVertexColor(l); //CC can only handle per-vertex colors if (vertColor->GetMappingMode() == FbxGeometryElement::eByControlPoint) { if (vertColor->GetReferenceMode() == FbxGeometryElement::eDirect || vertColor->GetReferenceMode() == FbxGeometryElement::eIndexToDirect) { if (vertices->reserveTheRGBTable()) { switch (vertColor->GetReferenceMode()) { case FbxGeometryElement::eDirect: { for (int i=0; i<vertCount; ++i) { FbxColor c = vertColor->GetDirectArray().GetAt(i); vertices->addRGBColor( static_cast<colorType>(c.mRed * MAX_COLOR_COMP), static_cast<colorType>(c.mGreen * MAX_COLOR_COMP), static_cast<colorType>(c.mBlue * MAX_COLOR_COMP) ); } } break; case FbxGeometryElement::eIndexToDirect: { for (int i=0; i<vertCount; ++i) { int id = vertColor->GetIndexArray().GetAt(i); FbxColor c = vertColor->GetDirectArray().GetAt(id); vertices->addRGBColor( static_cast<colorType>(c.mRed * MAX_COLOR_COMP), static_cast<colorType>(c.mGreen * MAX_COLOR_COMP), static_cast<colorType>(c.mBlue * MAX_COLOR_COMP) ); } } break; default: assert(false); break; } vertices->showColors(true); mesh->showColors(true); break; //no need to look for other color fields (we won't be able to handle them!)
} else { ccLog::Warning(QString("[FBX] Not enough memory to load mesh '%1' colors!").arg(fbxMesh->GetName())); } } else { ccLog::Warning(QString("[FBX] Color field #%1 of mesh '%2' will be ignored (unhandled type)").arg(l).arg(fbxMesh->GetName())); } } else { ccLog::Warning(QString("[FBX] Color field #%1 of mesh '%2' will be ignored (unhandled type)").arg(l).arg(fbxMesh->GetName())); } } } //normals can be per-vertex or per-triangle int perPointNormals = -1; int perVertexNormals = -1; int perPolygonNormals = -1; { for (int j=0; j<fbxMesh->GetElementNormalCount(); j++) { FbxGeometryElementNormal* leNormals = fbxMesh->GetElementNormal(j); switch(leNormals->GetMappingMode()) { case FbxGeometryElement::eByControlPoint: perPointNormals = j; break; case FbxGeometryElement::eByPolygonVertex: perVertexNormals = j; break; case FbxGeometryElement::eByPolygon: perPolygonNormals = j; break; default: //not handled break; } } } //per-point normals if (perPointNormals >= 0) { FbxGeometryElementNormal* leNormals = fbxMesh->GetElementNormal(perPointNormals); FbxLayerElement::EReferenceMode refMode = leNormals->GetReferenceMode(); const FbxLayerElementArrayTemplate<FbxVector4>& normals = leNormals->GetDirectArray(); assert(normals.GetCount() == vertCount); if (normals.GetCount() != vertCount) { ccLog::Warning(QString("[FBX] Wrong number of normals on mesh '%1'!").arg(fbxMesh->GetName())); perPointNormals = -1; } else if (!vertices->reserveTheNormsTable()) { ccLog::Warning(QString("[FBX] Not enough memory to load mesh '%1' normals!").arg(fbxMesh->GetName())); perPointNormals = -1; } else { //import normals for (int i=0; i<vertCount; ++i) { int id = refMode != FbxGeometryElement::eDirect ? leNormals->GetIndexArray().GetAt(i) : i; FbxVector4 N = normals.GetAt(id); //convert to CC-structure CCVector3 Npc( static_cast<PointCoordinateType>(N.Buffer()[0]), static_cast<PointCoordinateType>(N.Buffer()[1]), static_cast<PointCoordinateType>(N.Buffer()[2]) ); vertices->addNorm(Npc.u); } vertices->showNormals(true); mesh->showNormals(true); //no need to import the other normals (if any) perVertexNormals = -1; perPolygonNormals = -1; } } //per-triangle normals NormsIndexesTableType* normsTable = 0; if (perVertexNormals >= 0 || perPolygonNormals >= 0) { normsTable = new NormsIndexesTableType(); if (!normsTable->reserve(polyVertCount) || !mesh->reservePerTriangleNormalIndexes()) { ccLog::Warning(QString("[FBX] Not enough memory to load mesh '%1' normals!").arg(fbxMesh->GetName())); normsTable->release(); normsTable = 0; } else { mesh->setTriNormsTable(normsTable); mesh->addChild(normsTable); vertices->showNormals(true); mesh->showNormals(true); } } //import textures UV int perVertexUV = -1; bool hasTexUV = false; { for (int l=0; l<fbxMesh->GetElementUVCount(); ++l) { FbxGeometryElementUV* leUV = fbxMesh->GetElementUV(l); //per-point UV coordinates if (leUV->GetMappingMode() == FbxGeometryElement::eByControlPoint) { TextureCoordsContainer* vertTexUVTable = new TextureCoordsContainer(); if (!vertTexUVTable->reserve(vertCount) || !mesh->reservePerTriangleTexCoordIndexes()) { vertTexUVTable->release(); ccLog::Warning(QString("[FBX] Not enough memory to load mesh '%1' UV coordinates!").arg(fbxMesh->GetName())); } else { FbxLayerElement::EReferenceMode refMode = leUV->GetReferenceMode(); for (int i=0; i<vertCount; ++i) { int id = refMode != FbxGeometryElement::eDirect ?
leUV->GetIndexArray().GetAt(i) : i; FbxVector2 uv = leUV->GetDirectArray().GetAt(id); //convert to CC-structure float uvf[2] = {static_cast<float>(uv.Buffer()[0]), static_cast<float>(uv.Buffer()[1])}; vertTexUVTable->addElement(uvf); } mesh->addChild(vertTexUVTable); hasTexUV = true; } perVertexUV = -1; break; //no need to look to the other UV fields (can't handle them!) } else if (leUV->GetMappingMode() == FbxGeometryElement::eByPolygonVertex) { //per-vertex UV coordinates perVertexUV = l; } } } //per-vertex UV coordinates TextureCoordsContainer* texUVTable = 0; if (perVertexUV >= 0) { texUVTable = new TextureCoordsContainer(); if (!texUVTable->reserve(polyVertCount) || !mesh->reservePerTriangleTexCoordIndexes()) { texUVTable->release(); ccLog::Warning(QString("[FBX] Not enough memory to load mesh '%1' UV coordinates!").arg(fbxMesh->GetName())); } else { mesh->addChild(texUVTable); hasTexUV = true; } } //import polygons { for (int i=0; i<polyCount; ++i) { int pSize = fbxMesh->GetPolygonSize(i); if (pSize > 4) { //not handled for the moment continue; } //we split quads into two triangles //vertex indices int i1 = fbxMesh->GetPolygonVertex(i, 0); int i2 = fbxMesh->GetPolygonVertex(i, 1); int i3 = fbxMesh->GetPolygonVertex(i, 2); mesh->addTriangle(i1,i2,i3); int i4 = -1; if (pSize == 4) { i4 = fbxMesh->GetPolygonVertex(i, 3); mesh->addTriangle(i1,i3,i4); } if (hasTexUV) { if (texUVTable) { assert(perVertexUV >= 0); int uvIndex = static_cast<int>(texUVTable->currentSize()); for (int j=0; j<pSize; ++j) { int lTextureUVIndex = fbxMesh->GetTextureUVIndex(i, j); FbxGeometryElementUV* leUV = fbxMesh->GetElementUV(perVertexUV); FbxVector2 uv = leUV->GetDirectArray().GetAt(lTextureUVIndex); //convert to CC-structure float uvf[2] = {static_cast<float>(uv.Buffer()[0]), static_cast<float>(uv.Buffer()[1])}; texUVTable->addElement(uvf); } mesh->addTriangleTexCoordIndexes(uvIndex,uvIndex+1,uvIndex+2); if (pSize == 4) mesh->addTriangleTexCoordIndexes(uvIndex,uvIndex+2,uvIndex+3); } else { mesh->addTriangleTexCoordIndexes(i1,i2,i3); if (pSize == 4) mesh->addTriangleTexCoordIndexes(i1,i3,i4); } } //per-triangle normals if (normsTable) { int nIndex = static_cast<int>(normsTable->currentSize()); for (int j=0; j<pSize; ++j) { FbxVector4 N; fbxMesh->GetPolygonVertexNormal(i, j, N); CCVector3 Npc( static_cast<PointCoordinateType>(N.Buffer()[0]), static_cast<PointCoordinateType>(N.Buffer()[1]), static_cast<PointCoordinateType>(N.Buffer()[2]) ); normsTable->addElement(ccNormalVectors::GetNormIndex(Npc.u)); } mesh->addTriangleNormalIndexes(nIndex,nIndex+1,nIndex+2); if (pSize == 4) mesh->addTriangleNormalIndexes(nIndex,nIndex+2,nIndex+3); } } if (mesh->size() == 0) { ccLog::Warning(QString("[FBX] No triangle found in mesh '%1'! (only triangles are supported for the moment)").arg(fbxMesh->GetName())); delete mesh; return 0; } } //import vertices { const FbxVector4* fbxVertices = fbxMesh->GetControlPoints(); assert(vertices && fbxVertices); CCVector3d Pshift(0,0,0); for (int i=0; i<vertCount; ++i, ++fbxVertices) { const double* P = fbxVertices->Buffer(); assert(P[3] == 0); //coordinate shift management if (i == 0) { bool shiftAlreadyEnabled = (coordinatesShiftEnabled && *coordinatesShiftEnabled && coordinatesShift); if (shiftAlreadyEnabled) Pshift = *coordinatesShift; bool applyAll = false; if ( sizeof(PointCoordinateType) < 8 && ccCoordinatesShiftManager::Handle(P,0,alwaysDisplayLoadDialog,shiftAlreadyEnabled,Pshift,0,applyAll)) { vertices->setGlobalShift(Pshift); ccLog::Warning("[FBX] Mesh has been recentered! 
Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z); //we save coordinates shift information if (applyAll && coordinatesShiftEnabled && coordinatesShift) { *coordinatesShiftEnabled = true; *coordinatesShift = Pshift; } } } CCVector3 PV( static_cast<PointCoordinateType>(P[0] + Pshift.x), static_cast<PointCoordinateType>(P[1] + Pshift.y), static_cast<PointCoordinateType>(P[2] + Pshift.z) ); vertices->addPoint(PV); } } //import textures { //TODO } return mesh; }
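// --- Illustrative sketch only ------------------------------------------------
// FromFbxMesh() above splits quads into two triangles (i1,i2,i3)+(i1,i3,i4); the
// same "fan" triangulation generalizes to convex n-gons, which is what the OBJ
// importer further below does. Hypothetical helper, not part of the loader:
#include <array>
#include <cstddef>
#include <vector>
static std::vector<std::array<int,3> > FanTriangulate(const std::vector<int>& polygon)
{
	std::vector<std::array<int,3> > triangles;
	for (std::size_t i = 1; i + 1 < polygon.size(); ++i)
	{
		std::array<int,3> tri = { polygon[0], polygon[i], polygon[i+1] };
		triangles.push_back(tri);
	}
	return triangles; // {i1,i2,i3,i4} -> {i1,i2,i3} and {i1,i3,i4}
}
// Only valid for convex, planar polygons - which is the stated limitation above.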
CC_FILE_ERROR AsciiFilter::loadCloudFromFormatedAsciiFile( const QString& filename, ccHObject& container, const AsciiOpenDlg::Sequence& openSequence, char separator, unsigned approximateNumberOfLines, qint64 fileSize, unsigned maxCloudSize, unsigned skipLines, LoadParameters& parameters) { //we may have to "slice" clouds when opening them if they are too big! maxCloudSize = std::min(maxCloudSize,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD); unsigned cloudChunkSize = std::min(maxCloudSize,approximateNumberOfLines); unsigned cloudChunkPos = 0; unsigned chunkRank = 1; //we initialize the loading accelerator structure and point cloud int maxPartIndex = -1; cloudAttributesDescriptor cloudDesc = prepareCloud(openSequence, cloudChunkSize, maxPartIndex, separator, chunkRank); if (!cloudDesc.cloud) return CC_FERR_NOT_ENOUGH_MEMORY; //we re-open the file (ASCII mode) QFile file(filename); if (!file.open(QFile::ReadOnly)) { //we clear already initialized data clearStructure(cloudDesc); return CC_FERR_READING; } QTextStream stream(&file); //we skip lines as defined on input { for (unsigned i=0; i<skipLines; ++i) { stream.readLine(); } } //progress indicator ccProgressDialog pdlg(true); CCLib::NormalizedProgress nprogress(&pdlg,approximateNumberOfLines); pdlg.setMethodTitle(qPrintable(QString("Open ASCII file [%1]").arg(filename))); pdlg.setInfo(qPrintable(QString("Approximate number of points: %1").arg(approximateNumberOfLines))); pdlg.start(); //buffers ScalarType D = 0; CCVector3d P(0,0,0); CCVector3d Pshift(0,0,0); CCVector3 N(0,0,0); ccColor::Rgb col; //other useful variables unsigned linesRead = 0; unsigned pointsRead = 0; CC_FILE_ERROR result = CC_FERR_NO_ERROR; //main process unsigned nextLimit = /*cloudChunkPos+*/cloudChunkSize; QString currentLine = stream.readLine(); while (!currentLine.isNull()) { ++linesRead; //comment if (currentLine.startsWith("//")) { currentLine = stream.readLine(); continue; } if (currentLine.size() == 0) { ccLog::Warning("[AsciiFilter::Load] Line %i is corrupted (empty)!",linesRead); currentLine = stream.readLine(); continue; } //if we have reached the max. number of points per cloud if (pointsRead == nextLimit) { ccLog::PrintDebug("[ASCII] Point %i -> end of chunk (%i points)",pointsRead,cloudChunkSize); //we re-evaluate the average line size { double averageLineSize = static_cast<double>(file.pos())/(pointsRead+skipLines); double newNbOfLinesApproximation = std::max(1.0, static_cast<double>(fileSize)/averageLineSize - static_cast<double>(skipLines)); //if approximation is smaller than actual one, we add 2% by default if (newNbOfLinesApproximation <= pointsRead) { newNbOfLinesApproximation = std::max(static_cast<double>(cloudChunkPos+cloudChunkSize)+1.0,static_cast<double>(pointsRead) * 1.02); } approximateNumberOfLines = static_cast<unsigned>(ceil(newNbOfLinesApproximation)); ccLog::PrintDebug("[ASCII] New approximate nb of lines: %i",approximateNumberOfLines); } //we try to resize actual clouds if (cloudChunkSize < maxCloudSize || approximateNumberOfLines-cloudChunkPos <= maxCloudSize) { ccLog::PrintDebug("[ASCII] We choose to enlarge existing clouds"); cloudChunkSize = std::min(maxCloudSize,approximateNumberOfLines-cloudChunkPos); if (!cloudDesc.cloud->reserve(cloudChunkSize)) { ccLog::Error("Not enough memory! 
Process stopped ..."); result = CC_FERR_NOT_ENOUGH_MEMORY; break; } } else //otherwise we have to create new clouds { ccLog::PrintDebug("[ASCII] We choose to instantiate new clouds"); //we store (and resize) actual cloud if (!cloudDesc.cloud->resize(cloudChunkSize)) ccLog::Warning("Memory reallocation failed ... some memory may have been wasted ..."); if (!cloudDesc.scalarFields.empty()) { for (unsigned k=0; k<cloudDesc.scalarFields.size(); ++k) cloudDesc.scalarFields[k]->computeMinAndMax(); cloudDesc.cloud->setCurrentDisplayedScalarField(0); cloudDesc.cloud->showSF(true); } //we add this cloud to the output container container.addChild(cloudDesc.cloud); cloudDesc.reset(); //and create new one cloudChunkPos = pointsRead; cloudChunkSize = std::min(maxCloudSize,approximateNumberOfLines-cloudChunkPos); cloudDesc = prepareCloud(openSequence, cloudChunkSize, maxPartIndex, separator, ++chunkRank); if (!cloudDesc.cloud) { ccLog::Error("Not enough memory! Process stopped ..."); break; } cloudDesc.cloud->setGlobalShift(Pshift); } //we update the progress info nprogress.scale(approximateNumberOfLines,100,true); pdlg.setInfo(qPrintable(QString("Approximate number of points: %1").arg(approximateNumberOfLines))); nextLimit = cloudChunkPos+cloudChunkSize; } //we split current line QStringList parts = currentLine.split(separator,QString::SkipEmptyParts); int nParts = parts.size(); if (nParts > maxPartIndex) { //(X,Y,Z) if (cloudDesc.xCoordIndex >= 0) P.x = parts[cloudDesc.xCoordIndex].toDouble(); if (cloudDesc.yCoordIndex >= 0) P.y = parts[cloudDesc.yCoordIndex].toDouble(); if (cloudDesc.zCoordIndex >= 0) P.z = parts[cloudDesc.zCoordIndex].toDouble(); //first point: check for 'big' coordinates if (pointsRead == 0) { if (HandleGlobalShift(P,Pshift,parameters)) { cloudDesc.cloud->setGlobalShift(Pshift); ccLog::Warning("[ASCIIFilter::loadFile] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z); } } //add point cloudDesc.cloud->addPoint(CCVector3::fromArray((P+Pshift).u)); //Normal vector if (cloudDesc.hasNorms) { if (cloudDesc.xNormIndex >= 0) N.x = static_cast<PointCoordinateType>(parts[cloudDesc.xNormIndex].toDouble()); if (cloudDesc.yNormIndex >= 0) N.y = static_cast<PointCoordinateType>(parts[cloudDesc.yNormIndex].toDouble()); if (cloudDesc.zNormIndex >= 0) N.z = static_cast<PointCoordinateType>(parts[cloudDesc.zNormIndex].toDouble()); cloudDesc.cloud->addNorm(N); } //Colors if (cloudDesc.hasRGBColors) { if (cloudDesc.iRgbaIndex >= 0) { const uint32_t rgb = parts[cloudDesc.iRgbaIndex].toInt(); col.r = ((rgb >> 16) & 0x0000ff); col.g = ((rgb >> 8 ) & 0x0000ff); col.b = ((rgb ) & 0x0000ff); } else if (cloudDesc.fRgbaIndex >= 0) { const float rgbf = parts[cloudDesc.fRgbaIndex].toFloat(); const uint32_t rgb = (uint32_t)(*((uint32_t*)&rgbf)); col.r = ((rgb >> 16) & 0x0000ff); col.g = ((rgb >> 8 ) & 0x0000ff); col.b = ((rgb ) & 0x0000ff); } else { if (cloudDesc.redIndex >= 0) { float multiplier = cloudDesc.hasFloatRGBColors[0] ? static_cast<float>(ccColor::MAX) : 1.0f; col.r = static_cast<ColorCompType>(parts[cloudDesc.redIndex].toFloat() * multiplier); } if (cloudDesc.greenIndex >= 0) { float multiplier = cloudDesc.hasFloatRGBColors[1] ? static_cast<float>(ccColor::MAX) : 1.0f; col.g = static_cast<ColorCompType>(parts[cloudDesc.greenIndex].toFloat() * multiplier); } if (cloudDesc.blueIndex >= 0) { float multiplier = cloudDesc.hasFloatRGBColors[2] ? 
static_cast<float>(ccColor::MAX) : 1.0f; col.b = static_cast<ColorCompType>(parts[cloudDesc.blueIndex].toFloat() * multiplier); } } cloudDesc.cloud->addRGBColor(col.rgb); } else if (cloudDesc.greyIndex >= 0)
CC_FILE_ERROR LASFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, CCVector3d* coordinatesShift/*=0*/) { //opening file std::ifstream ifs; ifs.open(filename, std::ios::in | std::ios::binary); if (ifs.fail()) return CC_FERR_READING; liblas::Reader* reader = 0; unsigned nbOfPoints = 0; std::vector<std::string> dimensions; try { reader = new liblas::Reader(liblas::ReaderFactory().CreateWithStream(ifs)); //using factory for automatic and transparent //handling of compressed/uncompressed files liblas::Header const& header = reader->GetHeader(); ccLog::PrintDebug(QString("[LAS FILE] %1 - signature: %2").arg(filename).arg(header.GetFileSignature().c_str())); //get fields present in file dimensions = header.GetSchema().GetDimensionNames(); //and of course the number of points nbOfPoints = header.GetPointRecordsCount(); } catch (...) { delete reader; ifs.close(); return CC_FERR_READING; } if (nbOfPoints==0) { //strange file ;) delete reader; ifs.close(); return CC_FERR_NO_LOAD; } //dialog to choose the fields to load if (!s_lasOpenDlg) s_lasOpenDlg = QSharedPointer<LASOpenDlg>(new LASOpenDlg()); s_lasOpenDlg->setDimensions(dimensions); if (alwaysDisplayLoadDialog && !s_lasOpenDlg->autoSkipMode() && !s_lasOpenDlg->exec()) { delete reader; ifs.close(); return CC_FERR_CANCELED_BY_USER; } bool ignoreDefaultFields = s_lasOpenDlg->ignoreDefaultFieldsCheckBox->isChecked(); //RGB color liblas::Color rgbColorMask; //(0,0,0) on construction if (s_lasOpenDlg->doLoad(LAS_RED)) rgbColorMask.SetRed(~0); if (s_lasOpenDlg->doLoad(LAS_GREEN)) rgbColorMask.SetGreen(~0); if (s_lasOpenDlg->doLoad(LAS_BLUE)) rgbColorMask.SetBlue(~0); bool loadColor = (rgbColorMask[0] || rgbColorMask[1] || rgbColorMask[2]); //progress dialog ccProgressDialog pdlg(true); //cancel available CCLib::NormalizedProgress nprogress(&pdlg,nbOfPoints); pdlg.setMethodTitle("Open LAS file"); pdlg.setInfo(qPrintable(QString("Points: %1").arg(nbOfPoints))); pdlg.start(); //number of points read from the beginning of the current cloud part unsigned pointsRead = 0; CCVector3d Pshift(0,0,0); //by default we read colors as 8-bit integers and will switch to 16 bits if needed (16 bits is the standard!) unsigned char colorCompBitDec = 0; colorType rgb[3] = {0,0,0}; ccPointCloud* loadedCloud = 0; std::vector<LasField> fieldsToLoad; //if the file is too big, we will chunk it into multiple parts unsigned int fileChunkPos = 0; unsigned int fileChunkSize = 0; while (true) { //if we reach the end of the file, or the max. cloud size limit (in which case we create a new chunk) bool newPointAvailable = (nprogress.oneStep() && reader->ReadNextPoint()); if (!newPointAvailable || pointsRead == fileChunkPos+fileChunkSize) { if (loadedCloud) { if (loadedCloud->size()) { bool thisChunkHasColors = loadedCloud->hasColors(); loadedCloud->showColors(thisChunkHasColors); if (loadColor && !thisChunkHasColors) ccLog::Warning("[LAS FILE] Color field was all black!
We ignored it..."); while (!fieldsToLoad.empty()) { LasField& field = fieldsToLoad.back(); if (field.sf) { field.sf->computeMinAndMax(); if (field.type == LAS_CLASSIFICATION || field.type == LAS_RETURN_NUMBER || field.type == LAS_NUMBER_OF_RETURNS) { int cMin = (int)field.sf->getMin(); int cMax = (int)field.sf->getMax(); field.sf->setColorRampSteps(std::min<int>(cMax-cMin+1,256)); //classifSF->setMinSaturation(cMin); } else if (field.type == LAS_INTENSITY) { field.sf->setColorScale(ccColorScalesManager::GetDefaultScale(ccColorScalesManager::GREY)); } int sfIndex = loadedCloud->addScalarField(field.sf); if (!loadedCloud->hasDisplayedScalarField()) { loadedCloud->setCurrentDisplayedScalarField(sfIndex); loadedCloud->showSF(!thisChunkHasColors); } field.sf->release(); field.sf=0; } else { ccLog::Warning(QString("[LAS FILE] All '%1' values were the same (%2)! We ignored them...").arg(LAS_FIELD_NAMES[field.type]).arg(field.firstValue)); } fieldsToLoad.pop_back(); } //if we have reserved too much memory if (loadedCloud->size() < loadedCloud->capacity()) loadedCloud->resize(loadedCloud->size()); QString chunkName("unnamed - Cloud"); unsigned n = container.getChildrenNumber(); if (n!=0) //if we have more than one cloud, we append an index { if (n==1) //we must also update the first one! container.getChild(0)->setName(chunkName+QString(" #1")); chunkName += QString(" #%1").arg(n+1); } loadedCloud->setName(chunkName); container.addChild(loadedCloud); loadedCloud=0; } else { //empty cloud?! delete loadedCloud; loadedCloud=0; } } if (!newPointAvailable) break; //end of the file (or cancel requested) //otherwise, we must create a new cloud fileChunkPos = pointsRead; fileChunkSize = std::min(nbOfPoints-pointsRead,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD); loadedCloud = new ccPointCloud(); if (!loadedCloud->reserveThePointsTable(fileChunkSize)) { ccLog::Warning("[LASFilter::loadFile] Not enough memory!"); delete loadedCloud; delete reader; ifs.close(); return CC_FERR_NOT_ENOUGH_MEMORY; } loadedCloud->setGlobalShift(Pshift); //DGM: from now on, we only enable scalar fields when we detect a valid value! 
if (s_lasOpenDlg->doLoad(LAS_CLASSIFICATION)) fieldsToLoad.push_back(LasField(LAS_CLASSIFICATION,0,0,255)); //unsigned char: between 0 and 255 if (s_lasOpenDlg->doLoad(LAS_CLASSIF_VALUE)) fieldsToLoad.push_back(LasField(LAS_CLASSIF_VALUE,0,0,31)); //5 bits: between 0 and 31 if (s_lasOpenDlg->doLoad(LAS_CLASSIF_SYNTHETIC)) fieldsToLoad.push_back(LasField(LAS_CLASSIF_SYNTHETIC,0,0,1)); //1 bit: 0 or 1 if (s_lasOpenDlg->doLoad(LAS_CLASSIF_KEYPOINT)) fieldsToLoad.push_back(LasField(LAS_CLASSIF_KEYPOINT,0,0,1)); //1 bit: 0 or 1 if (s_lasOpenDlg->doLoad(LAS_CLASSIF_WITHHELD)) fieldsToLoad.push_back(LasField(LAS_CLASSIF_WITHHELD,0,0,1)); //1 bit: 0 or 1 if (s_lasOpenDlg->doLoad(LAS_INTENSITY)) fieldsToLoad.push_back(LasField(LAS_INTENSITY,0,0,65535)); //16 bits: between 0 and 65536 if (s_lasOpenDlg->doLoad(LAS_TIME)) fieldsToLoad.push_back(LasField(LAS_TIME,0,0,-1.0)); //8 bytes (double) if (s_lasOpenDlg->doLoad(LAS_RETURN_NUMBER)) fieldsToLoad.push_back(LasField(LAS_RETURN_NUMBER,1,1,7)); //3 bits: between 1 and 7 if (s_lasOpenDlg->doLoad(LAS_NUMBER_OF_RETURNS)) fieldsToLoad.push_back(LasField(LAS_NUMBER_OF_RETURNS,1,1,7)); //3 bits: between 1 and 7 if (s_lasOpenDlg->doLoad(LAS_SCAN_DIRECTION)) fieldsToLoad.push_back(LasField(LAS_SCAN_DIRECTION,0,0,1)); //1 bit: 0 or 1 if (s_lasOpenDlg->doLoad(LAS_FLIGHT_LINE_EDGE)) fieldsToLoad.push_back(LasField(LAS_FLIGHT_LINE_EDGE,0,0,1)); //1 bit: 0 or 1 if (s_lasOpenDlg->doLoad(LAS_SCAN_ANGLE_RANK)) fieldsToLoad.push_back(LasField(LAS_SCAN_ANGLE_RANK,0,-90,90)); //signed char: between -90 and +90 if (s_lasOpenDlg->doLoad(LAS_USER_DATA)) fieldsToLoad.push_back(LasField(LAS_USER_DATA,0,0,255)); //unsigned char: between 0 and 255 if (s_lasOpenDlg->doLoad(LAS_POINT_SOURCE_ID)) fieldsToLoad.push_back(LasField(LAS_POINT_SOURCE_ID,0,0,65535)); //16 bits: between 0 and 65536 } assert(newPointAvailable); const liblas::Point& p = reader->GetPoint(); //first point: check for 'big' coordinates if (pointsRead == 0) { CCVector3d P( p.GetX(),p.GetY(),p.GetZ() ); bool shiftAlreadyEnabled = (coordinatesShiftEnabled && *coordinatesShiftEnabled && coordinatesShift); if (shiftAlreadyEnabled) Pshift = *coordinatesShift; bool applyAll = false; if ( sizeof(PointCoordinateType) < 8 && ccCoordinatesShiftManager::Handle(P.u,0,alwaysDisplayLoadDialog,shiftAlreadyEnabled,Pshift,0,applyAll)) { loadedCloud->setGlobalShift(Pshift); ccLog::Warning("[LASFilter::loadFile] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z); //we save coordinates shift information if (applyAll && coordinatesShiftEnabled && coordinatesShift) { *coordinatesShiftEnabled = true; *coordinatesShift = Pshift; } } } CCVector3 P(static_cast<PointCoordinateType>(p.GetX()+Pshift.x), static_cast<PointCoordinateType>(p.GetY()+Pshift.y), static_cast<PointCoordinateType>(p.GetZ()+Pshift.z)); loadedCloud->addPoint(P); //color field if (loadColor) { //Warning: LAS colors are stored on 16 bits! liblas::Color col = p.GetColor(); col[0] &= rgbColorMask[0]; col[1] &= rgbColorMask[1]; col[2] &= rgbColorMask[2]; //if we don't have reserved a color field yet, we check first that color is not black bool pushColor = true; if (!loadedCloud->hasColors()) { //if the color is not black, we are sure it's a valid color field! 
if (col[0] || col[1] || col[2]) { if (loadedCloud->reserveTheRGBTable()) { //we must set the color (black) of all the precedently skipped points for (unsigned i=0;i<loadedCloud->size()-1;++i) loadedCloud->addRGBColor(ccColor::black); } else { ccLog::Warning("[LAS FILE] Not enough memory: color field will be ignored!"); loadColor = false; //no need to retry with the other chunks anyway pushColor = false; } } else //otherwise we ignore it for the moment (we'll add it later if necessary) { pushColor = false; } } //do we need to push this color? if (pushColor) { //we test if the color components are on 16 bits (standard) or only on 8 bits (it happens ;) if (colorCompBitDec==0) { if ( (col[0] & 0xFF00) || (col[1] & 0xFF00) || (col[2] & 0xFF00)) { //the color components are on 16 bits! ccLog::Print("[LAS FILE] Color components are coded on 16 bits"); colorCompBitDec = 8; //we fix all the precedently read colors for (unsigned i=0;i<loadedCloud->size()-1;++i) loadedCloud->setPointColor(i,ccColor::black); //255 >> 8 = 0! } } rgb[0]=(colorType)(col[0]>>colorCompBitDec); rgb[1]=(colorType)(col[1]>>colorCompBitDec); rgb[2]=(colorType)(col[2]>>colorCompBitDec); loadedCloud->addRGBColor(rgb); } } //additional fields for (std::vector<LasField>::iterator it = fieldsToLoad.begin(); it != fieldsToLoad.end(); ++it) { double value = 0.0; switch (it->type) { case LAS_X: case LAS_Y: case LAS_Z: assert(false); break; case LAS_INTENSITY: value = (double)p.GetIntensity(); break; case LAS_RETURN_NUMBER: value = (double)p.GetReturnNumber(); break; case LAS_NUMBER_OF_RETURNS: value = (double)p.GetNumberOfReturns(); break; case LAS_SCAN_DIRECTION: value = (double)p.GetScanDirection(); break; case LAS_FLIGHT_LINE_EDGE: value = (double)p.GetFlightLineEdge(); break; case LAS_CLASSIFICATION: value = (double)p.GetClassification().GetClass(); break; case LAS_SCAN_ANGLE_RANK: value = (double)p.GetScanAngleRank(); break; case LAS_USER_DATA: value = (double)p.GetUserData(); break; case LAS_POINT_SOURCE_ID: value = (double)p.GetPointSourceID(); break; case LAS_RED: case LAS_GREEN: case LAS_BLUE: assert(false); break; case LAS_TIME: value = p.GetTime(); break; case LAS_CLASSIF_VALUE: value = (double)(p.GetClassification().GetClass() & 31); //5 bits break; case LAS_CLASSIF_SYNTHETIC: value = (double)(p.GetClassification().GetClass() & 32); //bit #6 break; case LAS_CLASSIF_KEYPOINT: value = (double)(p.GetClassification().GetClass() & 64); //bit #7 break; case LAS_CLASSIF_WITHHELD: value = (double)(p.GetClassification().GetClass() & 128); //bit #8 break; case LAS_INVALID: default: assert(false); break; } if (it->sf) { ScalarType s = static_cast<ScalarType>(value); it->sf->addElement(s); } else { //first point? we track its value if (loadedCloud->size() == 1) { it->firstValue = value; } if (!ignoreDefaultFields || value != it->firstValue || it->firstValue != it->defaultValue) { it->sf = new ccScalarField(it->getName()); if (it->sf->reserve(fileChunkSize)) { it->sf->link(); //we must set the value (firstClassifValue) of all the precedently skipped points ScalarType firstS = static_cast<ScalarType>(it->firstValue); for (unsigned i=0; i<loadedCloud->size()-1; ++i) it->sf->addElement(firstS); ScalarType s = static_cast<ScalarType>(value); it->sf->addElement(s); } else { ccLog::Warning(QString("[LAS FILE] Not enough memory: '%1' field will be ignored!").arg(LAS_FIELD_NAMES[it->type])); it->sf->release(); it->sf = 0; } } } } ++pointsRead; } if (reader) delete reader; reader=0; ifs.close(); return CC_FERR_NO_ERROR; }
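// --- Hedged sketch (not the liblas API) --------------------------------------
// The loader above guesses the color depth on the fly: LAS colors are nominally
// 16-bit, but some producers write 8-bit values. It assumes 8-bit (no shift)
// until one component uses its high byte, then switches to a >>8 decimation;
// previously stored colors must then be fixed (they are reset to black above).
// Hypothetical, self-contained version of that logic:
#include <cstdint>
struct Rgb8 { std::uint8_t r, g, b; };
struct LasColorDecoder
{
	unsigned bitShift = 0; // 0 while the input looks 8-bit, 8 once proven 16-bit
	bool decode(std::uint16_t r, std::uint16_t g, std::uint16_t b, Rgb8& out)
	{
		bool switched = false;
		if (bitShift == 0 && ((r | g | b) & 0xFF00))
		{
			bitShift = 8;
			switched = true; // caller should re-process previously decoded colors
		}
		out.r = static_cast<std::uint8_t>(r >> bitShift);
		out.g = static_cast<std::uint8_t>(g >> bitShift);
		out.b = static_cast<std::uint8_t>(b >> bitShift);
		return switched;
	}
};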
CC_FILE_ERROR RasterGridFilter::loadFile(QString filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, CCVector3d* coordinatesShift/*=0*/) { GDALAllRegister(); ccLog::PrintDebug("(GDAL drivers: %i)", GetGDALDriverManager()->GetDriverCount()); GDALDataset* poDataset = static_cast<GDALDataset*>(GDALOpen( qPrintable(filename), GA_ReadOnly )); if( poDataset != NULL ) { ccLog::Print(QString("Raster file: '%1'").arg(filename)); ccLog::Print( "Driver: %s/%s", poDataset->GetDriver()->GetDescription(), poDataset->GetDriver()->GetMetadataItem( GDAL_DMD_LONGNAME ) ); int rasterCount = poDataset->GetRasterCount(); int rasterX = poDataset->GetRasterXSize(); int rasterY = poDataset->GetRasterYSize(); ccLog::Print( "Size is %dx%dx%d", rasterX, rasterY, rasterCount ); ccPointCloud* pc = new ccPointCloud(); if (!pc->reserve(static_cast<unsigned>(rasterX * rasterY))) { delete pc; return CC_FERR_NOT_ENOUGH_MEMORY; } if( poDataset->GetProjectionRef() != NULL ) ccLog::Print( "Projection is `%s'", poDataset->GetProjectionRef() ); double adfGeoTransform[6] = { 0, //top left x 1, //w-e pixel resolution (can be negative) 0, //0 0, //top left y 0, //0 1 //n-s pixel resolution (can be negative) }; if( poDataset->GetGeoTransform( adfGeoTransform ) == CE_None ) { ccLog::Print( "Origin = (%.6f,%.6f)", adfGeoTransform[0], adfGeoTransform[3] ); ccLog::Print( "Pixel Size = (%.6f,%.6f)", adfGeoTransform[1], adfGeoTransform[5] ); } if (adfGeoTransform[1] == 0 || adfGeoTransform[5] == 0) { ccLog::Warning("Invalid pixel size! Forcing it to (1,1)"); adfGeoTransform[1] = adfGeoTransform[5] = 1; } CCVector3d origin( adfGeoTransform[0], adfGeoTransform[3], 0.0 ); CCVector3d Pshift(0,0,0); //check for 'big' coordinates { bool shiftAlreadyEnabled = (coordinatesShiftEnabled && *coordinatesShiftEnabled && coordinatesShift); if (shiftAlreadyEnabled) Pshift = *coordinatesShift; bool applyAll = false; if ( sizeof(PointCoordinateType) < 8 && ccCoordinatesShiftManager::Handle(origin,0,alwaysDisplayLoadDialog,shiftAlreadyEnabled,Pshift,0,&applyAll)) { pc->setGlobalShift(Pshift); ccLog::Warning("[RasterFilter::loadFile] Raster has been recentered! 
Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z); //we save coordinates shift information if (applyAll && coordinatesShiftEnabled && coordinatesShift) { *coordinatesShiftEnabled = true; *coordinatesShift = Pshift; } } } //create blank raster 'grid' { double z = 0.0 /*+ Pshift.z*/; for (int j=0; j<rasterY; ++j) { double y = adfGeoTransform[3] + static_cast<double>(j) * adfGeoTransform[5] + Pshift.y; CCVector3 P( 0, static_cast<PointCoordinateType>(y), static_cast<PointCoordinateType>(z)); for (int i=0; i<rasterX; ++i) { double x = adfGeoTransform[0] + static_cast<double>(i) * adfGeoTransform[1] + Pshift.x; P.x = static_cast<PointCoordinateType>(x); pc->addPoint(P); } } QVariant xVar = QVariant::fromValue<int>(rasterX); QVariant yVar = QVariant::fromValue<int>(rasterY); pc->setMetaData("raster_width",xVar); pc->setMetaData("raster_height",yVar); } //fetch raster bands bool zRasterProcessed = false; unsigned zInvalid = 0; double zMinMax[2] = {0, 0}; for (int i=1; i<=rasterCount; ++i) { ccLog::Print( "Reading band #%i", i); GDALRasterBand* poBand = poDataset->GetRasterBand(i); GDALColorInterp colorInterp = poBand->GetColorInterpretation(); GDALDataType bandType = poBand->GetRasterDataType(); int nBlockXSize, nBlockYSize; poBand->GetBlockSize( &nBlockXSize, &nBlockYSize ); ccLog::Print( "Block=%dx%d Type=%s, ColorInterp=%s", nBlockXSize, nBlockYSize, GDALGetDataTypeName(poBand->GetRasterDataType()), GDALGetColorInterpretationName(colorInterp) ); //fetching raster scan-line int nXSize = poBand->GetXSize(); int nYSize = poBand->GetYSize(); assert(nXSize == rasterX); assert(nYSize == rasterY); int bGotMin, bGotMax; double adfMinMax[2] = {0, 0}; adfMinMax[0] = poBand->GetMinimum( &bGotMin ); adfMinMax[1] = poBand->GetMaximum( &bGotMax ); if (!bGotMin || !bGotMax ) //DGM FIXME: if the file is corrupted (e.g. ASCII ArcGrid with missing rows) this method will enter in a infinite loop! GDALComputeRasterMinMax((GDALRasterBandH)poBand, TRUE, adfMinMax); ccLog::Print( "Min=%.3fd, Max=%.3f", adfMinMax[0], adfMinMax[1] ); GDALColorTable* colTable = poBand->GetColorTable(); if( colTable != NULL ) printf( "Band has a color table with %d entries", colTable->GetColorEntryCount() ); if( poBand->GetOverviewCount() > 0 ) printf( "Band has %d overviews", poBand->GetOverviewCount() ); if (colorInterp == GCI_Undefined && !zRasterProcessed/*&& !colTable*/) //probably heights? 
{ zRasterProcessed = true; zMinMax[0] = adfMinMax[0]; zMinMax[1] = adfMinMax[1]; double* scanline = (double*) CPLMalloc(sizeof(double)*nXSize); //double* scanline = new double[nXSize]; memset(scanline,0,sizeof(double)*nXSize); for (int j=0; j<nYSize; ++j) { if (poBand->RasterIO( GF_Read, /*xOffset=*/0, /*yOffset=*/j, /*xSize=*/nXSize, /*ySize=*/1, /*buffer=*/scanline, /*bufferSizeX=*/nXSize, /*bufferSizeY=*/1, /*bufferType=*/GDT_Float64, /*x_offset=*/0, /*y_offset=*/0 ) != CE_None) { delete pc; CPLFree(scanline); GDALClose(poDataset); return CC_FERR_READING; } for (int k=0; k<nXSize; ++k) { double z = static_cast<double>(scanline[k]) + Pshift[2]; unsigned pointIndex = static_cast<unsigned>(k + j * rasterX); if (pointIndex <= pc->size()) { if (z < zMinMax[0] || z > zMinMax[1]) { z = zMinMax[0] - 1.0; ++zInvalid; } const_cast<CCVector3*>(pc->getPoint(pointIndex))->z = static_cast<PointCoordinateType>(z); } } } //update bounding-box pc->invalidateBoundingBox(); if (scanline) CPLFree(scanline); scanline = 0; } else //colors { bool isRGB = false; bool isScalar = false; bool isPalette = false; switch(colorInterp) { case GCI_Undefined: isScalar = true; break; case GCI_PaletteIndex: isPalette = true; break; case GCI_RedBand: case GCI_GreenBand: case GCI_BlueBand: isRGB = true; break; case GCI_AlphaBand: if (adfMinMax[0] != adfMinMax[1]) isScalar = true; else ccLog::Warning(QString("Alpha band ignored as it has a unique value (%1)").arg(adfMinMax[0])); break; default: isScalar = true; break; } if (isRGB || isPalette) { //first check that a palette exists if the band is a palette index if (isPalette && !colTable) { ccLog::Warning(QString("Band is declared as a '%1' but no palette is associated!").arg(GDALGetColorInterpretationName(colorInterp))); isPalette = false; } else { //instantiate memory for RBG colors if necessary if (!pc->hasColors() && !pc->setRGBColor(MAX_COLOR_COMP,MAX_COLOR_COMP,MAX_COLOR_COMP)) { ccLog::Warning(QString("Failed to instantiate memory for storing color band '%1'!").arg(GDALGetColorInterpretationName(colorInterp))); } else { assert(bandType <= GDT_Int32); int* colIndexes = (int*) CPLMalloc(sizeof(int)*nXSize); //double* scanline = new double[nXSize]; memset(colIndexes,0,sizeof(int)*nXSize); for (int j=0; j<nYSize; ++j) { if (poBand->RasterIO( GF_Read, /*xOffset=*/0, /*yOffset=*/j, /*xSize=*/nXSize, /*ySize=*/1, /*buffer=*/colIndexes, /*bufferSizeX=*/nXSize, /*bufferSizeY=*/1, /*bufferType=*/GDT_Int32, /*x_offset=*/0, /*y_offset=*/0 ) != CE_None) { CPLFree(colIndexes); delete pc; return CC_FERR_READING; } for (int k=0; k<nXSize; ++k) { unsigned pointIndex = static_cast<unsigned>(k + j * rasterX); if (pointIndex <= pc->size()) { colorType* C = const_cast<colorType*>(pc->getPointColor(pointIndex)); switch(colorInterp) { case GCI_PaletteIndex: assert(colTable); { GDALColorEntry col; colTable->GetColorEntryAsRGB(colIndexes[k],&col); C[0] = static_cast<colorType>(col.c1 & MAX_COLOR_COMP); C[1] = static_cast<colorType>(col.c2 & MAX_COLOR_COMP); C[2] = static_cast<colorType>(col.c3 & MAX_COLOR_COMP); } break; case GCI_RedBand: C[0] = static_cast<colorType>(colIndexes[k] & MAX_COLOR_COMP); break; case GCI_GreenBand: C[1] = static_cast<colorType>(colIndexes[k] & MAX_COLOR_COMP); break; case GCI_BlueBand: C[2] = static_cast<colorType>(colIndexes[k] & MAX_COLOR_COMP); break; default: assert(false); break; } } } } if (colIndexes) CPLFree(colIndexes); colIndexes = 0; pc->showColors(true); } } } else if (isScalar) { ccScalarField* sf = new 
ccScalarField(GDALGetColorInterpretationName(colorInterp)); if (!sf->resize(pc->size(),true,NAN_VALUE)) { ccLog::Warning(QString("Failed to instantiate memory for storing '%1' as a scalar field!").arg(sf->getName())); sf->release(); sf = 0; } else { double* colValues = (double*) CPLMalloc(sizeof(double)*nXSize); //double* scanline = new double[nXSize]; memset(colValues,0,sizeof(double)*nXSize); for (int j=0; j<nYSize; ++j) { if (poBand->RasterIO( GF_Read, /*xOffset=*/0, /*yOffset=*/j, /*xSize=*/nXSize, /*ySize=*/1, /*buffer=*/colValues, /*bufferSizeX=*/nXSize, /*bufferSizeY=*/1, /*bufferType=*/GDT_Float64, /*x_offset=*/0, /*y_offset=*/0 ) != CE_None) { CPLFree(colValues); delete pc; return CC_FERR_READING; } for (int k=0; k<nXSize; ++k) { unsigned pointIndex = static_cast<unsigned>(k + j * rasterX); if (pointIndex <= pc->size()) { ScalarType s = static_cast<ScalarType>(colValues[k]); sf->setValue(pointIndex,s); } } } if (colValues) CPLFree(colValues); colValues = 0; sf->computeMinAndMax(); pc->addScalarField(sf); if (pc->getNumberOfScalarFields() == 1) pc->setCurrentDisplayedScalarField(0); pc->showSF(true); } } } } if (pc) { if (!zRasterProcessed) { ccLog::Warning("Raster has no height (Z) information: you can convert one of its scalar fields to Z with 'Edit > Scalar Fields > Set SF as coordinate(s)'"); } else if (zInvalid != 0 && zInvalid < pc->size()) { //shall we remove the points with invalid heights? if (QMessageBox::question(0,"Remove NaN points?","This raster has pixels with invalid heights. Shall we remove them?",QMessageBox::Yes, QMessageBox::No) == QMessageBox::Yes) { CCLib::ReferenceCloud validPoints(pc); unsigned count = pc->size(); bool error = true; if (validPoints.reserve(count-zInvalid)) { for (unsigned i=0; i<count; ++i) { if (pc->getPoint(i)->z >= zMinMax[0]) validPoints.addPointIndex(i); } if (validPoints.size() > 0) { validPoints.resize(validPoints.size()); ccPointCloud* newPC = pc->partialClone(&validPoints); if (newPC) { delete pc; pc = newPC; error = false; } } else { assert(false); } } if (error) { ccLog::Error("Not enough memory to remove the points with invalid heights!"); } } } container.addChild(pc); } GDALClose(poDataset); } else { return CC_FERR_UNKNOWN_FILE; } return CC_FERR_NO_ERROR; }
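// --- Minimal sketch, independent of the GDAL/CloudCompare types used above ---
// How the raster loader maps a pixel (col,row) to a flat point index and to
// world X/Y through the 6-term geotransform; the rotation terms (indices 2 and
// 4) are assumed to be zero, as in the code above. Hypothetical helper:
#include <cstddef>
struct RasterToCloudMapping
{
	double geo[6];      // { originX, pixelW, rotX(=0), originY, rotY(=0), pixelH }
	int    rasterX = 0; // raster width (pixels per row)
	std::size_t pointIndex(int col, int row) const
	{ return static_cast<std::size_t>(col) + static_cast<std::size_t>(row) * static_cast<std::size_t>(rasterX); }
	double worldX(int col) const { return geo[0] + col * geo[1]; }
	double worldY(int row) const { return geo[3] + row * geo[5]; }
};
// e.g. pixel (k,j) lands at index k + j*rasterX, exactly the order in which the
// blank grid is filled above.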
CC_FILE_ERROR ObjFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters) { ccLog::Print(QString("[OBJ] ") + filename); //open file QFile file(filename); if (!file.open(QFile::ReadOnly)) return CC_FERR_READING; QTextStream stream(&file); //current vertex shift CCVector3d Pshift(0,0,0); //vertices ccPointCloud* vertices = new ccPointCloud("vertices"); int pointsRead = 0; //facets unsigned int facesRead = 0; unsigned int totalFacesRead = 0; int maxVertexIndex = -1; //base mesh ccMesh* baseMesh = new ccMesh(vertices); baseMesh->setName(QFileInfo(filename).baseName()); //we need some space already reserved! if (!baseMesh->reserve(128)) { ccLog::Error("Not enough memory!"); return CC_FERR_NOT_ENOUGH_MEMORY; } //groups (starting index + name) std::vector<std::pair<unsigned,QString> > groups; //materials ccMaterialSet* materials = 0; bool hasMaterial = false; int currentMaterial = -1; bool currentMaterialDefined = false; bool materialsLoadFailed = true; //texture coordinates TextureCoordsContainer* texCoords = 0; bool hasTexCoords = false; int texCoordsRead = 0; int maxTexCoordIndex = -1; //normals NormsIndexesTableType* normals = 0; int normsRead = 0; bool normalsPerFacet = false; int maxTriNormIndex = -1; //progress dialog ccProgressDialog pDlg(true); pDlg.setMethodTitle("OBJ file"); pDlg.setInfo("Loading in progress..."); pDlg.setRange(0,static_cast<int>(file.size())); pDlg.show(); QApplication::processEvents(); //common warnings that can appear multiple times (we avoid sending too many messages to the console!) enum OBJ_WARNINGS { INVALID_NORMALS = 0, INVALID_INDEX = 1, NOT_ENOUGH_MEMORY = 2, INVALID_LINE = 3, CANCELLED_BY_USER = 4, }; bool objWarnings[5] = { false, false, false, false, false }; bool error = false; try { unsigned lineCount = 0; unsigned polyCount = 0; QString currentLine = stream.readLine(); while (!currentLine.isNull()) { if ((++lineCount % 2048) == 0) { if (pDlg.wasCanceled()) { error = true; objWarnings[CANCELLED_BY_USER] = true; break; } pDlg.setValue(static_cast<int>(file.pos())); QApplication::processEvents(); } QStringList tokens = QString(currentLine).split(QRegExp("\\s+"),QString::SkipEmptyParts); //skip comments & empty lines if( tokens.empty() || tokens.front().startsWith('/',Qt::CaseInsensitive) || tokens.front().startsWith('#',Qt::CaseInsensitive) ) { currentLine = stream.readLine(); continue; } /*** new vertex ***/ if (tokens.front() == "v") { //reserve more memory if necessary if (vertices->size() == vertices->capacity()) { if (!vertices->reserve(vertices->capacity()+MAX_NUMBER_OF_ELEMENTS_PER_CHUNK)) { objWarnings[NOT_ENOUGH_MEMORY] = true; error = true; break; } } //malformed line? if (tokens.size() < 4) { objWarnings[INVALID_LINE] = true; error = true; break; } CCVector3d Pd( tokens[1].toDouble(), tokens[2].toDouble(), tokens[3].toDouble() ); //first point: check for 'big' coordinates if (pointsRead == 0) { if (HandleGlobalShift(Pd,Pshift,parameters)) { vertices->setGlobalShift(Pshift); ccLog::Warning("[OBJ] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z); } } //shifted point CCVector3 P = CCVector3::fromArray((Pd + Pshift).u); vertices->addPoint(P); ++pointsRead; } /*** new vertex texture coordinates ***/ else if (tokens.front() == "vt") { //create and reserve memory for tex. coords container if necessary
if (!texCoords) { texCoords = new TextureCoordsContainer(); texCoords->link(); } if (texCoords->currentSize() == texCoords->capacity()) { if (!texCoords->reserve(texCoords->capacity() + MAX_NUMBER_OF_ELEMENTS_PER_CHUNK)) { objWarnings[NOT_ENOUGH_MEMORY] = true; error = true; break; } } //malformed line? if (tokens.size() < 2) { objWarnings[INVALID_LINE] = true; error = true; break; } float T[2] = { tokens[1].toFloat(), 0 }; if (tokens.size() > 2) //the OBJ specification allows a 'vt' line to carry a single value! { T[1] = tokens[2].toFloat(); } texCoords->addElement(T); ++texCoordsRead; } /*** new vertex normal ***/ else if (tokens.front() == "vn") //--> in fact it can also be a facet normal!!! { //create and reserve memory for normals container if necessary if (!normals) { normals = new NormsIndexesTableType; normals->link(); } if (normals->currentSize() == normals->capacity()) { if (!normals->reserve(normals->capacity() + MAX_NUMBER_OF_ELEMENTS_PER_CHUNK)) { objWarnings[NOT_ENOUGH_MEMORY] = true; error = true; break; } } //malformed line? if (tokens.size() < 4) { objWarnings[INVALID_LINE] = true; error = true; break; } CCVector3 N(static_cast<PointCoordinateType>(tokens[1].toDouble()), static_cast<PointCoordinateType>(tokens[2].toDouble()), static_cast<PointCoordinateType>(tokens[3].toDouble())); if (fabs(N.norm2() - 1.0) > 0.005) { objWarnings[INVALID_NORMALS] = true; N.normalize(); } CompressedNormType nIndex = ccNormalVectors::GetNormIndex(N.u); normals->addElement(nIndex); //we don't know yet if it's per-vertex or per-triangle normal... ++normsRead; } /*** new group ***/ else if (tokens.front() == "g" || tokens.front() == "o") { //update new group index facesRead = 0; //get the group name QString groupName = (tokens.size() > 1 && !tokens[1].isEmpty() ? tokens[1] : "default"); for (int i=2; i<tokens.size(); ++i) //multiple parts? groupName.append(QString(" ")+tokens[i]); //push previous group descriptor (if none was pushed) if (groups.empty() && totalFacesRead > 0) groups.push_back(std::pair<unsigned,QString>(0,"default")); //push new group descriptor if (!groups.empty() && groups.back().first == totalFacesRead) groups.back().second = groupName; //simply replace the group name if the previous group was empty! else groups.push_back(std::pair<unsigned,QString>(totalFacesRead,groupName)); polyCount = 0; //restart polyline count at 0! } /*** new face ***/ else if (tokens.front().startsWith('f')) { //malformed line?
if (tokens.size() < 4) { objWarnings[INVALID_LINE] = true; currentLine = stream.readLine(); continue; //error = true; //break; } //read the face elements (singleton, pair or triplet) std::vector<facetElement> currentFace; { for (int i=1; i<tokens.size(); ++i) { QStringList vertexTokens = tokens[i].split('/'); if (vertexTokens.size() == 0 || vertexTokens[0].isEmpty()) { objWarnings[INVALID_LINE] = true; error = true; break; } else { //new vertex facetElement fe; //(0,0,0) by default fe.vIndex = vertexTokens[0].toInt(); if (vertexTokens.size() > 1 && !vertexTokens[1].isEmpty()) fe.tcIndex = vertexTokens[1].toInt(); if (vertexTokens.size() > 2 && !vertexTokens[2].isEmpty()) fe.nIndex = vertexTokens[2].toInt(); currentFace.push_back(fe); } } } if (error) break; if (currentFace.size() < 3) { ccLog::Warning("[OBJ] Malformed file: polygon on line %1 has less than 3 vertices!",lineCount); error = true; break; } //first vertex std::vector<facetElement>::iterator A = currentFace.begin(); //the very first vertex of the group tells us about the whole sequence if (facesRead == 0) { //we have a tex. coord index as second vertex element! if (!hasTexCoords && A->tcIndex != 0 && !materialsLoadFailed) { if (!baseMesh->reservePerTriangleTexCoordIndexes()) { objWarnings[NOT_ENOUGH_MEMORY] = true; error = true; break; } for (unsigned int i=0; i<totalFacesRead; ++i) baseMesh->addTriangleTexCoordIndexes(-1, -1, -1); hasTexCoords = true; } //we have a normal index as third vertex element! if (!normalsPerFacet && A->nIndex != 0) { //so the normals are 'per-facet' if (!baseMesh->reservePerTriangleNormalIndexes()) { objWarnings[NOT_ENOUGH_MEMORY] = true; error = true; break; } for (unsigned int i=0; i<totalFacesRead; ++i) baseMesh->addTriangleNormalIndexes(-1, -1, -1); normalsPerFacet = true; } } //we process all vertices accordingly for (std::vector<facetElement>::iterator it = currentFace.begin() ; it!=currentFace.end(); ++it) { facetElement& vertex = *it; //vertex index { if (!vertex.updatePointIndex(pointsRead)) { objWarnings[INVALID_INDEX] = true; error = true; break; } if (vertex.vIndex > maxVertexIndex) maxVertexIndex = vertex.vIndex; } //should we have a tex. coord index as second vertex element? if (hasTexCoords && currentMaterialDefined) { if (!vertex.updateTexCoordIndex(texCoordsRead)) { objWarnings[INVALID_INDEX] = true; error = true; break; } if (vertex.tcIndex > maxTexCoordIndex) maxTexCoordIndex = vertex.tcIndex; } //should we have a normal index as third vertex element? if (normalsPerFacet) { if (!vertex.updateNormalIndex(normsRead)) { objWarnings[INVALID_INDEX] = true; error = true; break; } if (vertex.nIndex > maxTriNormIndex) maxTriNormIndex = vertex.nIndex; } } //don't forget material (common for all vertices) if (currentMaterialDefined && !materialsLoadFailed) { if (!hasMaterial) { if (!baseMesh->reservePerTriangleMtlIndexes()) { objWarnings[NOT_ENOUGH_MEMORY] = true; error = true; break; } for (unsigned int i=0; i<totalFacesRead; ++i) baseMesh->addTriangleMtlIndex(-1); hasMaterial = true; } } if (error) break; //Now, let's tesselate the whole polygon //FIXME: yeah, we do very ulgy tesselation here! std::vector<facetElement>::const_iterator B = A+1; std::vector<facetElement>::const_iterator C = B+1; for ( ; C != currentFace.end(); ++B,++C) { //need more space? 
if (baseMesh->size() == baseMesh->capacity()) { if (!baseMesh->reserve(baseMesh->size()+128)) { objWarnings[NOT_ENOUGH_MEMORY] = true; error = true; break; } } //push new triangle baseMesh->addTriangle(A->vIndex, B->vIndex, C->vIndex); ++facesRead; ++totalFacesRead; if (hasMaterial) baseMesh->addTriangleMtlIndex(currentMaterial); if (hasTexCoords) baseMesh->addTriangleTexCoordIndexes(A->tcIndex, B->tcIndex, C->tcIndex); if (normalsPerFacet) baseMesh->addTriangleNormalIndexes(A->nIndex, B->nIndex, C->nIndex); } } /*** polyline ***/ else if (tokens.front().startsWith('l')) { //malformed line? if (tokens.size() < 3) { objWarnings[INVALID_LINE] = true; currentLine = stream.readLine(); continue; } //read the face elements (singleton, pair or triplet) ccPolyline* polyline = new ccPolyline(vertices); if (!polyline->reserve(static_cast<unsigned>(tokens.size()-1))) { //not enough memory objWarnings[NOT_ENOUGH_MEMORY] = true; delete polyline; polyline = 0; currentLine = stream.readLine(); continue; } for (int i=1; i<tokens.size(); ++i) { //get next polyline's vertex index QStringList vertexTokens = tokens[i].split('/'); if (vertexTokens.size() == 0 || vertexTokens[0].isEmpty()) { objWarnings[INVALID_LINE] = true; error = true; break; } else { int index = vertexTokens[0].toInt(); //we ignore normal index (if any!) if (!UpdatePointIndex(index,pointsRead)) { objWarnings[INVALID_INDEX] = true; error = true; break; } polyline->addPointIndex(index); } } if (error) { delete polyline; polyline = 0; break; } polyline->setVisible(true); QString name = groups.empty() ? QString("Line") : groups.back().second+QString(".line"); polyline->setName(QString("%1 %2").arg(name).arg(++polyCount)); vertices->addChild(polyline); } /*** material ***/ else if (tokens.front() == "usemtl") //see 'MTL file' below { if (materials) //otherwise we have failed to load MTL file!!! { QString mtlName = currentLine.mid(7).trimmed(); //DGM: in case there's space characters in the material name, we must read it again from the original line buffer //QString mtlName = (tokens.size() > 1 && !tokens[1].isEmpty() ? tokens[1] : ""); currentMaterial = (!mtlName.isEmpty() ? materials->findMaterialByName(mtlName) : -1); currentMaterialDefined = true; } } /*** material file (MTL) ***/ else if (tokens.front() == "mtllib") { //malformed line? if (tokens.size() < 2 || tokens[1].isEmpty()) { objWarnings[INVALID_LINE] = true; } else { //we build the whole MTL filename + path //DGM: in case there's space characters in the filename, we must read it again from the original line buffer //QString mtlFilename = tokens[1]; QString mtlFilename = currentLine.mid(7).trimmed(); ccLog::Print(QString("[OBJ] Material file: ")+mtlFilename); QString mtlPath = QFileInfo(filename).canonicalPath(); //we try to load it if (!materials) { materials = new ccMaterialSet("materials"); materials->link(); } size_t oldSize = materials->size(); QStringList errors; if (ccMaterialSet::ParseMTL(mtlPath,mtlFilename,*materials,errors)) { ccLog::Print("[OBJ] %i materials loaded",materials->size()-oldSize); materialsLoadFailed = false; } else { ccLog::Error(QString("[OBJ] Failed to load material file! (should be in '%1')").arg(mtlPath+'/'+QString(mtlFilename))); materialsLoadFailed = true; } if (!errors.empty()) { for (int i=0; i<errors.size(); ++i) ccLog::Warning(QString("[OBJ::Load::MTL parser] ")+errors[i]); } if (materials->empty()) { materials->release(); materials=0; materialsLoadFailed = true; } } } ///*** shading group ***/ //else if (tokens.front() == "s") //{ // //ignored! 
//} if (error) break; currentLine = stream.readLine(); } } catch (const std::bad_alloc&) { //not enough memory objWarnings[NOT_ENOUGH_MEMORY] = true; error = true; } file.close(); //1st check if (!error && pointsRead == 0) { //of course if there's no vertex, that's the end of the story ... ccLog::Warning("[OBJ] Malformed file: no vertex in file!"); error = true; } if (!error) { ccLog::Print("[OBJ] %i points, %u faces",pointsRead,totalFacesRead); if (texCoordsRead > 0 || normsRead > 0) ccLog::Print("[OBJ] %i tex. coords, %i normals",texCoordsRead,normsRead); //do some cleaning vertices->shrinkToFit(); if (normals) normals->shrinkToFit(); if (texCoords) texCoords->shrinkToFit(); if (baseMesh->size() == 0) { delete baseMesh; baseMesh = 0; } else { baseMesh->shrinkToFit(); } if ( maxVertexIndex >= pointsRead || maxTexCoordIndex >= texCoordsRead || maxTriNormIndex >= normsRead) { //hum, we've got a problem here ccLog::Warning("[OBJ] Malformed file: indexes go higher than the number of elements! (v=%i/tc=%i/n=%i)",maxVertexIndex,maxTexCoordIndex,maxTriNormIndex); if (maxVertexIndex >= pointsRead) { error = true; } else { objWarnings[INVALID_INDEX] = true; if (maxTexCoordIndex >= texCoordsRead) { texCoords->release(); texCoords = 0; materials->release(); materials = 0; } if (maxTriNormIndex >= normsRead) { normals->release(); normals = 0; } } } if (!error && baseMesh) { if (normals && normalsPerFacet) { baseMesh->setTriNormsTable(normals); baseMesh->showTriNorms(true); } if (materials) { baseMesh->setMaterialSet(materials); baseMesh->showMaterials(true); } if (texCoords) { if (materials) { baseMesh->setTexCoordinatesTable(texCoords); } else { ccLog::Warning("[OBJ] Texture coordinates were defined but no material could be loaded!"); } } //normals: if the obj file doesn't provide any, should we compute them? if (!normals) { //DGM: normals can be per-vertex or per-triangle so it's better to let the user do it himself later //Moreover it's not always good idea if the user doesn't want normals (especially in ccViewer!) //if (!materials && !baseMesh->hasColors()) //yes if no material is available! //{ // ccLog::Print("[OBJ] Mesh has no normal! We will compute them automatically"); // baseMesh->computeNormals(); // baseMesh->showNormals(true); //} //else { ccLog::Warning("[OBJ] Mesh has no normal! You can manually compute them (select it then call \"Edit > Normals > Compute\")"); } } //create sub-meshes if necessary ccLog::Print("[OBJ] 1 mesh loaded - %i group(s)", groups.size()); if (groups.size() > 1) { for (size_t i=0; i<groups.size(); ++i) { const QString& groupName = groups[i].second; unsigned startIndex = groups[i].first; unsigned endIndex = (i+1 == groups.size() ? baseMesh->size() : groups[i+1].first); if (startIndex == endIndex) { continue; } ccSubMesh* subTri = new ccSubMesh(baseMesh); if (subTri->reserve(endIndex-startIndex)) { subTri->addTriangleIndex(startIndex,endIndex); subTri->setName(groupName); subTri->showMaterials(baseMesh->materialsShown()); subTri->showNormals(baseMesh->normalsShown()); subTri->showTriNorms(baseMesh->triNormsShown()); //subTri->showColors(baseMesh->colorsShown()); //subTri->showWired(baseMesh->isShownAsWire()); baseMesh->addChild(subTri); } else { delete subTri; subTri = 0; objWarnings[NOT_ENOUGH_MEMORY] = true; } } baseMesh->setVisible(false); vertices->setLocked(true); } baseMesh->addChild(vertices); //DGM: we can't deactive the vertices if it has children! 
(such as polyline) if (vertices->getChildrenNumber() != 0) vertices->setVisible(false); else vertices->setEnabled(false); container.addChild(baseMesh); } if (!baseMesh && vertices->size() != 0) { //no (valid) mesh! container.addChild(vertices); //we hide the vertices if the entity has children (probably polylines!) if (vertices->getChildrenNumber() != 0) { vertices->setVisible(false); } } //special case: normals held by cloud! if (normals && !normalsPerFacet) { if (normsRead == pointsRead) //must be 'per-vertex' normals { vertices->setNormsTable(normals); if (baseMesh) baseMesh->showNormals(true); } else { ccLog::Warning("File contains normals which seem to be neither per-vertex nor per-face!!! We had to ignore them..."); } } } if (error) { if (baseMesh) delete baseMesh; if (vertices) delete vertices; } //release shared structures if (normals) { normals->release(); normals = 0; } if (texCoords) { texCoords->release(); texCoords = 0; } if (materials) { materials->release(); materials = 0; } pDlg.close(); //potential warnings if (objWarnings[INVALID_NORMALS]) ccLog::Warning("[OBJ] Some normals in file were invalid. You should re-compute them (select entity, then \"Edit > Normals > Compute\")"); if (objWarnings[INVALID_INDEX]) ccLog::Warning("[OBJ] File is malformed! Check indexes..."); if (objWarnings[NOT_ENOUGH_MEMORY]) ccLog::Warning("[OBJ] Not enough memory!"); if (objWarnings[INVALID_LINE]) ccLog::Warning("[OBJ] File is malformed! Missing data."); if (error) { if (objWarnings[NOT_ENOUGH_MEMORY]) return CC_FERR_NOT_ENOUGH_MEMORY; else if (objWarnings[CANCELLED_BY_USER]) return CC_FERR_CANCELED_BY_USER; else return CC_FERR_MALFORMED_FILE; } else { return CC_FERR_NO_ERROR; } }
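//Illustrative sketch (hypothetical helper, not part of the OBJ loader above): the face and
//polyline parsers rely on UpdatePointIndex to turn OBJ indices into 0-based array indices.
//The convention is assumed here from the OBJ specification: positive indices are 1-based,
//negative indices are relative to the last element read so far.
static bool ResolveObjIndex(int& index, int elementCount)
{
	if (index > 0 && index <= elementCount)
	{
		--index; //1-based --> 0-based
	}
	else if (index < 0 && -index <= elementCount)
	{
		index += elementCount; //relative index (-1 = last element read)
	}
	else
	{
		return false; //out of range
	}
	return true;
}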
CC_FILE_ERROR OFFFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters) { //try to open file QFile fp(filename); if (!fp.open(QIODevice::ReadOnly | QIODevice::Text)) return CC_FERR_READING; QTextStream stream(&fp); QString currentLine = stream.readLine(); if (!currentLine.toUpper().startsWith("OFF")) return CC_FERR_MALFORMED_FILE; //check if the number of vertices/faces/etc. are on the first line (yes it happens :( ) QStringList tokens = currentLine.split(QRegExp("\\s+"),QString::SkipEmptyParts); if (tokens.size() == 4) { tokens.removeAt(0); } else { currentLine = GetNextLine(stream); //end of file already?! if (currentLine.isNull()) return CC_FERR_MALFORMED_FILE; //read the number of vertices/faces tokens = currentLine.split(QRegExp("\\s+"),QString::SkipEmptyParts); if (tokens.size() < 2/*3*/) //should be 3 but we only use the 2 firsts... return CC_FERR_MALFORMED_FILE; } bool ok = false; unsigned vertCount = tokens[0].toUInt(&ok); if (!ok) return CC_FERR_MALFORMED_FILE; unsigned triCount = tokens[1].toUInt(&ok); if (!ok) return CC_FERR_MALFORMED_FILE; //create cloud and reserve some memory ccPointCloud* vertices = new ccPointCloud("vertices"); if (!vertices->reserve(vertCount)) { delete vertices; return CC_FERR_NOT_ENOUGH_MEMORY; } //read vertices { CCVector3d Pshift(0,0,0); for (unsigned i=0; i<vertCount; ++i) { currentLine = GetNextLine(stream); tokens = currentLine.split(QRegExp("\\s+"),QString::SkipEmptyParts); if (tokens.size() < 3) { delete vertices; return CC_FERR_MALFORMED_FILE; } //read vertex CCVector3d Pd(0,0,0); { bool vertexIsOk = false; Pd.x = tokens[0].toDouble(&vertexIsOk); if (vertexIsOk) { Pd.y = tokens[1].toDouble(&vertexIsOk); if (vertexIsOk) Pd.z = tokens[2].toDouble(&vertexIsOk); } if (!vertexIsOk) { delete vertices; return CC_FERR_MALFORMED_FILE; } } //first point: check for 'big' coordinates if (i == 0) { if (HandleGlobalShift(Pd,Pshift,parameters)) { vertices->setGlobalShift(Pshift); ccLog::Warning("[OFF] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z); } } CCVector3 P = CCVector3::fromArray((Pd + Pshift).u); vertices->addPoint(P); } } ccMesh* mesh = new ccMesh(vertices); mesh->addChild(vertices); if (!mesh->reserve(triCount)) { delete mesh; return CC_FERR_NOT_ENOUGH_MEMORY; } //load triangles { bool ignoredPolygons = false; for (unsigned i=0; i<triCount; ++i) { currentLine = GetNextLine(stream); tokens = currentLine.split(QRegExp("\\s+"),QString::SkipEmptyParts); if (tokens.size() < 3) { delete mesh; return CC_FERR_MALFORMED_FILE; } unsigned polyVertCount = tokens[0].toUInt(&ok); if (!ok || static_cast<int>(polyVertCount) >= tokens.size()) { delete mesh; return CC_FERR_MALFORMED_FILE; } if (polyVertCount == 3 || polyVertCount == 4) { //decode indexes unsigned indexes[4]; for (unsigned j=0; j<polyVertCount; ++j) { indexes[j] = tokens[j+1].toUInt(&ok); if (!ok) { delete mesh; return CC_FERR_MALFORMED_FILE; } } //reserve memory if necessary unsigned polyTriCount = polyVertCount-2; if (mesh->size() + polyTriCount > mesh->capacity()) { if (!mesh->reserve(mesh->size() + polyTriCount + 256)) //use some margin to avoid too many allocations { delete mesh; return CC_FERR_NOT_ENOUGH_MEMORY; } } //triangle or quad only mesh->addTriangle(indexes[0],indexes[1],indexes[2]); if (polyVertCount == 4) mesh->addTriangle(indexes[0],indexes[2],indexes[3]); } else { ignoredPolygons = true; } } if (ignoredPolygons) { ccLog::Warning("[OFF] Some polygons with an unhandled size (i.e. 
> 4) were ignored!"); } } if (mesh->size() == 0) { ccLog::Warning("[OFF] Failed to load any polygon!"); mesh->detachChild(vertices); delete mesh; mesh = 0; container.addChild(vertices); vertices->setEnabled(true); } else { mesh->shrinkToFit(); //DGM: normals can be per-vertex or per-triangle so it's better to let the user do it himself later //Moreover it's not always a good idea if the user doesn't want normals (especially in ccViewer!) //if (mesh->computeNormals()) // mesh->showNormals(true); //else // ccLog::Warning("[OFF] Failed to compute per-vertex normals..."); ccLog::Warning("[OFF] Mesh has no normals! You can manually compute them (select it then call \"Edit > Normals > Compute\")"); vertices->setEnabled(false); //vertices->setLocked(true); //DGM: no need to lock it as it is only used by one mesh! container.addChild(mesh); } return CC_FERR_NO_ERROR; }
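//Illustrative sketch (hypothetical helper, not part of the OFF loader above): the quad case
//above is a 'fan' around the first vertex; the same decomposition extends to any convex n-gon.
//Assumes <vector> and <array> are available; only valid for convex polygons.
static void FanTriangulate(const std::vector<unsigned>& polyIndexes,
							std::vector< std::array<unsigned,3> >& trianglesOut)
{
	//emit (v0, v[i-1], v[i]) for i = 2..n-1 (the quad case above is exactly i = 2 and i = 3)
	for (size_t i = 2; i < polyIndexes.size(); ++i)
	{
		std::array<unsigned,3> tri = { polyIndexes[0], polyIndexes[i-1], polyIndexes[i] };
		trianglesOut.push_back(tri);
	}
}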
CC_FILE_ERROR VTKFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters) { //open ASCII file for reading QFile file(filename); if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) return CC_FERR_READING; QTextStream inFile(&file); //read header QString nextline = inFile.readLine(); if (!nextline.startsWith("# vtk")) return CC_FERR_MALFORMED_FILE; //comment nextline = inFile.readLine(); ccLog::Print(QString("[VTK] ")+nextline); ccMesh* mesh = 0; ccPointCloud* vertices = 0; std::vector<int> indexes; //global so as to avoid unnecessary mem. allocations QString lastSfName; bool acceptLookupTables = true; unsigned lastDataSize = 0; QString fileType = inFile.readLine().toUpper(); if (fileType.startsWith("BINARY")) { //binary not supported yet! ccLog::Error("VTK binary format not supported yet!"); return CC_FERR_WRONG_FILE_TYPE; } else if (fileType.startsWith("ASCII")) { //allow blank lines QString dataType; if (!GetNextNonEmptyLine(inFile,dataType)) return CC_FERR_MALFORMED_FILE; if (!dataType.startsWith("DATASET")) return CC_FERR_MALFORMED_FILE; dataType.remove(0,8); if (dataType.startsWith("POLYDATA")) { vertices = new ccPointCloud("vertices"); mesh = new ccMesh(vertices); } else if (dataType.startsWith("UNSTRUCTURED_GRID")) { vertices = new ccPointCloud("unnamed - VTK unstructured grid"); } else { ccLog::Error(QString("VTK entity '%1' is not supported!").arg(dataType)); return CC_FERR_WRONG_FILE_TYPE; } } //loop on keywords/data CC_FILE_ERROR error = CC_FERR_NO_ERROR; CCVector3d Pshift(0,0,0); bool skipReadLine = false; while (error == CC_FERR_NO_ERROR) { if (!skipReadLine && !GetNextNonEmptyLine(inFile,nextline)) break; //end of file skipReadLine = false; assert(!nextline.isEmpty()); if (nextline.startsWith("POINTS")) { QStringList parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.size() != 3) { error=CC_FERR_MALFORMED_FILE; break; } bool ok = false; unsigned ptsCount = parts[1].toInt(&ok); if (!ok) { error = CC_FERR_MALFORMED_FILE; break; } //QString dataFormat = parts[3].toUpper(); //char buffer[8]; //unsigned char datSize = 4; //if (dataFormat == "DOUBLE") //{ // datSize = 8; //} //else if (dataFormat != "FLOAT") //{ // ccLog::Error(QString("Non floating point data (%1) is not supported!").arg(dataFormat)); // error = CC_FERR_WRONG_FILE_TYPE; // break; //} if (!vertices->reserve(ptsCount)) { error = CC_FERR_NOT_ENOUGH_MEMORY; break; } //warning: multiple points can be stored on a single line! unsigned iPt = 0; CCVector3d Pd(0,0,0); unsigned coordIndex = 0; while (iPt < ptsCount) { nextline = inFile.readLine(); parts = nextline.split(" ",QString::SkipEmptyParts); for (int i=0; i<parts.size(); ++i) { Pd.u[coordIndex] = parts[i].toDouble(&ok); if (!ok) { ccLog::Warning("[VTK] Element #%1 of POINTS data is corrupted!",iPt); error = CC_FERR_MALFORMED_FILE; iPt = ptsCount; break; } if (coordIndex == 2) { //first point: check for 'big' coordinates if (iPt == 0) { if (HandleGlobalShift(Pd,Pshift,parameters)) { vertices->setGlobalShift(Pshift); ccLog::Warning("[VTKFilter::loadFile] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z); } } CCVector3 P = CCVector3::fromArray((Pd + Pshift).u); vertices->addPoint(P); coordIndex = 0; ++iPt; } else { ++coordIndex; } } } //end POINTS } else if (nextline.startsWith("POLYGONS") || nextline.startsWith("TRIANGLE_STRIPS")) { QStringList parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.size() != 3) { error = CC_FERR_MALFORMED_FILE; break; } //current type name (i.e. 
POLYGONS or TRIANGLE_STRIPS) QString typeName = parts[0]; bool isPolygon = (typeName == "POLYGONS"); bool ok = false; unsigned elemCount = parts[1].toUInt(&ok); if (!ok) { error = CC_FERR_MALFORMED_FILE; break; } // unsigned totalElements = parts[2].toUInt(&ok); if (!ok) { error = CC_FERR_MALFORMED_FILE; break; } assert(mesh); if (!mesh) { ccLog::Warning(QString("[VTK] We found %1 data while file is not composed of POLYDATA!").arg(typeName)); mesh = new ccMesh(vertices); //however, we can still try to load it? } for (unsigned i=0; i<elemCount; ++i) { nextline = inFile.readLine(); parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.empty()) { error = CC_FERR_MALFORMED_FILE; break; } unsigned vertCount = parts[0].toUInt(&ok); if (!ok || static_cast<int>(vertCount) >= parts.size()) { error = CC_FERR_MALFORMED_FILE; break; } else if (vertCount < 3) { ccLog::Warning(QString("[VTK] Element #%1 of %2 data is corrupted! (not enough indexes)").arg(i).arg(typeName)); } if (isPolygon && (vertCount != 3 && vertCount != 4)) //quads are easy to handle as well! { ccLog::Warning(QString("[VTK] POLYGON element #%1 has an unhandled size (> 4 vertices)").arg(i)); continue; } //reserve mem to. store indexes if (indexes.size() < vertCount) { try { indexes.resize(vertCount); } catch (const std::bad_alloc&) { error = CC_FERR_NOT_ENOUGH_MEMORY; break; } } //decode indexes for (unsigned j=0; j<vertCount; ++j) { indexes[j] = parts[j+1].toUInt(&ok); if (!ok) { ccLog::Warning(QString("[VTK] Element #%1 of %2 data is corrupted! (invalid index value)").arg(i).arg(typeName)); error = CC_FERR_MALFORMED_FILE; break; } } //add the triangles { assert(vertCount > 2); unsigned triCount = vertCount-2; if (mesh->size() + triCount > mesh->maxSize()) { if (!mesh->reserve(mesh->size()+triCount+256)) //take some advance to avoid too many allocations { error = CC_FERR_NOT_ENOUGH_MEMORY; break; } } if (isPolygon) { //triangle or quad mesh->addTriangle(indexes[0],indexes[1],indexes[2]); if (vertCount == 4) mesh->addTriangle(indexes[0],indexes[2],indexes[3]); } else { //triangle strip for (unsigned j=0; j<triCount; ++j) mesh->addTriangle(indexes[j],indexes[j+1],indexes[j+2]); } } } if (mesh->size() != 0 && mesh->size() < mesh->maxSize()) { mesh->resize(mesh->size()); } //end POLYGONS or TRIANGLE_STRIPS } else if (nextline.startsWith("NORMALS")) { if (lastDataSize == 0) lastDataSize = vertices->size(); if (lastDataSize == 0) { error = CC_FERR_MALFORMED_FILE; break; } bool loadNormals = false; if (lastDataSize == vertices->size()) { if (!vertices->reserveTheNormsTable()) ccLog::Warning("[VTK] Not enough memory to load normals!"); else loadNormals = true; } //warning: multiple normals can be stored on a single line! 
unsigned iNorm = 0; CCVector3 N; unsigned coordIndex = 0; while (iNorm < lastDataSize) { nextline = inFile.readLine(); QStringList parts = nextline.split(" ",QString::SkipEmptyParts); for (int i=0; i<parts.size(); ++i) { bool ok; N.u[coordIndex] = static_cast<PointCoordinateType>(parts[i].toDouble(&ok)); if (!ok) { ccLog::Warning("[VTK] Element #%1 of NORMALS data is corrupted!",iNorm); error = CC_FERR_MALFORMED_FILE; iNorm = lastDataSize; break; } if (coordIndex == 2) { if (loadNormals) vertices->addNorm(N); coordIndex = 0; ++iNorm; } else { ++coordIndex; } } } lastDataSize = 0; //lastDataSize is consumed //end NORMALS } else if (nextline.startsWith("COLOR_SCALARS")) { if (lastDataSize == 0) lastDataSize = vertices->size(); if (lastDataSize == 0) { error = CC_FERR_MALFORMED_FILE; break; } bool loadRGBColors = vertices->reserveTheRGBTable(); if (!loadRGBColors) ccLog::Warning("[VTK] Not enough memory to load RGB colors!"); //warning: multiple colors can be stored on a single line! unsigned iCol = 0; colorType rgb[3]; unsigned coordIndex = 0; while (iCol < lastDataSize) { nextline = inFile.readLine(); QStringList parts = nextline.split(" ",QString::SkipEmptyParts); for (int i=0; i<parts.size(); ++i) { bool ok; rgb[coordIndex] = static_cast<colorType>(parts[i].toDouble(&ok) * ccColor::MAX); if (!ok) { ccLog::Warning("[VTK] Element #%1 of COLOR_SCALARS data is corrupted!",iCol); error = CC_FERR_MALFORMED_FILE; iCol = lastDataSize; break; } if (coordIndex == 2) { if (loadRGBColors) vertices->addRGBColor(rgb); coordIndex = 0; ++iCol; } else { ++coordIndex; } } } lastDataSize = 0; //lastDataSize is consumed //end COLOR_SCALARS } else if (nextline.startsWith("SCALARS")) { QStringList parts = nextline.split(" ",QString::SkipEmptyParts); lastSfName = "ScalarField"; if (parts.size() > 1) lastSfName = parts[1].replace("_"," "); //SF already exists? if (vertices->getScalarFieldIndexByName(qPrintable(lastSfName)) >= 0) lastSfName += QString(" (%1)").arg(vertices->getNumberOfScalarFields()); //end of SCALARS } else if (nextline.startsWith("LOOKUP_TABLE") || nextline.startsWith("VECTORS")) { bool expected = (lastDataSize != 0); assert(!acceptLookupTables || expected); //i.e. lastDataSize shouldn't be 0 for 'accepted' lookup tables QStringList parts = nextline.split(" ",QString::SkipEmptyParts); QString itemName = parts[0]; if (parts.size() > 2) { bool ok = false; int valCount = parts[2].toUInt(&ok); if (ok) lastDataSize = valCount; } else if (!expected) { ccLog::Warning(QString("[VTK] field %1 has no size?!").arg(itemName)); error = CC_FERR_MALFORMED_FILE; break; } bool createSF = (vertices->size() == lastDataSize && vertices->size() != 0); if (acceptLookupTables && !createSF) { ccLog::Warning(QString("[VTK] field %1 has not the right number of points (will be ignored)").arg(itemName)); } createSF &= (acceptLookupTables || expected); if (createSF && lastSfName.isNull()) { ccLog::Warning(QString("[VTK] field %1 has no name (will be ignored)").arg(itemName)); createSF = false; } else if (!expected) { ccLog::Warning(QString("[VTK] field %1 was not expected (will be ignored)").arg(itemName)); } //create scalar field? ccScalarField* sf = 0; if (createSF) { sf = new ccScalarField(qPrintable(lastSfName)); if (!sf->reserve(lastDataSize)) { ccLog::Warning(QString("[VTK] Not enough memory to load scalar field' %1' (will be ignored)").arg(lastSfName)); sf->release(); sf = 0; } } lastSfName.clear(); //name is "consumed" //warning: multiple colors can be stored on a single line! 
unsigned iScal = 0; while (iScal < lastDataSize) { nextline = inFile.readLine(); QStringList parts = nextline.split(" ",QString::SkipEmptyParts); if (expected) { for (int i=0; i<parts.size(); ++i) { bool ok; ScalarType d = static_cast<ScalarType>(parts[i].toDouble(&ok)); if (!ok) { ccLog::Warning("[VTK] Element #%1 of LOOKUP_TABLE/VECTORS data is corrupted!",iScal); error = CC_FERR_MALFORMED_FILE; if (sf) { sf->release(); sf = 0; } iScal = lastDataSize; break; } if (sf) sf->addElement(d); ++iScal; } } else { //hard to guess the right format, but an unexpected field seem to always be //organized as 'one element per line' ++iScal; } } lastDataSize = 0; //lastDataSize is "consumed" acceptLookupTables = false; if (sf) { sf->computeMinAndMax(); int newSFIndex = vertices->addScalarField(sf); if (newSFIndex == 0) vertices->setCurrentDisplayedScalarField(newSFIndex); vertices->showSF(true); } //end of SCALARS } else if (nextline.startsWith("POINT_DATA")) { //check that the number of 'point_data' match the number of points QStringList parts = nextline.split(" ",QString::SkipEmptyParts); acceptLookupTables = false; if (parts.size() > 1) { bool ok; lastDataSize = parts[1].toUInt(&ok); acceptLookupTables = ok && vertices; } } else if (nextline.startsWith("FIELD")) { QStringList parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.size() < 2) { error = CC_FERR_MALFORMED_FILE; break; } bool ok; unsigned elements = parts[2].toUInt(&ok); if (!ok) { error = CC_FERR_MALFORMED_FILE; break; } elements *= 2; //we don't know how to handle those properly but at least //we know that for FIELD elements, there's 2 lines per element... for (unsigned i=0; i<elements; ++i) { inFile.readLine(); //ignore } } else //unhandled property (CELLS, CELL_TYPES, etc.) { QStringList parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.size() < 2) { ccLog::Warning(QString("[VTK] Unhandled element: %1").arg(parts[0])); error = CC_FERR_MALFORMED_FILE; break; } bool ok; unsigned elements = parts[1].toUInt(&ok); if (!ok) { error = CC_FERR_MALFORMED_FILE; break; } if (nextline.startsWith("CELL_DATA")) { //read next line (in case we actually know how to read it! if (!GetNextNonEmptyLine(inFile,nextline)) { error = CC_FERR_MALFORMED_FILE; break; } skipReadLine = true; if ( nextline.startsWith("SCALARS") || nextline.startsWith("NORMALS") || nextline.startsWith("COLOR_SCALARS")) { lastDataSize = elements; acceptLookupTables = false; //this property is for triangles! continue; } } //we'll try to blindly skip the elements... for (unsigned i=0; i<elements; ++i) { inFile.readLine(); //ignore } //end unhandled property } if (error != CC_FERR_NO_ERROR) break; } file.close(); if (vertices && vertices->size() == 0) { delete vertices; vertices = 0; if (error == CC_FERR_NO_ERROR) error = CC_FERR_NO_LOAD; } if (mesh && (mesh->size() == 0 || vertices == 0)) { delete mesh; mesh = 0; if (error == CC_FERR_NO_ERROR) error = CC_FERR_NO_LOAD; } if (mesh) { container.addChild(mesh); mesh->setVisible(true); mesh->addChild(vertices); vertices->setEnabled(false); vertices->setName("Vertices"); vertices->setLocked(true); //DGM: no need to lock it as it is only used by one mesh! //DGM: normals can be per-vertex or per-triangle so it's better to let the user do it himself later //Moreover it's not always good idea if the user doesn't want normals (especially in ccViewer!) if (!mesh->hasNormals()) { // mesh->computeNormals(); ccLog::Warning("[VTK] Mesh has no normal! 
You can manually compute them (select it then call \"Edit > Normals > Compute\")"); } mesh->showNormals(mesh->hasNormals()); if (vertices->hasScalarFields()) { vertices->setCurrentDisplayedScalarField(0); mesh->showSF(true); } if (vertices->hasColors()) mesh->showColors(true); } else if (vertices) { container.addChild(vertices); vertices->setVisible(true); if (vertices->hasNormals()) vertices->showNormals(true); if (vertices->hasScalarFields()) { vertices->setCurrentDisplayedScalarField(0); vertices->showSF(true); } if (vertices->hasColors()) vertices->showColors(true); } return error; }
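//Illustrative sketch (hypothetical helper, not part of the VTK filter above): the POINTS,
//NORMALS and COLOR_SCALARS readers accumulate values token by token because a single line
//may hold several tuples (or a tuple may span several lines). A standalone version of that
//pattern, assuming <vector> is available:
static bool ReadVtkTriples(QTextStream& inFile, unsigned tupleCount, std::vector<CCVector3d>& tuplesOut)
{
	CCVector3d Pd(0,0,0);
	unsigned coordIndex = 0;
	while (tuplesOut.size() < tupleCount)
	{
		QString line = inFile.readLine();
		if (line.isNull())
			return false; //premature end of file
		QStringList parts = line.split(" ",QString::SkipEmptyParts);
		for (int i=0; i<parts.size(); ++i)
		{
			bool ok = false;
			Pd.u[coordIndex] = parts[i].toDouble(&ok);
			if (!ok)
				return false; //corrupted value
			if (coordIndex == 2)
			{
				tuplesOut.push_back(Pd); //a full (x,y,z) tuple has been read
				coordIndex = 0;
			}
			else
			{
				++coordIndex;
			}
		}
	}
	return true;
}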
CC_FILE_ERROR VTKFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, CCVector3d* coordinatesShift/*=0*/) { //open ASCII file for reading QFile file(filename); if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) return CC_FERR_READING; QTextStream inFile(&file); //read header QString nextline = inFile.readLine(); if (!nextline.startsWith("# vtk")) return CC_FERR_MALFORMED_FILE; //comment nextline = inFile.readLine(); ccLog::Print(QString("[VTK] ")+nextline); ccMesh* mesh = 0; ccPointCloud* vertices = 0; std::vector<int> indexes; //global so as to avoid unnecessary mem. allocations QString lastSfName; bool acceptLookupTables = true; QString fileType = inFile.readLine().toUpper(); if (fileType.startsWith("BINARY")) { //binary not supported yet! ccLog::Error("VTK binary format not supported yet!"); return CC_FERR_WRONG_FILE_TYPE; } else if (fileType.startsWith("ASCII")) { //allow blank lines QString dataType; if (!GetNextNonEmptyLine(inFile,dataType)) return CC_FERR_MALFORMED_FILE; if (!dataType.startsWith("DATASET")) return CC_FERR_MALFORMED_FILE; dataType.remove(0,8); if (dataType.startsWith("POLYDATA")) { vertices = new ccPointCloud("vertices"); mesh = new ccMesh(vertices); } else if (dataType.startsWith("UNSTRUCTURED_GRID")) { vertices = new ccPointCloud("unnamed - VTK unstructured grid"); } else { ccLog::Error(QString("VTK entity '%1' is not supported!").arg(dataType)); return CC_FERR_WRONG_FILE_TYPE; } } //loop on keywords/data CC_FILE_ERROR error = CC_FERR_NO_ERROR; CCVector3d Pshift(0,0,0); while (error == CC_FERR_NO_ERROR) { if (!GetNextNonEmptyLine(inFile,nextline)) break; //end of file assert(!nextline.isEmpty()); if (nextline.startsWith("POINTS")) { QStringList parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.size() != 3) { error=CC_FERR_MALFORMED_FILE; break; } bool ok = false; unsigned ptsCount = parts[1].toInt(&ok); if (!ok) { error = CC_FERR_MALFORMED_FILE; break; } //QString dataFormat = parts[3].toUpper(); //char buffer[8]; //unsigned char datSize = 4; //if (dataFormat == "DOUBLE") //{ // datSize = 8; //} //else if (dataFormat != "FLOAT") //{ // ccLog::Error(QString("Non floating point data (%1) is not supported!").arg(dataFormat)); // error = CC_FERR_WRONG_FILE_TYPE; // break; //} if (!vertices->reserve(ptsCount)) { error = CC_FERR_NOT_ENOUGH_MEMORY; break; } for (unsigned i=0; i<ptsCount; ++i) { nextline = inFile.readLine(); parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.size() != 3) { error = CC_FERR_MALFORMED_FILE; break; } double Pd[3] = {0,0,0}; for (unsigned char j=0; j<3; ++j) { Pd[j] = parts[j].toDouble(&ok); if (!ok) { ccLog::Warning("[VTK] Element #%1 of POINTS data is corrupted!",i); error = CC_FERR_MALFORMED_FILE; break; } } //first point: check for 'big' coordinates if (i == 0) { bool shiftAlreadyEnabled = (coordinatesShiftEnabled && *coordinatesShiftEnabled && coordinatesShift); if (shiftAlreadyEnabled) Pshift = *coordinatesShift; bool applyAll = false; if ( sizeof(PointCoordinateType) < 8 && ccCoordinatesShiftManager::Handle(Pd,0,alwaysDisplayLoadDialog,shiftAlreadyEnabled,Pshift,0,applyAll)) { vertices->setGlobalShift(Pshift); ccLog::Warning("[VTKFilter::loadFile] Cloud has been recentered! 
Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z); //we save coordinates shift information if (applyAll && coordinatesShiftEnabled && coordinatesShift) { *coordinatesShiftEnabled = true; *coordinatesShift = Pshift; } } } vertices->addPoint(CCVector3( static_cast<PointCoordinateType>(Pd[0] + Pshift.x), static_cast<PointCoordinateType>(Pd[1] + Pshift.y), static_cast<PointCoordinateType>(Pd[2] + Pshift.z)) ); } //end POINTS } else if (nextline.startsWith("POLYGONS") || nextline.startsWith("TRIANGLE_STRIPS")) { QStringList parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.size() != 3) { error = CC_FERR_MALFORMED_FILE; break; } //current type name (i.e. POLYGONS or TRIANGLE_STRIPS) QString typeName = parts[0]; bool isPolygon = (typeName == "POLYGONS"); bool ok = false; unsigned elemCount = parts[1].toUInt(&ok); if (!ok) { error = CC_FERR_MALFORMED_FILE; break; } unsigned totalElements = parts[2].toUInt(&ok); if (!ok) { error = CC_FERR_MALFORMED_FILE; break; } assert(mesh); if (!mesh) { ccLog::Warning(QString("[VTK] We found %1 data while file is not composed of POLYDATA!").arg(typeName)); mesh = new ccMesh(vertices); //however, we can still try to load it? } for (unsigned i=0; i<elemCount; ++i) { nextline = inFile.readLine(); parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.empty()) { error = CC_FERR_MALFORMED_FILE; break; } unsigned vertCount = parts[0].toUInt(&ok); if (!ok || static_cast<int>(vertCount) >= parts.size()) { error = CC_FERR_MALFORMED_FILE; break; } else if (vertCount < 3) { ccLog::Warning(QString("[VTK] Element #%1 of %2 data is corrupted! (not enough indexes)").arg(i).arg(typeName)); } if (isPolygon && (vertCount != 3 && vertCount != 4)) //quads are easy to handle as well! { ccLog::Warning(QString("[VTK] POLYGON element #%1 has an unhandled size (> 4 vertices)").arg(i)); continue; } //reserve mem to. store indexes if (indexes.size() < vertCount) { try { indexes.resize(vertCount); } catch (std::bad_alloc) { error = CC_FERR_NOT_ENOUGH_MEMORY; break; } } //decode indexes for (unsigned j=0; j<vertCount; ++j) { indexes[j] = parts[j+1].toUInt(&ok); if (!ok) { ccLog::Warning(QString("[VTK] Element #%1 of %2 data is corrupted! 
(invalid index value)").arg(i).arg(typeName)); error = CC_FERR_MALFORMED_FILE; break; } } //add the triangles { assert(vertCount > 2); unsigned triCount = vertCount-2; if (mesh->size() + triCount > mesh->maxSize()) { if (!mesh->reserve(mesh->size()+triCount+256)) //take some advance to avoid too many allocations { error = CC_FERR_NOT_ENOUGH_MEMORY; break; } } if (isPolygon) { //triangle or quad mesh->addTriangle(indexes[0],indexes[1],indexes[2]); if (vertCount == 4) mesh->addTriangle(indexes[0],indexes[2],indexes[3]); } else { //triangle strip for (unsigned j=0; j<triCount; ++j) mesh->addTriangle(indexes[j],indexes[j+1],indexes[j+2]); } } } if (mesh->size() != 0 && mesh->size() < mesh->maxSize()) { mesh->resize(mesh->size()); } //end POLYGONS or TRIANGLE_STRIPS } else if (nextline.startsWith("NORMALS")) { unsigned ptsCount = vertices->size(); if (vertices->size() == 0) { error = CC_FERR_MALFORMED_FILE; break; } else { bool loadNormals = vertices->reserveTheNormsTable(); if (!loadNormals) ccLog::Warning("[VTK] Not enough memory to load normals!"); for (unsigned i=0; i<ptsCount; ++i) { nextline = inFile.readLine(); if (loadNormals) { QStringList parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.size() != 3) { error = CC_FERR_MALFORMED_FILE; break; } CCVector3 N; for (unsigned char j=0; j<3; ++j) { bool ok; N.u[j] = (PointCoordinateType)parts[j].toDouble(&ok); if (!ok) { ccLog::Warning("[VTK] Element #%1 of NORMALS data is corrupted!",i); error = CC_FERR_MALFORMED_FILE; break; } } vertices->addNorm(N); } } } //end NORMALS } else if (nextline.startsWith("COLOR_SCALARS")) { unsigned ptsCount = vertices->size(); if (vertices->size() == 0) { error = CC_FERR_MALFORMED_FILE; break; } else { bool loadRGBColors = vertices->reserveTheRGBTable(); if (!loadRGBColors) ccLog::Warning("[VTK] Not enough memory to load RGB colors!"); for (unsigned i=0; i<ptsCount; ++i) { nextline = inFile.readLine(); if (loadRGBColors) { QStringList parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.size() != 3) { error = CC_FERR_MALFORMED_FILE; break; } colorType rgb[3]; for (unsigned char j=0; j<3; ++j) { bool ok; rgb[j] = (colorType)(parts[j].toDouble(&ok) * (double)MAX_COLOR_COMP); if (!ok) { ccLog::Warning("[VTK] Element #%1 of COLOR_SCALARS data is corrupted!",i); error = CC_FERR_MALFORMED_FILE; break; } } vertices->addRGBColor(rgb); } } } //end COLOR_SCALARS } else if (nextline.startsWith("SCALARS")) { QStringList parts = nextline.split(" ",QString::SkipEmptyParts); lastSfName = "ScalarField"; if (parts.size() > 1) lastSfName = parts[1].replace("_"," "); //SF already exists? if (vertices->getScalarFieldIndexByName(qPrintable(lastSfName)) >= 0) lastSfName += QString(" (%1)").arg(vertices->getNumberOfScalarFields()); //end of SCALARS } else if (nextline.startsWith("LOOKUP_TABLE") || nextline.startsWith("VECTORS")) { unsigned ptsCount = vertices->size(); QStringList parts = nextline.split(" ",QString::SkipEmptyParts); QString itemName = parts[0]; if (parts.size() > 2) { bool ok = false; int valCount = parts[2].toUInt(&ok); if (ok) ptsCount = valCount; } bool createSF = (vertices->size() == ptsCount && vertices->size() != 0); if (acceptLookupTables && !createSF) { ccLog::Warning(QString("[VTK] field %1 has not the right number of points (will be ignored)").arg(itemName)); } createSF &= acceptLookupTables; if (createSF && lastSfName.isNull()) { ccLog::Warning(QString("[VTK] field %1 has no name (will be ignored)").arg(itemName)); createSF = false; } //create scalar field? 
int newSFIndex = createSF ? vertices->addScalarField(qPrintable(lastSfName)) : -1; CCLib::ScalarField* sf = newSFIndex >= 0 ? vertices->getScalarField(newSFIndex) : 0; lastSfName.clear(); //name is "consumed" for (unsigned i=0; i<ptsCount; ++i) { nextline = inFile.readLine(); if (sf) //otherwise we simply skip the line { QStringList parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.size() != 1) { //get rid of the scalar field :( vertices->deleteScalarField(newSFIndex); sf = 0; if (i == 0) { ccLog::Warning(QString("[VTK] %1 field with more than one element can't be imported as scalar fields!").arg(itemName)); } else { error = CC_FERR_MALFORMED_FILE; break; } } else { bool ok; ScalarType d = static_cast<ScalarType>(nextline.toDouble(&ok)); sf->setValue(i, ok ? d : NAN_VALUE); } } } if (sf) { sf->computeMinAndMax(); vertices->setCurrentDisplayedScalarField(newSFIndex); vertices->showSF(true); } //end of SCALARS } else if (nextline.startsWith("POINT_DATA")) { //check that the number of 'point_data' match the number of points QStringList parts = nextline.split(" ",QString::SkipEmptyParts); acceptLookupTables = false; if (parts.size() > 1) { bool ok; unsigned dataCount = parts[1].toUInt(&ok); if (ok && vertices && dataCount == vertices->size()) { acceptLookupTables = true; } } if (!acceptLookupTables) { ccLog::Warning("[VTK] The number of 'POINT_DATA' doesn't match the number of loaded points... lookup tables will be ignored"); } } else //unhandled property (CELLS, CELL_TYPES, etc.) { QStringList parts = nextline.split(" ",QString::SkipEmptyParts); if (parts.size() < 2) { ccLog::Warning(QString("[VTK] Unhandled element: %1").arg(parts[0])); error = CC_FERR_MALFORMED_FILE; break; } bool ok; unsigned elements = parts[1].toUInt(&ok); if (!ok) { error = CC_FERR_MALFORMED_FILE; break; } for (unsigned i=0; i<elements; ++i) { inFile.readLine(); //ignore } //end unhandled property } if (error != CC_FERR_NO_ERROR) break; } if (error != CC_FERR_NO_ERROR) { if (mesh) delete mesh; if (vertices) delete vertices; return CC_FERR_MALFORMED_FILE; } file.close(); if (mesh && mesh->size() == 0) { delete mesh; mesh = 0; } if (vertices->size() == 0) { delete vertices; return CC_FERR_NO_LOAD; } if (mesh) { container.addChild(mesh); mesh->setVisible(true); mesh->addChild(vertices); vertices->setVisible(false); vertices->setEnabled(false); vertices->setName("Vertices"); vertices->setLocked(true); //DGM: no need to lock it as it is only used by one mesh! //DGM: normals can be per-vertex or per-triangle so it's better to let the user do it himself later //Moreover it's not always good idea if the user doesn't want normals (especially in ccViewer!) //if (!mesh->hasNormals()) // mesh->computeNormals(); ccLog::Warning("[VTK] Mesh has no normal! You can manually compute them (select it then call \"Edit > Normals > Compute\")"); mesh->showNormals(mesh->hasNormals()); if (vertices->hasScalarFields()) { vertices->setCurrentDisplayedScalarField(0); mesh->showSF(true); } if (vertices->hasColors()) mesh->showColors(true); } else { container.addChild(vertices); vertices->setVisible(true); if (vertices->hasNormals()) vertices->showNormals(true); if (vertices->hasScalarFields()) { vertices->setCurrentDisplayedScalarField(0); vertices->showSF(true); } if (vertices->hasColors()) vertices->showColors(true); } return CC_FERR_NO_ERROR; }
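//Illustrative sketch (hypothetical helper, not part of the VTK filter above): both
//TRIANGLE_STRIPS loops emit (j, j+1, j+2) directly; since a strip's orientation alternates
//from one triangle to the next, a winding-aware decomposition would flip every other
//triangle, as sketched below. Assumes <vector> and <array> are available.
static void DecomposeTriangleStrip(const std::vector<int>& strip,
									std::vector< std::array<int,3> >& trianglesOut)
{
	for (size_t j = 2; j < strip.size(); ++j)
	{
		bool flip = ((j % 2) != 0); //odd triangles are flipped to keep a consistent orientation
		std::array<int,3> tri = {	flip ? strip[j-1] : strip[j-2],
									flip ? strip[j-2] : strip[j-1],
									strip[j] };
		trianglesOut.push_back(tri);
	}
}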
CC_FILE_ERROR ShpFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters) { QFile file(filename); if (!file.open(QIODevice::ReadOnly)) return CC_FERR_READING; //global shift CCVector3d Pshift(0,0,0); //read header (refer to ESRI Shapefile Technical Description) if (file.size() < 100) return CC_FERR_MALFORMED_FILE; char header[100]; file.read(header,100); int32_t fileLength = 0; { /*** WARNING: the beginning of the header is written with big endianness! ***/ const char* _header = header; //Byte 0: SHP code const int32_t code = qFromBigEndian<int32_t>(*reinterpret_cast<const int32_t*>(_header)); if (code != 9994) { return CC_FERR_MALFORMED_FILE; } _header += 4; //Byte 4: unused (20 bytes) _header += 20; //Byte 24: file length fileLength = qFromBigEndian<int32_t>(*reinterpret_cast<const int32_t*>(_header)); fileLength *= 2; //fileLength is measured in 16-bit words _header += 4; /*** WARNING: from now on, we only read data with little endianness! ***/ //Byte 28: file version const int32_t version = qFromLittleEndian<int32_t>(*reinterpret_cast<const int32_t*>(_header)); _header += 4; //Byte 32: shape type int32_t shapeTypeInt = qFromLittleEndian<int32_t>(*reinterpret_cast<const int32_t*>(_header)); _header += 4; ccLog::Print(QString("[SHP] Version: %1 - type: %2").arg(version).arg(ToString(static_cast<ESRI_SHAPE_TYPE>(shapeTypeInt)))); //X and Y boundaries //Byte 36: box X min double xMin = qFromLittleEndian<double>(*reinterpret_cast<const double*>(_header)); _header += 8; //Byte 44: box Y min double yMin = qFromLittleEndian<double>(*reinterpret_cast<const double*>(_header)); _header += 8; //Byte 52: box X max double xMax = qFromLittleEndian<double>(*reinterpret_cast<const double*>(_header)); _header += 8; //Byte 60: box Y max double yMax = qFromLittleEndian<double>(*reinterpret_cast<const double*>(_header)); _header += 8; //Z boundaries //Unused, with value 0.0, if not Measured or Z type //Byte 68: box Z min double zMin = qFromLittleEndian<double>(*reinterpret_cast<const double*>(_header)); _header += 8; //Byte 76: box Z max double zMax = qFromLittleEndian<double>(*reinterpret_cast<const double*>(_header)); _header += 8; CCVector3d Pmin(xMin,yMin,zMin); if (HandleGlobalShift(Pmin,Pshift,parameters)) { ccLog::Warning("[SHP] Entities will be recentered!
Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z); } //M bounaries (M = measures) //Byte 84: M min double mMin = qFromLittleEndian<double>(*reinterpret_cast<const double*>(_header)); _header += 8; //Byte 92: M max double mMax = qFromLittleEndian<double>(*reinterpret_cast<const double*>(_header)); _header += 8; } assert(fileLength >= 100); if (fileLength < 100) { assert(false); return CC_FERR_MALFORMED_FILE; } fileLength -= 100; if (fileLength == 0) { return CC_FERR_NO_LOAD; } //load shapes CC_FILE_ERROR error = CC_FERR_NO_ERROR; ccPointCloud* singlePoints = 0; qint64 pos = file.pos(); while (fileLength >= 12) { file.seek(pos); assert(pos + fileLength == file.size()); //load shape record in main SHP file { file.read(header,8); //Byte 0: Record Number int32_t recordNumber = qFromBigEndian<int32_t>(*reinterpret_cast<const int32_t*>(header)); //Record numbers begin at 1 //Byte 4: Content Length int32_t recordSize = qFromBigEndian<int32_t>(*reinterpret_cast<const int32_t*>(header+4)); //Record numbers begin at 1 recordSize *= 2; //recordSize is measured in 16-bit words fileLength -= 8; pos += 8; if (fileLength < recordSize) { assert(false); error = CC_FERR_MALFORMED_FILE; break; } fileLength -= recordSize; pos += recordSize; //Record start (byte 0): Shape Type if (recordSize < 4) { assert(false); error = CC_FERR_MALFORMED_FILE; break; } file.read(header,4); recordSize -= 4; int32_t shapeTypeInt = qToLittleEndian<int32_t>(*reinterpret_cast<const int32_t*>(header)); ccLog::Print(QString("[SHP] Record #%1 - type: %2 (%3 bytes)").arg(recordNumber).arg(ToString(static_cast<ESRI_SHAPE_TYPE>(shapeTypeInt))).arg(recordSize)); switch (shapeTypeInt) { case SHP_POLYLINE: case SHP_POLYLINE_Z: case SHP_POLYGON: case SHP_POLYGON_Z: error = LoadPolyline(file,container,recordNumber,static_cast<ESRI_SHAPE_TYPE>(shapeTypeInt),Pshift); break; case SHP_MULTI_POINT: case SHP_MULTI_POINT_Z: case SHP_MULTI_POINT_M: error = LoadCloud(file,container,recordNumber,static_cast<ESRI_SHAPE_TYPE>(shapeTypeInt),Pshift); break; case SHP_POINT: case SHP_POINT_Z: case SHP_POINT_M: error = LoadSinglePoint(file,singlePoints,static_cast<ESRI_SHAPE_TYPE>(shapeTypeInt),Pshift); break; //case SHP_MULTI_PATCH: // error = LoadMesh(file,recordSize); // break; case SHP_NULL_SHAPE: //ignored break; default: //unhandled entity ccLog::Warning("[SHP] Unhandled type!"); break; } } if (error != CC_FERR_NO_ERROR) break; } if (singlePoints) { if (singlePoints->size() == 0) { delete singlePoints; singlePoints = 0; } else { CCLib::ScalarField* sf = singlePoints->getScalarField(0); if (sf) { sf->computeMinAndMax(); singlePoints->showSF(true); } container.addChild(singlePoints); } } return error; }
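//Illustrative sketch (hypothetical helper): the SHP header parsing above mixes byte orders,
//as the ESRI specification stores the file code and lengths in big endian but the version,
//shape type and bounding box in little endian. A condensed version of the two 'magic' reads,
//assuming 'header' holds the first 100 bytes of the file as above:
static bool ReadShpFileCodeAndType(const char* header, int32_t& shapeType)
{
	//Byte 0: file code, big endian, always 9994 for a valid shapefile
	int32_t code = qFromBigEndian<int32_t>(*reinterpret_cast<const int32_t*>(header));
	//Byte 32: shape type, little endian
	shapeType = qFromLittleEndian<int32_t>(*reinterpret_cast<const int32_t*>(header + 32));
	return (code == 9994);
}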
CC_FILE_ERROR SinusxFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters) { //open file QFile file(filename); if (!file.open(QFile::ReadOnly)) return CC_FERR_READING; QTextStream stream(&file); QString currentLine("C"); ccPolyline* currentPoly = 0; ccPointCloud* currentVertices = 0; unsigned lineNumber = 0; CurveType curveType = INVALID; unsigned cpIndex = 0; CC_FILE_ERROR result = CC_FERR_NO_ERROR; CCVector3d Pshift(0,0,0); bool firstVertex = true; while (!currentLine.isEmpty() && file.error() == QFile::NoError) { currentLine = stream.readLine(); ++lineNumber; if (currentLine.startsWith("C ")) { //ignore comments continue; } else if (currentLine.startsWith("B")) { //new block if (currentPoly) { if ( currentVertices && currentVertices->size() != 0 && currentVertices->resize(currentVertices->size()) && currentPoly->addPointIndex(0,currentVertices->size()) ) { container.addChild(currentPoly); } else { delete currentPoly; } currentPoly = 0; currentVertices = 0; } //read type QStringList tokens = currentLine.split(QRegExp("\\s+"),QString::SkipEmptyParts); if (tokens.size() < 2 || tokens[1].length() > 1) { ccLog::Warning(QString("[SinusX] Line %1 is corrupted").arg(lineNumber)); result = CC_FERR_MALFORMED_FILE; continue; } QChar curveTypeChar = tokens[1].at(0); curveType = INVALID; if (curveTypeChar == SHORTCUT[CUREV_S]) curveType = CUREV_S; else if (curveTypeChar == SHORTCUT[CURVE_P]) curveType = CURVE_P; else if (curveTypeChar == SHORTCUT[CURVE_N]) curveType = CURVE_N; else if (curveTypeChar == SHORTCUT[CURVE_C]) curveType = CURVE_C; if (curveType == INVALID) { ccLog::Warning(QString("[SinusX] Unhandled curve type '%1' on line '%2'!").arg(curveTypeChar).arg(lineNumber)); result = CC_FERR_MALFORMED_FILE; continue; } //TODO: what about the local coordinate system and scale?! (7 last values) if (tokens.size() > 7) { } //block is ready currentVertices = new ccPointCloud("vertices"); currentPoly = new ccPolyline(currentVertices); currentPoly->addChild(currentVertices); currentVertices->setEnabled(false); cpIndex = 0; } else if (currentPoly) { if (currentLine.startsWith("CN")) { if (currentLine.length() > 3) { QString name = currentLine.right(currentLine.length()-3); currentPoly->setName(name); } } else if (currentLine.startsWith("CP")) { QStringList tokens = currentLine.split(QRegExp("\\s+"),QString::SkipEmptyParts); switch (cpIndex) { case 0: //first 'CP' line { //expected: CP + 'connected' + 'closed' flags bool ok = (tokens.size() == 3); if (ok) { bool ok1 = true, ok2 = true; int isConnected = tokens[1].toInt(&ok1); int isClosed = tokens[2].toInt(&ok2); ok = ok1 && ok2; if (ok) { if (isConnected == 0) { //points are not connected?! //--> we simply hide the polyline and display its vertices currentPoly->setVisible(false); currentVertices->setEnabled(true); } currentPoly->setClosed(isClosed != 0); } } if (!ok) { ccLog::Warning(QString("[SinusX] Line %1 is corrupted (expected: 'CP connected_flag closed_flag')").arg(lineNumber)); result = CC_FERR_MALFORMED_FILE; continue; } ++cpIndex; } break; case 1: //second 'CP' line { if (curveType == CUREV_S) { ++cpIndex; //no break: we go directly to the next case (cpIndex = 2) } else if (curveType == CURVE_P) { //nothing particular for profiles (they are not handled in the same way in CC!) 
++cpIndex; break; } else if (curveType == CURVE_N) { //expected: CP + const_altitude bool ok = (tokens.size() == 2); if (ok) { double z = tokens[1].toDouble(&ok); if (ok) currentPoly->setMetaData(ccPolyline::MetaKeyConstAltitude(),QVariant(z)); } if (!ok) { ccLog::Warning(QString("[SinusX] Line %1 is corrupted (expected: 'CP const_altitude')").arg(lineNumber)); result = CC_FERR_MALFORMED_FILE; continue; } ++cpIndex; break; } else if (curveType == CURVE_C) { //skip the next 16 values int skipped = tokens.size()-1; //all but the 'CP' keyword while (skipped < 16 && !currentLine.isEmpty() && file.error() == QFile::NoError) { currentLine = stream.readLine(); ++lineNumber; tokens = currentLine.split(QRegExp("\\s+"),QString::SkipEmptyParts); skipped += tokens.size(); } assert(skipped == 16); //no more than 16 normally! ++cpIndex; break; } else { assert(false); ++cpIndex; break; } } case 2: { //CP + base plane: 0 = (XY), 1 = (YZ), 2 = (ZX) bool ok = (tokens.size() == 2); if (ok) { int vertDir = 2; QChar basePlaneChar = tokens[1].at(0); if (basePlaneChar == '0') vertDir = 2; else if (basePlaneChar == '1') vertDir = 0; else if (basePlaneChar == '2') vertDir = 1; else ok = false; if (ok) currentPoly->setMetaData(ccPolyline::MetaKeyUpDir(),QVariant(vertDir)); } if (!ok) { ccLog::Warning(QString("[SinusX] Line %1 is corrupted (expected: 'CP base_plane')").arg(lineNumber)); result = CC_FERR_MALFORMED_FILE; continue; } } ++cpIndex; break; default: //ignored break; } } else if (!currentLine.isEmpty()) { assert(currentVertices); //shoud be a point! QStringList tokens = currentLine.split(QRegExp("\\s+"),QString::SkipEmptyParts); bool ok = (tokens.size() == 4); if (ok) { CCVector3d Pd; Pd.x = tokens[0].toDouble(&ok); if (ok) { Pd.y = tokens[1].toDouble(&ok); if (ok) { Pd.z = tokens[2].toDouble(&ok); if (ok) { //resize vertex cloud if necessary if ( currentVertices->size() == currentVertices->capacity() && !currentVertices->reserve(currentVertices->size() + 10)) { delete currentPoly; return CC_FERR_NOT_ENOUGH_MEMORY; } //first point: check for 'big' coordinates if (firstVertex/*currentVertices->size() == 0*/) { firstVertex = false; if (HandleGlobalShift(Pd,Pshift,parameters)) { if (currentPoly) currentPoly->setGlobalShift(Pshift); else currentVertices->setGlobalShift(Pshift); ccLog::Warning("[SinusX::loadFile] Polyline has been recentered! Translation: (%.2f ; %.2f ; %.2f)",Pshift.x,Pshift.y,Pshift.z); } } currentVertices->addPoint(CCVector3::fromArray((Pd+Pshift).u)); } } } } if (!ok) { ccLog::Warning(QString("[SinusX] Line %1 is corrupted (expected: 'X Y Z Key ...')").arg(lineNumber)); result = CC_FERR_MALFORMED_FILE; continue; } } } } //don't forget the last polyline! if (currentPoly) { if ( currentVertices && currentVertices->size() != 0 && currentVertices->resize(currentVertices->size()) && currentPoly->addPointIndex(0,currentVertices->size()) ) { container.addChild(currentPoly); } } return result; }
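//Illustrative sketch (hypothetical helper, not part of the SinusX filter above): the
//'CP base_plane' case maps the SinusX plane code to the index of the vertical axis
//(i.e. the dimension missing from the base plane).
static bool BasePlaneToVertDir(QChar basePlaneChar, int& vertDir)
{
	if (basePlaneChar == '0')
		vertDir = 2; //(XY) plane --> Z is the vertical direction
	else if (basePlaneChar == '1')
		vertDir = 0; //(YZ) plane --> X is the vertical direction
	else if (basePlaneChar == '2')
		vertDir = 1; //(ZX) plane --> Y is the vertical direction
	else
		return false; //unhandled code
	return true;
}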
CC_FILE_ERROR SalomeHydroFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters) { //we open the file (ASCII mode) QFile file(filename); if (!file.open(QFile::ReadOnly)) { return CC_FERR_READING; } QTextStream stream(&file); CC_FILE_ERROR result = CC_FERR_NO_ERROR; CCVector3d Pshift(0,0,0); bool firstPoint = true; ccPointCloud* currentVertices = 0; unsigned index = 0; while (true) { QString currentLine = stream.readLine().trimmed(); if (currentLine.isNull() || currentLine.isEmpty()) { //close any ongoing polyline if (currentVertices) { if (currentVertices->size() < 2) { delete currentVertices; currentVertices = 0; ccLog::Warning("[Salome Hydro] An invalid polyline (single vertex) will be ignored"); } else { currentVertices->shrinkToFit(); //create the corresponding polyline ccPolyline* newPoly = new ccPolyline(currentVertices); newPoly->setName(QString(QString("Polyline #%1").arg(++index))); newPoly->addChild(currentVertices); newPoly->set2DMode(false); if (!newPoly->reserve(currentVertices->size())) { delete newPoly; result = CC_FERR_NOT_ENOUGH_MEMORY; break; } newPoly->addPointIndex(0,currentVertices->size()); currentVertices->setEnabled(false); container.addChild(newPoly); currentVertices = 0; } } if (currentLine.isNull()) { //end of file break; } } else { if (!currentVertices) { currentVertices = new ccPointCloud("vertices"); if (!firstPoint) currentVertices->setGlobalShift(Pshift); } QStringList parts = currentLine.split(QRegExp("\\s+"),QString::SkipEmptyParts); if (parts.size() == 3) { //(X,Y,Z) CCVector3d P( parts[0].toDouble(), parts[1].toDouble(), parts[2].toDouble() ); //first point: check for 'big' coordinates if (firstPoint) { if (HandleGlobalShift(P,Pshift,parameters)) { currentVertices->setGlobalShift(Pshift); ccLog::Warning("[Salome Hydro] Polylines will be recentered! Translation: (%.2f ; %.2f ; %.2f)",Pshift.x,Pshift.y,Pshift.z); } firstPoint = false; } //add point if (currentVertices->size() == currentVertices->capacity()) { if (!currentVertices->reserve(currentVertices->size() + 64)) { delete currentVertices; currentVertices = 0; result = CC_FERR_NOT_ENOUGH_MEMORY; break; } } currentVertices->addPoint(CCVector3::fromArray((P+Pshift).u)); } else { ccLog::Warning("[Salome Hydro] Malformed file: 3 values per line expected"); result = CC_FERR_MALFORMED_FILE; break; } } } return result; }
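//Illustrative sketch (hypothetical standalone parser, not part of the filter above): the
//Salome Hydro format read here is a sequence of 'X Y Z' lines, with blank lines separating
//polylines and a null line meaning end of file. Single-vertex blocks are silently dropped
//in this sketch (the loader above warns instead). Assumes <vector> is available.
static bool ReadSalomeHydroBlocks(QTextStream& stream, std::vector< std::vector<CCVector3d> >& blocksOut)
{
	std::vector<CCVector3d> current;
	while (true)
	{
		QString line = stream.readLine();
		bool endOfFile = line.isNull();
		if (endOfFile || line.trimmed().isEmpty())
		{
			//a blank line (or the end of the file) closes the current polyline
			if (current.size() >= 2) //a single vertex is not a valid polyline
				blocksOut.push_back(current);
			current.clear();
			if (endOfFile)
				return true;
			continue;
		}
		QStringList parts = line.trimmed().split(QRegExp("\\s+"),QString::SkipEmptyParts);
		if (parts.size() != 3)
			return false; //3 values per line expected
		current.push_back(CCVector3d(parts[0].toDouble(), parts[1].toDouble(), parts[2].toDouble()));
	}
}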