CC_FILE_ERROR LASFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, CCVector3d* coordinatesShift/*=0*/)
{
	//opening file
	std::ifstream ifs;
	ifs.open(filename, std::ios::in | std::ios::binary);
	if (ifs.fail())
		return CC_FERR_READING;

	liblas::Reader* reader = 0;
	unsigned nbOfPoints = 0;
	std::vector<std::string> dimensions;

	try
	{
		reader = new liblas::Reader(liblas::ReaderFactory().CreateWithStream(ifs));	//using factory for automatic and transparent
																					//handling of compressed/uncompressed files
		liblas::Header const& header = reader->GetHeader();

		ccLog::PrintDebug(QString("[LAS FILE] %1 - signature: %2").arg(filename).arg(header.GetFileSignature().c_str()));

		//get fields present in file
		dimensions = header.GetSchema().GetDimensionNames();

		//and of course the number of points
		nbOfPoints = header.GetPointRecordsCount();
	}
	catch (...)
	{
		delete reader;
		ifs.close();
		return CC_FERR_READING;
	}

	if (nbOfPoints == 0)
	{
		//strange file ;)
		delete reader;
		ifs.close();
		return CC_FERR_NO_LOAD;
	}

	//dialog to choose the fields to load
	if (!s_lasOpenDlg)
		s_lasOpenDlg = QSharedPointer<LASOpenDlg>(new LASOpenDlg());
	s_lasOpenDlg->setDimensions(dimensions);
	if (alwaysDisplayLoadDialog && !s_lasOpenDlg->autoSkipMode() && !s_lasOpenDlg->exec())
	{
		delete reader;
		ifs.close();
		return CC_FERR_CANCELED_BY_USER;
	}
	bool ignoreDefaultFields = s_lasOpenDlg->ignoreDefaultFieldsCheckBox->isChecked();

	//RGB color
	liblas::Color rgbColorMask;	//(0,0,0) on construction
	if (s_lasOpenDlg->doLoad(LAS_RED))
		rgbColorMask.SetRed(~0);
	if (s_lasOpenDlg->doLoad(LAS_GREEN))
		rgbColorMask.SetGreen(~0);
	if (s_lasOpenDlg->doLoad(LAS_BLUE))
		rgbColorMask.SetBlue(~0);
	bool loadColor = (rgbColorMask[0] || rgbColorMask[1] || rgbColorMask[2]);

	//progress dialog
	ccProgressDialog pdlg(true);	//cancel available
	CCLib::NormalizedProgress nprogress(&pdlg, nbOfPoints);
	pdlg.setMethodTitle("Open LAS file");
	pdlg.setInfo(qPrintable(QString("Points: %1").arg(nbOfPoints)));
	pdlg.start();

	//number of points read from the beginning of the current cloud part
	unsigned pointsRead = 0;
	CCVector3d Pshift(0, 0, 0);

	//by default we read colors as 8-bit integers; we switch to a 16-bit interpretation if needed (16 bits is the standard!)
	unsigned char colorCompBitDec = 0;
	colorType rgb[3] = {0, 0, 0};

	ccPointCloud* loadedCloud = 0;
	std::vector<LasField> fieldsToLoad;

	//if the file is too big, we will chunk it in multiple parts
	unsigned int fileChunkPos = 0;
	unsigned int fileChunkSize = 0;

	while (true)
	{
		//if we reach the end of the file, or the max. cloud size limit (in which case we create a new chunk)
		bool newPointAvailable = (nprogress.oneStep() && reader->ReadNextPoint());

		if (!newPointAvailable || pointsRead == fileChunkPos + fileChunkSize)
		{
			if (loadedCloud)
			{
				if (loadedCloud->size())
				{
					bool thisChunkHasColors = loadedCloud->hasColors();
					loadedCloud->showColors(thisChunkHasColors);
					if (loadColor && !thisChunkHasColors)
						ccLog::Warning("[LAS FILE] Color field was all black! We ignored it...");

					while (!fieldsToLoad.empty())
					{
						LasField& field = fieldsToLoad.back();
						if (field.sf)
						{
							field.sf->computeMinAndMax();

							if (	field.type == LAS_CLASSIFICATION
								||	field.type == LAS_RETURN_NUMBER
								||	field.type == LAS_NUMBER_OF_RETURNS)
							{
								int cMin = (int)field.sf->getMin();
								int cMax = (int)field.sf->getMax();
								field.sf->setColorRampSteps(std::min<int>(cMax - cMin + 1, 256));
								//classifSF->setMinSaturation(cMin);
							}
							else if (field.type == LAS_INTENSITY)
							{
								field.sf->setColorScale(ccColorScalesManager::GetDefaultScale(ccColorScalesManager::GREY));
							}

							int sfIndex = loadedCloud->addScalarField(field.sf);
							if (!loadedCloud->hasDisplayedScalarField())
							{
								loadedCloud->setCurrentDisplayedScalarField(sfIndex);
								loadedCloud->showSF(!thisChunkHasColors);
							}
							field.sf->release();
							field.sf = 0;
						}
						else
						{
							ccLog::Warning(QString("[LAS FILE] All '%1' values were the same (%2)! We ignored them...").arg(LAS_FIELD_NAMES[field.type]).arg(field.firstValue));
						}

						fieldsToLoad.pop_back();
					}

					//if we have reserved too much memory
					if (loadedCloud->size() < loadedCloud->capacity())
						loadedCloud->resize(loadedCloud->size());

					QString chunkName("unnamed - Cloud");
					unsigned n = container.getChildrenNumber();
					if (n != 0) //if we have more than one cloud, we append an index
					{
						if (n == 1) //we must also update the first one!
							container.getChild(0)->setName(chunkName + QString(" #1"));
						chunkName += QString(" #%1").arg(n + 1);
					}
					loadedCloud->setName(chunkName);

					container.addChild(loadedCloud);
					loadedCloud = 0;
				}
				else
				{
					//empty cloud?!
					delete loadedCloud;
					loadedCloud = 0;
				}
			}

			if (!newPointAvailable)
				break; //end of the file (or cancel requested)

			//otherwise, we must create a new cloud
			fileChunkPos = pointsRead;
			fileChunkSize = std::min(nbOfPoints - pointsRead, CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
			loadedCloud = new ccPointCloud();
			if (!loadedCloud->reserveThePointsTable(fileChunkSize))
			{
				ccLog::Warning("[LASFilter::loadFile] Not enough memory!");
				delete loadedCloud;
				delete reader;
				ifs.close();
				return CC_FERR_NOT_ENOUGH_MEMORY;
			}
			loadedCloud->setGlobalShift(Pshift);

			//DGM: from now on, we only enable scalar fields when we detect a valid value!
			if (s_lasOpenDlg->doLoad(LAS_CLASSIFICATION))
				fieldsToLoad.push_back(LasField(LAS_CLASSIFICATION, 0, 0, 255)); //unsigned char: between 0 and 255
			if (s_lasOpenDlg->doLoad(LAS_CLASSIF_VALUE))
				fieldsToLoad.push_back(LasField(LAS_CLASSIF_VALUE, 0, 0, 31)); //5 bits: between 0 and 31
			if (s_lasOpenDlg->doLoad(LAS_CLASSIF_SYNTHETIC))
				fieldsToLoad.push_back(LasField(LAS_CLASSIF_SYNTHETIC, 0, 0, 1)); //1 bit: 0 or 1
			if (s_lasOpenDlg->doLoad(LAS_CLASSIF_KEYPOINT))
				fieldsToLoad.push_back(LasField(LAS_CLASSIF_KEYPOINT, 0, 0, 1)); //1 bit: 0 or 1
			if (s_lasOpenDlg->doLoad(LAS_CLASSIF_WITHHELD))
				fieldsToLoad.push_back(LasField(LAS_CLASSIF_WITHHELD, 0, 0, 1)); //1 bit: 0 or 1
			if (s_lasOpenDlg->doLoad(LAS_INTENSITY))
				fieldsToLoad.push_back(LasField(LAS_INTENSITY, 0, 0, 65535)); //16 bits: between 0 and 65535
			if (s_lasOpenDlg->doLoad(LAS_TIME))
				fieldsToLoad.push_back(LasField(LAS_TIME, 0, 0, -1.0)); //8 bytes (double)
			if (s_lasOpenDlg->doLoad(LAS_RETURN_NUMBER))
				fieldsToLoad.push_back(LasField(LAS_RETURN_NUMBER, 1, 1, 7)); //3 bits: between 1 and 7
			if (s_lasOpenDlg->doLoad(LAS_NUMBER_OF_RETURNS))
				fieldsToLoad.push_back(LasField(LAS_NUMBER_OF_RETURNS, 1, 1, 7)); //3 bits: between 1 and 7
			if (s_lasOpenDlg->doLoad(LAS_SCAN_DIRECTION))
				fieldsToLoad.push_back(LasField(LAS_SCAN_DIRECTION, 0, 0, 1)); //1 bit: 0 or 1
			if (s_lasOpenDlg->doLoad(LAS_FLIGHT_LINE_EDGE))
				fieldsToLoad.push_back(LasField(LAS_FLIGHT_LINE_EDGE, 0, 0, 1)); //1 bit: 0 or 1
			if (s_lasOpenDlg->doLoad(LAS_SCAN_ANGLE_RANK))
				fieldsToLoad.push_back(LasField(LAS_SCAN_ANGLE_RANK, 0, -90, 90)); //signed char: between -90 and +90
			if (s_lasOpenDlg->doLoad(LAS_USER_DATA))
				fieldsToLoad.push_back(LasField(LAS_USER_DATA, 0, 0, 255)); //unsigned char: between 0 and 255
			if (s_lasOpenDlg->doLoad(LAS_POINT_SOURCE_ID))
				fieldsToLoad.push_back(LasField(LAS_POINT_SOURCE_ID, 0, 0, 65535)); //16 bits: between 0 and 65535
		}

		assert(newPointAvailable);
		const liblas::Point& p = reader->GetPoint();

		//first point: check for 'big' coordinates
		if (pointsRead == 0)
		{
			CCVector3d P(p.GetX(), p.GetY(), p.GetZ());

			bool shiftAlreadyEnabled = (coordinatesShiftEnabled && *coordinatesShiftEnabled && coordinatesShift);
			if (shiftAlreadyEnabled)
				Pshift = *coordinatesShift;
			bool applyAll = false;
			if (	sizeof(PointCoordinateType) < 8
				&&	ccCoordinatesShiftManager::Handle(P.u, 0, alwaysDisplayLoadDialog, shiftAlreadyEnabled, Pshift, 0, applyAll))
			{
				loadedCloud->setGlobalShift(Pshift);
				ccLog::Warning("[LASFilter::loadFile] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)", Pshift.x, Pshift.y, Pshift.z);

				//we save coordinates shift information
				if (applyAll && coordinatesShiftEnabled && coordinatesShift)
				{
					*coordinatesShiftEnabled = true;
					*coordinatesShift = Pshift;
				}
			}
		}

		CCVector3 P(	static_cast<PointCoordinateType>(p.GetX() + Pshift.x),
						static_cast<PointCoordinateType>(p.GetY() + Pshift.y),
						static_cast<PointCoordinateType>(p.GetZ() + Pshift.z));
		loadedCloud->addPoint(P);

		//color field
		if (loadColor)
		{
			//Warning: LAS colors are stored on 16 bits!
			liblas::Color col = p.GetColor();
			col[0] &= rgbColorMask[0];
			col[1] &= rgbColorMask[1];
			col[2] &= rgbColorMask[2];

			//if we haven't reserved a color field yet, we first check that the color is not black
			bool pushColor = true;
			if (!loadedCloud->hasColors())
			{
				//if the color is not black, we are sure it's a valid color field!
				if (col[0] || col[1] || col[2])
				{
					if (loadedCloud->reserveTheRGBTable())
					{
						//we must set the color (black) of all the previously skipped points
						for (unsigned i = 0; i < loadedCloud->size() - 1; ++i)
							loadedCloud->addRGBColor(ccColor::black);
					}
					else
					{
						ccLog::Warning("[LAS FILE] Not enough memory: color field will be ignored!");
						loadColor = false; //no need to retry with the other chunks anyway
						pushColor = false;
					}
				}
				else //otherwise we ignore it for the moment (we'll add it later if necessary)
				{
					pushColor = false;
				}
			}

			//do we need to push this color?
			if (pushColor)
			{
				//we test whether the color components are coded on 16 bits (standard) or only on 8 bits (it happens ;)
				if (colorCompBitDec == 0)
				{
					if (	(col[0] & 0xFF00)
						||	(col[1] & 0xFF00)
						||	(col[2] & 0xFF00))
					{
						//the color components are on 16 bits!
						ccLog::Print("[LAS FILE] Color components are coded on 16 bits");
						colorCompBitDec = 8;
						//we fix all the previously read colors
						for (unsigned i = 0; i < loadedCloud->size() - 1; ++i)
							loadedCloud->setPointColor(i, ccColor::black); //255 >> 8 = 0!
					}
				}

				rgb[0] = (colorType)(col[0] >> colorCompBitDec);
				rgb[1] = (colorType)(col[1] >> colorCompBitDec);
				rgb[2] = (colorType)(col[2] >> colorCompBitDec);

				loadedCloud->addRGBColor(rgb);
			}
		}

		//additional fields
		for (std::vector<LasField>::iterator it = fieldsToLoad.begin(); it != fieldsToLoad.end(); ++it)
		{
			double value = 0.0;
			switch (it->type)
			{
			case LAS_X:
			case LAS_Y:
			case LAS_Z:
				assert(false);
				break;
			case LAS_INTENSITY:
				value = (double)p.GetIntensity();
				break;
			case LAS_RETURN_NUMBER:
				value = (double)p.GetReturnNumber();
				break;
			case LAS_NUMBER_OF_RETURNS:
				value = (double)p.GetNumberOfReturns();
				break;
			case LAS_SCAN_DIRECTION:
				value = (double)p.GetScanDirection();
				break;
			case LAS_FLIGHT_LINE_EDGE:
				value = (double)p.GetFlightLineEdge();
				break;
			case LAS_CLASSIFICATION:
				value = (double)p.GetClassification().GetClass();
				break;
			case LAS_SCAN_ANGLE_RANK:
				value = (double)p.GetScanAngleRank();
				break;
			case LAS_USER_DATA:
				value = (double)p.GetUserData();
				break;
			case LAS_POINT_SOURCE_ID:
				value = (double)p.GetPointSourceID();
				break;
			case LAS_RED:
			case LAS_GREEN:
			case LAS_BLUE:
				assert(false);
				break;
			case LAS_TIME:
				value = p.GetTime();
				break;
			case LAS_CLASSIF_VALUE:
				value = (double)(p.GetClassification().GetClass() & 31); //5 bits
				break;
			case LAS_CLASSIF_SYNTHETIC:
				value = (double)(p.GetClassification().GetClass() & 32); //bit #6
				break;
			case LAS_CLASSIF_KEYPOINT:
				value = (double)(p.GetClassification().GetClass() & 64); //bit #7
				break;
			case LAS_CLASSIF_WITHHELD:
				value = (double)(p.GetClassification().GetClass() & 128); //bit #8
				break;
			case LAS_INVALID:
			default:
				assert(false);
				break;
			}

			if (it->sf)
			{
				ScalarType s = static_cast<ScalarType>(value);
				it->sf->addElement(s);
			}
			else
			{
				//first point? we track its value
				if (loadedCloud->size() == 1)
				{
					it->firstValue = value;
				}

				if (	!ignoreDefaultFields
					||	value != it->firstValue
					||	it->firstValue != it->defaultValue)
				{
					it->sf = new ccScalarField(it->getName());
					if (it->sf->reserve(fileChunkSize))
					{
						it->sf->link();
						//we must set the value (firstValue) of all the previously skipped points
						ScalarType firstS = static_cast<ScalarType>(it->firstValue);
						for (unsigned i = 0; i < loadedCloud->size() - 1; ++i)
							it->sf->addElement(firstS);

						ScalarType s = static_cast<ScalarType>(value);
						it->sf->addElement(s);
					}
					else
					{
						ccLog::Warning(QString("[LAS FILE] Not enough memory: '%1' field will be ignored!").arg(LAS_FIELD_NAMES[it->type]));
						it->sf->release();
						it->sf = 0;
					}
				}
			}
		}

		++pointsRead;
	}

	if (reader)
		delete reader;
	reader = 0;
	ifs.close();

	return CC_FERR_NO_ERROR;
}
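/* Note on the color handling above (descriptive summary only): the LAS specification stores
   R, G and B on 16 bits each, but some writers only use the 0-255 range. The loader therefore
   starts with colorCompBitDec = 0 (8-bit interpretation) and switches to a right shift of 8 as
   soon as any component has a bit set in 0xFF00. Points read before the switch necessarily had
   all components <= 255, so the 8-bit shift would have mapped them to 0 anyway; resetting them
   to black is equivalent (hence the '255 >> 8 = 0!' comment). */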
CC_FILE_ERROR LASFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, double* coordinatesShift/*=0*/)
{
	//opening file
	std::ifstream ifs;
	ifs.open(filename, std::ios::in | std::ios::binary);
	if (ifs.fail())
		return CC_FERR_READING;

	liblas::Reader* reader = 0;
	unsigned nbOfPoints = 0;
	std::vector<std::string> dimensions;

	try
	{
		reader = new liblas::Reader(liblas::ReaderFactory().CreateWithStream(ifs));	//using factory for automatic and transparent
																					//handling of compressed/uncompressed files
		liblas::Header const& header = reader->GetHeader();

#ifdef _DEBUG
		//ccConsole::Print("[LAS FILE] %s - signature: %s",filename,header.GetFileSignature().c_str());
#endif

		//get fields present in file
		dimensions = header.GetSchema().GetDimensionNames();

		//and of course the number of points
		nbOfPoints = header.GetPointRecordsCount();
	}
	catch (...)
	{
		delete reader;
		ifs.close();
		return CC_FERR_READING;
	}

	if (nbOfPoints == 0)
	{
		//strange file ;)
		delete reader;
		ifs.close();
		return CC_FERR_NO_LOAD;
	}

	liblas::Color rgbColorMask;	//(0,0,0) on construction
	bool hasClassif = false;
	bool hasIntensity = false;
	bool hasTime = false;
	bool hasReturnNumber = false;
	for (unsigned k = 0; k < dimensions.size(); ++k)
	{
		QString dim = QString(dimensions[k].c_str()).toUpper();
		bool handled = true;
		if (dim == "RED")
			rgbColorMask.SetRed(~0);
		else if (dim == "BLUE")
			rgbColorMask.SetBlue(~0);
		else if (dim == "GREEN")
			rgbColorMask.SetGreen(~0);
		else if (dim == "CLASSIFICATION")
			hasClassif = true;
		else if (dim == "TIME")
			hasTime = true;
		else if (dim == "INTENSITY")
			hasIntensity = true;
		else if (dim == "RETURN NUMBER")
			hasReturnNumber = true;
		else if (dim != "X" && dim != "Y" && dim != "Z")
			handled = false;

		ccConsole::Print(QString("[LAS FILE] Found dimension '%1' (%2)").arg(dimensions[k].c_str()).arg(handled ? "handled" : "not handled"));
	}
	bool hasColor = (rgbColorMask[0] || rgbColorMask[1] || rgbColorMask[2]);

	//progress dialog
	ccProgressDialog pdlg(true);	//cancel available
	CCLib::NormalizedProgress nprogress(&pdlg, nbOfPoints);
	pdlg.setMethodTitle("Open LAS file");
	pdlg.setInfo(qPrintable(QString("Points: %1").arg(nbOfPoints)));
	pdlg.start();

	//number of points read from the beginning of the current cloud part
	unsigned pointsRead = 0;
	double Pshift[3] = {0.0, 0.0, 0.0};

	//by default we read colors as 8-bit integers; we switch to a 16-bit interpretation if needed (16 bits is the standard!)
	unsigned char colorCompBitDec = 0;
	colorType rgb[3] = {0, 0, 0};

	ccPointCloud* loadedCloud = 0;

	ccScalarField* classifSF = 0;
	uint8_t firstClassifValue = 0;

	ccScalarField* timeSF = 0;
	double firstTime = 0.0;

	ccScalarField* intensitySF = 0;
	uint16_t firstIntensity = 0;

	ccScalarField* returnNumberSF = 0;
	uint16_t firstReturnNumber = 0;

	//if the file is too big, we will chunk it in multiple parts
	unsigned int fileChunkPos = 0;
	unsigned int fileChunkSize = 0;

	while (true)
	{
		//if we reach the end of the file, or the max. cloud size limit (in which case we create a new chunk)
		bool newPointAvailable = (nprogress.oneStep() && reader->ReadNextPoint());

		if (!newPointAvailable || pointsRead == fileChunkPos + fileChunkSize)
		{
			if (loadedCloud)
			{
				if (loadedCloud->size())
				{
					bool thisChunkHasColors = loadedCloud->hasColors();
					loadedCloud->showColors(thisChunkHasColors);
					if (hasColor && !thisChunkHasColors)
						ccLog::Warning("[LAS FILE] Color field was all black! We ignored it...");

					if (hasClassif)
					{
						if (classifSF)
						{
							classifSF->computeMinAndMax();
							int cMin = (int)classifSF->getMin();
							int cMax = (int)classifSF->getMax();
							classifSF->setColorRampSteps(cMax - cMin);
							//classifSF->setMinSaturation(cMin);
							int sfIndex = loadedCloud->addScalarField(classifSF);
							if (!loadedCloud->hasDisplayedScalarField())
							{
								loadedCloud->setCurrentDisplayedScalarField(sfIndex);
								loadedCloud->showSF(!thisChunkHasColors);
							}
						}
						else
						{
							ccLog::Warning(QString("[LAS FILE] All classification values were the same (%1)! We ignored them...").arg(firstClassifValue));
						}
					}

					if (hasIntensity)
					{
						if (intensitySF)
						{
							intensitySF->computeMinAndMax();
							intensitySF->setColorScale(ccColorScalesManager::GetDefaultScale(ccColorScalesManager::GREY));
							int sfIndex = loadedCloud->addScalarField(intensitySF);
							if (!loadedCloud->hasDisplayedScalarField())
							{
								loadedCloud->setCurrentDisplayedScalarField(sfIndex);
								loadedCloud->showSF(!thisChunkHasColors);
							}
						}
						else
						{
							ccLog::Warning(QString("[LAS FILE] All intensities were the same (%1)! We ignored them...").arg(firstIntensity));
						}
					}

					if (hasTime)
					{
						if (timeSF)
						{
							timeSF->computeMinAndMax();
							int sfIndex = loadedCloud->addScalarField(timeSF);
							if (!loadedCloud->hasDisplayedScalarField())
							{
								loadedCloud->setCurrentDisplayedScalarField(sfIndex);
								loadedCloud->showSF(!thisChunkHasColors);
							}
						}
						else
						{
							ccLog::Warning(QString("[LAS FILE] All timestamps were the same (%1)! We ignored them...").arg(firstTime));
						}
					}

					if (hasReturnNumber)
					{
						if (returnNumberSF)
						{
							returnNumberSF->computeMinAndMax();
							int rMin = (int)returnNumberSF->getMin();
							int rMax = (int)returnNumberSF->getMax();
							returnNumberSF->setColorRampSteps(rMax - rMin);
							int sfIndex = loadedCloud->addScalarField(returnNumberSF);
							if (!loadedCloud->hasDisplayedScalarField())
							{
								loadedCloud->setCurrentDisplayedScalarField(sfIndex);
								loadedCloud->showSF(!thisChunkHasColors);
							}
						}
						else
						{
							ccLog::Warning(QString("[LAS FILE] All return numbers were the same (%1)! We ignored them...").arg(firstReturnNumber));
						}
					}

					//if we have reserved too much memory
					if (loadedCloud->size() < loadedCloud->capacity())
						loadedCloud->resize(loadedCloud->size());

					QString chunkName("unnamed - Cloud");
					unsigned n = container.getChildrenNumber();
					if (n != 0) //if we have more than one cloud, we append an index
					{
						if (n == 1) //we must also update the first one!
							container.getChild(0)->setName(chunkName + QString(" #1"));
						chunkName += QString(" #%1").arg(n + 1);
					}
					loadedCloud->setName(chunkName);

					container.addChild(loadedCloud);
					loadedCloud = 0;
				}
				else
				{
					//empty cloud?!
					delete loadedCloud;
					loadedCloud = 0;
				}

				if (classifSF)
					classifSF->release();
				classifSF = 0;
				if (intensitySF)
					intensitySF->release();
				intensitySF = 0;
				if (returnNumberSF)
					returnNumberSF->release();
				returnNumberSF = 0;
				if (timeSF)
					timeSF->release();
				timeSF = 0;
			}

			if (!newPointAvailable)
				break; //end of the file (or cancel requested)

			//otherwise, we must create a new cloud
			fileChunkPos = pointsRead;
			fileChunkSize = std::min(nbOfPoints - pointsRead, CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
			loadedCloud = new ccPointCloud();
			if (!loadedCloud->reserveThePointsTable(fileChunkSize))
			{
				ccLog::Warning("[LASFilter::loadFile] Not enough memory!");
				delete loadedCloud;
				delete reader;
				ifs.close();
				return CC_FERR_NOT_ENOUGH_MEMORY;
			}
			loadedCloud->setOriginalShift(Pshift[0], Pshift[1], Pshift[2]);

			//DGM: from now on, we only enable scalar fields when we detect a valid value!
			if (hasClassif)
			{
				assert(!classifSF);
				firstClassifValue = 0;
			}

			if (hasTime)
			{
				assert(!timeSF);
				firstTime = 0.0;
			}

			if (hasIntensity)
			{
				assert(!intensitySF);
				firstIntensity = 0;
			}

			if (hasReturnNumber)
			{
				assert(!returnNumberSF);
				firstReturnNumber = 0;
			}
		}

		assert(newPointAvailable);
		const liblas::Point& p = reader->GetPoint();

		//first point: check for 'big' coordinates
		if (pointsRead == 0)
		{
			double P[3] = {p.GetX(), p.GetY(), p.GetZ()};

			bool shiftAlreadyEnabled = (coordinatesShiftEnabled && *coordinatesShiftEnabled && coordinatesShift);
			if (shiftAlreadyEnabled)
				memcpy(Pshift, coordinatesShift, sizeof(double) * 3);
			bool applyAll = false;
			if (ccCoordinatesShiftManager::Handle(P, 0, alwaysDisplayLoadDialog, shiftAlreadyEnabled, Pshift, 0, applyAll))
			{
				loadedCloud->setOriginalShift(Pshift[0], Pshift[1], Pshift[2]);
				ccConsole::Warning("[LASFilter::loadFile] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)", Pshift[0], Pshift[1], Pshift[2]);

				//we save coordinates shift information
				if (applyAll && coordinatesShiftEnabled && coordinatesShift)
				{
					*coordinatesShiftEnabled = true;
					coordinatesShift[0] = Pshift[0];
					coordinatesShift[1] = Pshift[1];
					coordinatesShift[2] = Pshift[2];
				}
			}
		}

		CCVector3 P(p.GetX() + Pshift[0], p.GetY() + Pshift[1], p.GetZ() + Pshift[2]);
		loadedCloud->addPoint(P);

		//color field
		if (hasColor)
		{
			//Warning: LAS colors are stored on 16 bits!
			liblas::Color col = p.GetColor();
			col[0] &= rgbColorMask[0];
			col[1] &= rgbColorMask[1];
			col[2] &= rgbColorMask[2];

			//if we haven't reserved a color field yet, we first check that the color is not black
			bool pushColor = true;
			if (!loadedCloud->hasColors())
			{
				//if the color is not black, we are sure it's a valid color field!
				if (col[0] || col[1] || col[2])
				{
					if (loadedCloud->reserveTheRGBTable())
					{
						//we must set the color (black) of all the previously skipped points
						for (unsigned i = 0; i < loadedCloud->size() - 1; ++i)
							loadedCloud->addRGBColor(ccColor::black);
					}
					else
					{
						ccConsole::Warning("[LAS FILE] Not enough memory: color field will be ignored!");
						hasColor = false; //no need to retry with the other chunks anyway
						pushColor = false;
					}
				}
				else //otherwise we ignore it for the moment (we'll add it later if necessary)
				{
					pushColor = false;
				}
			}

			//do we need to push this color?
			if (pushColor)
			{
				//we test whether the color components are coded on 16 bits (standard) or only on 8 bits (it happens ;)
				if (colorCompBitDec == 0)
				{
					if (	(col[0] & 0xFF00)
						||	(col[1] & 0xFF00)
						||	(col[2] & 0xFF00))
					{
						//the color components are on 16 bits!
						ccLog::Print("[LAS FILE] Color components are coded on 16 bits");
						colorCompBitDec = 8;
						//we fix all the previously read colors
						for (unsigned i = 0; i < loadedCloud->size() - 1; ++i)
							loadedCloud->setPointColor(i, ccColor::black); //255 >> 8 = 0!
					}
				}

				rgb[0] = (colorType)(col[0] >> colorCompBitDec);
				rgb[1] = (colorType)(col[1] >> colorCompBitDec);
				rgb[2] = (colorType)(col[2] >> colorCompBitDec);

				loadedCloud->addRGBColor(rgb);
			}
		}

		if (hasClassif)
		{
			uint8_t intValue = p.GetClassification().GetClass();
			if (classifSF)
			{
				classifSF->addElement(intValue);
			}
			else
			{
				//first point? we track its value
				if (loadedCloud->size() == 1)
				{
					firstClassifValue = intValue;
				}

				if (intValue != firstClassifValue || firstClassifValue > 1) //0 = Created, never classified, 1 = Unclassified
				{
					classifSF = new ccScalarField(CC_LAS_CLASSIFICATION_FIELD_NAME);
					if (classifSF->reserve(fileChunkSize))
					{
						classifSF->link();
						//we must set the classification value (firstClassifValue) of all the previously skipped points
						for (unsigned i = 0; i < loadedCloud->size() - 1; ++i)
							classifSF->addElement(firstClassifValue);
						classifSF->addElement(intValue);
					}
					else
					{
						ccConsole::Warning("[LAS FILE] Not enough memory: classification field will be ignored!");
						hasClassif = false; //no need to retry with the other chunks anyway
						classifSF->release();
						classifSF = 0;
					}
				}
			}
		}

		if (hasTime)
		{
			double timeValue = p.GetTime();
			if (timeSF)
			{
				timeSF->addElement(timeValue);
			}
			else
			{
				//first point? we track its value
				if (loadedCloud->size() == 1)
				{
					firstTime = timeValue;
				}
				else if (timeValue != firstTime)
				{
					timeSF = new ccScalarField(CC_SCAN_TIME_FIELD_NAME);
					if (timeSF->reserve(fileChunkSize))
					{
						timeSF->link();
						//we must set the timestamp value (firstTime) of all the previously skipped points
						for (unsigned i = 0; i < loadedCloud->size() - 1; ++i)
							timeSF->addElement(firstTime);
						timeSF->addElement(timeValue);
					}
					else
					{
						ccConsole::Warning("[LAS FILE] Not enough memory: 'time' field will be ignored!");
						hasTime = false; //no need to retry with the other chunks anyway
						timeSF->release();
						timeSF = 0;
					}
				}
			}
		}

		if (hasIntensity)
		{
			uint16_t intValue = p.GetIntensity();
			if (intensitySF)
			{
				intensitySF->addElement(intValue);
			}
			else
			{
				//first point? we track its value
				if (loadedCloud->size() == 1)
				{
					firstIntensity = intValue;
				}

				if (intValue != firstIntensity || (firstIntensity != 0 && firstIntensity != 65535))
				{
					intensitySF = new ccScalarField(CC_SCAN_INTENSITY_FIELD_NAME);
					if (intensitySF->reserve(fileChunkSize))
					{
						intensitySF->link();
						//we must set the intensity (firstIntensity) of all the previously skipped points
						for (unsigned i = 0; i < loadedCloud->size() - 1; ++i)
							intensitySF->addElement(firstIntensity);
						intensitySF->addElement(intValue);
					}
					else
					{
						ccConsole::Warning("[LAS FILE] Not enough memory: intensity field will be ignored!");
						hasIntensity = false; //no need to retry with the other chunks anyway
						intensitySF->release();
						intensitySF = 0;
					}
				}
			}
		}

		if (hasReturnNumber)
		{
			uint16_t intValue = p.GetReturnNumber();
			if (returnNumberSF)
			{
				returnNumberSF->addElement(intValue);
			}
			else
			{
				//first point? we track its value
				if (loadedCloud->size() == 1)
				{
					firstReturnNumber = intValue;
				}

				if (intValue != firstReturnNumber)
				{
					returnNumberSF = new ccScalarField(CC_SCAN_RETURN_INDEX_FIELD_NAME);
					if (returnNumberSF->reserve(fileChunkSize))
					{
						returnNumberSF->link();
						//we must set the return index (firstReturnNumber) of all the previously skipped points
						for (unsigned i = 0; i < loadedCloud->size() - 1; ++i)
							returnNumberSF->addElement(firstReturnNumber);
						returnNumberSF->addElement(intValue);
					}
					else
					{
						ccConsole::Warning("[LAS FILE] Not enough memory: return number field will be ignored!");
						hasReturnNumber = false; //no need to retry with the other chunks anyway
						returnNumberSF->release();
						returnNumberSF = 0;
					}
				}
			}
		}

		++pointsRead;
	}

	if (reader)
		delete reader;
	reader = 0;
	ifs.close();

	return CC_FERR_NO_ERROR;
}
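/* Note on the Global Shift handling in both LAS loaders above (descriptive summary only): when
   the first point has 'big' coordinates, ccCoordinatesShiftManager::Handle() may propose a
   translation (Pshift) to preserve coordinate precision. If the user asks for it to be applied
   to everything ('applyAll'), the shift is written back through the coordinatesShiftEnabled /
   coordinatesShift output parameters, so that subsequent calls in the same session can reuse
   the same translation. */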
CC_FILE_ERROR PTXFilter::loadFile(	QString filename,
									ccHObject& container,
									LoadParameters& parameters)
{
	//open ASCII file for reading
	QFile file(filename);
	if (!file.open(QIODevice::ReadOnly | QIODevice::Text))
	{
		return CC_FERR_READING;
	}
	QTextStream inFile(&file);

	CCVector3d PshiftTrans(0, 0, 0);
	CCVector3d PshiftCloud(0, 0, 0);

	CC_FILE_ERROR result = CC_FERR_NO_LOAD;
	ScalarType minIntensity = 0;
	ScalarType maxIntensity = 0;

	//progress dialog
	ccProgressDialog pdlg(true, parameters.parentWidget);
	pdlg.setMethodTitle(QObject::tr("Loading PTX file"));
	pdlg.setAutoClose(false);

	//progress dialog (for normals computation)
	ccProgressDialog normalsProgressDlg(true, parameters.parentWidget);
	normalsProgressDlg.setAutoClose(false);

	for (unsigned cloudIndex = 0; result == CC_FERR_NO_ERROR || result == CC_FERR_NO_LOAD; cloudIndex++)
	{
		unsigned width = 0, height = 0;
		ccGLMatrixd sensorTransD, cloudTransD;

		//read header
		{
			QString line = inFile.readLine();
			if (line.isNull() && container.getChildrenNumber() != 0) //end of file?
				break;

			//read the width (number of columns) and the height (number of rows) on the first two lines
			//(DGM: we transpose the matrix right away)
			bool ok;
			height = line.toUInt(&ok);
			if (!ok)
				return CC_FERR_MALFORMED_FILE;
			line = inFile.readLine();
			width = line.toUInt(&ok);
			if (!ok)
				return CC_FERR_MALFORMED_FILE;

			ccLog::Print(QString("[PTX] Scan #%1 - grid size: %2 x %3").arg(cloudIndex+1).arg(height).arg(width));

			//read sensor transformation matrix
			for (int i = 0; i < 4; ++i)
			{
				line = inFile.readLine();
				QStringList tokens = line.split(" ", QString::SkipEmptyParts);
				if (tokens.size() != 3)
					return CC_FERR_MALFORMED_FILE;

				double* colDest = 0;
				if (i == 0)
				{
					//Translation
					colDest = sensorTransD.getTranslation();
				}
				else
				{
					//X, Y and Z axis
					colDest = sensorTransD.getColumn(i-1);
				}

				for (int j = 0; j < 3; ++j)
				{
					assert(colDest);
					colDest[j] = tokens[j].toDouble(&ok);
					if (!ok)
						return CC_FERR_MALFORMED_FILE;
				}
			}
			//make the transform a little bit cleaner (necessary as it's read from ASCII!)
			CleanMatrix(sensorTransD);

			//read cloud transformation matrix
			for (int i = 0; i < 4; ++i)
			{
				line = inFile.readLine();
				QStringList tokens = line.split(" ", QString::SkipEmptyParts);
				if (tokens.size() != 4)
					return CC_FERR_MALFORMED_FILE;

				double* col = cloudTransD.getColumn(i);
				for (int j = 0; j < 4; ++j)
				{
					col[j] = tokens[j].toDouble(&ok);
					if (!ok)
						return CC_FERR_MALFORMED_FILE;
				}
			}
			//make the transform a little bit cleaner (necessary as it's read from ASCII!)
			CleanMatrix(cloudTransD);

			//handle Global Shift directly on the first cloud's translation!
			if (cloudIndex == 0)
			{
				if (HandleGlobalShift(cloudTransD.getTranslationAsVec3D(), PshiftTrans, parameters))
				{
					ccLog::Warning("[PTXFilter::loadFile] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)", PshiftTrans.x, PshiftTrans.y, PshiftTrans.z);
				}
			}

			//'remove' global shift from the sensor and cloud transformation matrices
			cloudTransD.setTranslation(cloudTransD.getTranslationAsVec3D() + PshiftTrans);
			sensorTransD.setTranslation(sensorTransD.getTranslationAsVec3D() + PshiftTrans);
		}

		//now we can read the grid cells
		ccPointCloud* cloud = new ccPointCloud();
		if (container.getChildrenNumber() == 0)
		{
			cloud->setName("unnamed - Cloud");
		}
		else
		{
			if (container.getChildrenNumber() == 1)
				container.getChild(0)->setName("unnamed - Cloud 1"); //update previous cloud name!
			cloud->setName(QString("unnamed - Cloud %1").arg(container.getChildrenNumber()+1));
		}

		unsigned gridSize = width * height;
		if (!cloud->reserve(gridSize))
		{
			result = CC_FERR_NOT_ENOUGH_MEMORY;
			delete cloud;
			cloud = 0;
			break;
		}

		//set global shift
		cloud->setGlobalShift(PshiftTrans);

		//intensities
		ccScalarField* intensitySF = new ccScalarField(CC_PTX_INTENSITY_FIELD_NAME);
		if (!intensitySF->reserve(static_cast<unsigned>(gridSize)))
		{
			ccLog::Warning("[PTX] Not enough memory to load intensities!");
			intensitySF->release();
			intensitySF = 0;
		}

		//grid structure
		ccPointCloud::Grid::Shared grid(new ccPointCloud::Grid);
		grid->w = width;
		grid->h = height;
		bool hasIndexGrid = true;
		try
		{
			grid->indexes.resize(gridSize, -1); //-1 means no cell/point
		}
		catch (const std::bad_alloc&)
		{
			ccLog::Warning("[PTX] Not enough memory to load the grid structure");
			hasIndexGrid = false;
		}

		//read points
		{
			CCLib::NormalizedProgress nprogress(&pdlg, gridSize);
			pdlg.setInfo(qPrintable(QString("Number of cells: %1").arg(gridSize)));
			pdlg.start();

			bool firstPoint = true;
			bool hasColors = false;
			bool loadColors = false;
			bool loadGridColors = false;
			size_t gridIndex = 0;

			for (unsigned j = 0; j < height; ++j)
			{
				for (unsigned i = 0; i < width; ++i, ++gridIndex)
				{
					QString line = inFile.readLine();
					QStringList tokens = line.split(" ", QString::SkipEmptyParts);

					if (firstPoint)
					{
						hasColors = (tokens.size() == 7);
						if (hasColors)
						{
							loadColors = cloud->reserveTheRGBTable();
							if (!loadColors)
							{
								ccLog::Warning("[PTX] Not enough memory to load RGB colors!");
							}
							else if (hasIndexGrid)
							{
								//we also load the colors into the grid (as invalid/missing points can have colors!)
								try
								{
									grid->colors.resize(gridSize, ccColor::Rgb(0, 0, 0));
									loadGridColors = true;
								}
								catch (const std::bad_alloc&)
								{
									ccLog::Warning("[PTX] Not enough memory to load the grid colors");
								}
							}
						}
					}

					if (	(hasColors && tokens.size() != 7)
						||	(!hasColors && tokens.size() != 4))
					{
						result = CC_FERR_MALFORMED_FILE;
						//early stop
						j = height;
						break;
					}

					double values[4];
					for (int v = 0; v < 4; ++v)
					{
						bool ok;
						values[v] = tokens[v].toDouble(&ok);
						if (!ok)
						{
							result = CC_FERR_MALFORMED_FILE;
							//early stop
							j = height;
							break;
						}
					}

					//we skip "empty" cells
					bool pointIsValid = (CCVector3d::fromArray(values).norm2() != 0);
					if (pointIsValid)
					{
						const double* Pd = values;

						//first point: check for 'big' coordinates
						if (firstPoint)
						{
							if (cloudIndex == 0 && !cloud->isShifted()) //in case the trans. matrix was ok!
							{
								CCVector3d P(Pd);
								if (HandleGlobalShift(P, PshiftCloud, parameters))
								{
									cloud->setGlobalShift(PshiftCloud);
									ccLog::Warning("[PTXFilter::loadFile] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)", PshiftCloud.x, PshiftCloud.y, PshiftCloud.z);
								}
							}
							firstPoint = false;
						}

						//update index grid
						if (hasIndexGrid)
						{
							grid->indexes[gridIndex] = static_cast<int>(cloud->size()); // = index (default value = -1, means no point)
						}

						//add point
						cloud->addPoint(CCVector3(	static_cast<PointCoordinateType>(Pd[0] + PshiftCloud.x),
													static_cast<PointCoordinateType>(Pd[1] + PshiftCloud.y),
													static_cast<PointCoordinateType>(Pd[2] + PshiftCloud.z)));

						//add intensity
						if (intensitySF)
						{
							intensitySF->addElement(static_cast<ScalarType>(values[3]));
						}
					}

					//color
					if (loadColors && (pointIsValid || loadGridColors))
					{
						ccColor::Rgb color;
						for (int c = 0; c < 3; ++c)
						{
							bool ok;
							unsigned temp = tokens[4+c].toUInt(&ok);
							ok &= (temp <= 255);
							if (ok)
							{
								color.rgb[c] = static_cast<unsigned char>(temp);
							}
							else
							{
								result = CC_FERR_MALFORMED_FILE;
								//early stop
								j = height;
								break;
							}
						}

						if (pointIsValid)
						{
							cloud->addRGBColor(color.rgb);
						}
						if (loadGridColors)
						{
							assert(!grid->colors.empty());
							grid->colors[gridIndex] = color;
						}
					}

					if (!nprogress.oneStep())
					{
						result = CC_FERR_CANCELED_BY_USER;
						break;
					}
				}
			}
		}

		//is there at least one valid point in this grid?
		if (cloud->size() == 0)
		{
			delete cloud;
			cloud = 0;
			if (intensitySF)
				intensitySF->release();

			ccLog::Warning(QString("[PTX] Scan #%1 is empty?!").arg(cloudIndex+1));
		}
		else
		{
			if (result == CC_FERR_NO_LOAD)
				result = CC_FERR_NO_ERROR; //to make clear that we have loaded at least something!

			cloud->resize(cloud->size());
			if (intensitySF)
			{
				assert(intensitySF->currentSize() == cloud->size());
				intensitySF->resize(cloud->size());
				intensitySF->computeMinAndMax();
				int intensitySFIndex = cloud->addScalarField(intensitySF);

				//keep track of the min and max intensity
				if (container.getChildrenNumber() == 0)
				{
					minIntensity = intensitySF->getMin();
					maxIntensity = intensitySF->getMax();
				}
				else
				{
					minIntensity = std::min(minIntensity, intensitySF->getMin());
					maxIntensity = std::max(maxIntensity, intensitySF->getMax());
				}

				cloud->showSF(true);
				cloud->setCurrentDisplayedScalarField(intensitySFIndex);
			}

			ccGBLSensor* sensor = 0;
			if (hasIndexGrid && result != CC_FERR_CANCELED_BY_USER)
			{
				//determine best sensor parameters (mainly yaw and pitch steps)
				ccGLMatrix cloudToSensorTrans((sensorTransD.inverse() * cloudTransD).data());
				sensor = ccGriddedTools::ComputeBestSensor(cloud, grid, &cloudToSensorTrans);
			}

			//we apply the transformation
			ccGLMatrix cloudTrans(cloudTransD.data());
			cloud->applyGLTransformation_recursive(&cloudTrans);

			if (sensor)
			{
				ccGLMatrix sensorTrans(sensorTransD.data());
				sensor->setRigidTransformation(sensorTrans); //after cloud->applyGLTransformation_recursive!
				cloud->addChild(sensor);
			}

			//scan grid
			if (hasIndexGrid)
			{
				grid->validCount = static_cast<unsigned>(cloud->size());
				grid->minValidIndex = 0;
				grid->maxValidIndex = grid->validCount - 1;
				grid->sensorPosition = sensorTransD;
				cloud->addGrid(grid);

				//by default we don't compute normals without asking the user
				if (parameters.autoComputeNormals)
				{
					cloud->computeNormalsWithGrids(LS, 2, true, &normalsProgressDlg);
				}
			}

			cloud->setVisible(true);
			cloud->showColors(cloud->hasColors());
			cloud->showNormals(cloud->hasNormals());

			container.addChild(cloud);

#ifdef QT_DEBUG
			//break;
#endif
		}
	}

	//update scalar fields saturation (globally!)
	{
		bool validIntensityRange = true;
		if (minIntensity < 0 || maxIntensity > 1.0)
		{
			ccLog::Warning("[PTX] Intensity values are invalid (they should all fall in [0 ; 1])");
			validIntensityRange = false;
		}

		for (unsigned i = 0; i < container.getChildrenNumber(); ++i)
		{
			ccHObject* obj = container.getChild(i);
			assert(obj && obj->isA(CC_TYPES::POINT_CLOUD));
			CCLib::ScalarField* sf = static_cast<ccPointCloud*>(obj)->getScalarField(0);
			if (sf)
			{
				ccScalarField* ccSF = static_cast<ccScalarField*>(sf);
				ccSF->setColorScale(ccColorScalesManager::GetDefaultScale(validIntensityRange ? ccColorScalesManager::ABS_NORM_GREY : ccColorScalesManager::GREY));
				ccSF->setSaturationStart(0/*minIntensity*/);
				ccSF->setSaturationStop(maxIntensity);
			}
		}
	}

	return result;
}
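/* Illustrative usage sketch for the PTX loader (hypothetical caller, kept as a comment; it only
   relies on the LoadParameters members actually referenced above, 'parentWidget' and
   'autoComputeNormals', and assumes the usual FileIOFilter::LoadParameters definition):

	ccHObject container;
	FileIOFilter::LoadParameters params;
	params.parentWidget = 0;           //no parent widget: progress/shift dialogs are top-level
	params.autoComputeNormals = false; //do not compute normals from the scan grids

	PTXFilter filter;
	CC_FILE_ERROR err = filter.loadFile("scan.ptx", container, params);
	if (err != CC_FERR_NO_ERROR && err != CC_FERR_CANCELED_BY_USER)
		ccLog::Error("Failed to load the PTX file");
*/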