// Validates one NITF image segment for import.
// Hard-failure checks (return false with an explanatory errorMessage):
//   - segment is neither J2K-compressed (IC = C8/M8, importable without ossim)
//     nor importable through ossim's ossimNitfTileSource
//   - the base-class raster validation fails
// All remaining checks only append warnings to errorMessage and still return
// true: queued parser messages, platform file-size limit, NITF 2.0 ICORDS
// support, LUT usage, and classification levels in the image/DES subheaders.
// NOTE(review): VERIFY is a project macro; assumed to bail out of this
// function when its condition fails — confirm against AppVerify.h.
bool Nitf::NitfImporterShell::validate(const DataDescriptor* pDescriptor,
   const vector<const DataDescriptor*>& importedDescriptors, string& errorMessage) const
{
   if (pDescriptor == NULL)
   {
      return false;
   }

   // Get the name of the file being imported
   const FileDescriptor* const pFileDescriptor = pDescriptor->getFileDescriptor();
   VERIFY(pFileDescriptor != NULL);

   string filename = pFileDescriptor->getFilename().getFullPathAndName();

   // Get the image segment being validated.  The dataset location appears to
   // be a one-character prefix followed by a 1-based segment number (e.g.
   // "I1"); the prefix is dropped and the number converted to 0-based.
   // TODO(review): confirm the dataset location format against the importer's
   // getImportDescriptors().
   const string& datasetLocation = pFileDescriptor->getDatasetLocation();
   VERIFY(datasetLocation.empty() == false);

   string imageSegmentText = datasetLocation.substr(1);
   ossim_uint32 imageSegment = StringUtilities::fromDisplayString<unsigned int>(imageSegmentText) - 1;

   // Check for J2K compression, which can be imported without ossim
   const DynamicObject* pMetadata = pDescriptor->getMetadata();
   VERIFY(pMetadata != NULL);

   const string attributePath[] =
   {
      Nitf::NITF_METADATA,
      Nitf::IMAGE_SUBHEADER,
      Nitf::ImageSubheaderFieldNames::COMPRESSION,
      END_METADATA_NAME
   };

   string imageCompression = pMetadata->getAttributeByPath(attributePath).toDisplayString();
   if ((imageCompression != Nitf::ImageSubheaderFieldValues::IC_C8) &&
      (imageCompression != Nitf::ImageSubheaderFieldValues::IC_M8))
   {
      // This image segment does not have J2K compression, so check if it can be imported by ossim
      Nitf::OssimImageHandlerResource pHandler(filename);
      if (pHandler.get() == NULL || pHandler->canCastTo("ossimNitfTileSource") == false)
      {
         errorMessage = "This image segment is not supported by the " + getName() + ".";
         return false;
      }

      vector<ossim_uint32> importableImageSegments;
      pHandler->getEntryList(importableImageSegments);

      vector<ossim_uint32>::iterator segmentIter = find(importableImageSegments.begin(),
         importableImageSegments.end(), imageSegment);
      if (segmentIter == importableImageSegments.end())
      {
         // This image segment cannot be imported by ossim, which is generally due to the image segment
         // using an unsupported compression format
         errorMessage = "This image segment is not supported by the " + getName() + ".";
         return false;
      }
   }

   // Perform the default validation checks
   if (RasterElementImporterShell::validate(pDescriptor, importedDescriptors, errorMessage) == false)
   {
      // Augment specific base-class failure messages with the reason the
      // restriction exists (on-disk read-only processing).
      ValidationTest errorTest = getValidationError();
      if (errorTest == NO_BAND_FILES)
      {
         const RasterDataDescriptor* pRasterDescriptor = dynamic_cast<const RasterDataDescriptor*>(pDescriptor);
         VERIFY(pRasterDescriptor != NULL);

         if (pRasterDescriptor->getInterleaveFormat() == BSQ)
         {
            errorMessage += " Bands in multiple files are not supported with on-disk read-only processing.";
         }
      }
      else if ((errorTest == NO_ROW_SUBSETS) || (errorTest == NO_COLUMN_SUBSETS))
      {
         // Replace the trailing character (presumably the period) so the
         // qualifier reads as part of the same sentence.
         errorMessage = errorMessage.substr(0, errorMessage.length() - 1);
         errorMessage += " with on-disk read-only processing.";
      }

      return false;
   }

   // Add any previously obtained warning messages to the output message
   map<ossim_uint32, string>::const_iterator messageIter = mParseMessages.find(imageSegment);
   if (messageIter != mParseMessages.end() && messageIter->second.empty() == false)
   {
      if (errorMessage.empty() == false)
      {
         errorMessage += "\n";
      }

      errorMessage += messageIter->second;
   }

   // Check for file sizes too large for the current platform.  Both operands
   // are all-ones bit patterns, so the bitwise AND yields the smaller of the
   // two maxima, i.e. the largest offset representable in both ossim_uint64
   // and streamoff.
   const qint64 actualSize = QFile(QString::fromStdString(filename)).size();
   const qint64 maxSize = numeric_limits<ossim_uint64>::max() & numeric_limits<streamoff>::max();
   if (actualSize > maxSize)
   {
      if (errorMessage.empty() == false)
      {
         errorMessage += "\n";
      }

      errorMessage += "This file exceeds the maximum supported size for this platform. "
         "Data at the end of the file may be missing or incorrect. "
         "IMPORT DATA FROM THIS FILE WITH THE KNOWLEDGE THAT IT IS NOT FULLY SUPPORTED.";
   }

   // warn user if unsupported metadata is in the file
   // NITF 2.0: ICORDS values of U and C are unsupported
   // NOTE(review): only the geocentric value is checked below — confirm
   // whether the "U" case is handled elsewhere or silently accepted.
   const string versionPathName[] =
   {
      Nitf::NITF_METADATA,
      Nitf::FILE_HEADER,
      Nitf::FileHeaderFieldNames::FILE_VERSION,
      END_METADATA_NAME
   };

   if (pMetadata->getAttributeByPath(versionPathName).toDisplayString() == Nitf::VERSION_02_00)
   {
      const string iCordsPath[] =
      {
         Nitf::NITF_METADATA,
         Nitf::IMAGE_SUBHEADER,
         Nitf::ImageSubheaderFieldNames::ICORDS,
         END_METADATA_NAME
      };

      string iCords = pMetadata->getAttributeByPath(iCordsPath).toDisplayString();
      if (iCords == Nitf::ImageSubheaderFieldValues::ICORDS_GEOCENTRIC)
      {
         if (errorMessage.empty() == false)
         {
            errorMessage += "\n";
         }

         errorMessage += "The ICORDS is not a supported value.";
      }
   }

   // LUTs are unsupported: a LUT is detected from (in order) the image
   // representation (IREP), any band representation (IREPBAND), or a nonzero
   // per-band LUT count (NLUTS).
   bool hasLut = false;

   const string irepPathName[] =
   {
      Nitf::NITF_METADATA,
      Nitf::IMAGE_SUBHEADER,
      Nitf::ImageSubheaderFieldNames::REPRESENTATION,
      END_METADATA_NAME
   };

   string irep;
   const DataVariant& dvIrep = pMetadata->getAttributeByPath(irepPathName);
   if (dvIrep.getValue(irep) == true)
   {
      if (irep == Nitf::ImageSubheaderFieldValues::REPRESENTATION_LUT)
      {
         hasLut = true;
      }
   }

   if (hasLut == false)
   {
      vector<string> irepBands;

      const string irepBandsPathName[] =
      {
         Nitf::NITF_METADATA,
         Nitf::IMAGE_SUBHEADER,
         Nitf::ImageSubheaderFieldNames::BAND_REPRESENTATIONS,
         END_METADATA_NAME
      };

      const DataVariant& dvIrepBands = pMetadata->getAttributeByPath(irepBandsPathName);
      if (dvIrepBands.getValue(irepBands) == true)
      {
         for (vector<string>::iterator iter = irepBands.begin(); iter != irepBands.end(); iter++)
         {
            if (*iter == Nitf::ImageSubheaderFieldValues::BAND_REPRESENTATIONS_LUT)
            {
               hasLut = true;
               break;
            }
         }
      }
   }

   if (hasLut == false)
   {
      vector<unsigned int> numLuts;

      const string numLutsPathName[] =
      {
         Nitf::NITF_METADATA,
         Nitf::IMAGE_SUBHEADER,
         Nitf::ImageSubheaderFieldNames::NUMBER_OF_LUTS,
         END_METADATA_NAME
      };

      const DataVariant& dvNumLuts = pMetadata->getAttributeByPath(numLutsPathName);
      if (dvNumLuts.getValue(numLuts) == true)
      {
         for (vector<unsigned int>::iterator iter = numLuts.begin(); iter != numLuts.end(); iter++)
         {
            if (*iter > 0)
            {
               hasLut = true;
               break;
            }
         }
      }
   }

   if (hasLut == true)
   {
      if (errorMessage.empty() == false)
      {
         errorMessage += "\n";
      }

      errorMessage += "The lookup table will not be applied.";
   }

   // Check for valid Classification markings. If any level is higher than the file header, display a warning.
   FactoryResource<Classification> pClassification;

   const Classification* pOverallClassification = pDescriptor->getClassification();
   VERIFY(pOverallClassification != NULL);

   // Look in the image subheader.
   string imageClassLevel;

   const string imageClassLevelPathName[] =
   {
      Nitf::NITF_METADATA,
      Nitf::IMAGE_SUBHEADER,
      Nitf::ImageSubheaderFieldNames::SECURITY_LEVEL,
      END_METADATA_NAME
   };

   const DataVariant& dvImageClassLevel = pMetadata->getAttributeByPath(imageClassLevelPathName);
   if (dvImageClassLevel.getValue(imageClassLevel) == true)
   {
      pClassification->setLevel(imageClassLevel);
      if (pClassification->hasGreaterLevel(pOverallClassification) == true)
      {
         if (errorMessage.empty() == false)
         {
            errorMessage += "\n";
         }

         errorMessage += "THIS FILE CONTAINS INVALID CLASSIFICATION INFORMATION! The image has a higher "
            "classification level than the file. Update the Classification information before proceeding.";
      }
   }

   // Look in each DES subheader.
   int numDes;

   const string numDesPathName[] =
   {
      Nitf::NITF_METADATA,
      Nitf::FILE_HEADER,
      Nitf::FileHeaderFieldNames::NUM_DES,
      END_METADATA_NAME
   };

   const DataVariant& dvNumDes = pMetadata->getAttributeByPath(numDesPathName);
   if (dvNumDes.getValue(numDes) == true)
   {
      for (int i = 0; i < numDes; ++i)
      {
         // DES metadata nodes are named "DES_000", "DES_001", ...
         stringstream desStr;
         desStr << "DES_" << setw(3) << setfill('0') << i;

         string desClassLevel;

         const string desClassLevelPathName[] =
         {
            Nitf::NITF_METADATA,
            Nitf::DES_METADATA,
            desStr.str(),
            Nitf::DesSubheaderFieldNames::SECURITY_LEVEL,
            END_METADATA_NAME
         };

         const DataVariant& dvDesClassLevel = pMetadata->getAttributeByPath(desClassLevelPathName);
         if (dvDesClassLevel.getValue(desClassLevel) == true)
         {
            pClassification->setLevel(desClassLevel);
            if (pClassification->hasGreaterLevel(pOverallClassification) == true)
            {
               if (errorMessage.empty() == false)
               {
                  errorMessage += "\n";
               }

               errorMessage += "THIS FILE CONTAINS INVALID CLASSIFICATION INFORMATION! " + desStr.str() +
                  " has a higher classification level than the file. Update the Classification information "
                  "before proceeding.";
            }
         }
      }
   }

   return true;
}
// Single-descriptor validate() overload.  Runs the base-class raster checks
// first (the only path that returns false); after they pass, every remaining
// check only appends a non-fatal warning to errorMessage: queued parser
// messages, platform file-size limit, NITF 2.0 ICORDS support, LUT usage,
// and classification levels in the image/DES subheaders.
// NOTE(review): VERIFY is a project macro; assumed to bail out of this
// function when its condition fails — confirm against AppVerify.h.
bool Nitf::NitfImporterShell::validate(const DataDescriptor* pDescriptor, string& errorMessage) const
{
   if (RasterElementImporterShell::validate(pDescriptor, errorMessage) == false)
   {
      // Augment specific base-class failure messages with the reason the
      // restriction exists (on-disk read-only processing).
      ValidationTest errorTest = getValidationError();
      if (errorTest == NO_BAND_FILES)
      {
         const RasterDataDescriptor* pRasterDescriptor = dynamic_cast<const RasterDataDescriptor*>(pDescriptor);
         VERIFY(pRasterDescriptor != NULL);

         if (pRasterDescriptor->getInterleaveFormat() == BSQ)
         {
            errorMessage += " Bands in multiple files are not supported with on-disk read-only processing.";
         }
      }
      else if ((errorTest == NO_ROW_SUBSETS) || (errorTest == NO_COLUMN_SUBSETS))
      {
         // Replace the trailing character (presumably the period) so the
         // qualifier reads as part of the same sentence.
         errorMessage = errorMessage.substr(0, errorMessage.length() - 1);
         errorMessage += " with on-disk read-only processing.";
      }

      return false;
   }

   VERIFY(pDescriptor != NULL);

   const FileDescriptor* const pFileDescriptor = pDescriptor->getFileDescriptor();
   VERIFY(pFileDescriptor != NULL);

   // Add any warning recorded while parsing this dataset location
   map<string, string>::const_iterator iter = mParseMessages.find(pFileDescriptor->getDatasetLocation());
   if (iter != mParseMessages.end() && iter->second.empty() == false)
   {
      errorMessage += iter->second;
   }

   // Check for file sizes too large for the current platform.  Both operands
   // are all-ones bit patterns, so the bitwise AND yields the smaller of the
   // two maxima, i.e. the largest offset representable in both ossim_uint64
   // and streamoff.
   const qint64 actualSize = QFile(QString::fromStdString(pFileDescriptor->getFilename().getFullPathAndName())).size();
   const qint64 maxSize = numeric_limits<ossim_uint64>::max() & numeric_limits<std::streamoff>::max();
   if (actualSize > maxSize)
   {
      errorMessage += "This file exceeds the maximum supported size for this platform.\n"
         "Data at the end of the file may be missing or incorrect.\n"
         "IMPORT DATA FROM THIS FILE WITH THE KNOWLEDGE THAT IT IS NOT FULLY SUPPORTED.\n";
   }

   // warn user if unsupported metadata is in the file
   // NITF 2.0: ICORDS values of U and C are unsupported
   // NOTE(review): only the geocentric value is checked below — confirm
   // whether the "U" case is handled elsewhere or silently accepted.
   const DynamicObject* pMetadata = pDescriptor->getMetadata();
   VERIFY(pMetadata != NULL);

   const string versionPathName[] =
   {
      Nitf::NITF_METADATA,
      Nitf::FILE_HEADER,
      Nitf::FileHeaderFieldNames::FILE_VERSION,
      END_METADATA_NAME
   };

   if (pMetadata->getAttributeByPath(versionPathName).toDisplayString() == Nitf::VERSION_02_00)
   {
      const string iCordsPath[] =
      {
         Nitf::NITF_METADATA,
         Nitf::IMAGE_SUBHEADER,
         Nitf::ImageSubheaderFieldNames::ICORDS,
         END_METADATA_NAME
      };

      string iCords = pMetadata->getAttributeByPath(iCordsPath).toDisplayString();
      if (iCords == Nitf::ImageSubheaderFieldValues::ICORDS_GEOCENTRIC)
      {
         errorMessage += "The ICORDS is not a supported value.\n";
      }
   }

   // LUTs are unsupported: a LUT is detected from (in order) the image
   // representation (IREP), any band representation (IREPBAND), or a nonzero
   // per-band LUT count (NLUTS).
   bool hasLut = false;

   const string irepPathName[] =
   {
      Nitf::NITF_METADATA,
      Nitf::IMAGE_SUBHEADER,
      Nitf::ImageSubheaderFieldNames::REPRESENTATION,
      END_METADATA_NAME
   };

   string irep;
   const DataVariant& dvIrep = pMetadata->getAttributeByPath(irepPathName);
   if (dvIrep.getValue(irep) == true)
   {
      if (irep == Nitf::ImageSubheaderFieldValues::REPRESENTATION_LUT)
      {
         hasLut = true;
      }
   }

   if (hasLut == false)
   {
      vector<string> irepBands;

      const string irepBandsPathName[] =
      {
         Nitf::NITF_METADATA,
         Nitf::IMAGE_SUBHEADER,
         Nitf::ImageSubheaderFieldNames::BAND_REPRESENTATIONS,
         END_METADATA_NAME
      };

      const DataVariant& dvIrepBands = pMetadata->getAttributeByPath(irepBandsPathName);
      if (dvIrepBands.getValue(irepBands) == true)
      {
         for (vector<string>::iterator iter = irepBands.begin(); iter != irepBands.end(); iter++)
         {
            if (*iter == Nitf::ImageSubheaderFieldValues::BAND_REPRESENTATIONS_LUT)
            {
               hasLut = true;
               break;
            }
         }
      }
   }

   if (hasLut == false)
   {
      vector<unsigned int> numLuts;

      const string numLutsPathName[] =
      {
         Nitf::NITF_METADATA,
         Nitf::IMAGE_SUBHEADER,
         Nitf::ImageSubheaderFieldNames::NUMBER_OF_LUTS,
         END_METADATA_NAME
      };

      const DataVariant& dvNumLuts = pMetadata->getAttributeByPath(numLutsPathName);
      if (dvNumLuts.getValue(numLuts) == true)
      {
         for (vector<unsigned int>::iterator iter = numLuts.begin(); iter != numLuts.end(); iter++)
         {
            if (*iter > 0)
            {
               hasLut = true;
               break;
            }
         }
      }
   }

   if (hasLut == true)
   {
      errorMessage += "The lookup table will not be applied.\n";
   }

   // Check for valid Classification markings. If any level is higher than the file header, display a warning.
   FactoryResource<Classification> pClassification;

   const Classification* pOverallClassification = pDescriptor->getClassification();
   VERIFY(pOverallClassification != NULL);

   // Look in the image subheader.
   string imageClassLevel;

   const string imageClassLevelPathName[] =
   {
      Nitf::NITF_METADATA,
      Nitf::IMAGE_SUBHEADER,
      Nitf::ImageSubheaderFieldNames::SECURITY_LEVEL,
      END_METADATA_NAME
   };

   const DataVariant& dvImageClassLevel = pMetadata->getAttributeByPath(imageClassLevelPathName);
   if (dvImageClassLevel.getValue(imageClassLevel) == true)
   {
      pClassification->setLevel(imageClassLevel);
      if (pClassification->hasGreaterLevel(pOverallClassification) == true)
      {
         errorMessage += "THIS FILE CONTAINS INVALID CLASSIFICATION INFORMATION!\n"
            "The image has a higher classification level than the file.\n"
            "Update the Classification information before proceeding.\n";
      }
   }

   // Look in each DES subheader.
   int numDes;

   const string numDesPathName[] =
   {
      Nitf::NITF_METADATA,
      Nitf::FILE_HEADER,
      Nitf::FileHeaderFieldNames::NUM_DES,
      END_METADATA_NAME
   };

   const DataVariant& dvNumDes = pMetadata->getAttributeByPath(numDesPathName);
   if (dvNumDes.getValue(numDes) == true)
   {
      for (int i = 0; i < numDes; ++i)
      {
         // DES metadata nodes are named "DES_000", "DES_001", ...
         stringstream desStr;
         desStr << "DES_" << setw(3) << setfill('0') << i;

         string desClassLevel;

         const string desClassLevelPathName[] =
         {
            Nitf::NITF_METADATA,
            Nitf::DES_METADATA,
            desStr.str(),
            Nitf::DesSubheaderFieldNames::SECURITY_LEVEL,
            END_METADATA_NAME
         };

         const DataVariant& dvDesClassLevel = pMetadata->getAttributeByPath(desClassLevelPathName);
         if (dvDesClassLevel.getValue(desClassLevel) == true)
         {
            pClassification->setLevel(desClassLevel);
            if (pClassification->hasGreaterLevel(pOverallClassification) == true)
            {
               errorMessage += "THIS FILE CONTAINS INVALID CLASSIFICATION INFORMATION!\n" + desStr.str() +
                  " has a higher classification level than the file.\n"
                  "Update the Classification information before proceeding.\n";
            }
         }
      }
   }

   return true;
}
bool ClassificationImp::setClassification(const string& classificationText) { if (classificationText.empty() == true) { return false; } Service<UtilityServices> pUtilities; FactoryResource<Classification> pClassification; const string delimiter = "//"; // Level QString field = QString::fromStdString(classificationText); string::size_type pos = classificationText.find(delimiter); if (pos != string::npos) { field = QString::fromStdString(classificationText.substr(0, pos)); } pClassification->setLevel(field.toStdString()); // All other fields while (pos != string::npos) { // Get the next field pos += delimiter.length(); string::size_type nextPos = classificationText.find(delimiter, pos); if (nextPos != string::npos) { field = QString::fromStdString(classificationText.substr(pos, nextPos - pos)); } else { field = QString::fromStdString(classificationText.substr(pos)); } pos = nextPos; if (field.isEmpty() == true) { continue; } QStringList fieldList = field.split("/", QString::SkipEmptyParts); // Codewords bool codewordsField = true; const vector<string>& codewords = pUtilities->getCodewords(); for (int i = 0; i < fieldList.count(); ++i) { if (std::find(codewords.begin(), codewords.end(), fieldList[i].toStdString()) == codewords.end()) { codewordsField = false; break; } } if (codewordsField == true) { field.replace("/", " "); pClassification->setCodewords(field.toStdString()); continue; } // System bool systemField = true; const vector<string>& systems = pUtilities->getSystems(); for (int i = 0; i < fieldList.count(); ++i) { if (std::find(systems.begin(), systems.end(), fieldList[i].toStdString()) == systems.end()) { systemField = false; break; } } if (systemField == true) { field.replace("/", " "); pClassification->setSystem(field.toStdString()); continue; } // File releasing bool fileReleasingField = true; const QString relTo = "REL TO"; const vector<string>& fileReleasings = pUtilities->getFileReleasing(); for (int i = 0; i < fieldList.count(); ++i) { QString 
fileReleasing = fieldList[i]; if (fileReleasing.indexOf(relTo) == 0) { fileReleasing = relTo; } if (std::find(fileReleasings.begin(), fileReleasings.end(), fileReleasing.toStdString()) == fileReleasings.end()) { fileReleasingField = false; break; } } if (fileReleasingField == true) { int relToPos = field.indexOf(relTo); if (relToPos != -1) { // Extract the country codes from the file releasing field QString countryCodesField; int slashPos = field.indexOf("/", relToPos); if (slashPos != -1) { countryCodesField = field.mid(relToPos + relTo.length() + 1, slashPos - relToPos - relTo.length() - 1); field.remove(relToPos + relTo.length(), slashPos - relToPos - relTo.length()); } else { countryCodesField = field.mid(relToPos + relTo.length() + 1); field.truncate(relToPos + relTo.length()); } // Add the escape sequence field.replace(relTo, "REL\\ TO"); // Country codes bool validCountryCodes = true; if (countryCodesField.isEmpty() == false) { const vector<string>& countryCodes = pUtilities->getCountryCodes(); QStringList countryList = countryCodesField.split(", ", QString::SkipEmptyParts); for (int i = 0; i < countryList.count(); ++i) { if (std::find(countryCodes.begin(), countryCodes.end(), countryList[i].toStdString()) == countryCodes.end()) { validCountryCodes = false; break; } } } else { validCountryCodes = false; } // Must have valid country codes if (validCountryCodes == false) { return false; } countryCodesField.replace(",", QString()); pClassification->setCountryCode(countryCodesField.toStdString()); } field.replace("/", " "); pClassification->setFileReleasing(field.toStdString()); continue; } // Declassification exemption bool declassExemptionField = true; const vector<string>& exemptions = pUtilities->getDeclassificationExemptions(); for (int i = 0; i < fieldList.count(); ++i) { if (std::find(exemptions.begin(), exemptions.end(), fieldList[i].toStdString()) == exemptions.end()) { declassExemptionField = false; break; } } if (declassExemptionField == true) { 
field.replace("/", " "); pClassification->setDeclassificationExemption(field.toStdString()); continue; } // Declassification date QDate declassDate = QDate::fromString(field, "yyyyMMdd"); if (declassDate.isValid() == true) { FactoryResource<DateTime> pDeclassDate; pDeclassDate->set(declassDate.year(), declassDate.month(), declassDate.day()); pClassification->setDeclassificationDate(pDeclassDate.get()); continue; } // Classification reason bool reasonField = true; const vector<string>& reasons = pUtilities->getClassificationReasons(); for (int i = 0; i < fieldList.count(); ++i) { if (std::find(reasons.begin(), reasons.end(), fieldList[i].toStdString()) == reasons.end()) { reasonField = false; break; } } if (reasonField == true) { field.replace("/", " "); pClassification->setClassificationReason(field.toStdString()); continue; } // Declassification type bool declassTypeField = true; const vector<string>& declassTypes = pUtilities->getDeclassificationTypes(); for (int i = 0; i < fieldList.count(); ++i) { if (std::find(declassTypes.begin(), declassTypes.end(), fieldList[i].toStdString()) == declassTypes.end()) { declassTypeField = false; break; } } if (declassTypeField == true) { field.replace("/", " "); pClassification->setDeclassificationType(field.toStdString()); continue; } // File downgrades bool fileDowngradeField = true; const vector<string>& fileDowngrades = pUtilities->getFileDowngrades(); for (int i = 0; i < fieldList.count(); ++i) { if (std::find(fileDowngrades.begin(), fileDowngrades.end(), fieldList[i].toStdString()) == fileDowngrades.end()) { fileDowngradeField = false; break; } } if (fileDowngradeField == true) { field.replace("/", " "); pClassification->setFileDowngrade(field.toStdString()); continue; } // File control bool fileControlField = true; const vector<string>& fileControls = pUtilities->getFileControls(); for (int i = 0; i < fieldList.count(); ++i) { if (std::find(fileControls.begin(), fileControls.end(), fieldList[i].toStdString()) == 
fileControls.end()) { fileControlField = false; break; } } if (fileControlField == true) { field.replace("/", " "); pClassification->setFileControl(field.toStdString()); continue; } // Not a valid field return false; } setClassification(pClassification.get()); return true; }
vector<ImportDescriptor*> EnviImporter::getImportDescriptors(const string& filename) { string headerFile = filename; string dataFile; bool bSuccess = parseHeader(headerFile); if (bSuccess == false) { dataFile = filename; // was passed data file name instead of header file name headerFile = findHeaderFile(headerFile); if (headerFile.empty() == false) { bSuccess = parseHeader(headerFile); } } EnviField* pField = NULL; vector<ImportDescriptor*> descriptors; if (bSuccess == true) { if (dataFile.empty() == true) // was passed header file name and now need to find the data file name { dataFile = findDataFile(headerFile); } if (dataFile.empty() == false) { ImportDescriptor* pImportDescriptor = mpModel->createImportDescriptor(dataFile, "RasterElement", NULL); if (pImportDescriptor != NULL) { RasterDataDescriptor* pDescriptor = dynamic_cast<RasterDataDescriptor*>(pImportDescriptor->getDataDescriptor()); if (pDescriptor != NULL) { FactoryResource<RasterFileDescriptor> pFileDescriptor; if (pFileDescriptor.get() != NULL) { // Filename pFileDescriptor->setFilename(dataFile); // Coordinate offset int columnOffset = 0; int rowOffset = 0; pField = mFields.find("x start"); if (pField != NULL) { // ENVI numbers are 1 based vs Opticks being 0 based columnOffset = atoi(pField->mValue.c_str()) - 1; } pField = mFields.find("y start"); if (pField != NULL) { rowOffset = atoi(pField->mValue.c_str()) - 1; // ENVI numbers are 1 based vs Opticks being 0 based } // Rows vector<DimensionDescriptor> rows; pField = mFields.find("lines"); if (pField != NULL) { int numRows = atoi(pField->mValue.c_str()); for (int i = 0; i < numRows; ++i) { DimensionDescriptor rowDim; rowDim.setOriginalNumber(static_cast<unsigned int>(rowOffset + i)); rowDim.setOnDiskNumber(static_cast<unsigned int>(i)); rows.push_back(rowDim); } pDescriptor->setRows(rows); pFileDescriptor->setRows(rows); } string samplesStr = "samples"; string bandsStr = "bands"; // Special case: if the file type is an ENVI Spectral Library, then 
swap samples with bands // If no file type field exists, assume this is a normal ENVI header (not a Spectral Library) EnviField* pFileTypeField = mFields.find("file type"); if (pFileTypeField != NULL && (pFileTypeField->mValue == "ENVI Spectral Library" || pFileTypeField->mValue == "Spectral Library")) { samplesStr = "bands"; bandsStr = "samples"; // Since bands and samples are swapped, force the interleave to BIP pField = mFields.find("interleave"); if (pField != NULL) { pField->mValue = "bip"; } } // Columns vector<DimensionDescriptor> columns; pField = mFields.find(samplesStr); if (pField != NULL) { int numColumns = atoi(pField->mValue.c_str()); for (int i = 0; i < numColumns; ++i) { DimensionDescriptor columnDim; columnDim.setOriginalNumber(static_cast<unsigned int>(columnOffset + i)); columnDim.setOnDiskNumber(static_cast<unsigned int>(i)); columns.push_back(columnDim); } pDescriptor->setColumns(columns); pFileDescriptor->setColumns(columns); } // Bands vector<DimensionDescriptor> bands; pField = mFields.find(bandsStr); if (pField != NULL) { int numBands = atoi(pField->mValue.c_str()); bands = RasterUtilities::generateDimensionVector(numBands, true, false, true); pDescriptor->setBands(bands); pFileDescriptor->setBands(bands); } // Description list<GcpPoint> gcps; pField = mFields.find("description"); if (pField != NULL) { // Metadata if (pField->mChildren.empty() == false) { FactoryResource<DynamicObject> pMetadata; for (unsigned int i = 0; i < pField->mChildren.size(); ++i) { EnviField* pChild = pField->mChildren[i]; if (pChild != NULL) { if (pChild->mTag == "classification") { // Classification FactoryResource<Classification> pClassification; if (pClassification.get() != NULL) { string classLevel; classLevel.append(1, *(pChild->mValue.data())); pClassification->setLevel(classLevel); pDescriptor->setClassification(pClassification.get()); } } else if ((pChild->mTag == "ll") || (pChild->mTag == "lr") || (pChild->mTag == "ul") || (pChild->mTag == "ur") || 
(pChild->mTag == "center")) { GcpPoint gcp; bool dmsFormat = false; char ns; char ew; sscanf(pChild->mValue.c_str(), "%lg%c %lg%c", &gcp.mCoordinate.mY, &ew, &gcp.mCoordinate.mX, &ns); if (fabs(gcp.mCoordinate.mY) > 180.0 || fabs(gcp.mCoordinate.mX) > 90.0) { dmsFormat = true; } double deg; double min; double sec; if (dmsFormat == true) { deg = static_cast<int>(gcp.mCoordinate.mY / 10000.0); min = static_cast<int>((gcp.mCoordinate.mY - 10000.0 * deg) / 100.0); sec = gcp.mCoordinate.mY - 10000.0 * deg - 100.0 * min; gcp.mCoordinate.mY = deg + (min / 60.0) + (sec / 3600.0); } if (ew == 'W' || ew == 'w') { gcp.mCoordinate.mY = -gcp.mCoordinate.mY; } if (dmsFormat) { deg = static_cast<int>(gcp.mCoordinate.mX / 10000.0); min = static_cast<int>((gcp.mCoordinate.mX - 10000.0 * deg) / 100.0); sec = gcp.mCoordinate.mX - 10000.0 * deg - 100.0 * min; gcp.mCoordinate.mX = deg + (min / 60.0) + (sec / 3600.0); } if (ns == 'S' || ns == 's') { gcp.mCoordinate.mX = -gcp.mCoordinate.mX; } // ENVI uses a 1-based pixel coordinate system, with each coordinate referring // to the top-left corner of the pixel, e.g. (1,1) is the top-left // corner of the pixel in the top-left of the raster cube // The ENVI pixel coordinate format is described on p. 
1126 of the ENVI 4.2 User's Guide if (pChild->mTag == "ll") { gcp.mPixel.mX = 0.0; gcp.mPixel.mY = 0.0; } else if (pChild->mTag == "lr") { gcp.mPixel.mX = columns.size() - 1.0; gcp.mPixel.mY = 0.0; } else if (pChild->mTag == "ul") { gcp.mPixel.mX = 0.0; gcp.mPixel.mY = rows.size() - 1.0; } else if (pChild->mTag == "ur") { gcp.mPixel.mX = columns.size() - 1.0; gcp.mPixel.mY = rows.size() - 1.0; } else if (pChild->mTag == "center") { gcp.mPixel.mX = floor((columns.size() - 1.0) / 2.0); gcp.mPixel.mY = floor((rows.size() - 1.0) / 2.0); } gcps.push_back(gcp); } else if (pChild->mTag.empty() == false) { pMetadata->setAttribute(pChild->mTag, pChild->mValue); } } } if (pMetadata->getNumAttributes() > 0) { pDescriptor->setMetadata(pMetadata.get()); } } } if (gcps.empty()) // not in description, check for geo points keyword { pField = mFields.find("geo points"); if (pField != NULL) { vector<double> geoValues; const int expectedNumValues = 16; // 4 values for each of the 4 corners geoValues.reserve(expectedNumValues); for (unsigned int i = 0; i < pField->mChildren.size(); i++) { vectorFromField(pField->mChildren.at(i), geoValues, "%lf"); } if (geoValues.size() == expectedNumValues) { vector<double>::iterator iter = geoValues.begin(); GcpPoint gcp; while (iter != geoValues.end()) { gcp.mPixel.mX = *iter++ - 1.0; // adjust ref point for ENVI's use of gcp.mPixel.mY = *iter++ - 1.0; // upper left corner and one-based first pixel gcp.mCoordinate.mX = *iter++; // GcpPoint has lat as mX and Lon as mY gcp.mCoordinate.mY = *iter++; // geo point field has lat then lon value gcps.push_back(gcp); } } } } // GCPs if (gcps.empty() == false) { pFileDescriptor->setGcps(gcps); } // Header bytes pField = mFields.find("header offset"); if (pField != NULL) { int headerBytes = atoi(pField->mValue.c_str()); pFileDescriptor->setHeaderBytes(static_cast<unsigned int>(headerBytes)); } // Data type pField = mFields.find("data type"); if (pField != NULL) { vector<EncodingType> validDataTypes; switch 
(atoi(pField->mValue.c_str())) { case 1: // char pDescriptor->setDataType(INT1UBYTE); pFileDescriptor->setBitsPerElement(8); // signed char cannot be represented in ENVI header so use the closest thing validDataTypes.push_back(INT1SBYTE); break; case 2: // short pDescriptor->setDataType(INT2SBYTES); pFileDescriptor->setBitsPerElement(16); break; case 3: // int pDescriptor->setDataType(INT4SBYTES); pFileDescriptor->setBitsPerElement(32); break; case 4: // float pDescriptor->setDataType(FLT4BYTES); pFileDescriptor->setBitsPerElement(32); break; case 5: // double pDescriptor->setDataType(FLT8BYTES); pFileDescriptor->setBitsPerElement(64); break; case 6: // float complex pDescriptor->setDataType(FLT8COMPLEX); pFileDescriptor->setBitsPerElement(64); break; case 9: // double complex // not supported break; case 12: // unsigned short pDescriptor->setDataType(INT2UBYTES); pFileDescriptor->setBitsPerElement(16); break; case 13: // unsigned int pDescriptor->setDataType(INT4UBYTES); pFileDescriptor->setBitsPerElement(32); break; case 14: // 64-bit int case 15: // unsigned 64-bit int // not supported break; case 99: // integer complex (recognized only by this application) pDescriptor->setDataType(INT4SCOMPLEX); pFileDescriptor->setBitsPerElement(32); break; default: break; } // Bad values EncodingType dataType = pDescriptor->getDataType(); if ((dataType != FLT4BYTES) && (dataType != FLT8COMPLEX) && (dataType != FLT8BYTES)) { vector<int> badValues; badValues.push_back(0); pDescriptor->setBadValues(badValues); } validDataTypes.push_back(dataType); pDescriptor->setValidDataTypes(validDataTypes); } // Interleave format pField = mFields.find("interleave"); if (pField != NULL) { string interleave = StringUtilities::toLower(pField->mValue); if (interleave == "bip") { pDescriptor->setInterleaveFormat(BIP); pFileDescriptor->setInterleaveFormat(BIP); } else if (interleave == "bil") { pDescriptor->setInterleaveFormat(BIL); pFileDescriptor->setInterleaveFormat(BIL); } else if (interleave 
== "bsq") { pDescriptor->setInterleaveFormat(BSQ); pFileDescriptor->setInterleaveFormat(BSQ); } } // Endian pField = mFields.find("byte order"); if (pField != NULL) { int byteOrder = atoi(pField->mValue.c_str()); if (byteOrder == 0) { pFileDescriptor->setEndian(LITTLE_ENDIAN_ORDER); } else if (byteOrder == 1) { pFileDescriptor->setEndian(BIG_ENDIAN_ORDER); } } // check for scaling factor pField = mFields.find("reflectance scale factor"); if (pField != NULL) { double scalingFactor = 0.0; stringstream scaleStream(pField->mValue); scaleStream >> scalingFactor; if (!scaleStream.fail() && scalingFactor != 0.0) { Units* pUnits = pDescriptor->getUnits(); if (pUnits != NULL) { pUnits->setScaleFromStandard(1.0 / scalingFactor); pUnits->setUnitName("Reflectance"); pUnits->setUnitType(REFLECTANCE); } } } // Pixel size pField = mFields.find("pixel size"); if (pField != NULL) { if (pField->mChildren.size() == 2) { pField = pField->mChildren[0]; if (pField != NULL) { double pixelSize = 1.0; if (sscanf(pField->mValue.c_str(), "%g", &pixelSize) == 1) { pDescriptor->setXPixelSize(pixelSize); pFileDescriptor->setXPixelSize(pixelSize); } } pField = pField->mChildren[1]; if (pField != NULL) { double pixelSize = 1.0; if (sscanf(pField->mValue.c_str(), "%g", &pixelSize) == 1) { pDescriptor->setYPixelSize(pixelSize); pFileDescriptor->setYPixelSize(pixelSize); } } } } // Default bands pField = mFields.find("default bands"); if (pField != NULL) { vector<unsigned int> displayBands; parseDefaultBands(pField, &displayBands); if (displayBands.size() == 1) { DimensionDescriptor grayBand = pFileDescriptor->getOriginalBand(displayBands[0]); pDescriptor->setDisplayBand(GRAY, grayBand); pDescriptor->setDisplayMode(GRAYSCALE_MODE); } else if (displayBands.size() == 3) { DimensionDescriptor redBand = pFileDescriptor->getOriginalBand(displayBands[0]); DimensionDescriptor greenBand = pFileDescriptor->getOriginalBand(displayBands[1]); DimensionDescriptor blueBand = 
pFileDescriptor->getOriginalBand(displayBands[2]); pDescriptor->setDisplayBand(RED, redBand); pDescriptor->setDisplayBand(GREEN, greenBand); pDescriptor->setDisplayBand(BLUE, blueBand); pDescriptor->setDisplayMode(RGB_MODE); } } // Bad bands pField = mFields.find("bbl"); if (pField != NULL) { vector<unsigned int> validBands; parseBbl(pField, validBands); vector<DimensionDescriptor> bandsToLoad; for (vector<unsigned int>::const_iterator iter = validBands.begin(); iter != validBands.end(); ++iter) { const unsigned int onDiskNumber = *iter; const DimensionDescriptor dim = pFileDescriptor->getOnDiskBand(onDiskNumber); if (dim.isValid()) { bandsToLoad.push_back(dim); } } pDescriptor->setBands(bandsToLoad); } DynamicObject* pMetadata = pDescriptor->getMetadata(); // Band names pField = mFields.find("band names"); if (pField != NULL) { vector<string> bandNames; bandNames.reserve(bands.size()); vector<string> strNames; for (vector<EnviField*>::size_type i = 0; i < pField->mChildren.size(); ++i) { strNames = StringUtilities::split(pField->mChildren[i]->mValue, ','); copy(strNames.begin(), strNames.end(), back_inserter(bandNames)); } vector<string>::iterator it; for (it = bandNames.begin(); it != bandNames.end(); ++it) { *it = StringUtilities::stripWhitespace(*it); } if (pMetadata != NULL) { string pNamesPath[] = { SPECIAL_METADATA_NAME, BAND_METADATA_NAME, NAMES_METADATA_NAME, END_METADATA_NAME }; pMetadata->setAttributeByPath(pNamesPath, bandNames); } } // wavelength units pField = mFields.find("wavelength units"); if (pField != NULL) { mWavelengthUnits = strToType(pField->mValue); } // Wavelengths vector<double> centerWavelengths; pField = mFields.find("wavelength"); if (pField != NULL) { if ((parseWavelengths(pField, ¢erWavelengths) == true) && (pMetadata != NULL)) { string pCenterPath[] = { SPECIAL_METADATA_NAME, BAND_METADATA_NAME, CENTER_WAVELENGTHS_METADATA_NAME, END_METADATA_NAME }; pMetadata->setAttributeByPath(pCenterPath, centerWavelengths); } } // FWHM pField = 
mFields.find("fwhm"); if (pField != NULL) { vector<double> startWavelengths; vector<double> endWavelengths; if ((parseFwhm(pField, &startWavelengths, ¢erWavelengths, &endWavelengths) == true) && (pMetadata != NULL)) { string pStartPath[] = { SPECIAL_METADATA_NAME, BAND_METADATA_NAME, START_WAVELENGTHS_METADATA_NAME, END_METADATA_NAME }; pMetadata->setAttributeByPath(pStartPath, startWavelengths); string pEndPath[] = { SPECIAL_METADATA_NAME, BAND_METADATA_NAME, END_WAVELENGTHS_METADATA_NAME, END_METADATA_NAME }; pMetadata->setAttributeByPath(pEndPath, endWavelengths); } } // File descriptor pDescriptor->setFileDescriptor(pFileDescriptor.get()); }
bool ImporterShell::validate(const DataDescriptor* pDescriptor, string& errorMessage) const { mValidationError = ValidationTest(); // Check for no validation int validationTest = getValidationTest(pDescriptor); if (validationTest == NO_VALIDATION) { return true; } // Always validate the data descriptor and file descriptor if (pDescriptor == NULL) { errorMessage = "The data set information is invalid."; return false; } const FileDescriptor* pFileDescriptor = pDescriptor->getFileDescriptor(); if (pFileDescriptor == NULL) { errorMessage = "The data set does not contain valid file information."; return false; } // Existing file const string& filename = pFileDescriptor->getFilename(); if (validationTest & EXISTING_FILE) { // Valid filename if (filename.empty() == true) { errorMessage = "The filename is invalid."; mValidationError = EXISTING_FILE; return false; } // Existing file LargeFileResource file(true); if (!file.open(filename.c_str(), O_RDONLY | O_BINARY, S_IREAD)) { errorMessage = "The file: " + filename + " does not exist."; mValidationError = EXISTING_FILE; return false; } } // Existing data element if (validationTest & NO_EXISTING_DATA_ELEMENT) { const string& name = pDescriptor->getName(); const string& type = pDescriptor->getType(); DataElement* pParent = pDescriptor->getParent(); Service<ModelServices> pModel; if (pModel->getElement(name, type, pParent) != NULL) { errorMessage = "The data set currently exists. 
It may have already been imported."; mValidationError = NO_EXISTING_DATA_ELEMENT; return false; } } // Valid classification Service<UtilityServices> pUtilities; if (validationTest & VALID_CLASSIFICATION) { // Existing Classification object const Classification* pClassification = pDescriptor->getClassification(); if (pClassification == NULL) { errorMessage = "The required classification does not exist."; mValidationError = VALID_CLASSIFICATION; return false; } // Unauthorized classification level on the system - warn the user, but continue to load the file FactoryResource<Classification> pSystemClassification; pSystemClassification->setLevel(pUtilities->getDefaultClassification()); if (pClassification->hasGreaterLevel(pSystemClassification.get()) == true) { errorMessage = "THIS FILE CONTAINS CLASSIFIED INFORMATION WHICH SHOULD NOT BE PROCESSED ON THIS SYSTEM!\n" "THIS MAY CONSTITUTE A SECURITY VIOLATION WHICH SHOULD BE REPORTED TO YOUR SECURITY OFFICER!\n"; StepResource pStep("Validate", "app", "1A881267-6A96-4eb2-A9D3-7D30334B0A0B", errorMessage); } } // Valid metadata if (validationTest & VALID_METADATA) { if (pDescriptor->getMetadata() == NULL) { errorMessage = "The required metadata does not exist."; mValidationError = VALID_METADATA; return false; } } // Processing location if (validationTest & VALID_PROCESSING_LOCATION) { if (isProcessingLocationSupported(pDescriptor->getProcessingLocation()) == false) { errorMessage = "The specified processing location is not supported."; mValidationError = VALID_PROCESSING_LOCATION; return false; } } // If no RasterDataDescriptor or RasterFileDescriptor tests are performed, end here if (validationTest < RASTER_SIZE) { return true; } // Since raster tests have been specified, always validate the raster data descriptor and raster file descriptor const RasterDataDescriptor* pRasterDescriptor = dynamic_cast<const RasterDataDescriptor*>(pDescriptor); if (pRasterDescriptor == NULL) { errorMessage = "The data set does not contain 
raster information."; return false; } const RasterFileDescriptor* pRasterFileDescriptor = dynamic_cast<const RasterFileDescriptor*>(pRasterDescriptor->getFileDescriptor()); if (pRasterFileDescriptor == NULL) { errorMessage = "The file does not contain valid raster data."; return false; } // Raster size if (validationTest & RASTER_SIZE) { // Data set size unsigned int loadedRows = pRasterDescriptor->getRowCount(); unsigned int loadedColumns = pRasterDescriptor->getColumnCount(); unsigned int loadedBands = pRasterDescriptor->getBandCount(); if ((loadedRows == 0) || (loadedColumns == 0) || (loadedBands == 0)) { errorMessage = "The data set is empty. Check the size of the rows, columns, and bands."; mValidationError = RASTER_SIZE; return false; } // Pixel size if (pRasterFileDescriptor->getBitsPerElement() == 0) { errorMessage = "The number of bits per element is invalid."; mValidationError = RASTER_SIZE; return false; } } // Data type if (validationTest & VALID_DATA_TYPE) { const std::vector<EncodingType>& dataTypes = pRasterDescriptor->getValidDataTypes(); if (std::find(dataTypes.begin(), dataTypes.end(), pRasterDescriptor->getDataType()) == dataTypes.end()) { errorMessage = "The data type is not valid for this data set."; mValidationError = VALID_DATA_TYPE; return false; } } // Header bytes if (validationTest & NO_HEADER_BYTES) { if (pRasterFileDescriptor->getHeaderBytes() > 0) { errorMessage = "The file has an invalid number of header bytes."; mValidationError = NO_HEADER_BYTES; return false; } } // Preline and postline bytes if (validationTest & NO_PRE_POST_LINE_BYTES) { if ((pRasterFileDescriptor->getPrelineBytes() > 0) || (pRasterFileDescriptor->getPostlineBytes() > 0)) { errorMessage = "The file has an invalid number of preline and/or postline bytes."; mValidationError = NO_PRE_POST_LINE_BYTES; return false; } } // Preband and postband bytes if (validationTest & NO_PRE_POST_BAND_BYTES) { if ((pRasterFileDescriptor->getPrebandBytes() > 0) || 
(pRasterFileDescriptor->getPostbandBytes() > 0)) { errorMessage = "The file has an invalid number of preband and/or postband bytes."; mValidationError = NO_PRE_POST_BAND_BYTES; return false; } } // Trailer bytes if (validationTest & NO_TRAILER_BYTES) { if (pRasterFileDescriptor->getTrailerBytes() > 0) { errorMessage = "The file has an invalid number of trailer bytes."; mValidationError = NO_TRAILER_BYTES; return false; } } // File size int64_t requiredSize = RasterUtilities::calculateFileSize(pRasterFileDescriptor); if ((validationTest & FILE_SIZE) == FILE_SIZE) { // Existing file LargeFileResource file; VERIFY(file.open(filename, O_RDONLY | O_BINARY, S_IREAD) == true); // File size if (requiredSize < 0) { errorMessage = "Unable to determine the required file size."; mValidationError = FILE_SIZE; return false; } if (file.fileLength() < requiredSize) { errorMessage = "The size of the file does not match the current parameters."; mValidationError = FILE_SIZE; return false; } } // Band files const vector<const Filename*>& bandFiles = pRasterFileDescriptor->getBandFiles(); if (validationTest & NO_BAND_FILES) { if (bandFiles.empty() == false) { errorMessage = "This data set cannot have band data in multiple files."; mValidationError = NO_BAND_FILES; return false; } } // Existing band files and band file sizes if (validationTest & EXISTING_BAND_FILES) { // Enough band files for all bands unsigned int numBands = pRasterFileDescriptor->getBandCount(); if (bandFiles.size() < numBands) { errorMessage = "The number of band files specified is less than the total number of bands to be loaded."; mValidationError = EXISTING_BAND_FILES; return false; } // Invalid file for imported bands for (vector<const Filename*>::size_type i = 0; i < bandFiles.size(); ++i) { const Filename* pFilename = bandFiles[i]; if (pFilename == NULL) { stringstream streamMessage; streamMessage << "Band filename " << i + 1 << " is missing."; errorMessage = streamMessage.str(); mValidationError = 
EXISTING_BAND_FILES; return false; } // Invalid filename string bandFilename = pFilename->getFullPathAndName(); if (bandFilename.empty() == true) { stringstream streamMessage; streamMessage << "Band filename " << i + 1 << " is invalid."; errorMessage = streamMessage.str(); mValidationError = EXISTING_BAND_FILES; return false; } // Existing file LargeFileResource bandFile; if (!bandFile.open(bandFilename, O_RDONLY | O_BINARY, S_IREAD)) { stringstream streamMessage; streamMessage << "Band file " << i + 1 << " does not exist."; errorMessage = streamMessage.str(); mValidationError = EXISTING_BAND_FILES; return false; } // File size if ((validationTest & BAND_FILE_SIZES) == BAND_FILE_SIZES) { if (requiredSize < 0) { errorMessage = "Unable to determine the required band file size."; mValidationError = BAND_FILE_SIZES; return false; } if (bandFile.fileLength() < requiredSize) { stringstream streamMessage; streamMessage << "The size of band file " << i + 1 << " does not match the required size " "for the current parameters."; errorMessage = streamMessage.str(); mValidationError = BAND_FILE_SIZES; return false; } } } } // Band names const DynamicObject* pMetadata = pRasterDescriptor->getMetadata(); if ((validationTest & VALID_BAND_NAMES) == VALID_BAND_NAMES) { VERIFY(pMetadata != NULL); string namesPath[] = { SPECIAL_METADATA_NAME, BAND_METADATA_NAME, NAMES_METADATA_NAME, END_METADATA_NAME }; // If band names are present in the metadata, check the number of names against the number of bands // If band names are not present in the metadata, then succeed const vector<string>* pBandNames = dv_cast<vector<string> >(&pMetadata->getAttributeByPath(namesPath)); if (pBandNames != NULL) { if (pBandNames->size() != pRasterFileDescriptor->getBandCount()) { errorMessage = "The number of band names in the metadata does not match the number of bands."; mValidationError = VALID_BAND_NAMES; return false; } } } // Wavelengths if ((validationTest & VALID_WAVELENGTHS) == VALID_WAVELENGTHS) { 
VERIFY(pMetadata != NULL); // If wavelengths are present in the metadata, check the number of wavelengths against the number of bands // If wavelengths are not present in the metadata, then succeed FactoryResource<Wavelengths> pWavelengths; if (pWavelengths->initializeFromDynamicObject(pMetadata, false) == true) { if (pWavelengths->getNumWavelengths() != pRasterFileDescriptor->getBandCount()) { errorMessage = "The number of wavelengths in the metadata does not match the number of bands."; mValidationError = VALID_WAVELENGTHS; return false; } } } // Interleave conversions if (validationTest & NO_INTERLEAVE_CONVERSIONS) { InterleaveFormatType dataInterleave = pRasterDescriptor->getInterleaveFormat(); InterleaveFormatType fileInterleave = pRasterFileDescriptor->getInterleaveFormat(); if ((pRasterFileDescriptor->getBandCount() > 1) && (dataInterleave != fileInterleave)) { errorMessage = "Interleave format conversions are not supported."; mValidationError = NO_INTERLEAVE_CONVERSIONS; return false; } } // Skip factors if (validationTest & NO_ROW_SKIP_FACTOR) { if (pRasterDescriptor->getRowSkipFactor() > 0) { errorMessage = "Row skip factors are not supported."; mValidationError = NO_ROW_SKIP_FACTOR; return false; } } if (validationTest & NO_COLUMN_SKIP_FACTOR) { if (pRasterDescriptor->getColumnSkipFactor() > 0) { errorMessage = "Column skip factors are not supported."; mValidationError = NO_COLUMN_SKIP_FACTOR; return false; } } // Subsets if (validationTest & NO_ROW_SUBSETS) { if (pRasterDescriptor->getRowCount() != pRasterFileDescriptor->getRowCount()) { errorMessage = "Row subsets are not supported."; mValidationError = NO_ROW_SUBSETS; return false; } } if (validationTest & NO_COLUMN_SUBSETS) { if (pRasterDescriptor->getColumnCount() != pRasterFileDescriptor->getColumnCount()) { errorMessage = "Column subsets are not supported."; mValidationError = NO_COLUMN_SUBSETS; return false; } } if (validationTest & NO_BAND_SUBSETS) { if (pRasterDescriptor->getBandCount() != 
pRasterFileDescriptor->getBandCount()) { errorMessage = "Band subsets are not supported."; mValidationError = NO_BAND_SUBSETS; return false; } } // Available memory if (validationTest & AVAILABLE_MEMORY) { unsigned int loadedRows = pRasterDescriptor->getRowCount(); unsigned int loadedColumns = pRasterDescriptor->getColumnCount(); unsigned int loadedBands = pRasterDescriptor->getBandCount(); unsigned int bytesPerElement = pRasterDescriptor->getBytesPerElement(); uint64_t dataSize = loadedRows * loadedColumns * loadedBands * bytesPerElement; uint64_t maxMemoryAvail = pUtilities->getMaxMemoryBlockSize(); #if PTR_SIZE > 4 uint64_t totalRam = pUtilities->getTotalPhysicalMemory(); if (totalRam < maxMemoryAvail) { maxMemoryAvail = totalRam; } #endif if (dataSize > maxMemoryAvail) { errorMessage = "The data set cannot be loaded into memory. Use a different " "processing location or specify a subset."; mValidationError = AVAILABLE_MEMORY; return false; } } return true; }