void pvlOut(Statistics stats1, Statistics stats2, QString name, int start, int end,
            PvlObject *one, PvlObject *two) {
  PvlGroup left(name);
  left += PvlKeyword("StartLine", toString(start + 1));
  left += PvlKeyword("EndLine", toString(end));
  left += PvlKeyword("TotalPixels", toString(stats1.TotalPixels()));
  left += PvlKeyword("ValidPixels", toString(stats1.ValidPixels()));
  if(stats1.ValidPixels() > 0) {
    left += PvlKeyword("Mean", toString(stats1.Average()));
    left += PvlKeyword("StandardDeviation", toString(stats1.StandardDeviation()));
    left += PvlKeyword("Minimum", toString(stats1.Minimum()));
    left += PvlKeyword("Maximum", toString(stats1.Maximum()));
  }
  one->addGroup(left);

  PvlGroup right(name);
  right += PvlKeyword("StartLine", toString(start + 1));
  right += PvlKeyword("EndLine", toString(end));
  right += PvlKeyword("TotalPixels", toString(stats2.TotalPixels()));
  right += PvlKeyword("ValidPixels", toString(stats2.ValidPixels()));
  if(stats2.ValidPixels() > 0) {
    right += PvlKeyword("Mean", toString(stats2.Average()));
    right += PvlKeyword("StandardDeviation", toString(stats2.StandardDeviation()));
    right += PvlKeyword("Minimum", toString(stats2.Minimum()));
    right += PvlKeyword("Maximum", toString(stats2.Maximum()));
  }
  two->addGroup(right);
}
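// The two halves of pvlOut() above build identical groups for their respective
// inputs. A minimal sketch (hypothetical helper, not part of the original
// source) of a shared routine that could remove the duplication, assuming the
// same PvlGroup, PvlKeyword, Statistics, and toString() interfaces used above.
static PvlGroup statsGroup(Statistics stats, QString name, int start, int end) {
  PvlGroup grp(name);
  grp += PvlKeyword("StartLine", toString(start + 1));
  grp += PvlKeyword("EndLine", toString(end));
  grp += PvlKeyword("TotalPixels", toString(stats.TotalPixels()));
  grp += PvlKeyword("ValidPixels", toString(stats.ValidPixels()));
  if (stats.ValidPixels() > 0) {
    grp += PvlKeyword("Mean", toString(stats.Average()));
    grp += PvlKeyword("StandardDeviation", toString(stats.StandardDeviation()));
    grp += PvlKeyword("Minimum", toString(stats.Minimum()));
    grp += PvlKeyword("Maximum", toString(stats.Maximum()));
  }
  return grp;
}
// Usage sketch: one->addGroup(statsGroup(stats1, name, start, end));
//               two->addGroup(statsGroup(stats2, name, start, end));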
// Translate the code once it is found
void TranslateCode() {
  // Read the code from the image
  Chip chip(8*RADIUS, 64*RADIUS);
  chip.TackCube(codeSample + 3*RADIUS, codeLine + 31*RADIUS);
  chip.Load(cube);

  for (int j = 0; j < 32; j++) {
    for (int i = 0; i < 4; i++) {
      Statistics stats;

      // Get the average of the subchip
      for (int x = 1; x <= 2*RADIUS; x++) {
        for (int y = 1; y <= 2*RADIUS; y++) {
          stats.AddData(chip.GetValue(i*2*RADIUS + x, j*2*RADIUS + y));
        }
      }

      // See if it is on or off
      if (stats.Average() > 20000) code[i][31-j] = true;
      else code[i][31-j] = false;
    }
  }

  for (int j = 0; j < 32; j++) {
    for (int i = 0; i < 4; i++) {
    }
  }
}
int main() {
  Isis::Preference::Preferences(true);
  cerr << "GroupedStatistics unitTest!!!\n\n";

  // test constructor
  cerr << "testing constructor...\n\n";
  GroupedStatistics *groupedStats = new GroupedStatistics();

  // test AddStatistic
  cerr << "testing AddStatistic...\n\n";
  groupedStats->AddStatistic("Height", 71.5);

  // test copy constructor
  cerr << "testing copy constructor...\n\n";
  GroupedStatistics *groupedStats2 = new GroupedStatistics(*groupedStats);

  // test GetStatistics
  cerr << "testing GetStatistics...\n";
  Statistics stats = groupedStats2->GetStatistics("Height");
  cerr << " " << stats.Average() << "\n\n";

  // test GetStatisticTypes
  cerr << "testing GetStatisticTypes...\n";
  QVector< QString > statTypes = groupedStats->GetStatisticTypes();
  for(int i = 0; i < statTypes.size(); i++)
    cerr << " " << statTypes[i].toStdString() << "\n";
  cerr << "\n";

  // test destructor
  delete groupedStats;
  delete groupedStats2;

  return 0;
}
/**
 * @brief Compute the initial guess of the fit
 *
 * This method provides the non-linear fit with an initial guess of the
 * solution. It involves a linear fit to the latter half of the data to
 * provide the first two coefficients, the difference of the averages of the
 * residuals at both ends of the data set, and 5 times the last line time as
 * the final (fourth) element...a bit involved really.
 *
 * @return NLVector 4-element vector of the initial guess coefficients
 */
NonLinearLSQ::NLVector DriftCorrect::guess() {
  int n = _data.dim();
  int nb = n - _badLines;

  HiVector b1 = _data.subarray(0, nb-1);
  LowPassFilterComp gfilter(b1, _history, _sWidth, _sIters);

  int nb2 = nb/2;
  _b2 = gfilter.ref();
  HiVector cc = poly_fit(_b2.subarray(nb2, _b2.dim()-1), nb2-1);

  // Compute the 3rd term guess by getting the average of the residual
  // at both ends of the data set.
  Statistics s;

  // Get the head of the data set
  int n0 = MIN(nb, 20);
  for ( int k = 0 ; k < n0 ; k++ ) {
    double d = _b2[k] - (cc[0] + cc[1] * _timet(k));
    s.AddData(&d, 1);
  }
  double head = s.Average();

  // Get the tail of the data set
  s.Reset();
  n0 = (int) (0.9 * nb);
  for ( int l = n0 ; l < nb ; l++ ) {
    double d = _b2[l] - (cc[0] + cc[1] * _timet(l));
    s.AddData(&d, 1);
  }
  double tail = s.Average();

  // Populate the guess with the results
  NLVector g(4, 0.0);
  g[0] = cc[0];
  g[1] = cc[1];
  g[2] = head - tail;
  g[3] = -5.0 / _timet(nb-1);
  _guess = g;
  _history.add("Guess[" + ToString(_guess[0]) + "," +
                          ToString(_guess[1]) + "," +
                          ToString(_guess[2]) + "," +
                          ToString(_guess[3]) + "]");
  return (g);
}
/**
 * This method performs pass 1 on one image. It analyzes each framelet's
 * statistics and populates the necessary global variables.
 *
 * @param progress Progress message
 * @param theCube Current cube that needs processing
 *
 * @return bool True if the file contains a valid framelet
 */
bool CheckFramelets(string progress, Cube &theCube) {
  bool foundValidFramelet = false;
  LineManager mgr(theCube);
  Progress prog;
  prog.SetText(progress);
  prog.SetMaximumSteps(theCube.Lines());
  prog.CheckStatus();

  vector<double> frameletAvgs;
  // We need to store off the framelet information, because if no good
  // framelets were found then no data should be added to the
  // global variable for framelets, just files.
  vector< pair<int,double> > excludedFrameletsTmp;
  Statistics frameletStats;

  for(int line = 1; line <= theCube.Lines(); line++) {
    if((line-1) % numFrameLines == 0) {
      frameletStats.Reset();
    }

    mgr.SetLine(line);
    theCube.Read(mgr);
    frameletStats.AddData(mgr.DoubleBuffer(), mgr.size());

    if((line-1) % numFrameLines == numFrameLines-1) {
      if(IsSpecial(frameletStats.StandardDeviation()) ||
         frameletStats.StandardDeviation() > maxStdev) {
        excludedFrameletsTmp.push_back(
            pair<int,double>((line-1)/numFrameLines, frameletStats.StandardDeviation()));
      }
      else {
        foundValidFramelet = true;
      }

      frameletAvgs.push_back(frameletStats.Average());
    }

    prog.CheckStatus();
  }

  inputFrameletAverages.push_back(frameletAvgs);

  if(foundValidFramelet) {
    for(unsigned int i = 0; i < excludedFrameletsTmp.size(); i++) {
      excludedFramelets.insert(pair< pair<int,int>, double>(
          pair<int,int>(currImage, excludedFrameletsTmp[i].first),
          excludedFrameletsTmp[i].second));
    }
  }

  return foundValidFramelet;
}
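// A minimal sketch (hypothetical helper, not in the original source) isolating
// the exclusion test used above: a framelet is rejected when its standard
// deviation is a special pixel value or exceeds the STDEVTOL tolerance.
// IsSpecial() and Statistics are the same interfaces used in CheckFramelets().
static bool frameletExcluded(Statistics &frameletStats, double maxStdev) {
  return IsSpecial(frameletStats.StandardDeviation()) ||
         frameletStats.StandardDeviation() > maxStdev;
}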
void HiImageClean::cimage_dark() {
  // Combine calibration region
  std::vector<H2DBuf> blobs;
  blobs.push_back(_caldark);
  blobs.push_back(_ancdark);
  H2DBuf dark = appendLines(blobs);

  int nsamples(dark.dim2());
  int nlines(dark.dim1());

  // Compute averages for the dark area
  int firstDark(4);
  int ndarks(dark.dim2() - firstDark);
  _predark = H1DBuf(nlines);
  for (int line = 0 ; line < nlines ; line++) {
    Statistics darkave;
    darkave.AddData(&dark[line][firstDark], ndarks);
    _predark[line] = darkave.Average();
  }

  // Get statistics to determine state of mask and next course of action
  _darkStats.Reset();
  _darkStats.AddData(&_predark[0], _predark.dim1());
  if (_darkStats.ValidPixels() <= 0) {
    std::ostringstream mess;
    mess << "No valid pixels in calibration/ancillary dark regions, "
         << "binning = " << _binning << std::ends;
    throw(iException::Message(iException::Programmer, mess.str(), _FILEINFO_));
  }

  // Now apply a smoothing filter
  QuickFilter smooth(_predark.dim1(), _filterWidth, 1);
  smooth.AddLine(&_predark[0]);
  nsamples = smooth.Samples();

  _dark = H1DBuf(nsamples);
  for (int s = 0 ; s < nsamples ; s++) {
    _dark[s] = smooth.Average(s);
  }

  // Now apply to all calibration data
  BigInt nbad(0);
  _calimg  = row_apply(_calimg,  _dark, 0, nbad, 1.0);
  _calbuf  = row_apply(_calbuf,  _dark, 0, nbad, 1.0);
  _caldark = row_apply(_caldark, _dark, 0, nbad, 1.0);
  _ancbuf  = row_apply(_ancbuf,  _dark, _firstImageLine, nbad, 1.0);
  _ancdark = row_apply(_ancdark, _dark, _firstImageLine, nbad, 1.0);
  return;
}
void gatherAverages(Buffer &in) {
  Statistics lineStats;
  lineStats.AddData(in.DoubleBuffer(), in.size());

  double average = lineStats.Average();
  lineAverages[in.Band() - 1][in.Line() - 1] = average;

  // The cube average will finish being calculated before the correction is applied.
  if(!IsSpecial(average)) {
    cubeAverage[in.Band() - 1] += average;
  }
  else {
    numIgnoredLines++;
  }
}
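// The comment above notes that cubeAverage[] is only a running sum of line
// averages at this point; before the correction is applied it still has to be
// divided by the number of lines that actually contributed. A hedged sketch of
// that finalization step: the container type and the per-band line count are
// assumptions; only the names cubeAverage and numIgnoredLines come from the
// code above.
static void finishCubeAverages(std::vector<double> &cubeAverage,
                               int linesPerBand, int numIgnoredLines) {
  for (unsigned int band = 0; band < cubeAverage.size(); band++) {
    // average of the lines that were not ignored as special pixels
    cubeAverage[band] /= (linesPerBand - numIgnoredLines);
  }
}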
// Return a PVL group containing the statistical information
PvlGroup PvlStats(Statistics &stats, const QString &name) {
  // Construct a label with the results
  PvlGroup results(name);
  if(stats.ValidPixels() != 0) {
    results += PvlKeyword("Average", toString(stats.Average()));
    results += PvlKeyword("StandardDeviation", toString(stats.StandardDeviation()));
    results += PvlKeyword("Variance", toString(stats.Variance()));
    results += PvlKeyword("Minimum", toString(stats.Minimum()));
    results += PvlKeyword("Maximum", toString(stats.Maximum()));
  }

  results += PvlKeyword("TotalPixels", toString(stats.TotalPixels()));
  results += PvlKeyword("ValidPixels", toString(stats.ValidPixels()));
  results += PvlKeyword("NullPixels", toString(stats.NullPixels()));
  results += PvlKeyword("LisPixels", toString(stats.LisPixels()));
  results += PvlKeyword("LrsPixels", toString(stats.LrsPixels()));
  results += PvlKeyword("HisPixels", toString(stats.HisPixels()));
  results += PvlKeyword("HrsPixels", toString(stats.HrsPixels()));

  return results;
}
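// Usage sketch for PvlStats() (hypothetical data values; the Statistics,
// PvlGroup, and Application::Log interfaces are the same ones used elsewhere
// in this file): accumulate a few pixels, then log the resulting group.
static void pvlStatsExample() {
  Statistics stats;
  double pixels[3] = {1.0, 2.0, 3.0};   // hypothetical DN values
  stats.AddData(pixels, 3);
  PvlGroup results = PvlStats(stats, "Results");
  Application::Log(results);
}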
void IsisMain() {
  Preference::Preferences(true);

  ProcessImportVicar p;
  Pvl vlab;
  p.SetVicarFile("unitTest.img", vlab);
  p.SetOutputCube("TO");
  p.StartProcess();
  p.EndProcess();

  cout << vlab << endl;

  Process p2;
  CubeAttributeInput att;
  QString file = Application::GetUserInterface().GetFileName("TO");
  Cube *icube = p2.SetInputCube(file, att);
  Statistics *stat = icube->statistics();
  cout << stat->Average() << endl;
  cout << stat->Variance() << endl;
  p2.EndProcess();
  QFile::remove(file);
}
//**********************************************************
// DOUSER - Get statistics on a column or row of pixels
//**********************************************************
void getStats(Buffer &in) {
  Statistics stats;
  stats.AddData(in.DoubleBuffer(), in.size());

  band.push_back(in.Band());
  element.push_back(in.Sample());

  // Sort the input buffer
  vector<double> pixels;
  for(int i = 0; i < in.size(); i++) {
    if(IsValidPixel(in[i])) pixels.push_back(in[i]);
  }
  sort(pixels.begin(), pixels.end());

  // Now obtain the median value and store in the median vector
  int size = pixels.size();
  if(size != 0) {
    int med = size / 2;
    if(size % 2 == 0) {
      median.push_back((pixels[med-1] + pixels[med]) / 2.0);
    }
    else {
      median.push_back(pixels[med]);
    }
  }
  else {
    median.push_back(Isis::Null);
  }

  // Store the statistics in the appropriate vectors
  average.push_back(stats.Average());
  stddev.push_back(stats.StandardDeviation());
  validpixels.push_back(stats.ValidPixels());
  minimum.push_back(stats.Minimum());
  maximum.push_back(stats.Maximum());
}
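// A small standalone sketch of the median logic used in getStats() above
// (hypothetical helper that mirrors the in-place computation): the caller
// passes only valid pixel values, and Isis::Null is returned when the vector
// is empty.
static double computeMedian(vector<double> pixels) {
  sort(pixels.begin(), pixels.end());
  int size = pixels.size();
  if (size == 0) return Isis::Null;
  int med = size / 2;
  if (size % 2 == 0) return (pixels[med-1] + pixels[med]) / 2.0;  // even count: mean of middle pair
  return pixels[med];                                             // odd count: middle element
}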
// Function to write the stats values to the flat file
void writeFlat(ofstream &os, Statistics &s) {
  os << ValidateValue(s.Minimum()) << ","
     << ValidateValue(s.Maximum()) << ","
     << ValidateValue(s.Average()) << ","
     << ValidateValue(s.StandardDeviation()) << ",";
}
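// Usage sketch for writeFlat() (hypothetical output file name and DN values;
// the header row is an assumption that simply matches the column order written
// by the function above).
static void writeFlatExample() {
  Statistics bandStats;
  double dns[4] = {10.0, 12.0, 11.0, 13.0};   // hypothetical DN values
  bandStats.AddData(dns, 4);

  ofstream os("band_stats.csv");              // hypothetical file name
  os << "Minimum,Maximum,Average,StandardDeviation," << endl;  // assumed header
  writeFlat(os, bandStats);
  os.close();
}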
void HiImageClean::cimage_mask() {
  // Combine calibration region
  std::vector<H2DBuf> blobs;
  blobs.push_back(_calbuf);
  blobs.push_back(_calimg);
  blobs.push_back(_caldark);
  H2DBuf calibration = appendSamples(blobs);

  // Set the mask depending on the binning mode
  _firstMaskLine = 20;
  _lastMaskLine = 39;
  switch (_binning) {
    case 1:
      _firstMaskLine = 21;
      _lastMaskLine = 38;
      break;
    case 2:
      _firstMaskLine = 21;
      _lastMaskLine = 29;
      break;
    case 3:
      _firstMaskLine = 21;
      _lastMaskLine = 26;
      break;
    case 4:
      _firstMaskLine = 21;
      _lastMaskLine = 24;
      break;
    case 8:
      _firstMaskLine = 21;
      _lastMaskLine = 22;
      break;
    case 16:
      _firstMaskLine = 21;
      _lastMaskLine = 21;
      break;
    default:
      std::ostringstream msg;
      msg << "Invalid binning mode (" << _binning
          << ") - valid are 1-4, 8 and 16" << std::ends;
      throw(iException::Message(iException::Programmer, msg.str(), _FILEINFO_));
  }

  // Initialize lines and samples of mask area of interest
  int nsamples(calibration.dim2());
  int nlines(_lastMaskLine - _firstMaskLine + 1);

  // Compute averages for the mask area
  _premask = H1DBuf(nsamples);
  for (int samp = 0 ; samp < nsamples; samp++) {
    H1DBuf maskcol = slice(calibration, samp);
    Statistics maskave;
    maskave.AddData(&maskcol[_firstMaskLine], nlines);
    _premask[samp] = maskave.Average();
  }

  _mask = _premask.copy();

  // Get statistics to determine state of mask and next course of action
  _maskStats.Reset();
  _maskStats.AddData(&_premask[0], nsamples);
  if (_maskStats.ValidPixels() <= 0) {
    std::ostringstream mess;
    mess << "No valid pixels in calibration mask region in lines "
         << (_firstMaskLine+1) << " to " << (_lastMaskLine+1)
         << ", binning = " << _binning << std::ends;
    throw(iException::Message(iException::Programmer, mess.str(), _FILEINFO_));
  }

  // If there are any missing values, replace with mins/maxs of region
  if (_maskStats.TotalPixels() != _maskStats.ValidPixels()) {
    for (int samp = 0 ; samp < nsamples ; samp++) {
      if (Pixel::IsLow(_premask[samp]) || Pixel::IsNull(_premask[samp])) {
        _mask[samp] = _maskStats.Minimum();
      }
      else if (Pixel::IsHigh(_premask[samp])) {
        _mask[samp] = _maskStats.Maximum();
      }
    }
  }

  // Now apply to all calibration data
  BigInt nbad(0);
  _calimg  = column_apply(_calimg,  _mask, _firstImageSample,  nbad, 1.0);
  _calbuf  = column_apply(_calbuf,  _mask, _firstBufferSample, nbad, 1.0);
  _caldark = column_apply(_caldark, _mask, _firstDarkSample,   nbad, 1.0);
  _ancbuf  = column_apply(_ancbuf,  _mask, _firstBufferSample, nbad, 1.0);
  _ancdark = column_apply(_ancdark, _mask, _firstDarkSample,   nbad, 1.0);
  return;
}
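// A minimal sketch (hypothetical helper) of the replacement rule applied above
// when the mask average buffer contains special pixels: low/null averages fall
// back to the region minimum, high averages to the region maximum, and valid
// averages pass through unchanged. Pixel and Statistics are the same
// interfaces used in cimage_mask().
static double replaceSpecialMaskValue(double premaskValue, Statistics &maskStats) {
  if (Pixel::IsLow(premaskValue) || Pixel::IsNull(premaskValue)) return maskStats.Minimum();
  if (Pixel::IsHigh(premaskValue)) return maskStats.Maximum();
  return premaskValue;
}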
/**
 * This calculates the coefficients for specific energy corrections
 */
void calculateSpecificEnergy(Cube *icube) {
  PvlGroup &inst = icube->label()->findGroup("Instrument", Pvl::Traverse);
  bool vis = (inst["Channel"][0] != "IR");

  double coefficient = 1.0;

  if(inst["GainMode"][0] == "HIGH") {
    coefficient /= 2;
  }

  if(vis && inst["SamplingMode"][0] == "HI-RES") {
    coefficient *= 3;
  }

  if(vis) {
    coefficient /= toDouble(inst["ExposureDuration"][1]) / 1000.0;
  }
  else {
    coefficient /= (toDouble(inst["ExposureDuration"][0]) * 1.01725) / 1000.0 - 0.004;
  }

  QString specEnergyFile = "$cassini/calibration/vims/";

  if(vis) {
    specEnergyFile += "vis_perf_v????.cub";
  }
  else {
    specEnergyFile += "ir_perf_v????.cub";
  }

  QString waveCalFile = "$cassini/calibration/vims/wavecal_v????.cub";

  FileName specEnergyFileName(specEnergyFile);
  specEnergyFileName = specEnergyFileName.highestVersion();

  FileName waveCalFileName(waveCalFile);
  waveCalFileName = waveCalFileName.highestVersion();

  Cube specEnergyCube;
  specEnergyCube.open(specEnergyFileName.expanded());

  Cube waveCalCube;
  waveCalCube.open(waveCalFileName.expanded());

  LineManager specEnergyMgr(specEnergyCube);
  LineManager waveCalMgr(waveCalCube);

  for(int i = 0; i < icube->bandCount(); i++) {
    Statistics specEnergyStats;
    Statistics waveCalStats;

    if(vis) {
      specEnergyMgr.SetLine(1, i + 1);
      waveCalMgr.SetLine(1, i + 1);
    }
    else {
      specEnergyMgr.SetLine(1, i + 1);
      // ir starts at band 97
      waveCalMgr.SetLine(1, i + 96 + 1);
    }

    specEnergyCube.read(specEnergyMgr);
    waveCalCube.read(waveCalMgr);

    specEnergyStats.AddData(specEnergyMgr.DoubleBuffer(), specEnergyMgr.size());
    waveCalStats.AddData(waveCalMgr.DoubleBuffer(), waveCalMgr.size());

    double bandCoefficient = coefficient * specEnergyStats.Average() * waveCalStats.Average();

    specificEnergyCorrections.push_back(bandCoefficient);
  }
}
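// A minimal sketch (hypothetical helper) isolating the per-cube coefficient
// computed at the top of calculateSpecificEnergy(): gain mode, sampling mode,
// and exposure durations are passed in directly instead of being read from the
// Instrument label group. The constants mirror the code above.
static double baseSpecificEnergyCoefficient(bool vis, bool highGain, bool hiRes,
                                            double visExposureMs, double irExposureMs) {
  double coefficient = 1.0;
  if (highGain) coefficient /= 2;            // HIGH gain mode
  if (vis && hiRes) coefficient *= 3;        // VIS HI-RES sampling
  if (vis) coefficient /= visExposureMs / 1000.0;
  else     coefficient /= (irExposureMs * 1.01725) / 1000.0 - 0.004;
  return coefficient;
}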
// Main Program
void IsisMain() {
  UserInterface &ui = Application::GetUserInterface();
  Isis::FileName fromFile = ui.GetFileName("FROM");

  Isis::Cube inputCube;
  inputCube.open(fromFile.expanded());

  // Check to make sure we got the cube properly
  if(!inputCube.isOpen()) {
    QString msg = "Could not open FROM cube " + fromFile.expanded();
    throw IException(IException::User, msg, _FILEINFO_);
  }

  ProcessByLine processByLine;
  Cube *icube = processByLine.SetInputCube("FROM");
  int totalSamples = icube->sampleCount();

  // We'll be going through the cube by line, manually differentiating
  // between phases
  Isis::LineManager lineManager(inputCube);
  lineManager.begin();

  Table hifix("HiRISE Ancillary");
  int channel = icube->group("Instrument")["ChannelNumber"];

  if(channel == 0) {
    phases = channel0Phases;
  }
  else {
    phases = channel1Phases;
  }

  int binning_mode = icube->group("Instrument")["Summing"];
  if(binning_mode != 1 && binning_mode != 2) {
    /*IString msg = "You may only use input with binning mode 1 or 2, not";
    msg += binning_mode;
    throw iException::Message(iException::User, msg, _FILEINFO_);*/
    DestripeForOtherBinningModes(totalSamples);
  }
  else {
    // Adjust phase breaks based on the binning mode
    for(int i = 0 ; i < num_phases ; i++) {
      phases[i] /= binning_mode;
    }

    // Phases must be able to stretch across the entire cube
    if(totalSamples != phases[3]) {
      QString required_samples = toString(phases[3]);
      QString bin_QString = toString(binning_mode);
      QString msg = "Image must have exactly ";
      msg += required_samples;
      msg += " samples per line for binning mode ";
      msg += bin_QString;
      throw IException(IException::User, msg, _FILEINFO_);
    }

    // Index starts at 1 and will go up to totalLines. This must be done since
    // lines go into different statistics vectors based on their index
    myIndex = 1;
    processByLine.StartProcess(getStats);

    // This program is trying to find horizontal striping in the image that occurs
    // in every other line, but at runtime we do not know whether that striping
    // occurs on the odd numbered lines (1, 3, 5, etc.) or the even numbered
    // ones (2, 4, 6, etc.). The below algorithm determines which of these is the
    // case.
    QString parity = ui.GetString("PARITY");
    if(parity == "EVEN") {
      offset = 1;
    }
    else if(parity == "ODD") {
      offset = 0;
    }
    else {
      // PRECONDITION: getStats must have been run
      long double maxDiff = 0;
      int maxDiffIndex = 0;
      for(int i = 0 ; i < num_phases ; i++) {
        long double thisDiff;
        thisDiff = lineStats[i].Average() - stats.Average();
        if(thisDiff < 0) {
          thisDiff *= -1;
        }
        if(thisDiff > maxDiff) {
          maxDiff = thisDiff;
          maxDiffIndex = i;
        }
      }
      if(maxDiffIndex == 1 || maxDiffIndex == 3) {
        offset = 1;
      }
      else {
        offset = 0;
      }
    }

    // Again we must reset the index, because we apply corrections only on every
    // other line and the fix processing function has no concept of where it is
    // in the cube.
    myIndex = 1;
    mode = (ui.GetString("CORRECTION") == "MULTIPLY");
    processByLine.SetOutputCube("TO");
    processByLine.StartProcess(fix);
    processByLine.EndProcess();
  }
}
/**
 * This method is the pass 2 processing routine. A ProcessByBrick
 * will call this method for sets of data (depending on the camera
 * type) and this method is responsible for writing the entire output
 * temporary cube.
 *
 * @param in Input raw image data, not including excluded files
 */
void CreateTemporaryData(Buffer &in) {
  /**
   * Line scan cameras process by frame columns.
   */
  if(cameraType == LineScan) {
    // The statistics of every column of data need to be known
    // before we can write to the temp file. Gather stats for this
    // column.
    Statistics inputColStats;

    for(int i = 0; i < in.size(); i++) {
      inputColStats.AddData(in[i]);

      // We'll also need the stats for the entire frame in order to
      // normalize and in order to decide whether or not we want
      // to toss out the frame
      inputFrameStats.AddData(in[i]);
    }

    // Store off the column stats
    outputTmpAverages[in.Sample()-1] = inputColStats.Average();
    outputTmpCounts[in.Sample()-1] = inputColStats.ValidPixels();

    // Test if this is the last column and we've got all of our stats
    if(in.Sample() == numOutputSamples) {
      // Decide if we want this data
      if(IsSpecial(inputFrameStats.StandardDeviation()) ||
         inputFrameStats.StandardDeviation() > maxStdev) {
        // We don't want this data...
        // CreateNullData is a helper method for this case that
        // nulls out the stats
        CreateNullData();

        // Record the exclusion
        PvlGroup currExclusion("ExcludedLines");
        currExclusion += PvlKeyword("FrameStartLine", iString(in.Line()));
        currExclusion += PvlKeyword("ValidPixels", iString(inputFrameStats.ValidPixels()));

        if(!IsSpecial(inputFrameStats.StandardDeviation()))
          currExclusion += PvlKeyword("StandardDeviation", inputFrameStats.StandardDeviation());
        else
          currExclusion += PvlKeyword("StandardDeviation", "N/A");

        excludedDetails[excludedDetails.size()-1].AddGroup(currExclusion);
      }

      // Let's write our data... CreateNullData took care of nulls for us
      // Band 1 is our normalized average
      oLineMgr->SetLine(oLineMgr->Line(), 1);
      for(int i = 0; i < (int)outputTmpAverages.size(); i++) {
        if(!IsSpecial(outputTmpAverages[i])) {
          (*oLineMgr)[i] = outputTmpAverages[i] / inputFrameStats.Average();
        }
        else {
          (*oLineMgr)[i] = Isis::Null;
        }
      }

      ocube->Write(*oLineMgr);
      oLineMgr->SetLine(oLineMgr->Line(), 2);

      // Band 2 is our valid dn counts
      for(int i = 0; i < (int)outputTmpCounts.size(); i++) {
        (*oLineMgr)[i] = outputTmpCounts[i];
        numInputDns[i] += (int)(outputTmpCounts[i] + 0.5);
      }

      ocube->Write(*oLineMgr);
      (*oLineMgr)++;

      inputFrameStats.Reset();
    }
  }
  else if(cameraType == Framing || cameraType == PushFrame) {
    // Framing cameras and push frames are treated identically;
    // the framelet size for a framelet in the framing camera
    // is the entire image!
    int framelet = (in.Line()-1) / numFrameLines;
    double stdev;
    bool excluded = Excluded(currImage, framelet, stdev);

    if(excluded && ((in.Line()-1) % numFrameLines == 0)) {
      PvlGroup currExclusion("ExcludedFramelet");
      currExclusion += PvlKeyword("FrameletStartLine", iString(in.Line()));
      currExclusion += PvlKeyword("FrameletNumber", (in.Line()-1) / numFrameLines);

      if(!IsSpecial(stdev)) {
        currExclusion += PvlKeyword("StandardDeviation", stdev);
      }
      else {
        currExclusion += PvlKeyword("StandardDeviation", "N/A");
      }

      excludedDetails[excludedDetails.size()-1].AddGroup(currExclusion);
    }

    // Since this is a line by line iterative process, we need to get the current
    // data in the temp file
    oLineMgr->SetLine(((in.Line() - 1) % numFrameLines) + 1, 1);

    if(!excluded || !cubeInitialized) {
      ocube->Read(*oLineMgr);
    }

    if(!cubeInitialized) {
      for(int i = 0; i < oLineMgr->size(); i++) {
        (*oLineMgr)[i] = Isis::Null;
      }
    }

    vector<bool> isValidData;

    if(!excluded || !cubeInitialized) {
      isValidData.resize(in.size());

      for(int samp = 0; samp < in.size(); samp++) {
        if(IsSpecial((*oLineMgr)[samp]) && !IsSpecial(in[samp])) {
          (*oLineMgr)[samp] = 0.0;
        }

        if(!IsSpecial(in[samp])) {
          isValidData[samp] = true;
          (*oLineMgr)[samp] += in[samp] / inputFrameletAverages[currImage][framelet];
        }
        else {
          isValidData[samp] = false;
        }
      }
    }

    if(!excluded || !cubeInitialized) {
      ocube->Write(*oLineMgr);
    }

    oLineMgr->SetLine(oLineMgr->Line(), 2);

    if(!excluded || !cubeInitialized) {
      ocube->Read(*oLineMgr);
    }

    if(!cubeInitialized) {
      for(int i = 0; i < oLineMgr->size(); i++) {
        (*oLineMgr)[i] = Isis::Null;
      }

      if(ocube->Lines() == oLineMgr->Line()) cubeInitialized = true;
    }

    if(!excluded || !cubeInitialized) {
      for(int i = 0; i < (int)isValidData.size(); i++) {
        if(IsSpecial((*oLineMgr)[i])) {
          (*oLineMgr)[i] = 0.0;
        }

        if(isValidData[i]) {
          (*oLineMgr)[i]++;
        }
      }
    }

    if(!excluded || !cubeInitialized) {
      ocube->Write(*oLineMgr);
    }
  }
}
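// The two temp-file bands built above hold a running sum of normalized DNs
// (band 1) and a count of contributing DNs (band 2). A hedged sketch of how a
// final flat-field pixel could be formed from those two values; the actual
// ProcessTemporaryData routine is not shown in this excerpt, so this division
// step is an assumption about its behavior.
static double flatFieldPixel(double normalizedSum, double validCount) {
  if (validCount <= 0.0) return Isis::Null;   // no valid contributions for this sample
  return normalizedSum / validCount;          // average normalized DN
}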
/** The ISIS smtk main application */
void IsisMain() {
  UserInterface &ui = Application::GetUserInterface();

  // Open the first cube. It is the left hand image.
  Cube lhImage;
  CubeAttributeInput &attLeft = ui.GetInputAttribute("FROM");
  vector<QString> bandLeft = attLeft.bands();
  lhImage.setVirtualBands(bandLeft);
  lhImage.open(ui.GetFileName("FROM"), "r");

  // Open the second cube; it is geometrically altered. We will be matching the
  // first to this one by attempting to compute sample/line offsets.
  Cube rhImage;
  CubeAttributeInput &attRight = ui.GetInputAttribute("MATCH");
  vector<QString> bandRight = attRight.bands();
  rhImage.setVirtualBands(bandRight);
  rhImage.open(ui.GetFileName("MATCH"), "r");

  // Ensure only single bands
  if (lhImage.bandCount() != 1 || rhImage.bandCount() != 1) {
    QString msg = "Input Cubes must have only one band!";
    throw IException(IException::User, msg, _FILEINFO_);
  }

  // Both images must have a Camera and can also have a Projection. We will
  // only deal with a Camera, however, as a projected, non-mosaicked image
  // uses a Projection internal to the Camera object.
  Camera *lhCamera = NULL;
  Camera *rhCamera = NULL;
  try {
    lhCamera = lhImage.camera();
    rhCamera = rhImage.camera();
  }
  catch (IException &ie) {
    QString msg = "Both input images must have a camera";
    throw IException(ie, IException::User, msg, _FILEINFO_);
  }

  // Since we are generating a DEM, we must turn off any existing
  // DEM that may have been initialized with spiceinit.
  lhCamera->IgnoreElevationModel(true);
  rhCamera->IgnoreElevationModel(true);

  // Get serial number
  QString serialLeft = SerialNumber::Compose(lhImage, true);
  QString serialRight = SerialNumber::Compose(rhImage, true);

  // This still precludes band to band registrations.
  if (serialLeft == serialRight) {
    QString sLeft = FileName(lhImage.fileName()).name();
    QString sRight = FileName(rhImage.fileName()).name();
    if (sLeft == sRight) {
      QString msg = "Cube Serial Numbers must be unique - FROM=" + serialLeft +
                    ", MATCH=" + serialRight;
      throw IException(IException::User, msg, _FILEINFO_);
    }
    serialLeft = sLeft;
    serialRight = sRight;
  }

  Progress prog;
  prog.SetText("Finding Initial Seeds");

  int nl = lhImage.lineCount();
  int ns = lhImage.sampleCount();
  BigInt numAttemptedInitialPoints = 0;

  // Declare Gruen matcher
  SmtkMatcher matcher(ui.GetFileName("REGDEF"), &lhImage, &rhImage);

  // Get line/sample linc/sinc parameters
  int space = ui.GetInteger("SPACE");
  int linc(space), sinc(space);

  // Do we have seed points from a control net file?
  bool useseed = ui.WasEntered("CNET");  // Base points on an input cnet

  SmtkQStack gstack;
  double lastEigen(0.0);
  if (useseed) {
    ControlNet cnet(ui.GetFileName("CNET"));
    prog.SetMaximumSteps(cnet.GetNumPoints());
    prog.CheckStatus();

    gstack.reserve(cnet.GetNumPoints());

    for (int cpIndex = 0; cpIndex < cnet.GetNumPoints(); cpIndex++) {
      ControlPoint *cp = cnet.GetPoint(cpIndex);

      if (!cp->IsIgnored()) {
        ControlMeasure *cmLeft(0), *cmRight(0);
        for(int cmIndex = 0; cmIndex < cp->GetNumMeasures(); cmIndex++) {
          ControlMeasure *cm = cp->GetMeasure(cmIndex);
          if (!cm->IsIgnored()) {
            if (cm->GetCubeSerialNumber() == serialLeft)
              cmLeft = cp->GetMeasure(cmIndex);
            if (cm->GetCubeSerialNumber() == serialRight)
              cmRight = cp->GetMeasure(cmIndex);
          }
        }

        // If we have both left and right images in the control point, save it
        if ( (cmLeft != 0) && (cmRight != 0) ) {
          Coordinate left = Coordinate(cmLeft->GetLine(), cmLeft->GetSample());
          Coordinate right = Coordinate(cmRight->GetLine(), cmRight->GetSample());
          SmtkPoint spnt = matcher.Create(left, right);

          // Insert the point (unregistered)
          if ( spnt.isValid() ) {
            int line = (int) cmLeft->GetLine();
            int samp = (int) cmLeft->GetSample();
            matcher.isValid(spnt);
            gstack.insert(qMakePair(line, samp), spnt);
            lastEigen = spnt.GoodnessOfFit();
          }
        }
      }

      prog.CheckStatus();
    }
  }
  else {
    // We want to create a grid of control points that is N rows by M columns.
    int rows = (lhImage.lineCount() + linc - 1) / linc;
    int cols = (lhImage.sampleCount() + sinc - 1) / sinc;

    prog.SetMaximumSteps(rows * cols);
    prog.CheckStatus();

    // First pass stack and eigen value statistics
    SmtkQStack fpass;
    fpass.reserve(rows * cols);
    Statistics temp_mev;

    // Loop through grid of points and get statistics to compute
    // initial set of points
    for (int line = linc / 2 + 1; line < nl; line += linc) {
      for (int samp = sinc / 2 + 1 ; samp < ns; samp += sinc) {
        numAttemptedInitialPoints++;
        SmtkPoint spnt = matcher.Register(Coordinate(line, samp));
        if ( spnt.isValid() ) {
          matcher.isValid(spnt);
          fpass.insert(qMakePair(line, samp), spnt);
          temp_mev.AddData(spnt.GoodnessOfFit());
        }
        prog.CheckStatus();
      }
    }

    // Now select a subset of fpass points as the seed points
    cout << "Number of Potential Seed Points: " << fpass.size() << "\n";
    cout << "Min / Max Eigenvalues Matched: " << temp_mev.Minimum() << ", "
         << temp_mev.Maximum() << "\n";

    // How many seed points are requested
    double nseed = ui.GetDouble("NSEED");
    int inseed;
    if (nseed >= 1.0) inseed = (int) nseed;
    else if (nseed > 0.0) inseed = (int) (nseed * (double) (fpass.size()));
    else inseed = (int) ((double) (fpass.size()) * 0.05);

    double seedsample = ui.GetDouble("SEEDSAMPLE");

    // Generate a new stack
    gstack.reserve(inseed);
    while ((gstack.size() < inseed) && (!fpass.isEmpty())) {
      SmtkQStack::iterator bestm;
      if (seedsample <= 0.0) {
        bestm = matcher.FindSmallestEV(fpass);
      }
      else {
        bestm = matcher.FindExpDistEV(fpass, seedsample, temp_mev.Minimum(),
                                      temp_mev.Maximum());
      }

      // Add point to stack
      if (bestm != fpass.end()) {
        Coordinate right = bestm.value().getRight();
        matcher.isValid(bestm.value());
        gstack.insert(bestm.key(), bestm.value());
        lastEigen = bestm.value().GoodnessOfFit();
        fpass.erase(bestm);
      }
    }

    // If a user wants to see the seed network, write it out here
    if (ui.WasEntered("OSEEDNET")) {
      WriteCnet(ui.GetFileName("OSEEDNET"), gstack,
                lhCamera->target()->name(), serialLeft, serialRight);
    }
  }

  ///////////////////////////////////////////////////////////////////////
  // All done with seed points. Sanity check ensures we actually found
  // some.
  ///////////////////////////////////////////////////////////////////////
  if (gstack.size() <= 0) {
    QString msg = "No seed points found - may need to check Gruen parameters.";
    throw IException(IException::User, msg, _FILEINFO_);
  }

  // Report seed point status
  if (!useseed) {
    cout << "Number of Seed Points used: " << gstack.size() << "\n";
    cout << "EV of last Seed Point: " << lastEigen << "\n";
  }
  else {
    cout << "Number of Manual Seed Points: " << gstack.size() << "\n";
  }

  // Use seed points (in stack) to grow
  SmtkQStack bmf;
  bmf.reserve(gstack.size());  // Probably need much more but for starters...

  BigInt numOrigPoints = gstack.size();
  BigInt passpix2 = 0;

  int subcbox = ui.GetInteger("SUBCBOX");
  int halfBox((subcbox-1)/2);
  while (!gstack.isEmpty()) {
    SmtkQStackIter cstack = matcher.FindSmallestEV(gstack);

    // Print number on stack
    if ((gstack.size() % 1000) == 0) {
      cout << "Number on Stack: " << gstack.size() << ". "
           << cstack.value().GoodnessOfFit() << "\n";
    }

    // Test to see if already determined
    SmtkQStackIter bmfPt = bmf.find(cstack.key());
    if (bmfPt == bmf.end()) {
      // It's not in the final stack, so process it

      // Retrieve the point
      SmtkPoint spnt = cstack.value();

      // Register it if it's not already registered
      if (!spnt.isRegistered()) {
        spnt = matcher.Register(spnt, spnt.getAffine());
      }

      // Still must check for validity if the point was just registered,
      // otherwise it should be good
      if ( spnt.isValid() ) {
        passpix2++;
        bmf.insert(cstack.key(), spnt);  // inserts (0,0) offset excluded below

        int line = cstack.key().first;
        int sample = cstack.key().second;

        // Determine match points
        double eigen(spnt.GoodnessOfFit());
        for (int sampBox = -halfBox ; sampBox <= halfBox ; sampBox++ ) {
          int csamp = sample + sampBox;
          for (int lineBox = -halfBox ; lineBox <= halfBox ; lineBox++) {
            int cline = line + lineBox;

            if ( !( (sampBox == 0) && (lineBox == 0)) ) {  // Already added above
              SmtkQPair dupPair(cline, csamp);
              SmtkQStackIter temp = bmf.find(dupPair);
              SmtkPoint bmfpnt;
              if (temp != bmf.end()) {
                if (temp.value().GoodnessOfFit() > eigen) {
                  // Create cloned point with better fit
                  bmfpnt = matcher.Clone(spnt, Coordinate(cline, csamp));
                }
              }
              else {
                // ISIS2 is BMF(SAMP,LINE,7) .EQ VALID_MAX4)
                // Clone new point for insert
                bmfpnt = matcher.Clone(spnt, Coordinate(cline, csamp));
              }

              // Add if good point
              if (bmfpnt.isValid()) {
                bmf.insert(dupPair, bmfpnt);
              }
            }
          }
        }

        // Grow stack with spacing, adding info to stack
        for (int i = -1 ; i <= 1 ; i++) {    // Sample
          for (int j = -1 ; j <= 1 ; j++) {  // Line
            // Don't re-add the original sample, line
            if ( !((i == 0) && (j == 0)) ) {
              // Grow based upon spacing
              double ssamp = sample + (i * space);
              double sline = line + (j * space);
              Coordinate pnt = Coordinate(sline, ssamp);
              SmtkPoint gpnt = matcher.Clone(spnt, pnt);

              if ( gpnt.isValid() ) {
                SmtkQPair growpt((int) sline, (int) ssamp);

                // Double check we don't have a finalized result at this position
                SmtkQStackIter temp = bmf.find(growpt);
                if(temp == bmf.end()) {
                  gstack.insert(growpt, gpnt);
                }
              }
            }
          }
        }
      }
    }

    // Remove the current point from the grow stack (hole)
    gstack.erase(cstack);
  }

  /////////////////////////////////////////////////////////////////////////
  // All done with creating points. Perform output options.
  /////////////////////////////////////////////////////////////////////////
  // If a TO parameter was specified, create DEM with errors
  if (ui.WasEntered("TO")) {
    // Create the output DEM
    cout << "\nCreating output DEM from " << bmf.size() << " points.\n";
    Process p;
    Cube *icube = p.SetInputCube("FROM");
    Cube *ocube = p.SetOutputCube("TO", icube->sampleCount(),
                                  icube->lineCount(), 3);
    p.ClearInputCubes();

    int boxsize = ui.GetInteger("BOXSIZE");
    double plotdist = ui.GetDouble("PLOTDIST");

    TileManager dem(*ocube), eigen(*ocube), stErr(*ocube);
    dem.SetTile(1, 1);     // DEM Data/elevation
    stErr.SetTile(1, 2);   // Error in stereo computation
    eigen.SetTile(1, 3);   // Eigenvalue of the solution

    int nBTiles(eigen.Tiles()/3);  // Total tiles / 3 bands

    prog.SetText("Creating DEM");
    prog.SetMaximumSteps(nBTiles);
    prog.CheckStatus();

    Statistics stAng;
    while ( !eigen.end() ) {  // Must use the last band for this!!
      PointPlot tm = for_each(bmf.begin(), bmf.end(), PointPlot(dem, plotdist));
      tm.FillPoints(*lhCamera, *rhCamera, boxsize, dem, stErr, eigen, &stAng);

      ocube->write(dem);
      ocube->write(stErr);
      ocube->write(eigen);

      dem.next();
      stErr.next();
      eigen.next();

      prog.CheckStatus();
    }

    // Report Stereo separation angles
    PvlGroup stresultsPvl("StereoSeparationAngle");
    stresultsPvl += PvlKeyword("Minimum", toString(stAng.Minimum()), "deg");
    stresultsPvl += PvlKeyword("Average", toString(stAng.Average()), "deg");
    stresultsPvl += PvlKeyword("Maximum", toString(stAng.Maximum()), "deg");
    stresultsPvl += PvlKeyword("StandardDeviation", toString(stAng.StandardDeviation()), "deg");
    Application::Log(stresultsPvl);

    // Update the label with BandBin keywords
    PvlKeyword filter("FilterName", "Elevation", "meters");
    filter.addValue("ElevationError", "meters");
    filter.addValue("GoodnessOfFit", "unitless");
    PvlKeyword center("Center", "1.0");
    center.addValue("1.0");
    center.addValue("1.0");

    PvlGroup &bandbin = ocube->label()->findGroup("BandBin", PvlObject::Traverse);
    bandbin.addKeyword(filter, PvlContainer::Replace);
    bandbin.addKeyword(center, PvlContainer::Replace);
    center.setName("Width");
    bandbin.addKeyword(center, PvlContainer::Replace);

    p.EndProcess();
  }

  // If a cnet file was entered, write the ControlNet pvl to the file
  if (ui.WasEntered("ONET")) {
    WriteCnet(ui.GetFileName("ONET"), bmf, lhCamera->target()->name(),
              serialLeft, serialRight);
  }

  // Create output data
  PvlGroup totalPointsPvl("Totals");
  totalPointsPvl += PvlKeyword("AttemptedPoints", toString(numAttemptedInitialPoints));
  totalPointsPvl += PvlKeyword("InitialSuccesses", toString(numOrigPoints));
  totalPointsPvl += PvlKeyword("GrowSuccesses", toString(passpix2));
  totalPointsPvl += PvlKeyword("ResultingPoints", toString(bmf.size()));
  Application::Log(totalPointsPvl);

  Pvl arPvl = matcher.RegistrationStatistics();
  PvlGroup smtkresultsPvl("SmtkResults");
  smtkresultsPvl += PvlKeyword("SpiceOffImage", toString(matcher.OffImageErrorCount()));
  smtkresultsPvl += PvlKeyword("SpiceDistanceError", toString(matcher.SpiceErrorCount()));
  arPvl.addGroup(smtkresultsPvl);

  for(int i = 0; i < arPvl.groups(); i++) {
    Application::Log(arPvl.group(i));
  }

  // Add the auto registration information to print.prt
  PvlGroup autoRegTemplate = matcher.RegTemplate();
  Application::Log(autoRegTemplate);

  // Don't need the cubes opened anymore
  lhImage.close();
  rhImage.close();
}
/**
 * This is the main method. Makeflat runs in three steps:
 *
 * 1) Calculate statistics
 *   - For all cameras, this checks for one band and matching
 *       sample counts.
 *   - For framing cameras, this checks the standard deviation of
 *       the images and records the averages of each image
 *   - For push frame cameras, this calls CheckFramelets for each
 *       image.
 *
 * 2) Create the temporary file, collect more detailed statistics
 *   - For all cameras, this generates the temporary file and calculates
 *       the final exclusion list
 *   - For framing/push frame cameras, the temporary file is
 *       2 bands, where the first is a sum of DNs from each image/framelet
 *       and the second band is a count of valid DNs that went into each sum
 *
 * 3) Create the final flat field file
 *   - For all cameras, this processes the temporary file to create the final flat
 *       field file.
 */
void IsisMain() {
  // Initialize variables
  ResetGlobals();

  UserInterface &ui = Application::GetUserInterface();
  maxStdev = ui.GetDouble("STDEVTOL");

  if(ui.GetString("IMAGETYPE") == "FRAMING") {
    cameraType = Framing;

    // Framing cameras need to figure this out automatically
    // during step 1
    numFrameLines = -1;
  }
  else if(ui.GetString("IMAGETYPE") == "LINESCAN") {
    cameraType = LineScan;
    numFrameLines = ui.GetInteger("NUMLINES");
  }
  else {
    cameraType = PushFrame;
    numFrameLines = ui.GetInteger("FRAMELETHEIGHT");
  }

  FileList inList(ui.GetFilename("FROMLIST"));
  Progress progress;

  tempFileLength = 0;
  numOutputSamples = 0;

  /**
   * Line scan progress is based on the input list, whereas
   * the other cameras take much longer and are based on the
   * images themselves. Prepare the progress if we're doing
   * line scan.
   */
  if(cameraType == LineScan) {
    progress.SetText("Calculating Number of Image Lines");
    progress.SetMaximumSteps(inList.size());
    progress.CheckStatus();
  }

  /**
   * For a push frame camera, the temp file is one framelet.
   * Technically this is the same for the framing camera, but we
   * don't know the height of a framelet yet.
   */
  if(cameraType == PushFrame) {
    tempFileLength = numFrameLines;
  }

  /**
   * Start pass 1; use the global currImage so that the methods called
   * know which image we're processing.
   */
  for(currImage = 0; currImage < inList.size(); currImage++) {
    /**
     * Read the current cube into memory
     */
    Cube tmp;
    tmp.Open(Filename(inList[currImage]).Expanded());

    /**
     * If we haven't determined how many samples the output
     * should have, we can do so now
     */
    if(numOutputSamples == 0 && tmp.Bands() == 1) {
      numOutputSamples = tmp.Samples();
    }

    /**
     * Try and validate the image, quick tests first!
     *
     * (imageValid &= means imageValid = imageValid && ...)
     */
    bool imageValid = true;

    // Only single band images are acceptable
    imageValid &= (tmp.Bands() == 1);

    // Sample sizes must always match
    imageValid &= (numOutputSamples == tmp.Samples());

    // For push frame cameras, all framelets must be valid
    if(cameraType == PushFrame) {
      imageValid &= (tmp.Lines() % numFrameLines == 0);
    }

    // For framing cameras, we need to figure out the size...
    // setTempFileLength is used to revert if the file
    // is decided to be invalid
    bool setTempFileLength = false;

    if(cameraType == Framing) {
      if(tempFileLength == 0 && imageValid) {
        tempFileLength = tmp.Lines();
        numFrameLines = tempFileLength;
        setTempFileLength = true;
      }

      imageValid &= (tempFileLength == tmp.Lines());
    }

    // Statistics are necessary at this point for push frame and framing cameras
    // because the framing camera standard deviation tolerance is based on
    // entire images, and push frame framelet exclusion stats can not be collected
    // during pass 2 cleanly
    if((cameraType == Framing || cameraType == PushFrame) && imageValid) {
      string prog = "Calculating Standard Deviation " + iString((int)currImage+1) + "/";
      prog += iString((int)inList.size()) + " (" + Filename(inList[currImage]).Name() + ")";

      if(cameraType == Framing) {
        Statistics *stats = tmp.Statistics(1, prog);
        imageValid &= !IsSpecial(stats->StandardDeviation());
        imageValid &= !IsSpecial(stats->Average());
        imageValid &= stats->StandardDeviation() <= maxStdev;

        vector<double> fileStats;
        fileStats.push_back(stats->Average());
        inputFrameletAverages.push_back(fileStats);

        delete stats;
      }
      else if(cameraType == PushFrame) {
        imageValid &= CheckFramelets(prog, tmp);
      }

      if(setTempFileLength && !imageValid) {
        tempFileLength = 0;
      }
    }

    // The line scan camera needs to actually count the number of lines in each image
    // to know how many total frames there are before beginning pass 2.
    if(imageValid && (cameraType == LineScan)) {
      int lines = (tmp.Lines() / numFrameLines);

      // partial frame?
      if(tmp.Lines() % numFrameLines != 0) {
        lines++;
      }

      tempFileLength += lines;
    }
    else if(!imageValid) {
      excludedFiles.insert(pair<int, bool>(currImage, true));
    }

    tmp.Close();

    if(cameraType == LineScan) {
      progress.CheckStatus();
    }
  }

  /**
   * If the number of output samples could not be determined, we never
   * found a legitimate cube.
   */
  if(numOutputSamples <= 0) {
    string msg = "No valid input cubes were found";
    throw iException::Message(iException::User, msg, _FILEINFO_);
  }

  /**
   * If there's no temp file length, which is based off of valid data in
   * the input cubes, then we haven't found any valid data.
   */
  if(tempFileLength <= 0) {
    string msg = "No valid input data was found";
    throw iException::Message(iException::User, msg, _FILEINFO_);
  }

  /**
   * ocube is now the temporary file (for pass 2).
   */
  ocube = new Cube();
  ocube->SetDimensions(numOutputSamples, tempFileLength, 2);
  PvlGroup &prefs = Preference::Preferences().FindGroup("DataDirectory", Pvl::Traverse);
  iString outTmpName = (string)prefs["Temporary"][0] + "/";
  outTmpName += Filename(ui.GetFilename("TO")).Basename() + ".tmp.cub";
  ocube->Create(outTmpName);
  oLineMgr = new LineManager(*ocube);
  oLineMgr->SetLine(1);

  ProcessByBrick p;
  int excludedCnt = 0;

  if(cameraType == LineScan) {
    outputTmpAverages.resize(numOutputSamples);
    outputTmpCounts.resize(numOutputSamples);
    numInputDns.resize(numOutputSamples);
  }

  cubeInitialized = false;
  for(currImage = 0; currImage < inList.size(); currImage++) {
    if(Excluded(currImage)) {
      excludedCnt++;
      continue;
    }

    PvlObject currFile("Exclusions");
    currFile += PvlKeyword("Filename", inList[currImage]);
    currFile += PvlKeyword("Tolerance", maxStdev);

    if(cameraType == LineScan) {
      currFile += PvlKeyword("FrameLines", numFrameLines);
    }
    else if(cameraType == PushFrame) {
      currFile += PvlKeyword("FrameletLines", numFrameLines);
    }

    excludedDetails.push_back(currFile);

    CubeAttributeInput inAtt;

    // This needs to be set constantly because ClearInputCubes
    // seems to be removing the input brick size.
    if(cameraType == LineScan) {
      p.SetBrickSize(1, numFrameLines, 1);
    }
    else if(cameraType == Framing || cameraType == PushFrame) {
      p.SetBrickSize(numOutputSamples, 1, 1);
    }

    p.SetInputCube(inList[currImage], inAtt);
    iString progText = "Calculating Averages " + iString((int)currImage+1);
    progText += "/" + iString((int)inList.size());
    progText += " (" + Filename(inList[currImage]).Name() + ")";
    p.Progress()->SetText(progText);

    p.StartProcess(CreateTemporaryData);
    p.EndProcess();
    p.ClearInputCubes();

    if(excludedDetails[excludedDetails.size()-1].Groups() == 0) {
      excludedDetails.resize(excludedDetails.size()-1);
    }
  }

  /**
   * Pass 2 completed. The processing methods were responsible for writing
   * the entire temporary cube.
   */
  if(oLineMgr) {
    delete oLineMgr;
    oLineMgr = NULL;
  }

  if(ocube) {
    ocube->Close();
    delete ocube;
  }

  /**
   * ocube is now the final output
   */
  ocube = new Cube();

  if(cameraType == LineScan) {
    ocube->SetDimensions(numOutputSamples, 1, 1);
  }
  else if(cameraType == Framing || cameraType == PushFrame) {
    ocube->SetDimensions(numOutputSamples, tempFileLength, 1);
  }

  ocube->Create(Filename(ui.GetFilename("TO")).Expanded());
  oLineMgr = new LineManager(*ocube);
  oLineMgr->SetLine(1);

  // We now have the necessary temp file, let's go ahead and combine it into
  // the final output!
  p.SetInputBrickSize(numOutputSamples, 1, 2);
  p.SetOutputBrickSize(numOutputSamples, 1, 1);

  cubeInitialized = false;
  CubeAttributeInput inAtt;
  p.Progress()->SetText("Calculating Final Flat Field");
  p.SetInputCube(outTmpName, inAtt);
  p.StartProcess(ProcessTemporaryData);
  p.EndProcess();

  if(cameraType == LineScan) {
    ocube->Write(*oLineMgr);
  }

  if(oLineMgr) {
    delete oLineMgr;
    oLineMgr = NULL;
  }

  if(ocube) {
    ocube->Close();
    delete ocube;
    ocube = NULL;
  }

  /**
   * Build a list of excluded files
   */
  PvlGroup excludedFiles("ExcludedFiles");
  for(currImage = 0; currImage < inList.size(); currImage++) {
    if(Excluded(currImage)) {
      excludedFiles += PvlKeyword("File", inList[currImage]);
    }
  }

  // log the results
  Application::Log(excludedFiles);

  if(ui.WasEntered("EXCLUDE")) {
    Pvl excludeFile;

    // Find excluded files
    excludeFile.AddGroup(excludedFiles);

    for(unsigned int i = 0; i < excludedDetails.size(); i++) {
      excludeFile.AddObject(excludedDetails[i]);
    }

    excludeFile.Write(Filename(ui.GetFilename("EXCLUDE")).Expanded());
  }

  remove(outTmpName.c_str());

  // Clean up settings
  ResetGlobals();
}
void IsisMain() {
  Preference::Preferences(true);
  cout << "Testing ProcessImport Class ... " << endl;
  Preference::Preferences(true);

  ProcessImport p;
  p.SetInputFile("$base/testData/isisTruth.dat");
  p.SetBase(0.0);
  p.SetMultiplier(1.0);
  p.SetDataHeaderBytes(0);
  p.SetDataPrefixBytes(0);
  p.SetDataSuffixBytes(0);
  p.SetDataTrailerBytes(0);
  p.SetDimensions(126, 126, 1);
  p.SetFileHeaderBytes(16384);
  p.SetOrganization(ProcessImport::BSQ);
  p.SetPixelType(Real);
  p.SetByteOrder(Lsb);
  p.SetOutputCube("TO");
  p.StartProcess();
  p.EndProcess();

  Process p2;
  CubeAttributeInput att;
  QString file = Application::GetUserInterface().GetFileName("TO");
  Cube *icube = p2.SetInputCube(file, att);
  Statistics *stat = icube->statistics();
  cout << endl << "Average: " << stat->Average() << endl;
  cout << endl << "Variance: " << stat->Variance() << endl;
  p2.EndProcess();
  QFile::remove(file);
  cout << endl;

  // Checks the setting of special pixel ranges
  cout << "Check the settings of the special pixel ranges" << endl;

  ProcessImport pNull;
  pNull.SetNull(0.0, 45.0);
  try {  // Should NOT throw an error
    pNull.SetNull(0.0, 45.0);
  }
  catch(IException e) {
    cout << e.toString() << endl;
  }
  cout << endl;

  try {  // Should throw an error
    pNull.SetLRS(35.0, 55.0);
  }
  catch(IException e) {
    cout << e.toString() << endl;
  }
  cout << endl;

  try {  // Should NOT throw an error
    pNull.SetLIS(50.0, 52.0);
  }
  catch(IException e) {
    cout << e.toString() << endl;
  }
  cout << endl;

  try {  // Should throw an error
    pNull.SetHRS(-10.0, 5.0);
  }
  catch(IException e) {
    cout << e.toString() << endl;
  }
  cout << endl;

  ProcessImport pLRS;
  pLRS.SetLRS(10.0, 145.0);
  try {  // Should throw an error
    pLRS.SetNull(35.0, 55.0);
  }
  catch(IException e) {
    cout << e.toString() << endl;
  }
  cout << endl;

  try {  // Should throw an error
    pNull.SetLIS(0.0, 15.0);
  }
  catch(IException e) {
    cout << e.toString() << endl;
  }
  cout << endl;

  try {  // Should throw an error
    pLRS.SetHIS(-10.0, 155.0);
  }
  catch(IException e) {
    cout << e.toString() << endl;
  }
  cout << endl;

  try {  // Should NOT throw an error
    pLRS.SetHIS(145.0, 155.0);
  }
  catch(IException e) {
    cout << e.toString() << endl;
  }
  cout << endl;

  cout << "Testing ProcessBil()" << endl;
  ProcessImport p3;
  p3.SetInputFile("$base/testData/isisTruth.dat");
  p3.SetBase(0.0);
  p3.SetMultiplier(1.0);
  p3.SetDataHeaderBytes(0);
  p3.SetDataPrefixBytes(0);
  p3.SetDataSuffixBytes(0);
  p3.SetDataTrailerBytes(0);
  p3.SetDimensions(126, 126, 1);
  p3.SetFileHeaderBytes(16384);
  p3.SetOrganization(ProcessImport::BIL);
  p3.SetPixelType(Real);
  p3.SetByteOrder(Lsb);
  p3.SetOutputCube("TO");
  p3.StartProcess();
  p3.EndProcess();

  cout << endl << "Testing ProcessBip()" << endl;
  ProcessImport p4;
  p4.SetInputFile("$base/testData/isisTruth.dat");
  p4.SetBase(0.0);
  p4.SetMultiplier(1.0);
  p4.SetDataHeaderBytes(0);
  p4.SetDataPrefixBytes(0);
  p4.SetDataSuffixBytes(0);
  p4.SetDataTrailerBytes(0);
  p4.SetDimensions(126, 126, 1);
  p4.SetFileHeaderBytes(16384);
  p4.SetOrganization(ProcessImport::BIP);
  p4.SetPixelType(Real);
  p4.SetByteOrder(Lsb);
  p4.SetOutputCube("TO");
  p4.StartProcess();
  p4.EndProcess();
}
void IsisMain() {
  // Create a process to create the input cubes
  Process p;

  // Create the input cubes, matching sample/lines
  Cube *inCube = p.SetInputCube("FROM");
  Cube *latCube = p.SetInputCube("LATCUB", SpatialMatch);
  Cube *lonCube = p.SetInputCube("LONCUB", SpatialMatch);

  // A 1x1 brick to read in the latitude and longitude DN values from
  // the specified cubes
  Brick latBrick(1, 1, 1, latCube->PixelType());
  Brick lonBrick(1, 1, 1, lonCube->PixelType());

  UserInterface &ui = Application::GetUserInterface();

  // Set the sample and line increments
  int sinc = (int)(inCube->Samples() * 0.10);
  if(ui.WasEntered("SINC")) {
    sinc = ui.GetInteger("SINC");
  }

  int linc = (int)(inCube->Lines() * 0.10);
  if(ui.WasEntered("LINC")) {
    linc = ui.GetInteger("LINC");
  }

  // Set the degree of the polynomial to use in our functions
  int degree = ui.GetInteger("DEGREE");

  // We are using a polynomial with two variables
  PolynomialBivariate sampFunct(degree);
  PolynomialBivariate lineFunct(degree);

  // We will be solving the function using the least squares method
  LeastSquares sampSol(sampFunct);
  LeastSquares lineSol(lineFunct);

  // Setup the variables for solving the stereographic projection
  // x = cos(latitude) * sin(longitude - lon_center)
  // y = cos(lat_center) * sin(latitude) - sin(lat_center) * cos(latitude) * cos(longitude - lon_center)

  // Get the center lat and long from the input cubes
  double lat_center = latCube->Statistics()->Average() * PI/180.0;
  double lon_center = lonCube->Statistics()->Average() * PI/180.0;

  /**
   * Loop through lines and samples projecting the latitude and longitude at those
   * points to stereographic x and y and adding these points to the LeastSquares
   * matrix.
   */
  for(int i = 1; i <= inCube->Lines(); i += linc) {
    for(int j = 1; j <= inCube->Samples(); j += sinc) {
      latBrick.SetBasePosition(j, i, 1);
      latCube->Read(latBrick);
      if(IsSpecial(latBrick.at(0))) continue;
      double lat = latBrick.at(0) * PI/180.0;

      lonBrick.SetBasePosition(j, i, 1);
      lonCube->Read(lonBrick);
      if(IsSpecial(lonBrick.at(0))) continue;
      double lon = lonBrick.at(0) * PI/180.0;

      // Project lat and lon to x and y using a stereographic projection
      double k = 2/(1 + sin(lat_center) * sin(lat) + cos(lat_center)*cos(lat)*cos(lon - lon_center));
      double x = k * cos(lat) * sin(lon - lon_center);
      double y = k * (cos(lat_center) * sin(lat))
                 - (sin(lat_center) * cos(lat) * cos(lon - lon_center));

      // Add x and y to the least squares matrix
      vector<double> data;
      data.push_back(x);
      data.push_back(y);
      sampSol.AddKnown(data, j);
      lineSol.AddKnown(data, i);

      // If the sample increment goes past the last sample in the line, we want to
      // always read the last sample.
      if(j != inCube->Samples() && j + sinc > inCube->Samples()) {
        j = inCube->Samples() - sinc;
      }
    }

    // If the line increment goes past the last line in the cube, we want to
    // always read the last line.
    if(i != inCube->Lines() && i + linc > inCube->Lines()) {
      i = inCube->Lines() - linc;
    }
  }

  // Solve the least squares functions using QR Decomposition
  sampSol.Solve(LeastSquares::QRD);
  lineSol.Solve(LeastSquares::QRD);

  // If the user wants to save the residuals to a file, create a file and write
  // the column titles to it.
  TextFile oFile;
  if(ui.WasEntered("RESIDUALS")) {
    oFile.Open(ui.GetFilename("RESIDUALS"), "overwrite");
    oFile.PutLine("Sample,\tLine,\tX,\tY,\tSample Error,\tLine Error\n");
  }

  // Gather the statistics for the residuals from the least squares solutions
  Statistics sampErr;
  Statistics lineErr;
  vector<double> sampResiduals = sampSol.Residuals();
  vector<double> lineResiduals = lineSol.Residuals();
  for(int i = 0; i < (int)sampResiduals.size(); i++) {
    sampErr.AddData(sampResiduals[i]);
    lineErr.AddData(lineResiduals[i]);
  }

  // If a residuals file was specified, write the previous data, and the errors, to the file.
  if(ui.WasEntered("RESIDUALS")) {
    for(int i = 0; i < sampSol.Rows(); i++) {
      vector<double> data = sampSol.GetInput(i);
      iString tmp = "";
      tmp += iString(sampSol.GetExpected(i));
      tmp += ",\t";
      tmp += iString(lineSol.GetExpected(i));
      tmp += ",\t";
      tmp += iString(data[0]);
      tmp += ",\t";
      tmp += iString(data[1]);
      tmp += ",\t";
      tmp += iString(sampResiduals[i]);
      tmp += ",\t";
      tmp += iString(lineResiduals[i]);
      oFile.PutLine(tmp + "\n");
    }
  }
  oFile.Close();

  // Record the errors to the log
  PvlGroup error("Error");
  error += PvlKeyword("Degree", degree);
  error += PvlKeyword("NumberOfPoints", (int)sampResiduals.size());
  error += PvlKeyword("SampleMinimumError", sampErr.Minimum());
  error += PvlKeyword("SampleAverageError", sampErr.Average());
  error += PvlKeyword("SampleMaximumError", sampErr.Maximum());
  error += PvlKeyword("SampleStdDeviationError", sampErr.StandardDeviation());
  error += PvlKeyword("LineMinimumError", lineErr.Minimum());
  error += PvlKeyword("LineAverageError", lineErr.Average());
  error += PvlKeyword("LineMaximumError", lineErr.Maximum());
  error += PvlKeyword("LineStdDeviationError", lineErr.StandardDeviation());
  Application::Log(error);

  // Close the input cubes for cleanup
  p.EndProcess();

  // If we want to warp the image, then continue; otherwise return
  if(!ui.GetBoolean("NOWARP")) {
    // Create the mapping group
    Pvl mapFile;
    mapFile.Read(ui.GetFilename("MAP"));
    PvlGroup &mapGrp = mapFile.FindGroup("Mapping", Pvl::Traverse);

    // Reopen the lat and long cubes
    latCube = new Cube();
    latCube->SetVirtualBands(ui.GetInputAttribute("LATCUB").Bands());
    latCube->Open(ui.GetFilename("LATCUB"));

    lonCube = new Cube();
    lonCube->SetVirtualBands(ui.GetInputAttribute("LONCUB").Bands());
    lonCube->Open(ui.GetFilename("LONCUB"));

    PvlKeyword targetName;

    // If the user entered the target name
    if(ui.WasEntered("TARGET")) {
      targetName = PvlKeyword("TargetName", ui.GetString("TARGET"));
    }
    // Else read the target name from the input cube
    else {
      Pvl fromFile;
      fromFile.Read(ui.GetFilename("FROM"));
      targetName = fromFile.FindKeyword("TargetName", Pvl::Traverse);
    }

    mapGrp.AddKeyword(targetName, Pvl::Replace);

    PvlKeyword equRadius;
    PvlKeyword polRadius;

    // If the user entered the equatorial and polar radii
    if(ui.WasEntered("EQURADIUS") && ui.WasEntered("POLRADIUS")) {
      equRadius = PvlKeyword("EquatorialRadius", ui.GetDouble("EQURADIUS"));
      polRadius = PvlKeyword("PolarRadius", ui.GetDouble("POLRADIUS"));
    }
    // Else read them from the pck
    else {
      Filename pckFile("$base/kernels/pck/pck?????.tpc");
      pckFile.HighestVersion();

      string pckFilename = pckFile.Expanded();

      furnsh_c(pckFilename.c_str());

      string target = targetName[0];
      SpiceInt code;
      SpiceBoolean found;

      bodn2c_c(target.c_str(), &code, &found);

      if (!found) {
        string msg = "Could not convert Target [" + target + "] to NAIF code";
        throw Isis::iException::Message(Isis::iException::Io, msg, _FILEINFO_);
      }

      SpiceInt n;
      SpiceDouble radii[3];
      bodvar_c(code, "RADII", &n, radii);

      equRadius = PvlKeyword("EquatorialRadius", radii[0] * 1000);
      polRadius = PvlKeyword("PolarRadius", radii[2] * 1000);
    }

    mapGrp.AddKeyword(equRadius, Pvl::Replace);
    mapGrp.AddKeyword(polRadius, Pvl::Replace);

    // If the latitude type is not in the mapping group, copy it from the input
    if(!mapGrp.HasKeyword("LatitudeType")) {
      if(ui.GetString("LATTYPE") == "PLANETOCENTRIC") {
        mapGrp.AddKeyword(PvlKeyword("LatitudeType", "Planetocentric"), Pvl::Replace);
      }
      else {
        mapGrp.AddKeyword(PvlKeyword("LatitudeType", "Planetographic"), Pvl::Replace);
      }
    }

    // If the longitude direction is not in the mapping group, copy it from the input
    if(!mapGrp.HasKeyword("LongitudeDirection")) {
      if(ui.GetString("LONDIR") == "POSITIVEEAST") {
        mapGrp.AddKeyword(PvlKeyword("LongitudeDirection", "PositiveEast"), Pvl::Replace);
      }
      else {
        mapGrp.AddKeyword(PvlKeyword("LongitudeDirection", "PositiveWest"), Pvl::Replace);
      }
    }

    // If the longitude domain is not in the mapping group, assume it is 360
    if(!mapGrp.HasKeyword("LongitudeDomain")) {
      mapGrp.AddKeyword(PvlKeyword("LongitudeDomain", "360"), Pvl::Replace);
    }

    // If the default range is to be computed, use the input lat/long cubes to determine the range
    if(ui.GetString("DEFAULTRANGE") == "COMPUTE") {
      // NOTE - When computing the min/max longitude this application does not account for the
      // longitude seam if it exists. Since the min/max are calculated from the statistics of
      // the input longitude cube and then converted to the mapping group's domain, they may be
      // invalid for cubes containing the longitude seam.
      Statistics *latStats = latCube->Statistics();
      Statistics *lonStats = lonCube->Statistics();

      double minLat = latStats->Minimum();
      double maxLat = latStats->Maximum();

      bool isOcentric = ((std::string)mapGrp.FindKeyword("LatitudeType")) == "Planetocentric";

      if(isOcentric) {
        if(ui.GetString("LATTYPE") != "PLANETOCENTRIC") {
          minLat = Projection::ToPlanetocentric(minLat, (double)equRadius, (double)polRadius);
          maxLat = Projection::ToPlanetocentric(maxLat, (double)equRadius, (double)polRadius);
        }
      }
      else {
        if(ui.GetString("LATTYPE") == "PLANETOCENTRIC") {
          minLat = Projection::ToPlanetographic(minLat, (double)equRadius, (double)polRadius);
          maxLat = Projection::ToPlanetographic(maxLat, (double)equRadius, (double)polRadius);
        }
      }

      int lonDomain = (int)mapGrp.FindKeyword("LongitudeDomain");
      double minLon = lonDomain == 360 ? Projection::To360Domain(lonStats->Minimum()) :
                                         Projection::To180Domain(lonStats->Minimum());
      double maxLon = lonDomain == 360 ?
                                         Projection::To360Domain(lonStats->Maximum()) :
                                         Projection::To180Domain(lonStats->Maximum());

      bool isPosEast = ((std::string)mapGrp.FindKeyword("LongitudeDirection")) == "PositiveEast";

      if(isPosEast) {
        if(ui.GetString("LONDIR") != "POSITIVEEAST") {
          minLon = Projection::ToPositiveEast(minLon, lonDomain);
          maxLon = Projection::ToPositiveEast(maxLon, lonDomain);
        }
      }
      else {
        if(ui.GetString("LONDIR") == "POSITIVEEAST") {
          minLon = Projection::ToPositiveWest(minLon, lonDomain);
          maxLon = Projection::ToPositiveWest(maxLon, lonDomain);
        }
      }

      if(minLon > maxLon) {
        double temp = minLon;
        minLon = maxLon;
        maxLon = temp;
      }

      mapGrp.AddKeyword(PvlKeyword("MinimumLatitude", minLat), Pvl::Replace);
      mapGrp.AddKeyword(PvlKeyword("MaximumLatitude", maxLat), Pvl::Replace);
      mapGrp.AddKeyword(PvlKeyword("MinimumLongitude", minLon), Pvl::Replace);
      mapGrp.AddKeyword(PvlKeyword("MaximumLongitude", maxLon), Pvl::Replace);
    }

    // If the user decided to enter a ground range then override
    if (ui.WasEntered("MINLAT")) {
      mapGrp.AddKeyword(PvlKeyword("MinimumLatitude", ui.GetDouble("MINLAT")), Pvl::Replace);
    }

    if (ui.WasEntered("MAXLAT")) {
      mapGrp.AddKeyword(PvlKeyword("MaximumLatitude", ui.GetDouble("MAXLAT")), Pvl::Replace);
    }

    if (ui.WasEntered("MINLON")) {
      mapGrp.AddKeyword(PvlKeyword("MinimumLongitude", ui.GetDouble("MINLON")), Pvl::Replace);
    }

    if (ui.WasEntered("MAXLON")) {
      mapGrp.AddKeyword(PvlKeyword("MaximumLongitude", ui.GetDouble("MAXLON")), Pvl::Replace);
    }

    // If the pixel resolution is to be computed, compute the pixels/degree from the input
    if (ui.GetString("PIXRES") == "COMPUTE") {
      latBrick.SetBasePosition(1, 1, 1);
      latCube->Read(latBrick);

      lonBrick.SetBasePosition(1, 1, 1);
      lonCube->Read(lonBrick);

      // Read the lat and long at the upper left corner
      double a = latBrick.at(0) * PI/180.0;
      double c = lonBrick.at(0) * PI/180.0;

      latBrick.SetBasePosition(latCube->Samples(), latCube->Lines(), 1);
      latCube->Read(latBrick);

      lonBrick.SetBasePosition(lonCube->Samples(), lonCube->Lines(), 1);
      lonCube->Read(lonBrick);

      // Read the lat and long at the lower right corner
      double b = latBrick.at(0) * PI/180.0;
      double d = lonBrick.at(0) * PI/180.0;

      // Determine the angle between the two points
      double angle = acos(cos(a) * cos(b) * cos(c - d) + sin(a) * sin(b));
      //double angle = acos((cos(a1) * cos(b1) * cos(b2)) + (cos(a1) * sin(b1) * cos(a2) * sin(b2)) + (sin(a1) * sin(a2)));
      angle *= 180/PI;

      // Determine the number of pixels between the two points
      double pixels = sqrt(pow(latCube->Samples() - 1.0, 2.0) + pow(latCube->Lines() - 1.0, 2.0));

      // Add the scale in pixels/degree to the mapping group
      mapGrp.AddKeyword(PvlKeyword("Scale", pixels/angle, "pixels/degree"), Pvl::Replace);
      if (mapGrp.HasKeyword("PixelResolution")) {
        mapGrp.DeleteKeyword("PixelResolution");
      }
    }

    // If the user decided to enter a resolution then override
    if (ui.GetString("PIXRES") == "MPP") {
      mapGrp.AddKeyword(PvlKeyword("PixelResolution", ui.GetDouble("RESOLUTION"), "meters/pixel"),
                        Pvl::Replace);
      if (mapGrp.HasKeyword("Scale")) {
        mapGrp.DeleteKeyword("Scale");
      }
    }
    else if (ui.GetString("PIXRES") == "PPD") {
      mapGrp.AddKeyword(PvlKeyword("Scale", ui.GetDouble("RESOLUTION"), "pixels/degree"),
                        Pvl::Replace);
      if (mapGrp.HasKeyword("PixelResolution")) {
        mapGrp.DeleteKeyword("PixelResolution");
      }
    }

    // Create a projection using the map file we created
    int samples, lines;
    Projection *outmap = ProjectionFactory::CreateForCube(mapFile, samples, lines, false);

    // Write the map file to the log
    Application::GuiLog(mapGrp);

    // Create a process rubber sheet
    ProcessRubberSheet r;

    // Set the input cube
    inCube =
r.SetInputCube("FROM"); double tolerance = ui.GetDouble("TOLERANCE") * outmap->Resolution(); //Create a new transform object Transform *transform = new nocam2map (sampSol, lineSol, outmap, latCube, lonCube, ui.GetString("LATTYPE") == "PLANETOCENTRIC", ui.GetString("LONDIR") == "POSITIVEEAST", tolerance, ui.GetInteger("ITERATIONS"), inCube->Samples(), inCube->Lines(), samples, lines); //Allocate the output cube and add the mapping labels Cube *oCube = r.SetOutputCube ("TO", transform->OutputSamples(), transform->OutputLines(), inCube->Bands()); oCube->PutGroup(mapGrp); //Determine which interpolation to use Interpolator *interp = NULL; if (ui.GetString("INTERP") == "NEARESTNEIGHBOR") { interp = new Interpolator(Interpolator::NearestNeighborType); } else if (ui.GetString("INTERP") == "BILINEAR") { interp = new Interpolator(Interpolator::BiLinearType); } else if (ui.GetString("INTERP") == "CUBICCONVOLUTION") { interp = new Interpolator(Interpolator::CubicConvolutionType); } //Warp the cube r.StartProcess(*transform, *interp); r.EndProcess(); // add mapping to print.prt PvlGroup mapping = outmap->Mapping(); Application::Log(mapping); //Clean up delete latCube; delete lonCube; delete outmap; delete transform; delete interp; } }
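/**
 * A minimal, self-contained sketch of the pixels/degree computation used by
 * the PIXRES=COMPUTE branch above: the map Scale is the image diagonal (in
 * pixels) divided by the spherical angle between the corner
 * latitude/longitude points.  It assumes a spherical body, and the function
 * name and signature are illustrative, not part of the ISIS API.
 */
#include <cmath>

static double computePixelsPerDegree(double ulLatDeg, double ulLonDeg,
                                     double lrLatDeg, double lrLonDeg,
                                     int samples, int lines) {
  const double d2r = std::acos(-1.0) / 180.0;   // degrees-to-radians factor
  double a = ulLatDeg * d2r;   // latitude at the upper-left corner
  double b = lrLatDeg * d2r;   // latitude at the lower-right corner
  double c = ulLonDeg * d2r;   // longitude at the upper-left corner
  double d = lrLonDeg * d2r;   // longitude at the lower-right corner

  // Spherical angle between the two corner points (same identity as above)
  double angleDeg = std::acos(std::cos(a) * std::cos(b) * std::cos(c - d) +
                              std::sin(a) * std::sin(b)) / d2r;

  // Diagonal length of the image in pixels
  double pixels = std::sqrt(std::pow(samples - 1.0, 2.0) +
                            std::pow(lines - 1.0, 2.0));

  // Value written to the Scale keyword, in pixels/degree
  return pixels / angleDeg;
}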
void IsisMain() { const QString caminfo_program = "caminfo"; UserInterface &ui = Application::GetUserInterface(); QList< QPair<QString, QString> > *general = NULL, *camstats = NULL, *statistics = NULL; BandGeometry *bandGeom = NULL; // Get input filename FileName in = ui.GetFileName("FROM"); // Get the format QString sFormat = ui.GetAsString("FORMAT"); // if true then run spiceinit, xml default is FALSE // spiceinit will use system kernels if(ui.GetBoolean("SPICE")) { QString parameters = "FROM=" + in.expanded(); ProgramLauncher::RunIsisProgram("spiceinit", parameters); } Process p; Cube *incube = p.SetInputCube("FROM"); // General data gathering general = new QList< QPair<QString, QString> >; general->append(MakePair("Program", caminfo_program)); general->append(MakePair("IsisVersion", Application::Version())); general->append(MakePair("RunDate", iTime::CurrentGMT())); general->append(MakePair("IsisId", SerialNumber::Compose(*incube))); general->append(MakePair("From", in.baseName() + ".cub")); general->append(MakePair("Lines", toString(incube->lineCount()))); general->append(MakePair("Samples", toString(incube->sampleCount()))); general->append(MakePair("Bands", toString(incube->bandCount()))); // Run camstats on the entire image (all bands) // another camstats will be run for each band and output // for each band. if(ui.GetBoolean("CAMSTATS")) { camstats = new QList< QPair<QString, QString> >; QString filename = ui.GetAsString("FROM"); int sinc = ui.GetInteger("SINC"); int linc = ui.GetInteger("LINC"); CameraStatistics stats(filename, sinc, linc); Pvl camPvl = stats.toPvl(); PvlGroup cg = camPvl.findGroup("Latitude", Pvl::Traverse); camstats->append(MakePair("MinimumLatitude", cg["latitudeminimum"][0])); camstats->append(MakePair("MaximumLatitude", cg["latitudemaximum"][0])); cg = camPvl.findGroup("Longitude", Pvl::Traverse); camstats->append(MakePair("MinimumLongitude", cg["longitudeminimum"][0])); camstats->append(MakePair("MaximumLongitude", cg["longitudemaximum"][0])); cg = camPvl.findGroup("Resolution", Pvl::Traverse); camstats->append(MakePair("MinimumResolution", cg["resolutionminimum"][0])); camstats->append(MakePair("MaximumResolution", cg["resolutionmaximum"][0])); cg = camPvl.findGroup("PhaseAngle", Pvl::Traverse); camstats->append(MakePair("MinimumPhase", cg["phaseminimum"][0])); camstats->append(MakePair("MaximumPhase", cg["phasemaximum"][0])); cg = camPvl.findGroup("EmissionAngle", Pvl::Traverse); camstats->append(MakePair("MinimumEmission", cg["emissionminimum"][0])); camstats->append(MakePair("MaximumEmission", cg["emissionmaximum"][0])); cg = camPvl.findGroup("IncidenceAngle", Pvl::Traverse); camstats->append(MakePair("MinimumIncidence", cg["incidenceminimum"][0])); camstats->append(MakePair("MaximumIncidence", cg["incidencemaximum"][0])); cg = camPvl.findGroup("LocalSolarTime", Pvl::Traverse); camstats->append(MakePair("LocalTimeMinimum", cg["localsolartimeMinimum"][0])); camstats->append(MakePair("LocalTimeMaximum", cg["localsolartimeMaximum"][0])); } // Compute statistics for entire cube if(ui.GetBoolean("STATISTICS")) { statistics = new QList< QPair<QString, QString> >; LineManager iline(*incube); Statistics stats; Progress progress; progress.SetText("Statistics..."); progress.SetMaximumSteps(incube->lineCount()*incube->bandCount()); progress.CheckStatus(); iline.SetLine(1); for(; !iline.end() ; iline.next()) { incube->read(iline); stats.AddData(iline.DoubleBuffer(), iline.size()); progress.CheckStatus(); } // Compute stats of entire cube double nPixels = 
stats.TotalPixels(); double nullpercent = (stats.NullPixels() / (nPixels)) * 100; double hispercent = (stats.HisPixels() / (nPixels)) * 100; double hrspercent = (stats.HrsPixels() / (nPixels)) * 100; double lispercent = (stats.LisPixels() / (nPixels)) * 100; double lrspercent = (stats.LrsPixels() / (nPixels)) * 100; // Statistics output for the whole cube statistics->append(MakePair("MeanValue", toString(stats.Average()))); statistics->append(MakePair("StandardDeviation", toString(stats.StandardDeviation()))); statistics->append(MakePair("MinimumValue", toString(stats.Minimum()))); statistics->append(MakePair("MaximumValue", toString(stats.Maximum()))); statistics->append(MakePair("PercentHIS", toString(hispercent))); statistics->append(MakePair("PercentHRS", toString(hrspercent))); statistics->append(MakePair("PercentLIS", toString(lispercent))); statistics->append(MakePair("PercentLRS", toString(lrspercent))); statistics->append(MakePair("PercentNull", toString(nullpercent))); statistics->append(MakePair("TotalPixels", toString(stats.TotalPixels()))); } bool getFootBlob = ui.GetBoolean("USELABEL"); bool doGeometry = ui.GetBoolean("GEOMETRY"); bool doPolygon = ui.GetBoolean("POLYGON"); if(doGeometry || doPolygon || getFootBlob) { Camera *cam = incube->camera(); QString incType = ui.GetString("INCTYPE"); int polySinc, polyLinc; if(doPolygon && incType.toUpper() == "VERTICES") { ImagePolygon poly; poly.initCube(*incube); polySinc = polyLinc = (int)(0.5 + (((poly.validSampleDim() * 2) + (poly.validLineDim() * 2) - 3.0) / ui.GetInteger("NUMVERTICES"))); } else if (incType.toUpper() == "LINCSINC"){ if(ui.WasEntered("POLYSINC")) { polySinc = ui.GetInteger("POLYSINC"); } else { polySinc = (int)(0.5 + 0.10 * incube->sampleCount()); if(polySinc == 0) polySinc = 1; } if(ui.WasEntered("POLYLINC")) { polyLinc = ui.GetInteger("POLYLINC"); } else { polyLinc = (int)(0.5 + 0.10 * incube->lineCount()); if(polyLinc == 0) polyLinc = 1; } } else { QString msg = "Invalid INCTYPE option[" + incType + "]"; throw IException(IException::Programmer, msg, _FILEINFO_); } bandGeom = new BandGeometry(); bandGeom->setSampleInc(polySinc); bandGeom->setLineInc(polyLinc); bandGeom->setMaxIncidence(ui.GetDouble("MAXINCIDENCE")); bandGeom->setMaxEmission(ui.GetDouble("MAXEMISSION")); bool precision = ui.GetBoolean("INCREASEPRECISION"); if (getFootBlob) { // Need to read history to obtain parameters that were used to // create the footprint History hist("IsisCube", in.expanded()); Pvl pvl = hist.ReturnHist(); PvlObject::PvlObjectIterator objIter; bool found = false; PvlGroup fpgrp; for (objIter=pvl.endObject()-1; objIter>=pvl.beginObject(); objIter--) { if (objIter->name().toUpper() == "FOOTPRINTINIT") { found = true; fpgrp = objIter->findGroup("UserParameters"); break; } } if (!found) { QString msg = "Footprint blob was not found in input image history"; throw IException(IException::User, msg, _FILEINFO_); } QString prec = (QString)fpgrp.findKeyword("INCREASEPRECISION"); prec = prec.toUpper(); if (prec == "TRUE") { precision = true; } else { precision = false; } QString inctype = (QString)fpgrp.findKeyword("INCTYPE"); inctype = inctype.toUpper(); if (inctype == "LINCSINC") { int linc = fpgrp.findKeyword("LINC"); int sinc = fpgrp.findKeyword("SINC"); bandGeom->setSampleInc(sinc); bandGeom->setLineInc(linc); } else { int vertices = fpgrp.findKeyword("NUMVERTICES"); int lincsinc = (int)(0.5 + (((incube->sampleCount() * 2) + (incube->lineCount() * 2) - 3.0) / vertices)); bandGeom->setSampleInc(lincsinc); bandGeom->setLineInc(lincsinc); }
if (fpgrp.hasKeyword("MAXINCIDENCE")) { double maxinc = fpgrp.findKeyword("MAXINCIDENCE"); bandGeom->setMaxIncidence(maxinc); } if (fpgrp.hasKeyword("MAXEMISSION")) { double maxema = fpgrp.findKeyword("MAXEMISSION"); bandGeom->setMaxEmission(maxema); } } bandGeom->collect(*cam, *incube, doGeometry, doPolygon, getFootBlob, precision); // Check if the user requires valid image center geometry if(ui.GetBoolean("VCAMERA") && (!bandGeom->hasCenterGeometry())) { QString msg = "Image center does not project in camera model"; throw IException(IException::Unknown, msg, _FILEINFO_); } } if(sFormat.toUpper() == "PVL") GeneratePVLOutput(incube, general, camstats, statistics, bandGeom); else GenerateCSVOutput(incube, general, camstats, statistics, bandGeom); // Clean the data delete general; general = NULL; if(camstats) { delete camstats; camstats = NULL; } if(statistics) { delete statistics; statistics = NULL; } if(bandGeom) { delete bandGeom; bandGeom = NULL; } }
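/**
 * A minimal sketch of the special-pixel bookkeeping behind the STATISTICS
 * output above.  The struct and its percent() helper are illustrative
 * stand-ins for the ISIS Statistics accessors (NullPixels(), HisPixels(),
 * HrsPixels(), LisPixels(), LrsPixels(), TotalPixels()); only the
 * "(count / totalPixels) * 100" arithmetic mirrors the code above.
 */
#include <cstddef>

struct SpecialPixelCounts {
  std::size_t total = 0;   // every pixel read from the cube
  std::size_t nulls = 0;   // NULL pixels
  std::size_t his = 0;     // high instrument saturation
  std::size_t hrs = 0;     // high representation saturation
  std::size_t lis = 0;     // low instrument saturation
  std::size_t lrs = 0;     // low representation saturation

  // Percentage of the cube occupied by one special-pixel category,
  // e.g. percent(nulls) corresponds to the PercentNull value above.
  double percent(std::size_t count) const {
    return total == 0 ? 0.0 : (static_cast<double>(count) / total) * 100.0;
  }
};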
/** * Retrieve the statistics based on the box size * and point on the cube. * * @param p */ void StatisticsTool::getStatistics(QPoint p) { MdiCubeViewport *cvp = cubeViewport(); if(cvp == NULL) return; double sample, line; cvp->viewportToCube(p.x(), p.y(), sample, line); // If we are outside of the cube, do nothing if((sample < 0.5) || (line < 0.5) || (sample > cvp->cubeSamples() + 0.5) || (line > cvp->cubeLines() + 0.5)) { return; } int isamp = (int)(sample + 0.5); int iline = (int)(line + 0.5); Statistics stats; Brick *brick = new Brick(1, 1, 1, cvp->cube()->pixelType()); QVector<QVector<double> > pixelData(p_boxLines, QVector<double>(p_boxSamps, Null)); double lineDiff = p_boxLines / 2.0; double sampDiff = p_boxSamps / 2.0; p_ulSamp = isamp - (int)floor(sampDiff); p_ulLine = iline - (int)floor(lineDiff); int x, y; y = p_ulLine; for(int i = 0; i < p_boxLines; i++) { x = p_ulSamp; if(y < 1 || y > cvp->cubeLines()) { y++; continue; } for(int j = 0; j < p_boxSamps; j++) { if(x < 1 || x > cvp->cubeSamples()) { x++; continue; } brick->SetBasePosition(x, y, cvp->grayBand()); cvp->cube()->read(*brick); stats.AddData(brick->at(0)); pixelData[i][j] = brick->at(0); x++; } y++; } p_visualDisplay->setPixelData(pixelData, p_ulSamp, p_ulLine); if (stats.ValidPixels()) { p_minLabel->setText(QString("Minimum: %1").arg(stats.Minimum())); p_maxLabel->setText(QString("Maximum: %1").arg(stats.Maximum())); p_avgLabel->setText(QString("Average: %1").arg(stats.Average())); p_stdevLabel->setText(QString("Standard Dev: %1").arg(stats.StandardDeviation(), 0, 'f', 6)); } else { p_minLabel->setText(QString("Minimum: n/a")); p_maxLabel->setText(QString("Maximum: n/a")); p_avgLabel->setText(QString("Average: n/a")); p_stdevLabel->setText(QString("Standard Dev: n/a")); } p_set = true; resizeScrollbars(); }
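/**
 * A small sketch of the windowing logic in getStatistics() above: center a
 * boxSamps x boxLines window on the clicked cube pixel, clamp to the cube
 * edges, and visit every in-bounds sample.  The visit() callback is a
 * placeholder for the Brick read and the Statistics::AddData call; nothing
 * here is ISIS API, and the names are illustrative.
 */
#include <cmath>
#include <functional>

static void visitBox(int centerSamp, int centerLine,
                     int boxSamps, int boxLines,
                     int cubeSamps, int cubeLines,
                     const std::function<void(int, int)> &visit) {
  // Upper-left corner of the box, as computed for p_ulSamp / p_ulLine above
  int ulSamp = centerSamp - (int)std::floor(boxSamps / 2.0);
  int ulLine = centerLine - (int)std::floor(boxLines / 2.0);

  for (int line = ulLine; line < ulLine + boxLines; line++) {
    if (line < 1 || line > cubeLines) continue;     // row is off the cube
    for (int samp = ulSamp; samp < ulSamp + boxSamps; samp++) {
      if (samp < 1 || samp > cubeSamps) continue;   // sample is off the cube
      visit(samp, line);   // read the pixel and add it to the statistics
    }
  }
}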
void IsisMain() { // We will be processing by line ProcessByLine p; // Setup the input and output cubes Cube *icube = p.SetInputCube("FROM"); PvlKeyword &status = icube->group("RESEAUS")["STATUS"]; UserInterface &ui = Application::GetUserInterface(); QString in = ui.GetFileName("FROM"); // Check reseau status and make sure it is not nominal or removed if((QString)status == "Nominal") { QString msg = "Input file [" + in + "] appears to have nominal reseau status. You must run findrx first."; throw IException(IException::User, msg, _FILEINFO_); } if((QString)status == "Removed") { QString msg = "Input file [" + in + "] appears to already have reseaus removed."; throw IException(IException::User, msg, _FILEINFO_); } status = "Removed"; p.SetOutputCube("TO"); // Start the processing p.StartProcess(cpy); p.EndProcess(); // Get the user entered dimensions sdim = ui.GetInteger("SDIM"); ldim = ui.GetInteger("LDIM"); // Get other user entered options QString out = ui.GetFileName("TO"); resvalid = ui.GetBoolean("RESVALID"); action = ui.GetString("ACTION"); // Open the output cube Cube cube; cube.open(out, "rw"); PvlGroup &res = cube.label()->findGroup("RESEAUS", Pvl::Traverse); // Get reseau line, sample, type, and valid Keywords PvlKeyword lines = res.findKeyword("LINE"); PvlKeyword samps = res.findKeyword("SAMPLE"); PvlKeyword type = res.findKeyword("TYPE"); PvlKeyword valid = res.findKeyword("VALID"); int numres = lines.size(); Brick brick(sdim, ldim, 1, cube.pixelType()); for(int res = 0; res < numres; res++) { if((resvalid == 0 || toInt(valid[res]) == 1) && toInt(type[res]) != 0) { int baseSamp = (int)(toDouble(samps[res]) + 0.5) - (sdim / 2); int baseLine = (int)(toDouble(lines[res]) + 0.5) - (ldim / 2); brick.SetBasePosition(baseSamp, baseLine, 1); cube.read(brick); if(action == "NULL") { for(int i = 0; i < brick.size(); i++) brick[i] = Isis::Null; } else if(action == "BILINEAR") { Statistics stats; double array[sdim][ldim]; for(int s = 0; s < sdim; s++) { for(int l = 0; l < ldim; l++) { int index = l * sdim + s; array[s][l] = brick[index]; // Add perimeter data to stats object for calculations if(s == 0 || l == 0 || s == (sdim - 1) || l == (ldim - 1)) { stats.AddData(&array[s][l], 1); } } } // Get the average and standard deviation of the perimeter of the brick double avg = stats.Average(); double sdev = stats.StandardDeviation(); // Top Edge Reseau if(toInt(type[res]) == 2) { int l1 = 0; int l2 = ldim - 1; for(int s = 0; s < sdim; s++) { array[s][l1] = array[s][l2]; } } // Left Edge Reseau else if(toInt(type[res]) == 4) { int s1 = 0; int s2 = sdim - 1; for(int l = 0; l < ldim; l++) { array[s1][l] = array[s2][l]; } } // Right Edge Reseau else if(toInt(type[res]) == 6) { int s1 = 0; int s2 = sdim - 1; for(int l = 0; l < ldim; l++) { array[s2][l] = array[s1][l]; } } // Bottom Edge Reseau else if(toInt(type[res]) == 8) { int l1 = 0; int l2 = ldim - 1; for(int s = 0; s < sdim; s++) { array[s][l2] = array[s][l1]; } } // Walk top edge & replace data outside of 2devs with the avg for(int s = 0; s < sdim; s++) { int l = 0; double diff = fabs(array[s][l] - avg); if(diff > (2 * sdev)) array[s][l] = avg; } // Walk bottom edge & replace data outside of 2devs with the avg for(int s = 0; s < sdim; s++) { int l = ldim - 1; double diff = fabs(array[s][l] - avg); if(diff > (2 * sdev)) array[s][l] = avg; } // Walk left edge & replace data outside of 2devs with the avg for(int l = 0; l < ldim; l++) { int s = 0; double diff = fabs(array[s][l] - avg); if(diff > (2 * sdev)) array[s][l] = avg; } // Walk right edge 
& replace data outside of 2devs with the avg for(int l = 0; l < ldim; l++) { int s = sdim - 1; double diff = fabs(array[s][l] - avg); if(diff > (2 * sdev)) array[s][l] = avg; } srand(0); double dn, gdn1, gdn2; for(int l = 0; l < ldim; l++) { int c = l * sdim; //count // Top Edge Reseau if(toInt(type[res]) == 2 && l < (ldim / 2)) continue; // Bottom Edge Reseau if(toInt(type[res]) == 8 && l > (ldim / 2 + 1)) continue; for(int s = 0; s < sdim; s++, c++) { // Left Edge Reseau if(toInt(type[res]) == 4 && s < (sdim / 2)) continue; // Right Edge Reseau if(toInt(type[res]) == 6 && s > (sdim / 2 + 1)) continue; double sum = 0.0; int gline1 = 0; int gline2 = ldim - 1; gdn1 = array[s][gline1]; gdn2 = array[s][gline2]; // Linear Interpolation to get pixel value dn = gdn2 + (l - gline2) * (gdn1 - gdn2) / (gline1 - gline2); sum += dn; int gsamp1 = 0; int gsamp2 = sdim - 1; gdn1 = array[gsamp1][l]; gdn2 = array[gsamp2][l]; // Linear Interpolation to get pixel value dn = gdn2 + (s - gsamp2) * (gdn1 - gdn2) / (gsamp1 - gsamp2); sum += dn; dn = sum / 2; int rdm = rand(); double drandom = rdm / (double)RAND_MAX; double offset = 0.0; if(drandom < .333) offset = -1.0; if(drandom > .666) offset = 1.0; brick[c] = dn + offset; } } } } cube.write(brick); } cube.close(); }
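/**
 * A standalone sketch of the BILINEAR fill applied above: each interior
 * pixel of the reseau patch is estimated twice, once by interpolating along
 * its column between the top and bottom edge values and once along its row
 * between the left and right edge values, and the two estimates are
 * averaged.  The random +/-1 DN dither used above is omitted here, the patch
 * is passed as a plain vector-of-vectors indexed [sample][line], and the
 * function name is illustrative.
 */
#include <vector>

static double bilinearFill(const std::vector< std::vector<double> > &patch,
                           int sdim, int ldim, int s, int l) {
  // Interpolate down the column between the top (line 0) and bottom edge
  double top = patch[s][0];
  double bottom = patch[s][ldim - 1];
  double vert = bottom + (l - (ldim - 1)) * (top - bottom) / (0.0 - (ldim - 1));

  // Interpolate across the row between the left (sample 0) and right edge
  double left = patch[0][l];
  double right = patch[sdim - 1][l];
  double horiz = right + (s - (sdim - 1)) * (left - right) / (0.0 - (sdim - 1));

  // Average of the two one-dimensional estimates
  return (vert + horiz) / 2.0;
}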
void IsisMain(){ Process p; // Reset all the stats objects because they are global latStat.Reset(); lonStat.Reset(); resStat.Reset(); sampleResStat.Reset(); lineResStat.Reset(); aspectRatioStat.Reset(); phaseStat.Reset(); emissionStat.Reset(); incidenceStat.Reset(); localSolarTimeStat.Reset(); localRaduisStat.Reset(); northAzimuthStat.Reset(); UserInterface &ui = Application::GetUserInterface(); Cube *icube = p.SetInputCube("FROM"); Camera *cam = icube->Camera(); // Cube cube; // cube.Open(ui.GetFilename("FROM")); // Camera *cam = cube.Camera(); int eband = cam->Bands(); // if the camera is band independent then only run one band if (cam->IsBandIndependent()) eband = 1; int linc = ui.GetInteger("LINC"); int sinc = ui.GetInteger("SINC"); int pTotal = eband * ((cam->Lines()-2) / linc + 2) ; Progress progress; progress.SetMaximumSteps(pTotal); progress.CheckStatus(); for (int band=1; band<=eband; band++) { cam->SetBand(band); for (int line=1; line<(int)cam->Lines(); line=line+linc) { for (int sample=1; sample< cam->Samples(); sample=sample+sinc) { buildStats(cam, sample, line); } //set the sample value to the last sample and run buildstats int sample = cam->Samples(); buildStats(cam, sample, line); progress.CheckStatus(); } //set the line value to the last line and run on all samples (sample + sinc) int line = cam->Lines(); for (int sample=1; sample< cam->Samples(); sample=sample+sinc) { buildStats(cam, sample, line); } //set last sample and run with last line int sample = cam->Samples(); buildStats(cam, sample, line); progress.CheckStatus(); } //Set up the Pvl groups and get min, max, avg, and sd for each statistics object PvlGroup pUser("User Parameters"); pUser += PvlKeyword("Filename",ui.GetFilename("FROM")); pUser += PvlKeyword("Linc",ui.GetInteger("LINC")); pUser += PvlKeyword("Sinc",ui.GetInteger("SINC")); PvlGroup pLat("Latitude"); pLat += ValidateKey("LatitudeMinimum",latStat.Minimum()); pLat += ValidateKey("LatitudeMaximum",latStat.Maximum()); pLat += ValidateKey("LatitudeAverage",latStat.Average()); pLat += ValidateKey("LatitudeStandardDeviation",latStat.StandardDeviation()); PvlGroup pLon("Longitude"); pLon += ValidateKey("LongitudeMinimum",lonStat.Minimum()); pLon += ValidateKey("LongitudeMaximum",lonStat.Maximum()); pLon += ValidateKey("LongitudeAverage",lonStat.Average()); pLon += ValidateKey("LongitudeStandardDeviation",lonStat.StandardDeviation()); PvlGroup pSampleRes("SampleResolution"); pSampleRes += ValidateKey("SampleResolutionMinimum",sampleResStat.Minimum(), "meters/pixel"); pSampleRes += ValidateKey("SampleResolutionMaximum",sampleResStat.Maximum(), "meters/pixel"); pSampleRes += ValidateKey("SampleResolutionAverage",sampleResStat.Average(), "meters/pixel"); pSampleRes += ValidateKey("SampleResolutionStandardDeviation", sampleResStat.StandardDeviation(),"meters/pixel"); PvlGroup pLineRes("LineResolution"); pLineRes += ValidateKey("LineResolutionMinimum",lineResStat.Minimum(), "meters/pixel"); pLineRes += ValidateKey("LineResolutionMaximum",lineResStat.Maximum(), "meters/pixel"); pLineRes += ValidateKey("LineResolutionAverage",lineResStat.Average(), "meters/pixel"); pLineRes += ValidateKey("LineResolutionStandardDeviation", lineResStat.StandardDeviation(),"meters/pixel"); PvlGroup pResolution("Resolution"); pResolution += ValidateKey("ResolutionMinimum",resStat.Minimum(), "meters/pixel"); pResolution += ValidateKey("ResolutionMaximum",resStat.Maximum(), "meters/pixel"); pResolution += ValidateKey("ResolutionAverage",resStat.Average(), "meters/pixel"); pResolution += 
ValidateKey("ResolutionStandardDeviation", resStat.StandardDeviation(),"meters/pixel"); PvlGroup pAspectRatio("AspectRatio"); pAspectRatio += ValidateKey("AspectRatioMinimum",aspectRatioStat.Minimum()); pAspectRatio += ValidateKey("AspectRatioMaximun",aspectRatioStat.Maximum()); pAspectRatio += ValidateKey("AspectRatioAverage",aspectRatioStat.Average()); pAspectRatio += ValidateKey("AspectRatioStandardDeviation", aspectRatioStat.StandardDeviation()); PvlGroup pPhase("PhaseAngle"); pPhase += ValidateKey("PhaseMinimum",phaseStat.Minimum()); pPhase += ValidateKey("PhaseMaximum",phaseStat.Maximum()); pPhase += ValidateKey("PhaseAverage",phaseStat.Average()); pPhase += ValidateKey("PhaseStandardDeviation",phaseStat.StandardDeviation()); PvlGroup pEmission("EmissionAngle"); pEmission += ValidateKey("EmissionMinimum",emissionStat.Minimum()); pEmission += ValidateKey("EmissionMaximum",emissionStat.Maximum()); pEmission += ValidateKey("EmissionAverage",emissionStat.Average()); pEmission += ValidateKey("EmissionStandardDeviation", emissionStat.StandardDeviation()); PvlGroup pIncidence("IncidenceAngle"); pIncidence += ValidateKey("IncidenceMinimum",incidenceStat.Minimum()); pIncidence += ValidateKey("IncidenceMaximum",incidenceStat.Maximum()); pIncidence += ValidateKey("IncidenceAverage",incidenceStat.Average()); pIncidence += ValidateKey("IncidenceStandardDeviation", incidenceStat.StandardDeviation()); PvlGroup pTime("LocalSolarTime"); pTime += ValidateKey("LocalSolarTimeMinimum",localSolarTimeStat.Minimum(), "hours"); pTime += ValidateKey("LocalSolarTimeMaximum",localSolarTimeStat.Maximum(), "hours"); pTime += ValidateKey("LocalSolarTimeAverage",localSolarTimeStat.Average(), "hours"); pTime += ValidateKey("LocalSolarTimeStandardDeviation", localSolarTimeStat.StandardDeviation(),"hours"); PvlGroup pLocalRadius("LocalRadius"); pLocalRadius += ValidateKey("LocalRadiusMinimum",localRaduisStat.Minimum()); pLocalRadius += ValidateKey("LocalRadiusMaximum",localRaduisStat.Maximum()); pLocalRadius += ValidateKey("LocalRadiusAverage",localRaduisStat.Average()); pLocalRadius += ValidateKey("LocalRadiusStandardDeviation", localRaduisStat.StandardDeviation()); PvlGroup pNorthAzimuth("NorthAzimuth"); pNorthAzimuth += ValidateKey("NorthAzimuthMinimum",northAzimuthStat.Minimum()); pNorthAzimuth += ValidateKey("NorthAzimuthMaximum",northAzimuthStat.Maximum()); pNorthAzimuth += ValidateKey("NorthAzimuthAverage",northAzimuthStat.Average()); pNorthAzimuth += ValidateKey("NorthAzimuthStandardDeviation", northAzimuthStat.StandardDeviation()); // Send the Output to the log area Application::Log(pUser); Application::Log(pLat); Application::Log(pLon); Application::Log(pSampleRes); Application::Log(pLineRes); Application::Log(pResolution); Application::Log(pAspectRatio); Application::Log(pPhase); Application::Log(pEmission); Application::Log(pIncidence); Application::Log(pTime); Application::Log(pLocalRadius); Application::Log(pNorthAzimuth); if (ui.WasEntered("TO")) { string from = ui.GetFilename("FROM"); string outfile = Filename(ui.GetFilename("TO")).Expanded(); bool exists = Filename(outfile).Exists(); bool append = ui.GetBoolean("APPEND"); //If the user chooses a fromat of PVL then write to the output file ("TO") if (ui.GetString("FORMAT") == "PVL") { Pvl temp; temp.SetTerminator(""); temp.AddGroup(pUser); temp.AddGroup(pLat); temp.AddGroup(pLon); temp.AddGroup(pSampleRes); temp.AddGroup(pLineRes); temp.AddGroup(pResolution); temp.AddGroup(pAspectRatio); temp.AddGroup(pPhase); temp.AddGroup(pEmission); 
temp.AddGroup(pIncidence); temp.AddGroup(pTime); temp.AddGroup(pLocalRadius); temp.AddGroup(pNorthAzimuth); if (append) { temp.Append(outfile); } else { temp.Write(outfile); } } //Create a flatfile of the data with column headings // the flatfile is comma delimited and can be imported into spreadsheets else { ofstream os; bool writeHeader = true; if (append) { os.open(outfile.c_str(),ios::app); if (exists) { writeHeader = false; } } else { os.open(outfile.c_str(),ios::out); } // if new file or append and no file exists then write header if(writeHeader){ os << "Filename,"<< "LatitudeMinimum,"<< "LatitudeMaximum,"<< "LatitudeAverage,"<< "LatitudeStandardDeviation,"<< "LongitudeMinimum,"<< "LongitudeMaximum,"<< "LongitudeAverage,"<< "LongitudeStandardDeviation,"<< "SampleResolutionMinimum,"<< "SampleResolutionMaximum,"<< "SampleResolutionAverage,"<< "SampleResolutionStandardDeviation,"<< "LineResolutionMinimum,"<< "LineResolutionMaximum,"<< "LineResolutionAverage,"<< "LineResolutionStandardDeviation,"<< "ResolutionMinimum,"<< "ResolutionMaximum,"<< "ResolutionAverage,"<< "ResolutionStandardDeviation,"<< "AspectRatioMinimum,"<< "AspectRatioMaximum,"<< "AspectRatioAverage,"<< "AspectRatioStandardDeviation,"<< "PhaseMinimum,"<< "PhaseMaximum,"<< "PhaseAverage,"<< "PhaseStandardDeviation,"<< "EmissionMinimum,"<< "EmissionMaximum,"<< "EmissionAverage,"<< "EmissionStandardDeviation,"<< "IncidenceMinimum,"<< "IncidenceMaximum,"<< "IncidenceAverage,"<< "IncidenceStandardDeviation,"<< "LocalSolarTimeMinimum,"<< "LocalSolarTimeMaximum,"<< "LocalSolarTimeAverage,"<< "LocalSolarTimeStandardDeviation,"<< "LocalRadiusMinimum,"<< "LocalRadiusMaximum,"<< "LocalRadiusAverage,"<< "LocalRadiusStandardDeviation,"<< "NorthAzimuthMinimum,"<< "NorthAzimuthMaximum,"<< "NorthAzimuthAverage,"<< "NorthAzimuthStandardDeviation,"<<endl; } os << Filename(from).Expanded() <<","; //call the function to write out the values for each group writeFlat(os, latStat); writeFlat(os, lonStat); writeFlat(os, sampleResStat); writeFlat(os, lineResStat); writeFlat(os, resStat); writeFlat(os, aspectRatioStat); writeFlat(os, phaseStat); writeFlat(os, emissionStat); writeFlat(os, incidenceStat); writeFlat(os, localSolarTimeStat); writeFlat(os, localRaduisStat); writeFlat(os, northAzimuthStat); os << endl; } } if( ui.GetBoolean("ATTACH") ) { string cam_name = "CameraStatistics"; //Creates new CameraStatistics Table TableField fname( "Name", Isis::TableField::Text, 20 ); TableField fmin( "Minimum", Isis::TableField::Double ); TableField fmax( "Maximum", Isis::TableField::Double ); TableField favg( "Average", Isis::TableField::Double ); TableField fstd( "StandardDeviation", Isis::TableField::Double ); TableRecord record; record += fname; record += fmin; record += fmax; record += favg; record += fstd; Table table( cam_name, record ); vector<PvlGroup> grps; grps.push_back( pLat ); grps.push_back( pLon ); grps.push_back( pSampleRes ); grps.push_back( pLineRes ); grps.push_back( pResolution ); grps.push_back( pAspectRatio ); grps.push_back( pPhase ); grps.push_back( pEmission ); grps.push_back( pIncidence ); grps.push_back( pTime ); grps.push_back( pLocalRadius ); grps.push_back( pNorthAzimuth ); for( vector<PvlGroup>::iterator g = grps.begin(); g != grps.end(); g++ ) { int i = 0; record[i++] = g->Name(); record[i++] = (double) (*g)[0][0]; record[i++] = (double) (*g)[1][0]; record[i++] = (double) (*g)[2][0]; record[i++] = (double) (*g)[3][0]; table += record; } icube->ReOpen( "rw" ); icube->Write( table ); p.WriteHistory(*icube); 
icube->Close(); } }
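/**
 * A minimal sketch of the LINC/SINC sampling pattern used by the statistics
 * loop above: walk the image at the requested line and sample increments,
 * then explicitly revisit the last sample of each row and the entire last
 * line so the image edges always contribute.  The visit() callback stands in
 * for buildStats(); the names are illustrative and nothing here is ISIS API.
 */
#include <functional>

static void sampleImage(int samples, int lines, int sinc, int linc,
                        const std::function<void(int, int)> &visit) {
  for (int line = 1; line < lines; line += linc) {
    for (int samp = 1; samp < samples; samp += sinc) {
      visit(samp, line);
    }
    visit(samples, line);     // always include the last sample of the row
  }
  for (int samp = 1; samp < samples; samp += sinc) {
    visit(samp, lines);       // always include the last line
  }
  visit(samples, lines);      // and the far corner of the image
}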