/** * This method performs pass1 on one image. It analyzes each framelet's * statistics and populates the necessary global variable. * * @param progress Progress message * @param theCube Current cube that needs processing * * @return bool True if the file contains a valid framelet */ bool CheckFramelets(string progress, Cube &theCube) { bool foundValidFramelet = false; LineManager mgr(theCube); Progress prog; prog.SetText(progress); prog.SetMaximumSteps(theCube.Lines()); prog.CheckStatus(); vector<double> frameletAvgs; // We need to store off the framelet information, because if no good // framelets were found then no data should be added to the // global variable for framelets, just files. vector< pair<int,double> > excludedFrameletsTmp; Statistics frameletStats; for(int line = 1; line <= theCube.Lines(); line++) { if((line-1) % numFrameLines == 0) { frameletStats.Reset(); } mgr.SetLine(line); theCube.Read(mgr); frameletStats.AddData(mgr.DoubleBuffer(), mgr.size()); if((line-1) % numFrameLines == numFrameLines-1) { if(IsSpecial(frameletStats.StandardDeviation()) || frameletStats.StandardDeviation() > maxStdev) { excludedFrameletsTmp.push_back( pair<int,double>((line-1)/numFrameLines, frameletStats.StandardDeviation()) ); } else { foundValidFramelet = true; } frameletAvgs.push_back(frameletStats.Average()); } prog.CheckStatus(); } inputFrameletAverages.push_back(frameletAvgs); if(foundValidFramelet) { for(unsigned int i = 0; i < excludedFrameletsTmp.size(); i++) { excludedFramelets.insert(pair< pair<int,int>, double>( pair<int,int>(currImage, excludedFrameletsTmp[i].first), excludedFrameletsTmp[i].second ) ); } } return foundValidFramelet; }
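CheckFramelets above groups lines into framelets of numFrameLines lines, resets the running Statistics object at each framelet boundary, and marks a framelet for exclusion when its standard deviation is special or exceeds maxStdev. The standalone sketch below (not part of the application; the framelet height and line range are made-up values) shows the same boundary and indexing arithmetic.

// Standalone sketch of the framelet boundary/index arithmetic used in
// CheckFramelets; numFrameLines and the line range are hypothetical values.
#include <cstdio>

int main() {
  const int numFrameLines = 4;   // lines per framelet (example value)
  for (int line = 1; line <= 12; line++) {
    bool firstLineOfFramelet = ((line - 1) % numFrameLines == 0);
    bool lastLineOfFramelet  = ((line - 1) % numFrameLines == numFrameLines - 1);
    int frameletIndex = (line - 1) / numFrameLines;  // 0-based, as in the exclusion list
    printf("line %2d -> framelet %d%s%s\n", line, frameletIndex,
           firstLineOfFramelet ? " (reset stats)" : "",
           lastLineOfFramelet  ? " (test stddev)" : "");
  }
  return 0;
}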
void IsisMain() { latLonGrid = NULL; // We will be processing by line ProcessByLine p; Cube *icube = p.SetInputCube("FROM"); UserInterface &ui = Application::GetUserInterface(); string mode = ui.GetString("MODE"); outline = ui.GetBoolean("OUTLINE"); inputSamples = icube->Samples(); inputLines = icube->Lines(); // Line & sample based grid if(mode == "IMAGE") { p.SetOutputCube ("TO"); baseLine = ui.GetInteger("BASELINE"); baseSample = ui.GetInteger("BASESAMPLE"); lineInc = ui.GetInteger("LINC"); sampleInc = ui.GetInteger("SINC"); p.StartProcess(imageGrid); p.EndProcess(); } // Lat/Lon based grid else { CubeAttributeOutput oatt("+32bit"); p.SetOutputCube (ui.GetFilename("TO"), oatt, icube->Samples(), icube->Lines(), icube->Bands()); baseLat = ui.GetDouble("BASELAT"); baseLon = ui.GetDouble("BASELON"); latInc = ui.GetDouble("LATINC"); lonInc = ui.GetDouble("LONINC"); UniversalGroundMap *gmap = new UniversalGroundMap(*icube); latLonGrid = new GroundGrid(gmap, icube->Samples(), icube->Lines()); Progress progress; progress.SetText("Calculating Grid"); latLonGrid->CreateGrid(baseLat, baseLon, latInc, lonInc, &progress); p.StartProcess(groundGrid); p.EndProcess(); delete latLonGrid; latLonGrid = NULL; delete gmap; gmap = NULL; } }
/**
 * @brief Initializes the object by computing all calibration statistics
 *
 * This method validates the input file, reads labels for needed values,
 * and computes calibration statistics for data reduction.
 *
 * @param [in] (Cube &) cube Opened cube from which the label and ancillary
 *                           data are read
 */
void HiImageClean::init(Cube &cube) {
  _lines = cube.Lines();
  _samples = cube.Samples();
  _lastGoodLine = _lines - 1;
  _totalMaskNulled = _totalDarkNulled = 0;

  PvlGroup &instrument = cube.GetGroup("Instrument");

  // Ensure this is a valid HiRISE image before going any further
  iString instId = (std::string) instrument["InstrumentId"];
  if (instId.UpCase() != "HIRISE") {
    string message = "Image must be a HiRISE image (InstrumentId != HIRISE)";
    throw iException::Message(iException::User, message, _FILEINFO_);
  }

  // Extract what is needed
  _binning   = instrument["Summing"];
  _tdi       = instrument["Tdi"];
  _cpmm      = instrument["CpmmNumber"];
  _channelNo = instrument["ChannelNumber"];

  // Initialize all HiRISE calibration blobs
  _calimg  = blobvert(HiCalibrationImage(cube));
  _calbuf  = blobvert(HiCalibrationBuffer(cube));
  _caldark = blobvert(HiCalibrationDark(cube));
  _ancbuf  = blobvert(HiAncillaryBuffer(cube));
  _ancdark = blobvert(HiAncillaryDark(cube));

  // Compute statistics from blobs
  computeStats();
  return;
}
void IsisMain() {
  // Get user parameters
  UserInterface &ui = Application::GetUserInterface();
  Filename inFile = ui.GetFilename("FROM");
  int numberOfLines = ui.GetInteger("NL");
  int lineOverlap   = ui.GetInteger("OVERLAP");

  // Validate the user parameters
  if (lineOverlap >= numberOfLines) {
    throw iException::Message(iException::User,
        "The Line Overlap (OVERLAP) must be less than the Number of Lines (NL).",
        _FILEINFO_);
  }

  // Open the cube
  Cube cube;
  cube.Open(inFile.Expanded());

  // Loop through, cropping as desired
  int cropNum = 1;
  int startLine = 1;
  bool hasReachedEndOfCube = false;
  while (startLine <= cube.Lines() && !hasReachedEndOfCube) {
    // Set up the proper parameters for running the crop program
    string parameters = "FROM=" + inFile.Expanded() +
        " TO=" + inFile.Path() + "/" + inFile.Basename() +
        ".segment" + iString(cropNum) + ".cub" +
        " LINE=" + iString(startLine) + " NLINES=";

    if (startLine + numberOfLines > cube.Lines()) {
      parameters += iString(cube.Lines() - (startLine - 1));
      hasReachedEndOfCube = true;
    }
    else {
      parameters += iString(numberOfLines);
    }
    Isis::iApp->Exec("crop", parameters);

    // The starting line for the next crop
    startLine = 1 + cropNum * (numberOfLines - lineOverlap);
    cropNum++;
  }
}
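The loop above launches one crop run per segment; each new segment starts numberOfLines - lineOverlap lines after the previous one, and the final segment is shortened to whatever remains in the cube. A standalone sketch of that arithmetic, using made-up cube and segment sizes:

// Minimal standalone sketch of the segment arithmetic used above.
// The cube length and segment parameters are made-up example values.
#include <iostream>

int main() {
  const int totalLines = 1000;    // hypothetical cube length
  const int numberOfLines = 300;  // NL
  const int lineOverlap = 50;     // OVERLAP

  int cropNum = 1;
  int startLine = 1;
  bool done = false;
  while (startLine <= totalLines && !done) {
    int nlines = numberOfLines;
    if (startLine + numberOfLines > totalLines) {
      nlines = totalLines - (startLine - 1);  // last, possibly short, segment
      done = true;
    }
    std::cout << "segment" << cropNum << ": LINE=" << startLine
              << " NLINES=" << nlines << std::endl;
    startLine = 1 + cropNum * (numberOfLines - lineOverlap);
    cropNum++;
  }
  return 0;
}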
// Helper function to get camera resolution.
void ComputePixRes() {
  Process p;
  UserInterface &ui = Application::GetUserInterface();
  Cube *latCube = p.SetInputCube("LATCUB");
  Cube *lonCube = p.SetInputCube("LONCUB");

  Brick latBrick(1, 1, 1, latCube->PixelType());
  Brick lonBrick(1, 1, 1, lonCube->PixelType());

  latBrick.SetBasePosition(1, 1, 1);
  latCube->Read(latBrick);
  lonBrick.SetBasePosition(1, 1, 1);
  lonCube->Read(lonBrick);
  double a = latBrick.at(0) * PI / 180.0;
  double c = lonBrick.at(0) * PI / 180.0;

  latBrick.SetBasePosition(latCube->Samples(), latCube->Lines(), 1);
  latCube->Read(latBrick);
  lonBrick.SetBasePosition(lonCube->Samples(), lonCube->Lines(), 1);
  lonCube->Read(lonBrick);
  double b = latBrick.at(0) * PI / 180.0;
  double d = lonBrick.at(0) * PI / 180.0;

  double angle = acos(cos(a) * cos(b) * cos(c - d) + sin(a) * sin(b));
  angle *= 180 / PI;

  double pixels = sqrt(pow(latCube->Samples() - 1.0, 2.0) +
                       pow(latCube->Lines() - 1.0, 2.0));

  p.EndProcess();

  ui.Clear("RESOLUTION");
  ui.PutDouble("RESOLUTION", pixels / angle);
  ui.Clear("PIXRES");
  ui.PutAsString("PIXRES", "PPD");
}
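ComputePixRes estimates pixels per degree as the image diagonal (in pixels) divided by the great-circle angle between the two corner points, computed with the spherical law of cosines. A standalone sketch with hypothetical corner coordinates and image dimensions:

// Standalone sketch of the pixels-per-degree estimate above; the corner
// latitudes/longitudes and image dimensions are made-up values.
#include <cmath>
#include <cstdio>

int main() {
  const double PI = 3.14159265358979323846;
  // Upper-left and lower-right corner lat/lon in radians (example numbers)
  double a = 10.0 * PI / 180.0;  // lat at (1,1)
  double c = 40.0 * PI / 180.0;  // lon at (1,1)
  double b = 12.0 * PI / 180.0;  // lat at (ns,nl)
  double d = 43.0 * PI / 180.0;  // lon at (ns,nl)
  int samples = 1024, lines = 1024;

  // Spherical law of cosines: angular separation of the two corners
  double angle = acos(cos(a) * cos(b) * cos(c - d) + sin(a) * sin(b)) * 180.0 / PI;
  // Diagonal length of the image in pixels
  double pixels = sqrt(pow(samples - 1.0, 2.0) + pow(lines - 1.0, 2.0));

  printf("resolution ~ %.1f pixels/degree\n", pixels / angle);
  return 0;
}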
// Gather general statistics on a particular band of a cube Isis::Statistics GatherStatistics(Cube &icube, const int band, double sampPercent, std::string maxCubeStr) { // Create our progress message iString curCubeStr (g_imageIndex+1); std::string statMsg = ""; if (icube.Bands() == 1) { statMsg = "Calculating Statistics for Band 1 in Cube " + curCubeStr + " of " + maxCubeStr; } else { iString curBandStr (band); iString maxBandStr (icube.Bands()); statMsg = "Calculating Statistics for Band " + curBandStr + " of " + maxBandStr + " in Cube " + curCubeStr + " of " + maxCubeStr; } int linc = (int) (100.0 / sampPercent + 0.5); // Calculate our line incrementer // Make sure band is valid if ((band <= 0) || (band > icube.Bands())) { string msg = "Invalid band in method [GatherStatistics]"; throw Isis::iException::Message(Isis::iException::Programmer,msg,_FILEINFO_); } // Construct a line buffer manager and a statistics object Isis::LineManager line (icube); Isis::Progress progress; progress.SetText(statMsg); // Calculate the number of steps for the Progress object, and add an extra // step if the total lines and incrementer do not divide evenly int maxSteps = icube.Lines() / linc; if (icube.Lines() % linc != 0) maxSteps += 1; progress.SetMaximumSteps(maxSteps); progress.CheckStatus(); // Add data to Statistics object by line Isis::Statistics stats; int i=1; while (i<=icube.Lines()) { line.SetLine(i,band); icube.Read(line); stats.AddData (line.DoubleBuffer(), line.size()); // Make sure we consider the last line if (i+linc > icube.Lines() && i != icube.Lines()) { i = icube.Lines(); progress.AddSteps(1); } else i += linc; // Increment the current line by our incrementer progress.CheckStatus(); } return stats; }
void IsisMain() {
  ProcessRubberSheet p;

  // Open the input cube
  Cube *icube = p.SetInputCube("FROM");

  // Set up the transform object
  UserInterface &ui = Application::GetUserInterface();
  Transform *transform = new Rotate(icube->Samples(), icube->Lines(),
                                    ui.GetDouble("DEGREES"));

  // Determine the output size
  int samples = transform->OutputSamples();
  int lines = transform->OutputLines();

  // Allocate the output file
  p.SetOutputCube("TO", samples, lines, icube->Bands());

  // Set up the interpolator
  Interpolator *interp;
  if (ui.GetString("INTERP") == "NEARESTNEIGHBOR") {
    interp = new Interpolator(Interpolator::NearestNeighborType);
  }
  else if (ui.GetString("INTERP") == "BILINEAR") {
    interp = new Interpolator(Interpolator::BiLinearType);
  }
  else if (ui.GetString("INTERP") == "CUBICCONVOLUTION") {
    interp = new Interpolator(Interpolator::CubicConvolutionType);
  }
  else {
    string msg = "Unknown value for INTERP [" + ui.GetString("INTERP") + "]";
    throw iException::Message(iException::Programmer, msg, _FILEINFO_);
  }

  p.StartProcess(*transform, *interp);
  p.EndProcess();

  delete transform;
  delete interp;
}
void IsisMain() {
  // We will be processing by line
  ProcessByLine p;

  // Setup the input and output cubes
  Cube *icube = p.SetInputCube("FROM");
  p.SetOutputCube("TO");

  // Override the defaults if the user entered a value
  UserInterface &ui = Application::GetUserInterface();
  top    = ui.GetInteger("TOP");
  bottom = ui.GetInteger("BOTTOM");
  lleft  = ui.GetInteger("LEFT");
  rright = ui.GetInteger("RIGHT");

  // Will anything be trimmed from the cube?
  bool notrim = false;
  if (top == 0 && bottom == 0 && lleft == 0 && rright == 0) {
    notrim = true;
  }

  // Adjust bottom and right
  bottom = icube->Lines() - bottom;
  rright = icube->Samples() - rright;

  // Start the processing
  p.StartProcess(trim);
  p.EndProcess();

  // The user didn't trim anything
  if (notrim == true) {
    string message = "No trimming was done; the output equals the input file";
    throw iException::Message(iException::User, message, _FILEINFO_);
  }
}
void IsisMain() {
  // We will be processing by line
  ProcessByLine p;

  // Setup the input and output cubes
  Cube *icube = p.SetInputCube("FROM");
  p.SetOutputCube("TO");

  // Get exposure duration and transfer time.
  // Override the label values if the user entered a value
  double expTime, xferTime;
  UserInterface &ui = Application::GetUserInterface();
  if (ui.WasEntered("DURATION")) {
    expTime = ui.GetDouble("DURATION");
  }
  else {
    PvlGroup grp = icube->GetGroup("ISIS_INSTRUMENT");
    expTime = grp["EXPOSURE_DURATION"];
  }
  if (ui.WasEntered("TRANSFER")) {
    xferTime = ui.GetDouble("TRANSFER");
  }
  else {
    PvlGroup grp = icube->GetGroup("ISIS_INSTRUMENT");
    xferTime = grp["TRANSFER_TIME"];
  }

  // Calculate the smear scale
  smearScale = xferTime / expTime / icube->Lines();

  // Start the processing
  p.StartProcess(desmear);
  p.EndProcess();
}
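The smear scale above is smearScale = transferTime / exposureTime / lines, i.e. the frame-transfer time as a fraction of the exposure, spread over the image lines. A tiny worked example with made-up numbers (not taken from any particular instrument):

// Worked example of the smear-scale computation; the exposure, transfer
// time, and line count are hypothetical values.
#include <cstdio>

int main() {
  double expTime  = 2.0;    // exposure duration (e.g. seconds)
  double xferTime = 0.01;   // frame transfer time, same units
  int    lines    = 1000;   // image lines

  // One plausible reading: fraction of exposure signal smeared into each line
  double smearScale = xferTime / expTime / lines;
  printf("smearScale = %g per line\n", smearScale);  // 5e-06
  return 0;
}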
// Main program void IsisMain(){ // Create an object for exporting Isis data ProcessExport p; // Open the input cube Cube *icube = p.SetInputCube("FROM"); // Conform to the Big-Endian format for FITS if(IsLsb()) p.SetOutputEndian(Isis::Msb); // Generate the name of the fits file and open it UserInterface &ui = Application::GetUserInterface(); // specify the bits per pixel string bitpix; if (ui.GetString ("BITTYPE") == "8BIT") bitpix = "8"; else if (ui.GetString ("BITTYPE") == "16BIT") bitpix = "16"; else if (ui.GetString ("BITTYPE") == "32BIT") bitpix = "-32"; else { string msg = "Pixel type of [" + ui.GetString("BITTYPE") + "] is unsupported"; throw iException::Message(iException::User, msg, _FILEINFO_); } // Determine bit size and calculate number of bytes to write // for each line. if (bitpix == "8") p.SetOutputType(Isis::UnsignedByte); if (bitpix == "16") p.SetOutputType(Isis::SignedWord); if (bitpix == "-32") p.SetOutputType(Isis::Real); // determine core base and multiplier, set up the stretch PvlGroup pix = icube->Label()->FindObject("IsisCube").FindObject("Core").FindGroup("Pixels"); double scale = pix["Multiplier"][0].ToDouble(); double base = pix["Base"][0].ToDouble(); if (ui.GetString("STRETCH") != "NONE" && bitpix != "-32") { if (ui.GetString("STRETCH") == "LINEAR") { p.SetInputRange(); } else if (ui.GetString("STRETCH") == "MANUAL") { p.SetInputRange(ui.GetDouble("MINIMUM"), ui.GetDouble("MAXIMUM")); } // create a proper scale so pixels look like 32bit data. scale = ((p.GetInputMaximum() - p.GetInputMinimum()) * (p.GetOutputMaximum() - p.GetOutputMinimum())); // round off after 14 decimals to avoid system architecture differences scale = ((floor(scale * 1e14)) / 1e14); // create a proper zero point so pixels look like 32bit data. base = -1.0 * (scale * p.GetOutputMinimum()) + p.GetInputMinimum(); // round off after 14 decimals to avoid system architecture differences base = ((floor(base * 1e14)) / 1e14); } ////////////////////////////////////////// // Write the minimal fits header // ////////////////////////////////////////// string header; // specify that this file conforms to simple fits standard header += FitsKeyword("SIMPLE", true, "T"); // specify the bits per pixel header += FitsKeyword("BITPIX", true, bitpix); // specify the number of data axes (2: samples by lines) int axes = 2; if (icube->Bands() > 1) { axes = 3; } header += FitsKeyword("NAXIS", true, iString(axes)); // specify the limit on data axis 1 (number of samples) header += FitsKeyword("NAXIS1", true, iString(icube->Samples())); // specify the limit on data axis 2 (number of lines) header += FitsKeyword("NAXIS2", true, iString(icube->Lines())); if (axes == 3){ header += FitsKeyword("NAXIS3", true, iString(icube->Bands())); } header += FitsKeyword("BZERO", true, base); header += FitsKeyword("BSCALE", true, scale); // Sky and All cases if (ui.GetString("INFO") == "SKY" || ui.GetString("INFO") == "ALL") { iString msg = "cube has not been skymapped"; PvlGroup map; if (icube->HasGroup("mapping")) { map = icube->GetGroup("mapping"); msg = (string)map["targetname"]; } // If we have sky we want it if (msg == "Sky") { double midRa = 0, midDec = 0; midRa = ((double)map["MaximumLongitude"] + (double)map["MinimumLongitude"])/2; midDec = ((double)map["MaximumLatitude"] + (double)map["MinimumLatitude"])/2; header += FitsKeyword("OBJCTRA", true, iString(midRa)); // Specify the Declination header += FitsKeyword("OBJCTDEC", true, iString(midDec)); } if (ui.GetString("INFO") == "ALL") { header += 
WritePvl("INSTRUME","Instrument","InstrumentId", icube, true); header += WritePvl("OBSERVER","Instrument","SpacecraftName", icube, true); header += WritePvl("OBJECT ","Instrument","TargetName", icube, true); // StartTime is sometimes the middle of the exposure and sometimes the beginning, // so StopTime can't be calculated off of exposure reliably. header += WritePvl("DATE-OBS","Instrument","StartTime", icube, true); // Some cameras don't have StopTime if (icube->HasGroup("Instrument")) { PvlGroup inst = icube->GetGroup("Instrument"); if (inst.HasKeyword("StopTime")) { header += WritePvl("TIME_END","Instrument","StopTime", icube, true); } if (inst.HasKeyword("ExposureDuration")) { header += WritePvl("EXPTIME","Instrument","ExposureDuration", icube, false); } } } // If we were set on SKY and Sky doesn't exist else if (msg != "Sky") { throw iException::Message(iException::User,msg,_FILEINFO_); } } // signal the end of the header header += FitsKeyword("END", false, ""); // fill the rest of the fits header with spaces to conform to the fits header // block size of 2880 bytes for (int i = header.length() % 2880 ; i < 2880 ; i++) header += " "; // open the output file for writing string to = ui.GetFilename("TO","fits"); ofstream fout; fout.open (to.c_str (), ios::out|ios::binary); if (!fout.is_open ()) { string msg = "Cannot open fits output file"; throw iException::Message(iException::Programmer,msg,_FILEINFO_); } fout.seekp(0); fout.write(header.c_str(),header.length()); // write the raw cube data p.StartProcess (fout); // Pad the data area out to a length n such that n % 2880 == 0 // 2880 is the size of the data blocks int count = 2880 - (fout.tellp() % 2880); for (int i = 0; i < count; i++) { // Write nul characters as needed. ascii 0, hex 00... fout.write("\0", 1); } fout.close(); p.EndProcess(); }
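FITS files are organized in 2880-byte records, which is why the header is padded with spaces and the data area is padded with NUL bytes above. A standalone sketch of that padding arithmetic follows; the lengths are made-up, and the trailing % 2880 guards the already-aligned case (the data-padding loop above would write one full extra record in that situation).

// Standalone sketch of FITS 2880-byte record padding; the record size is
// part of the FITS standard, the lengths below are example values.
#include <cstdio>

int padTo2880(long length) {
  // Bytes needed to reach the next 2880-byte boundary; the trailing % 2880
  // keeps the pad at zero when length is already aligned.
  return (2880 - (length % 2880)) % 2880;
}

int main() {
  long headerLength = 1843;      // hypothetical header length
  printf("header pad: %d bytes\n", padTo2880(headerLength));  // 1037
  long dataLength = 2880L * 12;  // hypothetical, already aligned
  printf("data pad:   %d bytes\n", padTo2880(dataLength));    // 0
  return 0;
}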
void IsisMain(){ const std::string hical_program = "hicalbeta"; const std::string hical_version = "3.5"; const std::string hical_revision = "$Revision: 1.14 $"; const std::string hical_runtime = Application::DateTime(); UserInterface &ui = Application::GetUserInterface(); string procStep("prepping phase"); try { // The output from the last processing is the input into subsequent processing ProcessByLine p; Cube *hifrom = p.SetInputCube("FROM"); int nsamps = hifrom->Samples(); int nlines = hifrom->Lines(); // Initialize the configuration file string conf(ui.GetAsString("CONF")); HiCalConf hiconf(*(hifrom->Label()), conf); DbProfile hiprof = hiconf.getMatrixProfile(); // Check for label propagation and set the output cube Cube *ocube = p.SetOutputCube("TO"); if ( !IsTrueValue(hiprof,"PropagateTables", "TRUE") ) { RemoveHiBlobs(*(ocube->Label())); } // Set specified profile if entered by user if (ui.WasEntered("PROFILE")) { hiconf.selectProfile(ui.GetAsString("PROFILE")); } // Add OPATH parameter to profiles if (ui.WasEntered("OPATH")) { hiconf.add("OPATH",ui.GetAsString("OPATH")); } else { // Set default to output directory hiconf.add("OPATH", Filename(ocube->Filename()).Path()); } // Do I/F output DN conversions string units = ui.GetString("UNITS"); // Allocate the calibration list calVars = new MatrixList; // Set up access to HiRISE ancillary data (tables, blobs) here. Note that if they // are gone, this will error out. See PropagateTables in conf file. HiCalData caldata(*hifrom); //////////////////////////////////////////////////////////////////////////// // FixGaps (Z_f) Get buffer pixels and compute coefficients for equation // y = a[0] + a[1]*x + a[2] * exp(a[3] * x) // where y is the average of the buffer pixel region, // and x is the time at each line in electrons/sec/pixel procStep = "Zf module"; hiconf.selectProfile("Zf"); hiprof = hiconf.getMatrixProfile(); HiHistory ZfHist; ZfHist.add("Profile["+ hiprof.Name()+"]"); if ( !SkipModule(hiprof) ) { DriftBuffer driftB(caldata, hiconf); calVars->add("Zf", driftB.ref()); ZfHist = driftB.History(); if ( hiprof.exists("DumpModuleFile") ) { driftB.Dump(hiconf.getMatrixSource("DumpModuleFile",hiprof)); } } else { // NOT RECOMMENDED! This is required for the next step! // SURELY must be skipped with Z_d step as well!
calVars->add("Zf", HiVector(nlines, 0.0)); ZfHist.add("Debug::SkipModule invoked!"); } ///////////////////////////////////////////////////////////////////// // DriftCorrect (Z_d) // Now compute the equation of fit // procStep = "Zd module"; HiHistory ZdHist; hiconf.selectProfile("Zd"); hiprof = hiconf.getMatrixProfile(); ZdHist.add("Profile["+ hiprof.Name()+"]"); if (!SkipModule(hiconf.getMatrixProfile("Zd")) ) { DriftCorrect driftC(hiconf); calVars->add("Zd", driftC.Normalize(driftC.Solve(calVars->get("Zf")))); ZdHist = driftC.History(); if ( hiprof.exists("DumpModuleFile") ) { driftC.Dump(hiconf.getMatrixSource("DumpModuleFile",hiprof)); } } else { calVars->add("Zd", HiVector(nlines, 0.0)); ZdHist.add("Debug::SkipModule invoked!"); } //////////////////////////////////////////////////////////////////// // ZeroCorrect (Z_z) Get reverse clock procStep = "Zz module"; hiconf.selectProfile("Zz"); hiprof = hiconf.getMatrixProfile(); HiHistory ZzHist; ZzHist.add("Profile["+ hiprof.Name()+"]"); if ( !SkipModule(hiprof) ) { OffsetCorrect zoff(caldata, hiconf); calVars->add("Zz", zoff.ref()); ZzHist = zoff.History(); if ( hiprof.exists("DumpModuleFile") ) { zoff.Dump(hiconf.getMatrixSource("DumpModuleFile",hiprof)); } } else { calVars->add("Zz", HiVector(nsamps, 0.0)); ZzHist.add("Debug::SkipModule invoked!"); } ///////////////////////////////////////////////////////////////// // DarkSubtract (Z_b) Remove dark current // procStep = "Zb module"; hiconf.selectProfile("Zb"); hiprof = hiconf.getMatrixProfile(); HiHistory ZbHist; ZbHist.add("Profile["+ hiprof.Name()+"]"); if ( !SkipModule(hiprof) ) { DarkSubtractComp dark(hiconf); calVars->add("Zb", dark.ref()); ZbHist = dark.History(); if ( hiprof.exists("DumpModuleFile") ) { dark.Dump(hiconf.getMatrixSource("DumpModuleFile",hiprof)); } } else { calVars->add("Zb", HiVector(nsamps, 0.0)); ZbHist.add("Debug::SkipModule invoked!"); } //////////////////////////////////////////////////////////////////// // GainVLineCorrect (Z_g) Correct for gain-based drift // procStep = "Zg module"; hiconf.selectProfile("Zg"); hiprof = hiconf.getMatrixProfile(); HiHistory ZgHist; ZgHist.add("Profile["+ hiprof.Name()+"]"); if ( !SkipModule(hiprof) ) { GainVLineComp gainV(hiconf); calVars->add("Zg", gainV.ref()); ZgHist = gainV.History(); if ( hiprof.exists("DumpModuleFile") ) { gainV.Dump(hiconf.getMatrixSource("DumpModuleFile",hiprof)); } } else { calVars->add("Zg", HiVector(nlines, 1.0)); ZgHist.add("Debug::SkipModule invoked!"); } //////////////////////////////////////////////////////////////////// // GainCorrect (Z_gg) Correct for gain with the G matrix procStep = "Zgg module"; hiconf.selectProfile("Zgg"); hiprof = hiconf.getMatrixProfile(); HiHistory ZggHist; ZggHist.add("Profile["+ hiprof.Name()+"]"); if ( !SkipModule(hiprof) ) { double bin = ToDouble(hiprof("Summing")); double tdi = ToDouble(hiprof("Tdi")); double factor = 128.0 / tdi / (bin*bin); HiVector zgg = hiconf.getMatrix("G", hiprof); for ( int i = 0 ; i < zgg.dim() ; i++ ) { zgg[i] *= factor; } calVars->add("Zgg", zgg);; ZggHist.add("LoadMatrix(G[" + hiconf.getMatrixSource("G",hiprof) + "],Band[" + ToString(hiconf.getMatrixBand(hiprof)) + "],Factor[" + ToString(factor) + "])"); if ( hiprof.exists("DumpModuleFile") ) { Component zg("GMatrix", ZggHist); zg.Process(zgg); zg.Dump(hiconf.getMatrixSource("DumpModuleFile",hiprof)); } } else { calVars->add("Zgg", HiVector(nsamps, 1.0)); ZggHist.add("Debug::SkipModule invoked!"); } //////////////////////////////////////////////////////////////////// // FlatField 
(Z_a) Flat field correction with A matrix procStep = "Za module"; hiconf.selectProfile("Za"); hiprof = hiconf.getMatrixProfile(); HiHistory ZaHist; ZaHist.add("Profile["+ hiprof.Name()+"]"); if ( !SkipModule(hiprof) ) { FlatFieldComp flat(hiconf); calVars->add("Za", flat.ref()); ZaHist = flat.History(); if ( hiprof.exists("DumpModuleFile") ) { flat.Dump(hiconf.getMatrixSource("DumpModuleFile",hiprof)); } } else { calVars->add("Za", HiVector(nsamps, 1.0)); ZaHist.add("Debug::SkipModule invoked!"); } //////////////////////////////////////////////////////////////////// // FlatField (Z_t) Temperature-dependent gain correction procStep = "Zt module"; hiconf.selectProfile("Zt"); hiprof = hiconf.getMatrixProfile(); HiHistory ZtHist; ZtHist.add("Profile["+ hiprof.Name()+"]"); if ( !SkipModule(hiprof) ) { TempGainCorrect tcorr(hiconf); calVars->add("Zt", tcorr.ref()); ZtHist = tcorr.History(); if ( hiprof.exists("DumpModuleFile") ) { tcorr.Dump(hiconf.getMatrixSource("DumpModuleFile",hiprof)); } } else { calVars->add("Zt", HiVector(nsamps, 1.0)); ZtHist.add("Debug::SkipModule invoked!"); } //////////////////////////////////////////////////////////////////// // I/FCorrect (Z_iof) Conversion to I/F // procStep = "Ziof module"; hiconf.selectProfile("Ziof"); hiprof = hiconf.getMatrixProfile(); HiHistory ZiofHist; ZiofHist.add("Profile["+ hiprof.Name()+"]"); if ( !SkipModule(hiprof) ) { double sed = ToDouble(hiprof("ScanExposureDuration")); // units = us if ( IsEqual(units, "IOF") ) { // Add solar I/F correction parameters double au = hiconf.sunDistanceAU(); ZiofHist.add("SunDist[" + ToString(au) + " (AU)]"); double suncorr = 1.5 / au; suncorr *= suncorr; double zbin = ToDouble(hiprof("ZiofBinFactor")); ZiofHist.add("ZiofBinFactor[" + ToString(zbin) + "]"); double zgain = ToDouble(hiprof("FilterGainCorrection")); ZiofHist.add("FilterGainCorrection[" + ToString(zgain) + "]"); ZiofHist.add("ScanExposureDuration[" + ToString(sed) + "]"); double ziof = (zbin * zgain) * (sed * 1.0e-6) * suncorr; calVars->add("Ziof", HiVector(1, ziof)); ZiofHist.add("I/F_Factor[" + ToString(ziof) + "]"); ZiofHist.add("Units[I/F Reflectance]"); } else if ( IsEqual(units, "DN/US") ) { // Ziof is a divisor in calibration equation double ziof = sed; calVars->add("Ziof", HiVector(1, ziof)); ZiofHist.add("ScanExposureDuration[" + ToString(sed) + "]"); ZiofHist.add("DN/US_Factor[" + ToString(ziof) + "]"); ZiofHist.add("Units[DNs/microsecond]"); } else { // Units are already in DN double ziof = 1.0; calVars->add("Ziof", HiVector(1, ziof)); ZiofHist.add("DN_Factor[" + ToString(ziof) + "]"); ZiofHist.add("Units[DN]"); } } else { calVars->add("Ziof", HiVector(1,1.0)); ZiofHist.add("Debug::SkipModule invoked!"); ZiofHist.add("Units[Unknown]"); } // Reset the profile selection to default hiconf.selectProfile(); //---------------------------------------------------------------------- // ///////////////////////////////////////////////////////////////////////// // Call the processing function procStep = "calibration phase"; p.StartProcess(calibrate); // Get the default profile for logging purposes hiprof = hiconf.getMatrixProfile(); const std::string conf_file = hiconf.filepath(conf); // Quietly dumps parameter history to alternative format file.
This // is completely controlled by the configuration file if ( hiprof.exists("DumpHistoryFile") ) { procStep = "logging/reporting phase"; Filename hdump(hiconf.getMatrixSource("DumpHistoryFile",hiprof)); string hdumpFile = hdump.Expanded(); ofstream ofile(hdumpFile.c_str(), ios::out); if (!ofile) { string mess = "Unable to open/create history dump file " + hdump.Expanded(); iException::Message(iException::User, mess, _FILEINFO_).Report(); } else { ofile << "Program: " << hical_program << endl; ofile << "RunTime: " << hical_runtime << endl; ofile << "Version: " << hical_version << endl; ofile << "Revision: " << hical_revision << endl << endl; ofile << "FROM: " << hifrom->Filename() << endl; ofile << "TO: " << ocube->Filename() << endl; ofile << "CONF: " << conf_file << endl << endl; ofile << "/* " << hical_program << " application equation */" << endl << "/* hdn = (idn - Zd(Zf) - Zz - Zb) */" << endl << "/* odn = hdn / Zg * Zgg * Za * Zt / Ziof */" << endl << endl; ofile << "****** PARAMETER GENERATION HISTORY *******" << endl; ofile << "\nZf = " << ZfHist << endl; ofile << "\nZd = " << ZdHist << endl; ofile << "\nZz = " << ZzHist << endl; ofile << "\nZb = " << ZbHist << endl; ofile << "\nZg = " << ZgHist << endl; ofile << "\nZgg = " << ZggHist << endl; ofile << "\nZa = " << ZaHist << endl; ofile << "\nZt = " << ZtHist << endl; ofile << "\nZiof = " << ZiofHist << endl; ofile.close(); } } // Ensure the RadiometricCalibration group is out there const std::string rcalGroup("RadiometricCalibration"); if (!ocube->HasGroup(rcalGroup)) { PvlGroup temp(rcalGroup); ocube->PutGroup(temp); } PvlGroup &rcal = ocube->GetGroup(rcalGroup); rcal += PvlKeyword("Program", hical_program); rcal += PvlKeyword("RunTime", hical_runtime); rcal += PvlKeyword("Version",hical_version); rcal += PvlKeyword("Revision",hical_revision); PvlKeyword key("Conf", conf_file); key.AddCommentWrapped("/* " + hical_program + " application equation */"); key.AddComment("/* hdn = (idn - Zd(Zf) - Zz - Zb) */"); key.AddComment("/* odn = hdn / Zg * Zgg * Za * Zt / Ziof */"); rcal += key; // Record parameter generation history. Controllable in configuration // file. Note this is optional because of a BUG!! in the ISIS label // writer as this application was initially developed if ( IsEqual(ConfKey(hiprof,"LogParameterHistory",string("TRUE")),"TRUE")) { rcal += ZfHist.makekey("Zf"); rcal += ZdHist.makekey("Zd"); rcal += ZzHist.makekey("Zz"); rcal += ZbHist.makekey("Zb"); rcal += ZgHist.makekey("Zg"); rcal += ZggHist.makekey("Zgg"); rcal += ZaHist.makekey("Za"); rcal += ZiofHist.makekey("Ziof"); } p.EndProcess(); } catch (iException &ie) { delete calVars; calVars = 0; string mess = "Failed in " + procStep; ie.Message(iException::User, mess.c_str(), _FILEINFO_); throw; } // Clean up parameters delete calVars; calVars = 0; }
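For the IOF branch of the Ziof module above, the divisor folds together the binning factor, the filter gain correction, the scan exposure duration (microseconds), and a solar-distance correction (1.5 AU / distance) squared. A standalone sketch of that arithmetic with hypothetical profile values:

// Standalone sketch of the Ziof (I/F) factor computed in the Ziof module;
// the profile values and solar distance below are hypothetical.
#include <cstdio>

int main() {
  double au    = 1.45;   // sun distance in AU (example)
  double zbin  = 2.0;    // ZiofBinFactor (example)
  double zgain = 0.95;   // FilterGainCorrection (example)
  double sed   = 85.0;   // ScanExposureDuration in microseconds (example)

  double suncorr = 1.5 / au;   // solar distance correction, squared below
  suncorr *= suncorr;
  double ziof = (zbin * zgain) * (sed * 1.0e-6) * suncorr;  // divisor in the cal equation
  printf("Ziof = %g\n", ziof);
  return 0;
}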
void IsisMain() { // Get the list of cubes to process FileList imageList; UserInterface &ui = Application::GetUserInterface(); imageList.Read(ui.GetFilename("FROMLIST")); // Read to list if one was entered FileList outList; if (ui.WasEntered("TOLIST")) { outList.Read(ui.GetFilename("TOLIST")); } // Check for user input errors and return the file list sorted by CCD numbers ErrorCheck(imageList, outList); // Adds statistics for whole and side regions of every cube for (int img=0; img<(int)imageList.size(); img++) { g_s.Reset(); g_sl.Reset(); g_sr.Reset(); iString maxCube ((int)imageList.size()); iString curCube (img+1); ProcessByLine p; p.Progress()->SetText("Gathering Statistics for Cube " + curCube + " of " + maxCube); CubeAttributeInput att; const std::string inp = imageList[img]; p.SetInputCube(inp, att); p.StartProcess(GatherStatistics); p.EndProcess(); g_allStats.push_back(g_s); g_leftStats.push_back(g_sl); g_rightStats.push_back(g_sr); } // Initialize the object that will calculate the gains and offsets g_oNorm = new OverlapNormalization(g_allStats); // Add the known overlaps between two cubes, and apply a weight to each // overlap equal the number of pixels in the overlapping area for (int i=0; i<(int)imageList.size()-1; i++) { int j = i+1; g_oNorm->AddOverlap(g_rightStats[i], i, g_leftStats[j], j, g_rightStats[i].ValidPixels()); } // Read in and then set the holdlist FileList holdList; holdList.Read(ui.GetFilename("HOLD")); for (unsigned i=0; i<holdList.size(); i++) { int index = -1; for (unsigned j=0; j<imageList.size(); j++) { std::string curName = imageList.at(j); if (curName.compare(holdList[i]) == 0) { index = j; g_oNorm->AddHold(index); } } } // Attempt to solve the least squares equation g_oNorm->Solve(OverlapNormalization::Both); // Apply correction to the cubes if desired bool applyopt = ui.GetBoolean("APPLY"); if (applyopt) { // Loop through correcting the gains and offsets by line for every cube for (int img=0; img<(int)imageList.size(); img++) { g_imageNum = img; ProcessByLine p; iString max_cube ((int)imageList.size()); iString cur_cube (img+1); p.Progress()->SetText("Equalizing Cube " + cur_cube + " of " + max_cube); CubeAttributeInput att; const std::string inp = imageList[img]; Cube *icube = p.SetInputCube(inp, att); Filename file = imageList[img]; // Establish the output file depending upon whether or not a to list // was entered std::string out; if (ui.WasEntered("TOLIST")) { out = outList[img]; } else { Filename file = imageList[img]; out = file.Path() + "/" + file.Basename() + ".equ." 
+ file.Extension(); } CubeAttributeOutput outAtt; p.SetOutputCube(out,outAtt,icube->Samples(),icube->Lines(),icube->Bands()); p.StartProcess(Apply); p.EndProcess(); } } // Setup the output text file if the user requested one if (ui.WasEntered("OUTSTATS")) { std::string out = Filename(ui.GetFilename("OUTSTATS")).Expanded(); std::ofstream os; os.open(out.c_str(),std::ios::app); // Get statistics for each cube with PVL Pvl p; PvlObject equ("EqualizationInformation"); for (int img=0; img<(int)imageList.size(); img++) { std::string cur = imageList[img]; PvlGroup a("Adjustment"); a += PvlKeyword("FileName", cur); a += PvlKeyword("Average", g_oNorm->Average(img)); a += PvlKeyword("Base", g_oNorm->Offset(img)); a += PvlKeyword("Multiplier", g_oNorm->Gain(img)); equ.AddGroup(a); } p.AddObject(equ); os << p << std::endl; } PvlGroup results ("Results"); for (int img=0; img<(int)imageList.size(); img++) { results += PvlKeyword("FileName", imageList[img]); results += PvlKeyword("Average", g_oNorm->Average(img)); results += PvlKeyword("Base", g_oNorm->Offset(img)); results += PvlKeyword("Multiplier", g_oNorm->Gain(img)); } Application::Log(results); // Clean-up for batch list runs delete g_oNorm; g_oNorm = NULL; g_allStats.clear(); g_leftStats.clear(); g_rightStats.clear(); }
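The Adjustment groups written above record a Base and Multiplier per file; applying them is presumably a per-pixel linear adjustment. The sketch below assumes newdn = dn * Multiplier + Base, which matches those keyword names but is an assumption about what the Apply step and OverlapNormalization actually compute.

// Hypothetical linear adjustment sketch; OverlapNormalization's real
// correction may differ (e.g. it may normalize about a band average).
#include <cstdio>

double equalize(double dn, double gain, double offset) {
  return dn * gain + offset;  // assumed form: Multiplier then Base
}

int main() {
  double gain = 1.02, offset = -3.5;   // made-up solved values
  double samples[] = { 100.0, 150.0, 200.0 };
  for (double dn : samples) {
    printf("%.1f -> %.2f\n", dn, equalize(dn, gain, offset));
  }
  return 0;
}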
void IsisMain() { Process p; // Get the list of names of input CCD cubes to stitch together FileList flist; UserInterface &ui = Application::GetUserInterface(); flist.Read(ui.GetFilename("FROMLIST")); if (flist.size() < 1) { string msg = "The list file [" + ui.GetFilename("FROMLIST") + "] does not contain any filenames"; throw iException::Message(iException::User,msg,_FILEINFO_); } string projection("Equirectangular"); if(ui.WasEntered("MAP")) { Pvl mapfile(ui.GetFilename("MAP")); projection = (string) mapfile.FindGroup("Mapping")["ProjectionName"]; } if(ui.WasEntered("PROJECTION")) { projection = ui.GetString("PROJECTION"); } // Gather other user inputs to projection string lattype = ui.GetString("LATTYPE"); string londir = ui.GetString("LONDIR"); string londom = ui.GetString("LONDOM"); int digits = ui.GetInteger("PRECISION"); // Fix them for mapping group lattype = (lattype == "PLANETOCENTRIC") ? "Planetocentric" : "Planetographic"; londir = (londir == "POSITIVEEAST") ? "PositiveEast" : "PositiveWest"; Progress prog; prog.SetMaximumSteps(flist.size()); prog.CheckStatus(); Statistics scaleStat; Statistics longitudeStat; Statistics latitudeStat; Statistics equiRadStat; Statistics poleRadStat; PvlObject fileset("FileSet"); // Save major equatorial and polar radii from the last occurring file double eqRad; double eq2Rad; double poleRad; string target("Unknown"); for (unsigned int i = 0 ; i < flist.size() ; i++) { // Set the input image, get the camera model, and a basic mapping // group Cube cube; cube.Open(flist[i]); int lines = cube.Lines(); int samples = cube.Samples(); PvlObject fmap("File"); fmap += PvlKeyword("Name",flist[i]); fmap += PvlKeyword("Lines", lines); fmap += PvlKeyword("Samples", samples); Camera *cam = cube.Camera(); Pvl mapping; cam->BasicMapping(mapping); PvlGroup &mapgrp = mapping.FindGroup("Mapping"); mapgrp.AddKeyword(PvlKeyword("ProjectionName",projection),Pvl::Replace); mapgrp.AddKeyword(PvlKeyword("LatitudeType",lattype),Pvl::Replace); mapgrp.AddKeyword(PvlKeyword("LongitudeDirection",londir),Pvl::Replace); mapgrp.AddKeyword(PvlKeyword("LongitudeDomain",londom),Pvl::Replace); // Get the radii double radii[3]; cam->Radii(radii); eqRad = radii[0] * 1000.0; eq2Rad = radii[1] * 1000.0; poleRad = radii[2] * 1000.0; target = cam->Target(); equiRadStat.AddData(&eqRad, 1); poleRadStat.AddData(&poleRad, 1); // Get resolution double lowres = cam->LowestImageResolution(); double hires = cam->HighestImageResolution(); scaleStat.AddData(&lowres, 1); scaleStat.AddData(&hires, 1); double pixres = (lowres+hires)/2.0; double scale = Scale(pixres, poleRad, eqRad); mapgrp.AddKeyword(PvlKeyword("PixelResolution",pixres),Pvl::Replace); mapgrp.AddKeyword(PvlKeyword("Scale",scale,"pixels/degree"),Pvl::Replace); mapgrp += PvlKeyword("MinPixelResolution",lowres,"meters"); mapgrp += PvlKeyword("MaxPixelResolution",hires,"meters"); // Get the universal ground range double minlat,maxlat,minlon,maxlon; cam->GroundRange(minlat,maxlat,minlon,maxlon,mapping); mapgrp.AddKeyword(PvlKeyword("MinimumLatitude",minlat),Pvl::Replace); mapgrp.AddKeyword(PvlKeyword("MaximumLatitude",maxlat),Pvl::Replace); mapgrp.AddKeyword(PvlKeyword("MinimumLongitude",minlon),Pvl::Replace); mapgrp.AddKeyword(PvlKeyword("MaximumLongitude",maxlon),Pvl::Replace); fmap.AddGroup(mapgrp); fileset.AddObject(fmap); longitudeStat.AddData(&minlon, 1); longitudeStat.AddData(&maxlon, 1); latitudeStat.AddData(&minlat, 1); latitudeStat.AddData(&maxlat, 1); p.ClearInputCubes(); prog.CheckStatus(); } // Construct the output mapping group with
statistics PvlGroup mapping("Mapping"); double avgPixRes((scaleStat.Minimum()+scaleStat.Maximum())/2.0); double avgLat((latitudeStat.Minimum()+latitudeStat.Maximum())/2.0); double avgLon((longitudeStat.Minimum()+longitudeStat.Maximum())/2.0); double avgEqRad((equiRadStat.Minimum()+equiRadStat.Maximum())/2.0); double avgPoleRad((poleRadStat.Minimum()+poleRadStat.Maximum())/2.0); double scale = Scale(avgPixRes, avgPoleRad, avgEqRad); mapping += PvlKeyword("ProjectionName",projection); mapping += PvlKeyword("TargetName", target); mapping += PvlKeyword("EquatorialRadius",eqRad,"meters"); mapping += PvlKeyword("PolarRadius",poleRad,"meters"); mapping += PvlKeyword("LatitudeType",lattype); mapping += PvlKeyword("LongitudeDirection",londir); mapping += PvlKeyword("LongitudeDomain",londom); mapping += PvlKeyword("PixelResolution", SetRound(avgPixRes, digits), "meters/pixel"); mapping += PvlKeyword("Scale", SetRound(scale, digits), "pixels/degree"); mapping += PvlKeyword("MinPixelResolution",scaleStat.Minimum(),"meters"); mapping += PvlKeyword("MaxPixelResolution",scaleStat.Maximum(),"meters"); mapping += PvlKeyword("CenterLongitude", SetRound(avgLon,digits)); mapping += PvlKeyword("CenterLatitude", SetRound(avgLat,digits)); mapping += PvlKeyword("MinimumLatitude", MAX(SetFloor(latitudeStat.Minimum(),digits), -90.0)); mapping += PvlKeyword("MaximumLatitude", MIN(SetCeil(latitudeStat.Maximum(),digits), 90.0)); mapping += PvlKeyword("MinimumLongitude",MAX(SetFloor(longitudeStat.Minimum(),digits), -180.0)); mapping += PvlKeyword("MaximumLongitude",MIN(SetCeil(longitudeStat.Maximum(),digits), 360.0)); PvlKeyword clat("PreciseCenterLongitude", avgLon); clat.AddComment("Actual Parameters without precision applied"); mapping += clat; mapping += PvlKeyword("PreciseCenterLatitude", avgLat); mapping += PvlKeyword("PreciseMinimumLatitude", latitudeStat.Minimum()); mapping += PvlKeyword("PreciseMaximumLatitude", latitudeStat.Maximum()); mapping += PvlKeyword("PreciseMinimumLongitude",longitudeStat.Minimum()); mapping += PvlKeyword("PreciseMaximumLongitude",longitudeStat.Maximum()); Application::GuiLog(mapping); // Write the output file if requested if (ui.WasEntered("TO")) { Pvl temp; temp.AddGroup(mapping); temp.Write(ui.GetFilename("TO","map")); } if (ui.WasEntered("LOG")) { Pvl temp; temp.AddObject(fileset); temp.Write(ui.GetFilename("LOG","log")); } p.EndProcess(); }
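The Scale helper used above converts an average pixel resolution (meters/pixel) into a map scale in pixels/degree; its implementation is not shown here. One plausible form, assuming the scale is taken on a sphere with the mean of the polar and equatorial radii, is sketched below; the real Scale() may use a different radius or a true-scale latitude.

// Hypothetical pixels-per-degree conversion; the radii and resolution are
// made-up (roughly Mars-like) example values in meters.
#include <cmath>
#include <cstdio>

double ScaleSketch(double pixres, double polarRadius, double equatRadius) {
  const double PI = 3.14159265358979323846;
  double radius = (polarRadius + equatRadius) / 2.0;  // assumed mean radius
  double metersPerDegree = 2.0 * PI * radius / 360.0;
  return metersPerDegree / pixres;                    // pixels per degree
}

int main() {
  printf("scale = %.1f pixels/degree\n",
         ScaleSketch(100.0, 3376200.0, 3396190.0));
  return 0;
}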
void IsisMain(){
  UserInterface &ui = Application::GetUserInterface();
  ProcessByLine proc;
  Cube *cube = proc.SetInputCube("FROM");
  BigInt npixels(cube->Lines() * cube->Samples());

  // Initialize the cleaner routine
  try {
    delete iclean;
    iclean = new HiImageClean(*cube);
  }
  catch (iException &ie) {
    std::string message = "Error attempting to initialize HiRISE cleaner object";
    throw (iException::Message(iException::Programmer, message, _FILEINFO_));
  }
  catch (...) {
    std::string message = "Unknown error occurred attempting to initialize "
                          "HiRISE cleaner object";
    throw (iException::Message(iException::Programmer, message, _FILEINFO_));
  }

  // For IR10, channel 1, let's restrict the last 3100 lines of dark current
  PvlGroup &instrument = cube->GetGroup("Instrument");
  std::string ccd = (std::string) instrument["CcdId"];
  int channel = instrument["ChannelNumber"];
  if ((ccd == "IR10") && (channel == 1)) {
    int bin = instrument["Summing"];
    int lastLine = cube->Lines() - ((3100/bin) + iclean->getFilterWidth()/2);
    if (lastLine > 1) {
      iclean->setLastGoodLine(lastLine);
    }
  }

#if defined(DEBUG)
  std::cout << "Lines: " << cube->Lines()
            << " GoodLines: " << iclean->getLastGoodLine() << std::endl;
#endif

  // Get the output file reference for label update
  Cube *ocube = proc.SetOutputCube("TO");
  proc.StartProcess(cleanImage);
  iclean->propagateBlobs(ocube);
  proc.EndProcess();

  // Write statistics to file if requested
  if (ui.WasEntered("CLEANSTATS")) {
    std::string darkfile = ui.GetFilename("CLEANSTATS");
    std::ofstream dfile;
    dfile.open(darkfile.c_str(), std::ios::out | std::ios::trunc);
    dfile << *iclean;
    dfile.close();
  }

  // Dump stats to standard out
  Pvl p;
  PvlGroup grp;
  iclean->PvlImageStats(grp);
  p.AddGroup(grp);
  Application::Log(grp);

  BigInt nNulled = iclean->TotalNulled();
  delete iclean;
  iclean = 0;

  // Check for calibration problems
  if (nNulled != 0) {
    double tpixels((double) nNulled / (double) npixels);
    std::ostringstream mess;
    mess << "There were " << nNulled << " of " << npixels << " pixels ("
         << std::setw(6) << std::setprecision(2) << (tpixels * 100.0)
         << "%) nulled due to insufficient calibration data (LUTTED or Gaps)"
         << std::ends;
    throw (iException::Message(iException::Math, mess.str(), _FILEINFO_));
  }
}
void IsisMain() {
  // Get the camera information
  Process p1;
  Cube *icube = p1.SetInputCube("FROM", OneBand);
  cam = icube->Camera();

  // We will be processing by brick.
  ProcessByBrick p;

  // Find out which bands are to be created
  UserInterface &ui = Application::GetUserInterface();
  nbands = 0;
  if ((phase = ui.GetBoolean("PHASE"))) nbands++;
  if ((emission = ui.GetBoolean("EMISSION"))) nbands++;
  if ((incidence = ui.GetBoolean("INCIDENCE"))) nbands++;
  if ((latitude = ui.GetBoolean("LATITUDE"))) nbands++;
  if ((longitude = ui.GetBoolean("LONGITUDE"))) nbands++;
  if ((pixelResolution = ui.GetBoolean("PIXELRESOLUTION"))) nbands++;
  if ((lineResolution = ui.GetBoolean("LINERESOLUTION"))) nbands++;
  if ((sampleResolution = ui.GetBoolean("SAMPLERESOLUTION"))) nbands++;
  if ((detectorResolution = ui.GetBoolean("DETECTORRESOLUTION"))) nbands++;
  if ((northAzimuth = ui.GetBoolean("NORTHAZIMUTH"))) nbands++;
  if ((sunAzimuth = ui.GetBoolean("SUNAZIMUTH"))) nbands++;
  if ((spacecraftAzimuth = ui.GetBoolean("SPACECRAFTAZIMUTH"))) nbands++;
  if ((offnadirAngle = ui.GetBoolean("OFFNADIRANGLE"))) nbands++;

  if (nbands < 1) {
    string message = "At least one photometry parameter must be entered "
                     "[PHASE, EMISSION, INCIDENCE, LATITUDE, LONGITUDE]";
    throw iException::Message(iException::User, message, _FILEINFO_);
  }

  // Create a bandbin group for the output label
  PvlKeyword name("Name");
  if (phase) name += "Phase Angle";
  if (emission) name += "Emission Angle";
  if (incidence) name += "Incidence Angle";
  if (latitude) name += "Latitude";
  if (longitude) name += "Longitude";
  if (pixelResolution) name += "Pixel Resolution";
  if (lineResolution) name += "Line Resolution";
  if (sampleResolution) name += "Sample Resolution";
  if (detectorResolution) name += "Detector Resolution";
  if (northAzimuth) name += "North Azimuth";
  if (sunAzimuth) name += "Sun Azimuth";
  if (spacecraftAzimuth) name += "Spacecraft Azimuth";
  if (offnadirAngle) name += "OffNadir Angle";
  PvlGroup bandBin("BandBin");
  bandBin += name;

  // Create the output cube.  Note we add the input cube to expedite propagation
  // of input cube elements (label, blobs, etc...).  It *must* be cleared
  // prior to systematic processing.
  (void) p.SetInputCube("FROM", OneBand);
  Cube *ocube = p.SetOutputCube("TO", icube->Samples(), icube->Lines(), nbands);
  p.SetBrickSize(64, 64, nbands);
  p.ClearInputCubes();   // Toss the input file as stated above

  // Start the processing
  p.StartProcess(phocube);

  // Add the bandbin group to the output label.  If a BandBin group already
  // exists, remove all existing keywords and add the keywords for this app.
  // Otherwise, just put the group in.
  PvlObject &cobj = ocube->Label()->FindObject("IsisCube");
  if (cobj.HasGroup("BandBin")) {
    PvlGroup &bb = cobj.FindGroup("BandBin");
    bb.Clear();
    PvlContainer::PvlKeywordIterator k = bandBin.Begin();
    while (k != bandBin.End()) {
      bb += *k;
      ++k;
    }
  }
  else {
    ocube->PutGroup(bandBin);
  }

  p.EndProcess();
}
/** * This is the main method. Makeflat runs in three steps: * * 1) Calculate statistics * - For all cameras, this checks for one band and matching * sample counts. * - For framing cameras, this checks the standard deviation of * the images and records the averages of each image * - For push frame cameras, this calls CheckFramelets for each * image. * * 2) Create the temporary file, collect more detailed statistics * - For all cameras, this generates the temporary file and calculates * the final exclusion list * - For framing/push frame cameras, the temporary file is * 2 bands, where the first is a sum of DNs from each image/framelet * and the second band is a count of valid DNs that went into each sum * * 3) Create the final flat field file * - For all cameras, this processes the temporary file to create the final flat * field file. */ void IsisMain() { // Initialize variables ResetGlobals(); UserInterface &ui = Application::GetUserInterface(); maxStdev = ui.GetDouble("STDEVTOL"); if(ui.GetString("IMAGETYPE") == "FRAMING") { cameraType = Framing; // framing cameras need to figure this out automatically // during step 1 numFrameLines = -1; } else if(ui.GetString("IMAGETYPE") == "LINESCAN") { cameraType = LineScan; numFrameLines = ui.GetInteger("NUMLINES"); } else { cameraType = PushFrame; numFrameLines = ui.GetInteger("FRAMELETHEIGHT"); } FileList inList(ui.GetFilename("FROMLIST")); Progress progress; tempFileLength = 0; numOutputSamples = 0; /** * Line scan progress is based on the input list, whereas * the other cameras take much longer and are based on the * images themselves. Prepare the progress if we're doing * line scan. */ if(cameraType == LineScan) { progress.SetText("Calculating Number of Image Lines"); progress.SetMaximumSteps(inList.size()); progress.CheckStatus(); } /** * For a push frame camera, the temp file is one framelet. * Technically this is the same for the framing, but we * don't know the height of a framelet yet. */ if(cameraType == PushFrame) { tempFileLength = numFrameLines; } /** * Start pass 1, use global currImage so that methods called * know the image we're processing. */ for(currImage = 0; currImage < inList.size(); currImage++) { /** * Read the current cube into memory */ Cube tmp; tmp.Open(Filename(inList[currImage]).Expanded()); /** * If we haven't determined how many samples the output * should have, we can do so now */ if(numOutputSamples == 0 && tmp.Bands() == 1) { numOutputSamples = tmp.Samples(); } /** * Try and validate the image, quick tests first! * * (imageValid &= means imageValid = imageValid && ...) */ bool imageValid = true; // Only single band images are acceptable imageValid &= (tmp.Bands() == 1); // Sample sizes must always match imageValid &= (numOutputSamples == tmp.Samples()); // For push frame cameras, there must be valid all framelets if(cameraType == PushFrame) { imageValid &= (tmp.Lines() % numFrameLines == 0); } // For framing cameras, we need to figure out the size... 
// setTempFileLength is used to revert if the file // is decided to be invalid bool setTempFileLength = false; if(cameraType == Framing) { if(tempFileLength == 0 && imageValid) { tempFileLength = tmp.Lines(); numFrameLines = tempFileLength; setTempFileLength = true; } imageValid &= (tempFileLength == tmp.Lines()); } // Statistics are necessary at this point for push frame and framing cameras // because the framing camera standard deviation tolerance is based on // entire images, and push frame framelet exclusion stats can not be collected // during pass 2 cleanly if((cameraType == Framing || cameraType == PushFrame) && imageValid) { string prog = "Calculating Standard Deviation " + iString((int)currImage+1) + "/"; prog += iString((int)inList.size()) + " (" + Filename(inList[currImage]).Name() + ")"; if(cameraType == Framing) { Statistics *stats = tmp.Statistics(1, prog); imageValid &= !IsSpecial(stats->StandardDeviation()); imageValid &= !IsSpecial(stats->Average()); imageValid &= stats->StandardDeviation() <= maxStdev; vector<double> fileStats; fileStats.push_back(stats->Average()); inputFrameletAverages.push_back(fileStats); delete stats; } else if(cameraType == PushFrame) { imageValid &= CheckFramelets(prog, tmp); } if(setTempFileLength && !imageValid) { tempFileLength = 0; } } // The line scan camera needs to actually count the number of lines in each image to know // how many total frames there are before beginning pass 2. if(imageValid && (cameraType == LineScan)) { int lines = (tmp.Lines() / numFrameLines); // partial frame? if(tmp.Lines() % numFrameLines != 0) { lines ++; } tempFileLength += lines; } else if(!imageValid) { excludedFiles.insert(pair<int, bool>(currImage, true)); } tmp.Close(); if(cameraType == LineScan) { progress.CheckStatus(); } } /** * If the number of output samples could not be determined, we never * found a legitimate cube. */ if(numOutputSamples <= 0) { string msg = "No valid input cubes were found"; throw iException::Message(iException::User,msg,_FILEINFO_); } /** * If theres no temp file length, which is based off of valid data in * the input cubes, then we havent found any valid data. */ if(tempFileLength <= 0) { string msg = "No valid input data was found"; throw iException::Message(iException::User,msg,_FILEINFO_); } /** * ocube is now the temporary file (for pass 2). */ ocube = new Cube(); ocube->SetDimensions(numOutputSamples, tempFileLength, 2); PvlGroup &prefs = Preference::Preferences().FindGroup("DataDirectory", Pvl::Traverse); iString outTmpName = (string)prefs["Temporary"][0] + "/"; outTmpName += Filename(ui.GetFilename("TO")).Basename() + ".tmp.cub"; ocube->Create(outTmpName); oLineMgr = new LineManager(*ocube); oLineMgr->SetLine(1); ProcessByBrick p; int excludedCnt = 0; if(cameraType == LineScan) { outputTmpAverages.resize(numOutputSamples); outputTmpCounts.resize(numOutputSamples); numInputDns.resize(numOutputSamples); } cubeInitialized = false; for(currImage = 0; currImage < inList.size(); currImage++) { if(Excluded(currImage)) { excludedCnt ++; continue; } PvlObject currFile("Exclusions"); currFile += PvlKeyword("Filename", inList[currImage]); currFile += PvlKeyword("Tolerance", maxStdev); if(cameraType == LineScan) { currFile += PvlKeyword("FrameLines", numFrameLines); } else if(cameraType == PushFrame) { currFile += PvlKeyword("FrameletLines", numFrameLines); } excludedDetails.push_back(currFile); CubeAttributeInput inAtt; // This needs to be set constantly because ClearInputCubes // seems to be removing the input brick size. 
if(cameraType == LineScan) { p.SetBrickSize(1, numFrameLines, 1); } else if(cameraType == Framing || cameraType == PushFrame) { p.SetBrickSize(numOutputSamples, 1, 1); } p.SetInputCube(inList[currImage], inAtt); iString progText = "Calculating Averages " + iString((int)currImage+1); progText += "/" + iString((int)inList.size()); progText += " (" + Filename(inList[currImage]).Name() + ")"; p.Progress()->SetText(progText); p.StartProcess(CreateTemporaryData); p.EndProcess(); p.ClearInputCubes(); if(excludedDetails[excludedDetails.size()-1].Groups() == 0) { excludedDetails.resize(excludedDetails.size()-1); } } /** * Pass 2 completed. The processing methods were responsible for writing * the entire temporary cube. */ if(oLineMgr) { delete oLineMgr; oLineMgr = NULL; } if(ocube) { ocube->Close(); delete ocube; } /** * ocube is now the final output */ ocube = new Cube(); if(cameraType == LineScan) { ocube->SetDimensions(numOutputSamples, 1, 1); } else if(cameraType == Framing || cameraType == PushFrame) { ocube->SetDimensions(numOutputSamples, tempFileLength, 1); } ocube->Create(Filename(ui.GetFilename("TO")).Expanded()); oLineMgr = new LineManager(*ocube); oLineMgr->SetLine(1); // We now have the necessary temp file, let's go ahead and combine it into // the final output! p.SetInputBrickSize(numOutputSamples, 1, 2); p.SetOutputBrickSize(numOutputSamples, 1, 1); cubeInitialized = false; CubeAttributeInput inAtt; p.Progress()->SetText("Calculating Final Flat Field"); p.SetInputCube(outTmpName, inAtt); p.StartProcess(ProcessTemporaryData); p.EndProcess(); if(cameraType == LineScan) { ocube->Write(*oLineMgr); } if(oLineMgr) { delete oLineMgr; oLineMgr = NULL; } if(ocube) { ocube->Close(); delete ocube; ocube = NULL; } /** * Build a list of excluded files */ PvlGroup excludedFiles("ExcludedFiles"); for(currImage = 0; currImage < inList.size(); currImage++) { if(Excluded(currImage)) { excludedFiles += PvlKeyword("File", inList[currImage]); } } // log the results Application::Log(excludedFiles); if(ui.WasEntered("EXCLUDE")) { Pvl excludeFile; // Find excluded files excludeFile.AddGroup(excludedFiles); for(unsigned int i = 0; i < excludedDetails.size(); i++) { excludeFile.AddObject(excludedDetails[i]); } excludeFile.Write(Filename(ui.GetFilename("EXCLUDE")).Expanded()); } remove(outTmpName.c_str()); // Clean up settings ResetGlobals(); }
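Pass 2 writes a two-band temporary cube (band 1: running sum of DNs, band 2: count of valid DNs, as described in the header comment), and pass 3 reduces it to the flat field. That reduction is presumably a per-sample division of sum by count; a minimal sketch under that assumption:

// Minimal sketch of reducing the two-band temp file (sum, count) to a flat
// field value per sample; assumes flat = sum / count with a NULL-style
// output where no valid input existed.  Values below are made up.
#include <cstdio>

int main() {
  double sums[]   = { 1250.0, 0.0, 980.0 };
  int    counts[] = { 10,     0,   7     };
  for (int s = 0; s < 3; s++) {
    if (counts[s] > 0) {
      printf("sample %d: flat = %.2f\n", s, sums[s] / counts[s]);
    }
    else {
      printf("sample %d: flat = NULL (no valid input DNs)\n", s);
    }
  }
  return 0;
}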
void IsisMain() { //Create a process to create the input cubes Process p; //Create the input cubes, matching sample/lines Cube *inCube = p.SetInputCube ("FROM"); Cube *latCube = p.SetInputCube("LATCUB", SpatialMatch); Cube *lonCube = p.SetInputCube("LONCUB", SpatialMatch); //A 1x1 brick to read in the latitude and longitude DN values from //the specified cubes Brick latBrick(1,1,1, latCube->PixelType()); Brick lonBrick(1,1,1, lonCube->PixelType()); UserInterface &ui = Application::GetUserInterface(); //Set the sample and line increments int sinc = (int)(inCube->Samples() * 0.10); if(ui.WasEntered("SINC")) { sinc = ui.GetInteger("SINC"); } int linc = (int)(inCube->Lines() * 0.10); if(ui.WasEntered("LINC")) { linc = ui.GetInteger("LINC"); } //Set the degree of the polynomial to use in our functions int degree = ui.GetInteger("DEGREE"); //We are using a polynomial with two variables PolynomialBivariate sampFunct(degree); PolynomialBivariate lineFunct(degree); //We will be solving the function using the least squares method LeastSquares sampSol(sampFunct); LeastSquares lineSol(lineFunct); //Setup the variables for solving the stereographic projection //x = cos(latitude) * sin(longitude - lon_center) //y = cos(lat_center) * sin(latitude) - sin(lat_center) * cos(latitude) * cos(longitude - lon_center) //Get the center lat and long from the input cubes double lat_center = latCube->Statistics()->Average() * PI/180.0; double lon_center = lonCube->Statistics()->Average() * PI/180.0; /** * Loop through lines and samples projecting the latitude and longitude at those * points to stereographic x and y and adding these points to the LeastSquares * matrix. */ for(int i = 1; i <= inCube->Lines(); i+= linc) { for(int j = 1; j <= inCube->Samples(); j+= sinc) { latBrick.SetBasePosition(j, i, 1); latCube->Read(latBrick); if(IsSpecial(latBrick.at(0))) continue; double lat = latBrick.at(0) * PI/180.0; lonBrick.SetBasePosition(j, i, 1); lonCube->Read(lonBrick); if(IsSpecial(lonBrick.at(0))) continue; double lon = lonBrick.at(0) * PI/180.0; //Project lat and lon to x and y using a stereographic projection double k = 2/(1 + sin(lat_center) * sin(lat) + cos(lat_center)*cos(lat)*cos(lon - lon_center)); double x = k * cos(lat) * sin(lon - lon_center); double y = k * (cos(lat_center) * sin(lat) - sin(lat_center) * cos(lat) * cos(lon - lon_center)); //Add x and y to the least squares matrix vector<double> data; data.push_back(x); data.push_back(y); sampSol.AddKnown(data, j); lineSol.AddKnown(data, i); //If the sample increment goes past the last sample in the line, we want to //always read the last sample.. if(j != inCube->Samples() && j + sinc > inCube->Samples()) { j = inCube->Samples() - sinc; } } //If the line increment goes past the last line in the cube, we want to //always read the last line.. if(i != inCube->Lines() && i + linc > inCube->Lines()) { i = inCube->Lines() - linc; } } //Solve the least squares functions using QR Decomposition sampSol.Solve(LeastSquares::QRD); lineSol.Solve(LeastSquares::QRD); //If the user wants to save the residuals to a file, create a file and write //the column titles to it.
TextFile oFile; if(ui.WasEntered("RESIDUALS")) { oFile.Open(ui.GetFilename("RESIDUALS"), "overwrite"); oFile.PutLine("Sample,\tLine,\tX,\tY,\tSample Error,\tLine Error\n"); } //Gather the statistics for the residuals from the least squares solutions Statistics sampErr; Statistics lineErr; vector<double> sampResiduals = sampSol.Residuals(); vector<double> lineResiduals = lineSol.Residuals(); for(int i = 0; i < (int)sampResiduals.size(); i++) { sampErr.AddData(sampResiduals[i]); lineErr.AddData(lineResiduals[i]); } //If a residuals file was specified, write the previous data, and the errors to the file. if(ui.WasEntered("RESIDUALS")) { for(int i = 0; i < sampSol.Rows(); i++) { vector<double> data = sampSol.GetInput(i); iString tmp = ""; tmp += iString(sampSol.GetExpected(i)); tmp += ",\t"; tmp += iString(lineSol.GetExpected(i)); tmp += ",\t"; tmp += iString(data[0]); tmp += ",\t"; tmp += iString(data[1]); tmp += ",\t"; tmp += iString(sampResiduals[i]); tmp += ",\t"; tmp += iString(lineResiduals[i]); oFile.PutLine(tmp + "\n"); } } oFile.Close(); //Records the error to the log PvlGroup error( "Error" ); error += PvlKeyword( "Degree", degree ); error += PvlKeyword( "NumberOfPoints", (int)sampResiduals.size() ); error += PvlKeyword( "SampleMinimumError", sampErr.Minimum() ); error += PvlKeyword( "SampleAverageError", sampErr.Average() ); error += PvlKeyword( "SampleMaximumError", sampErr.Maximum() ); error += PvlKeyword( "SampleStdDeviationError", sampErr.StandardDeviation() ); error += PvlKeyword( "LineMinimumError", lineErr.Minimum() ); error += PvlKeyword( "LineAverageError", lineErr.Average() ); error += PvlKeyword( "LineMaximumError", lineErr.Maximum() ); error += PvlKeyword( "LineStdDeviationError", lineErr.StandardDeviation() ); Application::Log( error ); //Close the input cubes for cleanup p.EndProcess(); //If we want to warp the image, then continue, otherwise return if(!ui.GetBoolean("NOWARP")) { //Creates the mapping group Pvl mapFile; mapFile.Read(ui.GetFilename("MAP")); PvlGroup &mapGrp = mapFile.FindGroup("Mapping",Pvl::Traverse); //Reopen the lat and long cubes latCube = new Cube(); latCube->SetVirtualBands(ui.GetInputAttribute("LATCUB").Bands()); latCube->Open(ui.GetFilename("LATCUB")); lonCube = new Cube(); lonCube->SetVirtualBands(ui.GetInputAttribute("LONCUB").Bands()); lonCube->Open(ui.GetFilename("LONCUB")); PvlKeyword targetName; //If the user entered the target name if(ui.WasEntered("TARGET")) { targetName = PvlKeyword("TargetName", ui.GetString("TARGET")); } //Else read the target name from the input cube else { Pvl fromFile; fromFile.Read(ui.GetFilename("FROM")); targetName = fromFile.FindKeyword("TargetName", Pvl::Traverse); } mapGrp.AddKeyword(targetName, Pvl::Replace); PvlKeyword equRadius; PvlKeyword polRadius; //If the user entered the equatorial and polar radii if(ui.WasEntered("EQURADIUS") && ui.WasEntered("POLRADIUS")) { equRadius = PvlKeyword("EquatorialRadius", ui.GetDouble("EQURADIUS")); polRadius = PvlKeyword("PolarRadius", ui.GetDouble("POLRADIUS")); } //Else read them from the pck else { Filename pckFile("$base/kernels/pck/pck?????.tpc"); pckFile.HighestVersion(); string pckFilename = pckFile.Expanded(); furnsh_c(pckFilename.c_str()); string target = targetName[0]; SpiceInt code; SpiceBoolean found; bodn2c_c (target.c_str(), &code, &found); if (!found) { string msg = "Could not convert Target [" + target + "] to NAIF code"; throw Isis::iException::Message(Isis::iException::Io,msg,_FILEINFO_); } SpiceInt n; SpiceDouble radii[3]; 
bodvar_c(code,"RADII",&n,radii); equRadius = PvlKeyword("EquatorialRadius", radii[0] * 1000); polRadius = PvlKeyword("PolarRadius", radii[2] * 1000); } mapGrp.AddKeyword(equRadius, Pvl::Replace); mapGrp.AddKeyword(polRadius, Pvl::Replace); //If the latitude type is not in the mapping group, copy it from the input if(!mapGrp.HasKeyword("LatitudeType")) { if(ui.GetString("LATTYPE") == "PLANETOCENTRIC") { mapGrp.AddKeyword(PvlKeyword("LatitudeType","Planetocentric"), Pvl::Replace); } else { mapGrp.AddKeyword(PvlKeyword("LatitudeType","Planetographic"), Pvl::Replace); } } //If the longitude direction is not in the mapping group, copy it from the input if(!mapGrp.HasKeyword("LongitudeDirection")) { if(ui.GetString("LONDIR") == "POSITIVEEAST") { mapGrp.AddKeyword(PvlKeyword("LongitudeDirection","PositiveEast"), Pvl::Replace); } else { mapGrp.AddKeyword(PvlKeyword("LongitudeDirection","PositiveWest"), Pvl::Replace); } } //If the longitude domain is not in the mapping group, assume it is 360 if(!mapGrp.HasKeyword("LongitudeDomain")) { mapGrp.AddKeyword(PvlKeyword("LongitudeDomain","360"), Pvl::Replace); } //If the default range is to be computed, use the input lat/long cubes to determine the range if(ui.GetString("DEFAULTRANGE") == "COMPUTE") { //NOTE - When computing the min/max longitude this application does not account for the //longitude seam if it exists. Since the min/max are calculated from the statistics of //the input longitude cube and then converted to the mapping group's domain they may be //invalid for cubes containing the longitude seam. Statistics *latStats = latCube->Statistics(); Statistics *lonStats = lonCube->Statistics(); double minLat = latStats->Minimum(); double maxLat = latStats->Maximum(); bool isOcentric = ((std::string)mapGrp.FindKeyword("LatitudeType")) == "Planetocentric"; if(isOcentric) { if(ui.GetString("LATTYPE") != "PLANETOCENTRIC") { minLat = Projection::ToPlanetocentric(minLat, (double)equRadius, (double)polRadius); maxLat = Projection::ToPlanetocentric(maxLat, (double)equRadius, (double)polRadius); } } else { if(ui.GetString("LATTYPE") == "PLANETOCENTRIC") { minLat = Projection::ToPlanetographic(minLat, (double)equRadius, (double)polRadius); maxLat = Projection::ToPlanetographic(maxLat, (double)equRadius, (double)polRadius); } } int lonDomain = (int)mapGrp.FindKeyword("LongitudeDomain"); double minLon = lonDomain == 360 ? Projection::To360Domain(lonStats->Minimum()) : Projection::To180Domain(lonStats->Minimum()); double maxLon = lonDomain == 360 ? 
Projection::To360Domain(lonStats->Maximum()) : Projection::To180Domain(lonStats->Maximum()); bool isPosEast = ((std::string)mapGrp.FindKeyword("LongitudeDirection")) == "PositiveEast"; if(isPosEast) { if(ui.GetString("LONDIR") != "POSITIVEEAST") { minLon = Projection::ToPositiveEast(minLon, lonDomain); maxLon = Projection::ToPositiveEast(maxLon, lonDomain); } } else { if(ui.GetString("LONDIR") == "POSITIVEEAST") { minLon = Projection::ToPositiveWest(minLon, lonDomain); maxLon = Projection::ToPositiveWest(maxLon, lonDomain); } } if(minLon > maxLon) { double temp = minLon; minLon = maxLon; maxLon = temp; } mapGrp.AddKeyword(PvlKeyword("MinimumLatitude", minLat),Pvl::Replace); mapGrp.AddKeyword(PvlKeyword("MaximumLatitude", maxLat),Pvl::Replace); mapGrp.AddKeyword(PvlKeyword("MinimumLongitude", minLon),Pvl::Replace); mapGrp.AddKeyword(PvlKeyword("MaximumLongitude", maxLon),Pvl::Replace); } //If the user decided to enter a ground range then override if (ui.WasEntered("MINLAT")) { mapGrp.AddKeyword(PvlKeyword("MinimumLatitude", ui.GetDouble("MINLAT")),Pvl::Replace); } if (ui.WasEntered("MAXLAT")) { mapGrp.AddKeyword(PvlKeyword("MaximumLatitude", ui.GetDouble("MAXLAT")),Pvl::Replace); } if (ui.WasEntered("MINLON")) { mapGrp.AddKeyword(PvlKeyword("MinimumLongitude", ui.GetDouble("MINLON")),Pvl::Replace); } if (ui.WasEntered("MAXLON")) { mapGrp.AddKeyword(PvlKeyword("MaximumLongitude", ui.GetDouble("MAXLON")),Pvl::Replace); } //If the pixel resolution is to be computed, compute the pixels/degree from the input if (ui.GetString("PIXRES") == "COMPUTE") { latBrick.SetBasePosition(1,1,1); latCube->Read(latBrick); lonBrick.SetBasePosition(1,1,1); lonCube->Read(lonBrick); //Read the lat and long at the upper left corner double a = latBrick.at(0) * PI/180.0; double c = lonBrick.at(0) * PI/180.0; latBrick.SetBasePosition(latCube->Samples(),latCube->Lines(),1); latCube->Read(latBrick); lonBrick.SetBasePosition(lonCube->Samples(),lonCube->Lines(),1); lonCube->Read(lonBrick); //Read the lat and long at the lower right corner double b = latBrick.at(0) * PI/180.0; double d = lonBrick.at(0) * PI/180.0; //Determine the angle between the two points double angle = acos(cos(a) * cos(b) * cos(c - d) + sin(a) * sin(b)); //double angle = acos((cos(a1) * cos(b1) * cos(b2)) + (cos(a1) * sin(b1) * cos(a2) * sin(b2)) + (sin(a1) * sin(a2))); angle *= 180/PI; //Determine the number of pixels between the two points double pixels = sqrt(pow(latCube->Samples() -1.0, 2.0) + pow(latCube->Lines() -1.0, 2.0)); //Add the scale in pixels/degree to the mapping group mapGrp.AddKeyword(PvlKeyword("Scale", pixels/angle, "pixels/degree"), Pvl::Replace); if (mapGrp.HasKeyword("PixelResolution")) { mapGrp.DeleteKeyword("PixelResolution"); } } // If the user decided to enter a resolution then override if (ui.GetString("PIXRES") == "MPP") { mapGrp.AddKeyword(PvlKeyword("PixelResolution", ui.GetDouble("RESOLUTION"), "meters/pixel"), Pvl::Replace); if (mapGrp.HasKeyword("Scale")) { mapGrp.DeleteKeyword("Scale"); } } else if (ui.GetString("PIXRES") == "PPD") { mapGrp.AddKeyword(PvlKeyword("Scale", ui.GetDouble("RESOLUTION"), "pixels/degree"), Pvl::Replace); if (mapGrp.HasKeyword("PixelResolution")) { mapGrp.DeleteKeyword("PixelResolution"); } } //Create a projection using the map file we created int samples,lines; Projection *outmap = ProjectionFactory::CreateForCube(mapFile,samples,lines,false); //Write the map file to the log Application::GuiLog(mapGrp); //Create a process rubber sheet ProcessRubberSheet r; //Set the input cube inCube = 
r.SetInputCube("FROM"); double tolerance = ui.GetDouble("TOLERANCE") * outmap->Resolution(); //Create a new transform object Transform *transform = new nocam2map (sampSol, lineSol, outmap, latCube, lonCube, ui.GetString("LATTYPE") == "PLANETOCENTRIC", ui.GetString("LONDIR") == "POSITIVEEAST", tolerance, ui.GetInteger("ITERATIONS"), inCube->Samples(), inCube->Lines(), samples, lines); //Allocate the output cube and add the mapping labels Cube *oCube = r.SetOutputCube ("TO", transform->OutputSamples(), transform->OutputLines(), inCube->Bands()); oCube->PutGroup(mapGrp); //Determine which interpolation to use Interpolator *interp = NULL; if (ui.GetString("INTERP") == "NEARESTNEIGHBOR") { interp = new Interpolator(Interpolator::NearestNeighborType); } else if (ui.GetString("INTERP") == "BILINEAR") { interp = new Interpolator(Interpolator::BiLinearType); } else if (ui.GetString("INTERP") == "CUBICCONVOLUTION") { interp = new Interpolator(Interpolator::CubicConvolutionType); } //Warp the cube r.StartProcess(*transform, *interp); r.EndProcess(); // add mapping to print.prt PvlGroup mapping = outmap->Mapping(); Application::Log(mapping); //Clean up delete latCube; delete lonCube; delete outmap; delete transform; delete interp; } }
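/*
 * A minimal, standalone sketch (not ISIS code) of the oblique stereographic
 * forward projection that the program above uses to turn (latitude, longitude)
 * samples into (x, y) knowns for the least-squares fit.  In the textbook form,
 *   x = k * cos(lat) * sin(lon - lon0)
 *   y = k * (cos(lat0) * sin(lat) - sin(lat0) * cos(lat) * cos(lon - lon0))
 * with k = 2 / (1 + sin(lat0)sin(lat) + cos(lat0)cos(lat)cos(lon - lon0)).
 * The function and variable names below are illustrative only.
 */
#include <cmath>
#include <cstdio>

// Forward oblique stereographic projection centered on (lat0, lon0);
// all angles are in radians and the sphere has unit radius.
static void StereoXY(double lat, double lon, double lat0, double lon0,
                     double &x, double &y) {
  double k = 2.0 / (1.0 + std::sin(lat0) * std::sin(lat) +
                    std::cos(lat0) * std::cos(lat) * std::cos(lon - lon0));
  x = k * std::cos(lat) * std::sin(lon - lon0);
  y = k * (std::cos(lat0) * std::sin(lat) -
           std::sin(lat0) * std::cos(lat) * std::cos(lon - lon0));
}

int main() {
  const double DEG2RAD = 3.14159265358979323846 / 180.0;
  double x, y;
  // Project a point one degree north-east of the projection center.
  StereoXY(1.0 * DEG2RAD, 1.0 * DEG2RAD, 0.0, 0.0, x, y);
  std::printf("x = %.6f  y = %.6f\n", x, y);
  return 0;
}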
void IsisMain() { // We will be warping a cube ProcessRubberSheet p; // Get the map projection file provided by the user UserInterface &ui = Application::GetUserInterface(); Pvl userPvl(ui.GetFilename("MAP")); PvlGroup &userMappingGrp = userPvl.FindGroup("Mapping",Pvl::Traverse); // Open the input cube and get the projection Cube *icube = p.SetInputCube ("FROM"); // Get the mapping group PvlGroup fromMappingGrp = icube->GetGroup("Mapping"); Projection *inproj = icube->Projection(); PvlGroup outMappingGrp = fromMappingGrp; // If the default range is FROM, then wipe out any range data in user mapping file if(ui.GetString("DEFAULTRANGE").compare("FROM") == 0 && !ui.GetBoolean("MATCHMAP")) { if(userMappingGrp.HasKeyword("MinimumLatitude")) { userMappingGrp.DeleteKeyword("MinimumLatitude"); } if(userMappingGrp.HasKeyword("MaximumLatitude")) { userMappingGrp.DeleteKeyword("MaximumLatitude"); } if(userMappingGrp.HasKeyword("MinimumLongitude")) { userMappingGrp.DeleteKeyword("MinimumLongitude"); } if(userMappingGrp.HasKeyword("MaximumLongitude")) { userMappingGrp.DeleteKeyword("MaximumLongitude"); } } // Deal with user overrides entered in the GUI. Do this by changing the user's mapping group, which // will then overlay anything in the output mapping group. if(ui.WasEntered("MINLAT") && !ui.GetBoolean("MATCHMAP")) { userMappingGrp.AddKeyword( PvlKeyword("MinimumLatitude", ui.GetDouble("MINLAT")), Pvl::Replace ); } if(ui.WasEntered("MAXLAT") && !ui.GetBoolean("MATCHMAP")) { userMappingGrp.AddKeyword( PvlKeyword("MaximumLatitude", ui.GetDouble("MAXLAT")), Pvl::Replace ); } if(ui.WasEntered("MINLON") && !ui.GetBoolean("MATCHMAP")) { userMappingGrp.AddKeyword( PvlKeyword("MinimumLongitude", ui.GetDouble("MINLON")), Pvl::Replace ); } if(ui.WasEntered("MAXLON") && !ui.GetBoolean("MATCHMAP")) { userMappingGrp.AddKeyword( PvlKeyword("MaximumLongitude", ui.GetDouble("MAXLON")), Pvl::Replace ); } /** * If the user is changing from positive east to positive west, or vice-versa, the output minimum is really * the input maximum. However, the user mapping group must be left unaffected (an input minimum must be the * output minimum). To accomplish this, we swap the minimums/maximums in the output group ahead of time. This * causes the minimums and maximums to correlate to the output minimums and maximums. That way when we copy * the user mapping group into the output group a mimimum overrides a minimum and a maximum overrides a maximum. */ bool sameDirection = true; if(userMappingGrp.HasKeyword("LongitudeDirection")) { if(((string)userMappingGrp["LongitudeDirection"]).compare(fromMappingGrp["LongitudeDirection"]) != 0) { sameDirection = false; } } // Since the out mapping group came from the from mapping group, which came from a valid cube, // we can assume both min/max lon exists if min longitude exists. 
if(!sameDirection && outMappingGrp.HasKeyword("MinimumLongitude")) { double minLon = outMappingGrp["MinimumLongitude"]; double maxLon = outMappingGrp["MaximumLongitude"]; outMappingGrp["MaximumLongitude"] = minLon; outMappingGrp["MinimumLongitude"] = maxLon; } if(ui.GetString("PIXRES").compare("FROM") == 0 && !ui.GetBoolean("MATCHMAP")) { // Resolution will be in fromMappingGrp and outMappingGrp at this time // delete from user mapping grp if(userMappingGrp.HasKeyword("Scale")) { userMappingGrp.DeleteKeyword("Scale"); } if(userMappingGrp.HasKeyword("PixelResolution")) { userMappingGrp.DeleteKeyword("PixelResolution"); } } else if(ui.GetString("PIXRES").compare("MAP") == 0 || ui.GetBoolean("MATCHMAP")) { // Resolution will be in userMappingGrp - delete all others if(outMappingGrp.HasKeyword("Scale")) { outMappingGrp.DeleteKeyword("Scale"); } if(outMappingGrp.HasKeyword("PixelResolution")) { outMappingGrp.DeleteKeyword("PixelResolution"); } if(fromMappingGrp.HasKeyword("Scale")) { fromMappingGrp.DeleteKeyword("Scale"); } if(fromMappingGrp.HasKeyword("PixelResolution")) { fromMappingGrp.DeleteKeyword("PixelResolution"); } } else if(ui.GetString("PIXRES").compare("MPP") == 0) { // Resolution specified - delete all and add to outMappingGrp if(outMappingGrp.HasKeyword("Scale")) { outMappingGrp.DeleteKeyword("Scale"); } if(outMappingGrp.HasKeyword("PixelResolution")) { outMappingGrp.DeleteKeyword("PixelResolution"); } if(fromMappingGrp.HasKeyword("Scale")) { fromMappingGrp.DeleteKeyword("Scale"); } if(fromMappingGrp.HasKeyword("PixelResolution")) { fromMappingGrp.DeleteKeyword("PixelResolution"); } if(userMappingGrp.HasKeyword("Scale")) { userMappingGrp.DeleteKeyword("Scale"); } if(userMappingGrp.HasKeyword("PixelResolution")) { userMappingGrp.DeleteKeyword("PixelResolution"); } outMappingGrp.AddKeyword(PvlKeyword("PixelResolution", ui.GetDouble("RESOLUTION"), "meters/pixel"), Pvl::Replace); } else if(ui.GetString("PIXRES").compare("PPD") == 0) { // Resolution specified - delete all and add to outMappingGrp if(outMappingGrp.HasKeyword("Scale")) { outMappingGrp.DeleteKeyword("Scale"); } if(outMappingGrp.HasKeyword("PixelResolution")) { outMappingGrp.DeleteKeyword("PixelResolution"); } if(fromMappingGrp.HasKeyword("Scale")) { fromMappingGrp.DeleteKeyword("Scale"); } if(fromMappingGrp.HasKeyword("PixelResolution")) { fromMappingGrp.DeleteKeyword("PixelResolution"); } if(userMappingGrp.HasKeyword("Scale")) { userMappingGrp.DeleteKeyword("Scale"); } if(userMappingGrp.HasKeyword("PixelResolution")) { userMappingGrp.DeleteKeyword("PixelResolution"); } outMappingGrp.AddKeyword(PvlKeyword("Scale", ui.GetDouble("RESOLUTION"), "pixels/degree"), Pvl::Replace); } // Rotation will NOT propagate if(outMappingGrp.HasKeyword("Rotation")) { outMappingGrp.DeleteKeyword("Rotation"); } /** * The user-specified map template file overrides whatever is in the * cube's mapping group. */ for(int keyword = 0; keyword < userMappingGrp.Keywords(); keyword ++) { outMappingGrp.AddKeyword(userMappingGrp[keyword], Pvl::Replace); } /** * Now, we have to deal with unit conversions. We convert only if the following are true: * 1) We used values from the input cube * 2) The values are longitudes or latitudes * 3) The map file or user-specified information uses a different measurement system than * the input cube for said values. * * The data is corrected for: * 1) Positive east/positive west * 2) Longitude domain * 3) Planetographic/planetocentric.
*/ // First, the longitude direction if(!sameDirection) { PvlGroup longitudes = inproj->MappingLongitudes(); for(int index = 0; index < longitudes.Keywords(); index ++) { if(!userMappingGrp.HasKeyword(longitudes[index].Name())) { // use the from domain because that's where our values are coming from if(((string)userMappingGrp["LongitudeDirection"]).compare("PositiveEast") == 0) { outMappingGrp[longitudes[index].Name()] = Projection::ToPositiveEast(outMappingGrp[longitudes[index].Name()], outMappingGrp["LongitudeDomain"]); } else { outMappingGrp[longitudes[index].Name()] = Projection::ToPositiveWest(outMappingGrp[longitudes[index].Name()], outMappingGrp["LongitudeDomain"]); } } } } // The minimum/maximum longitudes should be in order now. However, if the user entered a // maximum that was lower than the minimum, or a minimum that was higher than the maximum this // may still fail. Let it throw an error when we instantiate the projection. // Second, longitude domain if(userMappingGrp.HasKeyword("LongitudeDomain")) { // user set a new domain? if((int)userMappingGrp["LongitudeDomain"] != (int)fromMappingGrp["LongitudeDomain"]) { // new domain different? PvlGroup longitudes = inproj->MappingLongitudes(); for(int index = 0; index < longitudes.Keywords(); index ++) { if(!userMappingGrp.HasKeyword(longitudes[index].Name())) { if((int)userMappingGrp["LongitudeDomain"] == 180) { outMappingGrp[longitudes[index].Name()] = Projection::To180Domain(outMappingGrp[longitudes[index].Name()]); } else { outMappingGrp[longitudes[index].Name()] = Projection::To360Domain(outMappingGrp[longitudes[index].Name()]); } } } } } // Third, planetographic/planetocentric if(userMappingGrp.HasKeyword("LatitudeType")) { // user set a new domain? if(((string)userMappingGrp["LatitudeType"]).compare(fromMappingGrp["LatitudeType"]) != 0) { // new lat type different? PvlGroup latitudes = inproj->MappingLatitudes(); for(int index = 0; index < latitudes.Keywords(); index ++) { if(!userMappingGrp.HasKeyword(latitudes[index].Name())) { if(((string)userMappingGrp["LatitudeType"]).compare("Planetographic") == 0) { outMappingGrp[latitudes[index].Name()] = Projection::ToPlanetographic( (double)fromMappingGrp[latitudes[index].Name()], (double)fromMappingGrp["EquatorialRadius"], (double)fromMappingGrp["PolarRadius"]); } else { outMappingGrp[latitudes[index].Name()] = Projection::ToPlanetocentric( (double)fromMappingGrp[latitudes[index].Name()], (double)fromMappingGrp["EquatorialRadius"], (double)fromMappingGrp["PolarRadius"]); } } } } } // If MinLon/MaxLon out of order, we weren't able to calculate the correct values if((double)outMappingGrp["MinimumLongitude"] >= (double)outMappingGrp["MaximumLongitude"]) { if(!ui.WasEntered("MINLON") || !ui.WasEntered("MAXLON")) { string msg = "Unable to determine the correct [MinimumLongitude,MaximumLongitude]."; msg += " Please specify these values in the [MINLON,MAXLON] parameters"; throw iException::Message(iException::Pvl,msg,_FILEINFO_); } } int samples,lines; Pvl mapData; // Copy to preserve cube labels so we can match cube size if (userPvl.HasObject("IsisCube")) { mapData = userPvl; mapData.FindObject("IsisCube").DeleteGroup("Mapping"); mapData.FindObject("IsisCube").AddGroup(outMappingGrp); } else { mapData.AddGroup(outMappingGrp); } // *NOTE: The UpperLeftX,UpperLeftY keywords will not be used in the CreateForCube // method, and they will instead be recalculated. This is correct. 
Projection *outproj = ProjectionFactory::CreateForCube(mapData,samples,lines, ui.GetBoolean("MATCHMAP")); // Set up the transform object which will simply map // output line/samps -> output lat/lons -> input line/samps Transform *transform = new map2map (icube->Samples(), icube->Lines(), icube->Projection(), samples, lines, outproj, ui.GetBoolean("TRIM")); // Allocate the output cube and add the mapping labels Cube *ocube = p.SetOutputCube ("TO", transform->OutputSamples(), transform->OutputLines(), icube->Bands()); PvlGroup cleanOutGrp = outproj->Mapping(); // ProjectionFactory::CreateForCube updated mapData to have the correct // upperleftcornerx, upperleftcornery, scale and resolution. Use these // updated numbers. cleanOutGrp.AddKeyword(mapData.FindGroup("Mapping",Pvl::Traverse)["UpperLeftCornerX"], Pvl::Replace); cleanOutGrp.AddKeyword(mapData.FindGroup("Mapping",Pvl::Traverse)["UpperLeftCornerY"], Pvl::Replace); cleanOutGrp.AddKeyword(mapData.FindGroup("Mapping",Pvl::Traverse)["Scale"], Pvl::Replace); cleanOutGrp.AddKeyword(mapData.FindGroup("Mapping",Pvl::Traverse)["PixelResolution"], Pvl::Replace); ocube->PutGroup(cleanOutGrp); // Set up the interpolator Interpolator *interp; if (ui.GetString("INTERP") == "NEARESTNEIGHBOR") { interp = new Interpolator(Interpolator::NearestNeighborType); } else if (ui.GetString("INTERP") == "BILINEAR") { interp = new Interpolator(Interpolator::BiLinearType); } else if (ui.GetString("INTERP") == "CUBICCONVOLUTION") { interp = new Interpolator(Interpolator::CubicConvolutionType); } else { string msg = "Unknown value for INTERP [" + ui.GetString("INTERP") + "]"; throw iException::Message(iException::Programmer,msg,_FILEINFO_); } // Warp the cube p.StartProcess(*transform, *interp); p.EndProcess(); Application::Log(cleanOutGrp); // Cleanup delete transform; delete interp; }
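/*
 * A minimal, standalone sketch (not the ISIS Projection class) of the kind of
 * longitude bookkeeping the code above performs when the user's map file and
 * the input cube disagree: wrapping into the 360 or 180 longitude domain and
 * flipping between positive-east and positive-west.  The exact ISIS
 * conventions may differ in edge cases; all names below are illustrative.
 */
#include <cmath>
#include <cstdio>

static double To360Domain(double lon) {   // wrap into [0, 360)
  double l = std::fmod(lon, 360.0);
  if (l < 0.0) l += 360.0;
  return l;
}

static double To180Domain(double lon) {   // wrap into (-180, 180]
  double l = To360Domain(lon);
  return (l > 180.0) ? l - 360.0 : l;
}

static double FlipLongitudeDirection(double lon, int domain) {
  // Positive east <-> positive west is a sign flip, re-wrapped to the domain.
  return (domain == 360) ? To360Domain(-lon) : To180Domain(-lon);
}

int main() {
  std::printf("%.1f\n", To180Domain(350.0));                // -10.0
  std::printf("%.1f\n", FlipLongitudeDirection(10.0, 360)); // 350.0
  return 0;
}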
void IsisMain() { UserInterface &ui = Application::GetUserInterface(); /*Processing steps 1. Open and read the jitter table, convert the pixel offsets to angles, and create the polynomials (solve for the coefficients) used to do the high-pass filter, putting the results into a rotation matrix in the jitter class. 2. Apply the jitter correction in the LineScanCameraRotation object of the master cube. 3. Loop through FROMLIST correcting the pointing and writing out the updated camera pointing from the master cube */ int degree = ui.GetInteger("DEGREE"); // Get the input file list to make sure it is not empty and the master cube is included FileList list; list.Read(ui.GetFilename("FROMLIST")); if (list.size() < 1) { string msg = "The input list file [" + ui.GetFilename("FROMLIST") + "] is empty"; throw iException::Message(iException::User,msg,_FILEINFO_); } int ifile = 0; // Make sure the master file is included in the input file list while (ifile < (int) list.size() && Filename(list[ifile]).Expanded() != Filename(ui.GetFilename("MASTER")).Expanded()) { ifile++; } if (ifile >= (int) list.size()) { string msg = "The master file, [" + Filename(ui.GetFilename("MASTER")).Expanded() + "] is not included in " + "the input list file [" + ui.GetFilename("FROMLIST") + "]"; throw iException::Message(iException::User,msg,_FILEINFO_); } bool step2 = false; PvlGroup gp("AppjitResults"); //Step 1: Create the jitter rotation try { // Open the master cube Cube cube; cube.Open(ui.GetFilename("MASTER"),"rw"); //Check for an existing polygon; if it exists, delete it if (cube.Label()->HasObject("Polygon")){ cube.Label()->DeleteObject("Polygon"); } // Get the camera Camera *cam = cube.Camera(); if (cam->DetectorMap()->LineRate() == 0.0) { string msg = "[" + ui.GetFilename("MASTER") + "] is not a line scan camera image"; throw iException::Message(Isis::iException::User,msg,_FILEINFO_); } // Create the master rotation to be corrected int frameCode = cam->InstrumentRotation()->Frame(); cam->SetImage(int(cube.Samples()/2), int(cube.Lines()/2) ); double tol = cam->PixelResolution(); if (tol < 0.)
{ // Alternative calculation of .01*ground resolution of a pixel tol = cam->PixelPitch()*cam->SpacecraftAltitude()*1000./cam->FocalLength()/100.; } LineScanCameraRotation crot(frameCode, *(cube.Label()), cam->InstrumentRotation()->GetFullCacheTime(), tol ); crot.SetPolynomialDegree(ui.GetInteger("DEGREE")); crot.SetAxes(1, 2, 3); if (ui.WasEntered("PITCHRATE")) crot.ResetPitchRate(ui.GetDouble("PITCHRATE")); if (ui.WasEntered("YAW")) crot.ResetYaw(ui.GetDouble("YAW")); crot.SetPolynomial(); double baseTime = crot.GetBaseTime(); double timeScale = crot.GetTimeScale(); double fl = cam->FocalLength(); double pixpitch = cam->PixelPitch(); std::vector<double> cacheTime = cam->InstrumentRotation()->GetFullCacheTime(); // Get the jitter in pixels, compute jitter angles, and fit a polynomial to each angle PixelOffset jitter(ui.GetFilename("JITTERFILE"), fl, pixpitch, baseTime, timeScale, degree); jitter.LoadAngles(cacheTime); jitter.SetPolynomial(); // Set the jitter and apply to the instrument rotation crot.SetJitter( &jitter ); crot.ReloadCache(); // Pull out the pointing cache as a table and write it Table cmatrix = crot.Cache("InstrumentPointing"); cmatrix.Label().AddComment("Corrected using appjit and" + ui.GetFilename("JITTERFILE")); cube.Write(cmatrix); // Write out the instrument position table Isis::PvlGroup kernels = cube.Label()->FindGroup("Kernels",Isis::Pvl::Traverse); // Write out the "Table" label to the tabled kernels in the kernels group kernels["InstrumentPointing"] = "Table"; // kernels["InstrumentPosition"] = "Table"; cube.PutGroup(kernels); cube.Close(); gp += PvlKeyword("StatusMaster",ui.GetFilename("MASTER") + ": camera pointing updated"); // Apply the dejittered pointing to the rest of the files step2 = true; for (int ifile = 0; ifile < (int) list.size(); ifile++) { if (list[ifile] != ui.GetFilename("MASTER")) { // Open the cube cube.Open(list[ifile],"rw"); //check for existing polygon, if exists delete it if (cube.Label()->HasObject("Polygon")){ cube.Label()->DeleteObject("Polygon"); } // Get the camera and make sure it is a line scan camera Camera *cam = cube.Camera(); if (cam->DetectorMap()->LineRate() == 0.0) { string msg = "[" + ui.GetFilename("FROM") + "] is not a line scan camera"; throw iException::Message(Isis::iException::User,msg,_FILEINFO_); } // Pull out the pointing cache as a table and write it cube.Write(cmatrix); cube.PutGroup(kernels); cube.Close(); gp += PvlKeyword("Status" + iString(ifile), list[ifile] + ": camera pointing updated"); } } Application::Log( gp ); } catch (iException &e) { string msg; if (!step2) { msg = "Unable to fit pointing for [" + ui.GetFilename("MASTER") + "]"; } else { msg = "Unable to update pointing for nonMaster file(s)"; } throw iException::Message(Isis::iException::User,msg,_FILEINFO_); } }
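/*
 * A minimal, standalone sketch (not the ISIS PixelOffset or
 * LineScanCameraRotation classes) of the idea behind "fit a polynomial to each
 * angle" above: an ordinary least-squares fit of angle samples against scaled
 * time, here solved with normal equations and naive Gauss-Jordan elimination.
 * All names below are illustrative only.
 */
#include <cstdio>
#include <vector>

// Fit c0 + c1*t + ... + c_deg*t^deg to samples (t[i], y[i]).
static std::vector<double> PolyFit(const std::vector<double> &t,
                                   const std::vector<double> &y, int deg) {
  int n = deg + 1;
  std::vector<std::vector<double> > A(n, std::vector<double>(n + 1, 0.0));
  for (size_t i = 0; i < t.size(); i++) {
    std::vector<double> tp(n, 1.0);           // powers of t[i]
    for (int k = 1; k < n; k++) tp[k] = tp[k - 1] * t[i];
    for (int r = 0; r < n; r++) {
      for (int c = 0; c < n; c++) A[r][c] += tp[r] * tp[c];
      A[r][n] += tp[r] * y[i];
    }
  }
  // Gauss-Jordan elimination (no pivoting; adequate for a small sketch).
  for (int r = 0; r < n; r++) {
    double piv = A[r][r];
    for (int c = r; c <= n; c++) A[r][c] /= piv;
    for (int r2 = 0; r2 < n; r2++) {
      if (r2 == r) continue;
      double f = A[r2][r];
      for (int c = r; c <= n; c++) A[r2][c] -= f * A[r][c];
    }
  }
  std::vector<double> coeffs(n);
  for (int r = 0; r < n; r++) coeffs[r] = A[r][n];
  return coeffs;
}

int main() {
  // Samples of y = 1 + 2t + 3t^2 should be recovered (nearly) exactly.
  std::vector<double> t, y;
  for (int i = 0; i < 10; i++) {
    t.push_back(0.1 * i);
    y.push_back(1.0 + 2.0 * t[i] + 3.0 * t[i] * t[i]);
  }
  std::vector<double> c = PolyFit(t, y, 2);
  std::printf("c0 = %.3f  c1 = %.3f  c2 = %.3f\n", c[0], c[1], c[2]);
  return 0;
}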
void IsisMain() { // Get the list of cubes to mosaic FileList imageList; UserInterface &ui = Application::GetUserInterface(); imageList.Read(ui.GetFilename("FROMLIST")); if (imageList.size() < 1) { std::string msg = "The list file [" + ui.GetFilename("FROMLIST") + "] does not contain any data"; throw iException::Message(iException::User,msg,_FILEINFO_); } // Make sure the user enters a "OUTSTATS" file if the CALCULATE option // is selected std::string processOpt = ui.GetString("PROCESS"); if (processOpt == "CALCULATE") { if (!ui.WasEntered("OUTSTATS")) { std::string msg = "If the CALCULATE option is selected, you must enter"; msg += " an OUTSTATS file"; throw iException::Message(iException::User,msg,_FILEINFO_); } } // Make sure number of bands and projection parameters match for all cubes for (unsigned int i=0; i<imageList.size(); i++) { Cube cube1; cube1.Open(imageList[i]); g_maxBand = cube1.Bands(); for (unsigned int j=(i+1); j<imageList.size(); j++) { Cube cube2; cube2.Open(imageList[j]); // Make sure number of bands match if (g_maxBand != cube2.Bands()) { string msg = "Number of bands do not match between cubes [" + imageList[i] + "] and [" + imageList[j] + "]"; throw iException::Message(iException::User,msg,_FILEINFO_); } //Create projection from each cube Projection *proj1 = cube1.Projection(); Projection *proj2 = cube2.Projection(); // Test to make sure projection parameters match if (*proj1 != *proj2) { string msg = "Mapping groups do not match between cubes [" + imageList[i] + "] and [" + imageList[j] + "]"; throw iException::Message(iException::User,msg,_FILEINFO_); } } } // Read hold list if one was entered std::vector<int> hold; if (ui.WasEntered("HOLD")) { FileList holdList; holdList.Read(ui.GetFilename("HOLD")); // Make sure each file in the holdlist matches a file in the fromlist for (int i=0; i<(int)holdList.size(); i++) { bool matched = false; for (int j=0; j<(int)imageList.size(); j++) { if (holdList[i] == imageList[j]) { matched = true; hold.push_back(j); break; } } if (!matched) { std::string msg = "The hold list file [" + holdList[i] + "] does not match a file in the from list"; throw iException::Message(iException::User,msg,_FILEINFO_); } } } // Read to list if one was entered FileList outList; if (ui.WasEntered("TOLIST")) { outList.Read(ui.GetFilename("TOLIST")); // Make sure each file in the tolist matches a file in the fromlist if (outList.size() != imageList.size()) { std::string msg = "Each input file in the FROM LIST must have a "; msg += "corresponding output file in the TO LIST."; throw iException::Message(iException::User,msg,_FILEINFO_); } // Make sure that all output files do not have the same names as their // corresponding input files for (unsigned i = 0; i < outList.size(); i++) { if (outList[i].compare(imageList[i]) == 0) { std::string msg = "The to list file [" + outList[i] + "] has the same name as its corresponding from list file."; throw iException::Message(iException::User,msg,_FILEINFO_); } } } // Test to ensure sampling percent in bound double sampPercent = ui.GetDouble("PERCENT"); if (sampPercent <= 0.0 || sampPercent > 100.0) { string msg = "The sampling percent must be a decimal (0.0, 100.0]"; throw iException::Message(iException::User,msg,_FILEINFO_); } int mincnt = ui.GetInteger("MINCOUNT"); bool wtopt = ui.GetBoolean("WEIGHT"); if (processOpt != "APPLY") { // Loop through all the input cubes, calculating statistics for each cube to use later iString maxCubeStr ((int)imageList.size()); for (int band=1; band<=g_maxBand; band++) { 
std::vector<Statistics> statsList; for (int img=0; img<(int)imageList.size(); img++) { Process p; const CubeAttributeInput att; const std::string inp = imageList[img]; Cube *icube = p.SetInputCube(inp, att); // Add a Statistics object to the list for every band of every input cube g_imageIndex = img; Statistics stats = GatherStatistics(*icube, band, sampPercent, maxCubeStr); statsList.push_back(stats); p.EndProcess(); } // Create a separate OverlapNormalization object for every band OverlapNormalization *oNorm = new OverlapNormalization (statsList); for (int h=0; h<(int)hold.size(); h++) oNorm->AddHold(hold[h]); g_oNormList.push_back(oNorm); } // A list for keeping track of which input cubes are known to overlap another std::vector<bool> doesOverlapList; for (unsigned int i=0; i<imageList.size(); i++) doesOverlapList.push_back(false); // Find overlapping areas and add them to the set of known overlaps for each // band shared amongst cubes for (unsigned int i=0; i<imageList.size(); i++){ Cube cube1; cube1.Open(imageList[i]); for (unsigned int j=(i+1); j<imageList.size(); j++) { Cube cube2; cube2.Open(imageList[j]); iString cubeStr1 ((int)(i+1)); iString cubeStr2 ((int)(j+1)); string statMsg = "Gathering Overlap Statisitcs for Cube " + cubeStr1 + " vs " + cubeStr2 + " of " + maxCubeStr; // Get overlap statistics for cubes OverlapStatistics oStats(cube1, cube2, statMsg, sampPercent); // Only push the stats onto the oList vector if there is an overlap in at // least one of the bands if (oStats.HasOverlap()) { oStats.SetMincount(mincnt); g_overlapList.push_back(oStats); for (int band=1; band<=g_maxBand; band++) { // Fill wt vector with 1's if the overlaps are not to be weighted, or // fill the vector with the number of valid pixels in each overlap int weight = 1; if (wtopt) weight = oStats.GetMStats(band).ValidPixels(); // Make sure overlap has at least MINCOUNT pixels and add if (oStats.GetMStats(band).ValidPixels() >= mincnt) { g_oNormList[band-1]->AddOverlap(oStats.GetMStats(band).X(), i, oStats.GetMStats(band).Y(), j, weight); doesOverlapList[i] = true; doesOverlapList[j] = true; } } } } } // Print an error if one or more of the images does not overlap another { std::string badFiles = ""; for (unsigned int img=0; img<imageList.size(); img++) { // Print the name of each input cube without an overlap if (!doesOverlapList[img]) { badFiles += "[" + imageList[img] + "] "; } } if (badFiles != "") { std::string msg = "File(s) " + badFiles; msg += " do(es) not overlap any other input images with enough valid pixels"; throw iException::Message(iException::User,msg,_FILEINFO_); } } // Determine whether to calculate gains or offsets std::string adjust = ui.GetString("ADJUST"); OverlapNormalization::SolutionType sType = OverlapNormalization::Both; if (adjust == "CONTRAST") sType = OverlapNormalization::Gains; if (adjust == "BRIGHTNESS") sType = OverlapNormalization::Offsets; // Loop through each band making all necessary calculations for (int band=0; band<g_maxBand; band++) { g_oNormList[band]->Solve(sType); } } // Print gathered statistics to the gui and the print file int validCnt = 0; int invalidCnt = 0; if (processOpt != "APPLY") { PvlGroup results("Results"); // Compute the number valid and invalid overlaps for (unsigned int o=0; o<g_overlapList.size(); o++) { for (int band=1; band<=g_maxBand; band++) { if (g_overlapList[o].IsValid(band)) validCnt++; else invalidCnt++; } } results += PvlKeyword("TotalOverlaps", validCnt+invalidCnt); results += PvlKeyword("ValidOverlaps", validCnt); results += 
PvlKeyword("InvalidOverlaps", invalidCnt); std::string weightStr = "false"; if (wtopt) weightStr = "true"; results += PvlKeyword("Weighted", weightStr); results += PvlKeyword("MinCount", mincnt); // Name and band modifiers for each image for (unsigned int img=0; img<imageList.size(); img++) { results += PvlKeyword("FileName", imageList[img]); // Band by band statistics for (int band=1; band<=g_maxBand; band++) { iString mult (g_oNormList[band-1]->Gain(img)); iString base (g_oNormList[band-1]->Offset(img)); iString avg (g_oNormList[band-1]->Average(img)); iString bandNum (band); std::string bandStr = "Band" + bandNum; PvlKeyword bandStats(bandStr); bandStats += mult; bandStats += base; bandStats += avg; results += bandStats; } } // Write the results to the log Application::Log(results); } // Setup the output text file if the user requested one if (ui.WasEntered("OUTSTATS")) { PvlObject equ("EqualizationInformation"); PvlGroup gen("General"); gen += PvlKeyword("TotalOverlaps", validCnt+invalidCnt); gen += PvlKeyword("ValidOverlaps", validCnt); gen += PvlKeyword("InvalidOverlaps", invalidCnt); std::string weightStr = "false"; if (wtopt) weightStr = "true"; gen += PvlKeyword("Weighted", weightStr); gen += PvlKeyword("MinCount", mincnt); equ.AddGroup(gen); for (unsigned int img=0; img<imageList.size(); img++) { // Format and name information PvlGroup norm("Normalization"); norm.AddComment("Formula: newDN = (oldDN - AVERAGE) * GAIN + AVERAGE + OFFSET"); norm.AddComment("BandN = (GAIN, OFFSET, AVERAGE)"); norm += PvlKeyword("FileName", imageList[img]); // Band by band statistics for (int band=1; band<=g_maxBand; band++) { iString mult (g_oNormList[band-1]->Gain(img)); iString base (g_oNormList[band-1]->Offset(img)); iString avg (g_oNormList[band-1]->Average(img)); iString bandNum (band); std::string bandStr = "Band" + bandNum; PvlKeyword bandStats(bandStr); bandStats += mult; bandStats += base; bandStats += avg; norm += bandStats; } equ.AddGroup(norm); } // Write the equalization and overlap statistics to the file std::string out = Filename(ui.GetFilename("OUTSTATS")).Expanded(); std::ofstream os; os.open(out.c_str(),std::ios::app); Pvl p; p.SetTerminator(""); p.AddObject(equ); os << p << std::endl; for (unsigned int i=0; i<g_overlapList.size(); i++) { os << g_overlapList[i]; if (i != g_overlapList.size()-1) os << std::endl; } os << "End"; } // Check for errors with the input statistics if (processOpt == "APPLY") { Pvl inStats (ui.GetFilename("INSTATS")); PvlObject &equalInfo = inStats.FindObject("EqualizationInformation"); // Make sure each file in the instats matches a file in the fromlist if (imageList.size() > (unsigned)equalInfo.Groups()-1) { std::string msg = "Each input file in the FROM LIST must have a "; msg += "corresponding input file in the INPUT STATISTICS."; throw iException::Message(iException::User,msg,_FILEINFO_); } // Check that each file in the FROM LIST is present in the INPUT STATISTICS for (unsigned i = 0; i < imageList.size(); i++) { std::string fromFile = imageList[i]; bool foundFile = false; for (int j = 1; j < equalInfo.Groups(); j++) { PvlGroup &normalization = equalInfo.Group(j); std::string normFile = normalization["Filename"][0]; if (fromFile == normFile) { // Store the index in INPUT STATISTICS file corresponding to the // current FROM LIST file normIndices.push_back(j); foundFile = true; } } if (!foundFile) { std::string msg = "The from list file [" + fromFile + "] does not have any corresponding file in the stats list."; throw 
iException::Message(iException::User,msg,_FILEINFO_); } } } // Apply the correction to the images if the user wants this done if (processOpt != "CALCULATE") { iString maxCubeStr ((int)imageList.size()); for (int img=0; img<(int)imageList.size(); img++) { // Set up for progress bar ProcessByLine p; iString curCubeStr (img+1); p.Progress()->SetText("Equalizing Cube " + curCubeStr + " of " + maxCubeStr); // Open input cube CubeAttributeInput att; const std::string inp = imageList[img]; Cube *icube = p.SetInputCube(inp, att); // Establish the output file depending upon whether or not a to list // was entered std::string out; if (ui.WasEntered("TOLIST")) { out = outList[img]; } else { Filename file = imageList[img]; out = file.Path() + "/" + file.Basename() + ".equ." + file.Extension(); } // Allocate output cube CubeAttributeOutput outAtt; p.SetOutputCube(out,outAtt,icube->Samples(),icube->Lines(),icube->Bands()); // Apply gain/offset to the image g_imageIndex = img; if (processOpt == "APPLY") { // Apply correction based on pre-determined statistics information Pvl inStats (ui.GetFilename("INSTATS")); PvlObject &equalInfo = inStats.FindObject("EqualizationInformation"); PvlGroup &normalization = equalInfo.Group(normIndices[g_imageIndex]); gains.clear(); offsets.clear(); avgs.clear(); // Get and store the modifiers for each band for (int band = 1; band < normalization.Keywords(); band++) { gains.push_back(normalization[band][0]); offsets.push_back(normalization[band][1]); avgs.push_back(normalization[band][2]); } p.StartProcess(ApplyViaFile); } else { // Apply correction based on the statistics gathered in this run p.StartProcess(ApplyViaObject); } p.EndProcess(); } } // Clean-up for batch list runs for (unsigned int o=0; o<g_oNormList.size(); o++) delete g_oNormList[o]; g_oNormList.clear(); g_overlapList.clear(); normIndices.clear(); gains.clear(); offsets.clear(); avgs.clear(); }
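/*
 * A minimal, standalone sketch (not the ISIS OverlapNormalization machinery)
 * of the per-pixel correction the APPLY path performs, following the formula
 * recorded in the OUTSTATS file above:
 *   newDN = (oldDN - AVERAGE) * GAIN + AVERAGE + OFFSET
 * Special-pixel handling is omitted and all names are illustrative only.
 */
#include <cstdio>
#include <vector>

static void EqualizeBand(std::vector<double> &dn,
                         double gain, double offset, double average) {
  for (size_t i = 0; i < dn.size(); i++) {
    dn[i] = (dn[i] - average) * gain + average + offset;
  }
}

int main() {
  double raw[] = { 90.0, 100.0, 110.0 };
  std::vector<double> band(raw, raw + 3);
  EqualizeBand(band, 1.5, 2.0, 100.0);   // stretch about the mean, then shift
  for (size_t i = 0; i < band.size(); i++) std::printf("%.1f\n", band[i]);
  return 0;
}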
void Histogram::InitializeFromCube(Cube &cube, const int band, Progress *progress) { // Make sure band is valid if ((band < 0) || (band > cube.Bands())) { string msg = "Invalid band in [Histogram constructor]"; throw Isis::iException::Message(Isis::iException::Programmer,msg,_FILEINFO_); } double min,max; int nbins; if (cube.PixelType() == Isis::UnsignedByte) { min = 0.0 * cube.Multiplier() + cube.Base(); max = 255.0 * cube.Multiplier() + cube.Base(); nbins = 256; } else if (cube.PixelType() == Isis::SignedWord) { min = -32768.0 * cube.Multiplier() + cube.Base(); max = 32767.0 * cube.Multiplier() + cube.Base(); nbins = 65536; } else if (cube.PixelType() == Isis::Real) { // Determine the band for statistics int bandStart = band; int bandStop = band; int maxSteps = cube.Lines(); if (band == 0){ bandStart = 1; bandStop = cube.Bands(); maxSteps = cube.Lines() * cube.Bands(); } // Construct a line buffer manager and a statistics object LineManager line(cube); Statistics stats = Statistics(); // Prep for reporting progress if necessary if (progress != NULL) { string save = progress->Text (); progress->SetText("Computing min/max for histogram"); progress->SetMaximumSteps(maxSteps); progress->CheckStatus(); } for (int useBand = bandStart ; useBand <= bandStop ; useBand++){ // Loop and get the statistics for a good minimum/maximum for (int i=1; i<=cube.Lines(); i++) { line.SetLine(i,useBand); cube.Read(line); stats.AddData (line.DoubleBuffer(),line.size()); if (progress != NULL) progress->CheckStatus(); } } // Get the min/max for constructing a histogram object if (stats.ValidPixels() == 0) { min = 0.0; max = 1.0; } else { min = stats.BestMinimum (); max = stats.BestMaximum (); } nbins = 65536; } else { std::string msg = "Unsupported pixel type"; throw iException::Message(Isis::iException::Programmer,msg,_FILEINFO_); } // Set the bins and range SetBinRange(min,max); SetBins(nbins); }
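/*
 * A minimal, standalone sketch (illustrative only) of the integer cases above:
 * raw DNs are mapped back to physical values with value = raw * multiplier +
 * base, and the bin count matches the number of representable DNs, so only
 * floating-point cubes require scanning the data for a min/max.
 */
#include <cstdio>

struct BinRange { double min; double max; int bins; };

static BinRange IntegerBinRange(bool isUnsignedByte, double mult, double base) {
  BinRange r;
  if (isUnsignedByte) {            // 8-bit: one bin per possible DN
    r.min = 0.0 * mult + base;
    r.max = 255.0 * mult + base;
    r.bins = 256;
  } else {                         // 16-bit signed
    r.min = -32768.0 * mult + base;
    r.max = 32767.0 * mult + base;
    r.bins = 65536;
  }
  return r;
}

int main() {
  BinRange r = IntegerBinRange(true, 2.0, 10.0);
  std::printf("min = %.1f  max = %.1f  bins = %d\n", r.min, r.max, r.bins);
  return 0;
}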
void IsisMain() { Process p; Cube *icube = p.SetInputCube("FROM"); // Setup the histogram UserInterface &ui = Application::GetUserInterface(); Histogram hist(*icube,1,p.Progress()); if (ui.WasEntered("MINIMUM")) { hist.SetValidRange(ui.GetDouble("MINIMUM"),ui.GetDouble("MAXIMUM")); } if (ui.WasEntered("NBINS")) { hist.SetBins(ui.GetInteger("NBINS")); } // Loop and accumulate histogram p.Progress()->SetText("Gathering Histogram"); p.Progress()->SetMaximumSteps(icube->Lines()); p.Progress()->CheckStatus(); LineManager line(*icube); for (int i=1; i<=icube->Lines(); i++) { line.SetLine(i); icube->Read(line); hist.AddData(line.DoubleBuffer(),line.size()); p.Progress()->CheckStatus(); } if(!ui.IsInteractive() || ui.WasEntered("TO")) { // Write the results if (!ui.WasEntered("TO")) { string msg = "The [TO] parameter must be entered"; throw iException::Message(iException::User,msg,_FILEINFO_); } string outfile = ui.GetFilename("TO"); ofstream fout; fout.open (outfile.c_str()); fout << "Cube: " << ui.GetFilename("FROM") << endl; fout << "Band: " << icube->Bands() << endl; fout << "Average: " << hist.Average() << endl; fout << "Std Deviation: " << hist.StandardDeviation() << endl; fout << "Variance: " << hist.Variance() << endl; fout << "Median: " << hist.Median() << endl; fout << "Mode: " << hist.Mode() << endl; fout << "Skew: " << hist.Skew() << endl; fout << "Minimum: " << hist.Minimum() << endl; fout << "Maximum: " << hist.Maximum() << endl; fout << endl; fout << "Total Pixels: " << hist.TotalPixels() << endl; fout << "Valid Pixels: " << hist.ValidPixels() << endl; fout << "Null Pixels: " << hist.NullPixels() << endl; fout << "Lis Pixels: " << hist.LisPixels() << endl; fout << "Lrs Pixels: " << hist.LrsPixels() << endl; fout << "His Pixels: " << hist.HisPixels() << endl; fout << "Hrs Pixels: " << hist.HrsPixels() << endl; // Write histogram in tabular format fout << endl; fout << endl; fout << "DN,Pixels,CumulativePixels,Percent,CumulativePercent" << endl; Isis::BigInt total = 0; double cumpct = 0.0; for (int i=0; i<hist.Bins(); i++) { if (hist.BinCount(i) > 0) { total += hist.BinCount(i); double pct = (double)hist.BinCount(i) / hist.ValidPixels() * 100.; cumpct += pct; fout << hist.BinMiddle(i) << ","; fout << hist.BinCount(i) << ","; fout << total << ","; fout << pct << ","; fout << cumpct << endl; } } fout.close(); } // If we are in gui mode, create a histogram plot if (ui.IsInteractive()) { // Set the title for the dialog string title; if (ui.WasEntered("TITLE")) { title = ui.GetString("TITLE"); } else { title = "Histogram Plot for " + Filename(ui.GetAsString("FROM")).Name(); } // Create the QHistogram, set the title & load the Isis::Histogram into it Qisis::HistogramToolWindow *plot = new Qisis::HistogramToolWindow(title.c_str(), ui.TheGui()); // Set the xaxis title if they entered one if (ui.WasEntered("XAXIS")) { string xaxis(ui.GetString("XAXIS")); plot->setAxisLabel(QwtPlot::xBottom,xaxis.c_str()); } // Set the yLeft axis title if they entered one if (ui.WasEntered("Y1AXIS")) { string yaxis(ui.GetString("Y1AXIS")); plot->setAxisLabel(QwtPlot::yLeft,yaxis.c_str()); } // Set the yRight axis title if they entered one if (ui.WasEntered("Y2AXIS")) { string y2axis(ui.GetString("Y2AXIS")); plot->setAxisLabel(QwtPlot::yRight,y2axis.c_str()); } //Transfer data from histogram to the plotcurve std::vector<double> xarray,yarray,y2array; double cumpct = 0.0; for (int i=0; i<hist.Bins(); i++) { if (hist.BinCount(i) > 0) { xarray.push_back(hist.BinMiddle(i)); yarray.push_back(hist.BinCount(i)); double 
pct = (double)hist.BinCount(i) / hist.ValidPixels() * 100.; cumpct += pct; y2array.push_back(cumpct); } } Qisis::HistogramItem *histCurve = new Qisis::HistogramItem(); histCurve->setColor(Qt::darkCyan); histCurve->setTitle("Frequency"); Qisis::PlotToolCurve *cdfCurve = new Qisis::PlotToolCurve(); cdfCurve->setStyle(QwtPlotCurve::Lines); cdfCurve->setTitle("Percentage"); QPen *pen = new QPen(Qt::red); pen->setWidth(2); histCurve->setYAxis(QwtPlot::yLeft); cdfCurve->setYAxis(QwtPlot::yRight); cdfCurve->setPen(*pen); //These are all variables needed in the following for loop. //---------------------------------------------- QwtArray<QwtDoubleInterval> intervals(xarray.size()); QwtArray<double> values(yarray.size()); double maxYValue = DBL_MIN; double minYValue = DBL_MAX; // --------------------------------------------- for(unsigned int y = 0; y < yarray.size(); y++) { intervals[y] = QwtDoubleInterval(xarray[y], xarray[y] + hist.BinSize()); values[y] = yarray[y]; if(values[y] > maxYValue) maxYValue = values[y]; if(values[y] < minYValue) minYValue = values[y]; } histCurve->setData(QwtIntervalData(intervals, values)); cdfCurve->setData(&xarray[0],&y2array[0],xarray.size()); plot->add(histCurve); plot->add(cdfCurve); plot->fillTable(); plot->setScale(QwtPlot::yLeft,0,maxYValue); plot->setScale(QwtPlot::xBottom,hist.Minimum(),hist.Maximum()); QLabel *label = new QLabel(" Average = " + QString::number(hist.Average()) + '\n' + "\n Minimum = " + QString::number(hist.Minimum()) + '\n' + "\n Maximum = " + QString::number(hist.Maximum()) + '\n' + "\n Stand. Dev.= " + QString::number(hist.StandardDeviation()) + '\n' + "\n Variance = " + QString::number(hist.Variance()) + '\n' + "\n Median = " + QString::number(hist.Median()) + '\n' + "\n Mode = " + QString::number(hist.Mode()) +'\n' + "\n Skew = " + QString::number(hist.Skew()), plot); plot->getDockWidget()->setWidget(label); plot->showWindow(); } p.EndProcess(); }
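/*
 * A minimal, standalone sketch (no Qwt or ISIS types) of the cumulative
 * percentage series written to the text file and plotted on the right axis
 * above: a running sum of each bin's share of the valid pixels.  Names are
 * illustrative only.
 */
#include <cstdio>
#include <vector>

static std::vector<double> CumulativePercent(const std::vector<long> &counts) {
  long valid = 0;
  for (size_t i = 0; i < counts.size(); i++) valid += counts[i];
  std::vector<double> cum(counts.size(), 0.0);
  if (valid == 0) return cum;               // nothing to accumulate
  double running = 0.0;
  for (size_t i = 0; i < counts.size(); i++) {
    running += 100.0 * counts[i] / valid;   // percent of valid pixels so far
    cum[i] = running;
  }
  return cum;
}

int main() {
  long raw[] = { 10, 30, 40, 20 };
  std::vector<long> counts(raw, raw + 4);
  std::vector<double> cum = CumulativePercent(counts);
  for (size_t i = 0; i < cum.size(); i++) std::printf("%.1f\n", cum[i]);
  return 0;
}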
void IsisMain() { UserInterface &ui = Application::GetUserInterface(); ProcessByLine p; Cube *icube = p.SetInputCube("FROM"); numIgnoredLines = 0; cubeAverage.resize(icube->Bands()); lineAverages.resize(icube->Bands()); for(int i = 0; i < icube->Bands(); i++) { cubeAverage[i] = 0; lineAverages[i] = NULL; } int boxcarSize; if(ui.GetString("BOXTYPE").compare("NONE") == 0) { boxcarSize = (int)(icube->Lines() * 0.10); } else if(ui.GetString("BOXTYPE").compare("ABSOLUTE") == 0) { boxcarSize = ui.GetInteger("BOXSIZE"); } else if(ui.GetString("BOXTYPE").compare("PERCENTAGE") == 0) { boxcarSize = (int)(((double)ui.GetInteger("BOXSIZE") / 100.0) * icube->Lines()); } // Boxcar must be odd size if(boxcarSize % 2 != 1) { boxcarSize ++; } PvlGroup data("lineeq"); data += PvlKeyword("BoxcarSize", boxcarSize, "lines"); data += PvlKeyword("OutputCsv", ui.GetBoolean("AVERAGES")); TextFile *csvOutput = NULL; if(ui.GetBoolean("AVERAGES")) { csvOutput = new TextFile(ui.GetFilename("CSV"), "overwrite", ""); csvOutput->PutLine("Average,SmoothedAvg"); data += PvlKeyword("CsvFile", ui.GetFilename("CSV")); } Application::Log(data); for(int band = 0; band < icube->Bands(); band ++) { lineAverages[band] = new double[icube->Lines()]; } p.Progress()->SetText("Gathering line averages"); p.StartProcess(gatherAverages); // Now filter the bands p.Progress()->SetText("Smoothing line averages"); p.Progress()->SetMaximumSteps((icube->Bands() + 1) * icube->Lines()); p.Progress()->CheckStatus(); QuickFilter filter(icube->Lines(), boxcarSize, 1); if(icube->Lines() <= numIgnoredLines) { throw iException::Message(iException::User, "Image does not contain any valid data.", _FILEINFO_); } for(int band = 0; band < icube->Bands(); band ++) { cubeAverage[band] /= (icube->Lines() - numIgnoredLines); filter.AddLine(lineAverages[band]); for(int line = 0; line < icube->Lines(); line ++) { p.Progress()->CheckStatus(); double filteredLine = filter.Average(line); if(csvOutput != NULL) { csvOutput->PutLine((iString)lineAverages[band][line] + (iString)"," + (iString)filteredLine); } lineAverages[band][line] = filteredLine; } filter.RemoveLine(lineAverages[band]); } if(csvOutput != NULL) { delete csvOutput; // This closes the file automatically csvOutput = NULL; } p.SetOutputCube("TO"); p.Progress()->SetText("Applying Equalization"); p.StartProcess(apply); for(int band = 0; band < icube->Bands(); band ++) { delete [] lineAverages[band]; lineAverages[band] = NULL; } p.EndProcess(); }
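/*
 * A minimal, standalone sketch (not the ISIS QuickFilter class) of the
 * smoothing step above: a centered boxcar (moving average) over the per-line
 * averages, with the window clipped at the image edges.  Odd window sizes keep
 * the boxcar centered, which is why an even BOXSIZE is bumped by one.  Names
 * are illustrative only.
 */
#include <cstdio>
#include <vector>

static std::vector<double> BoxcarSmooth(const std::vector<double> &avg,
                                        int boxcar) {
  int half = boxcar / 2;
  int n = (int)avg.size();
  std::vector<double> out(avg.size());
  for (int i = 0; i < n; i++) {
    int lo = (i - half < 0) ? 0 : i - half;
    int hi = (i + half > n - 1) ? n - 1 : i + half;
    double sum = 0.0;
    for (int j = lo; j <= hi; j++) sum += avg[j];
    out[i] = sum / (hi - lo + 1);            // average over the clipped window
  }
  return out;
}

int main() {
  double raw[] = { 10.0, 12.0, 30.0, 11.0, 9.0 };
  std::vector<double> avg(raw, raw + 5);
  std::vector<double> smooth = BoxcarSmooth(avg, 3);
  for (size_t i = 0; i < smooth.size(); i++) std::printf("%.2f\n", smooth[i]);
  return 0;
}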