void IsisMain(){ const std::string hical_program = "hicalbeta"; const std::string hical_version = "3.5"; const std::string hical_revision = "$Revision: 1.14 $"; const std::string hical_runtime = Application::DateTime(); UserInterface &ui = Application::GetUserInterface(); string procStep("prepping phase"); try { // The output from the last processing is the input into subsequent processing ProcessByLine p; Cube *hifrom = p.SetInputCube("FROM"); int nsamps = hifrom->Samples(); int nlines = hifrom->Lines(); // Initialize the configuration file string conf(ui.GetAsString("CONF")); HiCalConf hiconf(*(hifrom->Label()), conf); DbProfile hiprof = hiconf.getMatrixProfile(); // Check for label propagation and set the output cube Cube *ocube = p.SetOutputCube("TO"); if ( !IsTrueValue(hiprof,"PropagateTables", "TRUE") ) { RemoveHiBlobs(*(ocube->Label())); } // Set specified profile if entered by user if (ui.WasEntered("PROFILE")) { hiconf.selectProfile(ui.GetAsString("PROFILE")); } // Add OPATH parameter to profiles if (ui.WasEntered("OPATH")) { hiconf.add("OPATH",ui.GetAsString("OPATH")); } else { // Set default to output directory hiconf.add("OPATH", Filename(ocube->Filename()).Path()); } // Do I/F output DN conversions string units = ui.GetString("UNITS"); // Allocate the calibration list calVars = new MatrixList; // Set up access to HiRISE ancillary data (tables, blobs) here. Note it they // are gone, this will error out. See PropagateTables in conf file. HiCalData caldata(*hifrom); //////////////////////////////////////////////////////////////////////////// // FixGaps (Z_f) Get buffer pixels and compute coefficients for equation // y = a[0] + a[1]*x + a[2] * exp(a[3] * x) // where y is the average of the buffer pixel region, // and x is the time at each line in electrons/sec/pixel procStep = "Zf module"; hiconf.selectProfile("Zf"); hiprof = hiconf.getMatrixProfile(); HiHistory ZfHist; ZfHist.add("Profile["+ hiprof.Name()+"]"); if ( !SkipModule(hiprof) ) { DriftBuffer driftB(caldata, hiconf); calVars->add("Zf", driftB.ref()); ZfHist = driftB.History(); if ( hiprof.exists("DumpModuleFile") ) { driftB.Dump(hiconf.getMatrixSource("DumpModuleFile",hiprof)); } } else { // NOT RECOMMENDED! This is required for the next step! // SURELY must be skipped with Z_d step as well! 
calVars->add("Zf", HiVector(nlines, 0.0)); ZfHist.add("Debug::SkipModule invoked!"); } ///////////////////////////////////////////////////////////////////// // DriftCorrect (Z_d) // Now compute the equation of fit // procStep = "Zd module"; HiHistory ZdHist; hiconf.selectProfile("Zd"); hiprof = hiconf.getMatrixProfile(); ZdHist.add("Profile["+ hiprof.Name()+"]"); if (!SkipModule(hiconf.getMatrixProfile("Zd")) ) { DriftCorrect driftC(hiconf); calVars->add("Zd", driftC.Normalize(driftC.Solve(calVars->get("Zf")))); ZdHist = driftC.History(); if ( hiprof.exists("DumpModuleFile") ) { driftC.Dump(hiconf.getMatrixSource("DumpModuleFile",hiprof)); } } else { calVars->add("Zd", HiVector(nlines, 0.0)); ZdHist.add("Debug::SkipModule invoked!"); } //////////////////////////////////////////////////////////////////// // ZeroCorrect (Z_z) Get reverse clock procStep = "Zz module"; hiconf.selectProfile("Zz"); hiprof = hiconf.getMatrixProfile(); HiHistory ZzHist; ZzHist.add("Profile["+ hiprof.Name()+"]"); if ( !SkipModule(hiprof) ) { OffsetCorrect zoff(caldata, hiconf); calVars->add("Zz", zoff.ref()); ZzHist = zoff.History(); if ( hiprof.exists("DumpModuleFile") ) { zoff.Dump(hiconf.getMatrixSource("DumpModuleFile",hiprof)); } } else { calVars->add("Zz", HiVector(nsamps, 0.0)); ZzHist.add("Debug::SkipModule invoked!"); } ///////////////////////////////////////////////////////////////// // DarkSubtract (Z_b) Remove dark current // procStep = "Zb module"; hiconf.selectProfile("Zb"); hiprof = hiconf.getMatrixProfile(); HiHistory ZbHist; ZbHist.add("Profile["+ hiprof.Name()+"]"); if ( !SkipModule(hiprof) ) { DarkSubtractComp dark(hiconf); calVars->add("Zb", dark.ref()); ZbHist = dark.History(); if ( hiprof.exists("DumpModuleFile") ) { dark.Dump(hiconf.getMatrixSource("DumpModuleFile",hiprof)); } } else { calVars->add("Zb", HiVector(nsamps, 0.0)); ZbHist.add("Debug::SkipModule invoked!"); } //////////////////////////////////////////////////////////////////// // GainVLineCorrect (Z_g) Correct for gain-based drift // procStep = "Zg module"; hiconf.selectProfile("Zg"); hiprof = hiconf.getMatrixProfile(); HiHistory ZgHist; ZgHist.add("Profile["+ hiprof.Name()+"]"); if ( !SkipModule(hiprof) ) { GainVLineComp gainV(hiconf); calVars->add("Zg", gainV.ref()); ZgHist = gainV.History(); if ( hiprof.exists("DumpModuleFile") ) { gainV.Dump(hiconf.getMatrixSource("DumpModuleFile",hiprof)); } } else { calVars->add("Zg", HiVector(nlines, 1.0)); ZgHist.add("Debug::SkipModule invoked!"); } //////////////////////////////////////////////////////////////////// // GainCorrect (Z_gg) Correct for gain with the G matrix procStep = "Zgg module"; hiconf.selectProfile("Zgg"); hiprof = hiconf.getMatrixProfile(); HiHistory ZggHist; ZggHist.add("Profile["+ hiprof.Name()+"]"); if ( !SkipModule(hiprof) ) { double bin = ToDouble(hiprof("Summing")); double tdi = ToDouble(hiprof("Tdi")); double factor = 128.0 / tdi / (bin*bin); HiVector zgg = hiconf.getMatrix("G", hiprof); for ( int i = 0 ; i < zgg.dim() ; i++ ) { zgg[i] *= factor; } calVars->add("Zgg", zgg);; ZggHist.add("LoadMatrix(G[" + hiconf.getMatrixSource("G",hiprof) + "],Band[" + ToString(hiconf.getMatrixBand(hiprof)) + "],Factor[" + ToString(factor) + "])"); if ( hiprof.exists("DumpModuleFile") ) { Component zg("GMatrix", ZggHist); zg.Process(zgg); zg.Dump(hiconf.getMatrixSource("DumpModuleFile",hiprof)); } } else { calVars->add("Zgg", HiVector(nsamps, 1.0)); ZggHist.add("Debug::SkipModule invoked!"); } //////////////////////////////////////////////////////////////////// // FlatField 
(Z_a) Flat field correction with A matrix procStep = "Za module"; hiconf.selectProfile("Za"); hiprof = hiconf.getMatrixProfile(); HiHistory ZaHist; ZaHist.add("Profile["+ hiprof.Name()+"]"); if ( !SkipModule(hiprof) ) { FlatFieldComp flat(hiconf); calVars->add("Za", flat.ref()); ZaHist = flat.History(); if ( hiprof.exists("DumpModuleFile") ) { flat.Dump(hiconf.getMatrixSource("DumpModuleFile",hiprof)); } } else { calVars->add("Za", HiVector(nsamps, 1.0)); ZaHist.add("Debug::SkipModule invoked!"); } //////////////////////////////////////////////////////////////////// // FlatField (Z_t) Temperature-dependant gain correction procStep = "Zt module"; hiconf.selectProfile("Zt"); hiprof = hiconf.getMatrixProfile(); HiHistory ZtHist; ZtHist.add("Profile["+ hiprof.Name()+"]"); if ( !SkipModule(hiprof) ) { TempGainCorrect tcorr(hiconf); calVars->add("Zt", tcorr.ref()); ZtHist = tcorr.History(); if ( hiprof.exists("DumpModuleFile") ) { tcorr.Dump(hiconf.getMatrixSource("DumpModuleFile",hiprof)); } } else { calVars->add("Zt", HiVector(nsamps, 1.0)); ZtHist.add("Debug::SkipModule invoked!"); } //////////////////////////////////////////////////////////////////// // I/FCorrect (Z_iof) Conversion to I/F // procStep = "Ziof module"; hiconf.selectProfile("Ziof"); hiprof = hiconf.getMatrixProfile(); HiHistory ZiofHist; ZiofHist.add("Profile["+ hiprof.Name()+"]"); if ( !SkipModule(hiprof) ) { double sed = ToDouble(hiprof("ScanExposureDuration")); // units = us if ( IsEqual(units, "IOF") ) { // Add solar I/F correction parameters double au = hiconf.sunDistanceAU(); ZiofHist.add("SunDist[" + ToString(au) + " (AU)]"); double suncorr = 1.5 / au; suncorr *= suncorr; double zbin = ToDouble(hiprof("ZiofBinFactor")); ZiofHist.add("ZiofBinFactor[" + ToString(zbin) + "]"); double zgain = ToDouble(hiprof("FilterGainCorrection")); ZiofHist.add("FilterGainCorrection[" + ToString(zgain) + "]"); ZiofHist.add("ScanExposureDuration[" + ToString(sed) + "]"); double ziof = (zbin * zgain) * (sed * 1.0e-6) * suncorr; calVars->add("Ziof", HiVector(1, ziof)); ZiofHist.add("I/F_Factor[" + ToString(ziof) + "]"); ZiofHist.add("Units[I/F Reflectance]"); } else if ( IsEqual(units, "DN/US") ) { // Ziof is a divisor in calibration equation double ziof = sed; calVars->add("Ziof", HiVector(1, ziof)); ZiofHist.add("ScanExposureDuration[" + ToString(sed) + "]"); ZiofHist.add("DN/US_Factor[" + ToString(ziof) + "]"); ZiofHist.add("Units[DNs/microsecond]"); } else { // Units are already in DN double ziof = 1.0; calVars->add("Ziof", HiVector(1, ziof)); ZiofHist.add("DN_Factor[" + ToString(ziof) + "]"); ZiofHist.add("Units[DN]"); } } else { calVars->add("Ziof", HiVector(1,1.0)); ZiofHist.add("Debug::SkipModule invoked!"); ZiofHist.add("Units[Unknown]"); } // Reset the profile selection to default hiconf.selectProfile(); //---------------------------------------------------------------------- // ///////////////////////////////////////////////////////////////////////// // Call the processing function procStep = "calibration phase"; p.StartProcess(calibrate); // Get the default profile for logging purposes hiprof = hiconf.getMatrixProfile(); const std::string conf_file = hiconf.filepath(conf); // Quitely dumps parameter history to alternative format file. 
This // is completely controlled by the configuration file if ( hiprof.exists("DumpHistoryFile") ) { procStep = "logging/reporting phase"; Filename hdump(hiconf.getMatrixSource("DumpHistoryFile",hiprof)); string hdumpFile = hdump.Expanded(); ofstream ofile(hdumpFile.c_str(), ios::out); if (!ofile) { string mess = "Unable to open/create history dump file " + hdump.Expanded(); iException::Message(iException::User, mess, _FILEINFO_).Report(); } else { ofile << "Program: " << hical_program << endl; ofile << "RunTime: " << hical_runtime << endl; ofile << "Version: " << hical_version << endl; ofile << "Revision: " << hical_revision << endl << endl; ofile << "FROM: " << hifrom->Filename() << endl; ofile << "TO: " << ocube->Filename() << endl; ofile << "CONF: " << conf_file << endl << endl; ofile << "/* " << hical_program << " application equation */" << endl << "/* hdn = (idn - Zd(Zf) - Zz - Zb) */" << endl << "/* odn = hdn / Zg * Zgg * Za * Zt / Ziof */" << endl << endl; ofile << "****** PARAMETER GENERATION HISTORY *******" << endl; ofile << "\nZf = " << ZfHist << endl; ofile << "\nZd = " << ZdHist << endl; ofile << "\nZz = " << ZzHist << endl; ofile << "\nZb = " << ZbHist << endl; ofile << "\nZg = " << ZgHist << endl; ofile << "\nZgg = " << ZggHist << endl; ofile << "\nZa = " << ZaHist << endl; ofile << "\nZt = " << ZtHist << endl; ofile << "\nZiof = " << ZiofHist << endl; ofile.close(); } } // Ensure the RadiometricCalibration group is out there const std::string rcalGroup("RadiometricCalibration"); if (!ocube->HasGroup(rcalGroup)) { PvlGroup temp(rcalGroup); ocube->PutGroup(temp); } PvlGroup &rcal = ocube->GetGroup(rcalGroup); rcal += PvlKeyword("Program", hical_program); rcal += PvlKeyword("RunTime", hical_runtime); rcal += PvlKeyword("Version",hical_version); rcal += PvlKeyword("Revision",hical_revision); PvlKeyword key("Conf", conf_file); key.AddCommentWrapped("/* " + hical_program + " application equation */"); key.AddComment("/* hdn = (idn - Zd(Zf) - Zz - Zb) */"); key.AddComment("/* odn = hdn / Zg * Zgg * Za * Zt / Ziof */"); rcal += key; // Record parameter generation history. Controllable in configuration // file. Note this is optional because of a BUG!! in the ISIS label // writer as this application was initially developed if ( IsEqual(ConfKey(hiprof,"LogParameterHistory",string("TRUE")),"TRUE")) { rcal += ZfHist.makekey("Zf"); rcal += ZdHist.makekey("Zd"); rcal += ZzHist.makekey("Zz"); rcal += ZbHist.makekey("Zb"); rcal += ZgHist.makekey("Zg"); rcal += ZggHist.makekey("Zgg"); rcal += ZaHist.makekey("Za"); rcal += ZiofHist.makekey("Ziof"); } p.EndProcess(); } catch (iException &ie) { delete calVars; calVars = 0; string mess = "Failed in " + procStep; ie.Message(iException::User, mess.c_str(), _FILEINFO_); throw; } // Clean up parameters delete calVars; calVars = 0; }
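// Standalone illustrative sketch (not the hicalbeta calibrate() callback, which is not
// shown in this excerpt): the per-pixel arithmetic stated in the equation comments above,
// plus the I/F divisor assembled by the Ziof module. The helper names and all numeric
// values in main() are made up for illustration only.
#include <iostream>

// hdn = (idn - Zd(Zf) - Zz - Zb);  odn = hdn / Zg * Zgg * Za * Zt / Ziof
// (evaluated left to right, exactly as written in the history comment above)
static double calibratePixel(double idn, double zdOfZf, double zz, double zb,
                             double zg, double zgg, double za, double zt,
                             double ziof) {
  double hdn = idn - zdOfZf - zz - zb;
  return hdn / zg * zgg * za * zt / ziof;
}

// UNITS=IOF divisor: (ZiofBinFactor * FilterGainCorrection) *
// (ScanExposureDuration converted from microseconds to seconds) * (1.5 / AU)^2,
// mirroring the Ziof module above.
static double iofFactor(double zbin, double zgain, double sedMicroseconds,
                        double sunDistAU) {
  double suncorr = 1.5 / sunDistAU;
  suncorr *= suncorr;
  return (zbin * zgain) * (sedMicroseconds * 1.0e-6) * suncorr;
}

int main() {
  double ziof = iofFactor(1.0, 1.0e-4, 100.0, 1.52);   // illustrative inputs
  std::cout << calibratePixel(1200.0, 35.0, 10.0, 5.0, 1.0, 1.0, 1.0, 1.0, ziof)
            << std::endl;
  return 0;
}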
void IsisMain() { // We will be processing by brick ProcessByBrick p; Isis::Cube *amatrixCube=NULL; Isis::Cube *bmatrixCube=NULL; // Setup the user input for the input/output files and the option UserInterface &ui = Application::GetUserInterface(); // Setup the input HiRise cube Isis::Cube *icube = p.SetInputCube("FROM"); if (icube->Bands() != 1) { std::string msg = "Only single-band HiRise cubes can be calibrated"; throw Isis::iException::Message(Isis::iException::Io,msg,_FILEINFO_); } //Get pertinent label information to determine which band of matrix cube to use HiLab hilab(icube); int ccd = hilab.getCcd(); int channel = hilab.getChannel(); if (channel != 0 && channel != 1) { std::string msg = "Only unstitched cubes can be calibrated"; throw Isis::iException::Message(Isis::iException::Io,msg,_FILEINFO_); } int band = 1 + ccd*2 + channel; string option = ui.GetString("OPTION"); // Set attributes (input band number) for the matrix cube(s); CubeAttributeInput att("+" + iString(band)); // Determine the file specification to the matrix file(s) if defaulted // and open if (ui.WasEntered ("MATRIX") ) { if (option == "GAIN") { string matrixFile = ui.GetFilename("MATRIX"); amatrixCube = p.SetInputCube(matrixFile, att); } else if (option == "OFFSET") { string matrixFile = ui.GetFilename("MATRIX"); bmatrixCube = p.SetInputCube(matrixFile, att); } else { //(option == "BOTH") std::string msg = "The BOTH option cannot be used if a MATRIX is entered"; throw Isis::iException::Message(Isis::iException::Io,msg,_FILEINFO_); } } else { int tdi = hilab.getTdi(); int bin = hilab.getBin(); if (option == "OFFSET" || option == "BOTH") { std::string bmatrixFile = "$mro/calibration"; bmatrixFile += "/B_matrix_tdi"; bmatrixFile += iString(tdi) + "_bin" + iString(bin); bmatrixCube = p.SetInputCube(bmatrixFile, att); } if (option == "GAIN" || option == "BOTH") { std::string amatrixFile = "$mro/calibration"; amatrixFile += "/A_matrix_tdi"; amatrixFile += iString(tdi) + "_bin" + iString(bin); amatrixCube = p.SetInputCube(amatrixFile, att); } } // Open the output file and set processing parameters Cube *ocube = p.SetOutputCube ("TO"); p.SetWrap (true); p.SetBrickSize ( icube->Samples(), 1, 1); // Add the radiometry group if it is not there yet. Otherwise // read the current value of the keyword CalibrationParameters. // Then delete the keyword and rewrite it after appending the // new value to it. Do it this way to avoid multiple Calibration // Parameter keywords. PvlGroup calgrp; PvlKeyword calKey; if (ocube->HasGroup("Radiometry")) { calgrp = ocube->GetGroup ("Radiometry"); if (calgrp.HasKeyword("CalibrationParameters")) { calKey = calgrp.FindKeyword("CalibrationParameters"); calgrp.DeleteKeyword( "CalibrationParameters" ); } else { calKey.SetName ("CalibrationParameters"); } } else { calgrp.SetName("Radiometry"); calKey.SetName ("CalibrationParameters"); } string keyValue = option; if (option == "GAIN") { keyValue += ":" + amatrixCube->Filename(); } else if (option == "OFFSET") { keyValue += ":" + bmatrixCube->Filename(); } else { // "BOTH" keyValue += ":"+bmatrixCube->Filename()+":"+amatrixCube->Filename(); } calKey += keyValue; calgrp += calKey; ocube->PutGroup(calgrp); // Start the processing based on the option if (option == "GAIN") { p.StartProcess(mult); } else if (option == "OFFSET") { p.StartProcess(sub); } else { //(option == "BOTH") p.StartProcess(multSub); } // Cleanup p.EndProcess(); }
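// Standalone illustrative sketch (not part of the matrix-calibration application above):
// how the matrix-cube band number and the default matrix file specifications are built
// from the label values used above (CCD, channel, TDI, binning). The helper names and
// the numbers in main() are illustrative only.
#include <iostream>
#include <string>

// Matrix cubes store one band per CCD/channel pair; bands are 1-based.
static int matrixBand(int ccd, int channel) {
  return 1 + ccd * 2 + channel;
}

// Default file specification assembled above when MATRIX is not entered.
static std::string defaultMatrixFile(char matrix, int tdi, int bin) {
  return std::string("$mro/calibration/") + matrix + "_matrix_tdi" +
         std::to_string(tdi) + "_bin" + std::to_string(bin);
}

int main() {
  std::cout << matrixBand(5, 1) << std::endl;                 // CCD 5, channel 1 -> band 12
  std::cout << defaultMatrixFile('A', 128, 2) << std::endl;   // $mro/calibration/A_matrix_tdi128_bin2
  return 0;
}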
void IsisMain () { stretch.ClearPairs(); for (int i=0; i<6; i++) { gapCount[i] = 0; suspectGapCount[i] = 0; invalidCount[i] = 0; lisCount[i] = 0; hisCount[i] = 0; validCount[i] = 0; } void TranslateHiriseEdrLabels (Filename &labelFile, Cube *); void SaveHiriseCalibrationData (ProcessImportPds &process, Cube *, Pvl &pdsLabel); void SaveHiriseAncillaryData (ProcessImportPds &process, Cube *); void FixDns8 (Buffer &buf); void FixDns16 (Buffer &buf); ProcessImportPds p; Pvl pdsLabel; UserInterface &ui = Application::GetUserInterface(); // Get the input filename and make sure it is a HiRISE EDR Filename inFile = ui.GetFilename("FROM"); iString id; bool projected; try { Pvl lab(inFile.Expanded()); id = (string) lab.FindKeyword ("DATA_SET_ID"); projected = lab.HasObject("IMAGE_MAP_PROJECTION"); } catch (iException &e) { string msg = "Unable to read [DATA_SET_ID] from input file [" + inFile.Expanded() + "]"; throw iException::Message(iException::Io,msg, _FILEINFO_); } //Checks if in file is rdr if( projected ) { string msg = "[" + inFile.Name() + "] appears to be an rdr file."; msg += " Use pds2isis."; throw iException::Message(iException::User,msg, _FILEINFO_); } id.ConvertWhiteSpace(); id.Compress(); id.Trim(" "); if (id != "MRO-M-HIRISE-2-EDR-V1.0") { string msg = "Input file [" + inFile.Expanded() + "] does not appear to be " + "in HiRISE EDR format. DATA_SET_ID is [" + id + "]"; throw iException::Message(iException::Io,msg, _FILEINFO_); } p.SetPdsFile (inFile.Expanded(), "", pdsLabel); // Make sure the data we need for the BLOBs is saved by the Process p.SaveFileHeader(); p.SaveDataPrefix(); p.SaveDataSuffix(); // Let the Process create the output file but override any commandline // output bit type and min/max. It has to be 16bit for the rest of hi2isis // to run. // Setting the min/max to the 16 bit min/max keeps all the dns (including // the 8 bit special pixels from changing their value when they are mapped // to the 16 bit output. CubeAttributeOutput &outAtt = ui.GetOutputAttribute("TO"); outAtt.PixelType (Isis::SignedWord); outAtt.Minimum((double)VALID_MIN2); outAtt.Maximum((double)VALID_MAX2); Cube *ocube = p.SetOutputCube(ui.GetFilename("TO"), outAtt); p.StartProcess (); TranslateHiriseEdrLabels (inFile, ocube); // Pull out the lookup table so we can apply it in the second pass // and remove it from the labels. // Add the UNLUTTED keyword to the instrument group so we know // if the lut has been used to convert back to 14 bit data PvlGroup &instgrp = ocube->GetGroup("Instrument"); PvlKeyword lutKey = instgrp["LookupTable"]; PvlSequence lutSeq; lutSeq = lutKey; // Set up the Stretch object with the info from the lookup table // If the first entry is (0,0) then no lut was applied. if ((lutKey.IsNull()) || (lutSeq.Size()==1 && lutSeq[0][0]=="0" && lutSeq[0][1]=="0")) { stretch.AddPair(0.0, 0.0); stretch.AddPair(65536.0, 65536.0); instgrp.AddKeyword(PvlKeyword("Unlutted","TRUE")); instgrp.DeleteKeyword ("LookupTable"); } // The user wants it unlutted else if (ui.GetBoolean("UNLUT")) { for (int i=0; i<lutSeq.Size(); i++) { stretch.AddPair(i, (((double)lutSeq[i][0] + (double)lutSeq[i][1]) / 2.0)); } instgrp.AddKeyword(PvlKeyword("Unlutted","TRUE")); instgrp.DeleteKeyword ("LookupTable"); } // The user does not want the data unlutted else { stretch.AddPair(0.0, 0.0); stretch.AddPair(65536.0, 65536.0); instgrp.AddKeyword(PvlKeyword("Unlutted","FALSE")); } ocube->PutGroup(instgrp); // Save the calibration and ancillary data as BLOBs. 
Both get run thru the // lookup table just like the image data. SaveHiriseCalibrationData (p, ocube, pdsLabel); SaveHiriseAncillaryData (p, ocube); // Save off the input bit type so we know how to process it on the // second pass below. Isis::PixelType inType = p.PixelType(); // All finished with the ImportPds object p.EndProcess (); // Make another pass thru the data using the output file in read/write mode // This allows us to correct gaps, remap special pixels and accumulate some // counts lsbGap = ui.GetBoolean("LSBGAP"); ProcessByLine p2; string ioFile = ui.GetFilename("TO"); CubeAttributeInput att; p2.SetInputCube(ioFile, att, ReadWrite); p2.Progress()->SetText("Converting special pixels"); section = 4; p2.StartProcess((inType == Isis::UnsignedByte) ? FixDns8 : FixDns16); p2.EndProcess(); // Log the results of the image conversion PvlGroup results("Results"); results += PvlKeyword ("From", inFile.Expanded()); results += PvlKeyword ("CalibrationBufferGaps", gapCount[0]); results += PvlKeyword ("CalibrationBufferLIS", lisCount[0]); results += PvlKeyword ("CalibrationBufferHIS", hisCount[0]); results += PvlKeyword ("CalibrationBufferPossibleGaps", suspectGapCount[0]); results += PvlKeyword ("CalibrationBufferInvalid", invalidCount[0]); results += PvlKeyword ("CalibrationBufferValid", validCount[0]); results += PvlKeyword ("CalibrationImageGaps", gapCount[1]); results += PvlKeyword ("CalibrationImageLIS", lisCount[1]); results += PvlKeyword ("CalibrationImageHIS", hisCount[1]); results += PvlKeyword ("CalibrationImagePossibleGaps", suspectGapCount[1]); results += PvlKeyword ("CalibrationImageInvalid", invalidCount[1]); results += PvlKeyword ("CalibrationImageValid", validCount[1]); results += PvlKeyword ("CalibrationDarkGaps", gapCount[2]); results += PvlKeyword ("CalibrationDarkLIS", lisCount[2]); results += PvlKeyword ("CalibrationDarkHIS", hisCount[2]); results += PvlKeyword ("CalibrationDarkPossibleGaps", suspectGapCount[2]); results += PvlKeyword ("CalibrationDarkInvalid", invalidCount[2]); results += PvlKeyword ("CalibrationDarkValid", validCount[2]); results += PvlKeyword ("ObservationBufferGaps", gapCount[3]); results += PvlKeyword ("ObservationBufferLIS", lisCount[3]); results += PvlKeyword ("ObservationBufferHIS", hisCount[3]); results += PvlKeyword ("ObservationBufferPossibleGaps", suspectGapCount[3]); results += PvlKeyword ("ObservationBufferInvalid", invalidCount[3]); results += PvlKeyword ("ObservationBufferValid", validCount[3]); results += PvlKeyword ("ObservationImageGaps", gapCount[4]); results += PvlKeyword ("ObservationImageLIS", lisCount[4]); results += PvlKeyword ("ObservationImageHIS", hisCount[4]); results += PvlKeyword ("ObservationImagePossibleGaps", suspectGapCount[4]); results += PvlKeyword ("ObservationImageInvalid", invalidCount[4]); results += PvlKeyword ("ObservationImageValid", validCount[4]); results += PvlKeyword ("ObservationDarkGaps", gapCount[5]); results += PvlKeyword ("ObservationDarkLIS", lisCount[5]); results += PvlKeyword ("ObservationDarkHIS", hisCount[5]); results += PvlKeyword ("ObservationDarkPossibleGaps", suspectGapCount[5]); results += PvlKeyword ("ObservationDarkInvalid", invalidCount[5]); results += PvlKeyword ("ObservationDarkValid", validCount[5]); // Write the results to the log Application::Log(results); return; }
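// Standalone illustrative sketch (not the ISIS Stretch class) of the UNLUT mapping set up
// above: each 8-bit DN is stretched back to the midpoint of the two values stored for it
// in the LookupTable sequence. The pairwise layout of the table is inferred from the
// averaging done above; the tiny table in main() is illustrative only.
#include <iostream>
#include <utility>
#include <vector>

static std::vector<double> buildUnlutMap(
    const std::vector<std::pair<double, double>> &lut) {
  std::vector<double> unlut(lut.size());
  for (size_t i = 0; i < lut.size(); i++) {
    // Midpoint of the range covered by 8-bit DN i.
    unlut[i] = (lut[i].first + lut[i].second) / 2.0;
  }
  return unlut;
}

int main() {
  // Illustrative three-entry table; a real HiRISE LUT has 256 entries.
  std::vector<std::pair<double, double>> lut = {{0, 0}, {1, 3}, {4, 7}};
  for (double dn : buildUnlutMap(lut)) std::cout << dn << " ";   // 0 2 5.5
  std::cout << std::endl;
  return 0;
}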
void IsisMain() { //Create a process to create the input cubes Process p; //Create the input cubes, matching sample/lines Cube *inCube = p.SetInputCube ("FROM"); Cube *latCube = p.SetInputCube("LATCUB", SpatialMatch); Cube *lonCube = p.SetInputCube("LONCUB", SpatialMatch); //A 1x1 brick to read in the latitude and longitude DN values from //the specified cubes Brick latBrick(1,1,1, latCube->PixelType()); Brick lonBrick(1,1,1, lonCube->PixelType()); UserInterface &ui = Application::GetUserInterface(); //Set the sample and line increments int sinc = (int)(inCube->Samples() * 0.10); if(ui.WasEntered("SINC")) { sinc = ui.GetInteger("SINC"); } int linc = (int)(inCube->Lines() * 0.10); if(ui.WasEntered("LINC")) { linc = ui.GetInteger("LINC"); } //Set the degree of the polynomial to use in our functions int degree = ui.GetInteger("DEGREE"); //We are using a polynomial with two variables PolynomialBivariate sampFunct(degree); PolynomialBivariate lineFunct(degree); //We will be solving the function using the least squares method LeastSquares sampSol(sampFunct); LeastSquares lineSol(lineFunct); //Setup the variables for solving the stereographic projection //x = cos(latitude) * sin(longitude - lon_center) //y = cos(lat_center) * sin(latitude) - sin(lat_center) * cos(latitude) * cos(longitude - lon_center) //Get the center lat and long from the input cubes double lat_center = latCube->Statistics()->Average() * PI/180.0; double lon_center = lonCube->Statistics()->Average() * PI/180.0; /** * Loop through lines and samples projecting the latitude and longitude at those * points to stereographic x and y and adding these points to the LeastSquares * matrix. */ for(int i = 1; i <= inCube->Lines(); i+= linc) { for(int j = 1; j <= inCube->Samples(); j+= sinc) { latBrick.SetBasePosition(j, i, 1); latCube->Read(latBrick); if(IsSpecial(latBrick.at(0))) continue; double lat = latBrick.at(0) * PI/180.0; lonBrick.SetBasePosition(j, i, 1); lonCube->Read(lonBrick); if(IsSpecial(lonBrick.at(0))) continue; double lon = lonBrick.at(0) * PI/180.0; //Project lat and lon to x and y using a stereographic projection double k = 2/(1 + sin(lat_center) * sin(lat) + cos(lat_center)*cos(lat)*cos(lon - lon_center)); double x = k * cos(lat) * sin(lon - lon_center); double y = k * (cos(lat_center) * sin(lat)) - (sin(lat_center) * cos(lat) * cos(lon - lon_center)); //Add x and y to the least squares matrix vector<double> data; data.push_back(x); data.push_back(y); sampSol.AddKnown(data, j); lineSol.AddKnown(data, i); //If the sample increment goes past the last sample in the line, we want to //always read the last sample.. if(j != inCube->Samples() && j + sinc > inCube->Samples()) { j = inCube->Samples() - sinc; } } //If the line increment goes past the last line in the cube, we want to //always read the last line.. if(i != inCube->Lines() && i + linc > inCube->Lines()) { i = inCube->Lines() - linc; } } //Solve the least squares functions using QR Decomposition sampSol.Solve(LeastSquares::QRD); lineSol.Solve(LeastSquares::QRD); //If the user wants to save the residuals to a file, create a file and write //the column titles to it. 
TextFile oFile; if(ui.WasEntered("RESIDUALS")) { oFile.Open(ui.GetFilename("RESIDUALS"), "overwrite"); oFile.PutLine("Sample,\tLine,\tX,\tY,\tSample Error,\tLine Error\n"); } //Gather the statistics for the residuals from the least squares solutions Statistics sampErr; Statistics lineErr; vector<double> sampResiduals = sampSol.Residuals(); vector<double> lineResiduals = lineSol.Residuals(); for(int i = 0; i < (int)sampResiduals.size(); i++) { sampErr.AddData(sampResiduals[i]); lineErr.AddData(lineResiduals[i]); } //If a residuals file was specified, write the previous data, and the errors to the file. if(ui.WasEntered("RESIDUALS")) { for(int i = 0; i < sampSol.Rows(); i++) { vector<double> data = sampSol.GetInput(i); iString tmp = ""; tmp += iString(sampSol.GetExpected(i)); tmp += ",\t"; tmp += iString(lineSol.GetExpected(i)); tmp += ",\t"; tmp += iString(data[0]); tmp += ",\t"; tmp += iString(data[1]); tmp += ",\t"; tmp += iString(sampResiduals[i]); tmp += ",\t"; tmp += iString(lineResiduals[i]); oFile.PutLine(tmp + "\n"); } } oFile.Close(); //Records the error to the log PvlGroup error( "Error" ); error += PvlKeyword( "Degree", degree ); error += PvlKeyword( "NumberOfPoints", (int)sampResiduals.size() ); error += PvlKeyword( "SampleMinimumError", sampErr.Minimum() ); error += PvlKeyword( "SampleAverageError", sampErr.Average() ); error += PvlKeyword( "SampleMaximumError", sampErr.Maximum() ); error += PvlKeyword( "SampleStdDeviationError", sampErr.StandardDeviation() ); error += PvlKeyword( "LineMinimumError", lineErr.Minimum() ); error += PvlKeyword( "LineAverageError", lineErr.Average() ); error += PvlKeyword( "LineMaximumError", lineErr.Maximum() ); error += PvlKeyword( "LineStdDeviationError", lineErr.StandardDeviation() ); Application::Log( error ); //Close the input cubes for cleanup p.EndProcess(); //If we want to warp the image, then continue, otherwise return if(!ui.GetBoolean("NOWARP")) { //Creates the mapping group Pvl mapFile; mapFile.Read(ui.GetFilename("MAP")); PvlGroup &mapGrp = mapFile.FindGroup("Mapping",Pvl::Traverse); //Reopen the lat and long cubes latCube = new Cube(); latCube->SetVirtualBands(ui.GetInputAttribute("LATCUB").Bands()); latCube->Open(ui.GetFilename("LATCUB")); lonCube = new Cube(); lonCube->SetVirtualBands(ui.GetInputAttribute("LONCUB").Bands()); lonCube->Open(ui.GetFilename("LONCUB")); PvlKeyword targetName; //If the user entered the target name if(ui.WasEntered("TARGET")) { targetName = PvlKeyword("TargetName", ui.GetString("TARGET")); } //Else read the target name from the input cube else { Pvl fromFile; fromFile.Read(ui.GetFilename("FROM")); targetName = fromFile.FindKeyword("TargetName", Pvl::Traverse); } mapGrp.AddKeyword(targetName, Pvl::Replace); PvlKeyword equRadius; PvlKeyword polRadius; //If the user entered the equatorial and polar radii if(ui.WasEntered("EQURADIUS") && ui.WasEntered("POLRADIUS")) { equRadius = PvlKeyword("EquatorialRadius", ui.GetDouble("EQURADIUS")); polRadius = PvlKeyword("PolarRadius", ui.GetDouble("POLRADIUS")); } //Else read them from the pck else { Filename pckFile("$base/kernels/pck/pck?????.tpc"); pckFile.HighestVersion(); string pckFilename = pckFile.Expanded(); furnsh_c(pckFilename.c_str()); string target = targetName[0]; SpiceInt code; SpiceBoolean found; bodn2c_c (target.c_str(), &code, &found); if (!found) { string msg = "Could not convert Target [" + target + "] to NAIF code"; throw Isis::iException::Message(Isis::iException::Io,msg,_FILEINFO_); } SpiceInt n; SpiceDouble radii[3]; 
bodvar_c(code,"RADII",&n,radii); equRadius = PvlKeyword("EquatorialRadius", radii[0] * 1000); polRadius = PvlKeyword("PolarRadius", radii[2] * 1000); } mapGrp.AddKeyword(equRadius, Pvl::Replace); mapGrp.AddKeyword(polRadius, Pvl::Replace); //If the latitude type is not in the mapping group, copy it from the input if(!mapGrp.HasKeyword("LatitudeType")) { if(ui.GetString("LATTYPE") == "PLANETOCENTRIC") { mapGrp.AddKeyword(PvlKeyword("LatitudeType","Planetocentric"), Pvl::Replace); } else { mapGrp.AddKeyword(PvlKeyword("LatitudeType","Planetographic"), Pvl::Replace); } } //If the longitude direction is not in the mapping group, copy it from the input if(!mapGrp.HasKeyword("LongitudeDirection")) { if(ui.GetString("LONDIR") == "POSITIVEEAST") { mapGrp.AddKeyword(PvlKeyword("LongitudeDirection","PositiveEast"), Pvl::Replace); } else { mapGrp.AddKeyword(PvlKeyword("LongitudeDirection","PositiveWest"), Pvl::Replace); } } //If the longitude domain is not in the mapping group, assume it is 360 if(!mapGrp.HasKeyword("LongitudeDomain")) { mapGrp.AddKeyword(PvlKeyword("LongitudeDomain","360"), Pvl::Replace); } //If the default range is to be computed, use the input lat/long cubes to determine the range if(ui.GetString("DEFAULTRANGE") == "COMPUTE") { //NOTE - When computing the min/max longitude this application does not account for the //longitude seam if it exists. Since the min/max are calculated from the statistics of //the input longitude cube and then converted to the mapping group's domain they may be //invalid for cubes containing the longitude seam. Statistics *latStats = latCube->Statistics(); Statistics *lonStats = lonCube->Statistics(); double minLat = latStats->Minimum(); double maxLat = latStats->Maximum(); bool isOcentric = ((std::string)mapGrp.FindKeyword("LatitudeType")) == "Planetocentric"; if(isOcentric) { if(ui.GetString("LATTYPE") != "PLANETOCENTRIC") { minLat = Projection::ToPlanetocentric(minLat, (double)equRadius, (double)polRadius); maxLat = Projection::ToPlanetocentric(maxLat, (double)equRadius, (double)polRadius); } } else { if(ui.GetString("LATTYPE") == "PLANETOCENTRIC") { minLat = Projection::ToPlanetographic(minLat, (double)equRadius, (double)polRadius); maxLat = Projection::ToPlanetographic(maxLat, (double)equRadius, (double)polRadius); } } int lonDomain = (int)mapGrp.FindKeyword("LongitudeDomain"); double minLon = lonDomain == 360 ? Projection::To360Domain(lonStats->Minimum()) : Projection::To180Domain(lonStats->Minimum()); double maxLon = lonDomain == 360 ? 
Projection::To360Domain(lonStats->Maximum()) : Projection::To180Domain(lonStats->Maximum()); bool isPosEast = ((std::string)mapGrp.FindKeyword("LongitudeDirection")) == "PositiveEast"; if(isPosEast) { if(ui.GetString("LONDIR") != "POSITIVEEAST") { minLon = Projection::ToPositiveEast(minLon, lonDomain); maxLon = Projection::ToPositiveEast(maxLon, lonDomain); } } else { if(ui.GetString("LONDIR") == "POSITIVEEAST") { minLon = Projection::ToPositiveWest(minLon, lonDomain); maxLon = Projection::ToPositiveWest(maxLon, lonDomain); } } if(minLon > maxLon) { double temp = minLon; minLon = maxLon; maxLon = temp; } mapGrp.AddKeyword(PvlKeyword("MinimumLatitude", minLat),Pvl::Replace); mapGrp.AddKeyword(PvlKeyword("MaximumLatitude", maxLat),Pvl::Replace); mapGrp.AddKeyword(PvlKeyword("MinimumLongitude", minLon),Pvl::Replace); mapGrp.AddKeyword(PvlKeyword("MaximumLongitude", maxLon),Pvl::Replace); } //If the user decided to enter a ground range then override if (ui.WasEntered("MINLAT")) { mapGrp.AddKeyword(PvlKeyword("MinimumLatitude", ui.GetDouble("MINLAT")),Pvl::Replace); } if (ui.WasEntered("MAXLAT")) { mapGrp.AddKeyword(PvlKeyword("MaximumLatitude", ui.GetDouble("MAXLAT")),Pvl::Replace); } if (ui.WasEntered("MINLON")) { mapGrp.AddKeyword(PvlKeyword("MinimumLongitude", ui.GetDouble("MINLON")),Pvl::Replace); } if (ui.WasEntered("MAXLON")) { mapGrp.AddKeyword(PvlKeyword("MaximumLongitude", ui.GetDouble("MAXLON")),Pvl::Replace); } //If the pixel resolution is to be computed, compute the pixels/degree from the input if (ui.GetString("PIXRES") == "COMPUTE") { latBrick.SetBasePosition(1,1,1); latCube->Read(latBrick); lonBrick.SetBasePosition(1,1,1); lonCube->Read(lonBrick); //Read the lat and long at the upper left corner double a = latBrick.at(0) * PI/180.0; double c = lonBrick.at(0) * PI/180.0; latBrick.SetBasePosition(latCube->Samples(),latCube->Lines(),1); latCube->Read(latBrick); lonBrick.SetBasePosition(lonCube->Samples(),lonCube->Lines(),1); lonCube->Read(lonBrick); //Read the lat and long at the lower right corner double b = latBrick.at(0) * PI/180.0; double d = lonBrick.at(0) * PI/180.0; //Determine the angle between the two points double angle = acos(cos(a) * cos(b) * cos(c - d) + sin(a) * sin(b)); //double angle = acos((cos(a1) * cos(b1) * cos(b2)) + (cos(a1) * sin(b1) * cos(a2) * sin(b2)) + (sin(a1) * sin(a2))); angle *= 180/PI; //Determine the number of pixels between the two points double pixels = sqrt(pow(latCube->Samples() -1.0, 2.0) + pow(latCube->Lines() -1.0, 2.0)); //Add the scale in pixels/degree to the mapping group mapGrp.AddKeyword(PvlKeyword("Scale", pixels/angle, "pixels/degree"), Pvl::Replace); if (mapGrp.HasKeyword("PixelResolution")) { mapGrp.DeleteKeyword("PixelResolution"); } } // If the user decided to enter a resolution then override if (ui.GetString("PIXRES") == "MPP") { mapGrp.AddKeyword(PvlKeyword("PixelResolution", ui.GetDouble("RESOLUTION"), "meters/pixel"), Pvl::Replace); if (mapGrp.HasKeyword("Scale")) { mapGrp.DeleteKeyword("Scale"); } } else if (ui.GetString("PIXRES") == "PPD") { mapGrp.AddKeyword(PvlKeyword("Scale", ui.GetDouble("RESOLUTION"), "pixels/degree"), Pvl::Replace); if (mapGrp.HasKeyword("PixelResolution")) { mapGrp.DeleteKeyword("PixelResolution"); } } //Create a projection using the map file we created int samples,lines; Projection *outmap = ProjectionFactory::CreateForCube(mapFile,samples,lines,false); //Write the map file to the log Application::GuiLog(mapGrp); //Create a process rubber sheet ProcessRubberSheet r; //Set the input cube inCube = 
r.SetInputCube("FROM"); double tolerance = ui.GetDouble("TOLERANCE") * outmap->Resolution(); //Create a new transform object Transform *transform = new nocam2map (sampSol, lineSol, outmap, latCube, lonCube, ui.GetString("LATTYPE") == "PLANETOCENTRIC", ui.GetString("LONDIR") == "POSITIVEEAST", tolerance, ui.GetInteger("ITERATIONS"), inCube->Samples(), inCube->Lines(), samples, lines); //Allocate the output cube and add the mapping labels Cube *oCube = r.SetOutputCube ("TO", transform->OutputSamples(), transform->OutputLines(), inCube->Bands()); oCube->PutGroup(mapGrp); //Determine which interpolation to use Interpolator *interp = NULL; if (ui.GetString("INTERP") == "NEARESTNEIGHBOR") { interp = new Interpolator(Interpolator::NearestNeighborType); } else if (ui.GetString("INTERP") == "BILINEAR") { interp = new Interpolator(Interpolator::BiLinearType); } else if (ui.GetString("INTERP") == "CUBICCONVOLUTION") { interp = new Interpolator(Interpolator::CubicConvolutionType); } //Warp the cube r.StartProcess(*transform, *interp); r.EndProcess(); // add mapping to print.prt PvlGroup mapping = outmap->Mapping(); Application::Log(mapping); //Clean up delete latCube; delete lonCube; delete outmap; delete transform; delete interp; } }
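// Standalone illustrative sketch (not part of nocam2map): the oblique stereographic
// projection used above to turn (latitude, longitude) into the (x, y) basis for the
// least-squares fits, centered on the average latitude and longitude of the input cubes.
// This uses the standard textbook grouping of the formulas that the comment above
// paraphrases, with the scale factor k applied to both terms. Angles are in radians;
// the struct/function names and values in main() are illustrative only.
#include <cmath>
#include <iostream>

struct XY { double x, y; };

static XY stereographic(double lat, double lon, double latCenter, double lonCenter) {
  double k = 2.0 / (1.0 + std::sin(latCenter) * std::sin(lat) +
                    std::cos(latCenter) * std::cos(lat) * std::cos(lon - lonCenter));
  XY p;
  p.x = k * std::cos(lat) * std::sin(lon - lonCenter);
  p.y = k * (std::cos(latCenter) * std::sin(lat) -
             std::sin(latCenter) * std::cos(lat) * std::cos(lon - lonCenter));
  return p;
}

int main() {
  const double PI = 3.14159265358979323846;
  XY p = stereographic(10.0 * PI / 180.0, 45.0 * PI / 180.0,
                       12.0 * PI / 180.0, 44.0 * PI / 180.0);
  std::cout << p.x << " " << p.y << std::endl;
  return 0;
}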
void IsisMain() { // We will be warping a cube ProcessRubberSheet p; // Get the map projection file provided by the user UserInterface &ui = Application::GetUserInterface(); Pvl userPvl(ui.GetFilename("MAP")); PvlGroup &userMappingGrp = userPvl.FindGroup("Mapping",Pvl::Traverse); // Open the input cube and get the projection Cube *icube = p.SetInputCube ("FROM"); // Get the mapping group PvlGroup fromMappingGrp = icube->GetGroup("Mapping"); Projection *inproj = icube->Projection(); PvlGroup outMappingGrp = fromMappingGrp; // If the default range is FROM, then wipe out any range data in user mapping file if(ui.GetString("DEFAULTRANGE").compare("FROM") == 0 && !ui.GetBoolean("MATCHMAP")) { if(userMappingGrp.HasKeyword("MinimumLatitude")) { userMappingGrp.DeleteKeyword("MinimumLatitude"); } if(userMappingGrp.HasKeyword("MaximumLatitude")) { userMappingGrp.DeleteKeyword("MaximumLatitude"); } if(userMappingGrp.HasKeyword("MinimumLongitude")) { userMappingGrp.DeleteKeyword("MinimumLongitude"); } if(userMappingGrp.HasKeyword("MaximumLongitude")) { userMappingGrp.DeleteKeyword("MaximumLongitude"); } } // Deal with user overrides entered in the GUI. Do this by changing the user's mapping group, which // will then overlay anything in the output mapping group. if(ui.WasEntered("MINLAT") && !ui.GetBoolean("MATCHMAP")) { userMappingGrp.AddKeyword( PvlKeyword("MinimumLatitude", ui.GetDouble("MINLAT")), Pvl::Replace ); } if(ui.WasEntered("MAXLAT") && !ui.GetBoolean("MATCHMAP")) { userMappingGrp.AddKeyword( PvlKeyword("MaximumLatitude", ui.GetDouble("MAXLAT")), Pvl::Replace ); } if(ui.WasEntered("MINLON") && !ui.GetBoolean("MATCHMAP")) { userMappingGrp.AddKeyword( PvlKeyword("MinimumLongitude", ui.GetDouble("MINLON")), Pvl::Replace ); } if(ui.WasEntered("MAXLON") && !ui.GetBoolean("MATCHMAP")) { userMappingGrp.AddKeyword( PvlKeyword("MaximumLongitude", ui.GetDouble("MAXLON")), Pvl::Replace ); } /** * If the user is changing from positive east to positive west, or vice-versa, the output minimum is really * the input maximum. However, the user mapping group must be left unaffected (an input minimum must be the * output minimum). To accomplish this, we swap the minimums/maximums in the output group ahead of time. This * causes the minimums and maximums to correlate to the output minimums and maximums. That way when we copy * the user mapping group into the output group a mimimum overrides a minimum and a maximum overrides a maximum. */ bool sameDirection = true; if(userMappingGrp.HasKeyword("LongitudeDirection")) { if(((string)userMappingGrp["LongitudeDirection"]).compare(fromMappingGrp["LongitudeDirection"]) != 0) { sameDirection = false; } } // Since the out mapping group came from the from mapping group, which came from a valid cube, // we can assume both min/max lon exists if min longitude exists. 
  if(!sameDirection && outMappingGrp.HasKeyword("MinimumLongitude")) {
    double minLon = outMappingGrp["MinimumLongitude"];
    double maxLon = outMappingGrp["MaximumLongitude"];
    outMappingGrp["MaximumLongitude"] = minLon;
    outMappingGrp["MinimumLongitude"] = maxLon;
  }

  if(ui.GetString("PIXRES").compare("FROM") == 0 && !ui.GetBoolean("MATCHMAP")) {
    // Resolution will be in fromMappingGrp and outMappingGrp at this time
    // delete from user mapping grp
    if(userMappingGrp.HasKeyword("Scale")) {
      userMappingGrp.DeleteKeyword("Scale");
    }
    if(userMappingGrp.HasKeyword("PixelResolution")) {
      userMappingGrp.DeleteKeyword("PixelResolution");
    }
  }
  else if(ui.GetString("PIXRES").compare("MAP") == 0 || ui.GetBoolean("MATCHMAP")) {
    // Resolution will be in userMappingGrp - delete all others
    if(outMappingGrp.HasKeyword("Scale")) {
      outMappingGrp.DeleteKeyword("Scale");
    }
    if(outMappingGrp.HasKeyword("PixelResolution")) {
      outMappingGrp.DeleteKeyword("PixelResolution");
    }
    if(fromMappingGrp.HasKeyword("Scale")) {
      fromMappingGrp.DeleteKeyword("Scale");
    }
    if(fromMappingGrp.HasKeyword("PixelResolution")) {
      fromMappingGrp.DeleteKeyword("PixelResolution");
    }
  }
  else if(ui.GetString("PIXRES").compare("MPP") == 0) {
    // Resolution specified - delete all and add to outMappingGrp
    if(outMappingGrp.HasKeyword("Scale")) {
      outMappingGrp.DeleteKeyword("Scale");
    }
    if(outMappingGrp.HasKeyword("PixelResolution")) {
      outMappingGrp.DeleteKeyword("PixelResolution");
    }
    if(fromMappingGrp.HasKeyword("Scale")) {
      fromMappingGrp.DeleteKeyword("Scale");
    }
    if(fromMappingGrp.HasKeyword("PixelResolution")) {
      fromMappingGrp.DeleteKeyword("PixelResolution");
    }
    if(userMappingGrp.HasKeyword("Scale")) {
      userMappingGrp.DeleteKeyword("Scale");
    }
    if(userMappingGrp.HasKeyword("PixelResolution")) {
      userMappingGrp.DeleteKeyword("PixelResolution");
    }
    outMappingGrp.AddKeyword(PvlKeyword("PixelResolution", ui.GetDouble("RESOLUTION"), "meters/pixel"), Pvl::Replace);
  }
  else if(ui.GetString("PIXRES").compare("PPD") == 0) {
    // Resolution specified - delete all and add to outMappingGrp
    if(outMappingGrp.HasKeyword("Scale")) {
      outMappingGrp.DeleteKeyword("Scale");
    }
    if(outMappingGrp.HasKeyword("PixelResolution")) {
      outMappingGrp.DeleteKeyword("PixelResolution");
    }
    if(fromMappingGrp.HasKeyword("Scale")) {
      fromMappingGrp.DeleteKeyword("Scale");
    }
    if(fromMappingGrp.HasKeyword("PixelResolution")) {
      fromMappingGrp.DeleteKeyword("PixelResolution");
    }
    if(userMappingGrp.HasKeyword("Scale")) {
      userMappingGrp.DeleteKeyword("Scale");
    }
    if(userMappingGrp.HasKeyword("PixelResolution")) {
      userMappingGrp.DeleteKeyword("PixelResolution");
    }
    outMappingGrp.AddKeyword(PvlKeyword("Scale", ui.GetDouble("RESOLUTION"), "pixels/degree"), Pvl::Replace);
  }

  // Rotation will NOT propagate
  if(outMappingGrp.HasKeyword("Rotation")) {
    outMappingGrp.DeleteKeyword("Rotation");
  }

  /**
   * The user-specified map template file overrides whatever is in the
   * cube's mapping group.
   */
  for(int keyword = 0; keyword < userMappingGrp.Keywords(); keyword++) {
    outMappingGrp.AddKeyword(userMappingGrp[keyword], Pvl::Replace);
  }

  /**
   * Now, we have to deal with unit conversions. We convert only if the following are true:
   *   1) We used values from the input cube
   *   2) The values are longitudes or latitudes
   *   3) The map file or user-specified information uses a different measurement system than
   *      the input cube for said values.
   *
   * The data is corrected for:
   *   1) Positive east/positive west
   *   2) Longitude domain
   *   3) planetographic/planetocentric.
*/ // First, the longitude direction if(!sameDirection) { PvlGroup longitudes = inproj->MappingLongitudes(); for(int index = 0; index < longitudes.Keywords(); index ++) { if(!userMappingGrp.HasKeyword(longitudes[index].Name())) { // use the from domain because that's where our values are coming from if(((string)userMappingGrp["LongitudeDirection"]).compare("PositiveEast") == 0) { outMappingGrp[longitudes[index].Name()] = Projection::ToPositiveEast(outMappingGrp[longitudes[index].Name()], outMappingGrp["LongitudeDomain"]); } else { outMappingGrp[longitudes[index].Name()] = Projection::ToPositiveWest(outMappingGrp[longitudes[index].Name()], outMappingGrp["LongitudeDomain"]); } } } } // The minimum/maximum longitudes should be in order now. However, if the user entered a // maximum that was lower than the minimum, or a minimum that was higher than the maximum this // may still fail. Let it throw an error when we instantiate the projection. // Second, longitude domain if(userMappingGrp.HasKeyword("LongitudeDomain")) { // user set a new domain? if((int)userMappingGrp["LongitudeDomain"] != (int)fromMappingGrp["LongitudeDomain"]) { // new domain different? PvlGroup longitudes = inproj->MappingLongitudes(); for(int index = 0; index < longitudes.Keywords(); index ++) { if(!userMappingGrp.HasKeyword(longitudes[index].Name())) { if((int)userMappingGrp["LongitudeDomain"] == 180) { outMappingGrp[longitudes[index].Name()] = Projection::To180Domain(outMappingGrp[longitudes[index].Name()]); } else { outMappingGrp[longitudes[index].Name()] = Projection::To360Domain(outMappingGrp[longitudes[index].Name()]); } } } } } // Third, planetographic/planetocentric if(userMappingGrp.HasKeyword("LatitudeType")) { // user set a new domain? if(((string)userMappingGrp["LatitudeType"]).compare(fromMappingGrp["LatitudeType"]) != 0) { // new lat type different? PvlGroup latitudes = inproj->MappingLatitudes(); for(int index = 0; index < latitudes.Keywords(); index ++) { if(!userMappingGrp.HasKeyword(latitudes[index].Name())) { if(((string)userMappingGrp["LatitudeType"]).compare("Planetographic") == 0) { outMappingGrp[latitudes[index].Name()] = Projection::ToPlanetographic( (double)fromMappingGrp[latitudes[index].Name()], (double)fromMappingGrp["EquatorialRadius"], (double)fromMappingGrp["PolarRadius"]); } else { outMappingGrp[latitudes[index].Name()] = Projection::ToPlanetocentric( (double)fromMappingGrp[latitudes[index].Name()], (double)fromMappingGrp["EquatorialRadius"], (double)fromMappingGrp["PolarRadius"]); } } } } } // If MinLon/MaxLon out of order, we weren't able to calculate the correct values if((double)outMappingGrp["MinimumLongitude"] >= (double)outMappingGrp["MaximumLongitude"]) { if(!ui.WasEntered("MINLON") || !ui.WasEntered("MAXLON")) { string msg = "Unable to determine the correct [MinimumLongitude,MaximumLongitude]."; msg += " Please specify these values in the [MINLON,MAXLON] parameters"; throw iException::Message(iException::Pvl,msg,_FILEINFO_); } } int samples,lines; Pvl mapData; // Copy to preserve cube labels so we can match cube size if (userPvl.HasObject("IsisCube")) { mapData = userPvl; mapData.FindObject("IsisCube").DeleteGroup("Mapping"); mapData.FindObject("IsisCube").AddGroup(outMappingGrp); } else { mapData.AddGroup(outMappingGrp); } // *NOTE: The UpperLeftX,UpperLeftY keywords will not be used in the CreateForCube // method, and they will instead be recalculated. This is correct. 
  Projection *outproj = ProjectionFactory::CreateForCube(mapData, samples, lines,
                                                         ui.GetBoolean("MATCHMAP"));

  // Set up the transform object which will simply map
  // output line/samps -> output lat/lons -> input line/samps
  Transform *transform = new map2map(icube->Samples(), icube->Lines(), icube->Projection(),
                                     samples, lines, outproj, ui.GetBoolean("TRIM"));

  // Allocate the output cube and add the mapping labels
  Cube *ocube = p.SetOutputCube("TO", transform->OutputSamples(), transform->OutputLines(),
                                icube->Bands());

  PvlGroup cleanOutGrp = outproj->Mapping();

  // ProjectionFactory::CreateForCube updated mapData to have the correct
  // upperleftcornerx, upperleftcornery, scale and resolution. Use these
  // updated numbers.
  cleanOutGrp.AddKeyword(mapData.FindGroup("Mapping", Pvl::Traverse)["UpperLeftCornerX"], Pvl::Replace);
  cleanOutGrp.AddKeyword(mapData.FindGroup("Mapping", Pvl::Traverse)["UpperLeftCornerY"], Pvl::Replace);
  cleanOutGrp.AddKeyword(mapData.FindGroup("Mapping", Pvl::Traverse)["Scale"], Pvl::Replace);
  cleanOutGrp.AddKeyword(mapData.FindGroup("Mapping", Pvl::Traverse)["PixelResolution"], Pvl::Replace);

  ocube->PutGroup(cleanOutGrp);

  // Set up the interpolator
  Interpolator *interp;
  if (ui.GetString("INTERP") == "NEARESTNEIGHBOR") {
    interp = new Interpolator(Interpolator::NearestNeighborType);
  }
  else if (ui.GetString("INTERP") == "BILINEAR") {
    interp = new Interpolator(Interpolator::BiLinearType);
  }
  else if (ui.GetString("INTERP") == "CUBICCONVOLUTION") {
    interp = new Interpolator(Interpolator::CubicConvolutionType);
  }
  else {
    string msg = "Unknown value for INTERP [" + ui.GetString("INTERP") + "]";
    throw iException::Message(iException::Programmer, msg, _FILEINFO_);
  }

  // Warp the cube
  p.StartProcess(*transform, *interp);
  p.EndProcess();

  Application::Log(cleanOutGrp);

  // Cleanup
  delete transform;
  delete interp;
}
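// Standalone illustrative sketch (not the ISIS Projection methods) of the two longitude
// conventions map2map reconciles above via Projection::To180Domain / To360Domain and
// ToPositiveEast / ToPositiveWest: domain wrapping plus an east <-> west flip, shown here
// in the 360 domain only. Function names and the numbers in main() are illustrative.
#include <cmath>
#include <iostream>

static double to360Domain(double lon) {            // wrap into [0, 360)
  double wrapped = std::fmod(lon, 360.0);
  return (wrapped < 0.0) ? wrapped + 360.0 : wrapped;
}

static double to180Domain(double lon) {            // wrap into (-180, 180]
  double wrapped = to360Domain(lon);
  return (wrapped > 180.0) ? wrapped - 360.0 : wrapped;
}

static double flipDirection(double lon) {          // positive east <-> positive west
  return to360Domain(360.0 - to360Domain(lon));
}

int main() {
  std::cout << to180Domain(270.0) << " "           // -90
            << to360Domain(-45.0) << " "           // 315
            << flipDirection(10.0) << std::endl;   // 350
  return 0;
}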
void IsisMain() {
  UserInterface &ui = Application::GetUserInterface();

  /* Processing steps
     1. Open and read the jitter table, convert the pixel offsets to angles, and create
        the polynomials (solve for the coefficients) to use to do the high pass filter,
        putting the results into a rotation matrix in the jitter class.
     2. Apply the jitter correction in the LineScanCameraRotation object of the master cube.
     3. Loop through FROMLIST correcting the pointing and writing out the updated camera
        pointing from the master cube.
  */
  int degree = ui.GetInteger("DEGREE");

  // Get the input file list to make sure it is not empty and the master cube is included
  FileList list;
  list.Read(ui.GetFilename("FROMLIST"));

  if (list.size() < 1) {
    string msg = "The input list file [" + ui.GetFilename("FROMLIST") + "] is empty";
    throw iException::Message(iException::User, msg, _FILEINFO_);
  }

  int ifile = 0;
  // Make sure the master file is included in the input file list
  while (ifile < (int) list.size() &&
         Filename(list[ifile]).Expanded() != Filename(ui.GetFilename("MASTER")).Expanded()) {
    ifile++;
  }

  if (ifile >= (int) list.size()) {
    string msg = "The master file, [" + Filename(ui.GetFilename("MASTER")).Expanded() +
                 "] is not included in the input list file [" +
                 ui.GetFilename("FROMLIST") + "]";
    throw iException::Message(iException::User, msg, _FILEINFO_);
  }

  bool step2 = false;
  PvlGroup gp("AppjitResults");

  // Step 1: Create the jitter rotation
  try {
    // Open the master cube
    Cube cube;
    cube.Open(ui.GetFilename("MASTER"), "rw");

    // Check for an existing polygon; if it exists, delete it
    if (cube.Label()->HasObject("Polygon")) {
      cube.Label()->DeleteObject("Polygon");
    }

    // Get the camera
    Camera *cam = cube.Camera();
    if (cam->DetectorMap()->LineRate() == 0.0) {
      string msg = "[" + ui.GetFilename("MASTER") + "] is not a line scan camera image";
      throw iException::Message(Isis::iException::User, msg, _FILEINFO_);
    }

    // Create the master rotation to be corrected
    int frameCode = cam->InstrumentRotation()->Frame();
    cam->SetImage(int(cube.Samples() / 2), int(cube.Lines() / 2));
    double tol = cam->PixelResolution();

    if (tol < 0.)
{ // Alternative calculation of .01*ground resolution of a pixel tol = cam->PixelPitch()*cam->SpacecraftAltitude()*1000./cam->FocalLength()/100.; } LineScanCameraRotation crot(frameCode, *(cube.Label()), cam->InstrumentRotation()->GetFullCacheTime(), tol ); crot.SetPolynomialDegree(ui.GetInteger("DEGREE")); crot.SetAxes(1, 2, 3); if (ui.WasEntered("PITCHRATE")) crot.ResetPitchRate(ui.GetDouble("PITCHRATE")); if (ui.WasEntered("YAW")) crot.ResetYaw(ui.GetDouble("YAW")); crot.SetPolynomial(); double baseTime = crot.GetBaseTime(); double timeScale = crot.GetTimeScale(); double fl = cam->FocalLength(); double pixpitch = cam->PixelPitch(); std::vector<double> cacheTime = cam->InstrumentRotation()->GetFullCacheTime(); // Get the jitter in pixels, compute jitter angles, and fit a polynomial to each angle PixelOffset jitter(ui.GetFilename("JITTERFILE"), fl, pixpitch, baseTime, timeScale, degree); jitter.LoadAngles(cacheTime); jitter.SetPolynomial(); // Set the jitter and apply to the instrument rotation crot.SetJitter( &jitter ); crot.ReloadCache(); // Pull out the pointing cache as a table and write it Table cmatrix = crot.Cache("InstrumentPointing"); cmatrix.Label().AddComment("Corrected using appjit and" + ui.GetFilename("JITTERFILE")); cube.Write(cmatrix); // Write out the instrument position table Isis::PvlGroup kernels = cube.Label()->FindGroup("Kernels",Isis::Pvl::Traverse); // Write out the "Table" label to the tabled kernels in the kernels group kernels["InstrumentPointing"] = "Table"; // kernels["InstrumentPosition"] = "Table"; cube.PutGroup(kernels); cube.Close(); gp += PvlKeyword("StatusMaster",ui.GetFilename("MASTER") + ": camera pointing updated"); // Apply the dejittered pointing to the rest of the files step2 = true; for (int ifile = 0; ifile < (int) list.size(); ifile++) { if (list[ifile] != ui.GetFilename("MASTER")) { // Open the cube cube.Open(list[ifile],"rw"); //check for existing polygon, if exists delete it if (cube.Label()->HasObject("Polygon")){ cube.Label()->DeleteObject("Polygon"); } // Get the camera and make sure it is a line scan camera Camera *cam = cube.Camera(); if (cam->DetectorMap()->LineRate() == 0.0) { string msg = "[" + ui.GetFilename("FROM") + "] is not a line scan camera"; throw iException::Message(Isis::iException::User,msg,_FILEINFO_); } // Pull out the pointing cache as a table and write it cube.Write(cmatrix); cube.PutGroup(kernels); cube.Close(); gp += PvlKeyword("Status" + iString(ifile), list[ifile] + ": camera pointing updated"); } } Application::Log( gp ); } catch (iException &e) { string msg; if (!step2) { msg = "Unable to fit pointing for [" + ui.GetFilename("MASTER") + "]"; } else { msg = "Unable to update pointing for nonMaster file(s)"; } throw iException::Message(Isis::iException::User,msg,_FILEINFO_); } }
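// Standalone illustrative sketch (not the PixelOffset class, which is not shown in this
// excerpt): the focal-plane geometry behind step 1 above, converting a jitter offset
// measured in pixels into an angular offset using the pixel pitch and focal length pulled
// from the camera. The helper name, units, and numbers in main() are assumptions made for
// illustration only.
#include <cmath>
#include <iostream>

// A displacement of offsetPixels * pixelPitch (mm) in the focal plane subtends roughly
// atan(displacement / focalLength) radians at the optics.
static double pixelOffsetToAngle(double offsetPixels, double pixelPitchMm,
                                 double focalLengthMm) {
  return std::atan2(offsetPixels * pixelPitchMm, focalLengthMm);
}

int main() {
  // Roughly HiRISE-like numbers: 12 micron pitch, 12 m focal length (illustrative).
  std::cout << pixelOffsetToAngle(0.5, 0.012, 12000.0) << " rad" << std::endl;
  return 0;
}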
void IsisMain() { // Get the camera information Process p1; Cube *icube = p1.SetInputCube("FROM",OneBand); cam = icube->Camera(); // We will be processing by brick. ProcessByBrick p; // Find out which bands are to be created UserInterface &ui = Application::GetUserInterface(); nbands = 0; if ((phase = ui.GetBoolean("PHASE"))) nbands++; if ((emission = ui.GetBoolean("EMISSION"))) nbands++; if ((incidence = ui.GetBoolean("INCIDENCE"))) nbands++; if ((latitude = ui.GetBoolean("LATITUDE"))) nbands++; if ((longitude = ui.GetBoolean("LONGITUDE"))) nbands++; if ((pixelResolution = ui.GetBoolean("PIXELRESOLUTION"))) nbands++; if ((lineResolution = ui.GetBoolean("LINERESOLUTION"))) nbands++; if ((sampleResolution = ui.GetBoolean("SAMPLERESOLUTION"))) nbands++; if ((detectorResolution = ui.GetBoolean("DETECTORRESOLUTION"))) nbands++; if ((northAzimuth = ui.GetBoolean("NORTHAZIMUTH"))) nbands++; if ((sunAzimuth = ui.GetBoolean("SUNAZIMUTH"))) nbands++; if ((spacecraftAzimuth = ui.GetBoolean("SPACECRAFTAZIMUTH"))) nbands++; if ((offnadirAngle = ui.GetBoolean("OFFNADIRANGLE"))) nbands++; if (nbands < 1) { string message = "At least one photometry parameter must be entered" "[PHASE, EMISSION, INCIDENCE, LATITUDE, LONGITUDE]"; throw iException::Message (iException::User, message, _FILEINFO_); } // Create a bandbin group for the output label PvlKeyword name("Name"); if (phase) name += "Phase Angle"; if (emission) name += "Emission Angle"; if (incidence) name += "Incidence Angle"; if (latitude) name += "Latitude"; if (longitude) name += "Longitude"; if (pixelResolution) name += "Pixel Resolution"; if (lineResolution) name += "Line Resolution"; if (sampleResolution) name += "Sample Resolution"; if (detectorResolution) name += "Detector Resolution"; if (northAzimuth) name += "North Azimuth"; if (sunAzimuth) name += "Sun Azimuth"; if (spacecraftAzimuth) name += "Spacecraft Azimuth"; if (offnadirAngle) name += "OffNadir Angle"; PvlGroup bandBin("BandBin"); bandBin += name; // Create the output cube. Note we add the input cube to expedite propagation // of input cube elements (label, blobs, etc...). It *must* be cleared // prior to systematic processing. (void) p.SetInputCube("FROM", OneBand); Cube *ocube = p.SetOutputCube("TO",icube->Samples(), icube->Lines(), nbands); p.SetBrickSize(64,64,nbands); p.ClearInputCubes(); // Toss the input file as stated above // Start the processing p.StartProcess(phocube); // Add the bandbin group to the output label. If a BandBin group already // exists, remove all existing keywords and add the keywords for this app. // Otherwise, just put the group in. PvlObject &cobj = ocube->Label()->FindObject("IsisCube"); if (cobj.HasGroup("BandBin")) { PvlGroup &bb = cobj.FindGroup("BandBin"); bb.Clear(); PvlContainer::PvlKeywordIterator k = bandBin.Begin(); while (k != bandBin.End()) { bb += *k; ++k; } } else { ocube->PutGroup(bandBin); } p.EndProcess(); }
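// Standalone illustrative sketch (not part of phocube): the output band order is fixed by
// the sequence of flag checks above, and the same sequence builds the BandBin Name keyword,
// so band N in the output cube corresponds to the Nth name listed. This helper only
// reproduces that ordered selection; the flag values in main() are illustrative.
#include <iostream>
#include <string>
#include <utility>
#include <vector>

static std::vector<std::string> selectedBandNames(
    const std::vector<std::pair<std::string, bool>> &flags) {
  std::vector<std::string> names;
  for (const auto &f : flags) {
    if (f.second) names.push_back(f.first);   // keep only requested backplanes, in order
  }
  return names;
}

int main() {
  std::vector<std::pair<std::string, bool>> flags = {
    {"Phase Angle", true}, {"Emission Angle", false}, {"Incidence Angle", true},
    {"Latitude", true},    {"Longitude", true}
  };
  for (const auto &name : selectedBandNames(flags)) std::cout << name << std::endl;
  return 0;
}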