int run ( int argc, char** argv)
{
  int i;
  int argumentExists;   // sscanf returns the number of items converted
  double doubleArg;

  // start clock for performance timings
  clock_t t1 = clock ();

  int optind = 1;

  // process command-line options
  i = 1;
  while (i < (argc - 2))  // || (prob && (i < (argc - 1)) && phase2))
  {
    // Debug-only cout << "argv[" << i << "] = " << argv[i] << endl;
    if (strcmp(argv[i], "-mdl") == 0)
    {
      i++;
      phase = 4;
      argumentExists = sscanf(argv[i], "%lf", &doubleArg);
      if ((argumentExists != 1) || (doubleArg <= (double) 0.0) || (doubleArg >= (double) 1.0))
      {
        fprintf(stderr, "%s: GBAD-MDL threshold must be greater than 0.0 and less than 1.0\n",
                argv[0]);
        return 1;
      }
      else
        cout << "GBAD-MDL option chosen." << endl;
      matchThreshold = doubleArg;
      i++;
      optind = optind + 2;
      mdlOption = true;
      // Debug-only cout << " matchThreshold (MDL) = " << matchThreshold << endl;
    }
    else if (strcmp(argv[i], "-mps") == 0)
    {
      i++;
      phase = 4;
      argumentExists = sscanf(argv[i], "%lf", &doubleArg);
      if ((argumentExists != 1) || (doubleArg <= (double) 0.0) || (doubleArg >= (double) 1.0))
      {
        fprintf(stderr, "%s: GBAD-MPS threshold must be greater than 0.0 and less than 1.0\n",
                argv[0]);
        return 1;
      }
      else
        cout << "GBAD-MPS option chosen." << endl;
      matchThreshold = doubleArg;
      i++;
      optind = optind + 2;
      mpsOption = true;
      // Debug-only cout << " matchThreshold (MPS) = " << matchThreshold << endl;
    }
    else if (strcmp(argv[i], "-prob") == 0)
    {
      i++;
      phase = 4;
      cout << "GBAD-P option chosen." << endl;
      optind = optind + 1;
      probOption = true;
    }
    else
    {
      i++;
    }
  }

  if (strcmp(argv[optind], "-mst") == 0)
  {
    optind = optind + 1;
    minfreq = atoi ( argv[optind] );
    if(minfreq <= 0)
    {
      cerr << "ERROR: invalid mst value (" << minfreq << "), mst must be greater than 0" << endl;
      return 1;
    }
    // Debug-only cout << "minfreq = " << minfreq << endl;
  }

  // open input file (graph) for reading
  cerr << "Read" << endl;
  // Debug-only cout << "input graph file: " << argv[optind+1] << endl;
  FILE *input = fopen ( argv[optind+1], "r" );
  // If unable to open input file, need to give an error message and abort
  if (input == NULL)
  {
    fprintf(stderr, "ERROR: Unable to open specified input file: %s\n", argv[optind+1]);
    return 1;
  }

  if ( argc - optind == 3 && argc > 4)
  {
    dooutput = true;
    // Debug-only cout << "instances file: " << argv[optind+2] << endl;
    output = fopen ( argv[optind+2], "w" );
  }
  else
  {
    cout << "No instances file specified" << endl;
  }

  database.read ( input );
  rewind(input);
  Database testDatabase;
  testDatabase.read(input);
  fclose ( input );

  cerr << "Edgecount" << endl;
  database.edgecount ();
  cerr << "Reorder" << endl;
  database.reorder ();
  initLegStatics ();
  graphstate.init ();

  if ((mdlOption) || (mpsOption) || (probOption))
    cout << "Searching for anomalies..." << endl;

  // If it is the "searching for anomalies" phase, and the GBAD-P option was
  // chosen (-prob), then no need to examine the graph again - just use
  // the database tree and best_sub.inst info to find the most anomalous
  // extra edge
  if ((phase == 4) && (probOption))
  {
    gbad.gbadPROB();
  }
  // Else, use the FSM algorithm to find the normative pattern...
  else
  {
    for ( unsigned int i = 0; i < database.nodelabels.size (); i++ )
    {
      if (((database.nodelabels[i].frequency >= minfreq) &&
           (database.nodelabels[i].frequentedgelabels.size())) ||
          ((database.nodelabels[i].frequency < minfreq) &&
           (database.nodelabels[i].frequency > 0) && (phase == 4) && (mdlOption)))
      {
        Path path ( i );
        // Debug-only cout << " Processing path(" << i << "): " << path << "..." << endl;
        path.expand ();
      }
    }
  }

  //
  // If this is an anomaly detection phase, print out the best anomalous
  // value (if there is one).
  //
  if (phase == 4)
  {
    if ((bestAnomValue <= 1.0) && (bestAnomValue > 0.000000))
      cout << "Anomalous Value: " << fixed << bestAnomValue << endl;
    else
      cout << "Anomalous Value: None" << endl;
  }

  clock_t t2 = clock ();
  statistics.print ();
  cout << "Approximate total runtime: " << ((float) (t2 - t1)) / CLOCKS_PER_SEC << "s" << endl;

  if (dooutput)
    fclose ( output );

  if(dot_file_name != NULL)
  {
    string dotFile(dot_file_name);
    testDatabase.toDot(dotFile, gbad.anomNodes, gbad.anomEdges, gbad.normNodes, gbad.normEdges);
  }

  return 0;
}
void IsisMain() { //get the number of samples to skip from the left and right ends // of the prefix and suffix data UserInterface &ui = Application::GetUserInterface(); int imageLeft = 0; int imageRight = 0; int rampLeft = 0; int rampRight = 0; int calLeftBuffer = 0; int calRightBuffer = 0; int calLeftDark = 0; int calRightDark = 0; int leftBuffer = 0; int rightBuffer = 0; int leftDark = 0; int rightDark = 0; if(ui.GetBoolean("USEOFFSETS")) { imageLeft = ui.GetInteger("LEFTIMAGE"); imageRight = ui.GetInteger("RIGHTIMAGE"); rampLeft = ui.GetInteger("LEFTIMAGE"); rampRight = ui.GetInteger("RIGHTIMAGE"); calLeftBuffer = ui.GetInteger("LEFTCALBUFFER"); calRightBuffer = ui.GetInteger("LEFTCALBUFFER"); calLeftDark = ui.GetInteger("LEFTCALDARK"); calRightDark = ui.GetInteger("RIGHTCALDARK"); leftBuffer = ui.GetInteger("LEFTBUFFER"); rightBuffer = ui.GetInteger("RIGHTBUFFER"); leftDark = ui.GetInteger("LEFTDARK"); rightDark = ui.GetInteger("RIGHTDARK"); } Isis::FileName fromFile = ui.GetFileName("FROM"); Isis::Cube inputCube; inputCube.open(fromFile.expanded()); //Check to make sure we got the cube properly if(!inputCube.isOpen()) { QString msg = "Could not open FROM cube " + fromFile.expanded(); throw IException(IException::User, msg, _FILEINFO_); } Process p; Cube *icube = p.SetInputCube("FROM"); // Get statistics from the cube prefix and suffix data Table hifix("HiRISE Ancillary"); icube->read(hifix); Statistics darkStats, bufStats, rampDarkStats; int tdi = icube->group("Instrument")["Tdi"]; int binning_mode = icube->group("Instrument")["Summing"]; //This gets us the statistics for the dark and buffer pixels // alongside of the image itself for(int rec = 2; rec < hifix.Records(); rec++) { vector<int> dark = hifix[rec]["DarkPixels"]; vector<int> buf = hifix[rec]["BufferPixels"]; if(buf.size() <= (unsigned int)(leftBuffer + rightBuffer)) { ThrowException(buf.size(), leftBuffer, rightBuffer, "image buffer"); } if(dark.size() <= (unsigned int)(leftDark + rightDark)) { ThrowException(dark.size(), leftDark, rightDark, "image dark reference"); } for(int i = leftDark; i < (int)dark.size() - rightDark; i++) { double d; if(dark[i] == NULL2) d = NULL8; else if(dark[i] == LOW_REPR_SAT2) d = LOW_REPR_SAT8; else if(dark[i] == LOW_INSTR_SAT2) d = LOW_INSTR_SAT8; else if(dark[i] == HIGH_INSTR_SAT2) d = HIGH_INSTR_SAT8; else if(dark[i] == HIGH_REPR_SAT2) d = HIGH_REPR_SAT8; else d = dark[i]; darkStats.AddData(&d, 1); } for(int i = leftBuffer; i < (int)buf.size() - rightBuffer; i++) { double d; if(buf[i] == NULL2) d = NULL8; else if(buf[i] == LOW_REPR_SAT2) d = LOW_REPR_SAT8; else if(buf[i] == LOW_INSTR_SAT2) d = LOW_INSTR_SAT8; else if(buf[i] == HIGH_INSTR_SAT2) d = HIGH_INSTR_SAT8; else if(buf[i] == HIGH_REPR_SAT2) d = HIGH_REPR_SAT8; else d = buf[i]; bufStats.AddData(&d, 1); } } // Get statistics from the calibration image //Calculate boundaries of the reverse readout lines, // Masked lines, and ramp lines. 
//There are always 20 reverse readout lines int reverseReadoutLines = 20; //Number of mask pixels depends on Binning mode int maskLines; maskLines = 20 / binning_mode; //mask lines go after reverse lines maskLines += reverseReadoutLines; // Actual starting line, number Ramp lines int rampStart = maskLines; int rampLines = tdi / binning_mode; Table calimg("HiRISE Calibration Image"); icube->read(calimg); Statistics calStats; //Statistics for the Reverse readout lines of the cal image Statistics reverseStats; //Statistics for the masked lines of the cal image Statistics maskStats; //Statistics for the ramped lines of the cal image Statistics rampStats; //Iterate through the calibration image //Add in reverse data for(int rec = 2 ; rec <= 18 ; rec++) { //Lines [2,18] vector<int> lineBuffer = calimg[rec]["Calibration"]; for(unsigned int i = 2 ; i < lineBuffer.size() - 1 ; i++) { //Samples [2, * -1] double d = lineBuffer[i]; if(lineBuffer[i] == NULL2) { d = NULL8; } else if(lineBuffer[i] == LOW_REPR_SAT2) { d = LOW_REPR_SAT8; } else if(lineBuffer[i] == LOW_INSTR_SAT2) { d = LOW_INSTR_SAT8; } else if(lineBuffer[i] == HIGH_INSTR_SAT2) { d = HIGH_INSTR_SAT8; } else if(lineBuffer[i] == HIGH_REPR_SAT2) { d = HIGH_REPR_SAT8; } reverseStats.AddData(&d, 1); } } //Add in the mask data for(int rec = 22 ; rec < maskLines - 1 ; rec++) {//Lines [22, 38] !!!!dependant on bin vector<int> lineBuffer = calimg[rec]["Calibration"]; for(int i = 2 ; i < (int)lineBuffer.size() - 1 ; i++) { //Samples [2, *-1] double d = lineBuffer[i]; if(d == NULL2) { d = NULL8; } else if(d == LOW_REPR_SAT2) { d = LOW_REPR_SAT8; } else if(d == LOW_INSTR_SAT2) { d = LOW_INSTR_SAT8; } else if(d == HIGH_INSTR_SAT2) { d = HIGH_INSTR_SAT8; } else if(d == HIGH_REPR_SAT2) { d = HIGH_REPR_SAT8; } maskStats.AddData(&d, 1); } } //Add in the ramp data for(int rec = maskLines + 2; rec < calimg.Records() - 1; rec++) { vector<int> buf = calimg[rec]["Calibration"]; //loop through all but the first and last sample of the calibration image for(int i = rampLeft; i < (int)buf.size() - rampRight; i++) { double d; if(buf[i] == NULL2) d = NULL8; else if(buf[i] == LOW_REPR_SAT2) d = LOW_REPR_SAT8; else if(buf[i] == LOW_INSTR_SAT2) d = LOW_INSTR_SAT8; else if(buf[i] == HIGH_INSTR_SAT2) d = HIGH_INSTR_SAT8; else if(buf[i] == HIGH_REPR_SAT2) d = HIGH_REPR_SAT8; else d = buf[i]; //Determine which group of stats to add to rampStats.AddData(&d, 1); } } // Get statistics from the calibration prefix and suffix data Table calfix("HiRISE Calibration Ancillary"); icube->read(calfix); Statistics calDarkStats, calBufStats; int rampLine0 = rampStart + 1; int rampLineN = (rampStart + rampLines - 1) - 1; rampLineN = calfix.Records() - 1; for(int rec = 0; rec < calfix.Records(); rec++) { vector<int> dark = calfix[rec]["DarkPixels"]; vector<int> buf = calfix[rec]["BufferPixels"]; if(buf.size() <= (unsigned int)(calLeftBuffer + calRightBuffer)) { ThrowException(buf.size(), calLeftBuffer, calRightBuffer, "calibration buffer"); } if(dark.size() <= (unsigned int)(calLeftDark + calRightDark)) { ThrowException(dark.size(), calLeftDark, calRightDark, "calibration dark reference"); } for(int i = calLeftDark; i < (int)dark.size() - calRightDark; i++) { double d; if(dark[i] == NULL2) d = NULL8; else if(dark[i] == LOW_REPR_SAT2) d = LOW_REPR_SAT8; else if(dark[i] == LOW_INSTR_SAT2) d = LOW_INSTR_SAT8; else if(dark[i] == HIGH_INSTR_SAT2) d = HIGH_INSTR_SAT8; else if(dark[i] == HIGH_REPR_SAT2) d = HIGH_REPR_SAT8; else d = dark[i]; calDarkStats.AddData(&d, 1); if((rec > rampLine0) && 
(rec < rampLineN)) { rampDarkStats.AddData(&d, 1); } } for(int i = calLeftBuffer; i < (int)buf.size() - calRightBuffer; i++) { double d; if(buf[i] == NULL2) d = NULL8; else if(buf[i] == LOW_REPR_SAT2) d = LOW_REPR_SAT8; else if(buf[i] == LOW_INSTR_SAT2) d = LOW_INSTR_SAT8; else if(buf[i] == HIGH_INSTR_SAT2) d = HIGH_INSTR_SAT8; else if(buf[i] == HIGH_REPR_SAT2) d = HIGH_REPR_SAT8; else d = buf[i]; calBufStats.AddData(&d, 1); } } Statistics linesPostrampStats; Statistics imageStats; Isis::LineManager imageBuffer(inputCube); imageBuffer.begin(); Buffer out(imageBuffer.SampleDimension() - (imageLeft + imageRight), imageBuffer.LineDimension(), imageBuffer.BandDimension(), imageBuffer.PixelType()); for(int postRampLine = 0 ; postRampLine < LINES_POSTRAMP ; postRampLine++) { inputCube.read(imageBuffer); for(int postRampSamp = 0 ; postRampSamp < out.SampleDimension() ; postRampSamp++) { out[postRampSamp] = imageBuffer[postRampSamp + imageLeft]; } linesPostrampStats.AddData(out.DoubleBuffer(), out.size()); imageBuffer++; } for(int imageLine = LINES_POSTRAMP; imageLine < inputCube.lineCount(); imageLine++) { inputCube.read(imageBuffer); for(int imageSample = 0 ; imageSample < out.SampleDimension(); imageSample++) { out[imageSample] = imageBuffer[imageSample + imageLeft]; } imageStats.AddData(out.DoubleBuffer(), out.size()); imageBuffer++; } // Generate the statistics in pvl form const int NUM_GROUPS = 10; PvlGroup groups[NUM_GROUPS]; groups[0] = PvlStats(linesPostrampStats, "IMAGE_POSTRAMP"); groups[1] = PvlStats(imageStats, "IMAGE"); groups[2] = PvlStats(darkStats, "IMAGE_DARK"); groups[3] = PvlStats(bufStats, "IMAGE_BUFFER"); groups[4] = PvlStats(reverseStats, "CAL_REVERSE"); groups[5] = PvlStats(maskStats, "CAL_MASK"); groups[6] = PvlStats(rampStats, "CAL_RAMP"); groups[7] = PvlStats(calDarkStats, "CAL_DARK"); groups[8] = PvlStats(rampDarkStats, "CAL_DARK_RAMP"); groups[9] = PvlStats(calBufStats, "CAL_BUFFER"); // Write the results to the output file if the user specified one if(ui.WasEntered("TO")) { Pvl temp; for(int i = 0 ; i < NUM_GROUPS ; i++) { temp.addGroup(groups[i]); } temp.write(ui.GetFileName("TO")); } else { // Log the results for(int i = 0 ; i < NUM_GROUPS ; i++) { Application::Log(groups[i]); } } }
void ViewEqualizer::Listener::notifyLoadData(Channel* channel,
                                             const uint32_t frameNumber,
                                             const Statistics& statistics,
                                             const Viewport& /*region*/)
{
    Load& load = _getLoad(frameNumber);
    if (load == Load::NONE)
        return;

    LBASSERT(_taskIDs.find(channel) != _taskIDs.end());
    const uint32_t taskID = _taskIDs[channel];

    // gather relevant load data
    int64_t startTime = std::numeric_limits<int64_t>::max();
    int64_t endTime = 0;
    bool loadSet = false;
    int64_t transmitTime = 0;
    for (size_t i = 0; i < statistics.size() && !loadSet; ++i)
    {
        const Statistic& data = statistics[i];
        if (data.task != taskID) // data from another compound
            continue;

        switch (data.type)
        {
        case Statistic::CHANNEL_CLEAR:
        case Statistic::CHANNEL_DRAW:
        case Statistic::CHANNEL_READBACK:
            startTime = LB_MIN(startTime, data.startTime);
            endTime = LB_MAX(endTime, data.endTime);
            break;

        case Statistic::CHANNEL_ASYNC_READBACK:
        case Statistic::CHANNEL_FRAME_TRANSMIT:
            transmitTime += data.startTime - data.endTime;
            break;
        case Statistic::CHANNEL_FRAME_WAIT_SENDTOKEN:
            transmitTime -= data.endTime - data.startTime;
            break;

        // assemble blocks on input frames, stop using subsequent data
        case Statistic::CHANNEL_ASSEMBLE:
            loadSet = true;
            break;

        default:
            break;
        }
    }

    if (startTime == std::numeric_limits<int64_t>::max())
        return;

    LBASSERTINFO(load.missing > 0, load << " for " << channel->getName() << " "
                                        << channel->getSerial());

    const int64_t time = LB_MAX(endTime - startTime, transmitTime);
    load.time += time;
    --load.missing;

    if (load.missing == 0)
    {
        const float rTime = float(load.time) / float(load.nResources);
        load.time = int64_t(rTime * sqrtf(float(load.nResources)));
    }

    LBLOG(LOG_LB1) << "Task " << taskID << ", added time " << time << " to "
                   << load << " from " << channel->getName() << " "
                   << channel->getSerial() << std::endl;
}
void IsisMain() { UserInterface &ui = Application::GetUserInterface(); FileName inFile = ui.GetFileName("FROM"); // Set the processing object ProcessExportMiniRFLroPds cProcess; // Setup the input cube Cube *cInCube = cProcess.SetInputCube("FROM"); Pvl *cInLabel = cInCube->label(); // Get the output label file FileName outFile(ui.GetFileName("TO", "lbl")); QString outFileName(outFile.expanded()); cProcess.SetDetached(outFileName); cProcess.SetExportType(ProcessExportPds::Fixed); //Set the resolution to Kilometers cProcess.SetPdsResolution(ProcessExportPds::Kilometer); // 32bit cProcess.SetOutputType(Isis::Real); cProcess.SetOutputNull(Isis::NULL4); cProcess.SetOutputLrs(Isis::LOW_REPR_SAT4); cProcess.SetOutputLis(Isis::LOW_INSTR_SAT4); cProcess.SetOutputHrs(Isis::HIGH_REPR_SAT4); cProcess.SetOutputHis(Isis::HIGH_INSTR_SAT4); cProcess.SetOutputRange(-DBL_MAX, DBL_MAX); cProcess.SetOutputEndian(Isis::Msb); // Turn off Keywords cProcess.ForceScalingFactor(false); cProcess.ForceSampleBitMask(false); cProcess.ForceCoreNull(false); cProcess.ForceCoreLrs(false); cProcess.ForceCoreLis(false); cProcess.ForceCoreHrs(false); cProcess.ForceCoreHis(false); // Standard label Translation Pvl &pdsLabel = cProcess.StandardPdsLabel(ProcessExportPds::Image); // bLevel => Level 2 = True, Level 3 = False bool bLevel2 = cInCube->hasGroup("Instrument"); // Translate the keywords from the original EDR PDS label that go in // this RDR PDS label for Level2 images only if(bLevel2) { OriginalLabel cOriginalBlob; cInCube->read(cOriginalBlob); Pvl cOrigLabel; PvlObject cOrigLabelObj = cOriginalBlob.ReturnLabels(); cOrigLabelObj.setName("OriginalLabelObject"); cOrigLabel.addObject(cOrigLabelObj); // Translates the ISIS labels along with the original EDR labels cOrigLabel.addObject(*(cInCube->label())); PvlTranslationManager cCubeLabel2(cOrigLabel, "$lro/translations/mrfExportOrigLabel.trn"); cCubeLabel2.Auto(pdsLabel); if(cInLabel->findObject("IsisCube").findGroup("Instrument").hasKeyword("MissionName")) { PvlKeyword &cKeyMissionName = cInLabel->findObject("IsisCube").findGroup("Instrument").findKeyword("MissionName"); int sFound = cKeyMissionName[0].indexOf("CHANDRAYAAN"); if(sFound != -1) { cCubeLabel2 = PvlTranslationManager(cOrigLabel, "$lro/translations/mrfExportOrigLabelCH1.trn"); cCubeLabel2.Auto(pdsLabel); } else { cCubeLabel2 = PvlTranslationManager(cOrigLabel, "$lro/translations/mrfExportOrigLabelLRO.trn"); cCubeLabel2.Auto(pdsLabel); } } } else { //Level3 - add Band_Name keyword PvlGroup &cBandBinGrp = cInCube->group("BandBin"); PvlKeyword cKeyBandBin = PvlKeyword("BAND_NAME"); PvlKeyword cKeyInBandBin; if(cBandBinGrp.hasKeyword("OriginalBand")) { cKeyInBandBin = cBandBinGrp.findKeyword("OriginalBand"); } else if(cBandBinGrp.hasKeyword("FilterName")) { cKeyInBandBin = cBandBinGrp.findKeyword("FilterName"); } for(int i = 0; i < cKeyInBandBin.size(); i++) { cKeyBandBin += cKeyInBandBin[i]; } PvlObject &cImageObject(pdsLabel.findObject("IMAGE")); cImageObject += cKeyBandBin; } // Get the Sources Product ID if entered for Level2 only as per example if(ui.WasEntered("SRC") && bLevel2) { QString sSrcFile = ui.GetFileName("SRC"); QString sSrcType = ui.GetString("TYPE"); GetSourceProductID(sSrcFile, sSrcType, pdsLabel); } // Get the User defined Labels if(ui.WasEntered("USERLBL")) { QString sUserLbl = ui.GetFileName("USERLBL"); GetUserLabel(sUserLbl, pdsLabel, bLevel2); } // Calculate CheckSum Statistics *cStats = cInCube->statistics(); iCheckSum = (unsigned int)cStats->Sum(); FixLabel(pdsLabel, bLevel2); // Add an 
output format template to the PDS PVL // Distinguish betweeen Level 2 and 3 images by calling the camera() // function as only non mosaic images(Level2) have a camera if(bLevel2) { pdsLabel.setFormatTemplate("$lro/translations/mrfPdsLevel2.pft"); } else { pdsLabel.setFormatTemplate("$lro/translations/mrfPdsLevel3.pft"); } int iFound = outFileName.indexOf(".lbl"); outFileName.replace(iFound, 4, ".img"); ofstream oCube(outFileName.toAscii().data()); cProcess.OutputDetachedLabel(); //cProcess.OutputLabel(oCube); cProcess.StartProcess(oCube); oCube.close(); cProcess.EndProcess(); }
bool SioImporter::ensureStatisticsReadProperly(Progress *pProgress, std::ostream& failure)
{
   bool success = true;
   success &= setBatch();
   PlugInArgList* pInList = NULL;
   PlugInArgList* pOutList = NULL;
   isseas(getInputSpecification(pInList) != false, failure);
   isseas(getOutputSpecification(pOutList) != false, failure);
   PlugInArg* pRasterElementArg = NULL;
   isseas(pInList->getArg(Importer::ImportElementArg(), pRasterElementArg) != false, failure);

   string testFilePath = TestUtilities::getTestDataPath() + "tipjul5bands.sio";

   RasterElement* pRasterElement = NULL;
   if (success)
   {
      vector<ImportDescriptor*> descriptors = getImportDescriptors(testFilePath);
      if (descriptors.empty() == false)
      {
         ImportDescriptor* pImportDescriptor = descriptors.front();
         if (pImportDescriptor != NULL)
         {
            DataDescriptor* pDescriptor = pImportDescriptor->getDataDescriptor();
            if (pDescriptor != NULL)
            {
               Service<ModelServices> pModel;
               pRasterElement = dynamic_cast<RasterElement*>(pModel->createElement(pDescriptor));
               if (pRasterElement != NULL)
               {
                  pRasterElementArg->setActualValue(pRasterElement);
               }
            }
         }
      }
   }

   isseas(execute(pInList, pOutList) != false, failure);
   isseas(pRasterElement != NULL, failure);

   if (success)
   {
      RasterDataDescriptor* pDescriptor =
         dynamic_cast<RasterDataDescriptor*>(pRasterElement->getDataDescriptor());
      isseas(pDescriptor != NULL, failure);

      const vector<DimensionDescriptor>& loadedBands = pDescriptor->getBands();
      isseas(loadedBands.size() == 5, failure);

      int iNumBandsWithStats = 0;
      for (int i = 0; i < 5; ++i)
      {
         // we don't want to do an assert yet... only when we know 4 bands have computed statistics
         Statistics* pStatistics = pRasterElement->getStatistics(loadedBands[i]);
         if (pStatistics != NULL)
         {
            if (pStatistics->areStatisticsCalculated() == true)
            {
               if (success)
               {
                  iNumBandsWithStats++;
               }
            }
         }
      }

      // success of the band computation is dependent on 4 bands with statistics
      isseas(iNumBandsWithStats == 3, failure);
   }

   if (pRasterElement != NULL)
   {
      Service<ModelServices> pModel;
      pModel->destroyElement(pRasterElement);
      pRasterElement = NULL;
   }

   Service<PlugInManagerServices> pPim;
   if (pInList)
   {
      pPim->destroyPlugInArgList(pInList);
   }
   if (pOutList)
   {
      pPim->destroyPlugInArgList(pOutList);
   }

   return success;
}
/** * This is the main method. Makeflat runs in three steps: * * 1) Calculate statistics * - For all cameras, this checks for one band and matching * sample counts. * - For framing cameras, this checks the standard deviation of * the images and records the averages of each image * - For push frame cameras, this calls CheckFramelets for each * image. * * 2) Create the temporary file, collect more detailed statistics * - For all cameras, this generates the temporary file and calculates * the final exclusion list * - For framing/push frame cameras, the temporary file is * 2 bands, where the first is a sum of DNs from each image/framelet * and the second band is a count of valid DNs that went into each sum * * 3) Create the final flat field file * - For all cameras, this processes the temporary file to create the final flat * field file. */ void IsisMain() { // Initialize variables ResetGlobals(); UserInterface &ui = Application::GetUserInterface(); maxStdev = ui.GetDouble("STDEVTOL"); if(ui.GetString("IMAGETYPE") == "FRAMING") { cameraType = Framing; // framing cameras need to figure this out automatically // during step 1 numFrameLines = -1; } else if(ui.GetString("IMAGETYPE") == "LINESCAN") { cameraType = LineScan; numFrameLines = ui.GetInteger("NUMLINES"); } else { cameraType = PushFrame; numFrameLines = ui.GetInteger("FRAMELETHEIGHT"); } FileList inList(ui.GetFilename("FROMLIST")); Progress progress; tempFileLength = 0; numOutputSamples = 0; /** * Line scan progress is based on the input list, whereas * the other cameras take much longer and are based on the * images themselves. Prepare the progress if we're doing * line scan. */ if(cameraType == LineScan) { progress.SetText("Calculating Number of Image Lines"); progress.SetMaximumSteps(inList.size()); progress.CheckStatus(); } /** * For a push frame camera, the temp file is one framelet. * Technically this is the same for the framing, but we * don't know the height of a framelet yet. */ if(cameraType == PushFrame) { tempFileLength = numFrameLines; } /** * Start pass 1, use global currImage so that methods called * know the image we're processing. */ for(currImage = 0; currImage < inList.size(); currImage++) { /** * Read the current cube into memory */ Cube tmp; tmp.Open(Filename(inList[currImage]).Expanded()); /** * If we haven't determined how many samples the output * should have, we can do so now */ if(numOutputSamples == 0 && tmp.Bands() == 1) { numOutputSamples = tmp.Samples(); } /** * Try and validate the image, quick tests first! * * (imageValid &= means imageValid = imageValid && ...) */ bool imageValid = true; // Only single band images are acceptable imageValid &= (tmp.Bands() == 1); // Sample sizes must always match imageValid &= (numOutputSamples == tmp.Samples()); // For push frame cameras, there must be valid all framelets if(cameraType == PushFrame) { imageValid &= (tmp.Lines() % numFrameLines == 0); } // For framing cameras, we need to figure out the size... 
// setTempFileLength is used to revert if the file // is decided to be invalid bool setTempFileLength = false; if(cameraType == Framing) { if(tempFileLength == 0 && imageValid) { tempFileLength = tmp.Lines(); numFrameLines = tempFileLength; setTempFileLength = true; } imageValid &= (tempFileLength == tmp.Lines()); } // Statistics are necessary at this point for push frame and framing cameras // because the framing camera standard deviation tolerance is based on // entire images, and push frame framelet exclusion stats can not be collected // during pass 2 cleanly if((cameraType == Framing || cameraType == PushFrame) && imageValid) { string prog = "Calculating Standard Deviation " + iString((int)currImage+1) + "/"; prog += iString((int)inList.size()) + " (" + Filename(inList[currImage]).Name() + ")"; if(cameraType == Framing) { Statistics *stats = tmp.Statistics(1, prog); imageValid &= !IsSpecial(stats->StandardDeviation()); imageValid &= !IsSpecial(stats->Average()); imageValid &= stats->StandardDeviation() <= maxStdev; vector<double> fileStats; fileStats.push_back(stats->Average()); inputFrameletAverages.push_back(fileStats); delete stats; } else if(cameraType == PushFrame) { imageValid &= CheckFramelets(prog, tmp); } if(setTempFileLength && !imageValid) { tempFileLength = 0; } } // The line scan camera needs to actually count the number of lines in each image to know // how many total frames there are before beginning pass 2. if(imageValid && (cameraType == LineScan)) { int lines = (tmp.Lines() / numFrameLines); // partial frame? if(tmp.Lines() % numFrameLines != 0) { lines ++; } tempFileLength += lines; } else if(!imageValid) { excludedFiles.insert(pair<int, bool>(currImage, true)); } tmp.Close(); if(cameraType == LineScan) { progress.CheckStatus(); } } /** * If the number of output samples could not be determined, we never * found a legitimate cube. */ if(numOutputSamples <= 0) { string msg = "No valid input cubes were found"; throw iException::Message(iException::User,msg,_FILEINFO_); } /** * If theres no temp file length, which is based off of valid data in * the input cubes, then we havent found any valid data. */ if(tempFileLength <= 0) { string msg = "No valid input data was found"; throw iException::Message(iException::User,msg,_FILEINFO_); } /** * ocube is now the temporary file (for pass 2). */ ocube = new Cube(); ocube->SetDimensions(numOutputSamples, tempFileLength, 2); PvlGroup &prefs = Preference::Preferences().FindGroup("DataDirectory", Pvl::Traverse); iString outTmpName = (string)prefs["Temporary"][0] + "/"; outTmpName += Filename(ui.GetFilename("TO")).Basename() + ".tmp.cub"; ocube->Create(outTmpName); oLineMgr = new LineManager(*ocube); oLineMgr->SetLine(1); ProcessByBrick p; int excludedCnt = 0; if(cameraType == LineScan) { outputTmpAverages.resize(numOutputSamples); outputTmpCounts.resize(numOutputSamples); numInputDns.resize(numOutputSamples); } cubeInitialized = false; for(currImage = 0; currImage < inList.size(); currImage++) { if(Excluded(currImage)) { excludedCnt ++; continue; } PvlObject currFile("Exclusions"); currFile += PvlKeyword("Filename", inList[currImage]); currFile += PvlKeyword("Tolerance", maxStdev); if(cameraType == LineScan) { currFile += PvlKeyword("FrameLines", numFrameLines); } else if(cameraType == PushFrame) { currFile += PvlKeyword("FrameletLines", numFrameLines); } excludedDetails.push_back(currFile); CubeAttributeInput inAtt; // This needs to be set constantly because ClearInputCubes // seems to be removing the input brick size. 
if(cameraType == LineScan) { p.SetBrickSize(1, numFrameLines, 1); } else if(cameraType == Framing || cameraType == PushFrame) { p.SetBrickSize(numOutputSamples, 1, 1); } p.SetInputCube(inList[currImage], inAtt); iString progText = "Calculating Averages " + iString((int)currImage+1); progText += "/" + iString((int)inList.size()); progText += " (" + Filename(inList[currImage]).Name() + ")"; p.Progress()->SetText(progText); p.StartProcess(CreateTemporaryData); p.EndProcess(); p.ClearInputCubes(); if(excludedDetails[excludedDetails.size()-1].Groups() == 0) { excludedDetails.resize(excludedDetails.size()-1); } } /** * Pass 2 completed. The processing methods were responsible for writing * the entire temporary cube. */ if(oLineMgr) { delete oLineMgr; oLineMgr = NULL; } if(ocube) { ocube->Close(); delete ocube; } /** * ocube is now the final output */ ocube = new Cube(); if(cameraType == LineScan) { ocube->SetDimensions(numOutputSamples, 1, 1); } else if(cameraType == Framing || cameraType == PushFrame) { ocube->SetDimensions(numOutputSamples, tempFileLength, 1); } ocube->Create(Filename(ui.GetFilename("TO")).Expanded()); oLineMgr = new LineManager(*ocube); oLineMgr->SetLine(1); // We now have the necessary temp file, let's go ahead and combine it into // the final output! p.SetInputBrickSize(numOutputSamples, 1, 2); p.SetOutputBrickSize(numOutputSamples, 1, 1); cubeInitialized = false; CubeAttributeInput inAtt; p.Progress()->SetText("Calculating Final Flat Field"); p.SetInputCube(outTmpName, inAtt); p.StartProcess(ProcessTemporaryData); p.EndProcess(); if(cameraType == LineScan) { ocube->Write(*oLineMgr); } if(oLineMgr) { delete oLineMgr; oLineMgr = NULL; } if(ocube) { ocube->Close(); delete ocube; ocube = NULL; } /** * Build a list of excluded files */ PvlGroup excludedFiles("ExcludedFiles"); for(currImage = 0; currImage < inList.size(); currImage++) { if(Excluded(currImage)) { excludedFiles += PvlKeyword("File", inList[currImage]); } } // log the results Application::Log(excludedFiles); if(ui.WasEntered("EXCLUDE")) { Pvl excludeFile; // Find excluded files excludeFile.AddGroup(excludedFiles); for(unsigned int i = 0; i < excludedDetails.size(); i++) { excludeFile.AddObject(excludedDetails[i]); } excludeFile.Write(Filename(ui.GetFilename("EXCLUDE")).Expanded()); } remove(outTmpName.c_str()); // Clean up settings ResetGlobals(); }
void IsisMain() { UserInterface &ui = Application::GetUserInterface(); Cube cube; cube.open(ui.GetFileName("FROM")); // Check that it is a Mariner10 cube. Pvl * labels = cube.label(); if ("Mariner_10" != (QString)labels->findKeyword("SpacecraftName", Pvl::Traverse)) { QString msg = "The cube [" + ui.GetFileName("FROM") + "] does not appear" + " to be a Mariner10 cube"; throw IException(IException::User, msg, _FILEINFO_); } // Check that the cube actually needs reconstruction Chip cp(5, 5); cp.TackCube(25, 25); cp.Load(cube); Statistics *stats = NULL; stats = cp.Statistics(); // Maximum possible number of good pixels in a 5x5 if(stats->ValidPixels() > 8) { QString msg = "The cube [" + ui.GetFileName("FROM") + "] does not need" + " reconstruction, try mar10clean instead"; throw IException(IException::User, msg, _FILEINFO_); } if (stats != NULL) { delete stats; stats = NULL; } // Open the input cube Pipeline p("mar10restore"); p.SetInputFile("FROM"); p.SetOutputFile("TO"); p.KeepTemporaryFiles(!ui.GetBoolean("REMOVE")); // Run a standard deviation filter on the cube p.AddToPipeline("noisefilter", "noise1"); p.Application("noise1").SetInputParameter("FROM", true); p.Application("noise1").SetOutputParameter("TO", "noise1"); p.Application("noise1").AddConstParameter("TOLDEF", "stddev"); p.Application("noise1").AddConstParameter("FLATTOL", "10"); p.Application("noise1").AddConstParameter("SAMP", "5"); p.Application("noise1").AddConstParameter("LINE", "5"); p.Application("noise1").AddConstParameter("MINIMUM", "4"); p.Application("noise1").AddConstParameter("TOLMIN", "2.0"); p.Application("noise1").AddConstParameter("TOLMAX", "1.5"); p.Application("noise1").AddConstParameter("REPLACE", "null"); // run a standard deviation filter on the cube p.AddToPipeline("noisefilter", "noise2"); p.Application("noise2").SetInputParameter("FROM", true); p.Application("noise2").SetOutputParameter("TO", "noise2"); p.Application("noise2").AddConstParameter("TOLDEF", "stddev"); p.Application("noise2").AddConstParameter("FLATTOL", "10"); p.Application("noise2").AddConstParameter("SAMP", "11"); p.Application("noise2").AddConstParameter("LINE", "11"); p.Application("noise2").AddConstParameter("MINIMUM", "9"); p.Application("noise2").AddConstParameter("TOLMIN", "100"); p.Application("noise2").AddConstParameter("TOLMAX", "2.0"); p.Application("noise2").AddConstParameter("REPLACE", "null"); // Run a standard deviation filter on the cube p.AddToPipeline("noisefilter", "noise3"); p.Application("noise3").SetInputParameter("FROM", true); p.Application("noise3").SetOutputParameter("TO", "noise3"); p.Application("noise3").AddConstParameter("TOLDEF", "stddev"); p.Application("noise3").AddConstParameter("FLATTOL", "10"); p.Application("noise3").AddConstParameter("SAMP", "7"); p.Application("noise3").AddConstParameter("LINE", "7"); p.Application("noise3").AddConstParameter("MINIMUM", "4"); p.Application("noise3").AddConstParameter("TOLMIN", "100"); p.Application("noise3").AddConstParameter("TOLMAX", "1.5"); p.Application("noise3").AddConstParameter("REPLACE", "null"); // Run a low pass filter on the invalid data in the cube p.AddToPipeline("lowpass", "lowpass1"); p.Application("lowpass1").SetInputParameter("FROM", true); p.Application("lowpass1").SetOutputParameter("TO", "lp1"); p.Application("lowpass1").AddConstParameter("SAMP", "3"); p.Application("lowpass1").AddConstParameter("LINE", "3"); p.Application("lowpass1").AddConstParameter("MINIMUM", "2"); p.Application("lowpass1").AddConstParameter("FILTER", "outside"); 
p.Application("lowpass1").AddConstParameter("NULL", "true"); p.Application("lowpass1").AddConstParameter("LIS", "true"); p.Application("lowpass1").AddConstParameter("HIS", "true"); p.Application("lowpass1").AddConstParameter("LRS", "true"); // Run a low pass filter on the invalid data in the cube p.AddToPipeline("lowpass", "lowpass2"); p.Application("lowpass2").SetInputParameter("FROM", true); p.Application("lowpass2").SetOutputParameter("TO", "lp2"); p.Application("lowpass2").AddConstParameter("SAMP", "3"); p.Application("lowpass2").AddConstParameter("LINE", "3"); p.Application("lowpass2").AddConstParameter("MINIMUM", "2"); p.Application("lowpass2").AddConstParameter("FILTER", "outside"); p.Application("lowpass2").AddConstParameter("NULL", "true"); p.Application("lowpass2").AddConstParameter("LIS", "true"); p.Application("lowpass2").AddConstParameter("HIS", "true"); p.Application("lowpass2").AddConstParameter("LRS", "true"); // Run a low pass filter on the invalid data in the cube p.AddToPipeline("lowpass", "lowpass3"); p.Application("lowpass3").SetInputParameter("FROM", true); p.Application("lowpass3").SetOutputParameter("TO", "lp3"); p.Application("lowpass3").AddConstParameter("SAMP", "3"); p.Application("lowpass3").AddConstParameter("LINE", "3"); p.Application("lowpass3").AddConstParameter("MINIMUM", "2"); p.Application("lowpass3").AddConstParameter("FILTER", "outside"); p.Application("lowpass3").AddConstParameter("NULL", "true"); p.Application("lowpass3").AddConstParameter("LIS", "true"); p.Application("lowpass3").AddConstParameter("HIS", "true"); p.Application("lowpass3").AddConstParameter("LRS", "true"); p.AddToPipeline("trim"); p.Application("trim").SetInputParameter("FROM", true); p.Application("trim").SetOutputParameter("TO", "trim"); p.Application("trim").AddConstParameter("LEFT", toString(15)); p.Application("trim").AddConstParameter("RIGHT", toString(5)); p.Application("trim").AddConstParameter("BOTTOM", toString(0)); p.Application("trim").AddConstParameter("TOP", toString(5)); p.Run(); }
QWidget* ResultsExporter::getExportOptionsWidget(const PlugInArgList *pInArgList) { const DataDescriptor* pDescriptor = NULL; if (pInArgList != NULL) { RasterElement* pElement = pInArgList->getPlugInArgValue<RasterElement>(Exporter::ExportItemArg()); if (pElement != NULL) { pDescriptor = pElement->getDataDescriptor(); } } if (mpOptionsWidget == NULL) { Service<DesktopServices> pDesktop; VERIFY(pDesktop.get() != NULL); mpOptionsWidget = new ResultsOptionsWidget(pDesktop->getMainWidget()); } if (mpOptionsWidget != NULL) { const string& name = pDescriptor->getName(); const string& type = pDescriptor->getType(); DataElement* pParent = pDescriptor->getParent(); RasterElement* pResults = dynamic_cast<RasterElement*>(mpModel->getElement(name, type, pParent)); if (pResults != NULL) { GeocoordType geocoordType; PassArea passArea = MIDDLE; double dFirstThreshold = 0.0; double dSecondThreshold = 0.0; SpatialDataWindow* pWindow = dynamic_cast<SpatialDataWindow*>(mpDesktop->getCurrentWorkspaceWindow()); if (pWindow != NULL) { SpatialDataView* pView = pWindow->getSpatialDataView(); if (pView != NULL) { LayerList* pLayerList = pView->getLayerList(); if (pLayerList != NULL) { ThresholdLayer* pThresholdLayer = static_cast<ThresholdLayer*>(pLayerList->getLayer(THRESHOLD, pResults)); if (pThresholdLayer != NULL) { passArea = pThresholdLayer->getPassArea(); dFirstThreshold = pThresholdLayer->getFirstThreshold(); dSecondThreshold = pThresholdLayer->getSecondThreshold(); } else { Statistics* pStatistics = pResults->getStatistics(); if (pStatistics != NULL) { dFirstThreshold = pStatistics->getMin(); dSecondThreshold = pStatistics->getMax(); } } } LatLonLayer* pLatLonLayer = static_cast<LatLonLayer*>(pView->getTopMostLayer(LAT_LONG)); if (pLatLonLayer != NULL) { geocoordType = pLatLonLayer->getGeocoordType(); } } if (geocoordType.isValid() == false) { bool hasGeoData = pResults->isGeoreferenced(); if (hasGeoData == false) { RasterElement* pParent = dynamic_cast<RasterElement*>(mpResults->getParent()); if (pParent != NULL) { hasGeoData = pParent->isGeoreferenced(); } } if (hasGeoData == true) { geocoordType = Georeference::getSettingGeocoordType(); } } } mpOptionsWidget->setGeocoordType(geocoordType); mpOptionsWidget->setPassArea(passArea); mpOptionsWidget->setFirstThreshold(dFirstThreshold); mpOptionsWidget->setSecondThreshold(dSecondThreshold); } } return mpOptionsWidget; }
/**
 * Retrieve the statistics based on the box size
 * and point on the cube.
 *
 * @param p
 */
void StatisticsTool::getStatistics(QPoint p) {
  MdiCubeViewport *cvp = cubeViewport();
  if(cvp == NULL) return;

  double sample, line;
  cvp->viewportToCube(p.x(), p.y(), sample, line);

  // If we are outside of the cube, do nothing
  if((sample < 0.5) || (line < 0.5) ||
     (sample > cvp->cubeSamples() + 0.5) || (line > cvp->cubeLines() + 0.5)) {
    return;
  }

  int isamp = (int)(sample + 0.5);
  int iline = (int)(line + 0.5);

  Statistics stats;
  Brick *brick = new Brick(1, 1, 1, cvp->cube()->pixelType());

  QVector<QVector<double> > pixelData(p_boxLines, QVector<double>(p_boxSamps, Null));

  double lineDiff = p_boxLines / 2.0;
  double sampDiff = p_boxSamps / 2.0;

  p_ulSamp = isamp - (int)floor(sampDiff);
  p_ulLine = iline - (int)floor(lineDiff);

  int x, y;
  y = p_ulLine;

  for(int i = 0; i < p_boxLines; i++) {
    x = p_ulSamp;

    if(y < 1 || y > cvp->cubeLines()) {
      y++;
      continue;
    }

    for(int j = 0; j < p_boxSamps; j++) {
      if(x < 1 || x > cvp->cubeSamples()) {
        x++;
        continue;
      }

      brick->SetBasePosition(x, y, cvp->grayBand());
      cvp->cube()->read(*brick);
      stats.AddData(brick->at(0));
      pixelData[i][j] = brick->at(0);

      x++;
    }

    y++;
  }

  // The brick is only needed while reading the box; free it to avoid a leak
  delete brick;
  brick = NULL;

  p_visualDisplay->setPixelData(pixelData, p_ulSamp, p_ulLine);

  if (stats.ValidPixels()) {
    p_minLabel->setText(QString("Minimum: %1").arg(stats.Minimum()));
    p_maxLabel->setText(QString("Maximum: %1").arg(stats.Maximum()));
    p_avgLabel->setText(QString("Average: %1").arg(stats.Average()));
    p_stdevLabel->setText(QString("Standard Dev: %1").arg(stats.StandardDeviation(), 0, 'f', 6));
  }
  else {
    p_minLabel->setText(QString("Minimum: n/a"));
    p_maxLabel->setText(QString("Maximum: n/a"));
    p_avgLabel->setText(QString("Average: n/a"));
    p_stdevLabel->setText(QString("Standard Dev: n/a"));
  }

  p_set = true;

  resizeScrollbars();
}
int main (int argc, char *argv[])
{
    //myTableLookUp.readTable(hXOR);
    //myTableLookUp.readTable(hIFF);
    //maxMemory=0;

    if (argc != 11) {
        printf ("DSMGA ell nInitial selectionPressure pc pm maxGen maxFe repeat display rand_seed\n");
        return -1;
    }

    int ell = atoi (argv[1]);               // problem size
    int nInitial = atoi (argv[2]);          // initial population size
    int selectionPressure = atoi (argv[3]); // selection pressure
    double pc = atof (argv[4]);             // pc
    double pm = atof (argv[5]);             // pm
    int maxGen = atoi (argv[6]);            // max generation
    int maxFe = atoi (argv[7]);             // max fe
    int repeat = atoi (argv[8]);            // how many times to repeat
    int display = atoi (argv[9]);           // display each generation or not
    int rand_seed = atoi (argv[10]);        // rand seed

    if (rand_seed != -1)  // -1 keeps the default time-based seed
        myRand.seed((unsigned long)rand_seed);

    int i;
    Statistics stGen;
    int usedGen;
    int failNum = 0;
    int maxMemoryUsage = 0;

    for (i = 0; i < repeat; i++) {
        DSMGA dsmga (ell, nInitial, selectionPressure, pc, pm, maxGen, maxFe);

        if (display == 1)
            usedGen = dsmga.doIt (true);
        else
            usedGen = dsmga.doIt (false);

        if (!dsmga.foundOptima()) {
            failNum++;
            printf ("-");
        }
        else {
            stGen.record (usedGen);
            printf ("+");
        }

        maxMemoryUsage += maxMemory;
        fflush (NULL);
    }

    cout << endl;
    cout << "Max DSM memory usage: " << (double)maxMemoryUsage/(double)repeat << " bytes." << endl;
    cout << "Memory usage of DSM + population: "
         << (double)maxMemory/(double)repeat + nInitial*ell/8 << " bytes." << endl;

    printf ("\n");
    printf ("%f %d\n", stGen.getMean (), failNum);

    return EXIT_SUCCESS;
}
bool ResultsExporter::writeOutput(ostream &stream) { mMessage = "Exporting results matrix..."; if (mpProgress != NULL) { mpProgress->updateProgress(mMessage, 0, NORMAL); } StepResource pStep(mMessage, "app", "D890E37C-B960-4527-8AAC-D62F2DE7A541"); RasterDataDescriptor* pDescriptor = dynamic_cast<RasterDataDescriptor*>(mpResults->getDataDescriptor()); if (pDescriptor == NULL) { mMessage = "Could not get the results data descriptor!"; if (mpProgress != NULL) { mpProgress->updateProgress(mMessage, 0, ERRORS); } pStep->finalize(Message::Failure); return false; } VERIFY(mpResults != NULL); string name = mpResults->getName(); VERIFY(mpFileDescriptor != NULL); const vector<DimensionDescriptor>& rows = mpFileDescriptor->getRows(); const vector<DimensionDescriptor>& columns = mpFileDescriptor->getColumns(); unsigned int numRows = pDescriptor->getRowCount(); unsigned int numColumns = pDescriptor->getColumnCount(); EncodingType eDataType = pDescriptor->getDataType(); const vector<int>& badValues = pDescriptor->getBadValues(); if (mbMetadata) { stream << APP_NAME << " Results Raster\n"; stream << "Version = 4\n"; stream << "Results Name = " << name << "\n"; DataElement* pParent = mpResults->getParent(); if (pParent != NULL) { stream << "Data Set Name = " << pParent->getName() << "\n"; } stream << "Rows = " << numRows << "\n"; stream << "Columns = " << numColumns << "\n"; string dataType = StringUtilities::toDisplayString(eDataType); stream << "Data Type = " << dataType << "\n"; Statistics* pStatistics = mpResults->getStatistics(); if (pStatistics != NULL) { stream << "Min = " << pStatistics->getMin() << "\n"; stream << "Max = " << pStatistics->getMax() << "\n"; stream << "Average = " << pStatistics->getAverage() << "\n"; stream << "Standard Deviation = " << pStatistics->getStandardDeviation() << "\n\n"; } } RasterElement* pGeo = getGeoreferencedRaster(); DataAccessor da = mpResults->getDataAccessor(); if (!da.isValid()) { mMessage = "Could not access the data in the results raster!"; if (mpProgress != NULL) { mpProgress->updateProgress(mMessage, 0, ERRORS); } pStep->finalize(Message::Failure); return false; } unsigned int activeRowNumber = 0; for (unsigned int r = 0; r < rows.size(); ++r) { if (mbAbort) { mMessage = "Results exporter aborted!"; if (mpProgress != NULL) { mpProgress->updateProgress(mMessage, 0, ABORT); } pStep->finalize(Message::Abort); return false; } DimensionDescriptor rowDim = rows[r]; // Skip to the next row for (; activeRowNumber < rowDim.getActiveNumber(); ++activeRowNumber) { da->nextRow(); } unsigned int activeColumnNumber = 0; for (unsigned int c = 0; c < columns.size(); ++c) { DimensionDescriptor columnDim = columns[c]; // Skip to the next column for (; activeColumnNumber < columnDim.getActiveNumber(); ++activeColumnNumber) { da->nextColumn(); } VERIFY(da.isValid()); double dValue = ModelServices::getDataValue(eDataType, da->getColumn(), COMPLEX_MAGNITUDE, 0); if (isValueExported(dValue, badValues)) { string location = getLocationString(r, c, pGeo); char buffer[1024]; sprintf(buffer, "%lf\n", dValue); stream << name << " " << location << " " << buffer; } } // Update the progress int iProgress = (r * 100) / rows.size(); if (iProgress == 100) { iProgress = 99; } if (mpProgress != NULL) { mpProgress->updateProgress(mMessage, iProgress, NORMAL); } } stream << "\n"; return true; }
/** The ISIS smtk main application */ void IsisMain() { UserInterface &ui = Application::GetUserInterface(); // Open the first cube. It is the left hand image. Cube lhImage; CubeAttributeInput &attLeft = ui.GetInputAttribute("FROM"); vector<QString> bandLeft = attLeft.bands(); lhImage.setVirtualBands(bandLeft); lhImage.open(ui.GetFileName("FROM"),"r"); // Open the second cube, it is geomertricallty altered. We will be matching the // first to this one by attempting to compute a sample/line offsets Cube rhImage; CubeAttributeInput &attRight = ui.GetInputAttribute("MATCH"); vector<QString> bandRight = attRight.bands(); rhImage.setVirtualBands(bandRight); rhImage.open(ui.GetFileName("MATCH"),"r"); // Ensure only single bands if (lhImage.bandCount() != 1 || rhImage.bandCount() != 1) { QString msg = "Input Cubes must have only one band!"; throw IException(IException::User,msg,_FILEINFO_); } // Both images must have a Camera and can also have a Projection. We will // only deal with a Camera, however as a projected, non-mosaicked image // uses a Projection internal to the Camera object. Camera *lhCamera = NULL; Camera *rhCamera = NULL; try { lhCamera = lhImage.camera(); rhCamera = rhImage.camera(); } catch (IException &ie) { QString msg = "Both input images must have a camera"; throw IException(ie, IException::User, msg, _FILEINFO_); } // Since we are generating a DEM, we must turn off any existing // DEM that may have been initialized with spiceinit. lhCamera->IgnoreElevationModel(true); rhCamera->IgnoreElevationModel(true); // Get serial number QString serialLeft = SerialNumber::Compose(lhImage, true); QString serialRight = SerialNumber::Compose(rhImage, true); // This still precludes band to band registrations. if (serialLeft == serialRight) { QString sLeft = FileName(lhImage.fileName()).name(); QString sRight = FileName(rhImage.fileName()).name(); if (sLeft == sRight) { QString msg = "Cube Serial Numbers must be unique - FROM=" + serialLeft + ", MATCH=" + serialRight; throw IException(IException::User,msg,_FILEINFO_); } serialLeft = sLeft; serialRight = sRight; } Progress prog; prog.SetText("Finding Initial Seeds"); int nl = lhImage.lineCount(); int ns = lhImage.sampleCount(); BigInt numAttemptedInitialPoints = 0; // Declare Gruen matcher SmtkMatcher matcher(ui.GetFileName("REGDEF"), &lhImage, &rhImage); // Get line/sample linc/sinc parameters int space = ui.GetInteger("SPACE"); int linc (space), sinc(space); // Do we have a seed points from a control net file? 
bool useseed = ui.WasEntered("CNET"); // Base points on an input cnet SmtkQStack gstack; double lastEigen(0.0); if (useseed) { ControlNet cnet(ui.GetFileName("CNET")); prog.SetMaximumSteps(cnet.GetNumPoints()); prog.CheckStatus(); gstack.reserve(cnet.GetNumPoints()); for (int cpIndex = 0; cpIndex < cnet.GetNumPoints(); cpIndex ++) { ControlPoint *cp = cnet.GetPoint(cpIndex); if (!cp->IsIgnored()) { ControlMeasure *cmLeft(0), *cmRight(0); for(int cmIndex = 0; cmIndex < cp->GetNumMeasures(); cmIndex ++) { ControlMeasure *cm = cp->GetMeasure(cmIndex); if (!cm->IsIgnored()) { if (cm->GetCubeSerialNumber() == serialLeft) cmLeft = cp->GetMeasure(cmIndex); if (cm->GetCubeSerialNumber() == serialRight) cmRight = cp->GetMeasure(cmIndex); } } // If we have both left and right images in the control point, save it if ( (cmLeft != 0) && (cmRight != 0) ) { Coordinate left = Coordinate(cmLeft->GetLine(), cmLeft->GetSample()); Coordinate right = Coordinate(cmRight->GetLine(), cmRight->GetSample()); SmtkPoint spnt = matcher.Create(left, right); // Insert the point (unregistered) if ( spnt.isValid() ) { int line = (int) cmLeft->GetLine(); int samp = (int) cmLeft->GetSample(); matcher.isValid(spnt); gstack.insert(qMakePair(line, samp), spnt); lastEigen = spnt.GoodnessOfFit(); } } } prog.CheckStatus(); } } else { // We want to create a grid of control points that is N rows by M columns. int rows = (lhImage.lineCount() + linc - 1)/linc; int cols = (lhImage.sampleCount() + sinc - 1)/sinc; prog.SetMaximumSteps(rows * cols); prog.CheckStatus(); // First pass stack and eigen value statistics SmtkQStack fpass; fpass.reserve(rows * cols); Statistics temp_mev; // Loop through grid of points and get statistics to compute // initial set of points for (int line = linc / 2 + 1; line < nl; line += linc) { for (int samp = sinc / 2 + 1 ; samp < ns; samp += sinc) { numAttemptedInitialPoints ++; SmtkPoint spnt = matcher.Register(Coordinate(line,samp)); if ( spnt.isValid() ) { matcher.isValid(spnt); fpass.insert(qMakePair(line, samp), spnt); temp_mev.AddData(spnt.GoodnessOfFit()); } prog.CheckStatus(); } } // Now select a subset of fpass points as the seed points cout << "Number of Potential Seed Points: " << fpass.size() << "\n"; cout << "Min / Max Eigenvalues Matched: " << temp_mev.Minimum() << ", " << temp_mev.Maximum() << "\n"; // How many seed points are requested double nseed = ui.GetDouble("NSEED"); int inseed; if (nseed >= 1.0) inseed = (int) nseed; else if (nseed > 0.0) inseed = (int) (nseed * (double) (fpass.size())); else inseed = (int) ((double) (fpass.size()) * 0.05); double seedsample = ui.GetDouble("SEEDSAMPLE"); // Generate a new stack gstack.reserve(inseed); while ((gstack.size() < inseed) && (!fpass.isEmpty() )) { SmtkQStack::iterator bestm; if (seedsample <= 0.0) { bestm = matcher.FindSmallestEV(fpass); } else { bestm = matcher.FindExpDistEV(fpass, seedsample, temp_mev.Minimum(), temp_mev.Maximum()); } // Add point to stack if (bestm != fpass.end()) { Coordinate right = bestm.value().getRight(); matcher.isValid(bestm.value()); gstack.insert(bestm.key(), bestm.value()); lastEigen = bestm.value().GoodnessOfFit(); fpass.erase(bestm); } } // If a user wants to see the seed network, write it out here if (ui.WasEntered("OSEEDNET")) { WriteCnet(ui.GetFileName("OSEEDNET"), gstack, lhCamera->target()->name(), serialLeft, serialRight); } } /////////////////////////////////////////////////////////////////////// // All done with seed points. Sanity check ensures we actually found // some. 
/////////////////////////////////////////////////////////////////////// if (gstack.size() <= 0) { QString msg = "No seed points found - may need to check Gruen parameters."; throw IException(IException::User, msg, _FILEINFO_); } // Report seed point status if (!useseed) { cout << "Number of Seed Points used: " << gstack.size() << "\n"; cout << "EV of last Seed Point: " << lastEigen << "\n"; } else { cout << "Number of Manual Seed Points: " << gstack.size() << "\n"; } // Use seed points (in stack) to grow SmtkQStack bmf; bmf.reserve(gstack.size()); // Probably need much more but for starters... BigInt numOrigPoints = gstack.size(); BigInt passpix2 = 0; int subcbox = ui.GetInteger("SUBCBOX"); int halfBox((subcbox-1)/2); while (!gstack.isEmpty()) { SmtkQStackIter cstack = matcher.FindSmallestEV(gstack); // Print number on stack if ((gstack.size() % 1000) == 0) { cout << "Number on Stack: " << gstack.size() << ". " << cstack.value().GoodnessOfFit() << "\n"; } // Test to see if already determined SmtkQStackIter bmfPt = bmf.find(cstack.key()); if (bmfPt == bmf.end()) { // Its not in the final stack, process it // Retrieve the point SmtkPoint spnt = cstack.value(); // Register if its not already registered if (!spnt.isRegistered()) { spnt = matcher.Register(spnt, spnt.getAffine()); } // Still must check for validity if the point was just registered, // otherwise should be good if ( spnt.isValid() ) { passpix2++; bmf.insert(cstack.key(), spnt); // inserts (0,0) offset excluded below int line = cstack.key().first; int sample = cstack.key().second; // Determine match points double eigen(spnt.GoodnessOfFit()); for (int sampBox = -halfBox ; sampBox <= halfBox ; sampBox++ ) { int csamp = sample + sampBox; for (int lineBox = -halfBox ; lineBox <= halfBox ; lineBox++) { int cline = line + lineBox; if ( !( (sampBox == 0) && (lineBox == 0)) ) {// Already added above SmtkQPair dupPair(cline, csamp); SmtkQStackIter temp = bmf.find(dupPair); SmtkPoint bmfpnt; if (temp != bmf.end()) { if (temp.value().GoodnessOfFit() > eigen) { // Create cloned point with better fit bmfpnt = matcher.Clone(spnt, Coordinate(cline,csamp)); } } else { // ISIS2 is BMF(SAMP,LINE,7) .EQ VALID_MAX4) // Clone new point for insert bmfpnt = matcher.Clone(spnt, Coordinate(cline,csamp)); } // Add if good point if (bmfpnt.isValid()) { bmf.insert(dupPair, bmfpnt); } } } } // Grow stack with spacing adding info to stack for (int i = -1 ; i <= 1 ; i ++) { // Sample for (int j = -1 ; j <= 1 ; j ++) { // Line // Don't re-add the original sample, line if ( !((i == 0) && (j == 0)) ) { // Grow based upon spacing double ssamp = sample + (i * space); double sline = line + (j * space); Coordinate pnt = Coordinate(sline, ssamp); SmtkPoint gpnt = matcher.Clone(spnt, pnt); if ( gpnt.isValid() ) { SmtkQPair growpt((int) sline, (int) ssamp); // double check we don't have a finalized result at this position SmtkQStackIter temp = bmf.find(growpt); if(temp == bmf.end()) { gstack.insert(growpt, gpnt); } } } } } } } // Remove the current point from the grow stack (hole) gstack.erase(cstack); } ///////////////////////////////////////////////////////////////////////// // All done with creating points. Perform output options. 
///////////////////////////////////////////////////////////////////////// // If a TO parameter was specified, create DEM with errors if (ui.WasEntered("TO")) { // Create the output DEM cout << "\nCreating output DEM from " << bmf.size() << " points.\n"; Process p; Cube *icube = p.SetInputCube("FROM"); Cube *ocube = p.SetOutputCube("TO", icube->sampleCount(), icube->lineCount(), 3); p.ClearInputCubes(); int boxsize = ui.GetInteger("BOXSIZE"); double plotdist = ui.GetDouble("PLOTDIST"); TileManager dem(*ocube), eigen(*ocube), stErr(*ocube); dem.SetTile(1, 1); // DEM Data/elevation stErr.SetTile(1, 2); // Error in stereo computation eigen.SetTile(1, 3); // Eigenvalue of the solution int nBTiles(eigen.Tiles()/3); // Total tiles / 3 bands prog.SetText("Creating DEM"); prog.SetMaximumSteps(nBTiles); prog.CheckStatus(); Statistics stAng; while ( !eigen.end() ) { // Must use the last band for this!! PointPlot tm = for_each(bmf.begin(), bmf.end(), PointPlot(dem, plotdist)); tm.FillPoints(*lhCamera, *rhCamera, boxsize, dem, stErr, eigen, &stAng); ocube->write(dem); ocube->write(stErr); ocube->write(eigen); dem.next(); stErr.next(); eigen.next(); prog.CheckStatus(); } // Report Stereo separation angles PvlGroup stresultsPvl("StereoSeparationAngle"); stresultsPvl += PvlKeyword("Minimum", toString(stAng.Minimum()), "deg"); stresultsPvl += PvlKeyword("Average", toString(stAng.Average()), "deg"); stresultsPvl += PvlKeyword("Maximum", toString(stAng.Maximum()), "deg"); stresultsPvl += PvlKeyword("StandardDeviation", toString(stAng.StandardDeviation()), "deg"); Application::Log(stresultsPvl); // Update the label with BandBin keywords PvlKeyword filter("FilterName", "Elevation", "meters"); filter.addValue("ElevationError", "meters"); filter.addValue("GoodnessOfFit", "unitless"); PvlKeyword center("Center", "1.0"); center.addValue("1.0"); center.addValue("1.0"); PvlGroup &bandbin = ocube->label()->findGroup("BandBin", PvlObject::Traverse); bandbin.addKeyword(filter, PvlContainer::Replace); bandbin.addKeyword(center, PvlContainer::Replace); center.setName("Width"); bandbin.addKeyword(center, PvlContainer::Replace); p.EndProcess(); } // If a cnet file was entered, write the ControlNet pvl to the file if (ui.WasEntered("ONET")) { WriteCnet(ui.GetFileName("ONET"), bmf, lhCamera->target()->name(), serialLeft, serialRight); } // Create output data PvlGroup totalPointsPvl("Totals"); totalPointsPvl += PvlKeyword("AttemptedPoints", toString(numAttemptedInitialPoints)); totalPointsPvl += PvlKeyword("InitialSuccesses", toString(numOrigPoints)); totalPointsPvl += PvlKeyword("GrowSuccesses", toString(passpix2)); totalPointsPvl += PvlKeyword("ResultingPoints", toString(bmf.size())); Application::Log(totalPointsPvl); Pvl arPvl = matcher.RegistrationStatistics(); PvlGroup smtkresultsPvl("SmtkResults"); smtkresultsPvl += PvlKeyword("SpiceOffImage", toString(matcher.OffImageErrorCount())); smtkresultsPvl += PvlKeyword("SpiceDistanceError", toString(matcher.SpiceErrorCount())); arPvl.addGroup(smtkresultsPvl); for(int i = 0; i < arPvl.groups(); i++) { Application::Log(arPvl.group(i)); } // add the auto registration information to print.prt PvlGroup autoRegTemplate = matcher.RegTemplate(); Application::Log(autoRegTemplate); // Don't need the cubes opened anymore lhImage.close(); rhImage.close(); }
void Facility::GenerateStandartStatistics(int thread, int mask) { Statistics *s; char buf[16]; snprintf(buf,sizeof(buf),"%d",thread); if(mask&1) { s=new IntervalStatistics(true,thread); if(thread!=-1) s->SetName("w_"+name + "_" + buf); else s->SetName("w_"+name); s->SetNode(0); stats[0].push_back(s); s=new IntervalStatistics(false,thread); if(thread!=-1) s->SetName("w_"+name + "_" + buf); else s->SetName("w_"+name); s->SetNode(0); stats[1].push_back(s); } if(mask&2) { s=new IntervalStatistics(true,thread); if(thread!=-1) s->SetName(name + "_" + buf); else s->SetName(name); s->SetNode(0); stats[0].push_back(s); s=new IntervalStatistics(false,thread); if(thread!=-1) s->SetName(name + "_" + buf); else s->SetName(name); s->SetNode(0); stats[2].push_back(s); } }
/** * This calculates the coefficients for specific energy corrections */ void calculateSpecificEnergy(Cube *icube) { PvlGroup &inst = icube->label()->findGroup("Instrument", Pvl::Traverse); bool vis = (inst["Channel"][0] != "IR"); double coefficient = 1.0; if(inst["GainMode"][0] == "HIGH") { coefficient /= 2; } if(vis && inst["SamplingMode"][0] == "HI-RES") { coefficient *= 3; } if(vis) { coefficient /= toDouble(inst["ExposureDuration"][1]) / 1000.0; } else { coefficient /= (toDouble(inst["ExposureDuration"][0]) * 1.01725) / 1000.0 - 0.004; } QString specEnergyFile = "$cassini/calibration/vims/"; if(vis) { specEnergyFile += "vis_perf_v????.cub"; } else { specEnergyFile += "ir_perf_v????.cub"; } QString waveCalFile = "$cassini/calibration/vims/wavecal_v????.cub"; FileName specEnergyFileName(specEnergyFile); specEnergyFileName = specEnergyFileName.highestVersion(); FileName waveCalFileName(waveCalFile); waveCalFileName = waveCalFileName.highestVersion(); Cube specEnergyCube; specEnergyCube.open(specEnergyFileName.expanded()); Cube waveCalCube; waveCalCube.open(waveCalFileName.expanded()); LineManager specEnergyMgr(specEnergyCube); LineManager waveCalMgr(waveCalCube); for(int i = 0; i < icube->bandCount(); i++) { Statistics specEnergyStats; Statistics waveCalStats; if(vis) { specEnergyMgr.SetLine(1, i + 1); waveCalMgr.SetLine(1, i + 1); } else { specEnergyMgr.SetLine(1, i + 1); // ir starts at band 97 waveCalMgr.SetLine(1, i + 96 + 1); } specEnergyCube.read(specEnergyMgr); waveCalCube.read(waveCalMgr); specEnergyStats.AddData(specEnergyMgr.DoubleBuffer(), specEnergyMgr.size()); waveCalStats.AddData(waveCalMgr.DoubleBuffer(), waveCalMgr.size()); double bandCoefficient = coefficient * specEnergyStats.Average() * waveCalStats.Average(); specificEnergyCorrections.push_back(bandCoefficient); } }
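// A compact restatement of the per-cube coefficient computed above, with the
// label lookups replaced by plain parameters. This is a sketch for reading the
// logic, not the VIMS calibration code; visCoefficient/irCoefficient and their
// arguments are hypothetical names.
double visCoefficient(bool highGain, bool hiRes, double exposureMs) {
  double c = 1.0;
  if (highGain) c /= 2.0;   // HIGH gain mode halves the coefficient
  if (hiRes)    c *= 3.0;   // HI-RES sampling triples it (VIS only)
  return c / (exposureMs / 1000.0);
}

double irCoefficient(bool highGain, double exposureMs) {
  double c = highGain ? 0.5 : 1.0;
  return c / ((exposureMs * 1.01725) / 1000.0 - 0.004);
}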
// Function to write the statistics values (minimum, maximum, average, standard deviation) to the flat file void writeFlat (ofstream &os, Statistics &s){ os << ValidateValue(s.Minimum())<<","<< ValidateValue(s.Maximum())<<","<< ValidateValue(s.Average())<<","<< ValidateValue(s.StandardDeviation())<<","; }
void LoadEqualizer::notifyLoadData( Channel* channel, const uint32_t frameNumber, const Statistics& statistics, const Viewport& region ) { LBLOG( LOG_LB2 ) << statistics.size() << " samples from "<< channel->getName() << " @ " << frameNumber << std::endl; for( std::deque< LBFrameData >::iterator i = _history.begin(); i != _history.end(); ++i ) { LBFrameData& frameData = *i; if( frameData.first != frameNumber ) continue; // Found corresponding historical data set LBDatas& items = frameData.second; for( LBDatas::iterator j = items.begin(); j != items.end(); ++j ) { Data& data = *j; if( data.channel != channel ) continue; // Found corresponding historical data item const uint32_t taskID = data.taskID; LBASSERTINFO( taskID > 0, channel->getName( )); // gather relevant load data int64_t startTime = std::numeric_limits< int64_t >::max(); int64_t endTime = 0; bool loadSet = false; int64_t transmitTime = 0; for( size_t k = 0; k < statistics.size(); ++k ) { const Statistic& stat = statistics[k]; if( stat.task == data.destTaskID ) _updateAssembleTime( data, stat ); // from different compound if( stat.task != taskID || loadSet ) continue; switch( stat.type ) { case Statistic::CHANNEL_CLEAR: case Statistic::CHANNEL_DRAW: case Statistic::CHANNEL_READBACK: startTime = LB_MIN( startTime, stat.startTime ); endTime = LB_MAX( endTime, stat.endTime ); break; case Statistic::CHANNEL_ASYNC_READBACK: case Statistic::CHANNEL_FRAME_TRANSMIT: transmitTime += stat.endTime - stat.startTime; break; case Statistic::CHANNEL_FRAME_WAIT_SENDTOKEN: transmitTime -= stat.endTime - stat.startTime; break; // assemble blocks on input frames, stop using subsequent data case Statistic::CHANNEL_ASSEMBLE: loadSet = true; break; default: break; } } if( startTime == std::numeric_limits< int64_t >::max( )) return; data.vp.apply( region ); // Update ROI data.time = endTime - startTime; data.time = LB_MAX( data.time, 1 ); data.time = LB_MAX( data.time, transmitTime ); data.assembleTime = LB_MAX( data.assembleTime, 0 ); LBLOG( LOG_LB2 ) << "Added time " << data.time << " (+" << data.assembleTime << ") for " << channel->getName() << " " << data.vp << ", " << data.range << " @ " << frameNumber << std::endl; return; // Note: if the same channel is used twice as a child, the // load-compound association does not work. } } }
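// A condensed sketch of the per-channel load figure derived above: the
// draw/readback interval is clamped to at least one tick and to at least the
// accumulated transmit time, so a channel never reports a zero or
// transmit-dominated load. channelLoadTime is an illustrative helper, not an
// Equalizer API.
#include <algorithm>
#include <stdint.h>

int64_t channelLoadTime(int64_t startTime, int64_t endTime, int64_t transmitTime) {
  int64_t time = endTime - startTime;
  time = std::max(time, (int64_t)1);
  time = std::max(time, transmitTime);
  return time;
}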
void IsisMain(){ Process p; // Reset all the stats objects because they are global latStat.Reset(); lonStat.Reset(); resStat.Reset(); sampleResStat.Reset(); lineResStat.Reset(); aspectRatioStat.Reset(); phaseStat.Reset(); emissionStat.Reset(); incidenceStat.Reset(); localSolarTimeStat.Reset(); localRaduisStat.Reset(); northAzimuthStat.Reset(); UserInterface &ui = Application::GetUserInterface(); Cube *icube = p.SetInputCube("FROM"); Camera *cam = icube->Camera(); // Cube cube; // cube.Open(ui.GetFilename("FROM")); // Camera *cam = cube.Camera(); int eband = cam->Bands(); // if the camera is band independent that only run one band if (cam->IsBandIndependent()) eband = 1; int linc = ui.GetInteger("LINC"); int sinc = ui.GetInteger("SINC"); int pTotal = eband * ((cam->Lines()-2) / linc + 2) ; Progress progress; progress.SetMaximumSteps(pTotal); progress.CheckStatus(); for (int band=1; band<=eband; band++) { cam->SetBand(band); for (int line=1; line<(int)cam->Lines(); line=line+linc) { for (int sample=1; sample< cam->Samples(); sample=sample+sinc) { buildStats(cam, sample, line); } //set the sample value to the last sample and run buildstats int sample = cam->Samples(); buildStats(cam, sample, line); progress.CheckStatus(); } //set the line value to the last line and run on all samples(sample + sinc) int line = cam->Lines(); for (int sample=1; sample< cam->Samples(); sample=sample+sinc) { buildStats(cam, sample, line); } //set last sample and run with last line int sample = cam->Samples(); buildStats(cam, sample, line); progress.CheckStatus(); } //Set up the Pvl groups and get min, max, avg, and sd for each statstics object PvlGroup pUser("User Parameters"); pUser += PvlKeyword("Filename",ui.GetFilename("FROM")); pUser += PvlKeyword("Linc",ui.GetInteger("LINC")); pUser += PvlKeyword("Sinc",ui.GetInteger("SINC")); PvlGroup pLat("Latitude"); pLat += ValidateKey("LatitudeMinimum",latStat.Minimum()); pLat += ValidateKey("LatitudeMaximum",latStat.Maximum()); pLat += ValidateKey("LatitudeAverage",latStat.Average()); pLat += ValidateKey("LatitudeStandardDeviation",latStat.StandardDeviation()); PvlGroup pLon("Longitude"); pLon += ValidateKey("LongitudeMinimum",lonStat.Minimum()); pLon += ValidateKey("LongitudeMaximum",lonStat.Maximum()); pLon += ValidateKey("LongitudeAverage",lonStat.Average()); pLon += ValidateKey("LongitudeStandardDeviation",lonStat.StandardDeviation()); PvlGroup pSampleRes("SampleResolution"); pSampleRes += ValidateKey("SampleResolutionMinimum",sampleResStat.Minimum(), "meters/pixel"); pSampleRes += ValidateKey("SampleResolutionMaximum",sampleResStat.Maximum(), "meters/pixel"); pSampleRes += ValidateKey("SampleResolutionAverage",sampleResStat.Average(), "meters/pixel"); pSampleRes += ValidateKey("SampleResolutionStandardDeviation", sampleResStat.StandardDeviation(),"meters/pixel"); PvlGroup pLineRes("LineResolution"); pLineRes += ValidateKey("LineResolutionMinimum",lineResStat.Minimum(), "meters/pixel"); pLineRes += ValidateKey("LineResolutionMaximum",lineResStat.Maximum(), "meters/pixel"); pLineRes += ValidateKey("LineResolutionAverage",lineResStat.Average(), "meters/pixel"); pLineRes += ValidateKey("LineResolutionStandardDeviation", lineResStat.StandardDeviation(),"meters/pixel"); PvlGroup pResolution("Resolution"); pResolution += ValidateKey("ResolutionMinimum",resStat.Minimum(), "meters/pixel"); pResolution += ValidateKey("ResolutionMaximum",resStat.Maximum(), "meters/pixel"); pResolution += ValidateKey("ResolutionAverage",resStat.Average(), "meters/pixel"); pResolution += 
ValidateKey("ResolutionStandardDeviation", resStat.StandardDeviation(),"meters/pixel"); PvlGroup pAspectRatio("AspectRatio"); pAspectRatio += ValidateKey("AspectRatioMinimum",aspectRatioStat.Minimum()); pAspectRatio += ValidateKey("AspectRatioMaximun",aspectRatioStat.Maximum()); pAspectRatio += ValidateKey("AspectRatioAverage",aspectRatioStat.Average()); pAspectRatio += ValidateKey("AspectRatioStandardDeviation", aspectRatioStat.StandardDeviation()); PvlGroup pPhase("PhaseAngle"); pPhase += ValidateKey("PhaseMinimum",phaseStat.Minimum()); pPhase += ValidateKey("PhaseMaximum",phaseStat.Maximum()); pPhase += ValidateKey("PhaseAverage",phaseStat.Average()); pPhase += ValidateKey("PhaseStandardDeviation",phaseStat.StandardDeviation()); PvlGroup pEmission("EmissionAngle"); pEmission += ValidateKey("EmissionMinimum",emissionStat.Minimum()); pEmission += ValidateKey("EmissionMaximum",emissionStat.Maximum()); pEmission += ValidateKey("EmissionAverage",emissionStat.Average()); pEmission += ValidateKey("EmissionStandardDeviation", emissionStat.StandardDeviation()); PvlGroup pIncidence("IncidenceAngle"); pIncidence += ValidateKey("IncidenceMinimum",incidenceStat.Minimum()); pIncidence += ValidateKey("IncidenceMaximum",incidenceStat.Maximum()); pIncidence += ValidateKey("IncidenceAverage",incidenceStat.Average()); pIncidence += ValidateKey("IncidenceStandardDeviation", incidenceStat.StandardDeviation()); PvlGroup pTime("LocalSolarTime"); pTime += ValidateKey("LocalSolarTimeMinimum",localSolarTimeStat.Minimum(), "hours"); pTime += ValidateKey("LocalSolarTimeMaximum",localSolarTimeStat.Maximum(), "hours"); pTime += ValidateKey("LocalSolarTimeAverage",localSolarTimeStat.Average(), "hours"); pTime += ValidateKey("LocalSolarTimeStandardDeviation", localSolarTimeStat.StandardDeviation(),"hours"); PvlGroup pLocalRadius("LocalRadius"); pLocalRadius += ValidateKey("LocalRadiusMinimum",localRaduisStat.Minimum()); pLocalRadius += ValidateKey("LocalRadiusMaximum",localRaduisStat.Maximum()); pLocalRadius += ValidateKey("LocalRadiusAverage",localRaduisStat.Average()); pLocalRadius += ValidateKey("LocalRadiusStandardDeviation", localRaduisStat.StandardDeviation()); PvlGroup pNorthAzimuth("NorthAzimuth"); pNorthAzimuth += ValidateKey("NorthAzimuthMinimum",northAzimuthStat.Minimum()); pNorthAzimuth += ValidateKey("NorthAzimuthMaximum",northAzimuthStat.Maximum()); pNorthAzimuth += ValidateKey("NorthAzimuthAverage",northAzimuthStat.Average()); pNorthAzimuth += ValidateKey("NorthAzimuthStandardDeviation", northAzimuthStat.StandardDeviation()); // Send the Output to the log area Application::Log(pUser); Application::Log(pLat); Application::Log(pLon); Application::Log(pSampleRes); Application::Log(pLineRes); Application::Log(pResolution); Application::Log(pAspectRatio); Application::Log(pPhase); Application::Log(pEmission); Application::Log(pIncidence); Application::Log(pTime); Application::Log(pLocalRadius); Application::Log(pNorthAzimuth); if (ui.WasEntered("TO")) { string from = ui.GetFilename("FROM"); string outfile = Filename(ui.GetFilename("TO")).Expanded(); bool exists = Filename(outfile).Exists(); bool append = ui.GetBoolean("APPEND"); //If the user chooses a fromat of PVL then write to the output file ("TO") if (ui.GetString("FORMAT") == "PVL") { Pvl temp; temp.SetTerminator(""); temp.AddGroup(pUser); temp.AddGroup(pLat); temp.AddGroup(pLon); temp.AddGroup(pSampleRes); temp.AddGroup(pLineRes); temp.AddGroup(pResolution); temp.AddGroup(pAspectRatio); temp.AddGroup(pPhase); temp.AddGroup(pEmission); 
temp.AddGroup(pIncidence); temp.AddGroup(pTime); temp.AddGroup(pLocalRadius); temp.AddGroup(pNorthAzimuth); if (append) { temp.Append(outfile); } else { temp.Write(outfile); } } //Create a flatfile of the data with column headings // the flatfile is comma delimited and can be imported into spreadsheets else { ofstream os; bool writeHeader = true; if (append) { os.open(outfile.c_str(),ios::app); if (exists) { writeHeader = false; } } else { os.open(outfile.c_str(),ios::out); } // if new file or append and no file exists then write header if(writeHeader){ os << "Filename,"<< "LatitudeMinimum,"<< "LatitudeMaximum,"<< "LatitudeAverage,"<< "LatitudeStandardDeviation,"<< "LongitudeMinimum,"<< "LongitudeMaximum,"<< "LongitudeAverage,"<< "LongitudeStandardDeviation,"<< "SampleResolutionMinimum,"<< "SampleResolutionMaximum,"<< "SampleResolutionAverage,"<< "SampleResolutionStandardDeviation,"<< "LineResolutionMinimum,"<< "LineResolutionMaximum,"<< "LineResolutionAverage,"<< "LineResolutionStandardDeviation,"<< "ResolutionMinimum,"<< "ResolutionMaximum,"<< "ResolutionAverage,"<< "ResolutionStandardDeviation,"<< "AspectRatioMinimum,"<< "AspectRatioMaximum,"<< "AspectRatioAverage,"<< "AspectRatioStandardDeviation,"<< "PhaseMinimum,"<< "PhaseMaximum,"<< "PhaseAverage,"<< "PhaseStandardDeviation,"<< "EmissionMinimum,"<< "EmissionMaximum,"<< "EmissionAverage,"<< "EmissionStandardDeviation,"<< "IncidenceMinimum,"<< "IncidenceMaximum,"<< "IncidenceAverage,"<< "IncidenceStandardDeviation,"<< "LocalSolarTimeMinimum,"<< "LocalSolarTimeMaximum,"<< "LocalSolarTimeAverage,"<< "LocalSolarTimeStandardDeviation,"<< "LocalRadiusMinimum,"<< "LocalRadiusMaximum,"<< "LocalRadiusAverage,"<< "LocalRadiusStandardDeviation,"<< "NorthAzimuthMinimum,"<< "NorthAzimuthMaximum,"<< "NorthAzimuthAverage,"<< "NorthAzimuthStandardDeviation,"<<endl; } os << Filename(from).Expanded() <<","; //call the function to write out the values for each group writeFlat(os, latStat); writeFlat(os, lonStat); writeFlat(os, sampleResStat); writeFlat(os, lineResStat); writeFlat(os, resStat); writeFlat(os, aspectRatioStat); writeFlat(os, phaseStat); writeFlat(os, emissionStat); writeFlat(os, incidenceStat); writeFlat(os, localSolarTimeStat); writeFlat(os, localRaduisStat); writeFlat(os, northAzimuthStat); os << endl; } } if( ui.GetBoolean("ATTACH") ) { string cam_name = "CameraStatistics"; //Creates new CameraStatistics Table TableField fname( "Name", Isis::TableField::Text, 20 ); TableField fmin( "Minimum", Isis::TableField::Double ); TableField fmax( "Maximum", Isis::TableField::Double ); TableField favg( "Average", Isis::TableField::Double ); TableField fstd( "StandardDeviation", Isis::TableField::Double ); TableRecord record; record += fname; record += fmin; record += fmax; record += favg; record += fstd; Table table( cam_name, record ); vector<PvlGroup> grps; grps.push_back( pLat ); grps.push_back( pLon ); grps.push_back( pSampleRes ); grps.push_back( pLineRes ); grps.push_back( pResolution ); grps.push_back( pAspectRatio ); grps.push_back( pPhase ); grps.push_back( pEmission ); grps.push_back( pIncidence ); grps.push_back( pTime ); grps.push_back( pLocalRadius ); grps.push_back( pNorthAzimuth ); for( vector<PvlGroup>::iterator g = grps.begin(); g != grps.end(); g++ ) { int i = 0; record[i++] = g->Name(); record[i++] = (double) (*g)[0][0]; record[i++] = (double) (*g)[1][0]; record[i++] = (double) (*g)[2][0]; record[i++] = (double) (*g)[3][0]; table += record; } icube->ReOpen( "rw" ); icube->Write( table ); p.WriteHistory(*icube); 
icube->Close(); } }
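// A minimal sketch of the running accumulator this program leans on: each
// AddData-style call updates min, max, sum and sum of squares so the summary
// values can be reported without storing every sample. RunningStats is an
// illustrative stand-in, not the ISIS Statistics class (which also tracks
// special-pixel counts).
#include <algorithm>
#include <cmath>
#include <limits>

class RunningStats {
public:
  void AddData(double v) {
    min_ = std::min(min_, v);
    max_ = std::max(max_, v);
    sum_ += v;
    sumsq_ += v * v;
    n_++;
  }
  double Minimum() const { return min_; }
  double Maximum() const { return max_; }
  double Average() const { return n_ > 0 ? sum_ / n_ : 0.0; }
  double StandardDeviation() const {    // sample standard deviation
    if (n_ < 2) return 0.0;
    double mean = sum_ / n_;
    double var = (sumsq_ - n_ * mean * mean) / (n_ - 1);
    return var > 0.0 ? std::sqrt(var) : 0.0;
  }
private:
  double min_ = std::numeric_limits<double>::max();
  double max_ = std::numeric_limits<double>::lowest();
  double sum_ = 0.0, sumsq_ = 0.0;
  long n_ = 0;
};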
// stats bool RenderBin::getStats(Statistics& stats) const { stats.addBins(1); // different by return type - collects the stats in this renderrBin bool statsCollected = false; stats.addOrderedLeaves(_renderLeafList.size()); // draw fine grained ordering. for(RenderLeafList::const_iterator dw_itr = _renderLeafList.begin(); dw_itr != _renderLeafList.end(); ++dw_itr) { const RenderLeaf* rl = *dw_itr; const Drawable* dw= rl->getDrawable(); stats.addDrawable(); // number of geosets const Geometry* geom = dw->asGeometry(); if (geom) { stats.addFastDrawable(); } if (rl->_modelview.get()) { stats.addMatrix(); // number of matrices } if (dw) { // then tot up the primitive types and no vertices. dw->accept(stats); // use sub-class to find the stats for each drawable } statsCollected = true; } stats.addStateGraphs(_stateGraphList.size()); for(StateGraphList::const_iterator oitr=_stateGraphList.begin(); oitr!=_stateGraphList.end(); ++oitr) { for(StateGraph::LeafList::const_iterator dw_itr = (*oitr)->_leaves.begin(); dw_itr != (*oitr)->_leaves.end(); ++dw_itr) { const RenderLeaf* rl = dw_itr->get(); const Drawable* dw= rl->getDrawable(); stats.addDrawable(); // number of geosets const Geometry* geom = dw->asGeometry(); if (geom) { stats.addFastDrawable(); } if (rl->_modelview.get()) stats.addMatrix(); // number of matrices if (dw) { // then tot up the primitive types and no vertices. dw->accept(stats); // use sub-class to find the stats for each drawable } } statsCollected = true; } // now collects stats for any subbins. for(RenderBinList::const_iterator itr = _bins.begin(); itr!=_bins.end(); ++itr) { if (itr->second->getStats(stats)) { statsCollected = true; } } return statsCollected; }
/** * This method is the pass 2 processing routine. A ProcessByBrick * will call this method for sets of data (depending on the camera * type) and this method is responsible for writing the entire output * temporary cube. * * @param in Input raw image data, not including excluded files */ void CreateTemporaryData(Buffer &in) { /** * Line scan cameras process by frame columns. */ if(cameraType == LineScan) { // The statistics of every column of data need to be known // before we can write to the temp file. Gather stats for this // column. Statistics inputColStats; for(int i = 0; i < in.size(); i++) { inputColStats.AddData(in[i]); // We'll also need the stats for the entire frame in order to // normalize and in order to decide whether or not we want // to toss out the frame inputFrameStats.AddData(in[i]); } // Store off the column stats outputTmpAverages[in.Sample()-1] = inputColStats.Average(); outputTmpCounts[in.Sample()-1] = inputColStats.ValidPixels(); // Test if this is the last column and we've got all of our stats if(in.Sample() == numOutputSamples) { // Decide if we want this data if(IsSpecial(inputFrameStats.StandardDeviation()) || inputFrameStats.StandardDeviation() > maxStdev) { // We don't want this data... // CreateNullData is a helper method for this case that // nulls out the stats CreateNullData(); // Record the exclusion PvlGroup currExclusion("ExcludedLines"); currExclusion += PvlKeyword("FrameStartLine", iString(in.Line())); currExclusion += PvlKeyword("ValidPixels", iString(inputFrameStats.ValidPixels())); if(!IsSpecial(inputFrameStats.StandardDeviation())) currExclusion += PvlKeyword("StandardDeviation", inputFrameStats.StandardDeviation()); else currExclusion += PvlKeyword("StandardDeviation", "N/A"); excludedDetails[excludedDetails.size()-1].AddGroup(currExclusion); } // Let's write our data... CreateNullData took care of nulls for us // Band 1 is our normalized average oLineMgr->SetLine(oLineMgr->Line(),1); for(int i = 0; i < (int)outputTmpAverages.size(); i++) { if(!IsSpecial(outputTmpAverages[i])) { (*oLineMgr)[i] = outputTmpAverages[i] / inputFrameStats.Average(); } else { (*oLineMgr)[i] = Isis::Null; } } ocube->Write(*oLineMgr); oLineMgr->SetLine(oLineMgr->Line(),2); // band 2 is our valid dn counts for(int i = 0; i < (int)outputTmpCounts.size(); i++) { (*oLineMgr)[i] = outputTmpCounts[i]; numInputDns[i] += (int)(outputTmpCounts[i] + 0.5); } ocube->Write(*oLineMgr); (*oLineMgr) ++; inputFrameStats.Reset(); } } else if(cameraType == Framing || cameraType == PushFrame) { // Framing cameras and push frames are treated identically; // the framelet size for a framelet in the framing camera // is the entire image! 
int framelet = (in.Line()-1) / numFrameLines; double stdev; bool excluded = Excluded(currImage, framelet, stdev); if(excluded && ((in.Line()-1) % numFrameLines == 0)) { PvlGroup currExclusion("ExcludedFramelet"); currExclusion += PvlKeyword("FrameletStartLine", iString(in.Line())); currExclusion += PvlKeyword("FrameletNumber", (in.Line()-1) / numFrameLines); if(!IsSpecial(stdev)) { currExclusion += PvlKeyword("StandardDeviation", stdev); } else { currExclusion += PvlKeyword("StandardDeviation", "N/A"); } excludedDetails[excludedDetails.size()-1].AddGroup(currExclusion); } // Since this is a line by line iterative process, we need to get the current // data in the temp file oLineMgr->SetLine(((in.Line() - 1) % numFrameLines) + 1, 1); if(!excluded || !cubeInitialized) { ocube->Read(*oLineMgr); } if(!cubeInitialized) { for(int i = 0; i < oLineMgr->size(); i++) { (*oLineMgr)[i] = Isis::Null; } } vector<bool> isValidData; if(!excluded || !cubeInitialized) { isValidData.resize(in.size()); for(int samp = 0; samp < in.size(); samp++) { if(IsSpecial((*oLineMgr)[samp]) && !IsSpecial(in[samp])) { (*oLineMgr)[samp] = 0.0; } if(!IsSpecial(in[samp])) { isValidData[samp] = true; (*oLineMgr)[samp] += in[samp] / inputFrameletAverages[currImage][framelet]; } else { isValidData[samp] = false; } } } if(!excluded || !cubeInitialized) { ocube->Write(*oLineMgr); } oLineMgr->SetLine(oLineMgr->Line(), 2); if(!excluded || !cubeInitialized) { ocube->Read(*oLineMgr); } if(!cubeInitialized) { for(int i = 0; i < oLineMgr->size(); i++) { (*oLineMgr)[i] = Isis::Null; } if(ocube->Lines() == oLineMgr->Line()) cubeInitialized = true; } if(!excluded || !cubeInitialized) { for(int i = 0; i < (int)isValidData.size(); i++) { if(IsSpecial((*oLineMgr)[i])) { (*oLineMgr)[i] = 0.0; } if(isValidData[i]) { (*oLineMgr)[i] ++; } } } if(!excluded || !cubeInitialized) { ocube->Write(*oLineMgr); } } }
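// A small sketch of the line-scan normalization step above: each column
// average is divided by the whole-frame average so band 1 of the temporary
// cube holds relative column response rather than raw DN. Special-pixel
// handling is omitted here; normalizeColumns is an illustrative helper.
#include <vector>

std::vector<double> normalizeColumns(const std::vector<double> &columnAverages,
                                     double frameAverage) {
  std::vector<double> normalized(columnAverages.size(), 0.0);
  for (size_t i = 0; i < columnAverages.size(); i++) {
    if (frameAverage != 0.0) normalized[i] = columnAverages[i] / frameAverage;
  }
  return normalized;
}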
int main (int argc, char *argv[]) { if (argc != 7) { printf ("GA ell lower upper std mode maxOverlap\n"); return -1; } int ell = atoi(argv[1]); int lower = atoi(argv[2]); int upper = atoi(argv[3]); int std = atoi(argv[4]); int mode = atoi(argv[5]); int maxOverlap = atoi(argv[6]); //if ( std != -1 ) // generateMoneTest(ell,std); int round; int failround; int i, j; int populationSize; int minpopulationSize = 0; // // bisection // if (SHOW_DETAIL) { printf("***************\n"); printf("Bisection Phase\n"); printf("***************\n"); } failround = 0; for(round=0; round<BI_REPEAT; round++) { // populationSize = (int) (0.5 * ell * log((double)ell) / log(2.71828)) / 2; populationSize = lower/2; if ( populationSize <= 0 ) populationSize = 1; int left, right, middle; bool foundOptima = false; // bisection phase 1 if (lower < 0 || upper < 0) { if (SHOW_DETAIL) { printf("round %d phase 1\n", round+1); printf("---------------\n"); } do { populationSize *= 2; if (SHOW_DETAIL) printf("[%d]: ", populationSize); foundOptima = true; for (j=0; j<NUMCONV; j++) { if(std!=-1) generateMoneTest(ell,std,j,maxOverlap); DSMGA dsmga(ell, populationSize, 2, 1, 0, MAX_GEN, -1); //dsmga.doIt(false); dsmga.doIt(false,mode); if (!dsmga.foundOptima()) { foundOptima = false; if (SHOW_DETAIL) { printf("-"); fflush(NULL); } break; } if (SHOW_DETAIL) { printf("+"); fflush(NULL); } } if (SHOW_DETAIL) printf("\n"); } while (!foundOptima); left = populationSize/2; right = populationSize; } else { left = lower; right = upper; } if (SHOW_DETAIL) printf("===============\n"); // bisection phase 2 if (SHOW_DETAIL) { printf("round %d phase 2\n", round+1); printf("---------------\n"); } while ((right > 1.05 * left) && right > left + 2) { middle = (left + right) / 2; if (SHOW_DETAIL) printf("[%d]: ", middle); foundOptima = true; for (j=0; j<NUMCONV; j++) { DSMGA dsmga(ell, middle, 2, 1, 0, MAX_GEN, -1); //dsmga.doIt(false); dsmga.doIt(false,mode); if (!dsmga.foundOptima()) { foundOptima = false; if (SHOW_DETAIL) { printf("-"); fflush(NULL); } break; } if (SHOW_DETAIL) { printf("+"); fflush(NULL); } } if (foundOptima) right = middle; else left = middle; if (SHOW_DETAIL) printf("\n"); }; middle = (left + right) / 2; if (SHOW_DETAIL) { printf("===============\n"); printf("%d\n", middle); printf("===============\n\n"); } // if can find reliable population size if (!foundOptima && right == upper) { failround++; } else minpopulationSize += middle; } if (failround != BI_REPEAT) minpopulationSize /= (BI_REPEAT - failround); if (SHOW_DETAIL) { double failrate = ( 100.0 * (double)failround ) / (double)BI_REPEAT; if ((BI_REPEAT - failround) == RELIABLE) { printf("minimum population size: %d\n\n", minpopulationSize); printf("failed rate: %f %c\n\n", failrate, '%'); } else { printf("Bisection cannot find reliable population size in [%d, %d]\n", lower, upper); printf("failed rate: %f %c\n\n", failrate, '%'); return EXIT_SUCCESS; } } // // optnfe // if (SHOW_DETAIL) { printf("***************\n"); printf("Opt. 
nfe Phase\n"); printf("***************\n"); } double avgoptnfe[3] = {0.0, 0.0, 0.0}; failround = 0; for(round=0; round<NFE_REPEAT; round++) { double optimalnfe[3]; // nfe, population size, convergence time double history[5][3]; for(i=0; i<5; i++) for(j=0; j<3;j++) history[i][j] = 0.0; // nfe phase 1 populationSize = minpopulationSize; bool foundLower = false; double memo[3]; if (SHOW_DETAIL) { printf("round %d phase 1\n", round+1); printf("---------------\n"); } while (1) { if (SHOW_DETAIL) { printf("[%d]: ", populationSize); fflush(NULL); } Statistics stGen; int usedGen; for (j=0; j<GA_REPEAT; j++) { DSMGA dsmga(ell, populationSize, 2, 1, 0, MAX_GEN, -1); usedGen = dsmga.doIt(false); usedGen = dsmga.doIt(false, mode); if (dsmga.foundOptima()) stGen.record (usedGen); if (SHOW_DETAIL && ((j % COUNTER) == 0)) { printf(">"); fflush(NULL); } } memo[1] = (double)populationSize; memo[2] = stGen.getMean(); memo[0] = memo[1] * memo[2]; if (SHOW_DETAIL) printf(" %f = %d x %f\n", memo[0], (int)memo[1], memo[2]); if (!foundLower) { history[0][0] = memo[0]; history[0][1] = memo[1]; history[0][2] = memo[2]; foundLower = true; } else { if ( (memo[0] >= history[0][0]) || ((memo[0] >= history[4][0]) && (history[4][0] != 0.0)) ) { double memo_swap = memo[0]; memo[0] = history[4][0]; history[4][0] = memo_swap; memo_swap = memo[1]; memo[1] = history[4][1]; history[4][1] = memo_swap; memo_swap = memo[2]; memo[2] = history[4][2]; history[4][2] = memo_swap; break; } else { if (history[4][0] >= memo[0]) { history[0][0] = history[4][0]; history[0][1] = history[4][1]; history[0][2] = history[4][2]; } history[4][0] = memo[0]; history[4][1] = memo[1]; history[4][2] = memo[2]; if (history[0][0] == history[4][0]) break; } } populationSize += (int)(minpopulationSize * 0.1); } if (SHOW_DETAIL) printf("===============\n"); // nfe phase 2 if (SHOW_DETAIL) printf("round %d phase 2\n", round+1); double precision = (double)lower * PRECISION; if (precision < 1.0) precision = 1.0; double history_temp[3] = {0.0, 0.0, 0.0}; int skip = 0; bool flag = false; while (1) { if ( (history[4][1] - history[0][1]) <= precision ) { double temp = history[0][0]; int index = 0; for (i=1; i<=4; i++) { if ( (history[i][0]<temp) && (history[i][0]!=0.0) && (history[i][1]>=history[0][1]) && (history[i][1]<=history[4][1]) ) { temp = history[i][0]; index = i; } } optimalnfe[0] = history[index][0]; optimalnfe[1] = history[index][1]; optimalnfe[2] = history[index][2]; break; } else { if (SHOW_DETAIL) { printf("---------------\n"); printf("[%d <-> %d]\n", (int)history[0][1], (int)history[4][1]); } for (i=0; i<=4; i++) { populationSize = (int)( ( (double)(4-i)*history[0][1] + (double)i*history[4][1] ) / 4.0 ); if (SHOW_DETAIL) { printf("[%d]: ", populationSize); fflush(NULL); } // skip if (i==0 || i==4) skip = 1; else { if (populationSize == (int)memo[1]) skip = 2; else if (populationSize == (int)history_temp[1]) skip = 3; else if (populationSize == (int)history[i-1][1]) skip = 4; else skip = 0; } if (skip != 0) { if (skip == 2) { history[i][0] = memo[0]; history[i][1] = memo[1]; history[i][2] = memo[2]; } else if (skip == 3) { history[i][0] = history_temp[0]; history[i][1] = history_temp[1]; history[i][2] = history_temp[2]; } else if (skip == 4) { history[i][0] = history[i-1][0]; history[i][1] = history[i-1][1]; history[i][2] = history[i-1][2]; } if (SHOW_DETAIL) { printf("skipped "); for (j=0; j<(GA_REPEAT/COUNTER-8); j++) printf(">"); } } else { Statistics stGen; int usedGen; for (j=0; j<GA_REPEAT; j++) { DSMGA dsmga(ell, populationSize, 2, 1, 0, 
MAX_GEN, -1); //usedGen = dsmga.doIt(false); usedGen = dsmga.doIt(false, mode); if (dsmga.foundOptima()) stGen.record (usedGen); if (SHOW_DETAIL && ((j % COUNTER) == 0)) { printf(">"); fflush(NULL); } } history[i][1] = (double)populationSize; history[i][2] = stGen.getMean(); history[i][0] = history[i][1] * history[i][2]; } if (SHOW_DETAIL) printf(" %f = %d x %f\n", history[i][0], (int)history[i][1], history[i][2]); if ( (i != 0) && ((history[i][0]-history[i-1][0]) >= 0.0) && (skip != 4) ) { if (history[i][0]-history[i-1][0]==0) flag = true; else flag = false; break; } } int a, b, c; switch (i) { case 1: a=0; b=1; c=0; break; case 2: a=0; b=2; c=1; if (flag) { a=1; b=2; c=0; } break; case 3: a=1; b=3; c=2; if (flag) { a=2; b=3; c=0; } break; case 4: a=2; b=4; c=3; if (flag) { a=3; b=4; c=0; } break; default: a=3; b=4; c=0; } history[0][0] = history[a][0]; history[0][1] = history[a][1]; history[0][2] = history[a][2]; history[4][0] = history[b][0]; history[4][1] = history[b][1]; history[4][2] = history[b][2]; if (c != 0) { history_temp[0] = history[c][0]; history_temp[1] = history[c][1]; history_temp[2] = history[c][2]; } } } if (SHOW_DETAIL) { printf("===============\n"); printf("%f = %d x %f\n", optimalnfe[0], (int)optimalnfe[1], optimalnfe[2]); printf("===============\n\n"); } else printf("%f %d %f\n", optimalnfe[0], (int)optimalnfe[1], optimalnfe[2]); avgoptnfe[0] += optimalnfe[0]; avgoptnfe[1] += optimalnfe[1]; avgoptnfe[2] += optimalnfe[2]; } avgoptnfe[0] /= (double)NFE_REPEAT; avgoptnfe[1] /= (double)NFE_REPEAT; avgoptnfe[2] /= (double)NFE_REPEAT; if (SHOW_DETAIL) printf("optimal nfe: %f = %f x %f (population size x convergence time)\n", avgoptnfe[0], avgoptnfe[1], avgoptnfe[2]); else printf("%f %f %f\n", avgoptnfe[0], avgoptnfe[1], avgoptnfe[2]); return EXIT_SUCCESS; }
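// A minimal sketch of the bisection strategy used above, assuming a
// hypothetical predicate succeeds(populationSize) that runs the GA NUMCONV
// times and reports whether every run reached the optimum. Phase 1 doubles
// the population until the predicate holds; phase 2 then narrows [left, right]
// until the interval is small relative to its lower end, mirroring the
// (right > 1.05 * left) && (right > left + 2) test above.
#include <functional>

int bisectPopulation(const std::function<bool(int)> &succeeds, int start) {
  int populationSize = (start > 0) ? start : 1;
  while (!succeeds(populationSize)) {     // phase 1: exponential growth
    populationSize *= 2;
  }
  int left = populationSize / 2;
  int right = populationSize;
  while (right > 1.05 * left && right > left + 2) {   // phase 2: narrow
    int middle = (left + right) / 2;
    if (succeeds(middle)) right = middle;
    else                  left = middle;
  }
  return (left + right) / 2;
}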
void Histogram::InitializeFromCube(Cube &cube, const int band, Progress *progress) { // Make sure band is valid if ((band < 0) || (band > cube.Bands())) { string msg = "Invalid band in [Histogram constructor]"; throw Isis::iException::Message(Isis::iException::Programmer,msg,_FILEINFO_); } double min,max; int nbins; if (cube.PixelType() == Isis::UnsignedByte) { min = 0.0 * cube.Multiplier() + cube.Base(); max = 255.0 * cube.Multiplier() + cube.Base(); nbins = 256; } else if (cube.PixelType() == Isis::SignedWord) { min = -32768.0 * cube.Multiplier() + cube.Base(); max = 32767.0 * cube.Multiplier() + cube.Base(); nbins = 65536; } else if (cube.PixelType() == Isis::Real) { // Determine the band for statistics int bandStart = band; int bandStop = band; int maxSteps = cube.Lines(); if (band == 0){ bandStart = 1; bandStop = cube.Bands(); maxSteps = cube.Lines() * cube.Bands(); } // Construct a line buffer manager and a statistics object LineManager line(cube); Statistics stats = Statistics(); // Prep for reporting progress if necessary if (progress != NULL) { string save = progress->Text (); progress->SetText("Computing min/max for histogram"); progress->SetMaximumSteps(maxSteps); progress->CheckStatus(); } for (int useBand = bandStart ; useBand <= bandStop ; useBand++){ // Loop and get the statistics for a good minimum/maximum for (int i=1; i<=cube.Lines(); i++) { line.SetLine(i,useBand); cube.Read(line); stats.AddData (line.DoubleBuffer(),line.size()); if (progress != NULL) progress->CheckStatus(); } } // Get the min/max for constructing a histogram object if (stats.ValidPixels() == 0) { min = 0.0; max = 1.0; } else { min = stats.BestMinimum (); max = stats.BestMaximum (); } nbins = 65536; } else { std::string msg = "Unsupported pixel type"; throw iException::Message(Isis::iException::Programmer,msg,_FILEINFO_); } // Set the bins and range SetBinRange(min,max); SetBins(nbins); }
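// A small sketch of what the chosen range and bin count imply, assuming the
// usual convention that a value is mapped linearly into one of nbins equal
// bins spanning [min, max]. binIndex is an illustrative helper, not an ISIS
// routine.
int binIndex(double value, double min, double max, int nbins) {
  if (value <= min) return 0;
  if (value >= max) return nbins - 1;
  return (int)((value - min) / (max - min) * nbins);
}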
void IsisMain() { const QString caminfo_program = "caminfo"; UserInterface &ui = Application::GetUserInterface(); QList< QPair<QString, QString> > *general = NULL, *camstats = NULL, *statistics = NULL; BandGeometry *bandGeom = NULL; // Get input filename FileName in = ui.GetFileName("FROM"); // Get the format QString sFormat = ui.GetAsString("FORMAT"); // if true then run spiceinit, xml default is FALSE // spiceinit will use system kernels if(ui.GetBoolean("SPICE")) { QString parameters = "FROM=" + in.expanded(); ProgramLauncher::RunIsisProgram("spiceinit", parameters); } Process p; Cube *incube = p.SetInputCube("FROM"); // General data gathering general = new QList< QPair<QString, QString> >; general->append(MakePair("Program", caminfo_program)); general->append(MakePair("IsisVersion", Application::Version())); general->append(MakePair("RunDate", iTime::CurrentGMT())); general->append(MakePair("IsisId", SerialNumber::Compose(*incube))); general->append(MakePair("From", in.baseName() + ".cub")); general->append(MakePair("Lines", toString(incube->lineCount()))); general->append(MakePair("Samples", toString(incube->sampleCount()))); general->append(MakePair("Bands", toString(incube->bandCount()))); // Run camstats on the entire image (all bands) // another camstats will be run for each band and output // for each band. if(ui.GetBoolean("CAMSTATS")) { camstats = new QList< QPair<QString, QString> >; QString filename = ui.GetAsString("FROM"); int sinc = ui.GetInteger("SINC"); int linc = ui.GetInteger("LINC"); CameraStatistics stats(filename, sinc, linc); Pvl camPvl = stats.toPvl(); PvlGroup cg = camPvl.findGroup("Latitude", Pvl::Traverse); camstats->append(MakePair("MinimumLatitude", cg["latitudeminimum"][0])); camstats->append(MakePair("MaximumLatitude", cg["latitudemaximum"][0])); cg = camPvl.findGroup("Longitude", Pvl::Traverse); camstats->append(MakePair("MinimumLongitude", cg["longitudeminimum"][0])); camstats->append(MakePair("MaximumLongitude", cg["longitudemaximum"][0])); cg = camPvl.findGroup("Resolution", Pvl::Traverse); camstats->append(MakePair("MinimumResolution", cg["resolutionminimum"][0])); camstats->append(MakePair("MaximumResolution", cg["resolutionmaximum"][0])); cg = camPvl.findGroup("PhaseAngle", Pvl::Traverse); camstats->append(MakePair("MinimumPhase", cg["phaseminimum"][0])); camstats->append(MakePair("MaximumPhase", cg["phasemaximum"][0])); cg = camPvl.findGroup("EmissionAngle", Pvl::Traverse); camstats->append(MakePair("MinimumEmission", cg["emissionminimum"][0])); camstats->append(MakePair("MaximumEmission", cg["emissionmaximum"][0])); cg = camPvl.findGroup("IncidenceAngle", Pvl::Traverse); camstats->append(MakePair("MinimumIncidence", cg["incidenceminimum"][0])); camstats->append(MakePair("MaximumIncidence", cg["incidencemaximum"][0])); cg = camPvl.findGroup("LocalSolarTime", Pvl::Traverse); camstats->append(MakePair("LocalTimeMinimum", cg["localsolartimeMinimum"][0])); camstats->append(MakePair("LocalTimeMaximum", cg["localsolartimeMaximum"][0])); } // Compute statistics for entire cube if(ui.GetBoolean("STATISTICS")) { statistics = new QList< QPair<QString, QString> >; LineManager iline(*incube); Statistics stats; Progress progress; progress.SetText("Statistics..."); progress.SetMaximumSteps(incube->lineCount()*incube->bandCount()); progress.CheckStatus(); iline.SetLine(1); for(; !iline.end() ; iline.next()) { incube->read(iline); stats.AddData(iline.DoubleBuffer(), iline.size()); progress.CheckStatus(); } // Compute stats of entire cube double nPixels = 
stats.TotalPixels(); double nullpercent = (stats.NullPixels() / (nPixels)) * 100; double hispercent = (stats.HisPixels() / (nPixels)) * 100; double hrspercent = (stats.HrsPixels() / (nPixels)) * 100; double lispercent = (stats.LisPixels() / (nPixels)) * 100; double lrspercent = (stats.LrsPixels() / (nPixels)) * 100; // Statitics output for band statistics->append(MakePair("MeanValue", toString(stats.Average()))); statistics->append(MakePair("StandardDeviation", toString(stats.StandardDeviation()))); statistics->append(MakePair("MinimumValue", toString(stats.Minimum()))); statistics->append(MakePair("MaximumValue", toString(stats.Maximum()))); statistics->append(MakePair("PercentHIS", toString(hispercent))); statistics->append(MakePair("PercentHRS", toString(hrspercent))); statistics->append(MakePair("PercentLIS", toString(lispercent))); statistics->append(MakePair("PercentLRS", toString(lrspercent))); statistics->append(MakePair("PercentNull", toString(nullpercent))); statistics->append(MakePair("TotalPixels", toString(stats.TotalPixels()))); } bool getFootBlob = ui.GetBoolean("USELABEL"); bool doGeometry = ui.GetBoolean("GEOMETRY"); bool doPolygon = ui.GetBoolean("POLYGON"); if(doGeometry || doPolygon || getFootBlob) { Camera *cam = incube->camera(); QString incType = ui.GetString("INCTYPE"); int polySinc, polyLinc; if(doPolygon && incType.toUpper() == "VERTICES") { ImagePolygon poly; poly.initCube(*incube); polySinc = polyLinc = (int)(0.5 + (((poly.validSampleDim() * 2) + (poly.validLineDim() * 2) - 3.0) / ui.GetInteger("NUMVERTICES"))); } else if (incType.toUpper() == "LINCSINC"){ if(ui.WasEntered("POLYSINC")) { polySinc = ui.GetInteger("POLYSINC"); } else { polySinc = (int)(0.5 + 0.10 * incube->sampleCount()); if(polySinc == 0) polySinc = 1; } if(ui.WasEntered("POLYLINC")) { polyLinc = ui.GetInteger("POLYLINC"); } else { polyLinc = (int)(0.5 + 0.10 * incube->lineCount()); if(polyLinc == 0) polyLinc = 1; } } else { QString msg = "Invalid INCTYPE option[" + incType + "]"; throw IException(IException::Programmer, msg, _FILEINFO_); } bandGeom = new BandGeometry(); bandGeom->setSampleInc(polySinc); bandGeom->setLineInc(polyLinc); bandGeom->setMaxIncidence(ui.GetDouble("MAXINCIDENCE")); bandGeom->setMaxEmission(ui.GetDouble("MAXEMISSION")); bool precision = ui.GetBoolean("INCREASEPRECISION"); if (getFootBlob) { // Need to read history to obtain parameters that were used to // create the footprint History hist("IsisCube", in.expanded()); Pvl pvl = hist.ReturnHist(); PvlObject::PvlObjectIterator objIter; bool found = false; PvlGroup fpgrp; for (objIter=pvl.endObject()-1; objIter>=pvl.beginObject(); objIter--) { if (objIter->name().toUpper() == "FOOTPRINTINIT") { found = true; fpgrp = objIter->findGroup("UserParameters"); break; } } if (!found) { QString msg = "Footprint blob was not found in input image history"; throw IException(IException::User, msg, _FILEINFO_); } QString prec = (QString)fpgrp.findKeyword("INCREASEPRECISION"); prec = prec.toUpper(); if (prec == "TRUE") { precision = true; } else { precision = false; } QString inctype = (QString)fpgrp.findKeyword("INCTYPE"); inctype = inctype.toUpper(); if (inctype == "LINCSINC") { int linc = fpgrp.findKeyword("LINC"); int sinc = fpgrp.findKeyword("SINC"); bandGeom->setSampleInc(sinc); bandGeom->setLineInc(linc); } else { int vertices = fpgrp.findKeyword("NUMVERTICES"); int lincsinc = (int)(0.5 + (((incube->sampleCount() * 2) + (incube->lineCount() * 2) - 3.0) / vertices)); bandGeom->setSampleInc(lincsinc); bandGeom->setLineInc(lincsinc); } 
if (fpgrp.hasKeyword("MAXINCIDENCE")) { double maxinc = fpgrp.findKeyword("MAXINCIDENCE"); bandGeom->setMaxIncidence(maxinc); } if (fpgrp.hasKeyword("MAXEMISSION")) { double maxema = fpgrp.findKeyword("MAXEMISSION"); bandGeom->setMaxEmission(maxema); } } bandGeom->collect(*cam, *incube, doGeometry, doPolygon, getFootBlob, precision); // Check if the user requires valid image center geometry if(ui.GetBoolean("VCAMERA") && (!bandGeom->hasCenterGeometry())) { QString msg = "Image center does not project in camera model"; throw IException(IException::Unknown, msg, _FILEINFO_); } } if(sFormat.toUpper() == "PVL") GeneratePVLOutput(incube, general, camstats, statistics, bandGeom); else GenerateCSVOutput(incube, general, camstats, statistics, bandGeom); // Clean the data delete general; general = NULL; if(camstats) { delete camstats; camstats = NULL; } if(statistics) { delete statistics; statistics = NULL; } if(bandGeom) { delete bandGeom; bandGeom = NULL; } }
bool SioImporter::execute(PlugInArgList* pInArgList, PlugInArgList* pOutArgList) { // Read the statistics from the file and set into the raster element RasterElement* pRaster = NULL; if (pInArgList != NULL) { pRaster = pInArgList->getPlugInArgValue<RasterElement>(Importer::ImportElementArg()); } if (pRaster != NULL) { // Get the filename string filename = pRaster->getFilename(); // Read the file FileResource pFile(filename.c_str(), "rb"); SioFile sioFile; if (sioFile.deserialize(pFile.get()) == true) { if (sioFile.mOriginalVersion == 9 && isBatch()) { //Since version 9 sio's are not officially supported //don't load them in batch to force users to load them //interactively which will show them the reason why. return false; } const RasterDataDescriptor* pDescriptor = dynamic_cast<const RasterDataDescriptor*>(pRaster->getDataDescriptor()); if (pDescriptor != NULL && RasterUtilities::isSubcube(pDescriptor, false) == false && Service<SessionManager>()->isSessionLoading() == false) { const vector<DimensionDescriptor>& bands = pDescriptor->getBands(); for (unsigned int i = 0; i < bands.size(); ++i) { Statistics* pStatistics = pRaster->getStatistics(bands[i]); if (pStatistics != NULL) { // Bad values if (i < sioFile.mBadValues.size()) { vector<int> badValues = sioFile.mBadValues[i]; pStatistics->setBadValues(badValues); } // Min if (i < sioFile.mStatMin.size()) { double dMin = sioFile.mStatMin[i]; pStatistics->setMin(dMin); } // Max if (i < sioFile.mStatMax.size()) { double dMax = sioFile.mStatMax[i]; pStatistics->setMax(dMax); } // Average if (i < sioFile.mStatAvg.size()) { double dAverage = sioFile.mStatAvg[i]; pStatistics->setAverage(dAverage); } // Standard deviation if (i < sioFile.mStatStdDev.size()) { double dStdDev = sioFile.mStatStdDev[i]; pStatistics->setStandardDeviation(dStdDev); } // Percentiles if (i < sioFile.mStatPercentile.size()) { double* pPercentiles = sioFile.mStatPercentile[i]; pStatistics->setPercentiles(pPercentiles); } // Histogram if ((i < sioFile.mStatBinCenter.size()) && (i < sioFile.mStatHistogram.size())) { double* pBinCenters = sioFile.mStatBinCenter[i]; unsigned int* pCounts = sioFile.mStatHistogram[i]; pStatistics->setHistogram(pBinCenters, pCounts); } } } } } } return RasterElementImporterShell::execute(pInArgList, pOutArgList); }
void IsisMain() { //Create a process to create the input cubes Process p; //Create the input cubes, matching sample/lines Cube *inCube = p.SetInputCube ("FROM"); Cube *latCube = p.SetInputCube("LATCUB", SpatialMatch); Cube *lonCube = p.SetInputCube("LONCUB", SpatialMatch); //A 1x1 brick to read in the latitude and longitude DN values from //the specified cubes Brick latBrick(1,1,1, latCube->PixelType()); Brick lonBrick(1,1,1, lonCube->PixelType()); UserInterface &ui = Application::GetUserInterface(); //Set the sample and line increments int sinc = (int)(inCube->Samples() * 0.10); if(ui.WasEntered("SINC")) { sinc = ui.GetInteger("SINC"); } int linc = (int)(inCube->Lines() * 0.10); if(ui.WasEntered("LINC")) { linc = ui.GetInteger("LINC"); } //Set the degree of the polynomial to use in our functions int degree = ui.GetInteger("DEGREE"); //We are using a polynomial with two variables PolynomialBivariate sampFunct(degree); PolynomialBivariate lineFunct(degree); //We will be solving the function using the least squares method LeastSquares sampSol(sampFunct); LeastSquares lineSol(lineFunct); //Setup the variables for solving the stereographic projection //x = cos(latitude) * sin(longitude - lon_center) //y = cos(lat_center) * sin(latitude) - sin(lat_center) * cos(latitude) * cos(longitude - lon_center) //Get the center lat and long from the input cubes double lat_center = latCube->Statistics()->Average() * PI/180.0; double lon_center = lonCube->Statistics()->Average() * PI/180.0; /** * Loop through lines and samples projecting the latitude and longitude at those * points to stereographic x and y and adding these points to the LeastSquares * matrix. */ for(int i = 1; i <= inCube->Lines(); i+= linc) { for(int j = 1; j <= inCube->Samples(); j+= sinc) { latBrick.SetBasePosition(j, i, 1); latCube->Read(latBrick); if(IsSpecial(latBrick.at(0))) continue; double lat = latBrick.at(0) * PI/180.0; lonBrick.SetBasePosition(j, i, 1); lonCube->Read(lonBrick); if(IsSpecial(lonBrick.at(0))) continue; double lon = lonBrick.at(0) * PI/180.0; //Project lat and lon to x and y using a stereographic projection double k = 2/(1 + sin(lat_center) * sin(lat) + cos(lat_center)*cos(lat)*cos(lon - lon_center)); double x = k * cos(lat) * sin(lon - lon_center); double y = k * (cos(lat_center) * sin(lat)) - (sin(lat_center) * cos(lat) * cos(lon - lon_center)); //Add x and y to the least squares matrix vector<double> data; data.push_back(x); data.push_back(y); sampSol.AddKnown(data, j); lineSol.AddKnown(data, i); //If the sample increment goes past the last sample in the line, we want to //always read the last sample.. if(j != inCube->Samples() && j + sinc > inCube->Samples()) { j = inCube->Samples() - sinc; } } //If the line increment goes past the last line in the cube, we want to //always read the last line.. if(i != inCube->Lines() && i + linc > inCube->Lines()) { i = inCube->Lines() - linc; } } //Solve the least squares functions using QR Decomposition sampSol.Solve(LeastSquares::QRD); lineSol.Solve(LeastSquares::QRD); //If the user wants to save the residuals to a file, create a file and write //the column titles to it. 
TextFile oFile; if(ui.WasEntered("RESIDUALS")) { oFile.Open(ui.GetFilename("RESIDUALS"), "overwrite"); oFile.PutLine("Sample,\tLine,\tX,\tY,\tSample Error,\tLine Error\n"); } //Gather the statistics for the residuals from the least squares solutions Statistics sampErr; Statistics lineErr; vector<double> sampResiduals = sampSol.Residuals(); vector<double> lineResiduals = lineSol.Residuals(); for(int i = 0; i < (int)sampResiduals.size(); i++) { sampErr.AddData(sampResiduals[i]); lineErr.AddData(lineResiduals[i]); } //If a residuals file was specified, write the previous data, and the errors to the file. if(ui.WasEntered("RESIDUALS")) { for(int i = 0; i < sampSol.Rows(); i++) { vector<double> data = sampSol.GetInput(i); iString tmp = ""; tmp += iString(sampSol.GetExpected(i)); tmp += ",\t"; tmp += iString(lineSol.GetExpected(i)); tmp += ",\t"; tmp += iString(data[0]); tmp += ",\t"; tmp += iString(data[1]); tmp += ",\t"; tmp += iString(sampResiduals[i]); tmp += ",\t"; tmp += iString(lineResiduals[i]); oFile.PutLine(tmp + "\n"); } } oFile.Close(); //Records the error to the log PvlGroup error( "Error" ); error += PvlKeyword( "Degree", degree ); error += PvlKeyword( "NumberOfPoints", (int)sampResiduals.size() ); error += PvlKeyword( "SampleMinimumError", sampErr.Minimum() ); error += PvlKeyword( "SampleAverageError", sampErr.Average() ); error += PvlKeyword( "SampleMaximumError", sampErr.Maximum() ); error += PvlKeyword( "SampleStdDeviationError", sampErr.StandardDeviation() ); error += PvlKeyword( "LineMinimumError", lineErr.Minimum() ); error += PvlKeyword( "LineAverageError", lineErr.Average() ); error += PvlKeyword( "LineMaximumError", lineErr.Maximum() ); error += PvlKeyword( "LineStdDeviationError", lineErr.StandardDeviation() ); Application::Log( error ); //Close the input cubes for cleanup p.EndProcess(); //If we want to warp the image, then continue, otherwise return if(!ui.GetBoolean("NOWARP")) { //Creates the mapping group Pvl mapFile; mapFile.Read(ui.GetFilename("MAP")); PvlGroup &mapGrp = mapFile.FindGroup("Mapping",Pvl::Traverse); //Reopen the lat and long cubes latCube = new Cube(); latCube->SetVirtualBands(ui.GetInputAttribute("LATCUB").Bands()); latCube->Open(ui.GetFilename("LATCUB")); lonCube = new Cube(); lonCube->SetVirtualBands(ui.GetInputAttribute("LONCUB").Bands()); lonCube->Open(ui.GetFilename("LONCUB")); PvlKeyword targetName; //If the user entered the target name if(ui.WasEntered("TARGET")) { targetName = PvlKeyword("TargetName", ui.GetString("TARGET")); } //Else read the target name from the input cube else { Pvl fromFile; fromFile.Read(ui.GetFilename("FROM")); targetName = fromFile.FindKeyword("TargetName", Pvl::Traverse); } mapGrp.AddKeyword(targetName, Pvl::Replace); PvlKeyword equRadius; PvlKeyword polRadius; //If the user entered the equatorial and polar radii if(ui.WasEntered("EQURADIUS") && ui.WasEntered("POLRADIUS")) { equRadius = PvlKeyword("EquatorialRadius", ui.GetDouble("EQURADIUS")); polRadius = PvlKeyword("PolarRadius", ui.GetDouble("POLRADIUS")); } //Else read them from the pck else { Filename pckFile("$base/kernels/pck/pck?????.tpc"); pckFile.HighestVersion(); string pckFilename = pckFile.Expanded(); furnsh_c(pckFilename.c_str()); string target = targetName[0]; SpiceInt code; SpiceBoolean found; bodn2c_c (target.c_str(), &code, &found); if (!found) { string msg = "Could not convert Target [" + target + "] to NAIF code"; throw Isis::iException::Message(Isis::iException::Io,msg,_FILEINFO_); } SpiceInt n; SpiceDouble radii[3]; 
bodvar_c(code,"RADII",&n,radii); equRadius = PvlKeyword("EquatorialRadius", radii[0] * 1000); polRadius = PvlKeyword("PolarRadius", radii[2] * 1000); } mapGrp.AddKeyword(equRadius, Pvl::Replace); mapGrp.AddKeyword(polRadius, Pvl::Replace); //If the latitude type is not in the mapping group, copy it from the input if(!mapGrp.HasKeyword("LatitudeType")) { if(ui.GetString("LATTYPE") == "PLANETOCENTRIC") { mapGrp.AddKeyword(PvlKeyword("LatitudeType","Planetocentric"), Pvl::Replace); } else { mapGrp.AddKeyword(PvlKeyword("LatitudeType","Planetographic"), Pvl::Replace); } } //If the longitude direction is not in the mapping group, copy it from the input if(!mapGrp.HasKeyword("LongitudeDirection")) { if(ui.GetString("LONDIR") == "POSITIVEEAST") { mapGrp.AddKeyword(PvlKeyword("LongitudeDirection","PositiveEast"), Pvl::Replace); } else { mapGrp.AddKeyword(PvlKeyword("LongitudeDirection","PositiveWest"), Pvl::Replace); } } //If the longitude domain is not in the mapping group, assume it is 360 if(!mapGrp.HasKeyword("LongitudeDomain")) { mapGrp.AddKeyword(PvlKeyword("LongitudeDomain","360"), Pvl::Replace); } //If the default range is to be computed, use the input lat/long cubes to determine the range if(ui.GetString("DEFAULTRANGE") == "COMPUTE") { //NOTE - When computing the min/max longitude this application does not account for the //longitude seam if it exists. Since the min/max are calculated from the statistics of //the input longitude cube and then converted to the mapping group's domain they may be //invalid for cubes containing the longitude seam. Statistics *latStats = latCube->Statistics(); Statistics *lonStats = lonCube->Statistics(); double minLat = latStats->Minimum(); double maxLat = latStats->Maximum(); bool isOcentric = ((std::string)mapGrp.FindKeyword("LatitudeType")) == "Planetocentric"; if(isOcentric) { if(ui.GetString("LATTYPE") != "PLANETOCENTRIC") { minLat = Projection::ToPlanetocentric(minLat, (double)equRadius, (double)polRadius); maxLat = Projection::ToPlanetocentric(maxLat, (double)equRadius, (double)polRadius); } } else { if(ui.GetString("LATTYPE") == "PLANETOCENTRIC") { minLat = Projection::ToPlanetographic(minLat, (double)equRadius, (double)polRadius); maxLat = Projection::ToPlanetographic(maxLat, (double)equRadius, (double)polRadius); } } int lonDomain = (int)mapGrp.FindKeyword("LongitudeDomain"); double minLon = lonDomain == 360 ? Projection::To360Domain(lonStats->Minimum()) : Projection::To180Domain(lonStats->Minimum()); double maxLon = lonDomain == 360 ? 
Projection::To360Domain(lonStats->Maximum()) : Projection::To180Domain(lonStats->Maximum()); bool isPosEast = ((std::string)mapGrp.FindKeyword("LongitudeDirection")) == "PositiveEast"; if(isPosEast) { if(ui.GetString("LONDIR") != "POSITIVEEAST") { minLon = Projection::ToPositiveEast(minLon, lonDomain); maxLon = Projection::ToPositiveEast(maxLon, lonDomain); } } else { if(ui.GetString("LONDIR") == "POSITIVEEAST") { minLon = Projection::ToPositiveWest(minLon, lonDomain); maxLon = Projection::ToPositiveWest(maxLon, lonDomain); } } if(minLon > maxLon) { double temp = minLon; minLon = maxLon; maxLon = temp; } mapGrp.AddKeyword(PvlKeyword("MinimumLatitude", minLat),Pvl::Replace); mapGrp.AddKeyword(PvlKeyword("MaximumLatitude", maxLat),Pvl::Replace); mapGrp.AddKeyword(PvlKeyword("MinimumLongitude", minLon),Pvl::Replace); mapGrp.AddKeyword(PvlKeyword("MaximumLongitude", maxLon),Pvl::Replace); } //If the user decided to enter a ground range then override if (ui.WasEntered("MINLAT")) { mapGrp.AddKeyword(PvlKeyword("MinimumLatitude", ui.GetDouble("MINLAT")),Pvl::Replace); } if (ui.WasEntered("MAXLAT")) { mapGrp.AddKeyword(PvlKeyword("MaximumLatitude", ui.GetDouble("MAXLAT")),Pvl::Replace); } if (ui.WasEntered("MINLON")) { mapGrp.AddKeyword(PvlKeyword("MinimumLongitude", ui.GetDouble("MINLON")),Pvl::Replace); } if (ui.WasEntered("MAXLON")) { mapGrp.AddKeyword(PvlKeyword("MaximumLongitude", ui.GetDouble("MAXLON")),Pvl::Replace); } //If the pixel resolution is to be computed, compute the pixels/degree from the input if (ui.GetString("PIXRES") == "COMPUTE") { latBrick.SetBasePosition(1,1,1); latCube->Read(latBrick); lonBrick.SetBasePosition(1,1,1); lonCube->Read(lonBrick); //Read the lat and long at the upper left corner double a = latBrick.at(0) * PI/180.0; double c = lonBrick.at(0) * PI/180.0; latBrick.SetBasePosition(latCube->Samples(),latCube->Lines(),1); latCube->Read(latBrick); lonBrick.SetBasePosition(lonCube->Samples(),lonCube->Lines(),1); lonCube->Read(lonBrick); //Read the lat and long at the lower right corner double b = latBrick.at(0) * PI/180.0; double d = lonBrick.at(0) * PI/180.0; //Determine the angle between the two points double angle = acos(cos(a) * cos(b) * cos(c - d) + sin(a) * sin(b)); //double angle = acos((cos(a1) * cos(b1) * cos(b2)) + (cos(a1) * sin(b1) * cos(a2) * sin(b2)) + (sin(a1) * sin(a2))); angle *= 180/PI; //Determine the number of pixels between the two points double pixels = sqrt(pow(latCube->Samples() -1.0, 2.0) + pow(latCube->Lines() -1.0, 2.0)); //Add the scale in pixels/degree to the mapping group mapGrp.AddKeyword(PvlKeyword("Scale", pixels/angle, "pixels/degree"), Pvl::Replace); if (mapGrp.HasKeyword("PixelResolution")) { mapGrp.DeleteKeyword("PixelResolution"); } } // If the user decided to enter a resolution then override if (ui.GetString("PIXRES") == "MPP") { mapGrp.AddKeyword(PvlKeyword("PixelResolution", ui.GetDouble("RESOLUTION"), "meters/pixel"), Pvl::Replace); if (mapGrp.HasKeyword("Scale")) { mapGrp.DeleteKeyword("Scale"); } } else if (ui.GetString("PIXRES") == "PPD") { mapGrp.AddKeyword(PvlKeyword("Scale", ui.GetDouble("RESOLUTION"), "pixels/degree"), Pvl::Replace); if (mapGrp.HasKeyword("PixelResolution")) { mapGrp.DeleteKeyword("PixelResolution"); } } //Create a projection using the map file we created int samples,lines; Projection *outmap = ProjectionFactory::CreateForCube(mapFile,samples,lines,false); //Write the map file to the log Application::GuiLog(mapGrp); //Create a process rubber sheet ProcessRubberSheet r; //Set the input cube inCube = 
r.SetInputCube("FROM"); double tolerance = ui.GetDouble("TOLERANCE") * outmap->Resolution(); //Create a new transform object Transform *transform = new nocam2map (sampSol, lineSol, outmap, latCube, lonCube, ui.GetString("LATTYPE") == "PLANETOCENTRIC", ui.GetString("LONDIR") == "POSITIVEEAST", tolerance, ui.GetInteger("ITERATIONS"), inCube->Samples(), inCube->Lines(), samples, lines); //Allocate the output cube and add the mapping labels Cube *oCube = r.SetOutputCube ("TO", transform->OutputSamples(), transform->OutputLines(), inCube->Bands()); oCube->PutGroup(mapGrp); //Determine which interpolation to use Interpolator *interp = NULL; if (ui.GetString("INTERP") == "NEARESTNEIGHBOR") { interp = new Interpolator(Interpolator::NearestNeighborType); } else if (ui.GetString("INTERP") == "BILINEAR") { interp = new Interpolator(Interpolator::BiLinearType); } else if (ui.GetString("INTERP") == "CUBICCONVOLUTION") { interp = new Interpolator(Interpolator::CubicConvolutionType); } //Warp the cube r.StartProcess(*transform, *interp); r.EndProcess(); // add mapping to print.prt PvlGroup mapping = outmap->Mapping(); Application::Log(mapping); //Clean up delete latCube; delete lonCube; delete outmap; delete transform; delete interp; } }
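// A self-contained version of the stereographic forward projection described
// in the x/y comments near the top of IsisMain above: all angles in radians,
// (lat0, lon0) is the projection centre, and the scale factor is
// k = 2 / (1 + sin(lat0)sin(lat) + cos(lat0)cos(lat)cos(lon - lon0)).
// In this textbook form k scales both terms of y; note that the inline
// expression above parenthesizes y so that k multiplies only the first term.
// stereographicForward is an illustrative helper, not an ISIS routine.
#include <cmath>

struct XY { double x; double y; };

XY stereographicForward(double lat, double lon, double lat0, double lon0) {
  double k = 2.0 / (1.0 + std::sin(lat0) * std::sin(lat) +
                    std::cos(lat0) * std::cos(lat) * std::cos(lon - lon0));
  XY result;
  result.x = k * std::cos(lat) * std::sin(lon - lon0);
  result.y = k * (std::cos(lat0) * std::sin(lat) -
                  std::sin(lat0) * std::cos(lat) * std::cos(lon - lon0));
  return result;
}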
int main(int argc, char* argv[])
{
  if (argc != 2) {
    std::cerr << "CoordinateEncoding <map directory>" << std::endl;
    return 1;
  }

  std::list<Encoder*> encoders;
  Statistics          statistics;

  encoders.push_back(new TrivialEncoder());
  encoders.push_back(new MinimumVLQDeltaEncoder());
  encoders.push_back(new VLQDeltaEncoder());
  encoders.push_back(new StaticOptimizedDeltaEncoder());

  std::string mapDirectory=argv[1];
  std::string areaDatFilename=osmscout::AppendFileToDir(mapDirectory,"areas.dat");
  std::string wayDatFilename=osmscout::AppendFileToDir(mapDirectory,"ways.dat");

  osmscout::TypeConfig  typeConfig;
  osmscout::FileScanner scanner;
  uint32_t              dataCount;

  std::cout << "Reading type config from map directory '" << mapDirectory << "'..." << std::endl;

  if (!typeConfig.LoadFromDataFile(mapDirectory)) {
    std::cerr << "Cannot open type config" << std::endl;
    return 1;
  }

  /*
  std::cout << "Reading '" << areaDatFilename << "'..." << std::endl;

  if (!scanner.Open(areaDatFilename,osmscout::FileScanner::Sequential,true)) {
    std::cerr << "Cannot open '" << scanner.GetFilename() << "'" << std::endl;
    return 1;
  }

  if (!scanner.Read(dataCount)) {
    std::cerr << "Cannot read number of entries in file" << std::endl;
    return 1;
  }

  std::cout << dataCount << " entries..." << std::endl;

  for (size_t i=1; i<=dataCount; i++) {
    osmscout::Area area;

    if (!area.Read(typeConfig,scanner)) {
      std::cerr << "Cannot read data set #" << i << " from file '" << scanner.GetFilename() << "'" << std::endl;
      return 1;
    }

    for (const auto& ring : area.rings) {
      statistics.Measure(ring.nodes);

      for (auto& encoder : encoders) {
        encoder->Encode(area.GetFileOffset(),ring.nodes);
      }
    }
  }

  scanner.Close();
  */

  std::cout << "Reading '" << wayDatFilename << "'..." << std::endl;

  if (!scanner.Open(wayDatFilename,osmscout::FileScanner::Sequential,true)) {
    std::cerr << "Cannot open '" << scanner.GetFilename() << "'" << std::endl;
    return 1;
  }

  if (!scanner.Read(dataCount)) {
    std::cerr << "Cannot read number of entries in file" << std::endl;
    return 1;
  }

  std::cout << dataCount << " entries..." << std::endl;

  for (size_t i=1; i<=dataCount; i++) {
    osmscout::Way way;

    if (!way.Read(typeConfig,scanner)) {
      std::cerr << "Cannot read data set #" << i << " from file '" << scanner.GetFilename() << "'" << std::endl;
      return 1;
    }

    // Debug output: dump the raw node coordinates of the way
    for (size_t n=0; n<way.nodes.size(); n++) {
      std::cout << way.nodes[n].GetDisplayText() << " ";
    }
    std::cout << std::endl;

    statistics.Measure(way.nodes);

    for (auto& encoder : encoders) {
      encoder->Encode(way.GetFileOffset(),way.nodes);
    }
  }

  scanner.Close();

  std::cout << "---" << std::endl;

  for (auto& encoder : encoders) {
    std::cout << "Encoder: " << encoder->name << " " << encoder->bytesNeeded << std::endl;
    delete encoder;
  }

  encoders.clear();

  std::cout << "---" << std::endl;
  std::cout << "Number of vectors: " << statistics.numberOfVectors << std::endl;
  std::cout << "Number of empty vectors: " << statistics.emptyVectorCount << std::endl;
  std::cout << "Number of six bit length vectors: " << statistics.sixBitVectorCount << " " << statistics.sixBitVectorCount*100.0/statistics.numberOfVectors << "%" << std::endl;
  std::cout << "Number of coords: " << statistics.coordCount << std::endl;
  std::cout << "Min. length: " << statistics.minLength << std::endl;
  std::cout << "Max. length: " << statistics.maxLength << std::endl;
  std::cout << "Avg. length: " << statistics.coordCount*1.0/statistics.numberOfVectors << std::endl;
  std::cout << "Delta: " << statistics.minDelta << " - " << statistics.deltaSum/statistics.deltaCount << " - " << statistics.maxDelta;
  std::cout << " (" << statistics.deltaSum << "/" << statistics.deltaCount << ")" << std::endl;

  std::cout << "Delta distribution (3,4,5,6,7,15,23 bits): ";
  std::cout << statistics.threeBitDeltaCount*100.0/statistics.deltaCount << "% ";
  std::cout << statistics.fourBitDeltaCount*100.0/statistics.deltaCount << "% ";
  std::cout << statistics.fiveBitDeltaCount*100.0/statistics.deltaCount << "% ";
  std::cout << statistics.sixBitDeltaCount*100.0/statistics.deltaCount << "% ";
  std::cout << statistics.sevenBitDeltaCount*100.0/statistics.deltaCount << "% ";
  std::cout << statistics.fifteenBitDeltaCount*100.0/statistics.deltaCount << "% ";
  std::cout << statistics.twentythreeBitDeltaCount*100.0/statistics.deltaCount << "%" << std::endl;

  return 0;
}
// Helper function to compute the input (ground) range.
void ComputeInputRange() {
  Process p;
  Cube *latCub = p.SetInputCube("LATCUB");
  Cube *lonCub = p.SetInputCube("LONCUB");

  UserInterface &ui = Application::GetUserInterface();
  Pvl userMap;
  userMap.Read(ui.GetFilename("MAP"));
  PvlGroup &userGrp = userMap.FindGroup("Mapping", Pvl::Traverse);

  Statistics *latStats = latCub->Statistics();
  Statistics *lonStats = lonCub->Statistics();

  double minLat = latStats->Minimum();
  double maxLat = latStats->Maximum();

  int lonDomain = userGrp.HasKeyword("LongitudeDomain") ? (int)userGrp.FindKeyword("LongitudeDomain") : 360;
  double minLon = lonDomain == 360 ? Projection::To360Domain(lonStats->Minimum()) : Projection::To180Domain(lonStats->Minimum());
  double maxLon = lonDomain == 360 ? Projection::To360Domain(lonStats->Maximum()) : Projection::To180Domain(lonStats->Maximum());

  if(userGrp.HasKeyword("LatitudeType")) {
    bool isOcentric = ((std::string)userGrp.FindKeyword("LatitudeType")) == "Planetocentric";

    double equRadius;
    double polRadius;

    // If the user entered the equatorial and polar radii
    if(ui.WasEntered("EQURADIUS") && ui.WasEntered("POLRADIUS")) {
      equRadius = ui.GetDouble("EQURADIUS");
      polRadius = ui.GetDouble("POLRADIUS");
    }
    // Else read them from the pck
    else {
      Filename pckFile("$base/kernels/pck/pck?????.tpc");
      pckFile.HighestVersion();

      string pckFilename = pckFile.Expanded();
      furnsh_c(pckFilename.c_str());

      string target;

      // If the user entered the target
      if(ui.WasEntered("TARGET")) {
        target = ui.GetString("TARGET");
      }
      // Else read the target name from the input cube
      else {
        Pvl fromFile;
        fromFile.Read(ui.GetFilename("FROM"));
        target = (string)fromFile.FindKeyword("TargetName", Pvl::Traverse);
      }

      SpiceInt code;
      SpiceBoolean found;

      bodn2c_c(target.c_str(), &code, &found);

      if (!found) {
        string msg = "Could not convert Target [" + target + "] to NAIF code";
        throw Isis::iException::Message(Isis::iException::Io, msg, _FILEINFO_);
      }

      SpiceInt n;
      SpiceDouble radii[3];

      bodvar_c(code, "RADII", &n, radii);

      equRadius = radii[0] * 1000;
      polRadius = radii[2] * 1000;
    }

    if(isOcentric) {
      if(ui.GetString("LATTYPE") != "PLANETOCENTRIC") {
        minLat = Projection::ToPlanetocentric(minLat, (double)equRadius, (double)polRadius);
        maxLat = Projection::ToPlanetocentric(maxLat, (double)equRadius, (double)polRadius);
      }
    }
    else {
      if(ui.GetString("LATTYPE") == "PLANETOCENTRIC") {
        minLat = Projection::ToPlanetographic(minLat, (double)equRadius, (double)polRadius);
        maxLat = Projection::ToPlanetographic(maxLat, (double)equRadius, (double)polRadius);
      }
    }
  }

  if(userGrp.HasKeyword("LongitudeDirection")) {
    bool isPosEast = ((std::string)userGrp.FindKeyword("LongitudeDirection")) == "PositiveEast";

    if(isPosEast) {
      if(ui.GetString("LONDIR") != "POSITIVEEAST") {
        minLon = Projection::ToPositiveEast(minLon, lonDomain);
        maxLon = Projection::ToPositiveEast(maxLon, lonDomain);

        if(minLon > maxLon) {
          double temp = minLon;
          minLon = maxLon;
          maxLon = temp;
        }
      }
    }
    else {
      if(ui.GetString("LONDIR") == "POSITIVEEAST") {
        minLon = Projection::ToPositiveWest(minLon, lonDomain);
        maxLon = Projection::ToPositiveWest(maxLon, lonDomain);

        if(minLon > maxLon) {
          double temp = minLon;
          minLon = maxLon;
          maxLon = temp;
        }
      }
    }
  }

  // Set the computed ground range parameters in the UI
  ui.Clear("MINLAT");
  ui.PutDouble("MINLAT", minLat);
  ui.Clear("MAXLAT");
  ui.PutDouble("MAXLAT", maxLat);
  ui.Clear("MINLON");
  ui.PutDouble("MINLON", minLon);
  ui.Clear("MAXLON");
  ui.PutDouble("MAXLON", maxLon);

  p.EndProcess();

  // Set the default ground range parameter to COMPUTE
  ui.Clear("DEFAULTRANGE");
  ui.PutAsString("DEFAULTRANGE", "COMPUTE");
}
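// The latitude handling above converts between planetocentric and
// planetographic latitudes via Projection::ToPlanetocentric() /
// ToPlanetographic(). The standalone sketch below shows the standard relation
// those conversions are based on, tan(ographic) = (a/b)^2 * tan(ocentric)
// with a = equatorial and b = polar radius; the helper names and the
// Mars-like radii are illustrative, not taken from the source.
#include <cmath>
#include <cstdio>

static const double kDegToRad = 3.14159265358979323846 / 180.0;

static double toPlanetographicLat(double ocentricDeg, double a, double b) {
  return std::atan(std::tan(ocentricDeg * kDegToRad) * (a / b) * (a / b)) / kDegToRad;
}

static double toPlanetocentricLat(double ographicDeg, double a, double b) {
  return std::atan(std::tan(ographicDeg * kDegToRad) * (b / a) * (b / a)) / kDegToRad;
}

int main() {
  const double a = 3396190.0;   // equatorial radius in metres (illustrative)
  const double b = 3376200.0;   // polar radius in metres (illustrative)

  double ographic = toPlanetographicLat(45.0, a, b);
  std::printf("45.0 deg ocentric -> %.4f deg ographic -> %.4f deg ocentric\n",
              ographic, toPlanetocentricLat(ographic, a, b));
  return 0;
}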
// Return a PVL group containing the statistical information
PvlGroup PvlStats(Statistics &stats, const QString &name) {
  // Construct a label with the results
  PvlGroup results(name);

  if(stats.ValidPixels() != 0) {
    results += PvlKeyword("Average", toString(stats.Average()));
    results += PvlKeyword("StandardDeviation", toString(stats.StandardDeviation()));
    results += PvlKeyword("Variance", toString(stats.Variance()));
    results += PvlKeyword("Minimum", toString(stats.Minimum()));
    results += PvlKeyword("Maximum", toString(stats.Maximum()));
  }

  results += PvlKeyword("TotalPixels", toString(stats.TotalPixels()));
  results += PvlKeyword("ValidPixels", toString(stats.ValidPixels()));
  results += PvlKeyword("NullPixels", toString(stats.NullPixels()));
  results += PvlKeyword("LisPixels", toString(stats.LisPixels()));
  results += PvlKeyword("LrsPixels", toString(stats.LrsPixels()));
  results += PvlKeyword("HisPixels", toString(stats.HisPixels()));
  results += PvlKeyword("HrsPixels", toString(stats.HrsPixels()));

  return results;
}
// Add statistics data for one (sample, line) position to the global stats objects.
// Only positions where the camera model intersects the surface contribute.
void buildStats(Camera *cam, int &sample, int &line) {
  cam->SetImage(sample, line);

  if (cam->HasSurfaceIntersection()) {
    latStat.AddData(cam->UniversalLatitude());
    lonStat.AddData(cam->UniversalLongitude());
    resStat.AddData(cam->PixelResolution());
    sampleResStat.AddData(cam->SampleResolution());
    lineResStat.AddData(cam->LineResolution());
    phaseStat.AddData(cam->PhaseAngle());
    emissionStat.AddData(cam->EmissionAngle());
    incidenceStat.AddData(cam->IncidenceAngle());
    localSolarTimeStat.AddData(cam->LocalSolarTime());
    localRaduisStat.AddData(cam->LocalRadius());
    northAzimuthStat.AddData(cam->NorthAzimuth());

    // Aspect ratio of a pixel at this location (line resolution / sample resolution)
    double Aratio = cam->LineResolution() / cam->SampleResolution();
    aspectRatioStat.AddData(Aratio);
  }
}
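// Each *Stat object above is an incremental accumulator: AddData() folds one
// observation into running summaries that can be queried later. The standalone
// class below sketches that idea with Welford's online algorithm; it mimics a
// few Statistics-style accessors but is not the ISIS Statistics class, and the
// sample resolution values are made up.
#include <cmath>
#include <cstdio>

class RunningStats {
public:
  void AddData(double x) {
    n_++;
    if (n_ == 1 || x < min_) min_ = x;
    if (n_ == 1 || x > max_) max_ = x;
    double delta = x - mean_;
    mean_ += delta / n_;
    m2_ += delta * (x - mean_);       // Welford update for the variance term
  }
  double Average() const           { return mean_; }
  double StandardDeviation() const { return n_ > 1 ? std::sqrt(m2_ / (n_ - 1)) : 0.0; }
  double Minimum() const           { return min_; }
  double Maximum() const           { return max_; }

private:
  long   n_ = 0;
  double mean_ = 0.0, m2_ = 0.0, min_ = 0.0, max_ = 0.0;
};

int main() {
  const double samples[] = {231.4, 229.8, 233.1};   // e.g. pixel resolutions in metres
  RunningStats resStat;
  for (double v : samples) {
    resStat.AddData(v);
  }
  std::printf("avg=%.2f sd=%.2f min=%.1f max=%.1f\n",
              resStat.Average(), resStat.StandardDeviation(),
              resStat.Minimum(), resStat.Maximum());
  return 0;
}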
void IsisMain() {
  Process p;

  // Get the list of names of input CCD cubes to stitch together
  FileList flist;
  UserInterface &ui = Application::GetUserInterface();
  flist.Read(ui.GetFilename("FROMLIST"));
  if (flist.size() < 1) {
    string msg = "The list file [" + ui.GetFilename("FROMLIST") + "] does not contain any filenames";
    throw iException::Message(iException::User, msg, _FILEINFO_);
  }

  string projection("Equirectangular");
  if(ui.WasEntered("MAP")) {
    Pvl mapfile(ui.GetFilename("MAP"));
    projection = (string) mapfile.FindGroup("Mapping")["ProjectionName"];
  }

  if(ui.WasEntered("PROJECTION")) {
    projection = ui.GetString("PROJECTION");
  }

  // Gather other user inputs to the projection
  string lattype = ui.GetString("LATTYPE");
  string londir = ui.GetString("LONDIR");
  string londom = ui.GetString("LONDOM");
  int digits = ui.GetInteger("PRECISION");

  // Fix them for the mapping group
  lattype = (lattype == "PLANETOCENTRIC") ? "Planetocentric" : "Planetographic";
  londir = (londir == "POSITIVEEAST") ? "PositiveEast" : "PositiveWest";

  Progress prog;
  prog.SetMaximumSteps(flist.size());
  prog.CheckStatus();

  Statistics scaleStat;
  Statistics longitudeStat;
  Statistics latitudeStat;
  Statistics equiRadStat;
  Statistics poleRadStat;
  PvlObject fileset("FileSet");

  // Save the major equatorial and polar radii of the last file processed
  double eqRad;
  double eq2Rad;
  double poleRad;
  string target("Unknown");

  for (unsigned int i = 0 ; i < flist.size() ; i++) {
    // Set the input image, get the camera model, and a basic mapping group
    Cube cube;
    cube.Open(flist[i]);
    int lines = cube.Lines();
    int samples = cube.Samples();

    PvlObject fmap("File");
    fmap += PvlKeyword("Name", flist[i]);
    fmap += PvlKeyword("Lines", lines);
    fmap += PvlKeyword("Samples", samples);

    Camera *cam = cube.Camera();
    Pvl mapping;
    cam->BasicMapping(mapping);
    PvlGroup &mapgrp = mapping.FindGroup("Mapping");
    mapgrp.AddKeyword(PvlKeyword("ProjectionName", projection), Pvl::Replace);
    mapgrp.AddKeyword(PvlKeyword("LatitudeType", lattype), Pvl::Replace);
    mapgrp.AddKeyword(PvlKeyword("LongitudeDirection", londir), Pvl::Replace);
    mapgrp.AddKeyword(PvlKeyword("LongitudeDomain", londom), Pvl::Replace);

    // Get the radii
    double radii[3];
    cam->Radii(radii);

    eqRad   = radii[0] * 1000.0;
    eq2Rad  = radii[1] * 1000.0;
    poleRad = radii[2] * 1000.0;

    target = cam->Target();
    equiRadStat.AddData(&eqRad, 1);
    poleRadStat.AddData(&poleRad, 1);

    // Get the resolution
    double lowres = cam->LowestImageResolution();
    double hires = cam->HighestImageResolution();
    scaleStat.AddData(&lowres, 1);
    scaleStat.AddData(&hires, 1);

    double pixres = (lowres+hires)/2.0;
    double scale = Scale(pixres, poleRad, eqRad);
    mapgrp.AddKeyword(PvlKeyword("PixelResolution", pixres), Pvl::Replace);
    mapgrp.AddKeyword(PvlKeyword("Scale", scale, "pixels/degree"), Pvl::Replace);
    mapgrp += PvlKeyword("MinPixelResolution", lowres, "meters");
    mapgrp += PvlKeyword("MaxPixelResolution", hires, "meters");

    // Get the universal ground range
    double minlat, maxlat, minlon, maxlon;
    cam->GroundRange(minlat, maxlat, minlon, maxlon, mapping);
    mapgrp.AddKeyword(PvlKeyword("MinimumLatitude", minlat), Pvl::Replace);
    mapgrp.AddKeyword(PvlKeyword("MaximumLatitude", maxlat), Pvl::Replace);
    mapgrp.AddKeyword(PvlKeyword("MinimumLongitude", minlon), Pvl::Replace);
    mapgrp.AddKeyword(PvlKeyword("MaximumLongitude", maxlon), Pvl::Replace);

    fmap.AddGroup(mapgrp);
    fileset.AddObject(fmap);

    longitudeStat.AddData(&minlon, 1);
    longitudeStat.AddData(&maxlon, 1);
    latitudeStat.AddData(&minlat, 1);
    latitudeStat.AddData(&maxlat, 1);

    p.ClearInputCubes();
    prog.CheckStatus();
  }

  // Construct the output mapping group with statistics
  PvlGroup mapping("Mapping");
  double avgPixRes((scaleStat.Minimum()+scaleStat.Maximum())/2.0);
  double avgLat((latitudeStat.Minimum()+latitudeStat.Maximum())/2.0);
  double avgLon((longitudeStat.Minimum()+longitudeStat.Maximum())/2.0);
  double avgEqRad((equiRadStat.Minimum()+equiRadStat.Maximum())/2.0);
  double avgPoleRad((poleRadStat.Minimum()+poleRadStat.Maximum())/2.0);
  double scale = Scale(avgPixRes, avgPoleRad, avgEqRad);

  mapping += PvlKeyword("ProjectionName", projection);
  mapping += PvlKeyword("TargetName", target);
  mapping += PvlKeyword("EquatorialRadius", eqRad, "meters");
  mapping += PvlKeyword("PolarRadius", poleRad, "meters");
  mapping += PvlKeyword("LatitudeType", lattype);
  mapping += PvlKeyword("LongitudeDirection", londir);
  mapping += PvlKeyword("LongitudeDomain", londom);
  mapping += PvlKeyword("PixelResolution", SetRound(avgPixRes, digits), "meters/pixel");
  mapping += PvlKeyword("Scale", SetRound(scale, digits), "pixels/degree");
  mapping += PvlKeyword("MinPixelResolution", scaleStat.Minimum(), "meters");
  mapping += PvlKeyword("MaxPixelResolution", scaleStat.Maximum(), "meters");
  mapping += PvlKeyword("CenterLongitude", SetRound(avgLon, digits));
  mapping += PvlKeyword("CenterLatitude", SetRound(avgLat, digits));
  mapping += PvlKeyword("MinimumLatitude", MAX(SetFloor(latitudeStat.Minimum(), digits), -90.0));
  mapping += PvlKeyword("MaximumLatitude", MIN(SetCeil(latitudeStat.Maximum(), digits), 90.0));
  mapping += PvlKeyword("MinimumLongitude", MAX(SetFloor(longitudeStat.Minimum(), digits), -180.0));
  mapping += PvlKeyword("MaximumLongitude", MIN(SetCeil(longitudeStat.Maximum(), digits), 360.0));

  PvlKeyword clon("PreciseCenterLongitude", avgLon);
  clon.AddComment("Actual Parameters without precision applied");
  mapping += clon;
  mapping += PvlKeyword("PreciseCenterLatitude", avgLat);
  mapping += PvlKeyword("PreciseMinimumLatitude", latitudeStat.Minimum());
  mapping += PvlKeyword("PreciseMaximumLatitude", latitudeStat.Maximum());
  mapping += PvlKeyword("PreciseMinimumLongitude", longitudeStat.Minimum());
  mapping += PvlKeyword("PreciseMaximumLongitude", longitudeStat.Maximum());

  Application::GuiLog(mapping);

  // Write the output map file if requested
  if (ui.WasEntered("TO")) {
    Pvl temp;
    temp.AddGroup(mapping);
    temp.Write(ui.GetFilename("TO", "map"));
  }

  // Write the per-file log if requested
  if (ui.WasEntered("LOG")) {
    Pvl temp;
    temp.AddObject(fileset);
    temp.Write(ui.GetFilename("LOG", "log"));
  }

  p.EndProcess();
}
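// Scale() above turns a resolution in metres/pixel into a map scale in
// pixels/degree. The usual convention is that one degree of arc spans
// 2*pi*R/360 metres, so scale = metres-per-degree / metres-per-pixel. The body
// of Scale() is not part of this excerpt, so the sketch below is an assumption
// about its intent; pixelsPerDegree() and the sample numbers are hypothetical.
#include <cstdio>

static double pixelsPerDegree(double metersPerPixel, double radiusMeters) {
  const double kPi = 3.14159265358979323846;
  double metersPerDegree = 2.0 * kPi * radiusMeters / 360.0;   // arc length of one degree
  return metersPerDegree / metersPerPixel;
}

int main() {
  // 100 m/pixel on a Mars-sized body (equatorial radius ~3396 km)
  std::printf("scale = %.1f pixels/degree\n", pixelsPerDegree(100.0, 3396190.0));
  return 0;
}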
// Reset the given statistics accumulator so it can be reused.
void XmlResultPrinter::clearStatsBy(Statistics& stats) {
  stats.reset();
}