/** Load logs from Nexus file. Logs are expected to be in the
 * /run/sample group of the file.
 * @param ws :: The workspace to load the logs to.
 * @param entry :: The Nexus entry
 * @param period :: The period of this workspace
 */
void LoadMuonNexus2::loadLogs(API::MatrixWorkspace_sptr ws, NXEntry &entry,
                              int period) {
  // Avoid compiler warning
  (void)period;

  std::string start_time = entry.getString("start_time");

  std::string sampleName = entry.getString("sample/name");
  NXMainClass runlogs = entry.openNXClass<NXMainClass>("sample");
  ws->mutableSample().setName(sampleName);

  for (std::vector<NXClassInfo>::const_iterator it = runlogs.groups().begin();
       it != runlogs.groups().end(); ++it) {
    NXLog nxLog = runlogs.openNXLog(it->nxname);
    Kernel::Property *logv = nxLog.createTimeSeries(start_time);
    if (!logv)
      continue;
    ws->mutableRun().addLogData(logv);
  }

  ws->setTitle(entry.getString("title"));

  if (entry.containsDataSet("notes")) {
    ws->setComment(entry.getString("notes"));
  }

  std::string run_num = std::to_string(entry.getInt("run_number"));
  // The sample is left to delete the property
  ws->mutableRun().addLogData(
      new PropertyWithValue<std::string>("run_number", run_num));

  ws->populateInstrumentParameters();
}
/**
 * Sets the start date on a dummy workspace. If there is a detector table file
 * available we update the dummy workspace with the start date from this file.
 * @param workspace: dummy workspace
 */
void CreateSimulationWorkspace::setStartDate(
    API::MatrixWorkspace_sptr workspace) {
  const std::string detTableFile = getProperty("DetectorTableFilename");
  auto hasDetTableFile = !detTableFile.empty();
  auto &run = workspace->mutableRun();

  Kernel::DateAndTime startTime;
  Kernel::DateAndTime endTime;
  try {
    // The start and end times might not be valid, and hence can throw
    startTime = run.startTime();
    endTime = run.endTime();
  } catch (std::runtime_error &) {
    startTime = Kernel::DateAndTime::getCurrentTime();
    endTime = Kernel::DateAndTime::getCurrentTime();
  }

  if (hasDetTableFile) {
    if (boost::algorithm::ends_with(detTableFile, ".raw") ||
        boost::algorithm::ends_with(detTableFile, ".RAW")) {
      auto startAndEndTime = getStartAndEndTimesFromRawFile(detTableFile);
      startTime = startAndEndTime.startTime;
      endTime = startAndEndTime.endTime;
    } else if (boost::algorithm::ends_with(detTableFile, ".nxs") ||
               boost::algorithm::ends_with(detTableFile, ".NXS")) {
      auto startAndEndTime =
          getStartAndEndTimesFromNexusFile(detTableFile, startTime, endTime);
      startTime = startAndEndTime.startTime;
      endTime = startAndEndTime.endTime;
    }
  }

  run.setStartAndEndTime(startTime, endTime);
}
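// Example (not part of the Mantid source): a minimal standalone sketch of the
// same extension-based dispatch used above. The endsWithNoCase helper is
// hypothetical; it replaces the two boost::algorithm::ends_with calls per
// extension with one case-insensitive check.
#include <algorithm>
#include <cctype>
#include <iostream>
#include <string>

static bool endsWithNoCase(const std::string &name, const std::string &suffix) {
  if (name.size() < suffix.size())
    return false;
  return std::equal(suffix.rbegin(), suffix.rend(), name.rbegin(),
                    [](char a, char b) {
                      return std::tolower(static_cast<unsigned char>(a)) ==
                             std::tolower(static_cast<unsigned char>(b));
                    });
}

int main() {
  for (const std::string file : {"det_table.RAW", "det_table.nxs", "notes.txt"}) {
    if (endsWithNoCase(file, ".raw"))
      std::cout << file << " -> RAW branch\n";
    else if (endsWithNoCase(file, ".nxs"))
      std::cout << file << " -> NeXus branch\n";
    else
      std::cout << file << " -> start/end times left unchanged\n";
  }
  return 0;
}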
void HFIRLoad::exec() {
  // Reduction property manager
  const std::string reductionManagerName = getProperty("ReductionProperties");
  boost::shared_ptr<PropertyManager> reductionManager;
  if (PropertyManagerDataService::Instance().doesExist(reductionManagerName)) {
    reductionManager =
        PropertyManagerDataService::Instance().retrieve(reductionManagerName);
  } else {
    reductionManager = boost::make_shared<PropertyManager>();
    PropertyManagerDataService::Instance().addOrReplace(reductionManagerName,
                                                        reductionManager);
  }

  Progress progress(this, 0, 1, 5);
  progress.report();

  // If the load algorithm isn't in the reduction properties, add it
  if (!reductionManager->existsProperty("LoadAlgorithm")) {
    auto algProp = make_unique<AlgorithmProperty>("LoadAlgorithm");
    algProp->setValue(toString());
    reductionManager->declareProperty(std::move(algProp));
  }

  const std::string fileName = getPropertyValue("Filename");

  // Output log
  std::string output_message = "";

  const double wavelength_input = getProperty("Wavelength");
  const double wavelength_spread_input = getProperty("WavelengthSpread");

  progress.report("LoadSpice2D...");
  IAlgorithm_sptr loadAlg = createChildAlgorithm("LoadSpice2D", 0, 0.2);
  loadAlg->setProperty("Filename", fileName);
  if (!isEmpty(wavelength_input)) {
    loadAlg->setProperty("Wavelength", wavelength_input);
    loadAlg->setProperty("WavelengthSpread", wavelength_spread_input);
  }
  try {
    loadAlg->executeAsChildAlg();
  } catch (...) {
    // The only way HFIR SANS can load Nexus files is if it's loading data that
    // has already been processed, which only happens with sensitivity data.
    // So if we make it here and are still unable to load the file, assume it's
    // a sensitivity file. This also covers the special case where the
    // instrument scientist uses a reduced data set as a sensitivity data set.
    g_log.warning() << "Unable to load file as a SPICE file. Trying to load as "
                       "a Nexus file.\n";
    loadAlg = createChildAlgorithm("Load", 0, 0.2);
    loadAlg->setProperty("Filename", fileName);
    loadAlg->executeAsChildAlg();
    Workspace_sptr dataWS_tmp = loadAlg->getProperty("OutputWorkspace");
    MatrixWorkspace_sptr dataWS =
        boost::dynamic_pointer_cast<MatrixWorkspace>(dataWS_tmp);
    dataWS->mutableRun().addProperty("is_sensitivity", 1, "", true);
    setProperty<MatrixWorkspace_sptr>("OutputWorkspace", dataWS);
    g_log.notice() << "Successfully loaded " << fileName
                   << " and setting sensitivity flag to True\n";
    return;
  }
  Workspace_sptr dataWS_tmp = loadAlg->getProperty("OutputWorkspace");
  API::MatrixWorkspace_sptr dataWS =
      boost::dynamic_pointer_cast<MatrixWorkspace>(dataWS_tmp);

  // Get the sample-detector distance:
  // if SampleDetectorDistance is provided, use it; otherwise read
  // "sample-detector-distance" from the data file and apply
  // SampleDetectorDistanceOffset if given.
  double sdd = 0.0;
  const double sample_det_dist = getProperty("SampleDetectorDistance");
  if (!isEmpty(sample_det_dist)) {
    sdd = sample_det_dist;
  } else {
    const std::string sddName = "sample-detector-distance";
    Mantid::Kernel::Property *prop = dataWS->run().getProperty(sddName);
    Mantid::Kernel::PropertyWithValue<double> *dp =
        dynamic_cast<Mantid::Kernel::PropertyWithValue<double> *>(prop);
    if (!dp) {
      throw std::runtime_error("Could not cast (interpret) the property " +
                               sddName +
                               " as a floating point numeric value.");
    }
    sdd = *dp;

    // Modify SDD according to offset if given
    const double sample_det_offset =
        getProperty("SampleDetectorDistanceOffset");
    if (!isEmpty(sample_det_offset)) {
      sdd += sample_det_offset;
    }
  }
  dataWS->mutableRun().addProperty("sample_detector_distance", sdd, "mm", true);

  progress.report("MoveInstrumentComponent...");

  // Move the detector to its correct position
  IAlgorithm_sptr mvAlg =
      createChildAlgorithm("MoveInstrumentComponent", 0.2, 0.4);
  mvAlg->setProperty<MatrixWorkspace_sptr>("Workspace", dataWS);
  mvAlg->setProperty("ComponentName", "detector1");
  mvAlg->setProperty("Z", sdd / 1000.0);
  mvAlg->setProperty("RelativePosition", false);
  mvAlg->executeAsChildAlg();
  g_log.information() << "Moving detector to " << sdd / 1000.0 << '\n';
  output_message += " Detector position: " +
                    Poco::NumberFormatter::format(sdd / 1000.0, 3) + " m\n";

  // Compute beam diameter at the detector
  double src_to_sample = 0.0;
  try {
    src_to_sample = HFIRInstrument::getSourceToSampleDistance(dataWS);
    dataWS->mutableRun().addProperty("source-sample-distance", src_to_sample,
                                     "mm", true);
    output_message +=
        " Computed SSD from number of guides: " +
        Poco::NumberFormatter::format(src_to_sample / 1000.0, 3) + " \n";
  } catch (...) {
    Mantid::Kernel::Property *prop =
        dataWS->run().getProperty("source-sample-distance");
    Mantid::Kernel::PropertyWithValue<double> *dp =
        dynamic_cast<Mantid::Kernel::PropertyWithValue<double> *>(prop);
    src_to_sample = *dp;
    output_message +=
        " Could not compute SSD from number of guides, taking: " +
        Poco::NumberFormatter::format(src_to_sample / 1000.0, 3) + " \n";
  }

  const std::string sampleADName = "sample-aperture-diameter";
  Mantid::Kernel::Property *prop = dataWS->run().getProperty(sampleADName);
  Mantid::Kernel::PropertyWithValue<double> *dp =
      dynamic_cast<Mantid::Kernel::PropertyWithValue<double> *>(prop);
  if (!dp) {
    throw std::runtime_error("Could not cast (interpret) the property " +
                             sampleADName +
                             " as a floating point numeric value.");
  }
  double sample_apert = *dp;

  const std::string sourceADName = "source-aperture-diameter";
  prop = dataWS->run().getProperty(sourceADName);
  dp = dynamic_cast<Mantid::Kernel::PropertyWithValue<double> *>(prop);
  if (!dp) {
    throw std::runtime_error("Could not cast (interpret) the property " +
                             sourceADName +
                             " as a floating point numeric value.");
  }
  double source_apert = *dp;

  const double beam_diameter =
      sdd / src_to_sample * (source_apert + sample_apert) + sample_apert;
  dataWS->mutableRun().addProperty("beam-diameter", beam_diameter, "mm", true);

  progress.report("Move to center beam...");

  double center_x = 0;
  double center_y = 0;

  // Move the beam center to its proper position
  const bool noBeamCenter = getProperty("NoBeamCenter");
  if (!noBeamCenter) {
    center_x = getProperty("BeamCenterX");
    center_y = getProperty("BeamCenterY");
    if (isEmpty(center_x) && isEmpty(center_y)) {
      if (reductionManager->existsProperty("LatestBeamCenterX") &&
          reductionManager->existsProperty("LatestBeamCenterY")) {
        center_x = reductionManager->getProperty("LatestBeamCenterX");
        center_y = reductionManager->getProperty("LatestBeamCenterY");
      }
    }
    moveToBeamCenter(dataWS, center_x, center_y);
    progress.report();

    // Add beam center to reduction properties, as the last beam center
    // position that was used. This will give us our default position next time.
    if (!reductionManager->existsProperty("LatestBeamCenterX"))
      reductionManager->declareProperty(make_unique<PropertyWithValue<double>>(
          "LatestBeamCenterX", center_x));
    else
      reductionManager->setProperty("LatestBeamCenterX", center_x);
    if (!reductionManager->existsProperty("LatestBeamCenterY"))
      reductionManager->declareProperty(make_unique<PropertyWithValue<double>>(
          "LatestBeamCenterY", center_y));
    else
      reductionManager->setProperty("LatestBeamCenterY", center_y);

    dataWS->mutableRun().addProperty("beam_center_x", center_x, "pixel", true);
    dataWS->mutableRun().addProperty("beam_center_y", center_y, "pixel", true);
    output_message += " Beam center: " +
                      Poco::NumberFormatter::format(center_x, 1) + ", " +
                      Poco::NumberFormatter::format(center_y, 1) + "\n";
  } else {
    HFIRInstrument::getDefaultBeamCenter(dataWS, center_x, center_y);
    dataWS->mutableRun().addProperty("beam_center_x", center_x, "pixel", true);
    dataWS->mutableRun().addProperty("beam_center_y", center_y, "pixel", true);
    output_message += " Default beam center: " +
                      Poco::NumberFormatter::format(center_x, 1) + ", " +
                      Poco::NumberFormatter::format(center_y, 1) + "\n";
  }

  setProperty<MatrixWorkspace_sptr>(
      "OutputWorkspace", boost::dynamic_pointer_cast<MatrixWorkspace>(dataWS));
  setPropertyValue("OutputMessage", output_message);
}
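// Example (not part of the Mantid source): the beam-diameter arithmetic above
// is easy to check in isolation. A minimal standalone sketch using
// illustrative numbers only, with all distances in mm as in the run logs.
#include <iostream>

int main() {
  // Illustrative values: sample-detector distance, source-sample distance,
  // and the source/sample aperture diameters.
  const double sdd = 6000.0;
  const double src_to_sample = 11000.0;
  const double source_apert = 40.0;
  const double sample_apert = 14.0;

  // Same expression as in HFIRLoad::exec(): the apertures are projected onto
  // the detector plane and the sample aperture diameter is added back.
  const double beam_diameter =
      sdd / src_to_sample * (source_apert + sample_apert) + sample_apert;

  std::cout << "beam diameter at detector: " << beam_diameter << " mm\n";
  return 0;
}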
template <typename T>
void LoadSpiceAscii::addProperty(API::MatrixWorkspace_sptr ws,
                                 const std::string &pname, T pvalue) {
  ws->mutableRun().addLogData(new PropertyWithValue<T>(pname, pvalue));
}
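// Example (not part of the Mantid source): a minimal standalone sketch of the
// same pattern, using a hypothetical PropertyStore in place of the workspace
// run so the single templated helper can be seen handling string, int and
// double metadata without separate overloads.
#include <iostream>
#include <map>
#include <sstream>
#include <string>

// Hypothetical stand-in for the run's property store: it just keeps the
// string form of whatever typed value is added to it.
struct PropertyStore {
  std::map<std::string, std::string> values;
  template <typename T> void add(const std::string &name, const T &value) {
    std::ostringstream os;
    os << value;
    values[name] = os.str();
  }
};

// Same shape as LoadSpiceAscii::addProperty.
template <typename T>
void addProperty(PropertyStore &store, const std::string &pname, T pvalue) {
  store.add(pname, pvalue);
}

int main() {
  PropertyStore run;
  addProperty(run, "instrument", std::string("HB2A"));
  addProperty(run, "run_number", 1234);
  addProperty(run, "monitor", 2.5);
  for (const auto &kv : run.values)
    std::cout << kv.first << " = " << kv.second << "\n";
  return 0;
}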
/*
 * Add and check log from processed absolute time stamps
 */
void ProcessDasNexusLog::addLog(API::MatrixWorkspace_sptr ws,
                                std::vector<Kernel::DateAndTime> timevec,
                                double unifylogvalue, std::string logname,
                                std::vector<Kernel::DateAndTime> pulsetimes,
                                std::vector<double> orderedtofs, bool docheck) {
  // 1. Do some statistics
  g_log.notice() << "Vector size = " << timevec.size() << std::endl;

  // NB: despite the "ms" suffixes, the working values below are obtained as
  // nanoseconds * 1.0E-3, i.e. microseconds.
  double sum1dtms = 0.0; // sum(dt)
  double sum2dtms = 0.0; // sum(dt^2)
  size_t numinvert = 0;
  size_t numsame = 0;
  size_t numnormal = 0;
  double maxdtms = 0;
  double mindtms = 1.0E20;
  size_t numdtabove10p = 0;
  size_t numdtbelow10p = 0;
  double sampledtms = 0.00832646 * 1.0E6;
  double dtmsA10p = sampledtms * 1.1;
  double dtmsB10p = sampledtms / 1.0;

  for (size_t i = 1; i < timevec.size(); i++) {
    int64_t dtns =
        timevec[i].totalNanoseconds() - timevec[i - 1].totalNanoseconds();
    double dtms = static_cast<double>(dtns) * 1.0E-3;

    sum1dtms += dtms;
    sum2dtms += dtms * dtms;
    if (dtns == 0)
      numsame++;
    else if (dtns < 0)
      numinvert++;
    else
      numnormal++;
    if (dtms > maxdtms)
      maxdtms = dtms;
    if (dtms < mindtms)
      mindtms = dtms;
    if (dtms > dtmsA10p)
      numdtabove10p++;
    else if (dtms < dtmsB10p)
      numdtbelow10p++;
  } // ENDFOR

  double dt = sum1dtms / static_cast<double>(timevec.size()) * 1.0E-6;
  double stddt = sqrt(sum2dtms / static_cast<double>(timevec.size()) * 1.0E-12 -
                      dt * dt);

  g_log.notice() << "Normal dt = " << numnormal << std::endl;
  g_log.notice() << "Zero dt = " << numsame << std::endl;
  g_log.notice() << "Negative dt = " << numinvert << std::endl;
  g_log.notice() << "Avg d(T) = " << dt << " seconds +/- " << stddt
                 << ", Frequency = " << 1.0 / dt << std::endl;
  g_log.notice() << "d(T) (unit ms) is in range [" << mindtms << ", " << maxdtms
                 << "]" << std::endl;
  g_log.notice() << "Number of d(T) 10% larger than average = " << numdtabove10p
                 << std::endl;
  g_log.notice() << "Number of d(T) 10% smaller than average = "
                 << numdtbelow10p << std::endl;

  g_log.notice() << "Size of timevec, pulsestimes, orderedtofs = "
                 << timevec.size() << ", " << pulsetimes.size() << ", "
                 << orderedtofs.size() << std::endl;

  if (docheck) {
    exportErrorLog(ws, timevec, pulsetimes, orderedtofs, 1 / (0.5 * 240.1));
    calDistributions(timevec, 1 / (0.5 * 240.1));
  }

  // 2. Add log
  Kernel::TimeSeriesProperty<double> *newlog =
      new Kernel::TimeSeriesProperty<double>(logname);
  for (size_t i = 0; i < timevec.size(); i++) {
    newlog->addValue(timevec[i], unifylogvalue);
  }
  ws->mutableRun().addProperty(newlog, true);

  return;
}
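// Example (not part of the Mantid source): the interval statistics above use
// the usual running-sum identity stddev = sqrt(E[dt^2] - E[dt]^2). A minimal
// standalone sketch with made-up time stamps; note it divides by the number of
// intervals, whereas the code above divides by the vector length.
#include <cmath>
#include <iostream>
#include <vector>

int main() {
  // Illustrative time stamps in nanoseconds (not real DAS data).
  const std::vector<long long> t = {0, 8330000, 16640000, 24980000, 33300000};

  double sum1 = 0.0; // sum(dt)
  double sum2 = 0.0; // sum(dt^2)
  for (std::size_t i = 1; i < t.size(); ++i) {
    const double dt = static_cast<double>(t[i] - t[i - 1]) * 1.0E-9; // seconds
    sum1 += dt;
    sum2 += dt * dt;
  }

  const double n = static_cast<double>(t.size() - 1);
  const double mean = sum1 / n;
  const double stddev = std::sqrt(sum2 / n - mean * mean);

  std::cout << "mean dt = " << mean << " s, stddev = " << stddev
            << " s, frequency = " << 1.0 / mean << " Hz\n";
  return 0;
}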
void LoadDaveGrp::exec() {
  const std::string filename = this->getProperty("Filename");

  int yLength = 0;

  MantidVec *xAxis = new MantidVec();
  MantidVec *yAxis = new MantidVec();

  std::vector<MantidVec *> data;
  std::vector<MantidVec *> errors;

  this->ifile.open(filename.c_str());
  if (this->ifile.is_open()) {
    // Size of x axis
    this->getAxisLength(this->xLength);
    // Size of y axis
    this->getAxisLength(yLength);
    // This is also the number of groups (spectra)
    this->nGroups = yLength;
    // Read in the x axis values
    this->getAxisValues(xAxis, static_cast<std::size_t>(this->xLength));
    // Read in the y axis values
    this->getAxisValues(yAxis, static_cast<std::size_t>(yLength));
    // Read in the data
    this->getData(data, errors);
  }
  this->ifile.close();

  // Scale the x-axis if it is in micro-eV to get it to meV
  const bool isUeV = this->getProperty("IsMicroEV");
  if (isUeV) {
    MantidVec::iterator iter;
    for (iter = xAxis->begin(); iter != xAxis->end(); ++iter) {
      *iter /= 1000.0;
    }
  }

  // Create workspace
  API::MatrixWorkspace_sptr outputWorkspace =
      boost::dynamic_pointer_cast<API::MatrixWorkspace>(
          API::WorkspaceFactory::Instance().create("Workspace2D", this->nGroups,
                                                   this->xLength, yLength));
  // Force the workspace to be a distribution
  outputWorkspace->isDistribution(true);

  // Set the x-axis units
  outputWorkspace->getAxis(0)->unit() =
      Kernel::UnitFactory::Instance().create(this->getProperty("XAxisUnits"));

  API::Axis *const verticalAxis = new API::NumericAxis(yLength);
  // Set the y-axis units
  verticalAxis->unit() =
      Kernel::UnitFactory::Instance().create(this->getProperty("YAxisUnits"));

  outputWorkspace->replaceAxis(1, verticalAxis);

  for (int i = 0; i < this->nGroups; i++) {
    outputWorkspace->dataX(i) = *xAxis;
    outputWorkspace->dataY(i) = *data[i];
    outputWorkspace->dataE(i) = *errors[i];
    verticalAxis->setValue(i, yAxis->at(i));

    delete data[i];
    delete errors[i];
  }

  delete xAxis;
  delete yAxis;

  outputWorkspace->mutableRun().addProperty("Filename", filename);
  this->setProperty("OutputWorkspace", outputWorkspace);
}
/// Execute the algorithm.
void PDDetermineCharacterizations::exec() {
  // setup property manager to return
  const std::string managerName = getPropertyValue("ReductionProperties");
  if (PropertyManagerDataService::Instance().doesExist(managerName)) {
    m_propertyManager =
        PropertyManagerDataService::Instance().retrieve(managerName);
  } else {
    m_propertyManager = boost::make_shared<Kernel::PropertyManager>();
    PropertyManagerDataService::Instance().addOrReplace(managerName,
                                                        m_propertyManager);
  }

  setDefaultsInPropManager();

  m_characterizations = getProperty(CHAR_PROP_NAME);
  if (bool(m_characterizations) && (m_characterizations->rowCount() > 0)) {
    API::MatrixWorkspace_sptr inputWS = getProperty("InputWorkspace");
    auto run = inputWS->mutableRun();

    double frequency = getLogValue(run, FREQ_PROP_NAME);
    double wavelength = getLogValue(run, WL_PROP_NAME);

    // determine the container name
    std::string container;
    if (run.hasProperty("SampleContainer")) {
      const auto containerProp = run.getLogData("SampleContainer");
      // the property is normally a TimeSeriesProperty
      const auto containerPropSeries =
          dynamic_cast<TimeSeriesProperty<std::string> *>(containerProp);
      if (containerPropSeries) {
        // assume that only the first value matters
        container = containerPropSeries->valuesAsVector().front();
      } else {
        // try as a normal Property
        container = containerProp->value();
      }

      // remove whitespace from the value
      container = Kernel::Strings::replaceAll(container, " ", "");
    }

    getInformationFromTable(frequency, wavelength, container);
  }

  overrideRunNumProperty("BackRun", "container");
  overrideRunNumProperty("NormRun", "vanadium");
  overrideRunNumProperty("NormBackRun", "vanadium_background");
  overrideRunNumProperty("EmptyEnv", "empty_environment");
  overrideRunNumProperty("EmptyInstr", "empty_instrument");

  std::vector<std::string> expectedNames = getColumnNames();
  for (auto &expectedName : expectedNames) {
    if (m_propertyManager->existsProperty(expectedName)) {
      g_log.debug() << expectedName << ":"
                    << m_propertyManager->getPropertyValue(expectedName)
                    << "\n";
    } else {
      g_log.warning() << expectedName << " DOES NOT EXIST\n";
    }
  }
}
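// Example (not part of the Mantid source): the SampleContainer handling above
// is an instance of a common pattern - try to dynamic_cast a stored
// base-class pointer to the richer derived type and fall back to the base
// interface if the cast fails. A minimal standalone sketch with hypothetical
// Property / StringSeries stand-ins for Mantid's Property and
// TimeSeriesProperty<std::string>.
#include <iostream>
#include <string>
#include <vector>

struct Property {
  virtual ~Property() = default;
  virtual std::string value() const = 0;
};

struct StringSeries : Property {
  std::vector<std::string> values;
  std::string value() const override {
    return values.empty() ? "" : values.back();
  }
};

struct PlainString : Property {
  std::string text;
  std::string value() const override { return text; }
};

// Prefer the first value of a series; otherwise fall back to value().
std::string firstOrValue(const Property *prop) {
  if (const auto *series = dynamic_cast<const StringSeries *>(prop))
    return series->values.empty() ? "" : series->values.front();
  return prop->value();
}

int main() {
  StringSeries series;
  series.values = {"PAC06", "PAC08"};
  PlainString plain;
  plain.text = "PAC06";

  std::cout << firstOrValue(&series) << "\n"; // first series value
  std::cout << firstOrValue(&plain) << "\n";  // plain value
  return 0;
}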
/**
 * Splits multiperiod histogram data into separate workspaces and puts them in
 * a group
 *
 * @param numPeriods :: number of periods
 **/
void LoadNexusMonitors2::splitMutiPeriodHistrogramData(
    const size_t numPeriods) {
  // protection - we should not have entered the routine if these are not true
  // More than 1 period
  if (numPeriods < 2) {
    g_log.warning()
        << "Attempted to split multiperiod histogram workspace with "
        << numPeriods << " periods, aborted." << std::endl;
    return;
  }

  // Y array should be divisible by the number of periods
  if (m_workspace->blocksize() % numPeriods != 0) {
    g_log.warning()
        << "Attempted to split multiperiod histogram workspace with "
        << m_workspace->blocksize() << " data entries, into " << numPeriods
        << " periods. Aborted." << std::endl;
    return;
  }

  WorkspaceGroup_sptr wsGroup(new WorkspaceGroup);
  size_t yLength = m_workspace->blocksize() / numPeriods;
  size_t xLength = yLength + 1;
  size_t numSpectra = m_workspace->getNumberHistograms();
  ISISRunLogs monLogCreator(m_workspace->run(), static_cast<int>(numPeriods));

  for (size_t i = 0; i < numPeriods; i++) {
    // create the period workspace
    API::MatrixWorkspace_sptr wsPeriod = API::WorkspaceFactory::Instance().create(
        m_workspace, numSpectra, xLength, yLength);

    // assign x values - restart at start for all periods
    for (size_t specIndex = 0; specIndex < numSpectra; specIndex++) {
      MantidVec &outputVec = wsPeriod->dataX(specIndex);
      const MantidVec &inputVec = m_workspace->readX(specIndex);
      for (size_t index = 0; index < xLength; index++) {
        outputVec[index] = inputVec[index];
      }
    }

    // assign y values - use the values offset by the period number
    for (size_t specIndex = 0; specIndex < numSpectra; specIndex++) {
      MantidVec &outputVec = wsPeriod->dataY(specIndex);
      const MantidVec &inputVec = m_workspace->readY(specIndex);
      for (size_t index = 0; index < yLength; index++) {
        outputVec[index] = inputVec[(yLength * i) + index];
      }
    }

    // assign E values
    for (size_t specIndex = 0; specIndex < numSpectra; specIndex++) {
      MantidVec &outputVec = wsPeriod->dataE(specIndex);
      const MantidVec &inputVec = m_workspace->readE(specIndex);
      for (size_t index = 0; index < yLength; index++) {
        outputVec[index] = inputVec[(yLength * i) + index];
      }
    }

    // add period logs
    monLogCreator.addPeriodLogs(static_cast<int>(i + 1),
                                wsPeriod->mutableRun());

    // add to workspace group
    wsGroup->addWorkspace(wsPeriod);
  }

  // set the output workspace
  this->setProperty("OutputWorkspace", wsGroup);
}
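// Example (not part of the Mantid source): the per-period copy above simply
// slices a concatenated Y block at offsets of yLength * period. A minimal
// standalone sketch of that indexing with toy data.
#include <iostream>
#include <vector>

int main() {
  // Toy data: 3 periods of 4 bins each, stored back to back in one vector.
  const std::size_t numPeriods = 3;
  const std::vector<double> y = {1,   2,   3,   4,   10,  20,
                                 30,  40,  100, 200, 300, 400};
  const std::size_t yLength = y.size() / numPeriods;

  for (std::size_t i = 0; i < numPeriods; ++i) {
    // Same offset arithmetic as the monitor splitter: period i starts at
    // yLength * i and is yLength entries long.
    std::vector<double> period(yLength);
    for (std::size_t index = 0; index < yLength; ++index)
      period[index] = y[(yLength * i) + index];

    std::cout << "period " << i + 1 << ":";
    for (double v : period)
      std::cout << " " << v;
    std::cout << "\n";
  }
  return 0;
}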