/** Write a certain number of log entries (from the beginning) to a file
 * @param ws :: workspace containing the time series log
 * @param logname :: name of the log to export
 * @param numentriesoutput :: number of entries (from the start of the log) to write
 * @param outputfilename :: name of the output column file
 */
void ProcessDasNexusLog::writeLogtoFile(API::MatrixWorkspace_sptr ws,
                                        std::string logname,
                                        size_t numentriesoutput,
                                        std::string outputfilename) {
  // 1. Get log
  Kernel::Property *log = ws->run().getProperty(logname);
  Kernel::TimeSeriesProperty<double> *tslog =
      dynamic_cast<Kernel::TimeSeriesProperty<double> *>(log);
  if (!tslog)
    throw std::runtime_error("Invalid time series log: it could not be cast "
                             "(interpreted) as a time series property");
  std::vector<Kernel::DateAndTime> times = tslog->timesAsVector();
  std::vector<double> values = tslog->valuesAsVector();

  // 2. Write out
  std::ofstream ofs;
  ofs.open(outputfilename.c_str(), std::ios::out);
  ofs << "# Absolute Time (nanosecond)\tPulse Time (nanosecond)\tTOF (ms)\n";

  Kernel::DateAndTime prevtime(0);
  std::vector<double> tofs;

  for (size_t i = 0; i < numentriesoutput; i++) {
    Kernel::DateAndTime tnow = times[i];

    if (tnow > prevtime) {
      // (a) Process previous logs
      std::sort(tofs.begin(), tofs.end());

      for (double tof : tofs) {
        Kernel::DateAndTime temptime =
            prevtime + static_cast<int64_t>(tof * 100);
        ofs << temptime.totalNanoseconds() << "\t" << tnow.totalNanoseconds()
            << "\t" << tof * 0.1 << '\n';
      }

      // (b) Clear
      tofs.clear();

      // (c) Update time
      prevtime = tnow;
    }

    // (d) Push the current value
    tofs.push_back(values[i]);
  } // ENDFOR

  // Clear the last
  if (!tofs.empty()) {
    // (a) Process previous logs: note value is in unit of 100 nano-second
    std::sort(tofs.begin(), tofs.end());

    for (double tof : tofs) {
      Kernel::DateAndTime temptime =
          prevtime + static_cast<int64_t>(tof * 100);
      ofs << temptime.totalNanoseconds() << "\t" << prevtime.totalNanoseconds()
          << "\t" << tof * 0.1 << '\n';
    }
  } else {
    throw std::runtime_error("Impossible for this to happen!");
  }

  ofs.close();
} // END Function
/** Convert a DAS log to a vector of absolute times
 * @param ws :: workspace containing the DAS log
 * @param logname :: name of the DAS log to convert
 * @param abstimevec :: (output) absolute times of the log entries
 * @param orderedtofs :: (output) TOF values ordered consistently with abstimevec
 */
void ProcessDasNexusLog::convertToAbsoluteTime(
    API::MatrixWorkspace_sptr ws, std::string logname,
    std::vector<Kernel::DateAndTime> &abstimevec,
    std::vector<double> &orderedtofs) {
  // 1. Get log
  Kernel::Property *log = ws->run().getProperty(logname);
  Kernel::TimeSeriesProperty<double> *tslog =
      dynamic_cast<Kernel::TimeSeriesProperty<double> *>(log);
  if (!tslog)
    throw std::runtime_error("Invalid time series log: it could not be cast "
                             "(interpreted) as a time series property");
  std::vector<Kernel::DateAndTime> times = tslog->timesAsVector();
  std::vector<double> values = tslog->valuesAsVector();

  // 2. Get converted
  size_t numsamepulses = 0;
  std::vector<double> tofs;
  Kernel::DateAndTime prevtime(0);

  for (size_t i = 0; i < times.size(); i++) {
    Kernel::DateAndTime tnow = times[i];

    if (tnow > prevtime) {
      // (a) Process previous logs
      std::sort(tofs.begin(), tofs.end());

      for (size_t j = 0; j < tofs.size(); j++) {
        Kernel::DateAndTime temptime =
            prevtime + static_cast<int64_t>(tofs[j] * 100);
        abstimevec.push_back(temptime);
        orderedtofs.push_back(tofs[j]);
      }

      // (b) Clear
      tofs.clear();

      // (c) Update time
      prevtime = tnow;
    } else {
      numsamepulses++;
    }

    // (d) Push the current value
    tofs.push_back(values[i]);
  } // ENDFOR

  // Clear the last
  if (!tofs.empty()) {
    // (a) Process previous logs: note value is in unit of 100 nano-second
    std::sort(tofs.begin(), tofs.end());

    for (size_t j = 0; j < tofs.size(); j++) {
      Kernel::DateAndTime temptime =
          prevtime + static_cast<int64_t>(tofs[j] * 100);
      abstimevec.push_back(temptime);
      orderedtofs.push_back(tofs[j]);
    }
  } else {
    throw std::runtime_error("Impossible for this to happen!");
  }

  return;
} // END Function
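// ---------------------------------------------------------------------------
// Illustrative sketch only (not part of the algorithm; the function and
// parameter names below are assumptions, not Mantid API). It restates the
// pulse-grouping pass used by writeLogtoFile() and convertToAbsoluteTime()
// with plain standard-library types: DAS log values are treated as
// time-of-flight in units of 100 ns relative to the pulse that carries them,
// entries sharing a pulse are sorted, and each is emitted as an absolute time
// in nanoseconds.
// ---------------------------------------------------------------------------
#include <algorithm>
#include <cstdint>
#include <vector>

namespace {
void convertDasLogSketch(const std::vector<int64_t> &pulseTimesNs,
                         const std::vector<double> &dasValues,
                         std::vector<int64_t> &absTimesNs,
                         std::vector<double> &orderedTofs) {
  std::vector<double> tofs;
  int64_t prevPulseNs = 0;
  for (size_t i = 0; i < pulseTimesNs.size(); ++i) {
    if (pulseTimesNs[i] > prevPulseNs) {
      // Flush the previous pulse: sort its TOFs and append absolute times.
      std::sort(tofs.begin(), tofs.end());
      for (double tof : tofs) {
        absTimesNs.push_back(prevPulseNs + static_cast<int64_t>(tof * 100));
        orderedTofs.push_back(tof);
      }
      tofs.clear();
      prevPulseNs = pulseTimesNs[i];
    }
    tofs.push_back(dasValues[i]);
  }
  // Flush the final pulse, mirroring the "Clear the last" step above.
  std::sort(tofs.begin(), tofs.end());
  for (double tof : tofs) {
    absTimesNs.push_back(prevPulseNs + static_cast<int64_t>(tof * 100));
    orderedTofs.push_back(tof);
  }
}
} // namespace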
/** Export part of the designated log to a file in column format and an output
 * workspace
 * @param logname :: name of the log to export
 * @param numentries :: number of log entries to export
 * @param outputeventws :: boolean. output workspace is an event workspace if
 * true.
 */
void ExportTimeSeriesLog::exportLog(string logname, int numentries,
                                    bool outputeventws) {
  // 1. Get log, times and values
  std::vector<Kernel::DateAndTime> times;
  std::vector<double> values;

  if (logname.size() > 0) {
    // Log
    Kernel::TimeSeriesProperty<double> *tlog =
        dynamic_cast<Kernel::TimeSeriesProperty<double> *>(
            m_dataWS->run().getProperty(logname));
    if (!tlog) {
      std::stringstream errmsg;
      errmsg << "TimeSeriesProperty Log " << logname
             << " does not exist in workspace " << m_dataWS->getName();
      g_log.error(errmsg.str());
      throw std::invalid_argument(errmsg.str());
    }
    times = tlog->timesAsVector();
    values = tlog->valuesAsVector();
  } else {
    throw std::runtime_error("Log name cannot be left empty.");
  }

  // 2. Determine the number of log entries to export
  if (numentries == EMPTY_INT()) {
    numentries = static_cast<int>(times.size());
  } else if (numentries <= 0) {
    stringstream errmsg;
    errmsg << "For Export Log, NumberEntriesExport must be greater than 0. "
              "Input = "
           << numentries;
    g_log.error(errmsg.str());
    throw std::runtime_error(errmsg.str());
  } else if (static_cast<size_t>(numentries) > times.size()) {
    numentries = static_cast<int>(times.size());
  }

  // 3. Create output workspace
  if (outputeventws) {
    setupEventWorkspace(numentries, times, values);
  } else {
    setupWorkspace2D(numentries, times, values);
  }

  return;
}
/** Export part of the designated log to a file in column format and an output
 * workspace
 * @brief ExportTimeSeriesLog::exportLog
 * @param logname :: name of the log to export
 * @param timeunit :: unit of time for the input start/stop time and output
 * @param starttime :: relative start time of the output time series log
 * @param stoptime :: relative stop time of the output time series log
 * @param exportepoch :: flag to output time as epoch (absolute) time
 * @param outputeventws :: boolean. output workspace is an event workspace if
 * true.
 * @param numentries :: number of log entries to export
 */
void ExportTimeSeriesLog::exportLog(const std::string &logname,
                                    const std::string timeunit,
                                    const double &starttime,
                                    const double &stoptime,
                                    const bool exportepoch, bool outputeventws,
                                    int numentries) {
  // Get log, times and values
  std::vector<Kernel::DateAndTime> times;
  std::vector<double> values;

  if (logname.size() > 0) {
    // Log
    Kernel::TimeSeriesProperty<double> *tlog =
        dynamic_cast<Kernel::TimeSeriesProperty<double> *>(
            m_inputWS->run().getProperty(logname));
    if (!tlog) {
      std::stringstream errmsg;
      errmsg << "TimeSeriesProperty Log " << logname
             << " does not exist in workspace " << m_inputWS->getName();
      g_log.error(errmsg.str());
      throw std::invalid_argument(errmsg.str());
    }
    times = tlog->timesAsVector();
    values = tlog->valuesAsVector();
  } else {
    throw std::runtime_error("Log name cannot be left empty.");
  }

  // Get start time, stop time and unit conversion factor
  double timeunitfactor = 1.;
  if (timeunit.compare("Seconds") == 0)
    timeunitfactor = 1.E-9;

  // Get index range for start/stop time
  size_t i_start = 0;
  size_t i_stop = times.size() - 1;
  // Rule out the case that the start time is beyond the last log entry
  bool i_start_cal = false;
  if (starttime != EMPTY_DBL()) {
    int64_t timerangens =
        times.back().totalNanoseconds() - times.front().totalNanoseconds();
    double timerange = static_cast<double>(timerangens) * timeunitfactor;
    g_log.debug() << "Time range is " << timerange << ", Start time is "
                  << starttime << "\n";
    if (timerange < starttime) {
      i_start = times.size() - 1;
      i_start_cal = true;
    }
  }

  if ((!i_start_cal) &&
      (starttime != EMPTY_DBL() || stoptime != EMPTY_DBL())) {
    bool export_partial = calculateTimeSeriesRangeByTime(
        times, starttime, i_start, stoptime, i_stop, timeunitfactor);
    if (!export_partial)
      throw std::runtime_error(
          "Unable to find proton_charge for run start time. "
          "Failed to get partial time series.");
  }

  // Determine the number of log entries to export
  if (numentries == EMPTY_INT()) {
    numentries = static_cast<int>(times.size());
  } else if (numentries <= 0) {
    stringstream errmsg;
    errmsg << "For Export Log, NumberEntriesExport must be greater than 0. "
              "Input = "
           << numentries;
    g_log.error(errmsg.str());
    throw std::runtime_error(errmsg.str());
  } else if (static_cast<size_t>(numentries) > times.size()) {
    numentries = static_cast<int>(times.size());
  }

  // Create output workspace
  if (outputeventws) {
    setupEventWorkspace(i_start, i_stop, numentries, times, values,
                        exportepoch);
  } else {
    setupWorkspace2D(i_start, i_stop, numentries, times, values, exportepoch,
                     timeunitfactor);
  }

  return;
}
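// ---------------------------------------------------------------------------
// Illustrative sketch only: calculateTimeSeriesRangeByTime() is implemented
// elsewhere in this class and is not reproduced here. The assumed helper
// below only approximates what such a window selection could look like: a
// relative start/stop time in the user's unit is mapped onto an inclusive
// index range over the (sorted) absolute log times. timeunitfactor converts
// nanoseconds to the requested unit (1.E-9 for "Seconds", 1. otherwise),
// matching the factor computed above; EMPTY_DBL() handling is omitted.
// ---------------------------------------------------------------------------
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <vector>

namespace {
bool findRangeSketch(const std::vector<int64_t> &timesNs, double startRel,
                     double stopRel, double timeunitfactor, size_t &i_start,
                     size_t &i_stop) {
  if (timesNs.empty())
    return false;
  const int64_t t0 = timesNs.front();
  // Convert a relative time in the requested unit to absolute nanoseconds.
  const auto toNs = [&](double rel) {
    return t0 + static_cast<int64_t>(rel / timeunitfactor);
  };
  // First entry at or after the start time, first entry strictly after stop.
  auto lo = std::lower_bound(timesNs.begin(), timesNs.end(), toNs(startRel));
  auto hi = std::upper_bound(timesNs.begin(), timesNs.end(), toNs(stopRel));
  if (lo >= hi)
    return false; // empty window: no entries fall between start and stop
  i_start = static_cast<size_t>(lo - timesNs.begin());
  i_stop = static_cast<size_t>(hi - timesNs.begin()) - 1;
  return true;
}
} // namespace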
/** Executes the algorithm. Reading in the file and creating and populating
 * the output workspace
 *
 * @throw Exception::FileError If the Nexus file cannot be found/opened
 * @throw std::invalid_argument If the optional properties are set to invalid
 * values
 */
void LoadNexusLogs::exec() {
  std::string filename = getPropertyValue("Filename");
  MatrixWorkspace_sptr workspace = getProperty("Workspace");

  // Find the entry name to use (normally "entry" for SNS, "raw_data_1" for
  // ISIS)
  std::string entry_name = LoadTOFRawNexus::getEntryName(filename);

  ::NeXus::File file(filename);
  // Find the root entry
  try {
    file.openGroup(entry_name, "NXentry");
  } catch (::NeXus::Exception &) {
    throw std::invalid_argument("Unknown NeXus file format found in file '" +
                                filename + "'");
  }

  /// Use frequency start for Monitor19 and Special1_19 logs with "No Time" for
  /// SNAP
  try {
    file.openPath("DASlogs");
    try {
      file.openGroup("frequency", "NXlog");
      try {
        file.openData("time");

        //----- Start time is an ISO8601 string date and time. ------
        try {
          file.getAttr("start", freqStart);
        } catch (::NeXus::Exception &) {
          // Some logs have "offset" instead of start
          try {
            file.getAttr("offset", freqStart);
          } catch (::NeXus::Exception &) {
            g_log.warning() << "Log entry has no start time indicated.\n";
            file.closeData();
            throw;
          }
        }
        file.closeData();
      } catch (::NeXus::Exception &) {
        // No time. This is not an SNS SNAP file
      }
      file.closeGroup();
    } catch (::NeXus::Exception &) {
      // No time. This is not an SNS frequency group
    }
    file.closeGroup();
  } catch (::NeXus::Exception &) {
    // No time. This is not an SNS group
  }

  // Loop over the entry-level fields and load the relevant log groups
  std::map<std::string, std::string> entries = file.getEntries();
  std::map<std::string, std::string>::const_iterator iend = entries.end();
  for (std::map<std::string, std::string>::const_iterator it = entries.begin();
       it != iend; ++it) {
    std::string group_name(it->first);
    std::string group_class(it->second);
    if (group_name == "DASlogs" || group_class == "IXrunlog" ||
        group_class == "IXselog" || group_name == "framelog") {
      loadLogs(file, group_name, group_class, workspace);
    }
    if (group_class == "IXperiods") {
      loadNPeriods(file, workspace);
    }
  }

  // If there's measurement information, load that info as logs.
  loadAndApplyMeasurementInfo(&file, *workspace);

  // Freddie Akeroyd 12/10/2011
  // The current ISIS implementation contains an additional indirection between
  // collected frames via an "event_frame_number" array in NXevent_data (which
  // eliminates frames with no events).
  // The proton_log is for all frames and so is longer than the event_index
  // array, so we need to filter the proton_charge log based on
  // event_frame_number.
  // This difference will be removed in future for compatibility with SNS, but
  // the code below will allow current SANS2D files to load.
  if (workspace->mutableRun().hasProperty("proton_log")) {
    std::vector<int> event_frame_number;
    this->getLogger().notice()
        << "Using old ISIS proton_log and event_frame_number indirection..."
        << std::endl;
    try {
      // Find the bank/name corresponding to the first event data entry, i.e.
      // one with type NXevent_data.
file.openPath("/" + entry_name); std::map<std::string, std::string> entries = file.getEntries(); std::map<std::string, std::string>::const_iterator it = entries.begin(); std::string eventEntry; for (; it != entries.end(); ++it) { if (it->second == "NXevent_data") { eventEntry = it->first; break; } } this->getLogger().debug() << "Opening" << " /" + entry_name + "/" + eventEntry + "/event_frame_number" << " to find the event_frame_number\n"; file.openPath("/" + entry_name + "/" + eventEntry + "/event_frame_number"); file.getData(event_frame_number); } catch (const ::NeXus::Exception &) { this->getLogger().warning() << "Unable to load event_frame_number - " "filtering events by time will not work " << std::endl; } file.openPath("/" + entry_name); if (!event_frame_number.empty()) // ISIS indirection - see above comments { Kernel::TimeSeriesProperty<double> *plog = dynamic_cast<Kernel::TimeSeriesProperty<double> *>( workspace->mutableRun().getProperty("proton_log")); if (!plog) throw std::runtime_error( "Could not cast (interpret) proton_log as a time " "series property. Cannot continue."); Kernel::TimeSeriesProperty<double> *pcharge = new Kernel::TimeSeriesProperty<double>("proton_charge"); std::vector<double> pval; std::vector<Mantid::Kernel::DateAndTime> ptime; pval.reserve(event_frame_number.size()); ptime.reserve(event_frame_number.size()); std::vector<Mantid::Kernel::DateAndTime> plogt = plog->timesAsVector(); std::vector<double> plogv = plog->valuesAsVector(); for (auto number : event_frame_number) { ptime.push_back(plogt[number]); pval.push_back(plogv[number]); } pcharge->create(ptime, pval); pcharge->setUnits("uAh"); workspace->mutableRun().addProperty(pcharge, true); } } try { // Read the start and end time strings file.openData("start_time"); Kernel::DateAndTime start(file.getStrData()); file.closeData(); file.openData("end_time"); Kernel::DateAndTime end(file.getStrData()); file.closeData(); workspace->mutableRun().setStartAndEndTime(start, end); } catch (::NeXus::Exception &) { } if (!workspace->run().hasProperty("gd_prtn_chrg")) { // Try pulling it from the main proton_charge entry first try { file.openData("proton_charge"); std::vector<double> values; file.getDataCoerce(values); std::string units; file.getAttr("units", units); double charge = values.front(); if (units.find("picoCoulomb") != std::string::npos) { charge *= 1.e-06 / 3600.; } workspace->mutableRun().setProtonCharge(charge); } catch (::NeXus::Exception &) { // Try and integrate the proton logs try { // Use the DAS logs to integrate the proton charge (if any). workspace->mutableRun().getProtonCharge(); } catch (Exception::NotFoundError &) { // Ignore not found property error. } } } // Close the file file.close(); }