/** Set the goniometer on the matrix workspace, get its rotation matrix R
 * (from Q-sample to Q-lab) and output 1/R
 * @brief ConvertCWSDExpToMomentum::setupTransferMatrix
 * @param dataws :: matrix workspace containing sample rotation angles
 * @param rotationMatrix :: output matrix 1/R to convert from Q-lab to
 * Q-sample
 */
void ConvertCWSDExpToMomentum::setupTransferMatrix(
    API::MatrixWorkspace_sptr dataws, Kernel::DblMatrix &rotationMatrix) {
  // Check sample logs
  if (!dataws->run().hasProperty("_omega") ||
      !dataws->run().hasProperty("_chi") || !dataws->run().hasProperty("_phi"))
    throw std::runtime_error(
        "Data workspace does not have sample log _phi, _chi or _omega. "
        "Unable to set goniometer and calculate rotation matrix R.");

  // Call algorithm SetGoniometer
  IAlgorithm_sptr setalg = createChildAlgorithm("SetGoniometer");
  setalg->initialize();
  setalg->setProperty("Workspace", dataws);
  setalg->setProperty("Axis0", "_omega,0,1,0,-1");
  setalg->setProperty("Axis1", "_chi,0,0,1,-1");
  setalg->setProperty("Axis2", "_phi,0,1,0,-1");
  setalg->execute();

  if (setalg->isExecuted()) {
    rotationMatrix = dataws->run().getGoniometer().getR();
    g_log.debug() << "Rotation matrix: " << rotationMatrix.str() << "\n";
    rotationMatrix.Invert();
    g_log.debug() << "Inverted rotation matrix: " << rotationMatrix.str()
                  << "\n";
  } else
    throw std::runtime_error("Unable to set Goniometer.");

  return;
}
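// Minimal standalone sketch (illustration only, not part of the algorithm):
// the matrix produced above is the inverse of the goniometer rotation R, so
// it maps a momentum transfer from the lab frame back into the sample frame,
// q_sample = R^-1 * q_lab.  For a pure rotation R^-1 equals the transpose.
// The angle and Q values below are made up.
#include <array>
#include <cmath>
#include <iostream>

int main() {
  const double deg2rad = 3.14159265358979323846 / 180.0;
  const double omega = 30.0 * deg2rad; // hypothetical rotation angle

  // Rotation by omega about the vertical (y) axis
  const std::array<std::array<double, 3>, 3> R = {{
      {std::cos(omega), 0.0, std::sin(omega)},
      {0.0, 1.0, 0.0},
      {-std::sin(omega), 0.0, std::cos(omega)}}};

  const std::array<double, 3> qLab = {1.0, 0.0, 0.0}; // made-up Q-lab vector

  // q_sample = R^-1 * q_lab = R^T * q_lab (R is orthogonal)
  std::array<double, 3> qSample = {0.0, 0.0, 0.0};
  for (int i = 0; i < 3; ++i)
    for (int j = 0; j < 3; ++j)
      qSample[i] += R[j][i] * qLab[j]; // note the transposed indices

  std::cout << qSample[0] << " " << qSample[1] << " " << qSample[2] << "\n";
}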
/*
 * Check that the named log exists in the workspace and is a time series
 */
void ProcessDasNexusLog::checkLog(API::MatrixWorkspace_sptr ws,
                                  std::string logname) {
  // 1. Get log
  Kernel::Property *log = ws->run().getProperty(logname);
  if (!log) {
    g_log.error() << "Log " << logname << " does not exist!" << std::endl;
    throw std::invalid_argument("Non-existing log name");
  }
  Kernel::TimeSeriesProperty<double> *tslog =
      dynamic_cast<Kernel::TimeSeriesProperty<double> *>(log);
  if (!tslog) {
    g_log.error() << "Log " << logname << " is not a time series log"
                  << std::endl;
    throw std::invalid_argument("Log type error!");
  }

  // 2. Survey
  std::vector<Kernel::DateAndTime> times = tslog->timesAsVector();
  g_log.information() << "Entries of times = " << times.size() << std::endl;

  size_t countsame = 0;
  size_t countinverse = 0;
  for (size_t i = 1; i < times.size(); i++) {
    Kernel::DateAndTime tprev = times[i - 1];
    Kernel::DateAndTime tpres = times[i];

    if (tprev == tpres)
      countsame++;
    else if (tprev > tpres)
      countinverse++;
  }

  // 3. Output
  Kernel::DateAndTime t0(ws->run().getProperty("run_start")->value());
  Kernel::time_duration dts = times[0] - t0;
  Kernel::time_duration dtf = times[times.size() - 1] - t0;
  size_t f = times.size() - 1;

  g_log.information() << "Number of Equal Time Stamps = " << countsame
                      << std::endl;
  g_log.information() << "Number of Inverted Time Stamps = " << countinverse
                      << std::endl;
  g_log.information() << "Run Start = " << t0.totalNanoseconds() << std::endl;
  g_log.information() << "First Log (Absolute Time, Relative Time): "
                      << times[0].totalNanoseconds() << ", "
                      << Kernel::DateAndTime::nanosecondsFromDuration(dts)
                      << std::endl;
  g_log.information() << "Last Log (Absolute Time, Relative Time): "
                      << times[f].totalNanoseconds() << ", "
                      << Kernel::DateAndTime::nanosecondsFromDuration(dtf)
                      << std::endl;

  return;
}
/** Compute the source-to-sample distance from the number of guides in use
 * and the instrument's "aperture-distances" parameter.
 * @param dataWS :: data workspace holding the instrument and sample logs
 * @return source-to-sample distance, including any "source-distance-offset"
 */
double getSourceToSampleDistance(API::MatrixWorkspace_sptr dataWS) {
  const int nguides =
      dataWS->run().getPropertyValueAsType<int>("number-of-guides");

  std::vector<std::string> pars =
      dataWS->getInstrument()->getStringParameter("aperture-distances");
  if (pars.empty())
    throw Kernel::Exception::InstrumentDefinitionError(
        "Unable to find [aperture-distances] instrument parameter");

  double SSD = 0;
  Mantid::Kernel::StringTokenizer tok(
      pars[0], ",", Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
  if (tok.count() > 0 && tok.count() < 10 && nguides >= 0 && nguides < 9) {
    const std::string distance_as_string = tok[8 - nguides];
    if (!Poco::NumberParser::tryParseFloat(distance_as_string, SSD))
      throw Kernel::Exception::InstrumentDefinitionError(
          "Bad value for source-to-sample distance");
  } else
    throw Kernel::Exception::InstrumentDefinitionError(
        "Unable to get source-to-sample distance");

  // Check for an offset
  if (dataWS->getInstrument()->hasParameter("source-distance-offset")) {
    const double offset =
        readInstrumentParameter("source-distance-offset", dataWS);
    SSD += offset;
  }
  return SSD;
}
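// Minimal standalone sketch (illustration only): the indexing above selects
// the aperture distance corresponding to the number of guides in use; with a
// hypothetical nine-entry "aperture-distances" list and nguides = 6, the
// selected entry is index 8 - 6 = 2.  All values are made up.
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

int main() {
  const std::string apertureDistances =
      "10000,9000,8000,7000,6000,5000,4000,3000,2000"; // hypothetical, in mm
  std::vector<std::string> tokens;
  std::stringstream ss(apertureDistances);
  std::string item;
  while (std::getline(ss, item, ','))
    tokens.push_back(item);

  const int nguides = 6;                    // hypothetical sample log value
  std::cout << tokens[8 - nguides] << "\n"; // prints "8000"
}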
/** Set up the run_start property from user-specified date and time sample logs
 * @brief LoadSpiceAscii::setupRunStartTime
 * @param runinfows :: workspace containing the run information sample logs
 * @param datetimeprop :: vector of 4 strings: date log name, date format,
 *        time log name, time format
 */
void LoadSpiceAscii::setupRunStartTime(
    API::MatrixWorkspace_sptr runinfows,
    const std::vector<std::string> &datetimeprop) {
  // Nothing to do if the user did not ask for a run start time
  if (datetimeprop.empty()) {
    g_log.information("User chooses not to set up run start date and time.");
    return;
  }

  // Check the property vector
  if (datetimeprop.size() != 4) {
    g_log.warning() << "Run start date and time property must contain 4 "
                       "strings. User only specifies "
                    << datetimeprop.size() << ". Set up failed."
                    << "\n";
    return;
  }

  // Parse
  std::string datelogname = datetimeprop[0];
  std::string timelogname = datetimeprop[2];
  if (!(runinfows->run().hasProperty(datelogname) &&
        runinfows->run().hasProperty(timelogname))) {
    std::stringstream errss;
    errss << "Unable to locate user specified date and time sample logs "
          << datelogname << " and " << timelogname
          << ". run_start will not be set up.";
    g_log.error(errss.str());
    return;
  }

  const std::string &rawdatestring =
      runinfows->run().getProperty(datelogname)->value();
  const std::string &dateformat = datetimeprop[1];
  std::string mtddatestring = processDateString(rawdatestring, dateformat);

  const std::string &rawtimestring =
      runinfows->run().getProperty(timelogname)->value();
  const std::string &timeformat = datetimeprop[3];
  std::string mtdtimestring = processTimeString(rawtimestring, timeformat);

  std::string mtddatetimestr = mtddatestring + "T" + mtdtimestring;

  // Set up the property
  DateAndTime runstart(mtddatetimestr);
  addProperty<std::string>(runinfows, "run_start", runstart.toISO8601String());
}
/*
 * Write a certain number of log entries (from the beginning) to file
 */
void ProcessDasNexusLog::writeLogtoFile(API::MatrixWorkspace_sptr ws,
                                        std::string logname,
                                        size_t numentriesoutput,
                                        std::string outputfilename) {
  // 1. Get log
  Kernel::Property *log = ws->run().getProperty(logname);
  Kernel::TimeSeriesProperty<double> *tslog =
      dynamic_cast<Kernel::TimeSeriesProperty<double> *>(log);
  if (!tslog)
    throw std::runtime_error("Invalid time series log: it could not be cast "
                             "(interpreted) as a time series property");
  std::vector<Kernel::DateAndTime> times = tslog->timesAsVector();
  std::vector<double> values = tslog->valuesAsVector();

  // 2. Write out
  std::ofstream ofs;
  ofs.open(outputfilename.c_str(), std::ios::out);
  ofs << "# Absolute Time (nanosecond)\tPulse Time (nanosecond)\tTOF (ms)\n";

  Kernel::DateAndTime prevtime(0);
  std::vector<double> tofs;

  for (size_t i = 0; i < numentriesoutput; i++) {
    Kernel::DateAndTime tnow = times[i];

    if (tnow > prevtime) {
      // (a) Process previous logs
      std::sort(tofs.begin(), tofs.end());
      for (double tof : tofs) {
        Kernel::DateAndTime temptime =
            prevtime + static_cast<int64_t>(tof * 100);
        ofs << temptime.totalNanoseconds() << "\t" << tnow.totalNanoseconds()
            << "\t" << tof * 0.1 << '\n';
      }

      // (b) Clear
      tofs.clear();

      // (c) Update time
      prevtime = tnow;
    }

    // (d) Push the current value
    tofs.push_back(values[i]);
  } // ENDFOR

  // Clear the last
  if (!tofs.empty()) {
    // (a) Process previous logs: note the value is in units of 100 nanoseconds
    std::sort(tofs.begin(), tofs.end());
    for (double tof : tofs) {
      Kernel::DateAndTime temptime = prevtime + static_cast<int64_t>(tof * 100);
      ofs << temptime.totalNanoseconds() << "\t" << prevtime.totalNanoseconds()
          << "\t" << tof * 0.1 << '\n';
    }
  } else {
    throw std::runtime_error("Impossible for this to happen!");
  }

  ofs.close();
} // END Function
/*
 * Convert a DAS log to a vector of absolute times
 * @param ws :: workspace containing the DAS log
 * @param logname :: name of the DAS time series log
 * @param abstimevec :: output vector of absolute times (pulse time + TOF)
 * @param orderedtofs :: output vector of TOFs, ordered to match abstimevec
 */
void ProcessDasNexusLog::convertToAbsoluteTime(
    API::MatrixWorkspace_sptr ws, std::string logname,
    std::vector<Kernel::DateAndTime> &abstimevec,
    std::vector<double> &orderedtofs) {
  // 1. Get log
  Kernel::Property *log = ws->run().getProperty(logname);
  Kernel::TimeSeriesProperty<double> *tslog =
      dynamic_cast<Kernel::TimeSeriesProperty<double> *>(log);
  if (!tslog)
    throw std::runtime_error("Invalid time series log: it could not be cast "
                             "(interpreted) as a time series property");
  std::vector<Kernel::DateAndTime> times = tslog->timesAsVector();
  std::vector<double> values = tslog->valuesAsVector();

  // 2. Convert
  size_t numsamepulses = 0;
  std::vector<double> tofs;
  Kernel::DateAndTime prevtime(0);

  for (size_t i = 0; i < times.size(); i++) {
    Kernel::DateAndTime tnow = times[i];

    if (tnow > prevtime) {
      // (a) Process previous logs
      std::sort(tofs.begin(), tofs.end());
      for (size_t j = 0; j < tofs.size(); j++) {
        Kernel::DateAndTime temptime =
            prevtime + static_cast<int64_t>(tofs[j] * 100);
        abstimevec.push_back(temptime);
        orderedtofs.push_back(tofs[j]);
      }

      // (b) Clear
      tofs.clear();

      // (c) Update time
      prevtime = tnow;
    } else {
      numsamepulses++;
    }

    // (d) Push the current value
    tofs.push_back(values[i]);
  } // ENDFOR

  // Clear the last
  if (!tofs.empty()) {
    // (a) Process previous logs: note the value is in units of 100 nanoseconds
    std::sort(tofs.begin(), tofs.end());
    for (size_t j = 0; j < tofs.size(); j++) {
      Kernel::DateAndTime temptime =
          prevtime + static_cast<int64_t>(tofs[j] * 100);
      abstimevec.push_back(temptime);
      orderedtofs.push_back(tofs[j]);
    }
  } else {
    throw std::runtime_error("Impossible for this to happen!");
  }

  return;
} // END Function
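// Minimal standalone sketch (illustration only): the absolute-time arithmetic
// above adds the DAS log value (a TOF stored in units of 100 ns) to the pulse
// time expressed in nanoseconds.  The numbers below are made up.
#include <cstdint>
#include <iostream>

int main() {
  const int64_t pulseTimeNs = 1000000000LL; // hypothetical pulse time [ns]
  const double tofLogValue = 250.0;         // 250 * 100 ns = 25 microseconds

  const int64_t absoluteTimeNs =
      pulseTimeNs + static_cast<int64_t>(tofLogValue * 100);
  std::cout << absoluteTimeNs << "\n"; // prints 1000025000
}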
/** Validate the algorithm's properties.
 *
 * @return A map of property names and their issues.
 */
std::map<std::string, std::string> CreateEPP::validateInputs(void) {
  std::map<std::string, std::string> issues;
  API::MatrixWorkspace_sptr inputWS =
      getProperty(PropertyNames::INPUT_WORKSPACE);
  if (!inputWS->run().hasProperty("Ei")) {
    issues[PropertyNames::INPUT_WORKSPACE] =
        "Workspace is missing the 'Ei' sample log.";
  }
  return issues;
}
/*
 * Export time stamps that look erroneous
 */
void ProcessDasNexusLog::exportErrorLog(
    API::MatrixWorkspace_sptr ws, std::vector<Kernel::DateAndTime> abstimevec,
    std::vector<Kernel::DateAndTime> pulsetimes,
    std::vector<double> orderedtofs, double dts) {
  std::string outputdir = getProperty("OutputDirectory");
  if (outputdir[outputdir.size() - 1] != '/')
    outputdir += "/";

  std::string ofilename = outputdir + "errordeltatime.txt";
  g_log.notice() << ofilename << std::endl;
  std::ofstream ofs;
  ofs.open(ofilename.c_str(), std::ios::out);

  size_t numbaddt = 0;
  Kernel::DateAndTime t0(ws->run().getProperty("run_start")->value());

  for (size_t i = 1; i < abstimevec.size(); i++) {
    double tempdts = static_cast<double>(abstimevec[i].totalNanoseconds() -
                                         abstimevec[i - 1].totalNanoseconds()) *
                     1.0E-9;
    double dev = (tempdts - dts) / dts;

    bool baddt = false;
    if (fabs(dev) > 0.5)
      baddt = true;

    if (baddt) {
      numbaddt++;
      double deltapulsetimeSec1 =
          static_cast<double>(pulsetimes[i - 1].totalNanoseconds() -
                              t0.totalNanoseconds()) *
          1.0E-9;
      double deltapulsetimeSec2 =
          static_cast<double>(pulsetimes[i].totalNanoseconds() -
                              t0.totalNanoseconds()) *
          1.0E-9;
      int index1 = static_cast<int>(deltapulsetimeSec1 * 60);
      int index2 = static_cast<int>(deltapulsetimeSec2 * 60);

      ofs << "Error d(T) = " << tempdts << " vs Correct d(T) = " << dts
          << std::endl;
      ofs << index1 << "\t\t" << pulsetimes[i - 1].totalNanoseconds() << "\t\t"
          << orderedtofs[i - 1] << std::endl;
      ofs << index2 << "\t\t" << pulsetimes[i].totalNanoseconds() << "\t\t"
          << orderedtofs[i] << std::endl;
    }
  }
  ofs.close();
}
/**
 * Extract the time of the first good frame of a workspace
 * @param ws :: a workspace
 * @returns the date and time of the first good frame
 */
DateAndTime
ChangeTimeZero::getStartTimeFromWorkspace(API::MatrixWorkspace_sptr ws) const {
  auto run = ws->run();
  // Check for the first good frame in the log
  Mantid::Kernel::TimeSeriesProperty<double> *goodFrame = nullptr;
  try {
    goodFrame = run.getTimeSeriesProperty<double>("proton_charge");
  } catch (const std::invalid_argument &) {
    throw std::invalid_argument("ChangeTimeZero: The log needs a proton_charge "
                                "time series to determine the zero time.");
  }

  DateAndTime startTime;
  if (goodFrame->size() > 0) {
    startTime = goodFrame->firstTime();
  }

  return startTime;
}
/**
 * If grouping was not provided, find the instrument from the input workspace
 * and read the default grouping from its IDF. Returns the forward and backward
 * groupings as arrays of integers.
 * @param ws :: [input] Workspace to find grouping for
 * @param forward :: [output] Forward spectrum indices for given instrument
 * @param backward :: [output] Backward spectrum indices for given instrument
 */
void CalMuonDetectorPhases::getGroupingFromInstrument(
    const API::MatrixWorkspace_sptr &ws, std::vector<int> &forward,
    std::vector<int> &backward) {
  // Make sure both arrays are empty
  forward.clear();
  backward.clear();

  const auto instrument = ws->getInstrument();
  auto loader = Kernel::make_unique<API::GroupingLoader>(instrument);
  if (instrument->getName() == "MUSR" || instrument->getName() == "CHRONUS") {
    // Two possibilities for grouping - use the workspace log to decide
    auto fieldDir = ws->run().getLogData("main_field_direction");
    if (fieldDir) {
      loader = Kernel::make_unique<API::GroupingLoader>(instrument,
                                                        fieldDir->value());
    } else {
      throw std::invalid_argument(
          "Cannot use default instrument grouping for MUSR "
          "as main field direction is unknown");
    }
  }

  // Load grouping and find forward, backward groups
  std::string fwdRange, bwdRange;
  const auto grouping = loader->getGroupingFromIDF();
  size_t nGroups = grouping->groups.size();
  for (size_t iGroup = 0; iGroup < nGroups; iGroup++) {
    const std::string name = grouping->groupNames[iGroup];
    if (name == "fwd" || name == "left") {
      fwdRange = grouping->groups[iGroup];
    } else if (name == "bwd" || name == "bkwd" || name == "right") {
      bwdRange = grouping->groups[iGroup];
    }
  }

  // Use ArrayProperty's functionality to convert string ranges to groups
  this->setProperty("ForwardSpectra", fwdRange);
  this->setProperty("BackwardSpectra", bwdRange);
  forward = getProperty("ForwardSpectra");
  backward = getProperty("BackwardSpectra");
}
/** Execute the algorithm.
 */
void CreateEPP::exec() {
  API::MatrixWorkspace_sptr inputWS =
      getProperty(PropertyNames::INPUT_WORKSPACE);
  const auto &spectrumInfo = inputWS->spectrumInfo();
  API::ITableWorkspace_sptr outputWS =
      API::WorkspaceFactory::Instance().createTable("TableWorkspace");
  addEPPColumns(outputWS);

  const double sigma = getProperty(PropertyNames::SIGMA);
  const size_t spectraCount = spectrumInfo.size();
  outputWS->setRowCount(spectraCount);
  const auto l1 = spectrumInfo.l1();
  const double EFixed = inputWS->run().getPropertyAsSingleValue("Ei");

  for (size_t i = 0; i < spectraCount; ++i) {
    const auto l2 = spectrumInfo.l2(i);
    // Elastic peak centre: the TOF of a neutron with energy EFixed over l1 + l2
    const auto elasticTOF = Kernel::UnitConversion::run(
        "Energy", "TOF", EFixed, l1, l2, 0, Kernel::DeltaEMode::Direct, EFixed);
    outputWS->getRef<int>(ColumnNames::WS_INDEX, i) = static_cast<int>(i);
    outputWS->getRef<double>(ColumnNames::PEAK_CENTRE, i) = elasticTOF;
    outputWS->getRef<double>(ColumnNames::PEAK_CENTRE_ERR, i) = 0;
    outputWS->getRef<double>(ColumnNames::SIGMA, i) = sigma;
    outputWS->getRef<double>(ColumnNames::SIGMA_ERR, i) = 0;
    double height = 0;
    try {
      const auto elasticIndex = inputWS->binIndexOf(elasticTOF, i);
      height = inputWS->y(i)[elasticIndex];
    } catch (std::out_of_range &) {
      std::ostringstream sout;
      sout << "EPP out of TOF range for workspace index " << i
           << ". Peak height set to zero.";
      g_log.warning() << sout.str();
    }
    outputWS->getRef<double>(ColumnNames::HEIGHT, i) = height;
    outputWS->getRef<double>(ColumnNames::CHI_SQUARED, i) = 1;
    outputWS->getRef<std::string>(ColumnNames::STATUS, i) = "success";
  }
  setProperty(PropertyNames::OUTPUT_WORKSPACE, outputWS);
}
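// Minimal standalone sketch (illustration only, not Mantid's UnitConversion):
// the elastic peak centre above is the flight time of a neutron with the
// fixed incident energy Ei over the full path l1 + l2, i.e. t = (l1 + l2) / v
// with E = 1/2 m v^2.  The energy and distances below are made up.
#include <cmath>
#include <iostream>

int main() {
  const double Ei_meV = 5.0;  // hypothetical incident energy [meV]
  const double l1 = 11.0;     // hypothetical source-to-sample distance [m]
  const double l2 = 4.0;      // hypothetical sample-to-detector distance [m]

  const double meV_to_J = 1.602176634e-22;    // 1 meV in joules
  const double neutronMass = 1.674927498e-27; // kg

  // v = sqrt(2 E / m)
  const double v = std::sqrt(2.0 * Ei_meV * meV_to_J / neutronMass);
  const double elasticTofMicroSec = (l1 + l2) / v * 1.0e6;
  std::cout << elasticTofMicroSec << " microseconds\n"; // roughly 15300 us
}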
void HFIRLoad::exec() {
  // Reduction property manager
  const std::string reductionManagerName = getProperty("ReductionProperties");
  boost::shared_ptr<PropertyManager> reductionManager;
  if (PropertyManagerDataService::Instance().doesExist(reductionManagerName)) {
    reductionManager =
        PropertyManagerDataService::Instance().retrieve(reductionManagerName);
  } else {
    reductionManager = boost::make_shared<PropertyManager>();
    PropertyManagerDataService::Instance().addOrReplace(reductionManagerName,
                                                        reductionManager);
  }

  Progress progress(this, 0, 1, 5);
  progress.report();

  // If the load algorithm isn't in the reduction properties, add it
  if (!reductionManager->existsProperty("LoadAlgorithm")) {
    auto algProp = make_unique<AlgorithmProperty>("LoadAlgorithm");
    algProp->setValue(toString());
    reductionManager->declareProperty(std::move(algProp));
  }

  const std::string fileName = getPropertyValue("Filename");

  // Output log
  std::string output_message = "";

  const double wavelength_input = getProperty("Wavelength");
  const double wavelength_spread_input = getProperty("WavelengthSpread");

  progress.report("LoadSpice2D...");
  IAlgorithm_sptr loadAlg = createChildAlgorithm("LoadSpice2D", 0, 0.2);
  loadAlg->setProperty("Filename", fileName);
  if (!isEmpty(wavelength_input)) {
    loadAlg->setProperty("Wavelength", wavelength_input);
    loadAlg->setProperty("WavelengthSpread", wavelength_spread_input);
  }
  try {
    loadAlg->executeAsChildAlg();
  } catch (...) {
    // The only way HFIR SANS can load Nexus files is if it's loading data that
    // has already been processed, which only happens with sensitivity data.
    // So if we make it here and are still unable to load the file, assume it
    // is a sensitivity file. This also covers the special case where the
    // instrument scientist uses a reduced data set as a sensitivity data set.
    g_log.warning() << "Unable to load file as a SPICE file. Trying to load as "
                       "a Nexus file.\n";
    loadAlg = createChildAlgorithm("Load", 0, 0.2);
    loadAlg->setProperty("Filename", fileName);
    loadAlg->executeAsChildAlg();
    Workspace_sptr dataWS_tmp = loadAlg->getProperty("OutputWorkspace");
    MatrixWorkspace_sptr dataWS =
        boost::dynamic_pointer_cast<MatrixWorkspace>(dataWS_tmp);
    dataWS->mutableRun().addProperty("is_sensitivity", 1, "", true);
    setProperty<MatrixWorkspace_sptr>("OutputWorkspace", dataWS);
    g_log.notice() << "Successfully loaded " << fileName
                   << " and setting sensitivity flag to True\n";
    return;
  }
  Workspace_sptr dataWS_tmp = loadAlg->getProperty("OutputWorkspace");
  API::MatrixWorkspace_sptr dataWS =
      boost::dynamic_pointer_cast<MatrixWorkspace>(dataWS_tmp);

  // Get the sample-detector distance.
  // If SampleDetectorDistance is provided, use it; otherwise read
  // "sample-detector-distance" from the data file and apply
  // SampleDetectorDistanceOffset if given.
  double sdd = 0.0;
  const double sample_det_dist = getProperty("SampleDetectorDistance");
  if (!isEmpty(sample_det_dist)) {
    sdd = sample_det_dist;
  } else {
    const std::string sddName = "sample-detector-distance";
    Mantid::Kernel::Property *prop = dataWS->run().getProperty(sddName);
    Mantid::Kernel::PropertyWithValue<double> *dp =
        dynamic_cast<Mantid::Kernel::PropertyWithValue<double> *>(prop);
    if (!dp) {
      throw std::runtime_error("Could not cast (interpret) the property " +
                               sddName + " as a floating point numeric value.");
    }
    sdd = *dp;

    // Modify SDD according to offset if given
    const double sample_det_offset =
        getProperty("SampleDetectorDistanceOffset");
    if (!isEmpty(sample_det_offset)) {
      sdd += sample_det_offset;
    }
  }
  dataWS->mutableRun().addProperty("sample_detector_distance", sdd, "mm", true);

  progress.report("MoveInstrumentComponent...");

  // Move the detector to its correct position
  IAlgorithm_sptr mvAlg =
      createChildAlgorithm("MoveInstrumentComponent", 0.2, 0.4);
  mvAlg->setProperty<MatrixWorkspace_sptr>("Workspace", dataWS);
  mvAlg->setProperty("ComponentName", "detector1");
  mvAlg->setProperty("Z", sdd / 1000.0);
  mvAlg->setProperty("RelativePosition", false);
  mvAlg->executeAsChildAlg();
  g_log.information() << "Moving detector to " << sdd / 1000.0 << '\n';
  output_message += " Detector position: " +
                    Poco::NumberFormatter::format(sdd / 1000.0, 3) + " m\n";

  // Compute beam diameter at the detector
  double src_to_sample = 0.0;

  try {
    src_to_sample = HFIRInstrument::getSourceToSampleDistance(dataWS);
    dataWS->mutableRun().addProperty("source-sample-distance", src_to_sample,
                                     "mm", true);
    output_message +=
        " Computed SSD from number of guides: " +
        Poco::NumberFormatter::format(src_to_sample / 1000.0, 3) + " \n";
  } catch (...) {
    Mantid::Kernel::Property *prop =
        dataWS->run().getProperty("source-sample-distance");
    Mantid::Kernel::PropertyWithValue<double> *dp =
        dynamic_cast<Mantid::Kernel::PropertyWithValue<double> *>(prop);
    src_to_sample = *dp;
    output_message +=
        " Could not compute SSD from number of guides, taking: " +
        Poco::NumberFormatter::format(src_to_sample / 1000.0, 3) + " \n";
  }

  const std::string sampleADName = "sample-aperture-diameter";
  Mantid::Kernel::Property *prop = dataWS->run().getProperty(sampleADName);
  Mantid::Kernel::PropertyWithValue<double> *dp =
      dynamic_cast<Mantid::Kernel::PropertyWithValue<double> *>(prop);
  if (!dp) {
    throw std::runtime_error("Could not cast (interpret) the property " +
                             sampleADName +
                             " as a floating point numeric value.");
  }
  double sample_apert = *dp;

  const std::string sourceADName = "source-aperture-diameter";
  prop = dataWS->run().getProperty(sourceADName);
  dp = dynamic_cast<Mantid::Kernel::PropertyWithValue<double> *>(prop);
  if (!dp) {
    throw std::runtime_error("Could not cast (interpret) the property " +
                             sourceADName +
                             " as a floating point numeric value.");
  }
  double source_apert = *dp;

  const double beam_diameter =
      sdd / src_to_sample * (source_apert + sample_apert) + sample_apert;
  dataWS->mutableRun().addProperty("beam-diameter", beam_diameter, "mm", true);

  progress.report("Move to center beam...");

  double center_x = 0;
  double center_y = 0;

  // Move the beam center to its proper position
  const bool noBeamCenter = getProperty("NoBeamCenter");
  if (!noBeamCenter) {
    center_x = getProperty("BeamCenterX");
    center_y = getProperty("BeamCenterY");
    if (isEmpty(center_x) && isEmpty(center_y)) {
      if (reductionManager->existsProperty("LatestBeamCenterX") &&
          reductionManager->existsProperty("LatestBeamCenterY")) {
        center_x = reductionManager->getProperty("LatestBeamCenterX");
        center_y = reductionManager->getProperty("LatestBeamCenterY");
      }
    }
    moveToBeamCenter(dataWS, center_x, center_y);
    progress.report();

    // Add the beam center to the reduction properties as the last beam center
    // position that was used. This will give us our default position next
    // time.
    if (!reductionManager->existsProperty("LatestBeamCenterX"))
      reductionManager->declareProperty(make_unique<PropertyWithValue<double>>(
          "LatestBeamCenterX", center_x));
    else
      reductionManager->setProperty("LatestBeamCenterX", center_x);
    if (!reductionManager->existsProperty("LatestBeamCenterY"))
      reductionManager->declareProperty(make_unique<PropertyWithValue<double>>(
          "LatestBeamCenterY", center_y));
    else
      reductionManager->setProperty("LatestBeamCenterY", center_y);

    dataWS->mutableRun().addProperty("beam_center_x", center_x, "pixel", true);
    dataWS->mutableRun().addProperty("beam_center_y", center_y, "pixel", true);
    output_message += " Beam center: " +
                      Poco::NumberFormatter::format(center_x, 1) + ", " +
                      Poco::NumberFormatter::format(center_y, 1) + "\n";
  } else {
    HFIRInstrument::getDefaultBeamCenter(dataWS, center_x, center_y);

    dataWS->mutableRun().addProperty("beam_center_x", center_x, "pixel", true);
    dataWS->mutableRun().addProperty("beam_center_y", center_y, "pixel", true);
    output_message += " Default beam center: " +
                      Poco::NumberFormatter::format(center_x, 1) + ", " +
                      Poco::NumberFormatter::format(center_y, 1) + "\n";
  }

  setProperty<MatrixWorkspace_sptr>(
      "OutputWorkspace", boost::dynamic_pointer_cast<MatrixWorkspace>(dataWS));
  setPropertyValue("OutputMessage", output_message);
}
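// Minimal standalone sketch (illustration only): the beam diameter stored by
// HFIRLoad::exec follows from projecting the source and sample apertures onto
// the detector by similar triangles,
//   d_beam = SDD / SSD * (d_source + d_sample) + d_sample.
// All numbers below are made up.
#include <iostream>

int main() {
  const double sdd = 6000.0;         // hypothetical sample-to-detector distance [mm]
  const double ssd = 12000.0;        // hypothetical source-to-sample distance [mm]
  const double source_apert = 40.0;  // hypothetical source aperture diameter [mm]
  const double sample_apert = 10.0;  // hypothetical sample aperture diameter [mm]

  const double beam_diameter =
      sdd / ssd * (source_apert + sample_apert) + sample_apert;
  std::cout << beam_diameter << " mm\n"; // 0.5 * 50 + 10 = 35 mm
}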