WorkspaceGroup_sptr
PolarizationCorrectionFredrikze::execPNR(WorkspaceGroup_sptr inWS) {
  size_t itemIndex = 0;
  MatrixWorkspace_sptr Ip =
      boost::dynamic_pointer_cast<MatrixWorkspace>(inWS->getItem(itemIndex++));
  MatrixWorkspace_sptr Ia =
      boost::dynamic_pointer_cast<MatrixWorkspace>(inWS->getItem(itemIndex++));

  const auto rho = this->getEfficiencyWorkspace(crhoLabel);
  const auto pp = this->getEfficiencyWorkspace(cppLabel);

  const auto D = pp * (rho + 1);

  const auto nIp = (Ip * (rho * pp + 1.0) + Ia * (pp - 1.0)) / D;
  const auto nIa = (Ip * (rho * pp - 1.0) + Ia * (pp + 1.0)) / D;

  // Preserve the history of the inside workspaces
  nIp->history().addHistory(Ip->getHistory());
  nIa->history().addHistory(Ia->getHistory());

  WorkspaceGroup_sptr dataOut = boost::make_shared<WorkspaceGroup>();
  dataOut->addWorkspace(nIp);
  dataOut->addWorkspace(nIa);

  return dataOut;
}
WorkspaceGroup_sptr PolarizationCorrection::execPNR(WorkspaceGroup_sptr inWS) {
  size_t itemIndex = 0;
  MatrixWorkspace_sptr Ip =
      boost::dynamic_pointer_cast<MatrixWorkspace>(inWS->getItem(itemIndex++));
  MatrixWorkspace_sptr Ia =
      boost::dynamic_pointer_cast<MatrixWorkspace>(inWS->getItem(itemIndex++));

  MatrixWorkspace_sptr ones = copyShapeAndFill(Ip, 1.0);

  const VecDouble c_rho = getProperty(crhoLabel());
  const VecDouble c_pp = getProperty(cppLabel());

  const auto rho = this->execPolynomialCorrection(
      ones, c_rho); // Execute polynomial expression
  const auto pp = this->execPolynomialCorrection(
      ones, c_pp); // Execute polynomial expression

  const auto D = pp * (rho + 1);

  const auto nIp = (Ip * (rho * pp + 1.0) + Ia * (pp - 1.0)) / D;
  const auto nIa = (Ip * (rho * pp - 1.0) + Ia * (pp + 1.0)) / D;

  // Preserve the history of the inside workspaces
  nIp->history().addHistory(Ip->getHistory());
  nIa->history().addHistory(Ia->getHistory());

  WorkspaceGroup_sptr dataOut = boost::make_shared<WorkspaceGroup>();
  dataOut->addWorkspace(nIp);
  dataOut->addWorkspace(nIa);

  return dataOut;
}
/**
 * Add a workspace to a group. The group and the workspace must be in the ADS.
 * @param groupName :: A group name.
 * @param wsName :: Name of a workspace to add to the group.
 */
void AnalysisDataServiceImpl::addToGroup(const std::string &groupName,
                                         const std::string &wsName) {
  WorkspaceGroup_sptr group = retrieveWS<WorkspaceGroup>(groupName);
  if (!group) {
    throw std::runtime_error("Workspace " + groupName +
                             " is not a workspace group.");
  }
  auto ws = retrieve(wsName);
  group->addWorkspace(ws);
  notificationCenter.postNotification(new GroupUpdatedNotification(groupName));
}
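// Illustrative sketch (not part of the original source): calling addToGroup()
// through the AnalysisDataService singleton. The names "demo_group" and
// "demo_ws" are hypothetical, and both objects are assumed to already be in
// the ADS, as the method requires.
auto &ads = AnalysisDataService::Instance();
ads.addToGroup("demo_group", "demo_ws");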
MatrixWorkspace_sptr MuonPairingAsymmetry::execGroupWorkspaceInput() {
  // Get the input workspace into a useful form
  Workspace_sptr tmpWS1 = getProperty("InputWorkspace1");
  Workspace_sptr tmpWS2 = getProperty("InputWorkspace2");
  WorkspaceGroup_sptr ws1 = workspaceToWorkspaceGroup(tmpWS1);
  WorkspaceGroup_sptr ws2 = workspaceToWorkspaceGroup(tmpWS2);
  WorkspaceGroup_sptr groupedPeriods = boost::make_shared<WorkspaceGroup>();
  for (int i = 0; i < countPeriods(ws1); i++) {
    groupedPeriods->addWorkspace(
        appendSpectra(getWorkspace(ws1, i), getWorkspace(ws2, i)));
  }

  // Do the asymmetry calculation
  const double alpha = static_cast<double>(getProperty("Alpha"));
  std::vector<int> summedPeriods = getProperty("SummedPeriods");
  std::vector<int> subtractedPeriods = getProperty("SubtractedPeriods");
  return calcPairAsymmetryWithSummedAndSubtractedPeriods(
      summedPeriods, subtractedPeriods, groupedPeriods, alpha);
}
/// Executes the algorithm
void PoldiFitPeaks2D::exec() {
  std::vector<PoldiPeakCollection_sptr> peakCollections =
      getPeakCollectionsFromInput();

  // Try to setup the 2D data and poldi instrument
  MatrixWorkspace_sptr ws = getProperty("InputWorkspace");
  setDeltaTFromWorkspace(ws);

  setPoldiInstrument(boost::make_shared<PoldiInstrumentAdapter>(ws));
  setTimeTransformerFromInstrument(m_poldiInstrument);

  // If a profile function is selected, set it on the peak collections.
  Property *profileFunctionProperty =
      getPointerToProperty("PeakProfileFunction");
  if (!profileFunctionProperty->isDefault()) {
    for (auto &peakCollection : peakCollections) {
      peakCollection->setProfileFunctionName(profileFunctionProperty->value());
    }
  }

  // Perform 2D-fit and return Fit algorithm to extract various information
  IAlgorithm_sptr fitAlgorithm = calculateSpectrum(peakCollections, ws);

  // The FitFunction is used to generate...
  IFunction_sptr fitFunction = getFunction(fitAlgorithm);

  // ...a calculated 1D-spectrum...
  MatrixWorkspace_sptr outWs1D = get1DSpectrum(fitFunction, ws);

  // ...a vector of peak collections.
  std::vector<PoldiPeakCollection_sptr> integralPeaks =
      getCountPeakCollections(fitFunction);

  for (size_t i = 0; i < peakCollections.size(); ++i) {
    assignMillerIndices(peakCollections[i], integralPeaks[i]);
  }

  // Get the calculated 2D workspace
  setProperty("OutputWorkspace", getWorkspace(fitAlgorithm));

  // Set the output peaks depending on whether it's one or more workspaces
  if (integralPeaks.size() == 1) {
    setProperty("RefinedPoldiPeakWorkspace",
                integralPeaks.front()->asTableWorkspace());
  } else {
    WorkspaceGroup_sptr peaksGroup = boost::make_shared<WorkspaceGroup>();
    for (auto &integralPeak : integralPeaks) {
      peaksGroup->addWorkspace(integralPeak->asTableWorkspace());
    }
    setProperty("RefinedPoldiPeakWorkspace", peaksGroup);
  }

  // Set the 1D-spectrum output
  setProperty("Calculated1DSpectrum", outWs1D);

  // If it was a PawleyFit, also produce one or more cell parameter tables.
  bool isPawleyFit = getProperty("PawleyFit");
  if (isPawleyFit) {
    Poldi2DFunction_sptr poldi2DFunction =
        boost::dynamic_pointer_cast<Poldi2DFunction>(fitFunction);

    std::vector<ITableWorkspace_sptr> cells;

    if (poldi2DFunction) {
      for (size_t i = 0; i < poldi2DFunction->nFunctions(); ++i) {
        try {
          ITableWorkspace_sptr cell =
              getRefinedCellParameters(poldi2DFunction->getFunction(i));
          cells.push_back(cell);
        } catch (const std::invalid_argument &) {
          // do nothing
        }
      }

      if (cells.size() == 1) {
        setProperty("RefinedCellParameters", cells.front());
      } else {
        WorkspaceGroup_sptr cellsGroup = boost::make_shared<WorkspaceGroup>();
        for (auto &cell : cells) {
          cellsGroup->addWorkspace(cell);
        }
        setProperty("RefinedCellParameters", cellsGroup);
      }
    } else {
      g_log.warning() << "Warning: Cell parameter table is empty.";
    }
  }

  // Optionally output the raw fitting parameters.
  Property *rawFitParameters = getPointerToProperty("RawFitParameters");
  if (!rawFitParameters->isDefault()) {
    ITableWorkspace_sptr parameters =
        fitAlgorithm->getProperty("OutputParameters");

    setProperty("RawFitParameters", parameters);
  }
}
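// Illustrative sketch (not part of the original source): the "single table or
// WorkspaceGroup" output pattern that exec() uses twice above, assuming a
// local std::vector<ITableWorkspace_sptr> named "tables" and an output
// property named "OutProp".
if (tables.size() == 1) {
  setProperty("OutProp", tables.front());
} else {
  WorkspaceGroup_sptr group = boost::make_shared<WorkspaceGroup>();
  for (auto &table : tables) {
    group->addWorkspace(table);
  }
  setProperty("OutProp", group);
}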
WorkspaceGroup_sptr PolarizationCorrection::execPA(WorkspaceGroup_sptr inWS) {

  if (isPropertyDefault(cAlphaLabel())) {
    throw std::invalid_argument("Must provide as input for PA: " +
                                cAlphaLabel());
  }
  if (isPropertyDefault(cApLabel())) {
    throw std::invalid_argument("Must provide as input for PA: " +
                                cApLabel());
  }

  size_t itemIndex = 0;
  MatrixWorkspace_sptr Ipp =
      boost::dynamic_pointer_cast<MatrixWorkspace>(inWS->getItem(itemIndex++));
  MatrixWorkspace_sptr Ipa =
      boost::dynamic_pointer_cast<MatrixWorkspace>(inWS->getItem(itemIndex++));
  MatrixWorkspace_sptr Iap =
      boost::dynamic_pointer_cast<MatrixWorkspace>(inWS->getItem(itemIndex++));
  MatrixWorkspace_sptr Iaa =
      boost::dynamic_pointer_cast<MatrixWorkspace>(inWS->getItem(itemIndex++));

  Ipp->setTitle("Ipp");
  Iaa->setTitle("Iaa");
  Ipa->setTitle("Ipa");
  Iap->setTitle("Iap");

  auto cropAlg = this->createChildAlgorithm("CropWorkspace");
  cropAlg->initialize();
  cropAlg->setProperty("InputWorkspace", Ipp);
  cropAlg->setProperty("EndWorkspaceIndex", 0);
  cropAlg->execute();
  MatrixWorkspace_sptr croppedIpp = cropAlg->getProperty("OutputWorkspace");

  MatrixWorkspace_sptr ones = copyShapeAndFill(croppedIpp, 1.0);
  // The ones workspace is now identical to the input workspaces in x, but has
  // 1 as y values. It can therefore be used to build real polynomial
  // functions.

  const VecDouble c_rho = getProperty(crhoLabel());
  const VecDouble c_alpha = getProperty(cAlphaLabel());
  const VecDouble c_pp = getProperty(cppLabel());
  const VecDouble c_ap = getProperty(cApLabel());

  const auto rho = this->execPolynomialCorrection(
      ones, c_rho); // Execute polynomial expression
  const auto pp = this->execPolynomialCorrection(
      ones, c_pp); // Execute polynomial expression
  const auto alpha = this->execPolynomialCorrection(
      ones, c_alpha); // Execute polynomial expression
  const auto ap = this->execPolynomialCorrection(
      ones, c_ap); // Execute polynomial expression

  const auto A0 = (Iaa * pp * ap) + (ap * Ipa * rho * pp) +
                  (ap * Iap * alpha * pp) + (Ipp * ap * alpha * rho * pp);
  const auto A1 = pp * Iaa;
  const auto A2 = pp * Iap;
  const auto A3 = ap * Iaa;
  const auto A4 = ap * Ipa;
  const auto A5 = ap * alpha * Ipp;
  const auto A6 = ap * alpha * Iap;
  const auto A7 = pp * rho * Ipp;
  const auto A8 = pp * rho * Ipa;

  const auto D = pp * ap * (rho + alpha + 1.0 + (rho * alpha));

  const auto nIpp =
      (A0 - A1 + A2 - A3 + A4 + A5 - A6 + A7 - A8 + Ipp + Iaa - Ipa - Iap) / D;
  const auto nIaa =
      (A0 + A1 - A2 + A3 - A4 - A5 + A6 - A7 + A8 + Ipp + Iaa - Ipa - Iap) / D;
  const auto nIpa =
      (A0 - A1 + A2 + A3 - A4 - A5 + A6 + A7 - A8 - Ipp - Iaa + Ipa + Iap) / D;
  const auto nIap =
      (A0 + A1 - A2 - A3 + A4 + A5 - A6 - A7 + A8 - Ipp - Iaa + Ipa + Iap) / D;

  WorkspaceGroup_sptr dataOut = boost::make_shared<WorkspaceGroup>();
  dataOut->addWorkspace(nIpp);
  dataOut->addWorkspace(nIpa);
  dataOut->addWorkspace(nIap);
  dataOut->addWorkspace(nIaa);
  size_t totalGroupEntries(dataOut->getNumberOfEntries());
  for (size_t i = 1; i < totalGroupEntries; i++) {
    auto alg = this->createChildAlgorithm("ReplaceSpecialValues");
    alg->setProperty("InputWorkspace", dataOut->getItem(i));
    alg->setProperty("OutputWorkspace", "dataOut_" + std::to_string(i));
    alg->setProperty("NaNValue", 0.0);
    alg->setProperty("NaNError", 0.0);
    alg->setProperty("InfinityValue", 0.0);
    alg->setProperty("InfinityError", 0.0);
    alg->execute();
  }
  // Preserve the history of the inside workspaces
  nIpp->history().addHistory(Ipp->getHistory());
  nIaa->history().addHistory(Iaa->getHistory());
  nIpa->history().addHistory(Ipa->getHistory());
  nIap->history().addHistory(Iap->getHistory());

  return dataOut;
}
/**
 * Create FITS file information for each file selected. Loads headers
 * and data from the files and creates and fills the output
 * workspace(s).
 *
 * @param paths File names as given in the algorithm input property
 * @param outWSName name of the output (group) workspace to create
 * @param loadAsRectImg Load files with 1 spectrum per row and 1 bin
 * per column, so a color fill plot displays the image
 * @param binSize size to rebin (1 == no re-bin == default)
 * @param noiseThresh threshold for noise filtering
 *
 * @throw std::runtime_error when load fails (for example a memory
 * allocation issue, wrong rebin requested, etc.)
 */
void LoadFITS::doLoadFiles(const std::vector<std::string> &paths,
                           const std::string &outWSName, bool loadAsRectImg,
                           int binSize, double noiseThresh) {
  std::vector<FITSInfo> headers;
  headers.resize(paths.size());

  loadHeader(paths[0], headers[0]);

  // No extension is set -> it's the standard format which we can parse.
  if (headers[0].numberOfAxis > 0)
    m_pixelCount += headers[0].axisPixelLengths[0];

  // Presumably 2 axes, but futureproofing.
  for (int i = 1; i < headers[0].numberOfAxis; ++i) {
    m_pixelCount *= headers[0].axisPixelLengths[i];
  }

  // Check consistency of binSize asap
  for (int i = 0; i < headers[0].numberOfAxis; ++i) {
    if (0 != (headers[0].axisPixelLengths[i] % binSize)) {
      throw std::runtime_error(
          "Cannot rebin this image in blocks of " + std::to_string(binSize) +
          " x " + std::to_string(binSize) +
          " pixels as requested because the size of dimension " +
          std::to_string(i + 1) + " (" +
          std::to_string(headers[0].axisPixelLengths[i]) +
          ") is not a multiple of the bin size.");
    }
  }

  MantidImage imageY(headers[0].axisPixelLengths[1],
                     std::vector<double>(headers[0].axisPixelLengths[0]));
  MantidImage imageE(headers[0].axisPixelLengths[1],
                     std::vector<double>(headers[0].axisPixelLengths[0]));

  size_t bytes = (headers[0].bitsPerPixel / 8) * m_pixelCount;
  std::vector<char> buffer;
  try {
    buffer.resize(bytes);
  } catch (std::exception &) {
    throw std::runtime_error(
        "Could not allocate enough memory to run when trying to allocate " +
        std::to_string(bytes) + " bytes.");
  }

  // Create a group for these new workspaces, if the group already exists, add
  // to it.
  size_t fileNumberInGroup = 0;
  WorkspaceGroup_sptr wsGroup;

  if (!AnalysisDataService::Instance().doesExist(outWSName)) {
    wsGroup = boost::make_shared<WorkspaceGroup>();
    wsGroup->setTitle(outWSName);
  } else {
    // Get the name of the latest file in group to start numbering from
    if (AnalysisDataService::Instance().doesExist(outWSName))
      wsGroup =
          AnalysisDataService::Instance().retrieveWS<WorkspaceGroup>(outWSName);

    std::string latestName = wsGroup->getNames().back();
    // Set next file number
    fileNumberInGroup = fetchNumber(latestName) + 1;
  }

  size_t totalWS = headers.size();
  // Create a progress reporting object
  API::Progress progress(this, 0, 1, totalWS + 1);
  progress.report(0, "Loading file(s) into workspace(s)");

  // Create first workspace (with instrument definition). This is also used as
  // a template for creating others
  Workspace2D_sptr imgWS;
  imgWS = makeWorkspace(headers[0], fileNumberInGroup, buffer, imageY, imageE,
                        imgWS, loadAsRectImg, binSize, noiseThresh);
  progress.report(1, "First file loaded.");

  wsGroup->addWorkspace(imgWS);

  if (!isInstrOtherThanIMAT(headers[0])) {
    // For now we assume IMAT except when specific headers are found by
    // isInstrOtherThanIMAT()
    //
    // TODO: do this conditional on INSTR='IMAT' when we have proper IMAT .fits
    // files
    try {
      IAlgorithm_sptr loadInst = createChildAlgorithm("LoadInstrument");
      std::string directoryName =
          Kernel::ConfigService::Instance().getInstrumentDirectory();
      directoryName = directoryName + "/IMAT_Definition.xml";
      loadInst->setPropertyValue("Filename", directoryName);
      loadInst->setProperty<MatrixWorkspace_sptr>(
          "Workspace", boost::dynamic_pointer_cast<MatrixWorkspace>(imgWS));
      loadInst->execute();
    } catch (std::exception &ex) {
      g_log.information("Cannot load the instrument definition. " +
                        std::string(ex.what()));
    }
  }

  // don't feel tempted to parallelize this loop as it is - it uses the same
  // imageY and imageE buffers for all the workspaces
  for (int64_t i = 1; i < static_cast<int64_t>(totalWS); ++i) {
    loadHeader(paths[i], headers[i]);
    // Check each header is valid/supported: standard (no extension to
    // FITS), has two axes, and is consistent with the first header
    headerSanityCheck(headers[i], headers[0]);

    imgWS = makeWorkspace(headers[i], fileNumberInGroup, buffer, imageY, imageE,
                          imgWS, loadAsRectImg, binSize, noiseThresh);

    progress.report("Loaded file " + std::to_string(i + 1) + " of " +
                    std::to_string(totalWS));

    wsGroup->addWorkspace(imgWS);
  }

  setProperty("OutputWorkspace", wsGroup);
}
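// Illustrative sketch (not part of the original source): the "create the
// output group, or append to the existing one in the ADS" pattern used by
// doLoadFiles() above; "outWSName" and "newWorkspace" are assumed to be in
// scope.
WorkspaceGroup_sptr group;
if (!AnalysisDataService::Instance().doesExist(outWSName)) {
  group = boost::make_shared<WorkspaceGroup>();
  group->setTitle(outWSName);
} else {
  group = AnalysisDataService::Instance().retrieveWS<WorkspaceGroup>(outWSName);
}
group->addWorkspace(newWorkspace);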
/** Execute the algorithm. */
void PoldiIndexKnownCompounds::exec() {
  g_log.information() << "Starting POLDI peak indexing." << std::endl;

  DataObjects::TableWorkspace_sptr peakTableWorkspace =
      getProperty("InputWorkspace");

  PoldiPeakCollection_sptr unindexedPeaks =
      boost::make_shared<PoldiPeakCollection>(peakTableWorkspace);
  g_log.information() << " Number of peaks: " << unindexedPeaks->peakCount()
                      << std::endl;

  std::vector<Workspace_sptr> workspaces =
      getWorkspaces(getProperty("CompoundWorkspaces"));
  std::vector<PoldiPeakCollection_sptr> peakCollections =
      getPeakCollections(workspaces);
  g_log.information() << " Number of phases: " << peakCollections.size()
                      << std::endl;

  /* The procedure is much easier to formulate with some state stored in member
   * variables, which are initialized either from user input or from some
   * defaults.
   */
  setMeasuredPeaks(unindexedPeaks);
  setExpectedPhases(peakCollections);
  setExpectedPhaseNames(getWorkspaceNames(workspaces));

  initializeUnindexedPeaks();
  initializeIndexedPeaks(m_expectedPhases);

  /* For calculating scores in the indexing procedure, scattering contributions
   * are used. The structure factors are scaled accordingly.
   */
  std::vector<double> contributions =
      getContributions(m_expectedPhases.size());
  std::vector<double> normalizedContributions =
      getNormalizedContributions(contributions);

  scaleIntensityEstimates(peakCollections, normalizedContributions);
  scaleToExperimentalValues(peakCollections, unindexedPeaks);

  // Tolerances on the other hand are handled as "FWHM".
  std::vector<double> tolerances = getTolerances(m_expectedPhases.size());
  assignFwhmEstimates(peakCollections, tolerances);

  // With all necessary state assigned, the indexing procedure can be executed
  indexPeaks(unindexedPeaks, peakCollections);

  g_log.information() << " Unindexed peaks: " << m_unindexedPeaks->peakCount()
                      << std::endl;

  /* Finally, the peaks are put into separate workspaces, determined by
   * the phase they have been attributed to, plus unindexed peaks.
   */
  std::string inputWorkspaceName = getPropertyValue("InputWorkspace");
  WorkspaceGroup_sptr outputWorkspaces = boost::make_shared<WorkspaceGroup>();

  for (size_t i = 0; i < m_indexedPeaks.size(); ++i) {
    PoldiPeakCollection_sptr intensitySorted =
        getIntensitySortedPeakCollection(m_indexedPeaks[i]);
    assignCrystalStructureParameters(intensitySorted, m_expectedPhases[i]);

    ITableWorkspace_sptr tableWs = intensitySorted->asTableWorkspace();
    AnalysisDataService::Instance().addOrReplace(
        inputWorkspaceName + "_indexed_" + m_phaseNames[i], tableWs);

    outputWorkspaces->addWorkspace(tableWs);
  }

  ITableWorkspace_sptr unindexedTableWs = m_unindexedPeaks->asTableWorkspace();

  AnalysisDataService::Instance().addOrReplace(
      inputWorkspaceName + "_unindexed", unindexedTableWs);
  outputWorkspaces->addWorkspace(unindexedTableWs);

  setProperty("OutputWorkspace", outputWorkspaces);
}
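// Illustrative sketch (not part of the original source): each indexed-peak
// table above is first registered in the ADS under a name derived from the
// input workspace and then added to the output group; "baseName", "suffix"
// and "table" are assumed local variables.
AnalysisDataService::Instance().addOrReplace(baseName + "_indexed_" + suffix,
                                             table);
outputWorkspaces->addWorkspace(table);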
/**
 * Read histogram data
 * @param histogramEntries map of the file entries that have histogram
 * @param outputGroup pointer to the workspace group
 * @param nxFile Reads data from inside first top entry
 */
void LoadMcStas::readHistogramData(
    const std::map<std::string, std::string> &histogramEntries,
    WorkspaceGroup_sptr &outputGroup, ::NeXus::File &nxFile) {

  std::string nameAttrValueYLABEL;

  for (const auto &histogramEntry : histogramEntries) {
    const std::string &dataName = histogramEntry.first;
    const std::string &dataType = histogramEntry.second;

    // open second level entry
    nxFile.openGroup(dataName, dataType);

    // grab title to use to e.g. create workspace name
    std::string nameAttrValueTITLE;
    nxFile.getAttr("filename", nameAttrValueTITLE);

    if (nxFile.hasAttr("ylabel")) {
      nxFile.getAttr("ylabel", nameAttrValueYLABEL);
    }

    // Find the axis names
    auto nxdataEntries = nxFile.getEntries();
    std::string axis1Name, axis2Name;
    for (auto &nxdataEntry : nxdataEntries) {
      if (nxdataEntry.second == "NXparameters")
        continue;
      if (nxdataEntry.first == "ncount")
        continue;
      nxFile.openData(nxdataEntry.first);

      if (nxFile.hasAttr("axis")) {
        int axisNo(0);
        nxFile.getAttr("axis", axisNo);
        if (axisNo == 1)
          axis1Name = nxdataEntry.first;
        else if (axisNo == 2)
          axis2Name = nxdataEntry.first;
        else
          throw std::invalid_argument("Unknown axis number");
      }
      nxFile.closeData();
    }

    std::vector<double> axis1Values, axis2Values;
    nxFile.readData<double>(axis1Name, axis1Values);
    if (axis2Name.length() == 0) {
      axis2Name = nameAttrValueYLABEL;
      axis2Values.push_back(0.0);
    } else {
      nxFile.readData<double>(axis2Name, axis2Values);
    }

    const size_t axis1Length = axis1Values.size();
    const size_t axis2Length = axis2Values.size();
    g_log.debug() << "Axis lengths=" << axis1Length << " " << axis2Length
                  << '\n';

    // Require "data" field
    std::vector<double> data;
    nxFile.readData<double>("data", data);

    // Optional errors field
    std::vector<double> errors;
    try {
      nxFile.readData<double>("errors", errors);
    } catch (::NeXus::Exception &) {
      g_log.information() << "Field " << dataName
                          << " contains no error information.\n";
    }

    // close second level entry
    nxFile.closeGroup();

    MatrixWorkspace_sptr ws = WorkspaceFactory::Instance().create(
        "Workspace2D", axis2Length, axis1Length, axis1Length);
    Axis *axis1 = ws->getAxis(0);
    axis1->title() = axis1Name; // Set caption
    auto lblUnit = boost::make_shared<Units::Label>();
    lblUnit->setLabel(axis1Name, "");
    axis1->unit() = lblUnit;

    Axis *axis2 = new NumericAxis(axis2Length);
    axis2->title() = axis2Name; // Set caption
    lblUnit = boost::make_shared<Units::Label>();
    lblUnit->setLabel(axis2Name, "");
    axis2->unit() = lblUnit;

    ws->setYUnit(axis2Name);
    ws->replaceAxis(1, axis2);

    for (size_t wsIndex = 0; wsIndex < axis2Length; ++wsIndex) {
      auto &dataY = ws->dataY(wsIndex);
      auto &dataE = ws->dataE(wsIndex);
      auto &dataX = ws->dataX(wsIndex);

      for (size_t j = 0; j < axis1Length; ++j) {
        // Data is stored in column-major order so we are translating to
        // row major for Mantid
        const size_t fileDataIndex = j * axis2Length + wsIndex;

        dataY[j] = data[fileDataIndex];
        dataX[j] = axis1Values[j];
        if (!errors.empty())
          dataE[j] = errors[fileDataIndex];
      }
      axis2->setValue(wsIndex, axis2Values[wsIndex]);
    }

    // set the workspace title
    ws->setTitle(nameAttrValueTITLE);

    // use the workspace title to create the workspace name
    std::replace(nameAttrValueTITLE.begin(), nameAttrValueTITLE.end(), ' ',
                 '_');

    // ensure that the specified name is given to the workspace when added to
    // the output group
    std::string nameOfGroupWS = getProperty("OutputWorkspace");
    std::string nameUserSee = nameAttrValueTITLE + "_" + nameOfGroupWS;
    std::string extraProperty =
        "Outputworkspace_dummy_" + std::to_string(m_countNumWorkspaceAdded);
    declareProperty(Kernel::make_unique<WorkspaceProperty<Workspace>>(
        extraProperty, nameUserSee, Direction::Output));
    setProperty(extraProperty, boost::static_pointer_cast<Workspace>(ws));
    m_countNumWorkspaceAdded++; // need to increment to keep the extraProperty
                                // names unique

    // Make Mantid store the workspace in the group
    outputGroup->addWorkspace(ws);
  }
  nxFile.closeGroup();
} // finish
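// Illustrative sketch (not part of the original source): the dynamic output
// property pattern used above so that each member of the output group is
// stored under its own, user-visible name; "memberName", "ws" and the counter
// "m_countNumWorkspaceAdded" are taken from the surrounding code, and the
// snippet is assumed to run inside the algorithm.
std::string extraProperty =
    "Outputworkspace_dummy_" + std::to_string(m_countNumWorkspaceAdded);
declareProperty(Kernel::make_unique<WorkspaceProperty<Workspace>>(
    extraProperty, memberName, Direction::Output));
setProperty(extraProperty, boost::static_pointer_cast<Workspace>(ws));
m_countNumWorkspaceAdded++; // keep the dummy property names unique
outputGroup->addWorkspace(ws);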
WorkspaceGroup_sptr
PolarizationCorrectionFredrikze::execPA(WorkspaceGroup_sptr inWS) {

  size_t itemIndex = 0;
  MatrixWorkspace_sptr Ipp =
      boost::dynamic_pointer_cast<MatrixWorkspace>(inWS->getItem(itemIndex++));
  MatrixWorkspace_sptr Ipa =
      boost::dynamic_pointer_cast<MatrixWorkspace>(inWS->getItem(itemIndex++));
  MatrixWorkspace_sptr Iap =
      boost::dynamic_pointer_cast<MatrixWorkspace>(inWS->getItem(itemIndex++));
  MatrixWorkspace_sptr Iaa =
      boost::dynamic_pointer_cast<MatrixWorkspace>(inWS->getItem(itemIndex++));

  Ipp->setTitle("Ipp");
  Iaa->setTitle("Iaa");
  Ipa->setTitle("Ipa");
  Iap->setTitle("Iap");

  const auto rho = this->getEfficiencyWorkspace(crhoLabel);
  const auto pp = this->getEfficiencyWorkspace(cppLabel);
  const auto alpha = this->getEfficiencyWorkspace(cAlphaLabel);
  const auto ap = this->getEfficiencyWorkspace(cApLabel);

  const auto A0 = (Iaa * pp * ap) + (Ipa * ap * rho * pp) +
                  (Iap * ap * alpha * pp) + (Ipp * ap * alpha * rho * pp);
  const auto A1 = Iaa * pp;
  const auto A2 = Iap * pp;
  const auto A3 = Iaa * ap;
  const auto A4 = Ipa * ap;
  const auto A5 = Ipp * ap * alpha;
  const auto A6 = Iap * ap * alpha;
  const auto A7 = Ipp * pp * rho;
  const auto A8 = Ipa * pp * rho;

  const auto D = pp * ap * (rho + alpha + 1.0 + (rho * alpha));

  const auto nIpp =
      (A0 - A1 + A2 - A3 + A4 + A5 - A6 + A7 - A8 + Ipp + Iaa - Ipa - Iap) / D;
  const auto nIaa =
      (A0 + A1 - A2 + A3 - A4 - A5 + A6 - A7 + A8 + Ipp + Iaa - Ipa - Iap) / D;
  const auto nIap =
      (A0 - A1 + A2 + A3 - A4 - A5 + A6 + A7 - A8 - Ipp - Iaa + Ipa + Iap) / D;
  const auto nIpa =
      (A0 + A1 - A2 - A3 + A4 + A5 - A6 - A7 + A8 - Ipp - Iaa + Ipa + Iap) / D;

  WorkspaceGroup_sptr dataOut = boost::make_shared<WorkspaceGroup>();
  dataOut->addWorkspace(nIpp);
  dataOut->addWorkspace(nIpa);
  dataOut->addWorkspace(nIap);
  dataOut->addWorkspace(nIaa);
  size_t totalGroupEntries(dataOut->getNumberOfEntries());
  for (size_t i = 1; i < totalGroupEntries; i++) {
    auto alg = this->createChildAlgorithm("ReplaceSpecialValues");
    alg->setProperty("InputWorkspace", dataOut->getItem(i));
    alg->setProperty("OutputWorkspace", "dataOut_" + std::to_string(i));
    alg->setProperty("NaNValue", 0.0);
    alg->setProperty("NaNError", 0.0);
    alg->setProperty("InfinityValue", 0.0);
    alg->setProperty("InfinityError", 0.0);
    alg->execute();
  }
  // Preserve the history of the inside workspaces
  nIpp->history().addHistory(Ipp->getHistory());
  nIaa->history().addHistory(Iaa->getHistory());
  nIpa->history().addHistory(Ipa->getHistory());
  nIap->history().addHistory(Iap->getHistory());

  return dataOut;
}
/**
 * Read event data
 * @param eventEntries map of the file entries that have events
 * @param outputGroup pointer to the workspace group
 * @param nxFile Reads data from inside first top entry
 */
void LoadMcStas::readEventData(
    const std::map<std::string, std::string> &eventEntries,
    WorkspaceGroup_sptr &outputGroup, ::NeXus::File &nxFile) {
  std::string filename = getPropertyValue("Filename");
  auto entries = nxFile.getEntries();

  // will assume that each top level entry contains one mcstas
  // generated IDF and any event data entries within this top level
  // entry are data collected for that instrument
  // This code for loading the instrument is for now adjusted code from
  // ExperimentalInfo.

  // Close data folder and go back to top level. Then read and close the
  // Instrument folder.
  nxFile.closeGroup();

  Geometry::Instrument_sptr instrument;

  // Initialize progress reporting
  int reports = 2;
  const double progressFractionInitial = 0.1;
  Progress progInitial(this, 0.0, progressFractionInitial, reports);

  try {
    nxFile.openGroup("instrument", "NXinstrument");
    std::string instrumentXML;
    nxFile.openGroup("instrument_xml", "NXnote");
    nxFile.readData("data", instrumentXML);
    nxFile.closeGroup();
    nxFile.closeGroup();

    progInitial.report("Loading instrument");

    Geometry::InstrumentDefinitionParser parser;
    std::string instrumentName = "McStas";
    parser.initialize(filename, instrumentName, instrumentXML);
    std::string instrumentNameMangled = parser.getMangledName();

    // Check whether the instrument is already in the InstrumentDataService
    if (InstrumentDataService::Instance().doesExist(instrumentNameMangled)) {
      // If it does, just use the one stored there
      instrument =
          InstrumentDataService::Instance().retrieve(instrumentNameMangled);
    } else {
      // Really create the instrument
      instrument = parser.parseXML(NULL);
      // Add to data service for later retrieval
      InstrumentDataService::Instance().add(instrumentNameMangled, instrument);
    }
  } catch (...) {
    // Loader should not stop if there is no IDF.xml
    g_log.warning()
        << "\nCould not find the instrument description in the Nexus file:"
        << filename << " Ignoring event data from this file." << std::endl;
    return;
  }
  // Finished reading Instrument. Then open new data folder again
  nxFile.openGroup("data", "NXdetector");

  // create and prepare an event workspace ready to receive the mcstas events
  progInitial.report("Set up EventWorkspace");
  EventWorkspace_sptr eventWS(new EventWorkspace());
  // initialize, where create up front number of eventlists = number of
  // detectors
  eventWS->initialize(instrument->getNumberDetectors(), 1, 1);
  // Set the units
  eventWS->getAxis(0)->unit() = UnitFactory::Instance().create("TOF");
  eventWS->setYUnit("Counts");
  // set the instrument
  eventWS->setInstrument(instrument);
  // assign detector ID to eventlists
  std::vector<detid_t> detIDs = instrument->getDetectorIDs();

  for (size_t i = 0; i < instrument->getNumberDetectors(); i++) {
    eventWS->getEventList(i).addDetectorID(detIDs[i]);
    // spectrum numbers are treated as equal to detector IDs for McStas data
    eventWS->getEventList(i).setSpectrumNo(detIDs[i]);
  }
  // the one is here for the moment for backward compatibility
  eventWS->rebuildSpectraMapping(true);

  bool isAnyNeutrons = false;
  // to store shortest and longest recorded TOF
  double shortestTOF(0.0);
  double longestTOF(0.0);

  const size_t numEventEntries = eventEntries.size();
  Progress progEntries(this, progressFractionInitial, 1.0,
                       numEventEntries * 2);
  for (auto eit = eventEntries.begin(); eit != eventEntries.end(); ++eit) {
    std::string dataName = eit->first;
    std::string dataType = eit->second;

    // open second level entry
    nxFile.openGroup(dataName, dataType);
    std::vector<double> data;
    nxFile.openData("events");
    progEntries.report("read event data from nexus");

    // Need to take into account that the nexus readData method reads a
    // multi-column data entry into a vector.
    // The number of data columns for each neutron is here hardcoded to
    // (p, x, y, n, id, t). Thus we have
    // column 0 : p  neutron weight
    // column 1 : x  x coordinate
    // column 2 : y  y coordinate
    // column 3 : n  accumulated number of neutrons
    // column 4 : id pixel id
    // column 5 : t  time

    // get info about event data
    ::NeXus::Info id_info = nxFile.getInfo();
    if (id_info.dims.size() != 2) {
      g_log.error() << "Event data in McStas nexus file not loaded. Expected "
                       "event data block to be two dimensional" << std::endl;
      return;
    }
    int64_t nNeutrons = id_info.dims[0];
    int64_t numberOfDataColumn = id_info.dims[1];
    if (nNeutrons && numberOfDataColumn != 6) {
      g_log.error() << "Event data in McStas nexus file expecting 6 columns"
                    << std::endl;
      return;
    }
    if (isAnyNeutrons == false && nNeutrons > 0)
      isAnyNeutrons = true;

    std::vector<int64_t> start(2);
    std::vector<int64_t> step(2);

    // read the event data in blocks. 1 million events is 1000000*6*8 bytes,
    // about 50 MB
    int64_t nNeutronsInBlock = 1000000;
    int64_t nOfFullBlocks = nNeutrons / nNeutronsInBlock;
    int64_t nRemainingNeutrons = nNeutrons - nOfFullBlocks * nNeutronsInBlock;
    // sum over number of blocks + 1 to cover the remainder
    for (int64_t iBlock = 0; iBlock < nOfFullBlocks + 1; iBlock++) {
      if (iBlock == nOfFullBlocks) {
        // read remaining neutrons
        start[0] = nOfFullBlocks * nNeutronsInBlock;
        start[1] = 0;
        step[0] = nRemainingNeutrons;
        step[1] = numberOfDataColumn;
      } else {
        // read neutrons in a full block
        start[0] = iBlock * nNeutronsInBlock;
        start[1] = 0;
        step[0] = nNeutronsInBlock;
        step[1] = numberOfDataColumn;
      }
      const int64_t nNeutronsForthisBlock =
          step[0]; // number of neutrons read for this block
      data.resize(nNeutronsForthisBlock * numberOfDataColumn);

      // Check that the type is what it is supposed to be
      if (id_info.type == ::NeXus::FLOAT64) {
        nxFile.getSlab(&data[0], start, step);
      } else {
        g_log.warning()
            << "Entry event field is not FLOAT64! It will be skipped.\n";
        continue;
      }

      // populate workspace with McStas events
      const detid2index_map detIDtoWSindex_map =
          eventWS->getDetectorIDToWorkspaceIndexMap(true);

      progEntries.report("read event data into workspace");
      for (int64_t in = 0; in < nNeutronsForthisBlock; in++) {
        const int detectorID =
            static_cast<int>(data[4 + numberOfDataColumn * in]);
        const double detector_time = data[5 + numberOfDataColumn * in] *
                                     1.0e6; // convert to microseconds
        if (in == 0 && iBlock == 0) {
          shortestTOF = detector_time;
          longestTOF = detector_time;
        } else {
          if (detector_time < shortestTOF)
            shortestTOF = detector_time;
          if (detector_time > longestTOF)
            longestTOF = detector_time;
        }

        const size_t workspaceIndex =
            detIDtoWSindex_map.find(detectorID)->second;

        int64_t pulse_time = 0;
        // eventWS->getEventList(workspaceIndex) +=
        //     TofEvent(detector_time, pulse_time);
        // eventWS->getEventList(workspaceIndex) += TofEvent(detector_time);
        eventWS->getEventList(workspaceIndex) += WeightedEvent(
            detector_time, pulse_time, data[numberOfDataColumn * in], 1.0);
      }
    } // end reading over number of blocks of an event dataset

    // nxFile.getData(data);
    nxFile.closeData();
    nxFile.closeGroup();

  } // end reading over number of event datasets

  // Create a default TOF-vector for histogramming, for now just 2 bins.
  // 2 bins is the standard. However for McStas simulation data it may make
  // sense to increase this number for better initial visual effect
  Kernel::cow_ptr<MantidVec> axis;
  MantidVec &xRef = axis.access();
  xRef.resize(2, 0.0);
  // if ( nNeutrons > 0)
  if (isAnyNeutrons) {
    xRef[0] = shortestTOF - 1; // Just to make sure the bins hold it all
    xRef[1] = longestTOF + 1;
  }
  // Set the binning axis
  eventWS->setAllX(axis);

  // ensure that the specified name is given to the workspace (eventWS) when
  // added to outputGroup
  std::string nameOfGroupWS = getProperty("OutputWorkspace");
  std::string nameUserSee = std::string("EventData_") + nameOfGroupWS;
  std::string extraProperty =
      "Outputworkspace_dummy_" +
      boost::lexical_cast<std::string>(m_countNumWorkspaceAdded);
  declareProperty(new WorkspaceProperty<Workspace>(extraProperty, nameUserSee,
                                                   Direction::Output));
  setProperty(extraProperty, boost::static_pointer_cast<Workspace>(eventWS));
  m_countNumWorkspaceAdded++; // need to increment to keep the extraProperty
                              // names unique

  outputGroup->addWorkspace(eventWS);
}
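// Illustrative sketch (not part of the original source): the WorkspaceGroup
// output pattern shared by the snippets above, written as it would appear
// inside an Algorithm::exec() body; "memberWorkspaces" is an assumed local
// container of Workspace_sptr.
WorkspaceGroup_sptr group = boost::make_shared<WorkspaceGroup>();
for (const auto &member : memberWorkspaces) {
  group->addWorkspace(member); // the group holds a shared pointer to each member
}
setProperty("OutputWorkspace", group);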