/** This is a slightly "clever" method as it makes some guesses about where best
 *  to look for the right Q bin, based on the fact that the input Qs (calculated from wavelengths) tend
 *  to go down while the output Qs are always in ascending order
 *  @param[in] OutQs the array of output Q bin boundaries, this finds the bin that contains the QIn value
 *  @param[in] QToFind the Q value to find the correct bin for
 *  @param[in, out] loc points to the bin boundary (in the OutQs array) whose Q is higher than QToFind and higher by the smallest amount. The algorithm starts by checking the value of loc passed in and then all the bins _downwards_ through the array
 */
void Q1D2::getQBinPlus1(const MantidVec &OutQs, const double QToFind,
                        MantidVec::const_iterator &loc) const {
  if (loc != OutQs.end()) {
    while (loc != OutQs.begin()) {
      if ((QToFind >= *(loc - 1)) && (QToFind < *loc)) {
        return;
      }
      --loc;
    }
    if (QToFind < *loc) {
      // QToFind is outside the array, leave loc == OutQs.begin()
      return;
    }
  } else // loc == OutQs.end()
  {
    if (OutQs.empty() || QToFind > *(loc - 1)) {
      // outside the array, leave loc == OutQs.end()
      return;
    }
  }
  // We are lost; normally the order of the Q values means we only get here on
  // the first iteration. It's slow
  loc = std::lower_bound(OutQs.begin(), OutQs.end(), QToFind);
}
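// A toy, self-contained illustration (not Mantid code) of the bin lookup that
// getQBinPlus1 performs, using its std::lower_bound fallback on a small set of
// boundaries. MantidVec is assumed to be a std::vector<double>, as it is in Mantid.
#include <algorithm>
#include <cassert>
#include <vector>

int main() {
  const std::vector<double> outQs = {0.0, 0.1, 0.2, 0.4, 0.8};

  // lower_bound returns the first boundary not less than the value, so a value
  // strictly inside a bin satisfies *(loc - 1) <= Q < *loc.
  auto loc = std::lower_bound(outQs.begin(), outQs.end(), 0.25);
  assert(*(loc - 1) <= 0.25 && 0.25 < *loc); // 0.25 falls in the bin [0.2, 0.4)

  // Values outside the boundaries end up at begin() or end(), which callers must check.
  assert(std::lower_bound(outQs.begin(), outQs.end(), -1.0) == outQs.begin());
  assert(std::lower_bound(outQs.begin(), outQs.end(), 9.0) == outQs.end());
  return 0;
}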
/** Calculates Sn as estimator of scale for given vector
 *
 * This method implements a naive calculation of Sn, as defined by Rousseeuw
 * and Croux (http://dx.doi.org/10.2307%2F2291267).
 * In contrast to standard deviation, this is more robust towards outliers.
 *
 * @param begin :: Beginning of vector.
 * @param end :: End of vector.
 * @return Sn of supplied data.
 */
double PoldiPeakSearch::getSn(MantidVec::const_iterator begin,
                              MantidVec::const_iterator end) const {
  size_t numberOfPoints = std::distance(begin, end);

  MantidVec absoluteDifferenceMedians(numberOfPoints);

  PARALLEL_FOR_NO_WSP_CHECK()
  for (int i = 0; i < static_cast<int>(numberOfPoints); ++i) {
    double currentValue = *(begin + i);
    MantidVec temp;
    temp.reserve(numberOfPoints - 1);
    for (int j = 0; j < static_cast<int>(numberOfPoints); ++j) {
      if (j != i) {
        temp.push_back(fabs(*(begin + j) - currentValue));
      }
    }
    std::sort(temp.begin(), temp.end());

    absoluteDifferenceMedians[i] =
        getMedianFromSortedVector(temp.begin(), temp.end());
  }

  std::sort(absoluteDifferenceMedians.begin(), absoluteDifferenceMedians.end());

  return 1.1926 * getMedianFromSortedVector(absoluteDifferenceMedians.begin(),
                                            absoluteDifferenceMedians.end());
}
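// A minimal, self-contained sketch (independent of Mantid) of the same estimator,
// Sn = 1.1926 * median_i( median_{j != i} |x_i - x_j| ), on a plain std::vector<double>.
// The median() helper below is a hypothetical stand-in for getMedianFromSortedVector.
#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

static double median(std::vector<double> v) {
  std::sort(v.begin(), v.end());
  const size_t n = v.size();
  return (n % 2 == 1) ? v[n / 2] : 0.5 * (v[n / 2 - 1] + v[n / 2]);
}

static double snEstimator(const std::vector<double> &x) {
  std::vector<double> innerMedians;
  innerMedians.reserve(x.size());
  for (size_t i = 0; i < x.size(); ++i) {
    std::vector<double> diffs;
    diffs.reserve(x.size() - 1);
    for (size_t j = 0; j < x.size(); ++j) {
      if (j != i)
        diffs.push_back(std::fabs(x[j] - x[i]));
    }
    innerMedians.push_back(median(diffs));
  }
  return 1.1926 * median(innerMedians);
}

int main() {
  // The single outlier (100.0) barely affects Sn, unlike the standard deviation.
  const std::vector<double> data = {1.0, 2.0, 2.5, 3.0, 100.0};
  std::printf("Sn = %f\n", snEstimator(data));
  return 0;
}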
/** Computes a background estimation with an error
 *
 * This method computes an estimate of the average background along with its
 * deviation. Since the background does not follow a normal distribution and
 * may contain outliers, instead of computing the average and standard
 * deviation, the median is used as location estimator and Sn is used as scale
 * estimator. For details regarding the latter refer to PoldiPeakSearch::getSn.
 *
 * @param peakPositions :: Peak positions.
 * @param correlationCounts :: Data from which the peak positions were extracted.
 * @return Background estimation with error.
 */
UncertainValue PoldiPeakSearch::getBackgroundWithSigma(
    std::list<MantidVec::const_iterator> peakPositions,
    const MantidVec &correlationCounts) const {
  MantidVec background = getBackground(peakPositions, correlationCounts);

  /* Instead of using Mean and Standard deviation, which are appropriate
   * for data originating from a normal distribution (which is not the case
   * for background of POLDI correlation spectra), the more robust measures
   * Median and Sn are used.
   */
  std::sort(background.begin(), background.end());
  double meanBackground =
      getMedianFromSortedVector(background.begin(), background.end());

  double sigmaBackground = getSn(background.begin(), background.end());

  return UncertainValue(meanBackground, sigmaBackground);
}
int UnwrapSNS::unwrapX(const MantidVec &datain, MantidVec &dataout,
                       const double &Ld) {
  MantidVec tempX_L; // lower half - to be frame wrapped
  tempX_L.reserve(m_XSize);
  tempX_L.clear();
  MantidVec tempX_U; // upper half - to not be frame wrapped
  tempX_U.reserve(m_XSize);
  tempX_U.clear();

  double filterVal = m_Tmin * Ld / m_LRef;
  dataout.clear();
  int specialBin = 0;
  for (int bin = 0; bin < m_XSize; ++bin) {
    // This is the time-of-flight value under consideration in the current
    // iteration of the loop
    const double tof = datain[bin];
    if (tof < filterVal) {
      tempX_L.push_back(tof + m_frameWidth);
      // Record the bins that fall in this range for copying over the data &
      // errors
      if (specialBin < bin)
        specialBin = bin;
    } else {
      tempX_U.push_back(tof);
    }
  } // loop over X values

  // now put it back into the vector supplied
  dataout.clear();
  dataout.insert(dataout.begin(), tempX_U.begin(), tempX_U.end());
  dataout.insert(dataout.end(), tempX_L.begin(), tempX_L.end());
  assert(datain.size() == dataout.size());
  return specialBin;
}
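// A toy, self-contained sketch (not Mantid code) of the frame-unwrapping idea above:
// TOF values that arrive before filterVal = Tmin * Ld / LRef really belong to the next
// frame, so they are shifted up by one frame width and appended after the unwrapped part.
#include <cassert>
#include <vector>

int main() {
  const double frameWidth = 10.0;
  const double filterVal = 3.0; // stands in for m_Tmin * Ld / m_LRef

  const std::vector<double> tofIn = {1.0, 2.0, 4.0, 6.0, 9.0};
  std::vector<double> lower, upper;
  for (double tof : tofIn) {
    if (tof < filterVal)
      lower.push_back(tof + frameWidth); // wrapped into the next frame
    else
      upper.push_back(tof);
  }

  std::vector<double> tofOut(upper);
  tofOut.insert(tofOut.end(), lower.begin(), lower.end());

  // {4, 6, 9} stay put; {1, 2} become {11, 12} and follow them.
  assert((tofOut == std::vector<double>{4.0, 6.0, 9.0, 11.0, 12.0}));
  return 0;
}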
MantidVec operator()(const MantidVec &_Left, const MantidVec &_Right) const {
  // combine the two input vectors element-by-element using SumGaussError
  MantidVec v(_Left.size());
  std::transform(_Left.begin(), _Left.end(), _Right.begin(), v.begin(),
                 SumGaussError<double>());
  return (v);
}
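// A self-contained sketch of the element-wise combination above, assuming that
// SumGaussError adds two Gaussian errors in quadrature, i.e. sqrt(a*a + b*b). The
// functor's exact definition lives elsewhere in Mantid; this is only an illustration.
#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

struct QuadratureSum {
  double operator()(double a, double b) const { return std::sqrt(a * a + b * b); }
};

int main() {
  const std::vector<double> e1 = {3.0, 1.0, 0.0};
  const std::vector<double> e2 = {4.0, 1.0, 2.0};
  std::vector<double> combined(e1.size());
  std::transform(e1.begin(), e1.end(), e2.begin(), combined.begin(), QuadratureSum());
  std::printf("%f %f %f\n", combined[0], combined[1], combined[2]); // 5, 1.414..., 2
  return 0;
}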
/**
 * load vectors onto a Workspace2D with 3 bins (the three components of the vectors)
 * dataX for the origin of the vector (assumed (0,0,0) )
 * dataY for the tip of the vector
 * dataE is assumed (0,0,0), no errors
 * @param h5file file identifier
 * @param gws pointer to WorkspaceGroup being filled
 * @param sorting_indexes permutation of qvmod indexes to render it in
 * increasing order of momentum transfer
 */
const MantidVec
LoadSassena::loadQvectors(const hid_t &h5file, API::WorkspaceGroup_sptr gws,
                          std::vector<int> &sorting_indexes) {
  const std::string gwsName = this->getPropertyValue("OutputWorkspace");
  const std::string setName("qvectors");

  hsize_t dims[3];
  if (dataSetInfo(h5file, setName, dims) < 0) {
    throw Kernel::Exception::FileError(
        "Unable to read " + setName + " dataset info:", m_filename);
  }
  int nq = static_cast<int>(dims[0]); // number of q-vectors
  double *buf = new double[nq * 3];
  this->dataSetDouble(h5file, "qvectors", buf);

  MantidVec qvmod; // store the modulus of the vector
  double *curr = buf;
  for (int iq = 0; iq < nq; iq++) {
    qvmod.push_back(
        sqrt(curr[0] * curr[0] + curr[1] * curr[1] + curr[2] * curr[2]));
    curr += 3;
  }

  if (getProperty("SortByQVectors")) {
    std::vector<mypair> qvmodpair;
    for (int iq = 0; iq < nq; iq++)
      qvmodpair.push_back(mypair(qvmod[iq], iq));
    std::sort(qvmodpair.begin(), qvmodpair.end(), compare);
    for (int iq = 0; iq < nq; iq++)
      sorting_indexes.push_back(qvmodpair[iq].second);
    std::sort(qvmod.begin(), qvmod.end());
  } else {
    for (int iq = 0; iq < nq; iq++)
      sorting_indexes.push_back(iq);
  }

  DataObjects::Workspace2D_sptr ws =
      boost::dynamic_pointer_cast<DataObjects::Workspace2D>(
          API::WorkspaceFactory::Instance().create("Workspace2D", nq, 3, 3));
  std::string wsName = gwsName + std::string("_") + setName;
  ws->setTitle(wsName);

  for (int iq = 0; iq < nq; iq++) {
    MantidVec &Y = ws->dataY(iq);
    const int index = sorting_indexes[iq];
    curr = buf + 3 * index;
    Y.assign(curr, curr + 3);
  }
  delete[] buf;

  ws->getAxis(0)->unit() =
      Kernel::UnitFactory::Instance().create("MomentumTransfer"); // Set the Units
  this->registerWorkspace(
      gws, wsName, ws,
      "X-axis: origin of Q-vectors; Y-axis: tip of Q-vectors");

  return qvmod;
}
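// A small, self-contained sketch of the "sort by modulus and remember the permutation"
// pattern used above. The mypair type and compare predicate belong to LoadSassena; a
// std::pair and a lambda stand in for them here.
#include <algorithm>
#include <cassert>
#include <utility>
#include <vector>

int main() {
  std::vector<double> qvmod = {0.8, 0.2, 0.5};

  std::vector<std::pair<double, int>> keyed;
  for (int i = 0; i < static_cast<int>(qvmod.size()); ++i)
    keyed.push_back({qvmod[i], i});
  std::sort(keyed.begin(), keyed.end(),
            [](const std::pair<double, int> &a, const std::pair<double, int> &b) {
              return a.first < b.first;
            });

  std::vector<int> sortingIndexes;
  for (const std::pair<double, int> &p : keyed)
    sortingIndexes.push_back(p.second); // original position of each sorted value

  std::sort(qvmod.begin(), qvmod.end());
  assert((sortingIndexes == std::vector<int>{1, 2, 0}));
  assert((qvmod == std::vector<double>{0.2, 0.5, 0.8}));
  return 0;
}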
/** Retrieves a vector with all counts that belong to the background
 *
 * In this method, a vector is assembled which contains all count data that is
 * considered to be background.
 * Whether a point is considered background depends on its distance to the
 * given peak positions.
 *
 * @param peakPositions :: Peak positions.
 * @param correlationCounts :: Vector with the complete correlation spectrum.
 * @return Vector only with counts that belong to the background.
 */
MantidVec PoldiPeakSearch::getBackground(
    std::list<MantidVec::const_iterator> peakPositions,
    const MantidVec &correlationCounts) const {
  size_t backgroundPoints =
      getNumberOfBackgroundPoints(peakPositions, correlationCounts);

  MantidVec background;
  background.reserve(backgroundPoints);

  for (MantidVec::const_iterator point = correlationCounts.begin() + 1;
       point != correlationCounts.end() - 1; ++point) {
    if (distanceToPeaksGreaterThanMinimum(peakPositions, point)) {
      background.push_back(*point);
    }
  }

  return background;
}
/**
 * Calculate detector efficiency given a formula, the efficiency at the elastic
 * line, and a vector with energies.
 * Efficiency = f(Ei-DeltaE) / f(Ei)
 * Hopefully all compilers support NRVO (otherwise the output vector will be copied)
 * @param eff0 :: calculated eff0
 * @param formula :: formula to calculate efficiency (parsed from IDF)
 * @param xIn :: Energy bins vector (X axis)
 * @return a vector with the efficiencies
 */
MantidVec DetectorEfficiencyCorUser::calculateEfficiency(
    double eff0, const std::string &formula, const MantidVec &xIn) {
  MantidVec effOut(xIn.size() - 1); // x are bins and have one more value than y

  try {
    double e;
    mu::Parser p;
    p.DefineVar("e", &e);
    p.SetExpr(formula);

    // copied from Jaques Ollivier's code
    bool conditionForEnergy =
        std::min(std::abs(*std::min_element(xIn.begin(), xIn.end())), m_Ei) < m_Ei;

    MantidVec::const_iterator xIn_it = xIn.begin(); // DeltaE
    MantidVec::iterator effOut_it = effOut.begin();
    for (; effOut_it != effOut.end(); ++xIn_it, ++effOut_it) {
      if (conditionForEnergy) {
        // cppcheck cannot see that this is used by reference by muparser
        e = std::fabs(m_Ei + *xIn_it);
      } else {
        // cppcheck cannot see that this is used by reference by muparser
        // cppcheck-suppress unreadVariable
        e = std::fabs(m_Ei - *xIn_it);
      }
      double eff = p.Eval();
      *effOut_it = eff / eff0;
    }
    return effOut;
  } catch (mu::Parser::exception_type &e) {
    throw Kernel::Exception::InstrumentDefinitionError(
        "Error calculating formula from string. Muparser error message is: " +
        e.GetMsg());
  }
}
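// A self-contained sketch of the efficiency formula described above,
// Efficiency(deltaE) = f(Ei - deltaE) / f(Ei), with a plain lambda standing in for the
// formula that is normally parsed from the IDF via muParser. The response function and
// the incident energy below are arbitrary illustrative values.
#include <cmath>
#include <cstdio>
#include <vector>

int main() {
  const double Ei = 25.0; // hypothetical incident energy
  auto f = [](double e) { return std::exp(-0.1 * std::sqrt(e)); }; // hypothetical detector response

  const double eff0 = f(Ei); // efficiency at the elastic line (deltaE = 0)

  const std::vector<double> deltaE = {-5.0, 0.0, 5.0, 10.0};
  for (double dE : deltaE) {
    const double eff = f(Ei - dE) / eff0;
    std::printf("deltaE = %5.1f  ->  relative efficiency = %f\n", dE, eff);
  }
  return 0;
}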
/**
 * Copy over the metadata from the input matrix workspace to the output MDEventWorkspace
 * @param mdEventWS :: The output MDEventWorkspace where metadata are copied to. The source of the metadata is the input matrix workspace
 */
void ConvertToMD::copyMetaData(API::IMDEventWorkspace_sptr &mdEventWS) const {
  // Find a detector which is not a monitor to get proper bin boundaries.
  size_t spectra_index(0);
  bool detector_found(false);
  for (size_t i = 0; i < m_InWS2D->getNumberHistograms(); ++i) {
    try {
      auto det = m_InWS2D->getDetector(i);
      if (!det->isMonitor()) {
        spectra_index = i;
        detector_found = true;
        g_log.debug() << "Using spectrum N " << i
                      << " as the source of the bin boundaries for the "
                         "resolution corrections \n";
        break;
      }
    } catch (...) {
    }
  }
  if (!detector_found)
    g_log.warning() << "No detectors in the workspace are associated with "
                       "spectra. Using spectrum 0 trying to retrieve the bin "
                       "boundaries \n";

  // retrieve representative bin boundaries
  MantidVec binBoundaries = m_InWS2D->readX(spectra_index);
  // check if the boundaries transformation is necessary
  if (m_Convertor->getUnitConversionHelper().isUnitConverted()) {
    if (!dynamic_cast<DataObjects::EventWorkspace *>(m_InWS2D.get())) {
      g_log.information()
          << " ConvertToMD converts input workspace units, but the bin "
             "boundaries are copied from the first workspace spectra. The "
             "resolution estimates can be incorrect if unit conversion depends "
             "on spectra number.\n";
      UnitsConversionHelper &unitConv = m_Convertor->getUnitConversionHelper();
      unitConv.updateConversion(spectra_index);
      for (size_t i = 0; i < binBoundaries.size(); i++) {
        binBoundaries[i] = unitConv.convertUnits(binBoundaries[i]);
      }
    }
    // Sort bin boundaries in case the unit transformation has swapped them.
    if (binBoundaries[0] > binBoundaries[binBoundaries.size() - 1]) {
      g_log.information() << "Bin boundaries are not arranged monotonically. "
                             "Sorting performed\n";
      std::sort(binBoundaries.begin(), binBoundaries.end());
    }
  }

  // Replacement for SpectraDetectorMap::createIDGroupsMap using the ISpectrum
  // objects instead
  auto mapping = boost::make_shared<det2group_map>();
  for (size_t i = 0; i < m_InWS2D->getNumberHistograms(); ++i) {
    const auto &dets = m_InWS2D->getSpectrum(i)->getDetectorIDs();
    if (!dets.empty()) {
      std::vector<detid_t> id_vector;
      std::copy(dets.begin(), dets.end(), std::back_inserter(id_vector));
      mapping->insert(std::make_pair(id_vector.front(), id_vector));
    }
  }

  uint16_t nexpts = mdEventWS->getNumExperimentInfo();
  for (uint16_t i = 0; i < nexpts; ++i) {
    ExperimentInfo_sptr expt = mdEventWS->getExperimentInfo(i);
    expt->mutableRun().storeHistogramBinBoundaries(binBoundaries);
    expt->cacheDetectorGroupings(*mapping);
  }
}
/** Subtracts a constant from the data values in the given workspace
 *  @param Y :: The vector from which to subtract
 *  @param value :: The value to subtract from each data point
 */
void IQTransform::subtractBackgroundValue(MantidVec &Y, const double value) {
  g_log.debug() << "Subtracting the background value " << value
                << " from the input workspace.\n";
  std::transform(Y.begin(), Y.end(), Y.begin(),
                 std::bind2nd(std::minus<double>(), value));
}
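// std::bind2nd is deprecated in C++11 and removed in C++17; a behaviour-equivalent
// sketch of the same subtraction using a lambda (assuming MantidVec is std::vector<double>):
#include <algorithm>
#include <cassert>
#include <vector>

void subtractBackgroundValueLambda(std::vector<double> &Y, const double value) {
  std::transform(Y.begin(), Y.end(), Y.begin(),
                 [value](double y) { return y - value; });
}

int main() {
  std::vector<double> y = {5.0, 6.0, 7.0};
  subtractBackgroundValueLambda(y, 2.0);
  assert((y == std::vector<double>{3.0, 4.0, 5.0}));
  return 0;
}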
void PoldiPeakSearch::exec() {
  g_log.information() << "PoldiPeakSearch:" << std::endl;

  Workspace2D_sptr correlationWorkspace = getProperty("InputWorkspace");
  MantidVec correlationQValues = correlationWorkspace->readX(0);
  MantidVec correlatedCounts = correlationWorkspace->readY(0);
  g_log.information() << " Auto-correlation data read." << std::endl;

  Unit_sptr xUnit = correlationWorkspace->getAxis(0)->unit();

  if (xUnit->caption() == "") {
    g_log.information()
        << " Workspace does not have a unit, defaulting to MomentumTransfer."
        << std::endl;

    xUnit = UnitFactory::Instance().create("MomentumTransfer");
  } else {
    g_log.information() << " Unit of workspace is " << xUnit->caption() << "."
                        << std::endl;
  }

  setMinimumDistance(getProperty("MinimumPeakSeparation"));
  setMinimumPeakHeight(getProperty("MinimumPeakHeight"));
  setMaximumPeakNumber(getProperty("MaximumPeakNumber"));

  if (m_doubleMinimumDistance > static_cast<int>(correlatedCounts.size())) {
    throw(std::runtime_error("MinimumPeakSeparation is larger than the number "
                             "of spectrum points - no peaks possible."));
  }

  g_log.information() << " Parameters set." << std::endl;

  MantidVec summedNeighborCounts = getNeighborSums(correlatedCounts);
  g_log.information() << " Neighboring counts summed, contains "
                      << summedNeighborCounts.size() << " data points."
                      << std::endl;

  std::list<MantidVec::const_iterator> peakPositionsSummed =
      findPeaks(summedNeighborCounts.begin(), summedNeighborCounts.end());
  g_log.information() << " Peaks detected in summed spectrum: "
                      << peakPositionsSummed.size() << std::endl;

  /* This step is required because peaks are actually searched in the
   * "sum-of-neighbors"-spectrum.
   * The mapping removes the offset from the peak position which results from
   * the different beginning of this vector compared to the original
   * correlation counts.
   */
  std::list<MantidVec::const_iterator> peakPositionsCorrelation =
      mapPeakPositionsToCorrelationData(peakPositionsSummed,
                                        summedNeighborCounts.begin(),
                                        correlatedCounts.begin());
  g_log.information() << " Peak positions transformed to original spectrum."
                      << std::endl;

  /* Since intensities are required for filtering, they are extracted from the
   * original count data, along with the Q-values.
   */
  std::vector<PoldiPeak_sptr> peakCoordinates =
      getPeaks(correlatedCounts.begin(), correlatedCounts.end(),
               peakPositionsCorrelation, correlationQValues, xUnit);
  g_log.information()
      << " Extracted peak positions in Q and intensity guesses." << std::endl;

  UncertainValue backgroundWithSigma =
      getBackgroundWithSigma(peakPositionsCorrelation, correlatedCounts);
  g_log.information() << " Calculated average background and deviation: "
                      << UncertainValueIO::toString(backgroundWithSigma)
                      << std::endl;

  if ((*getProperty("MinimumPeakHeight")).isDefault()) {
    setMinimumPeakHeight(minimumPeakHeightFromBackground(backgroundWithSigma));
  }

  std::vector<PoldiPeak_sptr> intensityFilteredPeaks(peakCoordinates.size());
  auto newEnd = std::remove_copy_if(
      peakCoordinates.begin(), peakCoordinates.end(),
      intensityFilteredPeaks.begin(),
      boost::bind(&PoldiPeakSearch::isLessThanMinimum, this, _1));
  intensityFilteredPeaks.resize(
      std::distance(intensityFilteredPeaks.begin(), newEnd));

  g_log.information() << " Peaks above minimum intensity ("
                      << m_minimumPeakHeight
                      << "): " << intensityFilteredPeaks.size() << std::endl;

  std::sort(intensityFilteredPeaks.begin(), intensityFilteredPeaks.end(),
            boost::bind<bool>(&PoldiPeak::greaterThan, _1, _2,
                              &PoldiPeak::intensity));

  for (std::vector<PoldiPeak_sptr>::const_iterator peak =
           intensityFilteredPeaks.begin();
       peak != intensityFilteredPeaks.end(); ++peak) {
    m_peaks->addPeak(*peak);
  }

  /* The derived background error is set as error in the workspace containing
   * correlation data, so it may be used as weights for peak fitting later on.
   */
  setErrorsOnWorkspace(correlationWorkspace, backgroundWithSigma.error());

  setProperty("OutputWorkspace", m_peaks->asTableWorkspace());
}
void LoadDaveGrp::exec() {
  const std::string filename = this->getProperty("Filename");

  int yLength = 0;

  MantidVec *xAxis = new MantidVec();
  MantidVec *yAxis = new MantidVec();

  std::vector<MantidVec *> data;
  std::vector<MantidVec *> errors;

  this->ifile.open(filename.c_str());
  if (this->ifile.is_open()) {
    // Size of x axis
    this->getAxisLength(this->xLength);
    // Size of y axis
    this->getAxisLength(yLength);
    // This is also the number of groups (spectra)
    this->nGroups = yLength;
    // Read in the x axis values
    this->getAxisValues(xAxis, static_cast<std::size_t>(this->xLength));
    // Read in the y axis values
    this->getAxisValues(yAxis, static_cast<std::size_t>(yLength));
    // Read in the data
    this->getData(data, errors);
  }
  this->ifile.close();

  // Scale the x-axis if it is in micro-eV to get it to meV
  const bool isUeV = this->getProperty("IsMicroEV");
  if (isUeV) {
    MantidVec::iterator iter;
    for (iter = xAxis->begin(); iter != xAxis->end(); ++iter) {
      *iter /= 1000.0;
    }
  }

  // Create workspace
  API::MatrixWorkspace_sptr outputWorkspace =
      boost::dynamic_pointer_cast<API::MatrixWorkspace>(
          API::WorkspaceFactory::Instance().create("Workspace2D", this->nGroups,
                                                   this->xLength, yLength));
  // Force the workspace to be a distribution
  outputWorkspace->isDistribution(true);

  // Set the x-axis units
  outputWorkspace->getAxis(0)->unit() =
      Kernel::UnitFactory::Instance().create(this->getProperty("XAxisUnits"));

  API::Axis *const verticalAxis = new API::NumericAxis(yLength);
  // Set the y-axis units
  verticalAxis->unit() =
      Kernel::UnitFactory::Instance().create(this->getProperty("YAxisUnits"));

  outputWorkspace->replaceAxis(1, verticalAxis);

  for (int i = 0; i < this->nGroups; i++) {
    outputWorkspace->dataX(i) = *xAxis;
    outputWorkspace->dataY(i) = *data[i];
    outputWorkspace->dataE(i) = *errors[i];
    verticalAxis->setValue(i, yAxis->at(i));

    delete data[i];
    delete errors[i];
  }

  delete xAxis;
  delete yAxis;

  outputWorkspace->mutableRun().addProperty("Filename", filename);
  this->setProperty("OutputWorkspace", outputWorkspace);
}
/** Finds the index of the first value in an ordered vector that is not less than the given value
 *  @param value :: The value to search for
 *  @param vec :: The vector to search
 *  @return The index (will give vec.size() if the value is past the end of the vector)
 */
int RemoveBins::findIndex(const double &value, const MantidVec &vec) {
  MantidVec::const_iterator pos =
      std::lower_bound(vec.begin(), vec.end(), value);
  return static_cast<int>(pos - vec.begin());
}
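// A small, standalone illustration (assuming MantidVec is std::vector<double>) of what
// findIndex returns: the index of the first element not less than the value, or
// vec.size() when the value lies beyond the last element.
#include <algorithm>
#include <cassert>
#include <vector>

int main() {
  const std::vector<double> edges = {0.0, 1.0, 2.0, 3.0};
  auto findIndex = [&edges](double value) {
    return static_cast<int>(std::lower_bound(edges.begin(), edges.end(), value) -
                            edges.begin());
  };
  assert(findIndex(1.5) == 2); // first edge >= 1.5 is 2.0, at index 2
  assert(findIndex(2.0) == 2); // exact matches return the matching index
  assert(findIndex(9.0) == 4); // past the end: edges.size()
  return 0;
}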