void AnalysisDataVectorPlotModule::pointsAdded(const AnalysisDataPointSetRef &points) { if (points.firstColumn() % DIM != 0 || points.columnCount() % DIM != 0) { GMX_THROW(APIError("Partial data points")); } if (!isFileOpen()) { return; } for (int i = 0; i < points.columnCount(); i += 3) { for (int d = 0; d < DIM; ++d) { if (bWrite_[i]) { writeValue(points.values()[i + d]); } } if (bWrite_[DIM]) { const rvec y = { points.y(i), points.y(i + 1), points.y(i + 2) }; AnalysisDataValue value(norm(y)); writeValue(value); } } }
/*! \brief
 * Stores the incoming coordinate values into the history buffer at the
 * current cursor position.
 *
 * Throws APIError if the point set does not cover whole ndim-component
 * groups.
 */
void AnalysisDataDisplacementModule::pointsAdded(const AnalysisDataPointSetRef &points)
{
    // Values must arrive as complete ndim-component groups.
    const int ndim = _impl->ndim;
    if (points.firstColumn() % ndim != 0 || points.columnCount() % ndim != 0)
    {
        GMX_THROW(APIError("Partial data points"));
    }
    // Copy the new values into the circular history buffer, offset by the
    // current write cursor and the starting column of this point set.
    const int base = _impl->ci + points.firstColumn();
    for (int col = 0; col < points.columnCount(); ++col)
    {
        _impl->oldval[base + col] = points.y(col);
    }
}
/*! \brief
 * Accumulates all present values from \p points into the per-column
 * averages.
 */
void AnalysisDataFrameAverager::addPoints(const AnalysisDataPointSetRef &points)
{
    const int begin = points.firstColumn();
    const int count = points.columnCount();
    // Sanity check: the incoming columns must fit in the storage that was
    // set up at initialization time.
    GMX_ASSERT(static_cast<size_t>(begin + count) <= values_.size(),
               "Initialized with too few columns");
    // Only present (non-missing) values contribute to the averages.
    for (int col = 0; col < count; ++col)
    {
        if (points.present(col))
        {
            addValue(begin + col, points.y(col));
        }
    }
}
void AnalysisDataAverageModule::pointsAdded(const AnalysisDataPointSetRef &points) { int firstcol = points.firstColumn(); for (int i = 0; i < points.columnCount(); ++i) { if (points.present(i)) { real y = points.y(i); value(firstcol + i, 0) += y; value(firstcol + i, 1) += y * y; nsamples_[firstcol + i] += 1; } } }
void AnalysisDataFrameAverageModule::pointsAdded(const AnalysisDataPointSetRef &points) { AnalysisDataStorageFrame &frame = impl_->storage_.currentFrame(points.frameIndex()); for (int i = 0; i < points.columnCount(); ++i) { if (points.present(i)) { const real y = points.y(i); frame.value(0) += y; impl_->sampleCount_ += 1; } } }
void AnalysisDataFrameAverageModule::pointsAdded(const AnalysisDataPointSetRef &points) { const int dataSet = points.dataSetIndex(); AnalysisDataStorageFrame &frame = impl_->storage_.currentFrame(points.frameIndex()); for (int i = 0; i < points.columnCount(); ++i) { if (points.present(i)) { // TODO: Consider using AnalysisDataFrameAverager const real y = points.y(i); const real delta = y - frame.value(dataSet); impl_->sampleCount_[dataSet] += 1; frame.value(dataSet) += delta / impl_->sampleCount_[dataSet]; } } }
/*! \brief
 * Routes incoming values to the averagers, either per data set (all
 * columns of a set pooled into one slot) or per column.
 */
void AnalysisDataAverageModule::pointsAdded(const AnalysisDataPointSetRef &points)
{
    if (!impl_->bDataSets_)
    {
        // Column-wise averaging: the averager for this data set consumes
        // the whole point set directly.
        impl_->averagers_[points.dataSetIndex()].addPoints(points);
        return;
    }
    // Data-set-wise averaging: every present column of this set feeds a
    // single slot of the shared averager.
    const int set = points.dataSetIndex();
    for (int col = 0; col < points.columnCount(); ++col)
    {
        if (points.present(col))
        {
            impl_->averagers_[0].addValue(set, points.y(col));
        }
    }
}
void AnalysisDataLifetimeModule::pointsAdded(const AnalysisDataPointSetRef &points) { const int dataSet = points.dataSetIndex(); // This assumption is strictly not necessary, but this is how the // framework works currently, and makes the code below simpler. GMX_ASSERT(points.firstColumn() == 0 && points.lastColumn() == static_cast<int>(impl_->currentLifetimes_[dataSet].size()) - 1, "Point set should cover all columns"); for (int i = 0; i < points.columnCount(); ++i) { // TODO: Perhaps add control over how this is determined? const bool bPresent = points.present(i) && points.y(i) > 0.0; if (bPresent) { ++impl_->currentLifetimes_[dataSet][i]; } else if (impl_->currentLifetimes_[dataSet][i] > 0) { impl_->addLifetime(dataSet, impl_->currentLifetimes_[dataSet][i]); impl_->currentLifetimes_[dataSet][i] = 0; } } }