// Analysis_CrossCorr::Setup() Analysis::RetType Analysis_CrossCorr::Setup(ArgList& analyzeArgs, AnalysisSetup& setup, int debugIn) { std::string setname = analyzeArgs.GetStringKey("name"); outfile_ = setup.DFL().AddDataFile(analyzeArgs.GetStringKey("out"), analyzeArgs); // Select datasets from remaining args if (input_dsets_.AddSetsFromArgs( analyzeArgs.RemainingArgs(), setup.DSL() )) { mprinterr("Error: Could not add data sets.\n"); return Analysis::ERR; } if (input_dsets_.size() < 2) { mprinterr("Error: At least 2 data sets are required.\n"); return Analysis::ERR; } // Setup output dataset matrix_ = setup.DSL().AddSet( DataSet::MATRIX_FLT, setname, "crosscorr" ); if (outfile_ != 0) { matrix_->SetDim(Dimension::X, Dimension(1.0, 1.0, "DataSets")); outfile_->AddDataSet( matrix_ ); } mprintf(" CROSSCORR: Calculating correlation between %zu data sets:\n", input_dsets_.size()); for (Array1D::const_iterator ds = input_dsets_.begin(); ds != input_dsets_.end(); ++ds) mprintf("\t'%s'\n", (*ds)->legend()); mprintf("\tOutput set name: %s\n", matrix_->Meta().Name().c_str() ); if ( outfile_ != 0 ) mprintf("\tOutfile name: %s\n", outfile_->DataFilename().full()); return Analysis::OK; }
// Exec_CrdAction::ProcessArgs() Exec::RetType Exec_CrdAction::ProcessArgs(CpptrajState& State, ArgList& argIn) { std::string setname = argIn.GetStringNext(); if (setname.empty()) { mprinterr("Error: %s: Specify COORDS dataset name.\n", argIn.Command()); return CpptrajState::ERR; } DataSet_Coords* CRD = (DataSet_Coords*)State.DSL().FindCoordsSet( setname ); if (CRD == 0) { mprinterr("Error: %s: No COORDS set with name %s found.\n", argIn.Command(), setname.c_str()); return CpptrajState::ERR; } mprintf("\tUsing set '%s'\n", CRD->legend()); // Start, stop, offset TrajFrameCounter frameCount; ArgList crdarg( argIn.GetStringKey("crdframes"), "," ); if (frameCount.CheckFrameArgs( CRD->Size(), crdarg )) return CpptrajState::ERR; frameCount.PrintInfoLine(CRD->legend()); ArgList actionargs = argIn.RemainingArgs(); actionargs.MarkArg(0); Cmd const& cmd = Command::SearchTokenType( DispatchObject::ACTION, actionargs.Command() ); if ( cmd.Empty() ) return CpptrajState::ERR; Action* act = (Action*)cmd.Alloc(); if (act == 0) return CpptrajState::ERR; CpptrajState::RetType err = DoCrdAction(State, actionargs, CRD, act, frameCount); delete act; return err; }
// Action_Average::init() Action::RetType Action_Average::Init(ArgList& actionArgs, TopologyList* PFL, FrameList* FL, DataSetList* DSL, DataFileList* DFL, int debugIn) { debug_ = debugIn; // Get Keywords avgfilename_ = actionArgs.GetStringNext(); if (avgfilename_.empty()) { mprinterr("Error: average: No filename given.\n"); return Action::ERR; } // Get start/stop/offset args if (InitFrameCounter(actionArgs)) return Action::ERR; // Get Masks Mask1_.SetMaskString( actionArgs.GetMaskNext() ); // Save all remaining arguments for setting up the trajectory at the end. trajArgs_ = actionArgs.RemainingArgs(); mprintf(" AVERAGE: Averaging over coordinates in mask [%s]\n",Mask1_.MaskString()); FrameCounterInfo(); mprintf("\tWriting averaged coords to [%s]\n",avgfilename_.c_str()); Nframes_ = 0; return Action::OK; }
// Action_Outtraj::Init() Action::RetType Action_Outtraj::Init(ArgList& actionArgs, ActionInit& init, int debugIn) { // Set up output traj outtraj_.SetDebug(debugIn); std::string trajfilename = actionArgs.GetStringNext(); if (trajfilename.empty()) { mprinterr("Error: No filename given.\nError: Usage: "); Help(); return Action::ERR; } associatedParm_ = init.DSL().GetTopology(actionArgs); if (associatedParm_ == 0) { mprinterr("Error: Could not get associated topology for %s\n",trajfilename.c_str()); return Action::ERR; } // If maxmin, get the name of the dataset as well as the max and min values. double lastmin = 0.0; double lastmax = 0.0; while ( actionArgs.Contains("maxmin") ) { std::string datasetName = actionArgs.GetStringKey("maxmin"); if (!datasetName.empty()) { DataSet* dset = init.DSL().GetDataSet(datasetName); if (dset==0) { mprintf("Error: maxmin: Could not get dataset %s\n",datasetName.c_str()); return Action::ERR; } else { // Currently only allow int, float, or double datasets if (dset->Type() != DataSet::INTEGER && dset->Type() != DataSet::FLOAT && dset->Type() != DataSet::DOUBLE) { mprinterr("Error: maxmin: Only int, float, or double dataset (%s) supported.\n", datasetName.c_str()); return Action::ERR; } Dsets_.push_back( (DataSet_1D*)dset ); Max_.push_back( actionArgs.getKeyDouble("max",lastmax) ); Min_.push_back( actionArgs.getKeyDouble("min",lastmin) ); lastmax = Max_.back(); lastmin = Min_.back(); } } else { mprinterr("Error: maxmin Usage: maxmin <setname> max <max> min <min>\n"); return Action::ERR; } } // Initialize output trajectory with remaining arguments if ( outtraj_.InitEnsembleTrajWrite(trajfilename, actionArgs.RemainingArgs(), TrajectoryFile::UNKNOWN_TRAJ, init.DSL().EnsembleNum()) ) return Action::ERR; isSetup_ = false; mprintf(" OUTTRAJ: Writing frames associated with topology '%s'\n", associatedParm_->c_str()); for (unsigned int ds = 0; ds < Dsets_.size(); ++ds) mprintf("\tmaxmin: Printing trajectory frames based on %g <= %s <= %g\n", 
Min_[ds], Dsets_[ds]->legend(), Max_[ds]); return Action::OK; }
// Action_FilterByData::Init() Action::RetType Action_FilterByData::Init(ArgList& actionArgs, ActionInit& init, int debugIn) { maxmin_ = init.DSL().AddSet( DataSet::INTEGER, actionArgs.GetStringKey("name"), "Filter" ); if (maxmin_ == 0) return Action::ERR; DataFile* maxminfile = init.DFL().AddDataFile( actionArgs.GetStringKey("out"), actionArgs ); if (maxminfile != 0) maxminfile->AddDataSet( maxmin_ ); // Get min and max args. while (actionArgs.Contains("min")) Min_.push_back( actionArgs.getKeyDouble("min", 0.0) ); while (actionArgs.Contains("max")) Max_.push_back( actionArgs.getKeyDouble("max", 0.0) ); if (Min_.empty()) { mprinterr("Error: At least one 'min' arg must be specified.\n"); return Action::ERR; } if (Max_.empty()) { mprinterr("Error: At least one 'max' arg must be specified.\n"); return Action::ERR; } if (Min_.size() != Max_.size()) { mprinterr("Error: # of 'min' args (%zu) != # of 'max' args (%zu)\n", Min_.size(), Max_.size()); return Action::ERR; } // Get DataSets from remaining arguments Dsets_.AddSetsFromArgs( actionArgs.RemainingArgs(), init.DSL() ); if (Dsets_.empty()) { mprinterr("Error: No data sets specified.\n"); return Action::ERR; } if ( Dsets_.size() < Min_.size() ) { mprinterr("Error: More 'min'/'max' args (%zu) than data sets (%zu).\n", Min_.size(), Dsets_.size()); return Action::ERR; } if ( Dsets_.size() > Min_.size() ) { unsigned int Nremaining = Dsets_.size() - Min_.size(); double useMin = Min_.back(); double useMax = Max_.back(); mprintf("Warning: More data sets than 'min'/'max' args.\n" "Warning: Using min=%f and max=%f for last %zu data sets.\n", useMin, useMax, Nremaining); for (unsigned int ds = 0; ds < Nremaining; ++ds) { Min_.push_back( useMin ); Max_.push_back( useMax ); } } mprintf(" FILTER: Filtering out frames using %zu data sets.\n", Dsets_.size()); for (unsigned int ds = 0; ds < Dsets_.size(); ds++) mprintf("\t%.4f < '%s' < %.4f\n", Min_[ds], Dsets_[ds]->legend(), Max_[ds]); if (maxminfile != 0) mprintf("\tFilter frame 
info will be written to %s\n", maxminfile->DataFilename().full()); return Action::OK; }
/** Set up auto-correlation/covariance calculation (older DataSetList* API).
  * Keywords: 'name', 'out', 'lagmax', 'nocovar', 'direct'; remaining args
  * select input data sets. One DOUBLE output set is created per input set.
  */
Analysis::RetType Analysis_AutoCorr::Setup(ArgList& analyzeArgs, DataSetList* datasetlist, TopologyList* PFLin, DataFileList* DFLin, int debugIn)
{
  const char* calctype;
  std::string setname = analyzeArgs.GetStringKey("name");
  DataFile* outfile = DFLin->AddDataFile( analyzeArgs.GetStringKey("out"), analyzeArgs );
  lagmax_ = analyzeArgs.getKeyInt("lagmax",-1);
  calc_covar_ = !analyzeArgs.hasKey("nocovar");
  usefft_ = !analyzeArgs.hasKey("direct");
  // Select datasets from remaining args
  ArgList dsetArgs = analyzeArgs.RemainingArgs();
  for (ArgList::const_iterator dsa = dsetArgs.begin(); dsa != dsetArgs.end(); ++dsa)
    dsets_ += datasetlist->GetMultipleSets( *dsa );
  if (dsets_.empty()) {
    mprinterr("Error: autocorr: No data sets selected.\n");
    return Analysis::ERR;
  }
  // If setname is empty generate a default name
  if (setname.empty())
    setname = datasetlist->GenerateDefaultName( "autocorr" );
  // Setup output datasets
  int idx = 0;
  MetaData md( setname );
  for (DataSetList::const_iterator DS = dsets_.begin(); DS != dsets_.end(); ++DS) {
    md.SetIdx( idx++ );
    DataSet* dsout = datasetlist->AddSet( DataSet::DOUBLE, md );
    if (dsout==0) return Analysis::ERR;
    dsout->SetLegend( (*DS)->Meta().Legend() );
    outputData_.push_back( dsout );
    // Add set to output file
    if (outfile != 0) outfile->AddDataSet( outputData_.back() );
  }
  if (calc_covar_)
    calctype = "covariance";
  else
    calctype = "correlation";
  // FIX: size() returns size_t; cast to int to match the %i conversion
  // (passing size_t through %i is a varargs type mismatch).
  mprintf(" AUTOCORR: Calculating auto-%s for %i data sets:\n", calctype, (int)dsets_.size());
  dsets_.List();
  if (lagmax_!=-1)
    mprintf("\tLag max= %i\n", lagmax_);
  if ( !setname.empty() )
    mprintf("\tSet name: %s\n", setname.c_str() );
  if ( outfile != 0 )
    mprintf("\tOutfile name: %s\n", outfile->DataFilename().base());
  if (usefft_)
    mprintf("\tUsing FFT to calculate %s.\n", calctype);
  else
    mprintf("\tUsing direct method to calculate %s.\n", calctype);
  return Analysis::OK;
}
// Action_LESsplit::Init() Action::RetType Action_LESsplit::Init(ArgList& actionArgs, ActionInit& init, int debugIn) { if (init.DSL().EnsembleNum() > -1) { mprinterr("Error: LESSPLIT currently cannot be used in ensemble mode.\n"); return Action::ERR; } trajfilename_ = actionArgs.GetStringKey("out"); avgfilename_ = actionArgs.GetStringKey("average"); lesSplit_ = !trajfilename_.empty(); lesAverage_ = !avgfilename_.empty(); if (!lesSplit_ && !lesAverage_) { mprinterr("Error: Must specify at least 'out <prefix>' or 'average <name>'.\n"); return Action::ERR; } trajArgs_ = actionArgs.RemainingArgs(); mprintf(" LESSPLIT:\n"); if (lesSplit_) mprintf("\tSplit output to '%s.X'\n", trajfilename_.c_str()); if (lesAverage_) mprintf("\tAverage output to '%s'\n", avgfilename_.c_str()); return Action::OK; }
// Analysis_FFT::Setup() Analysis::RetType Analysis_FFT::Setup(ArgList& analyzeArgs, DataSetList* datasetlist, DataFileList* DFLin, int debugIn) { std::string setname = analyzeArgs.GetStringKey("name"); DataFile* outfile = DFLin->AddDataFile(analyzeArgs.GetStringKey("out"), analyzeArgs); dt_ = analyzeArgs.getKeyDouble("dt",1.0); // Select datasets from remaining args if (input_dsets_.AddSetsFromArgs( analyzeArgs.RemainingArgs(), *datasetlist )) { mprinterr("Error: Could not add data sets.\n"); return Analysis::ERR; } if (input_dsets_.empty()) { mprinterr("Error: No input data sets.\n"); return Analysis::ERR; } // If setname is empty generate a default name if (setname.empty()) setname = datasetlist->GenerateDefaultName( "FFT" ); // Setup output datasets. int idx = 0; if ( input_dsets_.size() == 1 ) idx = -1; // Only one input set, no need to refer to it by index for ( Array1D::const_iterator DS = input_dsets_.begin(); DS != input_dsets_.end(); ++DS) { DataSet* dsout = datasetlist->AddSet( DataSet::DOUBLE, MetaData(setname, idx++) ); if (dsout==0) return Analysis::ERR; dsout->SetLegend( (*DS)->Meta().Legend() ); output_dsets_.push_back( (DataSet_1D*)dsout ); if (outfile != 0) outfile->AddDataSet( dsout ); } mprintf(" FFT: Calculating FFT for %u data sets.\n", input_dsets_.size()); mprintf("\tTime step: %f\n", dt_); if ( !setname.empty() ) mprintf("\tSet name: %s\n", setname.c_str() ); if ( outfile != 0 ) mprintf("\tOutfile name: %s\n", outfile->DataFilename().base()); return Analysis::OK; }
/** Set up integration of one or more 1D data sets.
  * Keywords: 'name', 'out'; remaining args select the input sets. Output
  * mesh sets holding the cumulative integrals are only created when an
  * output file was requested.
  */
Analysis::RetType Analysis_Integrate::Setup(ArgList& analyzeArgs, AnalysisSetup& setup, int debugIn)
{
  std::string setname = analyzeArgs.GetStringKey("name");
  outfile_ = setup.DFL().AddDataFile(analyzeArgs.GetStringKey("out"), analyzeArgs);
  // Select datasets from remaining args
  if (input_dsets_.AddSetsFromArgs( analyzeArgs.RemainingArgs(), setup.DSL() )) {
    mprinterr("Error: Could not add data sets.\n");
    return Analysis::ERR;
  }
  if (input_dsets_.empty()) {
    mprinterr("Error: No input data sets.\n");
    return Analysis::ERR;
  }
  // Set up output datasets
  if (outfile_ != 0) {
    for (Array1D::const_iterator dsIn = input_dsets_.begin(); dsIn != input_dsets_.end(); ++dsIn)
    {
      DataSet* ds = setup.DSL().AddSet(DataSet::XYMESH, setname, "Int");
      if (ds == 0) return Analysis::ERR;
      ds->SetLegend( "Int(" + (*dsIn)->Meta().Legend() + ")" );
      outfile_->AddDataSet( ds );
      output_dsets_.push_back( (DataSet_Mesh*)ds );
    }
  }
  // FIX: size() returns size_t; cast to int to match the %i conversion.
  mprintf(" INTEGRATE: Calculating integral of %i data sets.\n", (int)input_dsets_.size());
  if (outfile_ != 0) {
    if (!setname.empty())
      mprintf("\tOutput set name: %s\n", setname.c_str());
    mprintf("\tOutfile name: %s\n", outfile_->DataFilename().base());
  }
  //for (Array1D::const_iterator set = input_dsets_.begin(); set != input_dsets_.end(); ++set)
  //  mprintf("\t%s\n", (*set)->legend());
  return Analysis::OK;
}
/** Set up auto-correlation/covariance calculation (AnalysisSetup API).
  * Keywords: 'name', 'out', 'lagmax', 'nocovar', 'direct'; remaining args
  * select input sets. Only scalar 1D and vector sets are accepted; other
  * set types are skipped with a warning.
  */
Analysis::RetType Analysis_AutoCorr::Setup(ArgList& analyzeArgs, AnalysisSetup& setup, int debugIn)
{
  const char* calctype;
  std::string setname = analyzeArgs.GetStringKey("name");
  DataFile* outfile = setup.DFL().AddDataFile( analyzeArgs.GetStringKey("out"), analyzeArgs );
  lagmax_ = analyzeArgs.getKeyInt("lagmax",-1);
  calc_covar_ = !analyzeArgs.hasKey("nocovar");
  usefft_ = !analyzeArgs.hasKey("direct");
  // Select datasets from remaining args
  dsets_.clear();
  ArgList dsetArgs = analyzeArgs.RemainingArgs();
  for (ArgList::const_iterator dsa = dsetArgs.begin(); dsa != dsetArgs.end(); ++dsa) {
    DataSetList setsIn = setup.DSL().GetMultipleSets( *dsa );
    for (DataSetList::const_iterator ds = setsIn.begin(); ds != setsIn.end(); ++ds) {
      // Only scalar 1D and vector sets are supported.
      if ( (*ds)->Group() != DataSet::SCALAR_1D && (*ds)->Type() != DataSet::VECTOR )
        mprintf("Warning: Set '%s' type not supported in AUTOCORR - skipping.\n", (*ds)->legend());
      else
        dsets_.push_back( *ds );
    }
  }
  if (dsets_.empty()) {
    mprinterr("Error: No data sets selected.\n");
    return Analysis::ERR;
  }
  // If setname is empty generate a default name
  if (setname.empty())
    setname = setup.DSL().GenerateDefaultName( "autocorr" );
  // Setup output datasets
  MetaData md( setname );
  for (unsigned int idx = 0; idx != dsets_.size(); idx++) {
    md.SetIdx( idx );
    DataSet* dsout = setup.DSL().AddSet( DataSet::DOUBLE, md );
    if (dsout==0) return Analysis::ERR;
    dsout->SetLegend( dsets_[idx]->Meta().Legend() );
    outputData_.push_back( dsout );
    // Add set to output file
    if (outfile != 0) outfile->AddDataSet( outputData_.back() );
  }
  if (calc_covar_)
    calctype = "covariance";
  else
    calctype = "correlation";
  // FIX: size() returns size_t; cast to int to match the %i conversion.
  mprintf(" AUTOCORR: Calculating auto-%s for %i data sets:\n\t", calctype, (int)dsets_.size());
  for (unsigned int idx = 0; idx != dsets_.size(); ++idx)
    mprintf(" %s", dsets_[idx]->legend());
  mprintf("\n");
  if (lagmax_!=-1)
    mprintf("\tLag max= %i\n", lagmax_);
  if ( !setname.empty() )
    mprintf("\tSet name: %s\n", setname.c_str() );
  if ( outfile != 0 )
    mprintf("\tOutfile name: %s\n", outfile->DataFilename().base());
  if (usefft_)
    mprintf("\tUsing FFT to calculate %s.\n", calctype);
  else
    mprintf("\tUsing direct method to calculate %s.\n", calctype);
  return Analysis::OK;
}
// Analysis_TI::Setup()
/** Set up thermodynamic integration analysis.
  * Integration: Gaussian quadrature ('nq <N>') or trapezoid rule (default).
  * Averaging of <DV/DL>: all points (default), skipping initial points
  * ('nskip <n1,n2,...>'), increasing increments ('avgincrement'), or
  * bootstrap resampling ('bs_samples' etc). 'xvals' supplies explicit
  * lambda abscissas; remaining arguments select one DV/DL set per lambda.
  */
Analysis::RetType Analysis_TI::Setup(ArgList& analyzeArgs, AnalysisSetup& setup, int debugIn)
{
  debug_ = debugIn;
  int nq = analyzeArgs.getKeyInt("nq", 0);
  ArgList nskipArg(analyzeArgs.GetStringKey("nskip"), ","); // Comma-separated
  avg_increment_ = analyzeArgs.getKeyInt("avgincrement", -1);
  avg_max_ = analyzeArgs.getKeyInt("avgmax", -1);
  avg_skip_ = analyzeArgs.getKeyInt("avgskip", 0);
  n_bootstrap_pts_ = analyzeArgs.getKeyInt("bs_pts", -1);
  n_bootstrap_samples_ = analyzeArgs.getKeyInt("bs_samples", 0);
  bootstrap_seed_ = analyzeArgs.getKeyInt("bs_seed", -1);
  bootstrap_fac_ = analyzeArgs.getKeyDouble("bs_fac", 0.75);
  // Choose the averaging mode; 'nskip' takes precedence over 'avgincrement',
  // which takes precedence over bootstrap; default is a plain average.
  if (!nskipArg.empty()) {
    avgType_ = SKIP;
    // Specified numbers of points to skip; negative values are clamped to 0.
    nskip_.clear();
    for (int i = 0; i != nskipArg.Nargs(); i++) {
      nskip_.push_back( nskipArg.getNextInteger(0) );
      if (nskip_.back() < 0) nskip_.back() = 0;
    }
  } else if (avg_increment_ > 0)
    avgType_ = INCREMENT;
  else if (n_bootstrap_samples_ > 0)
    avgType_ = BOOTSTRAP;
  else
    avgType_ = AVG;
  masterDSL_ = setup.DslPtr();
  // Get lambda values
  ArgList xArgs(analyzeArgs.GetStringKey("xvals"), ","); // Also comma-separated
  if (!xArgs.empty()) {
    xval_.clear();
    for (int i = 0; i != xArgs.Nargs(); i++)
      xval_.push_back( xArgs.getNextDouble(0.0) );
  }
  std::string setname = analyzeArgs.GetStringKey("name");
  DataFile* outfile = setup.DFL().AddDataFile(analyzeArgs.GetStringKey("out"), analyzeArgs);
  curveout_ = setup.DFL().AddDataFile(analyzeArgs.GetStringKey("curveout"), analyzeArgs);
  // Select datasets from remaining args
  if (input_dsets_.AddSetsFromArgs( analyzeArgs.RemainingArgs(), setup.DSL() )) {
    mprinterr("Error: Could not add data sets.\n");
    return Analysis::ERR;
  }
  if (input_dsets_.empty()) {
    mprinterr("Error: No input data sets.\n");
    return Analysis::ERR;
  }
  // May fill in default quadrature abscissas/weights (xval_/wgt_) for nq.
  if (SetQuadAndWeights(nq)) return Analysis::ERR;
  // Determine integration mode
  if (nq > 0)
    mode_ = GAUSSIAN_QUAD;
  else
    mode_ = TRAPEZOID;
  // Check that # abscissas matches # data sets
  // NOTE(review): the message prints the input set count as "Expected" and
  // the abscissa count as "got" - verify the intended ordering.
  if (xval_.size() != input_dsets_.size()) {
    mprinterr("Error: Expected %zu data sets for integration, got %zu\n",
              input_dsets_.size(), xval_.size());
    return Analysis::ERR;
  }
  // Set up output data sets
  // SKIP/INCREMENT produce one free-energy value per skip/increment value,
  // so the result set is a mesh rather than a single double.
  DataSet::DataType dtype = DataSet::DOUBLE;
  if (avgType_ == SKIP || avgType_ == INCREMENT)
    dtype = DataSet::XYMESH;
  dAout_ = setup.DSL().AddSet(dtype, setname, "TI");
  if (dAout_ == 0) return Analysis::ERR;
  if (outfile != 0) outfile->AddDataSet( dAout_ );
  MetaData md(dAout_->Meta().Name(), "TIcurve");
  if (avgType_ == AVG) {
    // Single curve
    curve_.push_back( setup.DSL().AddSet(DataSet::XYMESH, md) );
    if (curve_.back() == 0) return Analysis::ERR;
    curve_.back()->ModifyDim(Dimension::X).SetLabel("Lambda");
    if (curveout_ != 0) curveout_->AddDataSet( curve_.back() );
    if (outfile != 0) outfile->ProcessArgs("noxcol");
  } else if (avgType_ == SKIP) {
    // As many curves as skip values
    for (Iarray::const_iterator it = nskip_.begin(); it != nskip_.end(); ++it) {
      md.SetIdx( *it );
      DataSet* ds = setup.DSL().AddSet(DataSet::XYMESH, md);
      if (ds == 0) return Analysis::ERR;
      ds->ModifyDim(Dimension::X).SetLabel("Lambda");
      ds->SetLegend( md.Name() + "_Skip" + integerToString(*it) );
      if (curveout_ != 0) curveout_->AddDataSet( ds );
      curve_.push_back( ds );
    }
  } else if (avgType_ == BOOTSTRAP) {
    // As many curves as resamples
    for (int nsample = 0; nsample != n_bootstrap_samples_; nsample++) {
      md.SetIdx(nsample);
      DataSet* ds = setup.DSL().AddSet(DataSet::XYMESH, md);
      if (ds == 0) return Analysis::ERR;
      ds->ModifyDim(Dimension::X).SetLabel("Lambda");
      ds->SetLegend( md.Name() + "_Sample" + integerToString(nsample) );
      if (curveout_ != 0) curveout_->AddDataSet( ds );
      curve_.push_back( ds );
    }
    // Standard devation of avg free energy over samples
    dA_SD_ = setup.DSL().AddSet(DataSet::DOUBLE, MetaData(md.Name(), "SD"));
    if (dA_SD_ == 0) return Analysis::ERR;
    if (outfile != 0) {
      outfile->AddDataSet( dA_SD_ );
      outfile->ProcessArgs("noxcol");
    }
  }
  // NOTE: INCREMENT is set up once data set size is known
  mprintf(" TI: Calculating TI");
  if (mode_ == GAUSSIAN_QUAD) {
    mprintf(" using Gaussian quadrature with %zu points.\n", xval_.size());
    mprintf("\t%6s %8s %8s %s\n", "Point", "Abscissa", "Weight", "SetName");
    for (unsigned int i = 0; i != xval_.size(); i++)
      mprintf("\t%6i %8.5f %8.5f %s\n", i, xval_[i], wgt_[i], input_dsets_[i]->legend());
  } else {
    mprintf(" using the trapezoid rule.\n");
    mprintf("\t%6s %8s %s\n", "Point", "Abscissa", "SetName");
    for (unsigned int i = 0; i != xval_.size(); i++)
      mprintf("\t%6i %8.5f %s\n", i, xval_[i], input_dsets_[i]->legend());
  }
  mprintf("\tResult(s) of integration(s) saved in set '%s'\n", dAout_->legend());
  if (avgType_ == AVG)
    mprintf("\tUsing all data points in <DV/DL> calc.\n");
  else if (avgType_ == SKIP) {
    mprintf("\tSkipping first");
    for (Iarray::const_iterator it = nskip_.begin(); it != nskip_.end(); ++it)
      mprintf(" %i", *it);
    mprintf(" data points for <DV/DL> calc.\n");
  } else if (avgType_ == INCREMENT) {
    mprintf("\tCalculating <DV/DL> starting from point %i, increment by %i.", avg_skip_, avg_increment_);
    if (avg_max_ != -1)
      mprintf(" Max %i points.", avg_max_);
    mprintf("\n");
  } else if (avgType_ == BOOTSTRAP) {
    mprintf("\tStandard devation of result stored in set '%s'\n", dA_SD_->legend());
    mprintf("\tCalculating <DV/DL> from %i bootstrap resamples.\n", n_bootstrap_samples_);
    if (n_bootstrap_pts_ > 0)
      mprintf("\tBootstrap resample size is %i data points.\n", n_bootstrap_pts_);
    else
      mprintf("\tWill use bootstrap resample size of %g%% of total points.\n", bootstrap_fac_*100.0);
    if (bootstrap_seed_ != -1)
      mprintf("\tBoostrap base seed is %i\n", bootstrap_seed_);
  }
  mprintf("\tTI curve(s) saved in set(s)");
  if (avgType_ != INCREMENT)
    for (DSarray::const_iterator ds = curve_.begin(); ds != curve_.end(); ++ds)
      mprintf(" '%s'", (*ds)->legend());
  else
    mprintf(" named '%s'", md.PrintName().c_str());
  mprintf("\n");
  if (outfile != 0)
    mprintf("\tResults written to '%s'\n", outfile->DataFilename().full());
  if (curveout_!= 0)
    mprintf("\tTI curve(s) written to '%s'\n", curveout_->DataFilename().full());
  return Analysis::OK;
}
/** Set up lifetime analysis. For each input set, optionally create windowed
  * output sets (average per window, plus max/avg lifetime sets unless
  * 'averageonly'). Keywords: 'out', 'name', 'window', 'averageonly',
  * 'cumulative', 'delta', 'cut'; remaining args select input sets.
  */
Analysis::RetType Analysis_Lifetime::Setup(ArgList& analyzeArgs, DataSetList* datasetlist, TopologyList* PFLin, DataFileList* DFLin, int debugIn)
{
  // Get Keywords
  DataFile* outfile = DFLin->AddDataFile(analyzeArgs.GetStringKey("out"), analyzeArgs);
  if (outfile != 0) outfile->ProcessArgs("noemptyframes");
  DataFile* maxfile = 0;
  DataFile* avgfile = 0;
  std::string setname = analyzeArgs.GetStringKey("name");
  windowSize_ = analyzeArgs.getKeyInt("window", -1);
  averageonly_ = analyzeArgs.hasKey("averageonly");
  if (!averageonly_ && outfile != 0) {
    // Max/avg lifetime data get their own files derived from the out name.
    maxfile = DFLin->AddDataFile("max." + outfile->DataFilename().Full(), analyzeArgs);
    maxfile->ProcessArgs("noemptyframes");
    avgfile = DFLin->AddDataFile("avg." + outfile->DataFilename().Full(), analyzeArgs);
    avgfile->ProcessArgs("noemptyframes");
  }
  cumulative_ = analyzeArgs.hasKey("cumulative");
  deltaAvg_ = analyzeArgs.hasKey("delta");
  cut_ = analyzeArgs.getKeyDouble("cut", 0.5);
  // Select datasets from remaining args
  ArgList dsetArgs = analyzeArgs.RemainingArgs();
  for (ArgList::const_iterator dsa = dsetArgs.begin(); dsa != dsetArgs.end(); ++dsa)
    inputDsets_ += datasetlist->GetMultipleSets( *dsa );
  if (inputDsets_.empty()) {
    mprinterr("Error: lifetime: No data sets selected.\n");
    return Analysis::ERR;
  }
  // Sort input datasets
  inputDsets_.sort();
  // Create output datasets
  if ( windowSize_ != -1) {
    if (setname.empty())
      setname = datasetlist->GenerateDefaultName( "lifetime" );
    int didx = 0;
    for (DataSetList::const_iterator set = inputDsets_.begin(); set != inputDsets_.end(); ++set)
    {
      DataSet* outSet = datasetlist->AddSetIdx( DataSet::FLOAT, setname, didx );
      if (outSet==0) {
        mprinterr("Error: lifetime: Could not allocate output set for %s\n",
                  (*set)->Legend().c_str());
        return Analysis::ERR;
      }
      outSet->SetLegend( (*set)->Legend() );
      outputDsets_.push_back( outSet );
      if (outfile != 0) outfile->AddSet( outSet );
      if (!averageonly_) {
        // MAX
        outSet = datasetlist->AddSetIdxAspect( DataSet::INT, setname, didx, "max" );
        // FIX: resolves the FIXME - AddSetIdxAspect results were used unchecked.
        if (outSet==0) {
          mprinterr("Error: lifetime: Could not allocate max output set for %s\n",
                    (*set)->Legend().c_str());
          return Analysis::ERR;
        }
        outSet->SetLegend( (*set)->Legend() );
        maxDsets_.push_back( outSet );
        if (maxfile != 0) maxfile->AddSet( outSet );
        // AVG
        outSet = datasetlist->AddSetIdxAspect( DataSet::FLOAT, setname, didx, "avg" );
        if (outSet==0) {
          mprinterr("Error: lifetime: Could not allocate avg output set for %s\n",
                    (*set)->Legend().c_str());
          return Analysis::ERR;
        }
        outSet->SetLegend( (*set)->Legend() );
        avgDsets_.push_back( outSet );
        if (avgfile != 0) avgfile->AddSet( outSet );
      }
      ++didx;
    }
  } else if (outfile != 0) {
    mprinterr("Error: Output file name specified but no window size given ('window <N>')\n");
    return Analysis::ERR;
  }
  if (!averageonly_)
    mprintf(" LIFETIME: Calculating average lifetime using a cutoff of %f", cut_);
  else
    mprintf(" LIFETIME: Calculating only averages");
  // FIX: size() returns size_t; cast to int to match the %i conversion.
  mprintf(" of data in %i sets\n", (int)inputDsets_.size());
  if (debugIn > 0) inputDsets_.List();
  if (windowSize_ != -1) {
    mprintf("\tAverage of data over windows will be saved to sets named %s\n", setname.c_str());
    mprintf("\tWindow size for averaging: %i\n", windowSize_);
    if (cumulative_)
      mprintf("\tCumulative averages will be saved.\n");
    if (deltaAvg_)
      mprintf("\tChange of average from previous average will be saved.\n");
    if (outfile != 0) {
      mprintf("\tOutfile: %s", outfile->DataFilename().base());
      if (!averageonly_)
        mprintf(", %s, %s", maxfile->DataFilename().base(), avgfile->DataFilename().base());
      mprintf("\n");
    }
  }
  return Analysis::OK;
}
/** Set up variable with value. In this case allow any amount of whitespace,
  * so re-tokenize the original argument line (minus the command).
  */
CpptrajState::RetType Control_Set::SetupControl(CpptrajState& State, ArgList& argIn, Varray& CurrentVars)
{
  ArgList remaining = argIn.RemainingArgs();
  // Locate the '=' separating <var> from <value>.
  size_t pos0 = remaining.ArgLineStr().find_first_of("=");
  if (pos0 == std::string::npos) {
    mprinterr("Error: Expected <var>=<value>\n");
    return CpptrajState::ERR;
  }
  // pos1 stays on '='; pos0 may back up over a '+' (append form "var+=value").
  size_t pos1 = pos0;
  bool append = false;
  if (pos0 > 0 && remaining.ArgLineStr()[pos0-1] == '+') {
    pos0--;
    append = true;
  }
  // Variable name is everything before '=' (or '+='), with whitespace removed.
  std::string variable = NoWhitespace( remaining.ArgLineStr().substr(0, pos0) );
  if (variable.empty()) {
    mprinterr("Error: No variable name.\n");
    return CpptrajState::ERR;
  }
  // Re-tokenize everything after '=' as the value expression.
  ArgList equals( NoLeadingWhitespace(remaining.ArgLineStr().substr(pos1+1)) );
  std::string value;
  if (equals.Contains("inmask")) {
    // Value is a count derived from atoms selected by a mask expression.
    AtomMask mask( equals.GetStringKey("inmask") );
    Topology* top = State.DSL().GetTopByIndex( equals );
    if (top == 0) return CpptrajState::ERR;
    if (top->SetupIntegerMask( mask )) return CpptrajState::ERR;
    if (equals.hasKey("atoms"))
      value = integerToString( mask.Nselected() );
    else if (equals.hasKey("residues")) {
      // Count residue transitions among selected atoms; assumes selected
      // atoms come out in residue order - TODO confirm for all mask types.
      int curRes = -1;
      int nres = 0;
      for (AtomMask::const_iterator at = mask.begin(); at != mask.end(); ++at) {
        int res = (*top)[*at].ResNum();
        if (res != curRes) {
          nres++;
          curRes = res;
        }
      }
      value = integerToString( nres );
    } else if (equals.hasKey("molecules")) {
      // Same transition-counting approach for molecules.
      int curMol = -1;
      int nmol = 0;
      for (AtomMask::const_iterator at = mask.begin(); at != mask.end(); ++at) {
        int mol = (*top)[*at].MolNum();
        if (mol != curMol) {
          nmol++;
          curMol = mol;
        }
      }
      value = integerToString( nmol );
    } else {
      mprinterr("Error: Expected 'atoms', 'residues', or 'molecules'.\n");
      return CpptrajState::ERR;
    }
  } else if (equals.hasKey("trajinframes")) {
    // Value is the total number of frames in loaded input trajectories.
    value = integerToString(State.InputTrajList().MaxFrames());
  } else
    // Plain assignment: the value is the remainder of the line verbatim.
    value = equals.ArgLineStr();
  if (append)
    CurrentVars.AppendVariable( "$" + variable, value );
  else
    CurrentVars.UpdateVariable( "$" + variable, value );
  mprintf("\tVariable '%s' set to '%s'\n", variable.c_str(), value.c_str());
  // Mark every argument as consumed so nothing is reported as unhandled.
  for (int iarg = 0; iarg < argIn.Nargs(); iarg++)
    argIn.MarkArg( iarg );
  return CpptrajState::OK;
}
// Action_ReplicateCell::Init() Action::RetType Action_ReplicateCell::Init(ArgList& actionArgs, TopologyList* PFL, DataSetList* DSL, DataFileList* DFL, int debugIn) { // Require imaging. image_.InitImaging( true ); // Set up output traj trajfilename_ = actionArgs.GetStringKey("out"); parmfilename_ = actionArgs.GetStringKey("parmout"); Topology* tempParm = PFL->GetParm( actionArgs ); bool setAll = actionArgs.hasKey("all"); std::string dsname = actionArgs.GetStringKey("name"); if (!dsname.empty()) { coords_ = (DataSet_Coords*)DSL->AddSet(DataSet::COORDS, dsname, "RCELL"); if (coords_ == 0) return Action::ERR; } if (trajfilename_.empty() && coords_ == 0) { mprinterr("Error: Either 'out <traj filename> or 'name <dsname>' must be specified.\n"); return Action::ERR; } // Get Mask Mask1_.SetMaskString( actionArgs.GetMaskNext() ); // Determine which directions to set if (setAll) { for (int ix = -1; ix < 2; ix++) for (int iy = -1; iy < 2; iy++) for (int iz = -1; iz < 2; iz++) { directionArray_.push_back( ix ); directionArray_.push_back( iy ); directionArray_.push_back( iz ); } } else { std::string dirstring = actionArgs.GetStringKey("dir"); while (!dirstring.empty()) { std::vector<int> ixyz(3, -2); std::vector<int>::iterator iptr = ixyz.begin(); for (std::string::const_iterator c = dirstring.begin(); c != dirstring.end(); ++c) { if (iptr == ixyz.end()) { mprinterr("Error: 'dir' string has too many characters.\n"); return Action::ERR; } int sign = 1; if (*c == '+') ++c; else if (*c == '-') { sign = -1; ++c; } if (*c == '1') *iptr = 1 * sign; else if (*c == '0') *iptr = 0; ++iptr; } mprintf("DEBUG: %s = %i %i %i\n", dirstring.c_str(), ixyz[0], ixyz[1], ixyz[2]); directionArray_.push_back( ixyz[0] ); directionArray_.push_back( ixyz[1] ); directionArray_.push_back( ixyz[2] ); dirstring = actionArgs.GetStringKey("dir"); } } ncopies_ = (int)(directionArray_.size() / 3); if (ncopies_ < 1) { mprinterr("Error: No directions (or 'all') specified.\n"); return Action::ERR; } // Set up 
output trajectory if (!trajfilename_.empty()) { if (tempParm == 0) { mprinterr("Error: Could not get topology for %s\n", trajfilename_.c_str()); return Action::ERR; } outtraj_.SetDebug( debugIn ); // Initialize output trajectory with remaining arguments trajArgs_ = actionArgs.RemainingArgs(); ensembleNum_ = DSL->EnsembleNum(); } mprintf(" REPLICATE CELL: Replicating cell in %i directions:\n", ncopies_); mprintf("\t\t X Y Z\n"); for (unsigned int i = 0; i != directionArray_.size(); i += 3) mprintf("\t\t%2i %2i %2i\n", directionArray_[i], directionArray_[i+1], directionArray_[i+2]); mprintf("\tUsing atoms in mask '%s'\n", Mask1_.MaskString()); if (!trajfilename_.empty()) mprintf("\tWriting to trajectory %s\n", trajfilename_.c_str()); if (!parmfilename_.empty()) mprintf("\tWriting topology %s\n", parmfilename_.c_str()); if (coords_ != 0) mprintf("\tSaving coords to data set %s\n", coords_->legend()); return Action::OK; }
// Action_ReplicateCell::Init() Action::RetType Action_ReplicateCell::Init(ArgList& actionArgs, ActionInit& init, int debugIn) { // Require imaging. image_.InitImaging( true ); // Set up output traj std::string trajfilename = actionArgs.GetStringKey("out"); parmfilename_ = actionArgs.GetStringKey("parmout"); bool setAll = actionArgs.hasKey("all"); std::string dsname = actionArgs.GetStringKey("name"); if (!dsname.empty()) { coords_ = (DataSet_Coords*)init.DSL().AddSet(DataSet::COORDS, dsname, "RCELL"); if (coords_ == 0) return Action::ERR; } if (trajfilename.empty() && coords_ == 0) { mprinterr("Error: Either 'out <traj filename> or 'name <dsname>' must be specified.\n"); return Action::ERR; } // Get Mask Mask1_.SetMaskString( actionArgs.GetMaskNext() ); // Determine which directions to set if (setAll) { for (int ix = -1; ix < 2; ix++) for (int iy = -1; iy < 2; iy++) for (int iz = -1; iz < 2; iz++) { directionArray_.push_back( ix ); directionArray_.push_back( iy ); directionArray_.push_back( iz ); } } else { std::string dirstring = actionArgs.GetStringKey("dir"); while (!dirstring.empty()) { std::vector<int> ixyz(3, -2); std::vector<int>::iterator iptr = ixyz.begin(); for (std::string::const_iterator c = dirstring.begin(); c != dirstring.end(); ++c) { if (iptr == ixyz.end()) { mprinterr("Error: 'dir' string has too many characters.\n"); return Action::ERR; } int sign = 1; if (*c == '+') ++c; else if (*c == '-') { sign = -1; ++c; } if (isdigit( *c )) *iptr = toDigit( *c ) * sign; else { mprinterr("Error: illegal character '%c' in 'dir' string '%s'; only numbers allowed.\n", *c, dirstring.c_str()); return Action::ERR; } ++iptr; } //mprintf("DEBUG: %s = %i %i %i\n", dirstring.c_str(), ixyz[0], ixyz[1], ixyz[2]); directionArray_.push_back( ixyz[0] ); directionArray_.push_back( ixyz[1] ); directionArray_.push_back( ixyz[2] ); dirstring = actionArgs.GetStringKey("dir"); } } ncopies_ = (int)(directionArray_.size() / 3); if (ncopies_ < 1) { mprinterr("Error: No directions 
(or 'all') specified.\n"); return Action::ERR; } // Initialize output trajectory with remaining arguments if (!trajfilename.empty()) { outtraj_.SetDebug( debugIn ); if ( outtraj_.InitEnsembleTrajWrite(trajfilename, actionArgs.RemainingArgs(), TrajectoryFile::UNKNOWN_TRAJ, init.DSL().EnsembleNum()) ) return Action::ERR; writeTraj_ = true; # ifdef MPI outtraj_.SetTrajComm( init.TrajComm() ); # endif } else writeTraj_ = false; mprintf(" REPLICATE CELL: Replicating cell in %i directions:\n", ncopies_); mprintf("\t\t X Y Z\n"); for (unsigned int i = 0; i != directionArray_.size(); i += 3) mprintf("\t\t%2i %2i %2i\n", directionArray_[i], directionArray_[i+1], directionArray_[i+2]); mprintf("\tUsing atoms in mask '%s'\n", Mask1_.MaskString()); if (writeTraj_) mprintf("\tWriting to trajectory %s\n", outtraj_.Traj().Filename().full()); if (!parmfilename_.empty()) mprintf("\tWriting topology %s\n", parmfilename_.c_str()); if (coords_ != 0) mprintf("\tSaving coords to data set %s\n", coords_->legend()); return Action::OK; }
// Analysis_Spline::Setup()
/** Set up cubic splining of one or more 1D data sets. Mesh size comes from
  * either an explicit 'meshsize' (> 2) or a per-set 'meshfactor' (> 0.0);
  * mesh min/max default to each input set's range unless 'meshmin'/'meshmax'
  * are given. One XYMESH output set is created per input set.
  * \return Analysis::OK on success, Analysis::ERR on any argument error.
  */
Analysis::RetType Analysis_Spline::Setup(ArgList& analyzeArgs, DataSetList* datasetlist,
                                         DataFileList* DFLin, int debugIn)
{
  std::string setname = analyzeArgs.GetStringKey("name");
  outfile_ = DFLin->AddDataFile(analyzeArgs.GetStringKey("out"), analyzeArgs);
  meshsize_ = analyzeArgs.getKeyInt("meshsize", 0);
  meshfactor_ = -1.0;
  if (meshsize_ < 3) {
    // No usable explicit mesh size; require a positive mesh factor instead.
    meshfactor_ = analyzeArgs.getKeyDouble("meshfactor", -1.0);
    if (meshfactor_ < Constants::SMALL) {
      mprinterr("Error: Either meshsize must be specified and > 2, or meshfactor must be\n"
                "Error: specified and > 0.0\n");
      return Analysis::ERR;
    }
  }
  // NOTE: despite the names, useDefaultMin_/useDefaultMax_ are set when the
  // user DID supply meshmin/meshmax (the print-out below is consistent with
  // that reading).
  if (analyzeArgs.Contains("meshmin")) {
    meshmin_ = analyzeArgs.getKeyDouble("meshmin", 0.0);
    useDefaultMin_ = true;
  } else
    useDefaultMin_ = false;
  if (analyzeArgs.Contains("meshmax")) {
    meshmax_ = analyzeArgs.getKeyDouble("meshmax", -1.0);
    useDefaultMax_ = true;
  } else
    useDefaultMax_ = false;
  if (useDefaultMin_ && useDefaultMax_ && meshmax_ < meshmin_) {
    mprinterr("Error: meshmax must be > meshmin\n");
    return Analysis::ERR;
  }
  // Select datasets from remaining args
  if (input_dsets_.AddSetsFromArgs( analyzeArgs.RemainingArgs(), *datasetlist )) {
    mprinterr("Error: Could not add data sets.\n");
    return Analysis::ERR;
  }
  if (input_dsets_.empty()) {
    mprinterr("Error: No input data sets.\n");
    return Analysis::ERR;
  }
  // Set up output datasets.
  // NOTE(review): in meshfactor mode meshsize_ may be 0 here, making this
  // step infinite; presumably the dimension is recalculated per set when the
  // analysis runs — TODO confirm.
  Dimension Xdim( meshmin_, (meshmax_ - meshmin_) / (double)meshsize_ );
  for (Array1D::const_iterator dsIn = input_dsets_.begin();
                               dsIn != input_dsets_.end(); ++dsIn)
  {
    DataSet* ds = datasetlist->AddSet(DataSet::XYMESH, setname, "Spline");
    if (ds == 0) return Analysis::ERR;
    ds->SetLegend( "Spline(" + (*dsIn)->Meta().Legend() + ")" );
    // TODO: Set individually based on input_dsets_
    ds->SetDim(Dimension::X, Xdim);
    if (outfile_ != 0) outfile_->AddDataSet( ds );
    output_dsets_.push_back( (DataSet_Mesh*)ds );
  }
  // FIXED: size() returns size_t; '%u' was the wrong conversion (the rest of
  // the file uses '%zu' for sizes).
  mprintf(" SPLINE: Applying cubic splining to %zu data sets\n", input_dsets_.size());
  if (meshfactor_ < 0)
    mprintf("\tMesh size= %i\n", meshsize_);
  else
    mprintf("\tMesh size will be input set size multiplied by %f\n", meshfactor_);
  if (useDefaultMin_)
    mprintf("\tMesh min= %f,", meshmin_);
  else
    mprintf("\tMesh min will be input set min,");
  if (useDefaultMax_)
    mprintf(" Mesh max= %f\n", meshmax_);
  else
    mprintf(" Mesh max will be input set max.\n");
  if (outfile_ != 0) {
    if (!setname.empty())
      mprintf("\tOutput set name: %s\n", setname.c_str());
    mprintf("\tOutfile name: %s\n", outfile_->DataFilename().base());
  }
  //for (Array1D::const_iterator set = input_dsets_.begin(); set != input_dsets_.end(); ++set)
  //  mprintf("\t%s\n", (*set)->legend());
  return Analysis::OK;
}
// Analysis_Lifetime::Setup() Analysis::RetType Analysis_Lifetime::Setup(ArgList& analyzeArgs, DataSetList* datasetlist, TopologyList* PFLin, DataFileList* DFLin, int debugIn) { // Get Keywords FileName outfileName( analyzeArgs.GetStringKey("out") ); std::string setname = analyzeArgs.GetStringKey("name"); bool sortSets = (!analyzeArgs.hasKey("nosort")); windowSize_ = analyzeArgs.getKeyInt("window", -1); averageonly_ = analyzeArgs.hasKey("averageonly"); cumulative_ = analyzeArgs.hasKey("cumulative"); deltaAvg_ = analyzeArgs.hasKey("delta"); cut_ = analyzeArgs.getKeyDouble("cut", 0.5); fuzzCut_ = analyzeArgs.getKeyInt("fuzz", -1); if (fuzzCut_ < 1) fuzzCut_ = -1; normalizeCurves_ = !analyzeArgs.hasKey("rawcurve"); if (analyzeArgs.hasKey("greater")) Compare_ = Compare_GreaterThan; else if (analyzeArgs.hasKey("less")) Compare_ = Compare_LessThan; else Compare_ = Compare_GreaterThan; // Select datasets from remaining args if (inputDsets_.AddSetsFromArgs( analyzeArgs.RemainingArgs(), *datasetlist )) { mprinterr("Error: lifetime: Could not add data sets.\n"); return Analysis::ERR; } // Sort data sets if (sortSets) inputDsets_.SortArray1D(); // Create output datasets DataFile* outfile = 0; DataFile* maxfile = 0; DataFile* avgfile = 0; if (setname.empty()) setname = datasetlist->GenerateDefaultName( "lifetime" ); if ( windowSize_ != -1) { outfile = DFLin->AddDataFile(outfileName, analyzeArgs); if (!averageonly_ && outfile != 0) { maxfile = DFLin->AddDataFile(outfileName.DirPrefix() + "max." + outfileName.Base(), analyzeArgs); avgfile = DFLin->AddDataFile(outfileName.DirPrefix() + "avg." 
+ outfileName.Base(), analyzeArgs); } int didx = 0; for (Array1D::const_iterator set = inputDsets_.begin(); set != inputDsets_.end(); ++set) { MetaData md(setname, didx); md.SetLegend( (*set)->Meta().Legend() ); DataSet_1D* outSet = (DataSet_1D*)datasetlist->AddSet( DataSet::FLOAT, md ); if (CheckDsetError(outSet, "output", (*set)->legend())) return Analysis::ERR; outputDsets_.push_back( outSet ); if (outfile != 0) outfile->AddDataSet( outSet ); if (!averageonly_) { // MAX md.SetAspect("max"); outSet = (DataSet_1D*)datasetlist->AddSet(DataSet::INTEGER, md); if (CheckDsetError(outSet, "lifetime max", (*set)->legend())) return Analysis::ERR; maxDsets_.push_back( outSet ); if (maxfile != 0) maxfile->AddDataSet( outSet ); // AVG md.SetAspect("avg"); outSet = (DataSet_1D*)datasetlist->AddSet(DataSet::FLOAT, md); if (CheckDsetError(outSet, "lifetime avg", (*set)->legend())) return Analysis::ERR; avgDsets_.push_back( outSet ); if (avgfile != 0) avgfile->AddDataSet( outSet ); } ++didx; } // Set step to window size. std::string fileArgs = "xstep " + integerToString( windowSize_ ); if (outfile != 0) outfile->ProcessArgs( fileArgs ); if (maxfile != 0) maxfile->ProcessArgs( fileArgs ); if (avgfile != 0) avgfile->ProcessArgs( fileArgs ); } // Lifetime curves DataFile* crvfile = 0; if (!averageonly_) { if (!outfileName.empty()) { crvfile = DFLin->AddDataFile(outfileName.DirPrefix() + "crv." 
+ outfileName.Base(), analyzeArgs); } MetaData md(setname, "curve"); for (int didx = 0; didx != (int)inputDsets_.size(); didx++) { md.SetIdx(didx); DataSet_1D* outSet = (DataSet_1D*)datasetlist->AddSet(DataSet::DOUBLE, md); if (CheckDsetError(outSet, "lifetime curve", inputDsets_[didx]->legend())) return Analysis::ERR; curveSets_.push_back( outSet ); if (crvfile != 0) crvfile->AddDataSet( outSet ); } } // Non-window output file if (!averageonly_ && windowSize_ == -1) { standalone_ = DFLin->AddCpptrajFile( outfileName, "Lifetimes", DataFileList::TEXT, true ); if (standalone_ == 0) return Analysis::ERR; } else standalone_ = 0; if (!averageonly_) mprintf(" LIFETIME: Calculating average lifetime using a cutoff of %f", cut_); else mprintf(" LIFETIME: Calculating only averages"); mprintf(" of data in %i sets\n", inputDsets_.size()); if (!sortSets) mprintf("\tInput data sets will not be sorted.\n"); if (debugIn > 0) for (Array1D::const_iterator set = inputDsets_.begin(); set != inputDsets_.end(); ++set) mprintf("\t%s\n", (*set)->legend()); if (Compare_ == Compare_GreaterThan) mprintf("\tValues greater than %f are considered present.\n", cut_); else mprintf("\tValues less than %f are considered present.\n", cut_); if (windowSize_ != -1) { mprintf("\tAverage of data over windows will be saved to sets named %s\n", setname.c_str()); mprintf("\tWindow size for averaging: %i\n", windowSize_); if (cumulative_) mprintf("\tCumulative averages will be saved.\n"); if (deltaAvg_) mprintf("\tChange of average from previous average will be saved.\n"); } if (outfile != 0) { mprintf("\tOutfile: %s", outfile->DataFilename().full()); if (!averageonly_ && outfile != 0) mprintf(", %s, %s", maxfile->DataFilename().base(), avgfile->DataFilename().base()); mprintf("\n"); } if (!averageonly_) { if (crvfile != 0) mprintf("\tLifetime curves output: %s\n", crvfile->DataFilename().base()); if (normalizeCurves_) mprintf("\tLifetime curves will be normalized.\n"); else mprintf("\tLifetime curves will 
not be normalized.\n"); } if (fuzzCut_ != -1) mprintf("\tFuzz value of %i frames will be used.\n", fuzzCut_); return Analysis::OK; }
/** Set up histogram with specified data sets.
  * Parses keywords ('out' required; optional 'name', 'traj3d'/'trajfmt'/
  * 'parmout', 'free <T>', 'gnu', 'norm'/'normint', 'circular', 'nativeout',
  * 'min'/'max'/'step'/'bins', 'amd <set>'), then treats all remaining
  * arguments as names of data sets to histogram. Each binned set is one
  * histogram dimension; 1D/2D/3D histograms get a DataSet + DataFile, higher
  * dimensions (or 'nativeout') fall back to internal text output.
  * \return Analysis::OK on success, Analysis::ERR on any argument error.
  */
Analysis::RetType Analysis_Hist::Setup(ArgList& analyzeArgs, AnalysisSetup& setup, int debugIn)
{
  debug_ = debugIn;
  // Keywords
  std::string histname = analyzeArgs.GetStringKey("name");
  outfilename_ = analyzeArgs.GetStringKey("out");
  if (outfilename_.empty()) {
    mprinterr("Error: Hist: No output filename specified.\n");
    return Analysis::ERR;
  }
  traj3dName_ = analyzeArgs.GetStringKey("traj3d");
  traj3dFmt_ = TrajectoryFile::WriteFormatFromString( analyzeArgs.GetStringKey("trajfmt"),
                                                      TrajectoryFile::AMBERTRAJ );
  parmoutName_ = analyzeArgs.GetStringKey("parmout");
  // Create a DataFile here so any DataFile arguments can be processed. If it
  // turns out later that native output is needed the DataFile will be removed.
  outfile_ = setup.DFL().AddDataFile(outfilename_, analyzeArgs);
  if (outfile_==0) return Analysis::ERR;
  // 'free <T>' requests free-energy conversion at temperature T; the -1.0
  // sentinel means the keyword was absent.
  Temp_ = analyzeArgs.getKeyDouble("free",-1.0);
  if (Temp_!=-1.0)
    calcFreeE_ = true;
  else
    calcFreeE_ = false;
  gnuplot_ = analyzeArgs.hasKey("gnu");
  // Normalization mode: sum-to-1, integral-to-1, or none.
  if (analyzeArgs.hasKey("norm"))
    normalize_ = NORM_SUM;
  else if (analyzeArgs.hasKey("normint"))
    normalize_ = NORM_INT;
  else
    normalize_ = NO_NORM;
  circular_ = analyzeArgs.hasKey("circular");
  nativeOut_ = analyzeArgs.hasKey("nativeout");
  // Default bin min/max apply to dimensions that do not specify their own;
  // the *ArgSet_ flags record whether the user supplied them.
  if ( analyzeArgs.Contains("min") ) {
    default_min_ = analyzeArgs.getKeyDouble("min", 0.0);
    minArgSet_ = true;
  }
  if ( analyzeArgs.Contains("max") ) {
    default_max_ = analyzeArgs.getKeyDouble("max", 0.0);
    maxArgSet_ = true;
  }
  default_step_ = analyzeArgs.getKeyDouble("step", 0.0) ;
  default_bins_ = analyzeArgs.getKeyInt("bins", -1);
  // Optional AMD boost data set for reweighted binning; must be 1D.
  calcAMD_ = false;
  std::string amdname = analyzeArgs.GetStringKey("amd");
  if (!amdname.empty()) {
    DataSet* ds = setup.DSL().GetDataSet( amdname );
    if (ds == 0) {
      mprinterr("Error: AMD data set %s not found.\n", amdname.c_str());
      return Analysis::ERR;
    }
    if (ds->Ndim() != 1) {
      mprinterr("Error: AMD data set must be 1D.\n");
      return Analysis::ERR;
    }
    amddata_ = (DataSet_1D*)ds;
    calcAMD_ = true;
  }
  // Treat all remaining arguments as dataset names. Do not set up dimensions
  // yet since the data sets may not be fully populated.
  // NOTE: CheckDimension() appends the located sets to histdata_ as a side
  // effect — presumably; confirm against its definition.
  ArgList dsetNames = analyzeArgs.RemainingArgs();
  for ( ArgList::const_iterator setname = dsetNames.begin();
                                setname != dsetNames.end(); ++setname) {
    if (CheckDimension( *setname, setup.DSL() )) return Analysis::ERR;
  }
  // histdata contains the DataSets to be histogrammed
  if (histdata_.empty()) {
    mprinterr("Error: Hist: No datasets specified.\n");
    return Analysis::ERR;
  }
  // Total # of dimensions for the histogram is the number of sets to be binned.
  N_dimensions_ = histdata_.size();
  // Choose output DataSet type by dimensionality; > 3 dims forces native
  // (internal) output since no DataSet type supports it.
  if (!nativeOut_) {
    switch ( N_dimensions_ ) {
      case 1: hist_ = setup.DSL().AddSet( DataSet::DOUBLE,     histname, "Hist"); break;
      case 2: hist_ = setup.DSL().AddSet( DataSet::MATRIX_DBL, histname, "Hist"); break;
      // TODO: GRID_DBL
      case 3: hist_ = setup.DSL().AddSet( DataSet::GRID_FLT,   histname, "Hist"); break;
      default: // FIXME: GET N DIMENSION CASE!
        mprintf("Warning: Histogram dimension > 3. DataSet/DataFile output not supported.\n");
        nativeOut_ = true;
    }
  }
  // traj3d only supported with 3D histograms
  if (!traj3dName_.empty() && N_dimensions_ != 3) {
    mprintf("Warning: 'traj3d' only supported with 3D histograms.\n");
    traj3dName_.clear();
    parmoutName_.clear();
  }
  if (!nativeOut_) {
    // DataFile output. Add DataSet to DataFile.
    if (hist_ == 0) {
      mprinterr("Error: Could not set up histogram data set.\n");
      return Analysis::ERR;
    }
    outfile_->AddDataSet( hist_ );
  } else {
    // Native output. Remove DataFile from DataFileList
    outfile_ = setup.DFL().RemoveDataFile( outfile_ );
    native_ = setup.DFL().AddCpptrajFile( outfilename_, "Histogram output" );
    if (native_ == 0) return Analysis::ERR;
  }
  mprintf("\tHist: %s: Set up for %zu dimensions using the following datasets:\n",
          outfilename_.c_str(), N_dimensions_);
  mprintf("\t[ ");
  for (std::vector<DataSet_1D*>::iterator ds=histdata_.begin(); ds!=histdata_.end(); ++ds)
    mprintf("%s ",(*ds)->legend());
  mprintf("]\n");
  if (calcAMD_)
    mprintf("\tPopulating bins using AMD boost from data set %s\n", amddata_->legend());
  if (calcFreeE_)
    mprintf("\tFree energy in kcal/mol will be calculated from bin populations at %f K.\n",Temp_);
  if (nativeOut_)
    mprintf("\tUsing internal routine for output. Data will not be stored on the data set list.\n");
  //if (circular_ || gnuplot_) {
  //  mprintf("\tWarning: gnuplot and/or circular specified; advanced grace/gnuplot\n");
  //  mprintf("\t         formatting disabled.\n");*/
  if (circular_)
    mprintf("\tcircular: Output coordinates will be wrapped.\n");
  // gnuplot formatting only applies to native output (outfile_ was nulled
  // above when nativeOut_ is set).
  if (gnuplot_ && outfile_ == 0)
    mprintf("\tgnuplot: Output will be in gnuplot-readable format.\n");
  //}
  if (normalize_ == NORM_SUM)
    mprintf("\tnorm: Sum over bins will be normalized to 1.0.\n");
  else if (normalize_ == NORM_INT)
    mprintf("\tnormint: Integral over bins will be normalized to 1.0.\n");
  if (!traj3dName_.empty()) {
    mprintf("\tPseudo-trajectory will be written to '%s' with format %s\n",
            traj3dName_.c_str(), TrajectoryFile::FormatString(traj3dFmt_));
    if (!parmoutName_.empty())
      mprintf("\tCorresponding pseudo-topology will be written to '%s'\n",
              parmoutName_.c_str());
  }
  return Analysis::OK;
}
// Analysis_TI::Setup()
/** Set up thermodynamic-integration analysis using Gaussian quadrature.
  * Expects one DV/DL input data set per quadrature point ('nq' points);
  * 'nskip' is a comma-separated list of initial-data-point counts to skip,
  * each of which produces its own TI curve set. Results go to the XYMESH set
  * dAout_ (optionally written to 'out'), curves to 'curveout'.
  * \return Analysis::OK on success, Analysis::ERR on any setup error.
  */
Analysis::RetType Analysis_TI::Setup(ArgList& analyzeArgs, DataSetList* datasetlist,
                                     DataFileList* DFLin, int debugIn)
{
  int nq = analyzeArgs.getKeyInt("nq", 0);
  ArgList nskipArg(analyzeArgs.GetStringKey("nskip"), ","); // Comma-separated
  if (nskipArg.empty())
    // Default: a single entry of 0, i.e. skip nothing.
    nskip_.resize(1, 0);
  else {
    nskip_.clear();
    for (int i = 0; i != nskipArg.Nargs(); i++) {
      nskip_.push_back( nskipArg.getNextInteger(0) );
      // Negative skip values are clamped to 0.
      if (nskip_.back() < 0) nskip_.back() = 0;
    }
  }
  std::string setname = analyzeArgs.GetStringKey("name");
  DataFile* outfile = DFLin->AddDataFile(analyzeArgs.GetStringKey("out"), analyzeArgs);
  DataFile* curveout = DFLin->AddDataFile(analyzeArgs.GetStringKey("curveout"), analyzeArgs);
  // Select datasets from remaining args
  if (input_dsets_.AddSetsFromArgs( analyzeArgs.RemainingArgs(), *datasetlist )) {
    mprinterr("Error: Could not add data sets.\n");
    return Analysis::ERR;
  }
  if (input_dsets_.empty()) {
    mprinterr("Error: No input data sets.\n");
    return Analysis::ERR;
  }
  // Populates quad_ (abscissas) and wgt_ (weights) for nq points; exactly
  // one input DV/DL set per quadrature point is required.
  if (SetQuadAndWeights(nq)) return Analysis::ERR;
  if (quad_.size() != input_dsets_.size()) {
    mprinterr("Error: Expected %zu data sets based on nq, got %zu\n",
              quad_.size(), input_dsets_.size());
    return Analysis::ERR;
  }
  // Output free-energy set.
  dAout_ = datasetlist->AddSet(DataSet::XYMESH, setname, "TI");
  if (dAout_ == 0) return Analysis::ERR;
  if (outfile != 0) outfile->AddDataSet( dAout_ );
  // One TI curve set per requested skip value, indexed by the skip value.
  MetaData md(dAout_->Meta().Name(), "TIcurve");
  for (Iarray::const_iterator it = nskip_.begin(); it != nskip_.end(); ++it) {
    md.SetIdx( *it );
    DataSet* ds = datasetlist->AddSet(DataSet::XYMESH, md);
    if (ds == 0) return Analysis::ERR;
    ds->SetLegend( md.Name() + "_Skip" + integerToString(*it) );
    if (curveout != 0) curveout->AddDataSet( ds );
    curve_.push_back( ds );
  }
  mprintf(" TI: Calculating TI using Gaussian quadrature with %zu points.\n", quad_.size());
  mprintf("\t%6s %8s %8s %s\n", "Point", "Abscissa", "Weight", "SetName");
  for (unsigned int i = 0; i != quad_.size(); i++)
    mprintf("\t%6i %8.5f %8.5f %s\n", i, quad_[i], wgt_[i], input_dsets_[i]->legend());
  // nskip_ always has at least one entry (see above), so front() is safe.
  if (nskip_.front() > 0) {
    mprintf("\tSkipping first");
    for (Iarray::const_iterator it = nskip_.begin(); it != nskip_.end(); ++it)
      mprintf(" %i", *it);
    mprintf(" data points for <DV/DL> calc.\n");
  }
  mprintf("\tResults saved in set '%s'\n", dAout_->legend());
  mprintf("\tTI curve(s) saved in set(s)");
  for (DSarray::const_iterator ds = curve_.begin(); ds != curve_.end(); ++ds)
    mprintf(" '%s'", (*ds)->legend());
  mprintf("\n");
  if (outfile != 0)
    mprintf("\tResults written to '%s'\n", outfile->DataFilename().full());
  if (curveout!= 0)
    mprintf("\tTI curve written to '%s'\n", curveout->DataFilename().full());
  return Analysis::OK;
}