// Action_Angle::init() Action::RetType Action_Angle::Init(ArgList& actionArgs, TopologyList* PFL, DataSetList* DSL, DataFileList* DFL, int debugIn) { // Get keywords DataFile* outfile = DFL->AddDataFile( actionArgs.GetStringKey("out"), actionArgs ); useMass_ = actionArgs.hasKey("mass"); // Get Masks std::string mask1 = actionArgs.GetMaskNext(); std::string mask2 = actionArgs.GetMaskNext(); std::string mask3 = actionArgs.GetMaskNext(); if (mask1.empty() || mask2.empty() || mask3.empty()) { mprinterr("Error: angle: Requires 3 masks\n"); return Action::ERR; } Mask1_.SetMaskString(mask1); Mask2_.SetMaskString(mask2); Mask3_.SetMaskString(mask3); // Dataset to store angles ang_ = DSL->AddSet(DataSet::DOUBLE, MetaData(actionArgs.GetStringNext(),MetaData::M_ANGLE),"Ang"); if (ang_==0) return Action::ERR; // Add dataset to data file list if (outfile != 0) outfile->AddDataSet( ang_ ); mprintf(" ANGLE: [%s]-[%s]-[%s]\n",Mask1_.MaskString(), Mask2_.MaskString(), Mask3_.MaskString()); if (useMass_) mprintf("\tUsing center of mass of atoms in masks.\n"); return Action::OK; }
/** Called once before traj processing. Set up reference info. */
Action::RetType Action_DistRmsd::Init(ArgList& actionArgs, TopologyList* PFL, DataSetList* DSL, DataFileList* DFL, int debugIn)
{
  // Optional output file ('out' keyword).
  DataFile* drmsdFile = DFL->AddDataFile( actionArgs.GetStringKey("out"), actionArgs );
  // Reference keywords
  // TODO: Can these just be put in the InitRef call?
  bool useFirst = actionArgs.hasKey("first");
  ReferenceFrame refFrame = DSL->GetReferenceFrame( actionArgs );
  std::string refTrajName = actionArgs.GetStringKey("reftraj");
  Topology* refTop = PFL->GetParm( actionArgs );
  // Mask expression for the target.
  std::string tgtExpr = actionArgs.GetMaskNext();
  TgtMask_.SetMaskString(tgtExpr);
  // Mask expression for the reference; defaults to the target expression.
  std::string refExpr = actionArgs.GetMaskNext();
  if (refExpr.empty())
    refExpr = tgtExpr;
  // Initialize reference
  if (refHolder_.InitRef(false, useFirst, false, false, refTrajName, refFrame,
                         refTop, refExpr, actionArgs, "distrmsd"))
    return Action::ERR;
  // Set up the RMSD data set
  drmsd_ = DSL->AddSet(DataSet::DOUBLE, actionArgs.GetStringNext(),"DRMSD");
  if (drmsd_ == 0) return Action::ERR;
  // Hook the set up to the output file if one was requested.
  if (drmsdFile != 0)
    drmsdFile->AddDataSet( drmsd_ );
  mprintf(" DISTRMSD: (%s), reference is %s\n",TgtMask_.MaskString(),
          refHolder_.RefModeString());
  return Action::OK;
}
/** Initialize the (experimental) channel action: grid spacing keywords,
  * solute/solvent masks, and the output grid data set. */
Action::RetType Action_Channel::Init(ArgList& actionArgs, ActionInit& init, int debugIn)
{
  // Keywords.
  DataFile* gridFile = init.DFL().AddDataFile( actionArgs.GetStringKey("out"), actionArgs );
  // Grid spacing; dy defaults to dx, dz defaults to dy.
  dxyz_[0] = actionArgs.getKeyDouble("dx", 0.35);
  dxyz_[1] = actionArgs.getKeyDouble("dy", dxyz_[0]);
  dxyz_[2] = actionArgs.getKeyDouble("dz", dxyz_[1]);
  // First mask is the solute (required).
  std::string maskExpr = actionArgs.GetMaskNext();
  if (maskExpr.empty()) {
    mprinterr("Error: No solute mask specified.\n");
    return Action::ERR;
  }
  soluteMask_.SetMaskString( maskExpr );
  // Second mask is the solvent; defaults to water oxygens.
  maskExpr = actionArgs.GetMaskNext();
  if (maskExpr.empty())
    maskExpr.assign(":WAT@O");
  solventMask_.SetMaskString( maskExpr );
  // Grid Data Set
  grid_ = init.DSL().AddSet(DataSet::GRID_FLT, actionArgs.GetStringNext(), "Channel");
  if (grid_ == 0) return Action::ERR;
  if (gridFile != 0)
    gridFile->AddDataSet( grid_ );
  // Status
  mprintf("Warning: *** THIS ACTION IS EXPERIMENTAL AND NOT FULLY IMPLEMENTED. ***\n");
  mprintf(" CHANNEL: Solute mask [%s], solvent mask [%s]\n",
          soluteMask_.MaskString(), solventMask_.MaskString());
  mprintf("\tSpacing: XYZ={ %g %g %g }\n", dxyz_[0], dxyz_[1], dxyz_[2]);
  return Action::OK;
}
// Action_AreaPerMol::Init() Action::RetType Action_AreaPerMol::Init(ArgList& actionArgs, TopologyList* PFL, DataSetList* DSL, DataFileList* DFL, int debugIn) { // Get keywords DataFile* outfile = DFL->AddDataFile( actionArgs.GetStringKey("out"), actionArgs ); if (actionArgs.hasKey("xy")) areaType_ = XY; else if (actionArgs.hasKey("xz")) areaType_ = XZ; else if (actionArgs.hasKey("yz")) areaType_ = YZ; else areaType_ = XY; Nmols_ = (double)actionArgs.getKeyInt("nmols", -1); // Get Masks if (Nmols_ < 0.0) { Nlayers_ = (double)actionArgs.getKeyInt("nlayers", 1); if (Nlayers_ < 1.0) { mprinterr("Error: Number of layers must be > 0\n"); return Action::ERR; } Mask1_.SetMaskString( actionArgs.GetMaskNext() ); } // DataSet area_per_mol_ = DSL->AddSet(DataSet::DOUBLE, actionArgs.GetStringNext(),"APM"); if (area_per_mol_==0) return Action::ERR; // Add DataSet to DataFileList if (outfile != 0) outfile->AddDataSet( area_per_mol_ ); mprintf(" AREAPERMOL: Calculating %s area per molecule", APMSTRING[areaType_]); if (Mask1_.MaskStringSet()) mprintf(" using mask '%s', %.0f layers.\n", Mask1_.MaskString(), Nlayers_); else mprintf(" for %.0f mols\n", Nmols_); return Action::OK; }
// Action_Grid::Init()
/** Set up density grid binning: grid geometry (via GridInit), extra options
  * (max/madura/smoothdensity/invert/pdb/density), normalization mode, the
  * atom mask, and the output file. On error paths after the grid set has
  * been created it is removed from the DataSetList again.
  */
Action::RetType Action_Grid::Init(ArgList& actionArgs, ActionInit& init, int debugIn)
{
  debug_ = debugIn;
  nframes_ = 0;
  // Get output filename
  std::string filename = actionArgs.GetStringKey("out");
  // Get grid options
  grid_ = GridInit( "GRID", actionArgs, init.DSL() );
  if (grid_ == 0) return Action::ERR;
# ifdef MPI
  if (ParallelGridInit(init.TrajComm(), grid_)) return Action::ERR;
# endif
  // Get extra options
  max_ = actionArgs.getKeyDouble("max", 0.80);
  madura_ = actionArgs.getKeyDouble("madura", 0);
  smooth_ = actionArgs.getKeyDouble("smoothdensity", 0);
  invert_ = actionArgs.hasKey("invert");
  pdbfile_ = init.DFL().AddCpptrajFile(actionArgs.GetStringKey("pdb"),"Grid PDB",DataFileList::PDB,true);
  // Default density: bulk water number density (molecules/Ang^3).
  // NOTE(review): 0.033456 presumably corresponds to ~0.997 g/mL water; confirm.
  density_ = actionArgs.getKeyDouble("density",0.033456);
  // Normalization mode: per-frame, to a target density, or none (default).
  if (actionArgs.hasKey("normframe")) normalize_ = TO_FRAME;
  else if (actionArgs.hasKey("normdensity")) normalize_ = TO_DENSITY;
  else normalize_ = NONE;
  // Normalization and smoothing/madura are mutually exclusive.
  if (normalize_ != NONE && (smooth_ > 0.0 || madura_ > 0.0)) {
    mprinterr("Error: Normalize options are not compatible with smoothdensity/madura options.\n");
    init.DSL().RemoveSet( grid_ );
    return Action::ERR;
  }
  // Get mask
  std::string maskexpr = actionArgs.GetMaskNext();
  if (maskexpr.empty()) {
    mprinterr("Error: GRID: No mask specified.\n");
    init.DSL().RemoveSet( grid_ );
    return Action::ERR;
  }
  mask_.SetMaskString(maskexpr);
  // Setup output file
  // For backwards compat., if no 'out' assume next string is filename
  if (filename.empty() && actionArgs.Nargs() > 1 && !actionArgs.Marked(1))
    filename = actionArgs.GetStringNext();
  DataFile* outfile = init.DFL().AddDataFile(filename, actionArgs);
  if (outfile != 0) outfile->AddDataSet((DataSet*)grid_);
  // Info
  mprintf(" GRID:\n");
  GridInfo( *grid_ );
  if (outfile != 0)
    mprintf("\tGrid will be printed to file %s\n", outfile->DataFilename().full());
  mprintf("\tGrid data set: '%s'\n", grid_->legend());
  mprintf("\tMask expression: [%s]\n",mask_.MaskString());
  if (pdbfile_ != 0)
    mprintf("\tPseudo-PDB will be printed to %s\n", pdbfile_->Filename().full());
  if (normalize_ == TO_FRAME)
    mprintf("\tGrid will be normalized by number of frames.\n");
  else if (normalize_ == TO_DENSITY)
    mprintf("\tGrid will be normalized to a density of %g molecules/Ang^3.\n", density_);
  // TODO: print extra options
  return Action::OK;
}
// Action_FilterByData::Init() Action::RetType Action_FilterByData::Init(ArgList& actionArgs, ActionInit& init, int debugIn) { maxmin_ = init.DSL().AddSet( DataSet::INTEGER, actionArgs.GetStringKey("name"), "Filter" ); if (maxmin_ == 0) return Action::ERR; DataFile* maxminfile = init.DFL().AddDataFile( actionArgs.GetStringKey("out"), actionArgs ); if (maxminfile != 0) maxminfile->AddDataSet( maxmin_ ); // Get min and max args. while (actionArgs.Contains("min")) Min_.push_back( actionArgs.getKeyDouble("min", 0.0) ); while (actionArgs.Contains("max")) Max_.push_back( actionArgs.getKeyDouble("max", 0.0) ); if (Min_.empty()) { mprinterr("Error: At least one 'min' arg must be specified.\n"); return Action::ERR; } if (Max_.empty()) { mprinterr("Error: At least one 'max' arg must be specified.\n"); return Action::ERR; } if (Min_.size() != Max_.size()) { mprinterr("Error: # of 'min' args (%zu) != # of 'max' args (%zu)\n", Min_.size(), Max_.size()); return Action::ERR; } // Get DataSets from remaining arguments Dsets_.AddSetsFromArgs( actionArgs.RemainingArgs(), init.DSL() ); if (Dsets_.empty()) { mprinterr("Error: No data sets specified.\n"); return Action::ERR; } if ( Dsets_.size() < Min_.size() ) { mprinterr("Error: More 'min'/'max' args (%zu) than data sets (%zu).\n", Min_.size(), Dsets_.size()); return Action::ERR; } if ( Dsets_.size() > Min_.size() ) { unsigned int Nremaining = Dsets_.size() - Min_.size(); double useMin = Min_.back(); double useMax = Max_.back(); mprintf("Warning: More data sets than 'min'/'max' args.\n" "Warning: Using min=%f and max=%f for last %zu data sets.\n", useMin, useMax, Nremaining); for (unsigned int ds = 0; ds < Nremaining; ++ds) { Min_.push_back( useMin ); Max_.push_back( useMax ); } } mprintf(" FILTER: Filtering out frames using %zu data sets.\n", Dsets_.size()); for (unsigned int ds = 0; ds < Dsets_.size(); ds++) mprintf("\t%.4f < '%s' < %.4f\n", Min_[ds], Dsets_[ds]->legend(), Max_[ds]); if (maxminfile != 0) mprintf("\tFilter frame 
info will be written to %s\n", maxminfile->DataFilename().full()); return Action::OK; }
// Action_VelocityAutoCorr::Init()
/** Parse keywords/mask and create the output sets for the velocity
  * autocorrelation function (VAC) and the derived diffusion constant.
  * Velocity info in frames is used by default; 'usecoords' forces
  * estimating velocities from coordinates instead.
  */
Action::RetType Action_VelocityAutoCorr::Init(ArgList& actionArgs, ActionInit& init, int debugIn)
{
  // 'usevelocity' is now the default behavior; reject the obsolete keyword
  // explicitly so old scripts fail loudly instead of silently changing.
  if (actionArgs.hasKey("usevelocity")) {
    mprinterr("Error: The 'usevelocity' keyword is deprecated. Velocity information\n"
              "Error: is now used by default if present. To force cpptraj to use\n"
              "Error: coordinates to estimate velocities (not recommended) use the\n"
              "Error: 'usecoords' keyword.\n");
    return Action::ERR;
  }
  useVelInfo_ = !actionArgs.hasKey("usecoords");
  if (mask_.SetMaskString( actionArgs.GetMaskNext() )) return Action::ERR;
  DataFile* outfile = init.DFL().AddDataFile( actionArgs.GetStringKey("out"), actionArgs );
  diffout_ = init.DFL().AddCpptrajFile( actionArgs.GetStringKey("diffout"),
                                        "VAC diffusion constants",
                                        DataFileList::TEXT, true );
  maxLag_ = actionArgs.getKeyInt("maxlag", -1);   // -1: use half # frames later
  tstep_ = actionArgs.getKeyDouble("tstep", 1.0); // time between frames, ps
  useFFT_ = !actionArgs.hasKey("direct");
  normalize_ = actionArgs.hasKey("norm");
  // Set up output data set
  VAC_ = init.DSL().AddSet(DataSet::DOUBLE, actionArgs.GetStringNext(), "VAC");
  if (VAC_ == 0) return Action::ERR;
  // TODO: This should just be a scalar
  diffConst_ = init.DSL().AddSet(DataSet::DOUBLE,
                                 MetaData(VAC_->Meta().Name(), "D", MetaData::NOT_TS));
  if (diffConst_ == 0) return Action::ERR;
  if (outfile != 0) outfile->AddDataSet( VAC_ );
# ifdef MPI
  trajComm_ = init.TrajComm();
  if (trajComm_.Size() > 1 && !useVelInfo_)
    mprintf("\nWarning: When calculating velocities between consecutive frames,\n"
            "\nWarning: 'velocityautocorr' in parallel will not work correctly if\n"
            "\nWarning: coordinates have been modified by previous actions (e.g. 'rms').\n\n");
  // NOTE(review): sync disabled for the diffusion-constant set — presumably
  // it is produced identically (or only on the master rank); confirm.
  diffConst_->SetNeedsSync( false );
# endif
  // Status
  mprintf(" VELOCITYAUTOCORR:\n"
          "\tCalculate velocity auto-correlation function for atoms in mask '%s'\n",
          mask_.MaskString());
  if (useVelInfo_)
    mprintf("\tUsing velocity information present in frames.\n");
  else
    mprintf("\tCalculating velocities between consecutive frames from coordinates.\n");
  if (outfile != 0)
    mprintf("\tOutput velocity autocorrelation function '%s' to '%s'\n", VAC_->legend(),
            outfile->DataFilename().full());
  mprintf("\tWriting diffusion constants to '%s'\n", diffout_->Filename().full());
  if (maxLag_ < 1)
    mprintf("\tMaximum lag will be half total # of frames");
  else
    mprintf("\tMaximum lag is %i frames", maxLag_);
  mprintf(", time step between frames is %f ps\n", tstep_);
  if (useFFT_)
    mprintf("\tUsing FFT to calculate autocorrelation function.\n");
  else
    mprintf("\tUsing direct method to calculate autocorrelation function.\n");
  if (normalize_)
    mprintf("\tNormalizing autocorrelation function to 1.0\n");
  return Action::OK;
}
// Analysis_Wavelet::Setup
/** Set up wavelet analysis on a COORDS set: scaling parameters (nb, s0, ds),
  * correction/chi^2 options, wavelet type (default Morlet), atom mask, and
  * the float-matrix output set.
  */
Analysis::RetType Analysis_Wavelet::Setup(ArgList& analyzeArgs, DataSetList* datasetlist, TopologyList* PFLin, DataFileList* DFLin, int debugIn)
{
  // Attempt to get COORDS DataSet from DataSetList. If none specified the
  // default COORDS set will be used.
  std::string setname = analyzeArgs.GetStringKey("crdset");
  coords_ = (DataSet_Coords*)datasetlist->FindCoordsSet( setname );
  if (coords_ == 0) {
    mprinterr("Error: Could not locate COORDS set corresponding to %s\n", setname.c_str());
    return Analysis::ERR;
  }
  // Get keywords
  DataFile* outfile = DFLin->AddDataFile( analyzeArgs.GetStringKey("out"), analyzeArgs );
  // 'setname' is reused here for the output set name.
  setname = analyzeArgs.GetStringKey("name");
  // TODO: Check defaults
  // Number of wavelet scalings; must be given (default 0 fails the check).
  nb_ = analyzeArgs.getKeyInt("nb", 0); // FIXME: Should be more descriptive? nscale?
  if (nb_ < 1) {
    mprinterr("Error: Scaling number must be > 0\n");
    return Analysis::ERR;
  }
  S0_ = analyzeArgs.getKeyDouble("s0", 0.2);          // starting scale
  ds_ = analyzeArgs.getKeyDouble("ds", 1.0/3.0);      // scale increment
  correction_ = analyzeArgs.getKeyDouble("correction", 1.01);
  chival_ = analyzeArgs.getKeyDouble("chival", 0.2231);
  // Wavelet type: default to Morlet
  std::string wavelet_name = analyzeArgs.GetStringKey("type");
  if (wavelet_name.empty())
    wavelet_type_ = W_MORLET;
  else {
    // Linear search of the token table for a matching keyword.
    wavelet_type_ = W_NONE;
    for (int itoken = 0; itoken != (int)W_NONE; itoken++)
      if (wavelet_name.compare(Tokens_[itoken].key_) == 0) {
        wavelet_type_ = (WaveletType)itoken;
        break;
      }
    if (wavelet_type_ == W_NONE) {
      mprinterr("Error: Unrecognized wavelet type: %s\n", wavelet_name.c_str());
      return Analysis::ERR;
    }
  }
  // Atom mask
  mask_.SetMaskString( analyzeArgs.GetMaskNext() );
  // Set up output data set
  output_ = datasetlist->AddSet( DataSet::MATRIX_FLT, setname, "WAVELET" );
  if (output_ == 0) return Analysis::ERR;
  if (outfile != 0) outfile->AddDataSet( output_ );
  // Status
  mprintf(" WAVELET: Using COORDS set '%s', wavelet type %s\n",
          coords_->legend(), Tokens_[wavelet_type_].description_);
  mprintf("\tCalculating for atoms in mask '%s'\n", mask_.MaskString());
  mprintf("\tScaling wavelet %i times starting from %g with delta of %g\n",
          nb_, S0_, ds_);
  mprintf("\tCorrection: %g\n", correction_);
  mprintf("\tChiVal: %g\n", chival_);
  if (outfile != 0) mprintf("\tOutput to '%s'\n", outfile->DataFilename().full());
  return Analysis::OK;
}
// Action_AtomicFluct::Init() Action::RetType Action_AtomicFluct::Init(ArgList& actionArgs, TopologyList* PFL, DataSetList* DSL, DataFileList* DFL, int debugIn) { // Get frame # keywords if (InitFrameCounter(actionArgs)) return Action::ERR; // Get other keywords bfactor_ = actionArgs.hasKey("bfactor"); calc_adp_ = actionArgs.hasKey("calcadp"); adpoutfile_ = DFL->AddCpptrajFile(actionArgs.GetStringKey("adpout"), "PDB w/ADP", DataFileList::PDB);; if (adpoutfile_!=0) calc_adp_ = true; // adpout implies calcadp if (calc_adp_ && !bfactor_) bfactor_ = true; DataFile* outfile = DFL->AddDataFile( actionArgs.GetStringKey("out"), actionArgs ); if (actionArgs.hasKey("byres")) outtype_ = BYRES; else if (actionArgs.hasKey("bymask")) outtype_ = BYMASK; else if (actionArgs.hasKey("byatom") || actionArgs.hasKey("byatm")) outtype_ = BYATOM; // Get Mask Mask_.SetMaskString( actionArgs.GetMaskNext() ); // Get DataSet name std::string setname = actionArgs.GetStringNext(); // Add output dataset MetaData md( setname ); md.SetTimeSeries( MetaData::NOT_TS ); if (bfactor_) md.SetLegend("B-factors"); else md.SetLegend("AtomicFlx"); dataout_ = DSL->AddSet( DataSet::XYMESH, md, "Fluct" ); if (dataout_ == 0) { mprinterr("Error: AtomicFluct: Could not allocate dataset for output.\n"); return Action::ERR; } if (outfile != 0) outfile->AddDataSet( dataout_ ); mprintf(" ATOMICFLUCT: calculating"); if (bfactor_) mprintf(" B factors"); else mprintf(" atomic positional fluctuations"); if (outfile != 0) mprintf(", output to file %s", outfile->DataFilename().full()); mprintf("\n Atom mask: [%s]\n",Mask_.MaskString()); FrameCounterInfo(); if (calc_adp_) { mprintf("\tCalculating anisotropic displacement parameters.\n"); if (adpoutfile_!=0) mprintf("\tWriting PDB with ADP to '%s'\n", adpoutfile_->Filename().full()); } if (!setname.empty()) mprintf("\tData will be saved to set named %s\n", setname.c_str()); return Action::OK; }
/** Set up auto-correlation (or auto-covariance) calculation for each
  * selected data set; one output set is created per input set.
  */
Analysis::RetType Analysis_AutoCorr::Setup(ArgList& analyzeArgs, DataSetList* datasetlist, TopologyList* PFLin, DataFileList* DFLin, int debugIn)
{
  const char* calctype;
  std::string setname = analyzeArgs.GetStringKey("name");
  DataFile* outfile = DFLin->AddDataFile( analyzeArgs.GetStringKey("out"), analyzeArgs );
  lagmax_ = analyzeArgs.getKeyInt("lagmax",-1);    // -1: no explicit max lag
  calc_covar_ = !analyzeArgs.hasKey("nocovar");
  usefft_ = !analyzeArgs.hasKey("direct");
  // Select datasets from remaining args
  ArgList dsetArgs = analyzeArgs.RemainingArgs();
  for (ArgList::const_iterator dsa = dsetArgs.begin(); dsa != dsetArgs.end(); ++dsa)
    dsets_ += datasetlist->GetMultipleSets( *dsa );
  if (dsets_.empty()) {
    mprinterr("Error: autocorr: No data sets selected.\n");
    return Analysis::ERR;
  }
  // If setname is empty generate a default name
  if (setname.empty())
    setname = datasetlist->GenerateDefaultName( "autocorr" );
  // Setup output datasets, one per input set, indexed by position.
  int idx = 0;
  MetaData md( setname );
  for (DataSetList::const_iterator DS = dsets_.begin(); DS != dsets_.end(); ++DS) {
    md.SetIdx( idx++ );
    DataSet* dsout = datasetlist->AddSet( DataSet::DOUBLE, md );
    if (dsout==0) return Analysis::ERR;
    dsout->SetLegend( (*DS)->Meta().Legend() );
    outputData_.push_back( dsout );
    // Add set to output file
    if (outfile != 0) outfile->AddDataSet( outputData_.back() );
  }
  if (calc_covar_)
    calctype = "covariance";
  else
    calctype = "correlation";
  // BUGFIX: dsets_.size() is an unsigned size type; cast to int so the
  // argument matches the %i conversion (mismatch is undefined behavior).
  mprintf(" AUTOCORR: Calculating auto-%s for %i data sets:\n", calctype, (int)dsets_.size());
  dsets_.List();
  if (lagmax_!=-1) mprintf("\tLag max= %i\n", lagmax_);
  if ( !setname.empty() ) mprintf("\tSet name: %s\n", setname.c_str() );
  if ( outfile != 0 ) mprintf("\tOutfile name: %s\n", outfile->DataFilename().base());
  if (usefft_)
    mprintf("\tUsing FFT to calculate %s.\n", calctype);
  else
    mprintf("\tUsing direct method to calculate %s.\n", calctype);
  return Analysis::OK;
}
// Action_SymmetricRmsd::Init() Action::RetType Action_SymmetricRmsd::Init(ArgList& actionArgs, ActionInit& init, int debugIn) { // Check for keywords bool fit = !actionArgs.hasKey("nofit"); bool useMass = actionArgs.hasKey("mass"); DataFile* outfile = init.DFL().AddDataFile(actionArgs.GetStringKey("out"), actionArgs); remap_ = actionArgs.hasKey("remap"); // Reference keywords bool previous = actionArgs.hasKey("previous"); bool first = actionArgs.hasKey("first"); ReferenceFrame REF = init.DSL().GetReferenceFrame( actionArgs ); std::string reftrajname = actionArgs.GetStringKey("reftraj"); Topology* RefParm = init.DSL().GetTopology( actionArgs ); // Get the RMS mask string for target std::string tMaskExpr = actionArgs.GetMaskNext(); if (tgtMask_.SetMaskString( tMaskExpr )) return Action::ERR; // Initialize Symmetric RMSD calc. if (SRMSD_.InitSymmRMSD( fit, useMass, debugIn )) return Action::ERR; // Initialize reference std::string rMaskExpr = actionArgs.GetMaskNext(); if (rMaskExpr.empty()) rMaskExpr = tMaskExpr; if (REF_.InitRef(previous, first, useMass, fit, reftrajname, REF, RefParm, rMaskExpr, actionArgs, "symmrmsd")) return Action::ERR; // Set up the RMSD data set. MetaData md(actionArgs.GetStringNext(), MetaData::M_RMS); rmsd_ = init.DSL().AddSet(DataSet::DOUBLE, md, "RMSD"); if (rmsd_==0) return Action::ERR; // Add dataset to data file list if (outfile != 0) outfile->AddDataSet( rmsd_ ); if (remap_ || SRMSD_.Fit()) action_return_ = Action::MODIFY_COORDS; else action_return_ = Action::OK; mprintf(" SYMMRMSD: (%s), reference is %s", tgtMask_.MaskString(), REF_.RefModeString()); if (!SRMSD_.Fit()) mprintf(", no fitting"); else mprintf(", with fitting"); if (SRMSD_.UseMass()) mprintf(", mass-weighted"); mprintf(".\n"); if (remap_) mprintf("\tAtoms will be re-mapped for symmetry.\n"); return Action::OK; }
void Action_Spam::Print() { // Print the spam info file if we didn't do pure water if (!purewater_) { // Warn about any overflows if (overflow_) mprinterr("Warning: SPAM: Some frames had a box too small for the cutoff.\n"); // Print information about each missing peak infofile_->Printf("# There are %d density peaks and %d frames\n\n", (int)peaks_.size(), Nframes_); // Loop over every Data set for (unsigned int i = 0; i < peakFrameData_.size(); i++) { // Skip peaks with 0 unoccupied sites if (peakFrameData_[i].size() == 0) continue; // Find out how many double-occupied frames there are int ndouble = 0; for (unsigned int j = 0; j < peakFrameData_[i].size(); j++) if (peakFrameData_[i][j] < 0) ndouble++; infofile_->Printf("# Peak %u has %d omitted frames (%d double-occupied)\n", i, peakFrameData_[i].size(), ndouble); for (unsigned int j = 0; j < peakFrameData_[i].size(); j++) { if (j > 0 && j % 10 == 0) infofile_->Printf("\n"); infofile_->Printf("%7d ", peakFrameData_[i][j]); } infofile_->Printf("\n\n"); } } // Print the summary file with the calculated SPAM energies if (!summaryfile_.empty()) { // Not enabled yet -- just print out the data files. mprinterr("Warning: SPAM: SPAM calculation not yet enabled.\n"); if (datafile_.empty()) datafile_ = summaryfile_; } // Now print the energy data if (!datafile_.empty()) { // Now write the data file with all of the SPAM energies DataFile dfl; ArgList dummy; dfl.SetupDatafile(datafile_, dummy, 0); for (int i = 0; i < (int)myDSL_.size(); i++) { dfl.AddDataSet(myDSL_[i]); } dfl.WriteDataOut(); } }
// Action_Distance::Init() Action::RetType Action_Distance::Init(ArgList& actionArgs, ActionInit& init, int debugIn) { AssociatedData_NOE noe; // Get Keywords image_.InitImaging( !(actionArgs.hasKey("noimage")) ); useMass_ = !(actionArgs.hasKey("geom")); DataFile* outfile = init.DFL().AddDataFile( actionArgs.GetStringKey("out"), actionArgs ); MetaData::scalarType stype = MetaData::UNDEFINED; std::string stypename = actionArgs.GetStringKey("type"); if ( stypename == "noe" ) { stype = MetaData::NOE; if (noe.NOE_Args( actionArgs )) return Action::ERR; } // Get Masks std::string mask1 = actionArgs.GetMaskNext(); std::string mask2 = actionArgs.GetMaskNext(); if (mask1.empty() || mask2.empty()) { mprinterr("Error: distance requires 2 masks\n"); return Action::ERR; } Mask1_.SetMaskString(mask1); Mask2_.SetMaskString(mask2); // Dataset to store distances TODO store masks in data set? dist_ = init.DSL().AddSet(DataSet::DOUBLE, MetaData(actionArgs.GetStringNext(), MetaData::M_DISTANCE, stype), "Dis"); if (dist_==0) return Action::ERR; if ( stype == MetaData::NOE ) { dist_->AssociateData( &noe ); dist_->SetLegend(Mask1_.MaskExpression() + " and " + Mask2_.MaskExpression()); } // Add dataset to data file if (outfile != 0) outfile->AddDataSet( dist_ ); mprintf(" DISTANCE: %s to %s",Mask1_.MaskString(), Mask2_.MaskString()); if (!image_.UseImage()) mprintf(", non-imaged"); if (useMass_) mprintf(", center of mass"); else mprintf(", geometric center"); mprintf(".\n"); return Action::OK; }
// Analysis_VectorMath::Setup() Analysis::RetType Analysis_VectorMath::Setup(ArgList& analyzeArgs, DataSetList* DSLin, DataFileList* DFLin, int debugIn) { // Get Vectors vinfo1_ = (DataSet_Vector*)DSLin->FindSetOfType( analyzeArgs.GetStringKey("vec1"), DataSet::VECTOR ); vinfo2_ = (DataSet_Vector*)DSLin->FindSetOfType( analyzeArgs.GetStringKey("vec2"), DataSet::VECTOR ); if (vinfo1_ == 0 ) { mprinterr("Error: 'vec1' not found.\n"); return Analysis::ERR; } if (vinfo2_ == 0) { mprinterr("Error: 'vec2' not found.\n"); return Analysis::ERR; } std::string setname = analyzeArgs.GetStringKey("name"); norm_ = analyzeArgs.hasKey("norm"); // Check for dotproduct/crossproduct keywords DataOut_ = 0; if (analyzeArgs.hasKey("dotproduct")) { mode_ = DOTPRODUCT; if ((DataOut_ = DSLin->AddSet(DataSet::DOUBLE, setname, "Dot")) == 0) return Analysis::ERR; } else if (analyzeArgs.hasKey("dotangle")) { mode_ = DOTANGLE; norm_ = true; // Vecs must be normalized for angle calc to work if ((DataOut_ = DSLin->AddSet(DataSet::DOUBLE, setname, "Angle")) == 0) return Analysis::ERR; } else if (analyzeArgs.hasKey("crossproduct")) { mode_ = CROSSPRODUCT; if ((DataOut_ = DSLin->AddSet(DataSet::VECTOR, setname, "Cross")) == 0) return Analysis::ERR; } else mode_ = DOTPRODUCT; // Set up output file in DataFileList if necessary DataFile* outfile = DFLin->AddDataFile( analyzeArgs.GetStringKey("out"), analyzeArgs ); if (outfile != 0) outfile->AddDataSet( DataOut_ ); // Print Status mprintf(" VECTORMATH: Calculating %s of vectors %s and %s\n", ModeString[mode_], vinfo1_->legend(), vinfo2_->legend()); if (norm_) mprintf("\tVectors will be normalized.\n"); if (outfile != 0) mprintf("\tResults are written to %s\n", outfile->DataFilename().full()); return Analysis::OK; }
// Action_VelocityAutoCorr::Init() Action::RetType Action_VelocityAutoCorr::Init(ArgList& actionArgs, ActionInit& init, int debugIn) { useVelInfo_ = actionArgs.hasKey("usevelocity"); mask_.SetMaskString( actionArgs.GetMaskNext() ); DataFile* outfile = init.DFL().AddDataFile( actionArgs.GetStringKey("out"), actionArgs ); maxLag_ = actionArgs.getKeyInt("maxlag", -1); tstep_ = actionArgs.getKeyDouble("tstep", 1.0); useFFT_ = !actionArgs.hasKey("direct"); normalize_ = actionArgs.hasKey("norm"); // Set up output data set VAC_ = init.DSL().AddSet(DataSet::DOUBLE, actionArgs.GetStringNext(), "VAC"); if (VAC_ == 0) return Action::ERR; if (outfile != 0) outfile->AddDataSet( VAC_ ); # ifdef MPI trajComm_ = init.TrajComm(); if (trajComm_.Size() > 1 && !useVelInfo_) mprintf("\nWarning: When calculating velocities between consecutive frames,\n" "\nWarning: 'velocityautocorr' in parallel will not work correctly if\n" "\nWarning: coordinates have been modified by previous actions (e.g. 'rms').\n\n"); # endif mprintf(" VELOCITYAUTOCORR:\n" "\tCalculate velocity auto-correlation function for atoms in mask '%s'\n", mask_.MaskString()); if (useVelInfo_) mprintf("\tUsing velocity information present in frames.\n"); else mprintf("\tCalculating velocities between consecutive frames.\n"); if (outfile != 0) mprintf("\tOutput data set '%s' to '%s'\n", VAC_->legend(), outfile->DataFilename().full()); if (maxLag_ < 1) mprintf("\tMaximum lag will be half total # of frames"); else mprintf("\tMaximum lag is %i frames", maxLag_); mprintf(", time step is %f ps\n", tstep_); if (useFFT_) mprintf("\tUsing FFT to calculate autocorrelation function.\n"); else mprintf("\tUsing direct method to calculate autocorrelation function.\n"); if (normalize_) mprintf("\tNormalizing autocorrelation function to 1.0\n"); return Action::OK; }
// Action_GridFreeEnergy::init() Action::RetType Action_GridFreeEnergy::Init(ArgList& actionArgs, ActionInit& init, int debugIn) { // Get output filename DataFile* outfile = init.DFL().AddDataFile(actionArgs.GetStringNext(), actionArgs); if (outfile == 0) { mprinterr("Error: GridFreeEnergy: no output filename specified.\n"); return Action::ERR; } // Get grid options (<nx> <dx> <ny> <dy> <nz> <dz> [box|origin] [negative]) grid_ = GridInit( "GridFreeEnergy", actionArgs, init.DSL() ); if (grid_ == 0) return Action::ERR; # ifdef MPI if (ParallelGridInit(init.TrajComm(), grid_)) return Action::ERR; # endif //grid_.PrintXplor( filename_, "", "REMARKS Change in Free energy from bulk solvent with bin normalisation of " + integerToString(currentLargestVoxelOccupancyCount) ); // Get mask std::string maskexpr = actionArgs.GetMaskNext(); if (maskexpr.empty()) { mprinterr("Error: GridFreeEnergy: No mask specified.\n"); init.DSL().RemoveSet( grid_ ); return Action::ERR; } mask_.SetMaskString(maskexpr); // Get extra args tempInKevin_ = actionArgs.getKeyDouble("temp", 293.0); outfile->AddDataSet( grid_ ); // Info mprintf("Warning: DNAIONTRACKER is experimental code!\n"); mprintf(" GridFreeEnergy\n"); GridInfo( *grid_ ); mprintf("\tGrid will be printed to file %s\n",outfile->DataFilename().full()); mprintf("\tMask expression: [%s]\n",mask_.MaskString()); mprintf("\tTemp is : %f K\n",tempInKevin_); // Allocate grid //if (GridAllocate()) return 1; return Action::OK; }
/** Alternative setup entry point used when another analysis drives the KDE
  * directly (no ArgList parsing): all histogram parameters are passed in.
  * A temperature other than -1.0 switches the output to free energy.
  * \param dsIn Input 1D data set (required; error if null).
  * \param histname Base name for the output set; auto-generated if empty.
  * \param setidx Index stored in the output set MetaData.
  * \param outfilenameIn Optional output data file name.
  */
Analysis::RetType Analysis_KDE::ExternalSetup(DataSet_1D* dsIn, std::string const& histname, int setidx, std::string const& outfilenameIn, bool minArgSetIn, double minIn, bool maxArgSetIn, double maxIn, double stepIn, int binsIn, double tempIn, DataSetList& datasetlist, DataFileList& DFLin)
{
  if (dsIn == 0) return Analysis::ERR;
  data_ = dsIn;
  // This entry point handles a single set: no second set, KL divergence,
  // or AMD data.
  q_data_ = 0;
  kldiv_ = 0;
  amddata_ = 0;
  bandwidth_ = -1.0;   // -1: bandwidth to be determined later
  // Min/max are only honored when their corresponding 'set' flag is true.
  minArgSet_ = minArgSetIn;
  if (minArgSet_)
    default_min_ = minIn;
  maxArgSet_ = maxArgSetIn;
  if (maxArgSet_)
    default_max_ = maxIn;
  default_step_ = stepIn;
  default_bins_ = binsIn;
  Temp_ = tempIn;
  // Temperature sentinel -1.0 means plain KDE; otherwise compute free energy.
  if (Temp_ != -1.0)
    calcFreeE_ = true;
  else
    calcFreeE_ = false;
  std::string setname = histname;
  // Prefix reflects the output type in the default name and legend.
  std::string htype;
  if (calcFreeE_)
    htype = "FreeE_";
  else
    htype = "KDE_";
  if (setname.empty())
    setname = datasetlist.GenerateDefaultName(htype + dsIn->Meta().Name());
  DataFile* outfile = DFLin.AddDataFile( outfilenameIn );
  output_ = datasetlist.AddSet(DataSet::DOUBLE, MetaData(setname, dsIn->Meta().Aspect(), setidx));
  if (output_ == 0) return Analysis::ERR;
  output_->SetLegend(htype + dsIn->Meta().Legend());
  if (outfile != 0) outfile->AddDataSet( output_ );
  return Analysis::OK;
}
// Action_Surf::Init() Action::RetType Action_Surf::Init(ArgList& actionArgs, TopologyList* PFL, DataSetList* DSL, DataFileList* DFL, int debugIn) { // Get keywords DataFile* outfile = DFL->AddDataFile( actionArgs.GetStringKey("out"), actionArgs); // Get Masks Mask1_.SetMaskString( actionArgs.GetMaskNext() ); // Dataset to store surface area surf_ = DSL->AddSet(DataSet::DOUBLE, actionArgs.GetStringNext(), "SA"); if (surf_==0) return Action::ERR; // Add dataset to data file list if (outfile != 0) outfile->AddDataSet( surf_ ); mprintf(" SURF: Calculating surface area for atoms in mask [%s]\n",Mask1_.MaskString()); mprintf("#Citation: Weiser, J.; Shenkin, P. S.; Still, W. C.; \"Approximate atomic\n" "# surfaces from linear combinations of pairwise overlaps (LCPO).\"\n" "# J. Comp. Chem. (1999), V.20, pp.217-230.\n"); return Action::OK; }
// Analysis_FFT::Setup() Analysis::RetType Analysis_FFT::Setup(ArgList& analyzeArgs, DataSetList* datasetlist, DataFileList* DFLin, int debugIn) { std::string setname = analyzeArgs.GetStringKey("name"); DataFile* outfile = DFLin->AddDataFile(analyzeArgs.GetStringKey("out"), analyzeArgs); dt_ = analyzeArgs.getKeyDouble("dt",1.0); // Select datasets from remaining args if (input_dsets_.AddSetsFromArgs( analyzeArgs.RemainingArgs(), *datasetlist )) { mprinterr("Error: Could not add data sets.\n"); return Analysis::ERR; } if (input_dsets_.empty()) { mprinterr("Error: No input data sets.\n"); return Analysis::ERR; } // If setname is empty generate a default name if (setname.empty()) setname = datasetlist->GenerateDefaultName( "FFT" ); // Setup output datasets. int idx = 0; if ( input_dsets_.size() == 1 ) idx = -1; // Only one input set, no need to refer to it by index for ( Array1D::const_iterator DS = input_dsets_.begin(); DS != input_dsets_.end(); ++DS) { DataSet* dsout = datasetlist->AddSet( DataSet::DOUBLE, MetaData(setname, idx++) ); if (dsout==0) return Analysis::ERR; dsout->SetLegend( (*DS)->Meta().Legend() ); output_dsets_.push_back( (DataSet_1D*)dsout ); if (outfile != 0) outfile->AddDataSet( dsout ); } mprintf(" FFT: Calculating FFT for %u data sets.\n", input_dsets_.size()); mprintf("\tTime step: %f\n", dt_); if ( !setname.empty() ) mprintf("\tSet name: %s\n", setname.c_str() ); if ( outfile != 0 ) mprintf("\tOutfile name: %s\n", outfile->DataFilename().base()); return Analysis::OK; }
// Action_Temperature::Init() Action::RetType Action_Temperature::Init(ArgList& actionArgs, TopologyList* PFL, DataSetList* DSL, DataFileList* DFL, int debugIn) { // Keywords if (actionArgs.hasKey("frame")) { getTempFromFrame_ = true; shakeType_ = OFF; degrees_of_freedom_ = 0; } else { getTempFromFrame_ = false; int ntc = actionArgs.getKeyInt("ntc",-1); if (ntc != -1) { if (ntc < 1 || ntc > 3) { mprinterr("Error: temperature: ntc must be 1, 2, or 3\n"); return Action::ERR; } shakeType_ = (ShakeType)(ntc - 1); } else shakeType_ = OFF; } DataFile* outfile = DFL->AddDataFile( actionArgs.GetStringKey("out"), actionArgs ); // Masks if (!getTempFromFrame_) Mask_.SetMaskString( actionArgs.GetMaskNext() ); // DataSet Tdata_ = DSL->AddSet(DataSet::DOUBLE, actionArgs.GetStringNext(), "Tdata"); if (Tdata_ == 0) return Action::ERR; if (outfile != 0) outfile->AddDataSet( Tdata_ ); if (getTempFromFrame_) { mprintf(" TEMPERATURE: Frame temperatures will be saved in data set %s\n", Tdata_->legend()); } else { mprintf(" TEMPERATURE: Calculate temperature for atoms in mask [%s]\n", Mask_.MaskString()); mprintf("\tUsing SHAKE (ntc) value of [%s]\n", ShakeString[shakeType_]); } return Action::OK; }
// Analysis_RemLog::Setup() Analysis::RetType Analysis_RemLog::Setup(ArgList& analyzeArgs, DataSetList* datasetlist, DataFileList* DFLin, int debugIn) { debug_ = debugIn; // Get remlog dataset std::string remlogName = analyzeArgs.GetStringNext(); if (remlogName.empty()) { mprinterr("Error: no remlog data set or file name specified.\n"); return Analysis::ERR; } // Check if data set exists remlog_ = (DataSet_RemLog*)datasetlist->FindSetOfType( remlogName, DataSet::REMLOG ); if (remlog_ == 0) { mprinterr("Error: remlog data with name %s not found.\n", remlogName.c_str()); return Analysis::ERR; } if (remlog_->Size() < 1 || remlog_->NumExchange() < 1) { mprinterr("Error: remlog data set appears to be empty.\n"); return Analysis::ERR; } acceptout_ = DFLin->AddCpptrajFile( analyzeArgs.GetStringKey("acceptout"), "replica acceptance", DataFileList::TEXT, true ); if (acceptout_ == 0) return Analysis::ERR; lifetimes_ = DFLin->AddCpptrajFile( analyzeArgs.GetStringKey("lifetime"), "remlog lifetimes" ); calculateLifetimes_ = (lifetimes_ != 0); calculateStats_ = analyzeArgs.hasKey("stats"); if (calculateStats_) { statsout_ = DFLin->AddCpptrajFile( analyzeArgs.GetStringKey("statsout"), "remlog stats", DataFileList::TEXT, true ); reptime_ = DFLin->AddCpptrajFile( analyzeArgs.GetStringKey("reptime"), "replica times", DataFileList::TEXT, true ); if (statsout_ == 0 || reptime_ == 0) return Analysis::ERR; } calcRepFracSlope_ = analyzeArgs.getKeyInt("reptimeslope", 0); std::string rfs_name = analyzeArgs.GetStringKey("reptimeslopeout"); if (!calculateStats_) { calcRepFracSlope_ = 0; rfs_name.clear(); } if ( (calcRepFracSlope_ > 0) != (!rfs_name.empty()) ) { mprinterr("Error: Both reptimeslope and reptimeslopeout must be specified.\n"); return Analysis::ERR; } repFracSlope_ = DFLin->AddCpptrajFile( rfs_name, "replica fraction slope" ); printIndividualTrips_ = analyzeArgs.hasKey("printtrips"); // Get mode if (analyzeArgs.hasKey("crdidx")) mode_ = CRDIDX; else if 
(analyzeArgs.hasKey("repidx")) mode_ = REPIDX; else mode_ = NONE; const char* def_name = 0; const char* yaxis = 0; if (mode_ == CRDIDX) { def_name = "repidx"; yaxis = "ylabel CrdIdx"; } else if (mode_ == REPIDX) { def_name = "crdidx"; yaxis = "ylabel RepIdx"; } // Set up an output set for each replica DataFile* dfout = 0; if (mode_ != NONE) { // Get output filename std::string outname = analyzeArgs.GetStringKey("out"); if (!outname.empty()) { dfout = DFLin->AddDataFile( outname, analyzeArgs ); if (dfout == 0 ) return Analysis::ERR; if (yaxis != 0 ) dfout->ProcessArgs(yaxis); } std::string dsname = analyzeArgs.GetStringKey("name"); if (dsname.empty()) dsname = datasetlist->GenerateDefaultName(def_name); MetaData md(dsname); for (int i = 0; i < (int)remlog_->Size(); i++) { md.SetIdx(i+1); DataSet_integer* ds = (DataSet_integer*)datasetlist->AddSet(DataSet::INTEGER, md); if (ds == 0) return Analysis::ERR; outputDsets_.push_back( (DataSet*)ds ); if (dfout != 0) dfout->AddDataSet( (DataSet*)ds ); ds->Resize( remlog_->NumExchange() ); } } mprintf(" REMLOG: %s, %i replicas, %i exchanges\n", remlog_->legend(), remlog_->Size(), remlog_->NumExchange()); if (mode_ == CRDIDX) mprintf("\tGetting coordinate index vs exchange.\n"); else if (mode_ == REPIDX) mprintf("\tGetting replica index vs exchange.\n"); if (mode_ != NONE && dfout != 0) mprintf("\tOutput is to %s\n", dfout->DataFilename().base()); if (calculateStats_) { mprintf("\tGetting replica exchange stats, output to %s\n", statsout_->Filename().full()); if (printIndividualTrips_) mprintf("\tIndividual round trips will be printed.\n"); mprintf("\tWriting time spent at each replica to %s\n", reptime_->Filename().full()); } if (calculateLifetimes_) mprintf("\tThe lifetime of each crd at each replica will be calculated.\n"); if (acceptout_ != 0) mprintf("\tOverall exchange acceptance % will be written to %s\n", acceptout_->Filename().full()); return Analysis::OK; }
/** Set up the 'dnaiontracker' action: parse keywords, the four required
  * masks (two phosphates, base centroid, ions), and create the output
  * data set. Note that mask order matters since they are consumed
  * sequentially from the argument list.
  */
Action::RetType Action_DNAionTracker::Init(ArgList& actionArgs, TopologyList* PFL, DataSetList* DSL, DataFileList* DFL, int debugIn)
{
  // Get keywords
  DataFile* outfile = DFL->AddDataFile(actionArgs.GetStringKey("out"), actionArgs);
  // Perpendicular offset of the cone apex (Angstroms).
  poffset_ = actionArgs.getKeyDouble("poffset", 5.0);
  InitImaging( !actionArgs.hasKey("noimage") );
  // Binning mode; if none of these keywords is present bintype_ keeps its
  // prior value — presumably the constructor default; TODO confirm.
  if (actionArgs.hasKey("shortest"))
    bintype_ = SHORTEST;
  else if (actionArgs.hasKey("counttopcone"))
    bintype_ = TOPCONE;
  else if (actionArgs.hasKey("countbottomcone"))
    bintype_ = BOTTOMCONE;
  else if (actionArgs.hasKey("count"))
    bintype_ = COUNT;
  // Get masks - 4 must be specified
  std::string m1 = actionArgs.GetMaskNext();
  std::string m2 = actionArgs.GetMaskNext();
  std::string m3 = actionArgs.GetMaskNext();
  std::string m4 = actionArgs.GetMaskNext();
  if (m1.empty() || m2.empty() || m3.empty() || m4.empty()) {
    mprinterr("Error: dnaiontracker requires 4 masks.\n");
    return Action::ERR;
  }
  // Mask order: phosphate 1, phosphate 2, base centroid, ions.
  p1_.SetMaskString(m1);
  p2_.SetMaskString(m2);
  base_.SetMaskString(m3);
  ions_.SetMaskString(m4);
  // Add dataset to dataset list (and datafile list if filename specified)
  distance_ = DSL->AddSet(DataSet::DOUBLE,
                          MetaData(actionArgs.GetStringNext(), MetaData::M_DISTANCE),
                          "DNAion");
  if (distance_==0) return Action::ERR;
  if (outfile != 0) outfile->AddDataSet( distance_ );
  // INFO
  mprintf(" DNAIONTRACKER: Data representing the ");
  switch (bintype_) {
    case COUNT :
      mprintf("count within the cone will be\n"); break;
    case SHORTEST:
      mprintf("shortest distance to a phosphate or base centroid will be\n"); break;
    case TOPCONE:
      mprintf("count in the top half of the cone (and sort-of bound) will be\n"); break;
    case BOTTOMCONE:
      mprintf("count in the bottom half of the cone will be\n"); break;
  }
  mprintf(" saved to array named %s\n", distance_->legend());
  mprintf(" Perpendicular offset for cone is %5.2f angstroms\n", poffset_);
  if (!UseImage())
    mprintf(" Imaging has been disabled\n");
  mprintf("\tPhosphate1 Mask [%s]\n", p1_.MaskString());
  mprintf("\tPhosphate2 Mask [%s]\n", p2_.MaskString());
  mprintf("\tBase Mask [%s]\n", base_.MaskString());
  mprintf("\tIons Mask [%s]\n", ions_.MaskString());
  if (outfile != 0)
    mprintf("\tData will be printed to a file named %s\n", outfile->DataFilename().full());
  return Action::OK;
}
// -----------------------------------------------------------------------------
// Action_NMRrst::Init()
/** Set up the 'nmrrst' action: parse keywords, optional restraint file,
  * and explicit 'pair' mask pairs; create one distance data set per NOE
  * read from the restraint file. Not MPI-parallelizable (>1 thread is
  * rejected).
  */
Action::RetType Action_NMRrst::Init(ArgList& actionArgs, ActionInit& init, int debugIn)
{
# ifdef MPI
  // This action accumulates per-frame NOE state serially; refuse to run
  // across trajectory threads.
  if (init.TrajComm().Size() > 1) {
    mprinterr("Error: 'nmrrst' action does not work with > 1 thread (%i threads currently).\n",
              init.TrajComm().Size());
    return Action::ERR;
  }
# endif
  debug_ = debugIn;
  // Get Keywords
  Image_.InitImaging( !(actionArgs.hasKey("noimage")) );
  useMass_ = !(actionArgs.hasKey("geom")); // default center of mass; 'geom' = geometric center
  findNOEs_ = actionArgs.hasKey("findnoes");
  findOutput_ = init.DFL().AddCpptrajFile(actionArgs.GetStringKey("findout"), "Found NOEs",
                                          DataFileList::TEXT, true);
  specOutput_ = init.DFL().AddCpptrajFile(actionArgs.GetStringKey("specout"), "Specified NOEs",
                                          DataFileList::TEXT, true);
  if (findOutput_ == 0 || specOutput_ == 0) return Action::ERR;
  // Offset applied to residue numbers read from the restraint file.
  resOffset_ = actionArgs.getKeyInt("resoffset", 0);
  DataFile* outfile = init.DFL().AddDataFile( actionArgs.GetStringKey("out"), actionArgs );
  // Distance cutoffs (Angstroms) for found-NOE classification.
  max_cut_ = actionArgs.getKeyDouble("cut", 6.0);
  strong_cut_ = actionArgs.getKeyDouble("strongcut", 2.9);
  medium_cut_ = actionArgs.getKeyDouble("mediumcut", 3.5);
  weak_cut_ = actionArgs.getKeyDouble("weakcut", 5.0);
  series_ = actionArgs.hasKey("series");
  std::string rstfilename = actionArgs.GetStringKey("file");
  viewrst_ = actionArgs.GetStringKey("viewrst");
  setname_ = actionArgs.GetStringKey("name");
  if (setname_.empty())
    setname_ = init.DSL().GenerateDefaultName("NMR");
  nframes_ = 0;
  // Atom Mask
  Mask_.SetMaskString( actionArgs.GetMaskNext() );
  // Pairs specified on command line. Each 'pair' keyword must be followed
  // by two mask expressions.
  std::string pair1 = actionArgs.GetStringKey("pair");
  while (!pair1.empty()) {
    std::string pair2 = actionArgs.GetStringNext();
    if (pair2.empty()) {
      mprinterr("Error: Only one mask specified for pair (%s)\n", pair1.c_str());
      return Action::ERR;
    }
    Pairs_.push_back( MaskPairType(AtomMask(pair1), AtomMask(pair2)) );
    pair1 = actionArgs.GetStringKey("pair");
  }
  // Check that something will be done
  if (!findNOEs_ && rstfilename.empty() && Pairs_.empty()) {
    mprinterr("Error: Must specify restraint file, 'pair', and/or 'findnoes'.\n");
    return Action::ERR;
  }
  // Read in NMR restraints.
  if (!rstfilename.empty()) {
    if (ReadNmrRestraints( rstfilename )) return Action::ERR;
  }
  // Set up distances.
  int num_noe = 1;
  for (noeDataArray::iterator noe = NOEs_.begin(); noe != NOEs_.end(); ++noe, ++num_noe) {
    // Translate any ambiguous atom names
    TranslateAmbiguous( noe->aName1_ );
    TranslateAmbiguous( noe->aName2_ );
    // Create mask expressions from resnum/atom name
    noe->dMask1_.SetMaskString( MaskExpression( noe->resNum1_, noe->aName1_ ) );
    noe->dMask2_.SetMaskString( MaskExpression( noe->resNum2_, noe->aName2_ ) );
    // Dataset to store distances
    // NOTE(review): noeData is a stack local passed by address — this
    // assumes AssociateData copies the associated data; confirm.
    AssociatedData_NOE noeData(noe->bound_, noe->boundh_, noe->rexp_);
    MetaData md(setname_, "NOE", num_noe);
    md.SetLegend( noe->dMask1_.MaskExpression() + " and " + noe->dMask2_.MaskExpression());
    md.SetScalarMode( MetaData::M_DISTANCE );
    md.SetScalarType( MetaData::NOE );
    noe->dist_ = init.DSL().AddSet(DataSet::DOUBLE, md);
    if (noe->dist_==0) return Action::ERR;
    noe->dist_->AssociateData( &noeData );
    // Add dataset to data file
    if (outfile != 0) outfile->AddDataSet( noe->dist_ );
  }
  masterDSL_ = init.DslPtr();

  mprintf("Warning: *** THIS ACTION IS EXPERIMENTAL. ***\n");
  mprintf(" NMRRST: %zu NOEs from NMR restraint file.\n", NOEs_.size());
  mprintf("\tShifting residue numbers in restraint file by %i\n", resOffset_);
  // DEBUG - print NOEs
  for (noeDataArray::const_iterator noe = NOEs_.begin(); noe != NOEs_.end(); ++noe)
    mprintf("\t'%s' %f < %f < %f\n", noe->dist_->legend(),
            noe->bound_, noe->rexp_, noe->boundh_);
  if (findNOEs_) {
    mprintf("\tSearching for potential NOEs. Max cutoff is %g Ang.\n", max_cut_);
    mprintf("\tNOE distance criteria (Ang.): S= %g, M= %g, W= %g\n",
            strong_cut_, medium_cut_, weak_cut_);
    if (series_)
      mprintf("\tDistance data for NOEs less than cutoff will be saved as '%s[foundNOE]'.\n",
              setname_.c_str());
    mprintf("\tFound NOEs will be written to '%s'\n", findOutput_->Filename().full());
  }
  if (!Pairs_.empty()) {
    mprintf("\tSpecified NOE pairs:\n");
    for (MaskPairArray::const_iterator mp = Pairs_.begin(); mp != Pairs_.end(); ++mp)
      mprintf("\t\t[%s] to [%s]\n", mp->first.MaskString(), mp->second.MaskString());
    mprintf("\tSpecified NOE data will be written to '%s'\n", specOutput_->Filename().full());
  }
  if (!Image_.UseImage())
    mprintf("\tNon-imaged");
  else
    mprintf("\tImaged");
  if (useMass_)
    mprintf(", center of mass.\n");
  else
    mprintf(", geometric center.\n");
  if (!viewrst_.empty())
    mprintf("\tTopologies corresponding to found NOEs will be written to '%s'\n", viewrst_.c_str());
  return Action::OK;
}
// Analysis_KDE::Setup() Analysis::RetType Analysis_KDE::Setup(ArgList& analyzeArgs, AnalysisSetup& setup, int debugIn) { if (analyzeArgs.Contains("min")) { default_min_ = analyzeArgs.getKeyDouble("min", 0.0); minArgSet_ = true; } if (analyzeArgs.Contains("max")) { default_max_ = analyzeArgs.getKeyDouble("max", 0.0); maxArgSet_ = true; } default_step_ = analyzeArgs.getKeyDouble("step", 0.0); default_bins_ = analyzeArgs.getKeyInt("bins", -1); if (default_step_ == 0.0 && default_bins_ < 1) { mprinterr("Error: Must set either bins or step.\n"); return Analysis::ERR; } Temp_ = analyzeArgs.getKeyDouble("free",-1.0); if (Temp_!=-1.0) calcFreeE_ = true; else calcFreeE_ = false; std::string setname = analyzeArgs.GetStringKey("name"); bandwidth_ = analyzeArgs.getKeyDouble("bandwidth", -1.0); DataFile* outfile = setup.DFL().AddDataFile( analyzeArgs.GetStringKey("out"), analyzeArgs ); DataFile* klOutfile = 0; // Get second data set for KL divergence calc. std::string q_dsname = analyzeArgs.GetStringKey("kldiv"); if (!q_dsname.empty()) { q_data_ = setup.DSL().GetDataSet( q_dsname ); if (q_data_ == 0) { mprinterr("Error: Data set %s not found.\n", q_dsname.c_str()); return Analysis::ERR; } if (q_data_->Ndim() != 1) { mprinterr("Error: Only 1D data sets supported.\n"); return Analysis::ERR; } klOutfile = setup.DFL().AddDataFile( analyzeArgs.GetStringKey("klout"), analyzeArgs ); } else { q_data_ = 0; kldiv_ = 0; } // Get AMD boost data set std::string amdname = analyzeArgs.GetStringKey("amd"); if (!amdname.empty()) { amddata_ = setup.DSL().GetDataSet( amdname ); if (amddata_ == 0) { mprinterr("Error: AMD data set %s not found.\n", amdname.c_str()); return Analysis::ERR; } if (amddata_->Ndim() != 1) { mprinterr("Error: AMD data set must be 1D.\n"); return Analysis::ERR; } } else amddata_ = 0; // Get data set data_ = setup.DSL().GetDataSet( analyzeArgs.GetStringNext() ); if (data_ == 0) { mprinterr("Error: No data set or invalid data set name specified\n"); return Analysis::ERR; } 
if (data_->Ndim() != 1) { mprinterr("Error: Only 1D data sets supported.\n"); return Analysis::ERR; } // Output data set output_ = setup.DSL().AddSet(DataSet::DOUBLE, setname, "kde"); if (output_ == 0) return Analysis::ERR; if (outfile != 0) outfile->AddDataSet( output_ ); // Output for KL divergence calc. if ( q_data_ != 0 ) { kldiv_ = setup.DSL().AddSet(DataSet::DOUBLE, MetaData(output_->Meta().Name(), "kld")); if (klOutfile != 0) klOutfile->AddDataSet( kldiv_ ); } mprintf(" KDE: Using gaussian KDE to histogram set \"%s\"\n", data_->legend()); if (amddata_!=0) mprintf("\tPopulating bins using AMD boost from data set %s\n", amddata_->legend()); if (q_data_ != 0) { mprintf("\tCalculating Kullback-Leibler divergence with set \"%s\"\n", q_data_->legend()); } if (bandwidth_ < 0.0) mprintf("\tBandwidth will be estimated.\n"); else mprintf("\tBandwidth= %f\n", bandwidth_); if (calcFreeE_) mprintf("\tFree energy in kcal/mol will be calculated from bin populations at %f K.\n",Temp_); return Analysis::OK; }
// Action_LIE::init() Action::RetType Action_LIE::Init(ArgList& actionArgs, ActionInit& init, int debugIn) { // Always use imaged distances InitImaging(true); double cut; // Get Keywords doelec_ = !(actionArgs.hasKey("noelec")); dovdw_ = !(actionArgs.hasKey("novdw")); DataFile* datafile = init.DFL().AddDataFile(actionArgs.GetStringKey("out"), actionArgs); dielc_ = actionArgs.getKeyDouble("diel", 1.0); cut = actionArgs.getKeyDouble("cutvdw", 12.0); cut2vdw_ = cut * cut; // store square of cut for computational efficiency cut = actionArgs.getKeyDouble("cutelec", 12.0); cut2elec_ = cut * cut; // store square of cut for computational efficiency onecut2_ = 1 / cut2elec_; bool has_mask2 = false; if (!doelec_ && !dovdw_) { mprinterr("Error: LIE: Cannot skip both ELEC and VDW calcs\n"); return Action::ERR; } // Get Masks Mask1_.SetMaskString( actionArgs.GetMaskNext() ); std::string refmask = actionArgs.GetMaskNext(); if (!refmask.empty()) { Mask2_.SetMaskString(refmask); has_mask2 = true; } else { Mask2_ = Mask1_; Mask2_.InvertMaskExpression(); } // Get data set name std::string ds_name = actionArgs.GetStringNext(); if (ds_name.empty()) ds_name = init.DSL().GenerateDefaultName("LIE"); // Datasets if (doelec_) { elec_ = init.DSL().AddSet(DataSet::DOUBLE, MetaData(ds_name, "EELEC")); if (elec_ == 0) return Action::ERR; if (datafile != 0) datafile->AddDataSet(elec_); } if (dovdw_) { vdw_ = init.DSL().AddSet(DataSet::DOUBLE, MetaData(ds_name, "EVDW")); if (vdw_ == 0) return Action::ERR; if (datafile != 0) datafile->AddDataSet(vdw_); } mprintf(" LIE: Ligand mask is %s. Surroundings are ", Mask1_.MaskString()); if (!has_mask2) mprintf("everything else. "); else mprintf("atoms in mask %s. ", Mask2_.MaskString()); mprintf("Cutoff is %.3lf Ang. ", cut); if (!doelec_) mprintf("Skipping Electrostatic Calc. "); if (!dovdw_) mprintf("Skipping VDW Calc. "); mprintf("\n"); return Action::OK; }
// Exec_PermuteDihedrals::Execute()
/** Rotate selected dihedrals in every frame of a COORDS set, either at
  * fixed intervals ('interval', the default) or to random values
  * ('random'), optionally checking for and attempting to recover from
  * steric clashes. Results go to an output trajectory ('outtraj') and/or
  * a new COORDS set ('crdout').
  */
Exec::RetType Exec_PermuteDihedrals::Execute(CpptrajState& State, ArgList& argIn)
{
  debug_ = State.Debug();
  mode_ = INTERVAL;
  // Get Keywords - first determine mode
  if (argIn.hasKey("random"))
    mode_ = RANDOM;
  else if (argIn.hasKey("interval"))
    mode_ = INTERVAL;
  // Get input COORDS set
  std::string setname = argIn.GetStringKey("crdset");
  if (setname.empty()) {
    mprinterr("Error: Specify COORDS dataset name with 'crdset'.\n");
    return CpptrajState::ERR;
  }
  DataSet_Coords* CRD = (DataSet_Coords*)State.DSL().FindCoordsSet( setname );
  if (CRD == 0) {
    mprinterr("Error: Could not find COORDS set '%s'\n", setname.c_str());
    return CpptrajState::ERR;
  }
  mprintf(" PERMUTEDIHEDRALS: Using COORDS '%s'\n", CRD->legend());

  // Get residue range
  Range resRange;
  resRange.SetRange(argIn.GetStringKey("resrange"));
  if (!resRange.Empty())
    resRange.ShiftBy(-1); // User res args start from 1
  mprintf("\tPermutating dihedrals in");
  if (resRange.Empty())
    mprintf(" all solute residues.\n");
  else
    mprintf(" residue range [%s]\n", resRange.RangeArg());

  // Determine which angles to search for
  DihedralSearch dihSearch;
  dihSearch.SearchForArgs(argIn);
  // If nothing is enabled, enable all
  dihSearch.SearchForAll();
  mprintf("\tSearching for types:");
  dihSearch.PrintTypes();
  mprintf("\n");

  // Setup output trajectory
  outframe_ = 0;
  std::string outfilename = argIn.GetStringKey("outtraj");
  if (!outfilename.empty()) {
    mprintf("\tCoordinates output to '%s'\n", outfilename.c_str());
    Topology* outtop = State.DSL().GetTopology( argIn );
    if (outtop == 0) {
      mprinterr("Error: No topology for output traj.\n");
      return CpptrajState::ERR;
    }
    // Setup output trajectory FIXME: Correct frames for # of rotations
    if (outtraj_.PrepareTrajWrite(outfilename, argIn, CRD->TopPtr(), CRD->CoordsInfo(),
                                  CRD->Size(), TrajectoryFile::UNKNOWN_TRAJ))
      return CpptrajState::ERR;
  }
  // Setup output coords
  outfilename = argIn.GetStringKey("crdout");
  if (!outfilename.empty()) {
    mprintf("\tCoordinates saved to set '%s'\n", outfilename.c_str());
    crdout_ = (DataSet_Coords_CRD*)State.DSL().AddSet(DataSet::COORDS, outfilename);
    if (crdout_ == 0) return CpptrajState::ERR;
    crdout_->CoordsSetup( CRD->Top(), CRD->CoordsInfo() );
  }
  // Get specific mode options.
  double interval_in_deg = 60.0;
  if ( mode_ == INTERVAL ) {
    interval_in_deg = argIn.getNextDouble(60.0);
    mprintf("\tDihedrals will be rotated at intervals of %.2f degrees.\n", interval_in_deg);
  } else if (mode_ == RANDOM) {
    check_for_clashes_ = argIn.hasKey("check");
    checkAllResidues_ = argIn.hasKey("checkallresidues");
    cutoff_ = argIn.getKeyDouble("cutoff",0.8);
    rescutoff_ = argIn.getKeyDouble("rescutoff",10.0);
    backtrack_ = argIn.getKeyInt("backtrack",4);
    increment_ = argIn.getKeyInt("increment",1);
    max_factor_ = argIn.getKeyInt("maxfactor",2);
    int iseed = argIn.getKeyInt("rseed",-1);
    // Output file for # of problems
    DataFile* problemFile = State.DFL().AddDataFile(argIn.GetStringKey("out"), argIn);
    // Dataset to store number of problems
    number_of_problems_ = State.DSL().AddSet(DataSet::INTEGER, argIn.GetStringNext(),"Nprob");
    if (number_of_problems_==0) return CpptrajState::ERR;
    // Add dataset to data file list
    if (problemFile != 0) problemFile->AddDataSet(number_of_problems_);
    // Check validity of args
    if (cutoff_ < Constants::SMALL) {
      mprinterr("Error: cutoff too small.\n");
      return CpptrajState::ERR;
    }
    if (rescutoff_ < Constants::SMALL) {
      mprinterr("Error: rescutoff too small.\n");
      return CpptrajState::ERR;
    }
    if (backtrack_ < 0) {
      mprinterr("Error: backtrack value must be >= 0\n");
      return CpptrajState::ERR;
    }
    if ( increment_<1 || (360 % increment_)!=0 ) {
      mprinterr("Error: increment must be a factor of 360.\n");
      return CpptrajState::ERR;
    }
    // Calculate max increment
    max_increment_ = 360 / increment_;
    // Seed random number gen
    RN_.rn_set( iseed );
    // Print info
    mprintf("\tDihedrals will be rotated to random values.\n");
    if (iseed==-1)
      mprintf("\tRandom number generator will be seeded using time.\n");
    else
      mprintf("\tRandom number generator will be seeded using %i\n",iseed);
    if (check_for_clashes_) {
      mprintf("\tWill attempt to recover from bad steric clashes.\n");
      if (checkAllResidues_)
        mprintf("\tAll residues will be checked.\n");
      else
        mprintf("\tResidues up to the currenly rotating dihedral will be checked.\n");
      mprintf("\tAtom cutoff %.2f, residue cutoff %.2f, backtrack = %i\n",
              cutoff_, rescutoff_, backtrack_);
      mprintf("\tWhen clashes occur dihedral will be incremented by %i\n",increment_);
      mprintf("\tMax # attempted rotations = %i times number dihedrals.\n",
              max_factor_);
    }
    // Square cutoffs to compare to dist^2 instead of dist
    cutoff_ *= cutoff_;
    rescutoff_ *= rescutoff_;
    // Increment backtrack by 1 since we need to skip over current res
    ++backtrack_;
    // Initialize CheckStructure
    if (checkStructure_.SetOptions( false, false, false, State.Debug(), "*", "", 0.8, 1.15, 4.0 )) {
      mprinterr("Error: Could not set up structure check.\n");
      return CpptrajState::ERR;
    }
    // Set up CheckStructure for this parm (false = nobondcheck)
    if (checkStructure_.Setup(CRD->Top(), CRD->CoordsInfo().TrajBox()))
      return CpptrajState::ERR;
  }

  // Determine from selected mask atoms which dihedrals will be rotated.
  PermuteDihedralsType dst;
  // If range is empty (i.e. no resrange arg given) look through all
  // solute residues.
  Range actualRange;
  if (resRange.Empty())
    actualRange = CRD->Top().SoluteResidues();
  else
    actualRange = resRange;
  // Search for dihedrals
  if (dihSearch.FindDihedrals(CRD->Top(), actualRange))
    return CpptrajState::ERR;
  // For each found dihedral, set up mask of atoms that will move upon
  // rotation. Also set up mask of atoms in this residue that will not
  // move, including atom2.
  if (debug_>0)
    mprintf("DEBUG: Dihedrals:\n");
  for (DihedralSearch::mask_it dih = dihSearch.begin();
                               dih != dihSearch.end(); ++dih)
  {
    dst.checkAtoms.clear();
    // Set mask of atoms that will move during dihedral rotation.
    dst.Rmask = DihedralSearch::MovingAtoms(CRD->Top(), dih->A1(), dih->A2());
    // If randomly rotating angles, check for atoms that are in the same
    // residue as A1 but will not move. They need to be checked for clashes
    // since further rotations will not help them.
    if (mode_ == RANDOM && check_for_clashes_) {
      CharMask cMask( dst.Rmask.ConvertToCharMask(), dst.Rmask.Nselected() );
      int a1res = CRD->Top()[dih->A1()].ResNum();
      for (int maskatom = CRD->Top().Res(a1res).FirstAtom();
               maskatom < CRD->Top().Res(a1res).LastAtom(); ++maskatom)
        if (!cMask.AtomInCharMask(maskatom))
          dst.checkAtoms.push_back( maskatom );
      dst.checkAtoms.push_back(dih->A1()); // TODO: Does this need to be added first?
      // Since only the second atom and atoms it is bonded to move during
      // rotation, base the check on the residue of the second atom.
      dst.resnum = a1res;
    }
    dst.atom0 = dih->A0(); // FIXME: This duplicates info
    dst.atom1 = dih->A1();
    dst.atom2 = dih->A2();
    dst.atom3 = dih->A3();
    BB_dihedrals_.push_back(dst);
    // DEBUG: List dihedral info.
    if (debug_ > 0) {
      mprintf("\t%s-%s-%s-%s\n",
              CRD->Top().TruncResAtomName(dih->A0()).c_str(),
              CRD->Top().TruncResAtomName(dih->A1()).c_str(),
              CRD->Top().TruncResAtomName(dih->A2()).c_str(),
              CRD->Top().TruncResAtomName(dih->A3()).c_str() );
      if (debug_ > 1 && mode_ == RANDOM && check_for_clashes_) {
        mprintf("\t\tCheckAtoms=");
        for (std::vector<int>::const_iterator ca = dst.checkAtoms.begin();
                                              ca != dst.checkAtoms.end(); ++ca)
          mprintf(" %i", *ca + 1);
        mprintf("\n");
      }
      if (debug_ > 2) {
        mprintf("\t\t");
        dst.Rmask.PrintMaskAtoms("Rmask:");
      }
    }
  }

  // Set up simple structure check. First step is coarse; check distances
  // between a certain atom in each residue (first, COM, CA, some other atom?)
  // to see if residues are in each others neighborhood. Second step is to
  // check the atoms in each close residue.
  // NOTE(review): check_for_clashes_ is only assigned in the RANDOM branch
  // above; in INTERVAL mode this reads the member's prior value —
  // presumably initialized false in the constructor; confirm.
  if (check_for_clashes_) {
    ResidueCheckType rct;
    int res = 0;
    for (Topology::res_iterator residue = CRD->Top().ResStart();
                                residue != CRD->Top().ResEnd(); ++residue)
    {
      rct.resnum = res++;
      rct.start = residue->FirstAtom();
      rct.stop = residue->LastAtom();
      rct.checkatom = rct.start;
      ResCheck_.push_back(rct);
    }
  }

  // Perform dihedral permute
  Frame currentFrame = CRD->AllocateFrame();
  for (unsigned int set = 0; set != CRD->Size(); set++)
  {
    CRD->GetFrame(set, currentFrame);
    int n_problems = 0;
    switch (mode_) {
      case RANDOM:
        RandomizeAngles(currentFrame, CRD->Top());
        // Check the resulting structure
        n_problems = checkStructure_.CheckOverlaps( currentFrame );
        //mprintf("%i\tResulting structure has %i problems.\n",frameNum,n_problems);
        number_of_problems_->Add(set, &n_problems);
        if (outtraj_.IsInitialized()) outtraj_.WriteSingle(outframe_++, currentFrame);
        if (crdout_ != 0) crdout_->AddFrame( currentFrame );
        break;
      case INTERVAL:
        IntervalAngles(currentFrame, CRD->Top(), interval_in_deg);
        break;
    }
  }
  if (outtraj_.IsInitialized()) outtraj_.EndTraj();
  return CpptrajState::OK;
}
/** Set up auto-correlation/covariance analysis: select scalar-1D and
  * vector input sets from the remaining args and create one DOUBLE
  * output set per selected input set.
  * \return Analysis::OK on success, Analysis::ERR on any setup failure.
  */
Analysis::RetType Analysis_AutoCorr::Setup(ArgList& analyzeArgs, AnalysisSetup& setup, int debugIn)
{
  const char* calctype;

  std::string setname = analyzeArgs.GetStringKey("name");
  DataFile* outfile = setup.DFL().AddDataFile( analyzeArgs.GetStringKey("out"), analyzeArgs );
  lagmax_ = analyzeArgs.getKeyInt("lagmax",-1);
  calc_covar_ = !analyzeArgs.hasKey("nocovar");
  usefft_ = !analyzeArgs.hasKey("direct");
  // Select datasets from remaining args; only scalar 1D and vector sets
  // are supported, others are skipped with a warning.
  dsets_.clear();
  ArgList dsetArgs = analyzeArgs.RemainingArgs();
  for (ArgList::const_iterator dsa = dsetArgs.begin(); dsa != dsetArgs.end(); ++dsa) {
    DataSetList setsIn = setup.DSL().GetMultipleSets( *dsa );
    for (DataSetList::const_iterator ds = setsIn.begin(); ds != setsIn.end(); ++ds) {
      if ( (*ds)->Group() != DataSet::SCALAR_1D && (*ds)->Type() != DataSet::VECTOR )
        mprintf("Warning: Set '%s' type not supported in AUTOCORR - skipping.\n",
                (*ds)->legend());
      else
        dsets_.push_back( *ds );
    }
  }
  if (dsets_.empty()) {
    mprinterr("Error: No data sets selected.\n");
    return Analysis::ERR;
  }
  // If setname is empty generate a default name
  if (setname.empty())
    setname = setup.DSL().GenerateDefaultName( "autocorr" );
  // Setup output datasets
  MetaData md( setname );
  for (unsigned int idx = 0; idx != dsets_.size(); idx++) {
    md.SetIdx( idx );
    DataSet* dsout = setup.DSL().AddSet( DataSet::DOUBLE, md );
    if (dsout==0) return Analysis::ERR;
    dsout->SetLegend( dsets_[idx]->Meta().Legend() );
    outputData_.push_back( dsout );
    // Add set to output file
    if (outfile != 0) outfile->AddDataSet( outputData_.back() );
  }

  if (calc_covar_)
    calctype = "covariance";
  else
    calctype = "correlation";

  // FIX: dsets_.size() is size_t; '%i' expects int, which is a printf
  // format/argument mismatch on LP64 platforms. Use '%zu'.
  mprintf(" AUTOCORR: Calculating auto-%s for %zu data sets:\n\t", calctype, dsets_.size());
  for (unsigned int idx = 0; idx != dsets_.size(); ++idx)
    mprintf(" %s", dsets_[idx]->legend());
  mprintf("\n");
  if (lagmax_!=-1)
    mprintf("\tLag max= %i\n", lagmax_);
  if ( !setname.empty() )
    mprintf("\tSet name: %s\n", setname.c_str() );
  if ( outfile != 0 )
    mprintf("\tOutfile name: %s\n", outfile->DataFilename().base());
  if (usefft_)
    mprintf("\tUsing FFT to calculate %s.\n", calctype);
  else
    mprintf("\tUsing direct method to calculate %s.\n", calctype);

  return Analysis::OK;
}
// Analysis_Timecorr::Setup()
/** Set up vector time-correlation analysis. 'vec1' is required; if
  * 'vec2' is also given a cross-correlation is calculated, otherwise an
  * auto-correlation. Creates the P (Legendre) output set and, with
  * 'dplr', the C and R3R3 dipolar sets.
  */
Analysis::RetType Analysis_Timecorr::Setup(ArgList& analyzeArgs, DataSetList* DSLin, TopologyList* PFLin, DataFileList* DFLin, int debugIn)
{
  // Get Vectors
  std::string vec1name = analyzeArgs.GetStringKey("vec1");
  if (vec1name.empty()) {
    mprinterr("Error: no vec1 given, ignoring command\n");
    return Analysis::ERR;
  }
  vinfo1_ = (DataSet_Vector*)DSLin->FindSetOfType( vec1name, DataSet::VECTOR );
  if (vinfo1_==0) {
    mprinterr("Error: vec1: no vector with name %s found.\n", vec1name.c_str());
    return Analysis::ERR;
  }
  std::string vec2name = analyzeArgs.GetStringKey("vec2");
  if (!vec2name.empty()) {
    vinfo2_ = (DataSet_Vector*)DSLin->FindSetOfType( vec2name, DataSet::VECTOR );
    if (vinfo2_==0) {
      mprinterr("Error: vec2: no vector with name %s found.\n", vec2name.c_str());
      return Analysis::ERR;
    }
  } else
    vinfo2_ = 0;
  // Get output DataSet name
  std::string setname = analyzeArgs.GetStringKey("name");
  if (setname.empty())
    setname = DSLin->GenerateDefaultName("TC");
  // Determine auto or cross correlation
  if (vinfo2_ == 0)
    mode_ = AUTOCORR;
  else
    mode_ = CROSSCORR;
  // Get dplr, norm, drct
  dplr_ = analyzeArgs.hasKey("dplr");
  norm_ = analyzeArgs.hasKey("norm");
  drct_ = analyzeArgs.hasKey("drct");
  std::string dplrname = analyzeArgs.GetStringKey("dplrout");
  // Get order for Legendre polynomial, tstep, and tcorr
  order_ = analyzeArgs.getKeyInt("order",2);
  if (order_ < 0 || order_ > 2) {
    mprintf("Warning: vector order out of bounds (should be 0, 1, or 2), resetting to 2.\n");
    order_ = 2;
  }
  tstep_ = analyzeArgs.getKeyDouble("tstep", 1.0);
  tcorr_ = analyzeArgs.getKeyDouble("tcorr", 10000.0);
  // File output. For ptrajformat, time correlation functions and dipolar
  // are output to file specified by 'out'. Otherwise time correlation
  // functions are written to file specified by 'out' using DataFile
  // framework and dipolar output to 'dplrname'.
  ptrajformat_ = analyzeArgs.hasKey("ptrajformat");
  std::string filename = analyzeArgs.GetStringKey("out");
  if (ptrajformat_ && filename.empty()) {
    mprinterr("Error: No output file name given ('out <filename>'). Required for 'ptrajformat'.\n");
    return Analysis::ERR;
  }
  DataFile* dataout = 0;
  if (!ptrajformat_) {
    dataout = DFLin->AddDataFile( filename, analyzeArgs );
    if (dplr_) {
      // Dipolar output must go to its own file in this mode.
      if (!dplrname.empty() && dplrname == filename) {
        mprinterr("Error: 'dplrname' cannot be the same file as 'out' when 'ptrajformat' not specified.\n");
        return Analysis::ERR;
      }
      outfile_ = DFLin->AddCpptrajFile( dplrname, "Timecorr dipolar", DataFileList::TEXT, true );
      if (outfile_ == 0) return Analysis::ERR;
    }
  } else {
    outfile_ = DFLin->AddCpptrajFile( filename, "Timecorr output" );
    if (outfile_ == 0) return Analysis::ERR;
  }
  // Set up output DataSets
  tc_p_ = DSLin->AddSet( DataSet::DOUBLE, MetaData(setname, "P"));
  if (tc_p_ == 0) return Analysis::ERR;
  tc_p_->SetLegend( Plegend_[order_] );
  if (dataout != 0) dataout->AddDataSet( tc_p_ );
  if (dplr_) {
    tc_c_ = DSLin->AddSet( DataSet::DOUBLE, MetaData(setname, "C"));
    tc_r3r3_ = DSLin->AddSet( DataSet::DOUBLE, MetaData(setname, "R3R3"));
    if (tc_c_ == 0 || tc_r3r3_ == 0) return Analysis::ERR;
    tc_c_->SetLegend("<C>");
    tc_r3r3_->SetLegend( "<1/(r^3*r^3)>" );
    if (dataout != 0) {
      dataout->AddDataSet( tc_c_ );
      dataout->AddDataSet( tc_r3r3_ );
    }
  }
  // Print Status
  mprintf(" TIMECORR: Calculating %s", ModeString[mode_]);
  if (mode_ == AUTOCORR)
    mprintf(" of vector %s\n", vinfo1_->legend());
  else // CROSSCORR
    mprintf(" of vectors %s and %s\n", vinfo1_->legend(), vinfo2_->legend());
  mprintf("\tCorrelation time %f, time step %f, order %i\n", tcorr_, tstep_, order_);
  mprintf("\tCorr. func. are");
  if (dplr_)
    mprintf(" for dipolar interactions and");
  if (norm_)
    mprintf(" normalized.\n");
  else
    mprintf(" not normalized.\n");
  mprintf("\tCorr. func. are calculated using the");
  if (drct_)
    mprintf(" direct approach.\n");
  else
    mprintf(" FFT approach.\n");
  if (ptrajformat_)
    mprintf("\tResults are written to %s\n", outfile_->Filename().full());
  else {
    if (dataout != 0)
      mprintf("\tTime correlation functions written to %s\n", dataout->DataFilename().full());
    if (outfile_ != 0)
      mprintf("\tDipolar results written to %s\n", outfile_->Filename().full());
  }
  return Analysis::OK;
}
// Action_Radial::Init() Action::RetType Action_Radial::Init(ArgList& actionArgs, ActionInit& init, int debugIn) { debug_ = debugIn; // Get Keywords image_.InitImaging( !(actionArgs.hasKey("noimage")) ); std::string outfilename = actionArgs.GetStringKey("out"); // Default particle density (mols/Ang^3) for water based on 1.0 g/mL density_ = actionArgs.getKeyDouble("density",0.033456); if (actionArgs.hasKey("center1")) rmode_ = CENTER1; else if (actionArgs.hasKey("center2")) rmode_ = CENTER2; else if (actionArgs.hasKey("nointramol")) rmode_ = NO_INTRAMOL; else rmode_ = NORMAL; useVolume_ = actionArgs.hasKey("volume"); DataFile* intrdfFile = init.DFL().AddDataFile(actionArgs.GetStringKey("intrdf")); DataFile* rawrdfFile = init.DFL().AddDataFile(actionArgs.GetStringKey("rawrdf")); spacing_ = actionArgs.getNextDouble(-1.0); if (spacing_ < 0) { mprinterr("Error: Radial: No spacing argument or arg < 0.\n"); Help(); return Action::ERR; } double maximum = actionArgs.getNextDouble(-1.0); if (maximum < 0) { mprinterr("Error: Radial: No maximum argument or arg < 0.\n"); Help(); return Action::ERR; } // Store max^2, distances^2 greater than max^2 do not need to be // binned and therefore do not need a sqrt calc. maximum2_ = maximum * maximum; // Get First Mask std::string mask1 = actionArgs.GetMaskNext(); if (mask1.empty()) { mprinterr("Error: Radial: No mask given.\n"); return Action::ERR; } Mask1_.SetMaskString(mask1); // Check for second mask - if none specified use first mask std::string mask2 = actionArgs.GetMaskNext(); if (!mask2.empty()) Mask2_.SetMaskString(mask2); else Mask2_.SetMaskString(mask1); // If filename not yet specified check for backwards compat. if (outfilename.empty() && actionArgs.Nargs() > 1 && !actionArgs.Marked(1)) outfilename = actionArgs.GetStringNext(); // Set up output dataset. 
Dset_ = init.DSL().AddSet( DataSet::DOUBLE, actionArgs.GetStringNext(), "g(r)"); if (Dset_ == 0) return RDF_ERR("Could not allocate RDF data set."); DataFile* outfile = init.DFL().AddDataFile(outfilename, actionArgs); if (outfile != 0) outfile->AddDataSet( Dset_ ); // Make default precision a little higher than normal Dset_->SetupFormat().SetFormatWidthPrecision(12,6); // Set DataSet legend from mask strings. Dset_->SetLegend(Mask1_.MaskExpression() + " => " + Mask2_.MaskExpression()); // TODO: Set Yaxis label in DataFile // Calculate number of bins one_over_spacing_ = 1 / spacing_; double temp_numbins = maximum * one_over_spacing_; temp_numbins = ceil(temp_numbins); numBins_ = (int) temp_numbins; // Setup output datafile. Align on bin centers instead of left. // TODO: Use Rdim for binning? Dimension Rdim( spacing_ / 2.0, spacing_, "Distance (Ang)" ); Dset_->SetDim(Dimension::X, Rdim); // Set up output for integral of mask2 if specified. if (intrdfFile != 0) { intrdf_ = init.DSL().AddSet( DataSet::DOUBLE, MetaData(Dset_->Meta().Name(), "int" )); if (intrdf_ == 0) return RDF_ERR("Could not allocate RDF integral data set."); intrdf_->SetupFormat().SetFormatWidthPrecision(12,6); intrdf_->SetLegend("Int[" + Mask2_.MaskExpression() + "]"); intrdf_->SetDim(Dimension::X, Rdim); intrdfFile->AddDataSet( intrdf_ ); } else intrdf_ = 0; // Set up output for raw rdf if (rawrdfFile != 0) { rawrdf_ = init.DSL().AddSet( DataSet::DOUBLE, MetaData(Dset_->Meta().Name(), "raw" )); if (rawrdf_ == 0) return RDF_ERR("Could not allocate raw RDF data set."); rawrdf_->SetupFormat().SetFormatWidthPrecision(12,6); rawrdf_->SetLegend("Raw[" + Dset_->Meta().Legend() + "]"); rawrdf_->SetDim(Dimension::X, Rdim); rawrdfFile->AddDataSet( rawrdf_ ); } else rawrdf_ = 0; // Set up histogram RDF_ = new int[ numBins_ ]; std::fill(RDF_, RDF_ + numBins_, 0); # ifdef _OPENMP // Since RDF is shared by all threads and we cant guarantee that a given // bin in RDF wont be accessed at the same time by the same 
thread, // each thread needs its own bin space. #pragma omp parallel { if (omp_get_thread_num()==0) numthreads_ = omp_get_num_threads(); } rdf_thread_ = new int*[ numthreads_ ]; for (int i=0; i < numthreads_; i++) { rdf_thread_[i] = new int[ numBins_ ]; std::fill(rdf_thread_[i], rdf_thread_[i] + numBins_, 0); } # endif mprintf(" RADIAL: Calculating RDF for atoms in mask [%s]",Mask1_.MaskString()); if (!mask2.empty()) mprintf(" to atoms in mask [%s]",Mask2_.MaskString()); mprintf("\n"); if (outfile != 0) mprintf(" Output to %s.\n", outfile->DataFilename().full()); if (intrdf_ != 0) mprintf(" Integral of mask2 atoms will be output to %s\n", intrdfFile->DataFilename().full()); if (rawrdf_ != 0) mprintf(" Raw RDF bin values will be output to %s\n", rawrdfFile->DataFilename().full()); if (rmode_==CENTER1) mprintf(" Using center of atoms in mask1.\n"); else if (rmode_==CENTER2) mprintf(" Using center of atoms in mask2.\n"); mprintf(" Histogram max %f, spacing %f, bins %i.\n",maximum, spacing_,numBins_); if (useVolume_) mprintf(" Normalizing based on cell volume.\n"); else mprintf(" Normalizing using particle density of %f molecules/Ang^3.\n",density_); if (!image_.UseImage()) mprintf(" Imaging disabled.\n"); if (numthreads_ > 1) mprintf(" Parallelizing RDF calculation with %i threads.\n",numthreads_); return Action::OK; }
// Analysis_TI::Setup() Analysis::RetType Analysis_TI::Setup(ArgList& analyzeArgs, AnalysisSetup& setup, int debugIn) { debug_ = debugIn; int nq = analyzeArgs.getKeyInt("nq", 0); ArgList nskipArg(analyzeArgs.GetStringKey("nskip"), ","); // Comma-separated avg_increment_ = analyzeArgs.getKeyInt("avgincrement", -1); avg_max_ = analyzeArgs.getKeyInt("avgmax", -1); avg_skip_ = analyzeArgs.getKeyInt("avgskip", 0); n_bootstrap_pts_ = analyzeArgs.getKeyInt("bs_pts", -1); n_bootstrap_samples_ = analyzeArgs.getKeyInt("bs_samples", 0); bootstrap_seed_ = analyzeArgs.getKeyInt("bs_seed", -1); bootstrap_fac_ = analyzeArgs.getKeyDouble("bs_fac", 0.75); if (!nskipArg.empty()) { avgType_ = SKIP; // Specified numbers of points to skip nskip_.clear(); for (int i = 0; i != nskipArg.Nargs(); i++) { nskip_.push_back( nskipArg.getNextInteger(0) ); if (nskip_.back() < 0) nskip_.back() = 0; } } else if (avg_increment_ > 0) avgType_ = INCREMENT; else if (n_bootstrap_samples_ > 0) avgType_ = BOOTSTRAP; else avgType_ = AVG; masterDSL_ = setup.DslPtr(); // Get lambda values ArgList xArgs(analyzeArgs.GetStringKey("xvals"), ","); // Also comma-separated if (!xArgs.empty()) { xval_.clear(); for (int i = 0; i != xArgs.Nargs(); i++) xval_.push_back( xArgs.getNextDouble(0.0) ); } std::string setname = analyzeArgs.GetStringKey("name"); DataFile* outfile = setup.DFL().AddDataFile(analyzeArgs.GetStringKey("out"), analyzeArgs); curveout_ = setup.DFL().AddDataFile(analyzeArgs.GetStringKey("curveout"), analyzeArgs); // Select datasets from remaining args if (input_dsets_.AddSetsFromArgs( analyzeArgs.RemainingArgs(), setup.DSL() )) { mprinterr("Error: Could not add data sets.\n"); return Analysis::ERR; } if (input_dsets_.empty()) { mprinterr("Error: No input data sets.\n"); return Analysis::ERR; } if (SetQuadAndWeights(nq)) return Analysis::ERR; // Determine integration mode if (nq > 0) mode_ = GAUSSIAN_QUAD; else mode_ = TRAPEZOID; // Check that # abscissas matches # data sets if (xval_.size() != 
input_dsets_.size()) { mprinterr("Error: Expected %zu data sets for integration, got %zu\n", input_dsets_.size(), xval_.size()); return Analysis::ERR; } // Set up output data sets DataSet::DataType dtype = DataSet::DOUBLE; if (avgType_ == SKIP || avgType_ == INCREMENT) dtype = DataSet::XYMESH; dAout_ = setup.DSL().AddSet(dtype, setname, "TI"); if (dAout_ == 0) return Analysis::ERR; if (outfile != 0) outfile->AddDataSet( dAout_ ); MetaData md(dAout_->Meta().Name(), "TIcurve"); if (avgType_ == AVG) { // Single curve curve_.push_back( setup.DSL().AddSet(DataSet::XYMESH, md) ); if (curve_.back() == 0) return Analysis::ERR; curve_.back()->ModifyDim(Dimension::X).SetLabel("Lambda"); if (curveout_ != 0) curveout_->AddDataSet( curve_.back() ); if (outfile != 0) outfile->ProcessArgs("noxcol"); } else if (avgType_ == SKIP) { // As many curves as skip values for (Iarray::const_iterator it = nskip_.begin(); it != nskip_.end(); ++it) { md.SetIdx( *it ); DataSet* ds = setup.DSL().AddSet(DataSet::XYMESH, md); if (ds == 0) return Analysis::ERR; ds->ModifyDim(Dimension::X).SetLabel("Lambda"); ds->SetLegend( md.Name() + "_Skip" + integerToString(*it) ); if (curveout_ != 0) curveout_->AddDataSet( ds ); curve_.push_back( ds ); } } else if (avgType_ == BOOTSTRAP) { // As many curves as resamples for (int nsample = 0; nsample != n_bootstrap_samples_; nsample++) { md.SetIdx(nsample); DataSet* ds = setup.DSL().AddSet(DataSet::XYMESH, md); if (ds == 0) return Analysis::ERR; ds->ModifyDim(Dimension::X).SetLabel("Lambda"); ds->SetLegend( md.Name() + "_Sample" + integerToString(nsample) ); if (curveout_ != 0) curveout_->AddDataSet( ds ); curve_.push_back( ds ); } // Standard devation of avg free energy over samples dA_SD_ = setup.DSL().AddSet(DataSet::DOUBLE, MetaData(md.Name(), "SD")); if (dA_SD_ == 0) return Analysis::ERR; if (outfile != 0) { outfile->AddDataSet( dA_SD_ ); outfile->ProcessArgs("noxcol"); } } // NOTE: INCREMENT is set up once data set size is known mprintf(" TI: Calculating 
TI"); if (mode_ == GAUSSIAN_QUAD) { mprintf(" using Gaussian quadrature with %zu points.\n", xval_.size()); mprintf("\t%6s %8s %8s %s\n", "Point", "Abscissa", "Weight", "SetName"); for (unsigned int i = 0; i != xval_.size(); i++) mprintf("\t%6i %8.5f %8.5f %s\n", i, xval_[i], wgt_[i], input_dsets_[i]->legend()); } else { mprintf(" using the trapezoid rule.\n"); mprintf("\t%6s %8s %s\n", "Point", "Abscissa", "SetName"); for (unsigned int i = 0; i != xval_.size(); i++) mprintf("\t%6i %8.5f %s\n", i, xval_[i], input_dsets_[i]->legend()); } mprintf("\tResult(s) of integration(s) saved in set '%s'\n", dAout_->legend()); if (avgType_ == AVG) mprintf("\tUsing all data points in <DV/DL> calc.\n"); else if (avgType_ == SKIP) { mprintf("\tSkipping first"); for (Iarray::const_iterator it = nskip_.begin(); it != nskip_.end(); ++it) mprintf(" %i", *it); mprintf(" data points for <DV/DL> calc.\n"); } else if (avgType_ == INCREMENT) { mprintf("\tCalculating <DV/DL> starting from point %i, increment by %i.", avg_skip_, avg_increment_); if (avg_max_ != -1) mprintf(" Max %i points.", avg_max_); mprintf("\n"); } else if (avgType_ == BOOTSTRAP) { mprintf("\tStandard devation of result stored in set '%s'\n", dA_SD_->legend()); mprintf("\tCalculating <DV/DL> from %i bootstrap resamples.\n", n_bootstrap_samples_); if (n_bootstrap_pts_ > 0) mprintf("\tBootstrap resample size is %i data points.\n", n_bootstrap_pts_); else mprintf("\tWill use bootstrap resample size of %g%% of total points.\n", bootstrap_fac_*100.0); if (bootstrap_seed_ != -1) mprintf("\tBoostrap base seed is %i\n", bootstrap_seed_); } mprintf("\tTI curve(s) saved in set(s)"); if (avgType_ != INCREMENT) for (DSarray::const_iterator ds = curve_.begin(); ds != curve_.end(); ++ds) mprintf(" '%s'", (*ds)->legend()); else mprintf(" named '%s'", md.PrintName().c_str()); mprintf("\n"); if (outfile != 0) mprintf("\tResults written to '%s'\n", outfile->DataFilename().full()); if (curveout_!= 0) mprintf("\tTI curve(s) written to 
'%s'\n", curveout_->DataFilename().full()); return Analysis::OK; }