// DataIO_OpenDx::WriteSet3D() int DataIO_OpenDx::WriteSet3D(DataSet const& setIn, CpptrajFile& outfile) const { if (setIn.Ndim() != 3) { mprinterr("Internal Error: DataSet %s in DataFile %s has %zu dimensions, expected 3.\n", setIn.legend(), outfile.Filename().full(), setIn.Ndim()); return 1; } int err = 0; switch ( gridWriteMode_ ) { case BIN_CORNER: case BIN_CENTER: err = WriteGrid( setIn, outfile ); break; case WRAP: case EXTENDED : err = WriteGridWrap( setIn, outfile ); break; } // Print tail if (err == 0) { // TODO: Make this an option //if (mode_ == CENTER) // outfile.Printf("\nobject \"density (%s) [A^-3]\" class field\n", // centerMask_.MaskString()); //else outfile.Printf("\nobject \"density [A^-3]\" class field\n"); } return err; }
/** Add a DataSet of specified type, set it up and return pointer to it.
  * Verifies the list does not hold copies and that no set with the same
  * attributes already exists, then delegates allocation and setup to
  * AddSet_NoCheck() so the two code paths cannot drift apart. This also
  * picks up the MPI needs-sync flagging done in AddSet_NoCheck(), which
  * the previous duplicated implementation here omitted.
  * \param inType type of DataSet to add.
  * \param metaIn DataSet MetaData.
  * \return pointer to successfully set-up DataSet or 0 if error.
  */
DataSet* DataSetList::AddSet(DataSet::DataType inType, MetaData const& metaIn) {
  // TODO Always generate default name if empty?
  // Do not add to a list with copies
  if (hasCopies_) {
    mprinterr("Internal Error: Attempting to add DataSet (%s) to DataSetList with copies.\n",
              metaIn.PrintName().c_str());
    return 0;
  }
  // Apply ensemble number before the duplicate check so the comparison
  // matches what AddSet_NoCheck() will eventually store.
  MetaData meta( metaIn );
  meta.SetEnsembleNum( ensembleNum_ );
  // Check if DataSet with same attributes already present.
  DataSet* DS = CheckForSet(meta);
  if (DS != 0) {
    mprintf("Warning: DataSet '%s' already present.\n", DS->Meta().PrintName().c_str());
    // NOTE: Should return found dataset?
    return 0;
  }
  // Common allocation/setup path (also handles time-series defaults,
  // MPI sync flag, and Push_Back).
  return AddSet_NoCheck( inType, metaIn );
}
/** Special version of AddSet that does NOT check if set already exists.
  * Intended for use in Action Setup/DoAction where it is assumed that
  * the Action is setting up DataSets in such a way that there will not
  * be name conflicts, i.e. the DataSet name at least is unique.
  * \param inType type of DataSet to add.
  * \param metaIn DataSet MetaData.
  * \return pointer to successfully set-up DataSet or 0 if error.
  */
DataSet* DataSetList::AddSet_NoCheck(DataSet::DataType inType, MetaData const& metaIn) {
  // TODO Pass in Nframes?
  // NOTE: Assumes this list does NOT contain copies.
  MetaData setMeta( metaIn );
  setMeta.SetEnsembleNum( ensembleNum_ );
  // Look up the allocator for the requested set type.
  TokenPtr allocToken = &(DataArray[inType]);
  if (allocToken->Alloc == 0) {
    mprinterr("Internal Error: No allocator for DataSet type [%s]\n", allocToken->Description);
    return 0;
  }
  DataSet* newSet = (DataSet*)allocToken->Alloc();
  if (newSet == 0) {
    mprinterr("Internal Error: DataSet %s memory allocation failed.\n",
              setMeta.PrintName().c_str());
    return 0;
  }
  // A 1D set whose time-series status is unknown defaults to being a
  // time series with a default 'Frame' X dimension.
  if (newSet->Ndim() == 1 && setMeta.TimeSeries() == MetaData::UNKNOWN_TS) {
    setMeta.SetTimeSeries( MetaData::IS_TS );
    newSet->SetDim(Dimension::X, Dimension(1.0, 1.0, "Frame") );
    //newSet->Allocate( DataSet::SizeArray(1, Nframes) );
  }
  // Apply MetaData to the freshly allocated set.
  if ( newSet->SetMeta( setMeta ) ) {
    mprinterr("Error setting up data set %s.\n", setMeta.PrintName().c_str());
    delete newSet;
    return 0;
  }
# ifdef MPI
  // Sets created after trajectory processing started must be synced.
  if (newSetsNeedSync_) newSet->SetNeedsSync( true );
# endif
  // Add to list
  Push_Back(newSet);
  return newSet;
}
/** Set up histogram with specified data sets.
  * Parses all keyword arguments, then treats the remaining arguments as
  * names of DataSets to be histogrammed. The number of sets determines the
  * histogram dimensionality; 1-3 dimensions can use normal DataSet/DataFile
  * output, higher dimensions fall back to the internal ("native") writer.
  * NOTE: the order of the GetStringKey/hasKey/getKeyDouble calls matters,
  * since each call consumes its keyword from analyzeArgs.
  * \param analyzeArgs Argument list; keywords consumed, remainder = set names.
  * \param setup Provides access to the master DataSetList and DataFileList.
  * \param debugIn Debug verbosity level.
  * \return Analysis::OK on success, Analysis::ERR on any setup error.
  */
Analysis::RetType Analysis_Hist::Setup(ArgList& analyzeArgs, AnalysisSetup& setup, int debugIn)
{
  debug_ = debugIn;
  // Keywords
  std::string histname = analyzeArgs.GetStringKey("name");
  outfilename_ = analyzeArgs.GetStringKey("out");
  if (outfilename_.empty()) {
    mprinterr("Error: Hist: No output filename specified.\n");
    return Analysis::ERR;
  }
  traj3dName_ = analyzeArgs.GetStringKey("traj3d");
  traj3dFmt_ = TrajectoryFile::WriteFormatFromString( analyzeArgs.GetStringKey("trajfmt"),
                                                      TrajectoryFile::AMBERTRAJ );
  parmoutName_ = analyzeArgs.GetStringKey("parmout");
  // Create a DataFile here so any DataFile arguments can be processed. If it
  // turns out later that native output is needed the DataFile will be removed.
  outfile_ = setup.DFL().AddDataFile(outfilename_, analyzeArgs);
  if (outfile_==0) return Analysis::ERR;
  // 'free <T>': -1.0 is the "not specified" sentinel, so a literal
  // temperature of -1.0 K cannot be requested (physically meaningless anyway).
  Temp_ = analyzeArgs.getKeyDouble("free",-1.0);
  if (Temp_!=-1.0)
    calcFreeE_ = true;
  else
    calcFreeE_ = false;
  gnuplot_ = analyzeArgs.hasKey("gnu");
  // Normalization mode: sum-to-1, integral-to-1, or none.
  if (analyzeArgs.hasKey("norm"))
    normalize_ = NORM_SUM;
  else if (analyzeArgs.hasKey("normint"))
    normalize_ = NORM_INT;
  else
    normalize_ = NO_NORM;
  circular_ = analyzeArgs.hasKey("circular");
  nativeOut_ = analyzeArgs.hasKey("nativeout");
  // Default min/max/step/bins; the *ArgSet_ flags record whether the user
  // explicitly provided min/max (0.0 alone cannot distinguish that).
  if ( analyzeArgs.Contains("min") ) {
    default_min_ = analyzeArgs.getKeyDouble("min", 0.0);
    minArgSet_ = true;
  }
  if ( analyzeArgs.Contains("max") ) {
    default_max_ = analyzeArgs.getKeyDouble("max", 0.0);
    maxArgSet_ = true;
  }
  default_step_ = analyzeArgs.getKeyDouble("step", 0.0);
  default_bins_ = analyzeArgs.getKeyInt("bins", -1);
  // Optional AMD boost data set for reweighted binning; must be 1D.
  calcAMD_ = false;
  std::string amdname = analyzeArgs.GetStringKey("amd");
  if (!amdname.empty()) {
    DataSet* ds = setup.DSL().GetDataSet( amdname );
    if (ds == 0) {
      mprinterr("Error: AMD data set %s not found.\n", amdname.c_str());
      return Analysis::ERR;
    }
    if (ds->Ndim() != 1) {
      mprinterr("Error: AMD data set must be 1D.\n");
      return Analysis::ERR;
    }
    amddata_ = (DataSet_1D*)ds;
    calcAMD_ = true;
  }
  // Treat all remaining arguments as dataset names. Do not set up dimensions
  // yet since the data sets may not be fully populated.
  // CheckDimension() presumably appends valid sets to histdata_ -- the
  // emptiness check below relies on that. TODO confirm.
  ArgList dsetNames = analyzeArgs.RemainingArgs();
  for ( ArgList::const_iterator setname = dsetNames.begin();
        setname != dsetNames.end(); ++setname)
  {
    if (CheckDimension( *setname, setup.DSL() )) return Analysis::ERR;
  }
  // histdata contains the DataSets to be histogrammed
  if (histdata_.empty()) {
    mprinterr("Error: Hist: No datasets specified.\n");
    return Analysis::ERR;
  }
  // Total # of dimensions for the histogram is the number of sets to be binned.
  N_dimensions_ = histdata_.size();
  // Allocate the output DataSet matching the dimensionality; > 3 dimensions
  // forces native output since no DataSet type exists for it.
  if (!nativeOut_) {
    switch ( N_dimensions_ ) {
      case 1: hist_ = setup.DSL().AddSet( DataSet::DOUBLE,     histname, "Hist"); break;
      case 2: hist_ = setup.DSL().AddSet( DataSet::MATRIX_DBL, histname, "Hist"); break;
      // TODO: GRID_DBL
      case 3: hist_ = setup.DSL().AddSet( DataSet::GRID_FLT,   histname, "Hist"); break;
      default:
        // FIXME: GET N DIMENSION CASE!
        mprintf("Warning: Histogram dimension > 3. DataSet/DataFile output not supported.\n");
        nativeOut_ = true;
    }
  }
  // traj3d only supported with 3D histograms
  if (!traj3dName_.empty() && N_dimensions_ != 3) {
    mprintf("Warning: 'traj3d' only supported with 3D histograms.\n");
    traj3dName_.clear();
    parmoutName_.clear();
  }
  if (!nativeOut_) {
    // DataFile output. Add DataSet to DataFile.
    if (hist_ == 0) {
      mprinterr("Error: Could not set up histogram data set.\n");
      return Analysis::ERR;
    }
    outfile_->AddDataSet( hist_ );
  } else {
    // Native output. Remove DataFile from DataFileList
    outfile_ = setup.DFL().RemoveDataFile( outfile_ );
    native_ = setup.DFL().AddCpptrajFile( outfilename_, "Histogram output" );
    if (native_ == 0) return Analysis::ERR;
  }
  // Summarize the setup for the user.
  mprintf("\tHist: %s: Set up for %zu dimensions using the following datasets:\n",
          outfilename_.c_str(), N_dimensions_);
  mprintf("\t[ ");
  for (std::vector<DataSet_1D*>::iterator ds=histdata_.begin(); ds!=histdata_.end(); ++ds)
    mprintf("%s ",(*ds)->legend());
  mprintf("]\n");
  if (calcAMD_)
    mprintf("\tPopulating bins using AMD boost from data set %s\n", amddata_->legend());
  if (calcFreeE_)
    mprintf("\tFree energy in kcal/mol will be calculated from bin populations at %f K.\n",Temp_);
  if (nativeOut_)
    mprintf("\tUsing internal routine for output. Data will not be stored on the data set list.\n");
  //if (circular_ || gnuplot_) {
  //  mprintf("\tWarning: gnuplot and/or circular specified; advanced grace/gnuplot\n");
  //  mprintf("\t         formatting disabled.\n");
  if (circular_)
    mprintf("\tcircular: Output coordinates will be wrapped.\n");
  if (gnuplot_ && outfile_ == 0)
    mprintf("\tgnuplot: Output will be in gnuplot-readable format.\n");
  //}
  if (normalize_ == NORM_SUM)
    mprintf("\tnorm: Sum over bins will be normalized to 1.0.\n");
  else if (normalize_ == NORM_INT)
    mprintf("\tnormint: Integral over bins will be normalized to 1.0.\n");
  if (!traj3dName_.empty()) {
    mprintf("\tPseudo-trajectory will be written to '%s' with format %s\n",
            traj3dName_.c_str(), TrajectoryFile::FormatString(traj3dFmt_));
    if (!parmoutName_.empty())
      mprintf("\tCorresponding pseudo-topology will be written to '%s'\n",
              parmoutName_.c_str());
  }
  return Analysis::OK;
}
/** Set up state analysis.
  * Parses output file keywords, then any number of state definitions of the
  * form 'state <ID>,<dataset>,<min>,<max>' (a state is active when the 1D
  * data set's value is in [min, max)). Finally creates the integer
  * state-vs-time data set.
  * Fix vs previous version: the state summary printf passed a ptrdiff_t
  * (iterator difference) to a %u conversion, which is undefined behavior on
  * platforms where ptrdiff_t is wider than unsigned int; an explicit cast
  * is now applied.
  * \param analyzeArgs Argument list to parse (keywords consumed).
  * \param datasetlist Master DataSetList.
  * \param DFLin Master DataFileList.
  * \param debugIn Debug verbosity level.
  * \return Analysis::OK on success, Analysis::ERR on error.
  */
Analysis::RetType Analysis_State::Setup(ArgList& analyzeArgs, DataSetList* datasetlist,
                                        DataFileList* DFLin, int debugIn)
{
  debug_ = debugIn;
  masterDSL_ = datasetlist;
  // Output files: states vs time, curves, state summary, transitions.
  DataFile* outfile = DFLin->AddDataFile( analyzeArgs.GetStringKey("out"), analyzeArgs );
  curveOut_ = DFLin->AddDataFile( analyzeArgs.GetStringKey("curveout"), analyzeArgs );
  stateOut_ = DFLin->AddCpptrajFile( analyzeArgs.GetStringKey("stateout"), "State Output",
                                     DataFileList::TEXT, true);
  transOut_ = DFLin->AddCpptrajFile( analyzeArgs.GetStringKey("transout"), "Transitions Output",
                                     DataFileList::TEXT, true);
  normalize_ = analyzeArgs.hasKey("norm");
  // Get definitions of states if present.
  // Define states as 'state <ID>,<dataset>,<min>,<max>'
  std::string state_arg = analyzeArgs.GetStringKey("state");
  while (!state_arg.empty()) {
    // Expect 4 comma-separated fields: <ID>,<dataset>,<min>,<max>
    ArgList argtmp(state_arg, ",");
    if (argtmp.Nargs() != 4) {
      mprinterr("Error: Malformed state argument '%s': expect <ID>,<dataset>,<min>,<max>\n",
                state_arg.c_str());
      return Analysis::ERR;
    }
    std::string state_id = argtmp.GetStringNext();
    // TODO: Check duplicate names
    if (state_id.empty()) {
      mprinterr("Error: In state argument, could not get ID.\n");
      return Analysis::ERR;
    }
    // State criterion data set must be 1D.
    DataSet* ds = datasetlist->GetDataSet( argtmp.GetStringNext() );
    if (ds == 0) return Analysis::ERR;
    if (ds->Ndim() != 1) {
      mprinterr("Error: Only 1D data sets allowed.\n");
      return Analysis::ERR;
    }
    double min = argtmp.getNextDouble(0.0);
    double max = argtmp.getNextDouble(0.0);
    if (max < min) {
      mprinterr("Error: max value cannot be less than min.\n");
      return Analysis::ERR;
    }
    States_.push_back( StateType(state_id, (DataSet_1D*)ds, min, max) );
    // Next state definition, if any.
    state_arg = analyzeArgs.GetStringKey("state");
  }
  if (States_.empty()) {
    mprinterr("Error: No states defined.\n");
    return Analysis::ERR;
  }
  // Integer data set holding the state index for each frame.
  state_data_ = datasetlist->AddSet(DataSet::INTEGER, analyzeArgs.GetStringKey("name"), "State");
  if (state_data_ == 0) return Analysis::ERR;
  if (outfile != 0) outfile->AddDataSet( state_data_ );
  mprintf(" STATE: The following states have been set up:\n");
  for (StateArray::const_iterator state = States_.begin(); state != States_.end(); ++state)
    // Cast the iterator difference (ptrdiff_t) to match %u; passing a
    // ptrdiff_t to %u directly is undefined behavior on LP64 platforms.
    mprintf("\t%u: %20s %12.4f <= %-20s < %12.4f\n",
            (unsigned int)(state - States_.begin()),
            state->DS().legend(), state->Min(), state->id(), state->Max());
  mprintf("\tState data set: %s\n", state_data_->legend());
  if (outfile != 0)
    mprintf("\tStates vs time output to file '%s'\n", outfile->DataFilename().full());
  if (curveOut_ != 0)
    mprintf("\tCurves output to file '%s'\n", curveOut_->DataFilename().full());
  mprintf("\tState output to file '%s'\n", stateOut_->Filename().full());
  mprintf("\tTransitions output to file '%s'\n", transOut_->Filename().full());
  if (normalize_)
    mprintf("\tCurves will be normalized to 1.0\n");
  return Analysis::OK;
}
// Action_CreateReservoir::Init()
/** Initialize the 'createreservoir' action: parse keywords, locate the
  * topology plus the energy (and optional bin) data sets, and record
  * reservoir parameters (temperature, random seed, title).
  * \return Action::OK on success, Action::ERR on error.
  */
Action::RetType Action_CreateReservoir::Init(ArgList& actionArgs, ActionInit& init, int debugIn)
{
# ifndef BINTRAJ
  // Without NetCDF support the action cannot function; everything below
  // is unreachable in that build configuration.
  mprinterr("Error: NetCDF reservoir requires NetCDF support. Recompile with -DBINTRAJ.\n");
  return Action::ERR;
# endif
# ifdef MPI
  // Reservoir creation is serial-only.
  if (init.TrajComm().Size() > 1) {
    mprinterr("Error: 'createreservoir' action does not work with > 1 process (%i processes currently).\n",
              init.TrajComm().Size());
    return Action::ERR;
  }
# endif
  // Get keywords
  // First positional argument is the output reservoir file name.
  filename_.SetFileName( actionArgs.GetStringNext() );
  if (filename_.empty()) {
    mprinterr("Error: createreservoir: No filename specified.\n");
    return Action::ERR;
  }
  // Reservoir temperature is mandatory; -1.0 default flags "not given".
  reservoirT_ = actionArgs.getKeyDouble("temp0", -1.0);
  if (reservoirT_ < 0.0) {
    mprinterr("Error: Reservoir temperature must be specified and cannot be < 0.0\n");
    return Action::ERR;
  }
  // Random seed is mandatory and must be positive.
  iseed_ = actionArgs.getKeyInt("iseed", 0);
  if (iseed_ < 1) {
    mprinterr("Error: Reservoir random seed must be specified and > 0\n");
    return Action::ERR;
  }
  // Velocities/forces are written by default; 'novelocity'/'noforce' disable.
  useVelocity_ = !actionArgs.hasKey("novelocity");
  useForce_ = !actionArgs.hasKey("noforce");
  // Get parm for reservoir traj
  original_trajparm_ = init.DSL().GetTopology( actionArgs );
  if (original_trajparm_ == 0) {
    mprinterr("Error: createreservoir: no topology.\n");
    return Action::ERR;
  }
  // Get energy data set; must be 1D FLOAT, DOUBLE, or XYMESH.
  std::string eneDsname = actionArgs.GetStringKey("ene");
  DataSet* dstmp = init.DSL().GetDataSet( eneDsname );
  if (dstmp == 0) {
    mprinterr("Error: could not get energy data set %s\n", eneDsname.c_str());
    return Action::ERR;
  }
  if (dstmp->Type() != DataSet::FLOAT &&
      dstmp->Type() != DataSet::DOUBLE &&
      dstmp->Type() != DataSet::XYMESH)
  {
    mprinterr("Error: energy data set %s must be type FLOAT, DOUBLE, or XYMESH.\n",
              dstmp->legend());
    return Action::ERR;
  }
  if (dstmp->Ndim() != 1) {
    mprinterr("Error: energy data set is not 1D (%zu)\n", dstmp->Ndim());
    return Action::ERR;
  }
  ene_ = static_cast<DataSet_1D*>( dstmp );
  // Get optional bin data set (1D). bin_ presumably defaults to 0 in the
  // constructor when 'bin' is not specified -- TODO confirm.
  std::string binDSname = actionArgs.GetStringKey("bin");
  if (!binDSname.empty()) {
    dstmp = init.DSL().GetDataSet( binDSname );
    if (dstmp == 0) {
      mprinterr("Error: could not get bin data set %s\n", binDSname.c_str());
      return Action::ERR;
    } else if (dstmp->Ndim() != 1) {
      mprinterr("Error: bin data set must be one dimensional.\n");
      return Action::ERR;
    }
    bin_ = static_cast<DataSet_1D*>( dstmp );
  }
  trajIsOpen_ = false;
  nframes_ = 0;
  // Setup output reservoir file
  reservoir_.SetDebug( debugIn );
  // Set title (default title used when none given).
  title_ = actionArgs.GetStringKey("title");
  if (title_.empty())
    title_.assign("Cpptraj Generated structure reservoir");
  // Report the configuration.
  mprintf(" CREATERESERVOIR: '%s', energy data '%s'", filename_.full(), ene_->legend());
  if (bin_ != 0) mprintf(", bin data '%s'", bin_->legend());
  mprintf("\n\tTitle: %s\n", title_.c_str());
  mprintf("\tReservoir temperature= %.2f, random seed= %i\n", reservoirT_, iseed_);
  if (useVelocity_)
    mprintf("\tVelocities will be written to reservoir if present.\n");
  else
    mprintf("\tVelocities will not be written to reservoir.\n");
  if (useForce_)
    mprintf("\tForces will be written to reservoir if present.\n");
  else
    mprintf("\tForces will not be written to reservoir.\n");
  mprintf("\tTopology: %s\n", original_trajparm_->c_str());
  return Action::OK;
}
// DataIO_Std::WriteSet3D() int DataIO_Std::WriteSet3D( DataSet const& setIn, CpptrajFile& file ) { if (setIn.Ndim() != 3) { mprinterr("Internal Error: DataSet %s in DataFile %s has %zu dimensions, expected 3.\n", setIn.legend(), file.Filename().full(), setIn.Ndim()); return 1; } DataSet_3D const& set = static_cast<DataSet_3D const&>( setIn ); Dimension const& Xdim = static_cast<Dimension const&>(set.Dim(0)); Dimension const& Ydim = static_cast<Dimension const&>(set.Dim(1)); Dimension const& Zdim = static_cast<Dimension const&>(set.Dim(2)); //if (Xdim.Step() == 1.0) xcol_precision = 0; if (sparse_) mprintf("\tOnly writing voxels with value > %g\n", cut_); // Print X Y Z Values // x y z val(x,y,z) DataSet::SizeArray pos(3); if (writeHeader_) { file.Printf("#counts %zu %zu %zu\n", set.NX(), set.NY(), set.NZ()); file.Printf("#origin %12.7f %12.7f %12.7f\n", set.Bin().GridOrigin()[0], set.Bin().GridOrigin()[1], set.Bin().GridOrigin()[2]); if (set.Bin().IsOrthoGrid()) { GridBin_Ortho const& b = static_cast<GridBin_Ortho const&>( set.Bin() ); file.Printf("#delta %12.7f %12.7f %12.7f\n", b.DX(), b.DY(), b.DZ()); } else { GridBin_Nonortho const& b = static_cast<GridBin_Nonortho const&>( set.Bin() ); file.Printf("#delta %12.7f %12.7f %12.7f %12.7f %12.7f %12.7f %12.7f %12.7f %12.7f\n", b.Ucell()[0]/set.NX(), b.Ucell()[1]/set.NX(), b.Ucell()[2]/set.NX(), b.Ucell()[3]/set.NY(), b.Ucell()[4]/set.NY(), b.Ucell()[5]/set.NY(), b.Ucell()[6]/set.NZ(), b.Ucell()[7]/set.NZ(), b.Ucell()[8]/set.NZ()); } file.Printf("#%s %s %s %s\n", Xdim.Label().c_str(), Ydim.Label().c_str(), Zdim.Label().c_str(), set.legend()); } std::string xyz_fmt; if (XcolPrecSet()) { TextFormat nfmt( XcolFmt(), XcolWidth(), XcolPrec() ); xyz_fmt = nfmt.Fmt() + " " + nfmt.Fmt() + " " + nfmt.Fmt() + " "; } else { TextFormat xfmt( XcolFmt(), set.NX(), Xdim.Min(), Xdim.Step(), 8, 3 ); TextFormat yfmt( XcolFmt(), set.NY(), Ydim.Min(), Ydim.Step(), 8, 3 ); TextFormat zfmt( XcolFmt(), set.NZ(), Zdim.Min(), Zdim.Step(), 
8, 3 ); xyz_fmt = xfmt.Fmt() + " " + yfmt.Fmt() + " " + zfmt.Fmt() + " "; } if (sparse_) { for (pos[2] = 0; pos[2] < set.NZ(); ++pos[2]) { for (pos[1] = 0; pos[1] < set.NY(); ++pos[1]) { for (pos[0] = 0; pos[0] < set.NX(); ++pos[0]) { double val = set.GetElement(pos[0], pos[1], pos[2]); if (val > cut_) { Vec3 xyz = set.Bin().Corner(pos[0], pos[1], pos[2]); file.Printf( xyz_fmt.c_str(), xyz[0], xyz[1], xyz[2] ); set.WriteBuffer( file, pos ); file.Printf("\n"); } } } } } else { for (pos[2] = 0; pos[2] < set.NZ(); ++pos[2]) { for (pos[1] = 0; pos[1] < set.NY(); ++pos[1]) { for (pos[0] = 0; pos[0] < set.NX(); ++pos[0]) { Vec3 xyz = set.Bin().Corner(pos[0], pos[1], pos[2]); file.Printf( xyz_fmt.c_str(), xyz[0], xyz[1], xyz[2] ); set.WriteBuffer( file, pos ); file.Printf("\n"); } } } } return 0; }
// DataIO_Std::WriteSet2D()
/** Write a 2D DataSet in standard text format. Two layouts are supported:
  * 'square2d' writes the matrix as a grid (optionally with X/Y coordinate
  * header row/column), otherwise each element is written as an
  * 'X Y value' line.
  * \param setIn DataSet to write; must be 2D.
  * \param file Output file.
  * \return 0 on success, 1 on error.
  */
int DataIO_Std::WriteSet2D( DataSet const& setIn, CpptrajFile& file ) {
  // Only 2D sets are handled here.
  if (setIn.Ndim() != 2) {
    mprinterr("Internal Error: DataSet %s in DataFile %s has %zu dimensions, expected 2.\n",
              setIn.legend(), file.Filename().full(), setIn.Ndim());
    return 1;
  }
  DataSet_2D const& set = static_cast<DataSet_2D const&>( setIn );
  int xcol_width = 8;
  int xcol_precision = 3;
  Dimension const& Xdim = static_cast<Dimension const&>(set.Dim(0));
  Dimension const& Ydim = static_cast<Dimension const&>(set.Dim(1));
  // Integer-stepped X coordinate (e.g. frame numbers) needs no decimals.
  if (Xdim.Step() == 1.0) xcol_precision = 0;
  DataSet::SizeArray positions(2);
  TextFormat ycoord_fmt(XcolFmt()), xcoord_fmt(XcolFmt());
  if (square2d_) {
    // Print XY values in a grid:
    //   x0y0 x1y0 x2y0
    //   x0y1 x1y1 x2y1
    //   x0y2 x1y2 x2y2
    // If file has header, top-left value will be '#<Xlabel>-<Ylabel>',
    // followed by X coordinate values.
    if (writeHeader_) {
      ycoord_fmt.SetCoordFormat( set.Nrows(), Ydim.Min(), Ydim.Step(),
                                 xcol_width, xcol_precision );
      std::string header;
      if (Xdim.Label().empty() && Ydim.Label().empty())
        header = "#Frame";
      else
        header = "#" + Xdim.Label() + "-" + Ydim.Label();
      WriteNameToBuffer( file, header, xcol_width, true );
      // X coordinates use the data column width so header aligns with data.
      xcoord_fmt.SetCoordFormat( set.Ncols(), Xdim.Min(), Xdim.Step(),
                                 set.Format().ColumnWidth(), xcol_precision );
      for (size_t ix = 0; ix < set.Ncols(); ix++)
        file.Printf( xcoord_fmt.fmt(), set.Coord(0, ix) );
      file.Printf("\n");
    }
    // One output row per matrix row; leading Y coordinate only when the
    // header is enabled.
    for (positions[1] = 0; positions[1] < set.Nrows(); positions[1]++) {
      if (writeHeader_)
        file.Printf( ycoord_fmt.fmt(), set.Coord(1, positions[1]) );
      for (positions[0] = 0; positions[0] < set.Ncols(); positions[0]++)
        set.WriteBuffer( file, positions );
      file.Printf("\n");
    }
  } else {
    // Print X Y Values
    //   x y val(x,y)
    if (writeHeader_)
      file.Printf("#%s %s %s\n", Xdim.Label().c_str(), Ydim.Label().c_str(),
                  set.legend());
    // Coordinate format: user-specified precision wins over defaults.
    if (XcolPrecSet()) {
      xcoord_fmt = TextFormat(XcolFmt(), XcolWidth(), XcolPrec());
      ycoord_fmt = xcoord_fmt;
    } else {
      xcoord_fmt.SetCoordFormat( set.Ncols(), Xdim.Min(), Xdim.Step(), 8, 3 );
      ycoord_fmt.SetCoordFormat( set.Nrows(), Ydim.Min(), Ydim.Step(), 8, 3 );
    }
    std::string xy_fmt = xcoord_fmt.Fmt() + " " + ycoord_fmt.Fmt() + " ";
    for (positions[1] = 0; positions[1] < set.Nrows(); ++positions[1]) {
      for (positions[0] = 0; positions[0] < set.Ncols(); ++positions[0]) {
        file.Printf( xy_fmt.c_str(),
                     set.Coord(0, positions[0]), set.Coord(1, positions[1]) );
        set.WriteBuffer( file, positions );
        file.Printf("\n");
      }
    }
  }
  return 0;
}