bool Settings::LoadSettings() //////////////////////////////////////////////////////////////////////// { #define GETSTRING(a,b) if (settingsfile.GetString(a, tempstring)) Set##b(tempstring); #define GETBOOL(a,b) if (settingsfile.GetBool(a, tempbool)) Set##b(tempbool); #define GETINT(a,b) if (settingsfile.GetInteger(a, tempint)) Set##b(tempint); BString tempstring; bool tempbool; int tempint; DataFile settingsfile; if (settingsfile.LoadDataFile(GetSettingsFile())) { if (settingsfile.GetBool("ASKONEXIT", tempbool)) SetAskOnExit(tempbool); if (settingsfile.GetString("LANGUAGE", tempstring)) SetLanguage(tempstring); if (settingsfile.GetInteger("WINDOWLEFT", tempint)) SetWindowLeft(tempint); GETINT("WINDOWTOP", WindowTop); GETINT("WINDOWWIDTH", WindowWidth); GETINT("WINDOWHEIGHT", WindowHeight); GETSTRING("TERMINALWINDOW", TerminalWindow); GETSTRING("LEFTPANELPATH", LeftPanelPath); GETSTRING("RIGHTPANELPATH", RightPanelPath); return true; } else return false; }
void ExtentManager::flushFiles( bool sync ) { DEV Lock::assertAtLeastReadLocked( _dbname ); for( vector<DataFile*>::iterator i = _files.begin(); i != _files.end(); i++ ) { DataFile *f = *i; f->flush(sync); } }
// Action_Angle::init() Action::RetType Action_Angle::Init(ArgList& actionArgs, TopologyList* PFL, DataSetList* DSL, DataFileList* DFL, int debugIn) { // Get keywords DataFile* outfile = DFL->AddDataFile( actionArgs.GetStringKey("out"), actionArgs ); useMass_ = actionArgs.hasKey("mass"); // Get Masks std::string mask1 = actionArgs.GetMaskNext(); std::string mask2 = actionArgs.GetMaskNext(); std::string mask3 = actionArgs.GetMaskNext(); if (mask1.empty() || mask2.empty() || mask3.empty()) { mprinterr("Error: angle: Requires 3 masks\n"); return Action::ERR; } Mask1_.SetMaskString(mask1); Mask2_.SetMaskString(mask2); Mask3_.SetMaskString(mask3); // Dataset to store angles ang_ = DSL->AddSet(DataSet::DOUBLE, MetaData(actionArgs.GetStringNext(),MetaData::M_ANGLE),"Ang"); if (ang_==0) return Action::ERR; // Add dataset to data file list if (outfile != 0) outfile->AddDataSet( ang_ ); mprintf(" ANGLE: [%s]-[%s]-[%s]\n",Mask1_.MaskString(), Mask2_.MaskString(), Mask3_.MaskString()); if (useMass_) mprintf("\tUsing center of mass of atoms in masks.\n"); return Action::OK; }
/** Allocate a new extent of (approximately) 'size' bytes, preferring free
  * space in already-open data files before growing the database with new
  * files. Asserts (msgasserted 14810) if no space can be found, since
  * callers do not check for a null/invalid return.
  */
DiskLoc MmapV1ExtentManager::_createExtent( OperationContext* txn, int size, bool enforceQuota ) {
    // Round the requested size to a standard extent size, clamped to the max.
    size = quantizeExtentSize( size );
    if ( size > maxSize() )
        size = maxSize();
    verify( size < DataFile::maxSize() );
    // Scan existing files newest-first for one with enough unused space.
    for ( int i = numFiles() - 1; i >= 0; i-- ) {
        DataFile* f = _getOpenFile(i);
        invariant(f);
        if ( f->getHeader()->unusedLength >= size ) {
            return _createExtentInFile( txn, i, f, size, enforceQuota );
        }
    }
    _checkQuota( enforceQuota, numFiles() );
    // no space in an existing file
    // allocate files until we either get one big enough or hit maxSize
    // (bounded at 8 attempts to avoid unbounded file creation).
    for ( int i = 0; i < 8; i++ ) {
        DataFile* f = _addAFile( txn, size, false );
        if ( f->getHeader()->unusedLength >= size ) {
            // The new file is always the last one.
            return _createExtentInFile( txn, numFiles() - 1, f, size, enforceQuota );
        }
    }
    // callers don't check for null return code, so assert
    msgasserted(14810, "couldn't allocate space for a new extent" );
}
/** Allocate a new extent for namespace 'ns', preferring free space in an
  * existing data file and otherwise growing the database with new files.
  * Asserts (msgasserted 14810) on failure since callers do not null-check.
  */
Extent* ExtentManager::createExtent(const char *ns, int size, bool newCapped, bool enforceQuota ) {
    // Round the requested size to a standard extent size.
    size = quantizeExtentSize( size );
    // Scan existing files newest-first for one with enough unused space.
    for ( int i = numFiles() - 1; i >= 0; i-- ) {
        DataFile* f = getFile( i );
        if ( f->getHeader()->unusedLength >= size ) {
            return _createExtentInFile( i, f, ns, size, newCapped, enforceQuota );
        }
    }
    // no space in an existing file
    // allocate files until we either get one big enough or hit maxSize
    // (bounded at 8 attempts). A file that has reached DataFile::maxSize()
    // is used as-is even if it cannot fully satisfy 'size'.
    for ( int i = 0; i < 8; i++ ) {
        DataFile* f = addAFile( size, false );
        if ( f->getHeader()->unusedLength >= size ||
             f->getHeader()->fileLength >= DataFile::maxSize() ) {
            return _createExtentInFile( numFiles() - 1, f, ns, size, newCapped, enforceQuota );
        }
    }
    // callers don't check for null return code, so assert
    msgasserted(14810, "couldn't allocate space for a new extent" );
}
/** Execute the 'precision' command: set output width/precision either for
  * all DataSets in a named DataFile, or for DataSets matching a name.
  * Expected args: &lt;filename|setname&gt; [&lt;width&gt;] [&lt;precision&gt;]
  * \return CpptrajState::OK on success, CpptrajState::ERR on error.
  */
Exec::RetType Exec_Precision::Execute(CpptrajState& State, ArgList& argIn) {
  // Next string is DataSet(s)/DataFile that command pertains to.
  std::string name1 = argIn.GetStringNext();
  if (name1.empty()) {
    mprinterr("Error: No filename/setname given.\n");
    return CpptrajState::ERR;
  }
  // This will break if dataset name starts with a digit...
  int width = argIn.getNextInteger(12);
  if (width < 1) {
    // Fix: error previously went to mprintf; use mprinterr like other errors.
    mprinterr("Error: Cannot set width < 1 (%i).\n", width);
    return CpptrajState::ERR;
  }
  int precision = argIn.getNextInteger(4);
  if (precision < 0) precision = 0;
  DataFile* df = State.DFL().GetDataFile(name1);
  if (df != 0) {
    // Name matched a DataFile: apply to every set in that file.
    mprintf("\tSetting precision for all sets in %s to %i.%i\n",
            df->DataFilename().base(), width, precision);
    df->SetDataFilePrecision(width, precision);
  } else {
    // Otherwise treat the name as a DataSet selection.
    State.DSL().SetPrecisionOfDataSets( name1, width, precision );
  }
  return CpptrajState::OK;
}
// Load a mission from the given data file and make it the current mission.
// On success the previous mission is saved (home base) or destroyed via its
// DataFile. Does nothing for an empty file name.
void MissionSelector::loadMission(string fileName)
{
	if(fileName == "")
		return;

	Mission* oldMission = rMain()->getMission();
	Orbit* orbitScreen = static_cast<Orbit*>(rMain()->getScreenManager().getState(Main::ORBIT_SCREEN));
	orbitScreen->enterReadyState();

	DataFile* dataFile = new DataFile(stringToWString(fileName));

	// Resolve the mission from the file's root item, if any.
	Mission* mission = NULL;
	if(dataFile->getRootItem() != NULL)
		mission = dataFile->getRootItem()->castToClass<Mission>();

	if(mission == NULL)
	{
		// Fix: previously dataFile leaked when the file had no root item
		// or the root item was not a Mission.
		delete dataFile;
		return;
	}

	rMain()->setMission(mission);

	if(oldMission != NULL)
	{
		if (oldMission == ConfigManager::getPlayer()->getHomeBase())
		{
			// Home base is persistent: save rather than delete.
			ConfigManager::getSingleton().savePlayer();
		}
		else
		{
			// Destroying the DataFile releases the old mission tree.
			delete oldMission->getDataFile();
		}
	}
}
/* Load compiled virtual code for program 'programid' (record number 'nr')
 * from the system code tables of 'db' into 'vc'.
 * Throws SQL_UNKNOWN_PROGRAMID if no index entry matches, and
 * SQL_TIMESTAMP_MISMATCH if the stored code's timestamp differs from the
 * requesting program's (i.e. the program was recompiled since).
 */
void sqlLoadCode( const Database      &db
                , const SqlApiBindProgramId &programid
                , int                  nr
                , VirtualCode         &vc
                )
{
  // Open the code data file and its key index read-only.
  DataFile datafile ( db, SYSTEM_CODEDATA_FNAME, DBFMODE_READONLY);
  KeyFile  indexfile( db, SYSTEM_CODEKEY_FNAME , DBFMODE_READONLY);
  KeyFileDefinition keydef(indexfile);
  KeyType key;
  // Key = (program file name, record number).
  keydef.put(key,0,String(programid.m_fileName));
  keydef.put(key,1,nr);
  // Exact match on both key fields.
  bool found = indexfile.searchMin( RELOP_EQ, key, 2);
#ifdef DEBUGMODULE
  // NOTE(review): uses programid.filename (lowercase) while the rest of this
  // function uses m_fileName — verify this branch still compiles when
  // DEBUGMODULE is defined.
  _tprintf(_T("loading code for <%s>,%d\n"),programid.filename,nr);
#endif
  if(!found)
    throwSqlError(SQL_UNKNOWN_PROGRAMID,_T("Unknown programfile:<%s>"),programid.m_fileName);
  // The key record holds the data-file address of the VirtualCode blob.
  datafile.readRecord( keydef.getRecordAddr(key), &vc, sizeof(VirtualCode));
  if(_tcscmp(vc.getProgramId().m_timestamp,programid.m_timestamp) != 0)
    throwSqlError(SQL_TIMESTAMP_MISMATCH,_T("Timestamp mismatch on <%s>"),programid.m_fileName);
}
/** Called once before traj processing. Set up reference info.
  * Parses 'out', reference keywords ('first'/'reftraj'/reference frame),
  * and target/reference masks; creates the DRMSD output DataSet.
  * \return Action::OK on success, Action::ERR on error.
  */
Action::RetType Action_DistRmsd::Init(ArgList& actionArgs, TopologyList* PFL, DataSetList* DSL, DataFileList* DFL, int debugIn)
{
  // Check for keywords
  DataFile* outfile = DFL->AddDataFile( actionArgs.GetStringKey("out"), actionArgs );
  // Reference keywords
  // TODO: Can these just be put in the InitRef call?
  bool first = actionArgs.hasKey("first");
  ReferenceFrame REF = DSL->GetReferenceFrame( actionArgs );
  std::string reftrajname = actionArgs.GetStringKey("reftraj");
  Topology* RefParm = PFL->GetParm( actionArgs );
  // Get the RMS mask string for target
  std::string mask0 = actionArgs.GetMaskNext();
  TgtMask_.SetMaskString(mask0);
  // Get the RMS mask string for reference; defaults to the target mask.
  std::string mask1 = actionArgs.GetMaskNext();
  if (mask1.empty())
    mask1 = mask0;
  // Initialize reference
  if (refHolder_.InitRef(false, first, false, false, reftrajname, REF, RefParm,
                         mask1, actionArgs, "distrmsd"))
    return Action::ERR;
  // Set up the RMSD data set
  drmsd_ = DSL->AddSet(DataSet::DOUBLE, actionArgs.GetStringNext(),"DRMSD");
  if (drmsd_==0) return Action::ERR;
  // Add dataset to data file list
  if (outfile != 0) outfile->AddDataSet( drmsd_ );
  mprintf("    DISTRMSD: (%s), reference is %s\n",TgtMask_.MaskString(),
          refHolder_.RefModeString());
  return Action::OK;
}
/** Initialize the experimental 'channel' action: parse grid spacing
  * ('dx'/'dy'/'dz'), solute and solvent masks, and create the output grid
  * DataSet. dy defaults to dx and dz defaults to dy when unspecified.
  * \return Action::OK on success, Action::ERR on error.
  */
Action::RetType Action_Channel::Init(ArgList& actionArgs, ActionInit& init, int debugIn)
{
  // Keywords.
  DataFile* outfile = init.DFL().AddDataFile( actionArgs.GetStringKey("out"), actionArgs );
  dxyz_[0] = actionArgs.getKeyDouble("dx", 0.35);
  dxyz_[1] = actionArgs.getKeyDouble("dy", dxyz_[0]); // default: same as dx
  dxyz_[2] = actionArgs.getKeyDouble("dz", dxyz_[1]); // default: same as dy
  // solute mask (required)
  std::string sMask = actionArgs.GetMaskNext();
  if (sMask.empty()) {
    mprinterr("Error: No solute mask specified.\n");
    return Action::ERR;
  }
  soluteMask_.SetMaskString( sMask );
  // solvent mask; defaults to water oxygens.
  sMask = actionArgs.GetMaskNext();
  if (sMask.empty())
    sMask.assign(":WAT@O");
  solventMask_.SetMaskString( sMask );
  // Grid Data Set
  grid_ = init.DSL().AddSet(DataSet::GRID_FLT, actionArgs.GetStringNext(), "Channel");
  if (grid_ == 0) return Action::ERR;
  if (outfile != 0) outfile->AddDataSet( grid_ );
  mprintf("Warning: *** THIS ACTION IS EXPERIMENTAL AND NOT FULLY IMPLEMENTED. ***\n");
  mprintf("    CHANNEL: Solute mask [%s], solvent mask [%s]\n",
          soluteMask_.MaskString(), solventMask_.MaskString());
  mprintf("\tSpacing: XYZ={ %g %g %g }\n", dxyz_[0], dxyz_[1], dxyz_[2]);
  return Action::OK;
}
// Action_AreaPerMol::Init() Action::RetType Action_AreaPerMol::Init(ArgList& actionArgs, TopologyList* PFL, DataSetList* DSL, DataFileList* DFL, int debugIn) { // Get keywords DataFile* outfile = DFL->AddDataFile( actionArgs.GetStringKey("out"), actionArgs ); if (actionArgs.hasKey("xy")) areaType_ = XY; else if (actionArgs.hasKey("xz")) areaType_ = XZ; else if (actionArgs.hasKey("yz")) areaType_ = YZ; else areaType_ = XY; Nmols_ = (double)actionArgs.getKeyInt("nmols", -1); // Get Masks if (Nmols_ < 0.0) { Nlayers_ = (double)actionArgs.getKeyInt("nlayers", 1); if (Nlayers_ < 1.0) { mprinterr("Error: Number of layers must be > 0\n"); return Action::ERR; } Mask1_.SetMaskString( actionArgs.GetMaskNext() ); } // DataSet area_per_mol_ = DSL->AddSet(DataSet::DOUBLE, actionArgs.GetStringNext(),"APM"); if (area_per_mol_==0) return Action::ERR; // Add DataSet to DataFileList if (outfile != 0) outfile->AddDataSet( area_per_mol_ ); mprintf(" AREAPERMOL: Calculating %s area per molecule", APMSTRING[areaType_]); if (Mask1_.MaskStringSet()) mprintf(" using mask '%s', %.0f layers.\n", Mask1_.MaskString(), Nlayers_); else mprintf(" for %.0f mols\n", Nmols_); return Action::OK; }
// todo: this is called a lot. streamline the common case DataFile* MmapV1ExtentManager::getFile( TransactionExperiment* txn, int n, int sizeNeeded , bool preallocateOnly) { verify(this); DEV Lock::assertAtLeastReadLocked( _dbname ); if ( n < 0 || n >= DiskLoc::MaxFiles ) { log() << "getFile(): n=" << n << endl; massert( 10295 , "getFile(): bad file number value (corrupt db?)." " See http://dochub.mongodb.org/core/data-recovery", false); } DEV { if ( n > 100 ) { log() << "getFile(): n=" << n << endl; } } DataFile* p = 0; if ( !preallocateOnly ) { while ( n >= (int) _files.size() ) { verify(this); if( !Lock::isWriteLocked(_dbname) ) { log() << "error: getFile() called in a read lock, yet file to return is not yet open"; log() << " getFile(" << n << ") _files.size:" <<_files.size() << ' ' << fileName(n).string(); invariant(false); } _files.push_back(0); } p = _files[n]; } if ( p == 0 ) { if ( n == 0 ) audit::logCreateDatabase( currentClient.get(), _dbname ); DEV Lock::assertWriteLocked( _dbname ); boost::filesystem::path fullName = fileName( n ); string fullNameString = fullName.string(); p = new DataFile(n); int minSize = 0; if ( n != 0 && _files[ n - 1 ] ) minSize = _files[ n - 1 ]->getHeader()->fileLength; if ( sizeNeeded + DataFileHeader::HeaderSize > minSize ) minSize = sizeNeeded + DataFileHeader::HeaderSize; try { Timer t; p->open( txn, fullNameString.c_str(), minSize, preallocateOnly ); if ( t.seconds() > 1 ) { log() << "MmapV1ExtentManager took " << t.seconds() << " seconds to open: " << fullNameString; } } catch ( AssertionException& ) { delete p; throw; } if ( preallocateOnly ) delete p; else _files[n] = p; } return preallocateOnly ? 0 : p; }
/* Process one regex match: extract captures from the first line of
 * LineList, derive descriptor fields from them, and append a CSV row
 * (original fields, FilingIndex, descriptors) to TableStream.
 * Returns TableStream.good() — i.e. whether the stream is still writable,
 * NOT whether a row was actually written (nothing is written when
 * GatherInfo reports failure).
 */
bool DealWithMatch(const vector <string> &LineList, const boost::regex &RegExpress,
                   const DataFile &TypeEntry, ofstream &TableStream,
                   const size_t FilingIndex)
// MAJOR robustness issues here! See notes in this function.
{
  // cerr << "Going into DealWithMatch()\n";
  bool IsSuccessful;
  // +1: capture 0 is the whole match, then one capture per pattern explain.
  const vector <string> Captures = ObtainCaptures(RegExpress, LineList[0],
                                                  TypeEntry.PatternExplainCount() + 1);
  // cerr << "Got Captures\n";
  const vector <string> Descriptors = GatherInfo(LineList, Captures,
                                                 TypeEntry.GiveAllPatternExplains(),
                                                 TypeEntry.GiveAllAssignExpressions(),
                                                 IsSuccessful);
  // cerr << "Got Descriptors\n";
  // May need to put in controls of some sort to determine whether something should be quoted or not.
  if (IsSuccessful) {
    TableStream << GiveDelimitedList(LineList, ',') << ',' << FilingIndex << ','
                << GiveDelimitedList(Descriptors, ',') << "\n";
  }
  // This above segment prints to the appropriate subcatalogue file the following info:
  // Filename, File Group number, Volume Name, FileSize, FilingIndex, and Date information for the file, and any other descriptor information.
  // there is always DateTime_Info for Descriptors[0]
  return(TableStream.good());
}
void MissionSelector::startMission(unsigned num) { Entry missionEntry; if(mCurrentCampaign == NULL) { // FIXME: we should fall back to station school in case a first timer plays missionEntry = mMissions[0]; } else { if(num < 0 || num >= static_cast<int>(mCurrentMissions.size())) return; mMissionNum = num; missionEntry = mCurrentMissions[num]; } Mission* oldMission = rMain()->getMission(); Orbit* orbitScreen = static_cast<Orbit*>(rMain()->getScreenManager().getState(Main::ORBIT_SCREEN)); orbitScreen->enterReadyState(); DataFile* dataFile = new DataFile(); Mission* mission = new Mission(dataFile); dataFile->setRootItem(mission); rMain()->setMission(mission); mission->initScriptFile(missionEntry.script); if(oldMission != NULL) { if (oldMission == ConfigManager::getPlayer()->getHomeBase()) { ConfigManager::getSingleton().savePlayer(); } else { delete oldMission->getDataFile(); } } }
// Action_FilterByData::Init()
/** Initialize the 'filter' action: create the 0/1 filter DataSet, parse
  * paired 'min'/'max' bounds, and collect the DataSets to filter on.
  * If fewer min/max pairs than data sets are given, the last pair is
  * reused for the remaining sets.
  * \return Action::OK on success, Action::ERR on error.
  */
Action::RetType Action_FilterByData::Init(ArgList& actionArgs, ActionInit& init, int debugIn)
{
  maxmin_ = init.DSL().AddSet( DataSet::INTEGER, actionArgs.GetStringKey("name"), "Filter" );
  if (maxmin_ == 0) return Action::ERR;
  DataFile* maxminfile = init.DFL().AddDataFile( actionArgs.GetStringKey("out"), actionArgs );
  if (maxminfile != 0)
    maxminfile->AddDataSet( maxmin_ );
  // Get min and max args. Repeated keywords accumulate, one per data set.
  while (actionArgs.Contains("min"))
    Min_.push_back( actionArgs.getKeyDouble("min", 0.0) );
  while (actionArgs.Contains("max"))
    Max_.push_back( actionArgs.getKeyDouble("max", 0.0) );
  if (Min_.empty()) {
    mprinterr("Error: At least one 'min' arg must be specified.\n");
    return Action::ERR;
  }
  if (Max_.empty()) {
    mprinterr("Error: At least one 'max' arg must be specified.\n");
    return Action::ERR;
  }
  // min/max must come in pairs.
  if (Min_.size() != Max_.size()) {
    mprinterr("Error: # of 'min' args (%zu) != # of 'max' args (%zu)\n",
              Min_.size(), Max_.size());
    return Action::ERR;
  }
  // Get DataSets from remaining arguments
  Dsets_.AddSetsFromArgs( actionArgs.RemainingArgs(), init.DSL() );
  if (Dsets_.empty()) {
    mprinterr("Error: No data sets specified.\n");
    return Action::ERR;
  }
  if ( Dsets_.size() < Min_.size() ) {
    mprinterr("Error: More 'min'/'max' args (%zu) than data sets (%zu).\n",
              Min_.size(), Dsets_.size());
    return Action::ERR;
  }
  if ( Dsets_.size() > Min_.size() ) {
    // Pad remaining sets with the last given min/max pair.
    unsigned int Nremaining = Dsets_.size() - Min_.size();
    double useMin = Min_.back();
    double useMax = Max_.back();
    mprintf("Warning: More data sets than 'min'/'max' args.\n"
            "Warning: Using min=%f and max=%f for last %zu data sets.\n",
            useMin, useMax, Nremaining);
    for (unsigned int ds = 0; ds < Nremaining; ++ds) {
      Min_.push_back( useMin );
      Max_.push_back( useMax );
    }
  }
  mprintf("    FILTER: Filtering out frames using %zu data sets.\n", Dsets_.size());
  for (unsigned int ds = 0; ds < Dsets_.size(); ds++)
    mprintf("\t%.4f < '%s' < %.4f\n", Min_[ds], Dsets_[ds]->legend(), Max_[ds]);
  if (maxminfile != 0)
    mprintf("\tFilter frame info will be written to %s\n", maxminfile->DataFilename().full());
  return Action::OK;
}
// Action_Grid::Init()
/** Initialize the 'grid' action: set up the grid DataSet, parse density/
  * normalization options and the atom mask, and set up the output file.
  * For backwards compatibility, if 'out' was not given the first unmarked
  * argument is treated as the output filename.
  * \return Action::OK on success, Action::ERR on error.
  */
Action::RetType Action_Grid::Init(ArgList& actionArgs, ActionInit& init, int debugIn)
{
  debug_ = debugIn;
  nframes_ = 0;
  // Get output filename
  std::string filename = actionArgs.GetStringKey("out");
  // Get grid options
  grid_ = GridInit( "GRID", actionArgs, init.DSL() );
  if (grid_ == 0) return Action::ERR;
# ifdef MPI
  if (ParallelGridInit(init.TrajComm(), grid_)) return Action::ERR;
# endif
  // Get extra options
  max_ = actionArgs.getKeyDouble("max", 0.80);
  madura_ = actionArgs.getKeyDouble("madura", 0);
  smooth_ = actionArgs.getKeyDouble("smoothdensity", 0);
  invert_ = actionArgs.hasKey("invert");
  pdbfile_ = init.DFL().AddCpptrajFile(actionArgs.GetStringKey("pdb"),"Grid PDB",DataFileList::PDB,true);
  // 0.033456 molecules/Ang^3 is the default density.
  density_ = actionArgs.getKeyDouble("density",0.033456);
  if (actionArgs.hasKey("normframe")) normalize_ = TO_FRAME;
  else if (actionArgs.hasKey("normdensity")) normalize_ = TO_DENSITY;
  else normalize_ = NONE;
  if (normalize_ != NONE && (smooth_ > 0.0 || madura_ > 0.0)) {
    mprinterr("Error: Normalize options are not compatible with smoothdensity/madura options.\n");
    init.DSL().RemoveSet( grid_ );
    return Action::ERR;
  }
  // Get mask (required)
  std::string maskexpr = actionArgs.GetMaskNext();
  if (maskexpr.empty()) {
    mprinterr("Error: GRID: No mask specified.\n");
    init.DSL().RemoveSet( grid_ );
    return Action::ERR;
  }
  mask_.SetMaskString(maskexpr);
  // Setup output file
  // For backwards compat., if no 'out' assume next string is filename
  if (filename.empty() && actionArgs.Nargs() > 1 && !actionArgs.Marked(1))
    filename = actionArgs.GetStringNext();
  DataFile* outfile = init.DFL().AddDataFile(filename, actionArgs);
  if (outfile != 0) outfile->AddDataSet((DataSet*)grid_);
  // Info
  mprintf("    GRID:\n");
  GridInfo( *grid_ );
  if (outfile != 0)
    mprintf("\tGrid will be printed to file %s\n", outfile->DataFilename().full());
  mprintf("\tGrid data set: '%s'\n", grid_->legend());
  mprintf("\tMask expression: [%s]\n",mask_.MaskString());
  if (pdbfile_ != 0)
    mprintf("\tPseudo-PDB will be printed to %s\n", pdbfile_->Filename().full());
  if (normalize_ == TO_FRAME)
    mprintf("\tGrid will be normalized by number of frames.\n");
  else if (normalize_ == TO_DENSITY)
    mprintf("\tGrid will be normalized to a density of %g molecules/Ang^3.\n", density_);
  // TODO: print extra options
  return Action::OK;
}
// Construct a double-buffered cache over the data file's two cache pages.
// The first page starts as the current buffer; m_filled stays NULL until a
// buffer has been filled.
DoubleCache::DoubleCache(DataFile& dataFile) :
    m_first(new Cache(dataFile.cachePageOne())),
    m_second(new Cache(dataFile.cachePageTwo())),
    m_current(m_first),
    m_filled(NULL)
{
}
// Action_VelocityAutoCorr::Init()
/** Initialize the 'velocityautocorr' action: parse keywords, create the
  * VAC and diffusion-constant DataSets, and set up output files. By default
  * velocity info from frames is used; 'usecoords' estimates velocities from
  * consecutive coordinates instead ('usevelocity' is a deprecated no-op
  * keyword that now errors out).
  * \return Action::OK on success, Action::ERR on error.
  */
Action::RetType Action_VelocityAutoCorr::Init(ArgList& actionArgs, ActionInit& init, int debugIn)
{
  if (actionArgs.hasKey("usevelocity")) {
    mprinterr("Error: The 'usevelocity' keyword is deprecated. Velocity information\n"
              "Error:   is now used by default if present. To force cpptraj to use\n"
              "Error:   coordinates to estimate velocities (not recommended) use the\n"
              "Error:   'usecoords' keyword.\n");
    return Action::ERR;
  }
  useVelInfo_ = !actionArgs.hasKey("usecoords");
  if (mask_.SetMaskString( actionArgs.GetMaskNext() )) return Action::ERR;
  DataFile* outfile = init.DFL().AddDataFile( actionArgs.GetStringKey("out"), actionArgs );
  diffout_ = init.DFL().AddCpptrajFile( actionArgs.GetStringKey("diffout"),
                                        "VAC diffusion constants",
                                        DataFileList::TEXT, true );
  maxLag_ = actionArgs.getKeyInt("maxlag", -1);
  tstep_ = actionArgs.getKeyDouble("tstep", 1.0);
  useFFT_ = !actionArgs.hasKey("direct");
  normalize_ = actionArgs.hasKey("norm");
  // Set up output data set
  VAC_ = init.DSL().AddSet(DataSet::DOUBLE, actionArgs.GetStringNext(), "VAC");
  if (VAC_ == 0) return Action::ERR;
  // TODO: This should just be a scalar
  diffConst_ = init.DSL().AddSet(DataSet::DOUBLE,
                                 MetaData(VAC_->Meta().Name(), "D", MetaData::NOT_TS));
  if (diffConst_ == 0) return Action::ERR;
  if (outfile != 0) outfile->AddDataSet( VAC_ );
# ifdef MPI
  trajComm_ = init.TrajComm();
  // NOTE(review): each continuation line below begins with '\n', which
  // produces blank lines between the Warning lines — verify intentional.
  if (trajComm_.Size() > 1 && !useVelInfo_)
    mprintf("\nWarning: When calculating velocities between consecutive frames,\n"
            "\nWarning:   'velocityautocorr' in parallel will not work correctly if\n"
            "\nWarning:   coordinates have been modified by previous actions (e.g. 'rms').\n\n");
  diffConst_->SetNeedsSync( false );
# endif
  mprintf("    VELOCITYAUTOCORR:\n"
          "\tCalculate velocity auto-correlation function for atoms in mask '%s'\n",
          mask_.MaskString());
  if (useVelInfo_)
    mprintf("\tUsing velocity information present in frames.\n");
  else
    mprintf("\tCalculating velocities between consecutive frames from coordinates.\n");
  if (outfile != 0)
    mprintf("\tOutput velocity autocorrelation function '%s' to '%s'\n", VAC_->legend(),
            outfile->DataFilename().full());
  mprintf("\tWriting diffusion constants to '%s'\n", diffout_->Filename().full());
  if (maxLag_ < 1)
    mprintf("\tMaximum lag will be half total # of frames");
  else
    mprintf("\tMaximum lag is %i frames", maxLag_);
  mprintf(", time step between frames is %f ps\n", tstep_);
  if (useFFT_)
    mprintf("\tUsing FFT to calculate autocorrelation function.\n");
  else
    mprintf("\tUsing direct method to calculate autocorrelation function.\n");
  if (normalize_)
    mprintf("\tNormalizing autocorrelation function to 1.0\n");
  return Action::OK;
}
// Analysis_Wavelet::Setup
/** Set up the wavelet analysis: locate the COORDS set to analyze, parse
  * scaling parameters (nb, s0, ds, correction, chival), resolve the wavelet
  * type (default Morlet), and create the output matrix DataSet.
  * \return Analysis::OK on success, Analysis::ERR on error.
  */
Analysis::RetType Analysis_Wavelet::Setup(ArgList& analyzeArgs, DataSetList* datasetlist,
                                          TopologyList* PFLin, DataFileList* DFLin, int debugIn)
{
  // Attempt to get COORDS DataSet from DataSetList. If none specified the
  // default COORDS set will be used.
  std::string setname = analyzeArgs.GetStringKey("crdset");
  coords_ = (DataSet_Coords*)datasetlist->FindCoordsSet( setname );
  if (coords_ == 0) {
    mprinterr("Error: Could not locate COORDS set corresponding to %s\n", setname.c_str());
    return Analysis::ERR;
  }
  // Get keywords
  DataFile* outfile = DFLin->AddDataFile( analyzeArgs.GetStringKey("out"), analyzeArgs );
  setname = analyzeArgs.GetStringKey("name");
  // TODO: Check defaults
  nb_ = analyzeArgs.getKeyInt("nb", 0); // FIXME: Should be more descriptive? nscale?
  if (nb_ < 1) {
    mprinterr("Error: Scaling number must be > 0\n");
    return Analysis::ERR;
  }
  S0_ = analyzeArgs.getKeyDouble("s0", 0.2);
  ds_ = analyzeArgs.getKeyDouble("ds", 1.0/3.0);
  correction_ = analyzeArgs.getKeyDouble("correction", 1.01);
  chival_ = analyzeArgs.getKeyDouble("chival", 0.2231);
  // Wavelet type: default to Morlet; otherwise look the name up in Tokens_.
  std::string wavelet_name = analyzeArgs.GetStringKey("type");
  if (wavelet_name.empty())
    wavelet_type_ = W_MORLET;
  else {
    wavelet_type_ = W_NONE;
    for (int itoken = 0; itoken != (int)W_NONE; itoken++)
      if (wavelet_name.compare(Tokens_[itoken].key_) == 0) {
        wavelet_type_ = (WaveletType)itoken;
        break;
      }
    if (wavelet_type_ == W_NONE) {
      mprinterr("Error: Unrecognized wavelet type: %s\n", wavelet_name.c_str());
      return Analysis::ERR;
    }
  }
  // Atom mask
  mask_.SetMaskString( analyzeArgs.GetMaskNext() );
  // Set up output data set
  output_ = datasetlist->AddSet( DataSet::MATRIX_FLT, setname, "WAVELET" );
  if (output_ == 0) return Analysis::ERR;
  if (outfile != 0) outfile->AddDataSet( output_ );
  mprintf("    WAVELET: Using COORDS set '%s', wavelet type %s\n",
          coords_->legend(), Tokens_[wavelet_type_].description_);
  mprintf("\tCalculating for atoms in mask '%s'\n", mask_.MaskString());
  mprintf("\tScaling wavelet %i times starting from %g with delta of %g\n",
          nb_, S0_, ds_);
  mprintf("\tCorrection: %g\n", correction_);
  mprintf("\tChiVal:     %g\n", chival_);
  if (outfile != 0) mprintf("\tOutput to '%s'\n", outfile->DataFilename().full());
  return Analysis::OK;
}
void Bug_Popout::writeMetaToBug3File(const DataFile &df, const DAQ::BugTask::BlockMetaData &m/*, int fudge*/) { QString fname (df.metaFileName()); static const QString metaExt(".meta"); if (fname.toLower().endsWith(metaExt)) fname = fname.left(fname.size()-metaExt.size()) + ".bug3"; QFile f(fname); if (!df.scanCount()) { f.open(QIODevice::WriteOnly|QIODevice::Truncate|QIODevice::Text); Debug() << "Bug3 'extra data' file created: " << fname; } else { f.open(QIODevice::WriteOnly|QIODevice::Append|QIODevice::Text); f.seek(f.size()); // got to end? } QTextStream ts(&f); ts << "[ block " << m.blockNum << " ]\n"; ts << "framesThisBlock = " << DAQ::BugTask::FramesPerBlock << "\n"; ts << "spikeGL_DataFile_ScanCount = " << (df.scanCount()/*+u64(fudge/task->numChans())*/) << "\n"; ts << "spikeGL_DataFile_SampleCount = " << (df.sampleCount()/*+u64(fudge)*/) << "\n"; ts << "spikeGL_ScansInBlock = " << DAQ::BugTask::SpikeGLScansPerBlock << "\n"; ts << "boardFrameCounter = "; for (int i = 0; i < DAQ::BugTask::FramesPerBlock; ++i) { if (i) ts << ","; ts << m.boardFrameCounter[i]; } ts << "\n"; ts << "boardFrameTimer = "; for (int i = 0; i < DAQ::BugTask::FramesPerBlock; ++i) { if (i) ts << ","; ts << m.boardFrameTimer[i]; } ts << "\n"; ts << "chipFrameCounter = "; for (int i = 0; i < DAQ::BugTask::FramesPerBlock; ++i) { if (i) ts << ","; ts << m.chipFrameCounter[i]; } ts << "\n"; ts << "chipID = "; for (int i = 0; i < DAQ::BugTask::FramesPerBlock; ++i) { if (i) ts << ","; ts << m.chipID[i]; } ts << "\n"; ts << "frameMarkerCorrelation = "; for (int i = 0; i < DAQ::BugTask::FramesPerBlock; ++i) { if (i) ts << ","; ts << m.frameMarkerCorrelation[i]; } ts << "\n"; ts << "missingFrameCount = " << m.missingFrameCount << "\n"; ts << "falseFrameCount = " << m.falseFrameCount << "\n"; ts << "BER = " << m.BER << "\n"; ts << "WER = " << m.WER << "\n"; ts << "avgVunreg = " << m.avgVunreg << "\n"; ts.flush(); }
/** Serialize this path node's persistent cookies and, recursively, its
  * child paths to 'fp', terminated by a TAG_COOKIE_PATH_END record.
  * With dry_run=TRUE nothing is written; the byte size that WOULD be
  * written is accumulated and returned instead. (Leave-function: the
  * trailing L means it may leave/throw per Opera/Symbian convention.)
  * \return Number of bytes written (or that would be written in a dry run)
  *         for child-path records; cookie records at this level are only
  *         counted in dry-run mode.
  */
size_t CookiePath::WriteCookiesL(DataFile &fp, time_t this_time, BOOL dry_run)
{
	size_t size = 0;
	// 1) Write each cookie at this path that is still persistent at this_time.
	Cookie* ck = (Cookie*) cookie_list.First();
	while (ck)
	{
		if(ck->Persistent(this_time))
		{
			DataFile_Record rec(TAG_COOKIE_ENTRY);
			ANCHOR(DataFile_Record,rec);
			rec.SetRecordSpec(fp.GetRecordSpec());
			ck->FillDataFileRecordL(rec);
			if (dry_run)
				size += rec.CalculateLength();
			else
				rec.WriteRecordL(&fp);
		}
		ck = ck->Suc();
	}
	// 2) Recurse into child paths that still hold cookies.
	CookiePath* cp = (CookiePath*) FirstChild();
	while (cp)
	{
		if (cp->HasCookies(this_time))
		{
			DataFile_Record rec(TAG_COOKIE_PATH_ENTRY);
			ANCHOR(DataFile_Record,rec);
			rec.SetRecordSpec(fp.GetRecordSpec());
			rec.AddRecordL(TAG_COOKIE_PATH_NAME, cp->PathPart());
			if (dry_run)
				size += rec.CalculateLength();
			else
				rec.WriteRecordL(&fp);
			size += cp->WriteCookiesL(fp, this_time, dry_run);
		}
		cp = cp->Suc();
	}
	// 3) Close this path's scope with an end record.
	{
		DataFile_Record rec(TAG_COOKIE_PATH_END);
		// spec is a pointer to existing field of object fp, there is no need to assert that it's not null
		const DataRecord_Spec *spec = fp.GetRecordSpec();
		rec.SetRecordSpec(spec);
		if (dry_run)
			// MSB-tagged end records are written as just the tag id.
			size += ((rec.GetTag() & MSB_VALUE) == MSB_VALUE) ? spec->idtag_len : rec.CalculateLength();
		else
			rec.WriteRecordL(&fp);
	}
	return size;
}
// Look up the DataHeader at 'offset' inside data file dir/file.
// Returns NULL when the file cannot be referenced or has no header there.
DataHeader *get_dataheader(const stdString &dir, const stdString &file,
                           FileOffset offset)
{
    DataFile *df = DataFile::reference(dir, file, false);
    if (!df)
        return 0;
    DataHeader *hdr = df->getHeader(offset); // may be NULL
    df->release(); // the header (if any) holds its own reference
    return hdr;
}
// Action_AtomicFluct::Init() Action::RetType Action_AtomicFluct::Init(ArgList& actionArgs, TopologyList* PFL, DataSetList* DSL, DataFileList* DFL, int debugIn) { // Get frame # keywords if (InitFrameCounter(actionArgs)) return Action::ERR; // Get other keywords bfactor_ = actionArgs.hasKey("bfactor"); calc_adp_ = actionArgs.hasKey("calcadp"); adpoutfile_ = DFL->AddCpptrajFile(actionArgs.GetStringKey("adpout"), "PDB w/ADP", DataFileList::PDB);; if (adpoutfile_!=0) calc_adp_ = true; // adpout implies calcadp if (calc_adp_ && !bfactor_) bfactor_ = true; DataFile* outfile = DFL->AddDataFile( actionArgs.GetStringKey("out"), actionArgs ); if (actionArgs.hasKey("byres")) outtype_ = BYRES; else if (actionArgs.hasKey("bymask")) outtype_ = BYMASK; else if (actionArgs.hasKey("byatom") || actionArgs.hasKey("byatm")) outtype_ = BYATOM; // Get Mask Mask_.SetMaskString( actionArgs.GetMaskNext() ); // Get DataSet name std::string setname = actionArgs.GetStringNext(); // Add output dataset MetaData md( setname ); md.SetTimeSeries( MetaData::NOT_TS ); if (bfactor_) md.SetLegend("B-factors"); else md.SetLegend("AtomicFlx"); dataout_ = DSL->AddSet( DataSet::XYMESH, md, "Fluct" ); if (dataout_ == 0) { mprinterr("Error: AtomicFluct: Could not allocate dataset for output.\n"); return Action::ERR; } if (outfile != 0) outfile->AddDataSet( dataout_ ); mprintf(" ATOMICFLUCT: calculating"); if (bfactor_) mprintf(" B factors"); else mprintf(" atomic positional fluctuations"); if (outfile != 0) mprintf(", output to file %s", outfile->DataFilename().full()); mprintf("\n Atom mask: [%s]\n",Mask_.MaskString()); FrameCounterInfo(); if (calc_adp_) { mprintf("\tCalculating anisotropic displacement parameters.\n"); if (adpoutfile_!=0) mprintf("\tWriting PDB with ADP to '%s'\n", adpoutfile_->Filename().full()); } if (!setname.empty()) mprintf("\tData will be saved to set named %s\n", setname.c_str()); return Action::OK; }
// TODO: Accept const ArgList so arguments are not reset? CpptrajFile* DataFileList::AddCpptrajFile(FileName const& nameIn, std::string const& descrip, CFtype typeIn, bool allowStdout) { // If no filename and stdout not allowed, no output desired. if (nameIn.empty() && !allowStdout) return 0; FileName name; CpptrajFile* Current = 0; int currentIdx = -1; if (!nameIn.empty()) { name = nameIn; // Append ensemble number if set. if (ensembleNum_ != -1) name.Append( "." + integerToString(ensembleNum_) ); // Check if filename in use by DataFile. DataFile* df = GetDataFile(name); if (df != 0) { mprinterr("Error: Text output file name '%s' already in use by data file '%s'.\n", nameIn.full(), df->DataFilename().full()); return 0; } // Check if this filename already in use currentIdx = GetCpptrajFileIdx( name ); if (currentIdx != -1) Current = cfList_[currentIdx]; } // If no CpptrajFile associated with name, create new CpptrajFile if (Current==0) { switch (typeIn) { case TEXT: Current = new CpptrajFile(); break; case PDB: Current = (CpptrajFile*)(new PDBfile()); break; } Current->SetDebug(debug_); // Set up file for writing. //if (Current->SetupWrite( name, debug_ )) if (Current->OpenWrite( name )) { mprinterr("Error: Setting up text output file %s\n", name.full()); delete Current; return 0; } cfList_.push_back( Current ); cfData_.push_back( CFstruct(descrip, typeIn) ); } else { // If Current type does not match typeIn do not allow. if (typeIn != cfData_[currentIdx].Type()) { mprinterr("Error: Cannot change type of text output for '%s'.\n", Current->Filename().full()); return 0; } Current->SetDebug(debug_); // Update description if (!descrip.empty()) cfData_[currentIdx].UpdateDescrip( descrip ); } return Current; }
/** Resolve a DiskLoc to the in-memory Record it points at.
  * An offset inside the file header is invalid; badOfs() uasserts in that
  * case (kept out of the normal code path).
  */
Record* ExtentManager::recordFor( const DiskLoc& loc ) {
    loc.assertOk();
    DataFile* df = getFile( loc.a() );
    int ofs = loc.getOfs();
    if ( ofs < DataFileHeader::HeaderSize ) {
        df->badOfs(ofs); // will uassert - external call to keep out of the normal code path
    }
    // Record lives at file base + offset.
    return reinterpret_cast<Record*>( df->p() + ofs );
}
/** Set up the 'autocorr' analysis: select input DataSets from the remaining
  * args, and create one output DataSet per input (auto-covariance by
  * default, auto-correlation with 'nocovar'; FFT unless 'direct').
  * \return Analysis::OK on success, Analysis::ERR on error.
  */
Analysis::RetType Analysis_AutoCorr::Setup(ArgList& analyzeArgs, DataSetList* datasetlist, TopologyList* PFLin, DataFileList* DFLin, int debugIn)
{
  const char* calctype;
  std::string setname = analyzeArgs.GetStringKey("name");
  DataFile* outfile = DFLin->AddDataFile( analyzeArgs.GetStringKey("out"), analyzeArgs );
  lagmax_ = analyzeArgs.getKeyInt("lagmax",-1);
  calc_covar_ = !analyzeArgs.hasKey("nocovar");
  usefft_ = !analyzeArgs.hasKey("direct");
  // Select datasets from remaining args
  ArgList dsetArgs = analyzeArgs.RemainingArgs();
  for (ArgList::const_iterator dsa = dsetArgs.begin(); dsa != dsetArgs.end(); ++dsa)
    dsets_ += datasetlist->GetMultipleSets( *dsa );
  if (dsets_.empty()) {
    mprinterr("Error: autocorr: No data sets selected.\n");
    return Analysis::ERR;
  }
  // If setname is empty generate a default name
  if (setname.empty())
    setname = datasetlist->GenerateDefaultName( "autocorr" );
  // Setup output datasets, one per input set, indexed by position.
  int idx = 0;
  MetaData md( setname );
  for (DataSetList::const_iterator DS = dsets_.begin(); DS != dsets_.end(); ++DS) {
    md.SetIdx( idx++ );
    DataSet* dsout = datasetlist->AddSet( DataSet::DOUBLE, md );
    if (dsout==0) return Analysis::ERR;
    dsout->SetLegend( (*DS)->Meta().Legend() );
    outputData_.push_back( dsout );
    // Add set to output file
    if (outfile != 0) outfile->AddDataSet( outputData_.back() );
  }
  if (calc_covar_)
    calctype = "covariance";
  else
    calctype = "correlation";
  // Fix: dsets_.size() is size_t; use %zu instead of %i (format mismatch was
  // undefined behavior).
  mprintf("    AUTOCORR: Calculating auto-%s for %zu data sets:\n", calctype, dsets_.size());
  dsets_.List();
  if (lagmax_!=-1)
    mprintf("\tLag max= %i\n", lagmax_);
  if ( !setname.empty() )
    mprintf("\tSet name: %s\n", setname.c_str() );
  if ( outfile != 0 )
    mprintf("\tOutfile name: %s\n", outfile->DataFilename().base());
  if (usefft_)
    mprintf("\tUsing FFT to calculate %s.\n", calctype);
  else
    mprintf("\tUsing direct method to calculate %s.\n", calctype);
  return Analysis::OK;
}
// Action_Dihedral::init() Action::RetType Action_Dihedral::Init(ArgList& actionArgs, TopologyList* PFL, FrameList* FL, DataSetList* DSL, DataFileList* DFL, int debugIn) { // Get keywords DataFile* outfile = DFL->AddDataFile( actionArgs.GetStringKey("out"), actionArgs ); useMass_ = actionArgs.hasKey("mass"); DataSet::scalarType stype = DataSet::UNDEFINED; range360_ = actionArgs.hasKey("range360"); std::string stypename = actionArgs.GetStringKey("type"); if ( stypename == "alpha" ) stype = DataSet::ALPHA; else if ( stypename == "beta" ) stype = DataSet::BETA; else if ( stypename == "gamma" ) stype = DataSet::GAMMA; else if ( stypename == "delta" ) stype = DataSet::DELTA; else if ( stypename == "epsilon" ) stype = DataSet::EPSILON; else if ( stypename == "zeta" ) stype = DataSet::ZETA; else if ( stypename == "chi" ) stype = DataSet::CHI; else if ( stypename == "c2p" ) stype = DataSet::C2P; else if ( stypename == "h1p" ) stype = DataSet::H1P; else if ( stypename == "phi" ) stype = DataSet::PHI; else if ( stypename == "psi" ) stype = DataSet::PSI; else if ( stypename == "pchi" ) stype = DataSet::PCHI; // Get Masks std::string mask1 = actionArgs.GetMaskNext(); std::string mask2 = actionArgs.GetMaskNext(); std::string mask3 = actionArgs.GetMaskNext(); std::string mask4 = actionArgs.GetMaskNext(); if (mask1.empty() || mask2.empty() || mask3.empty() || mask4.empty()) { mprinterr("Error: dihedral: Requires 4 masks\n"); return Action::ERR; } M1_.SetMaskString(mask1); M2_.SetMaskString(mask2); M3_.SetMaskString(mask3); M4_.SetMaskString(mask4); // Setup dataset dih_ = DSL->AddSet(DataSet::DOUBLE, actionArgs.GetStringNext(),"Dih"); if (dih_==0) return Action::ERR; dih_->SetScalar( DataSet::M_TORSION, stype ); // Add dataset to datafile list if (outfile != 0) outfile->AddSet( dih_ ); mprintf(" DIHEDRAL: [%s]-[%s]-[%s]-[%s]\n", M1_.MaskString(), M2_.MaskString(), M3_.MaskString(), M4_.MaskString()); if (useMass_) mprintf(" Using center of mass of atoms in masks.\n"); if 
(range360_) mprintf(" Output range is 0 to 360 degrees.\n"); else mprintf(" Output range is -180 to 180 degrees.\n"); return Action::OK; }
DataFile *DataFile::reference(const stdString &req_dirname, const stdString &req_basename, bool for_write) { DataFile *datafile = 0; stdString dirname, basename, filename; Filename::build(req_dirname, req_basename, filename); Filename::getDirname(filename, dirname); Filename::getBasename(filename, basename); #ifdef LOG_DATAFILE LOG_MSG("reference('%s', '%s', %s)\n", req_dirname.c_str(), req_basename.c_str(), (for_write ? "read/write" : "read-only")); LOG_MSG("normalized: '%s' + '%s' = '%s')\n", dirname.c_str(), basename.c_str(), filename.c_str()); #endif stdList<DataFile *>::iterator i = open_data_files.begin(); for (/**/; i != open_data_files.end (); ++i) { datafile = *i; if (datafile->getFilename() == filename && datafile->for_write == for_write) { #ifdef LOG_DATAFILE LOG_MSG("DataFile %s (%c) is cached (%d)\n", filename.c_str(), (for_write?'W':'R'), datafile->ref_count); #endif datafile->reference(); // When it was put in the cache, it might // have been a new file. // But now it's one that already existed, // so reset is_new_file: datafile->is_new_file = false; return datafile; } } try { datafile = new DataFile(dirname, basename, filename, for_write); datafile->reopen(); open_data_files.push_back(datafile); return datafile; } catch (...) { if (datafile) delete datafile; throw GenericException(__FILE__, __LINE__, "Cannot reference '%s'", filename.c_str()); } }
// Either sets header to new dirname/basename/offset // or returns false DataHeader *OldDataReader::getHeader(const stdString &dirname, const stdString &basename, FileOffset offset) { if (!Filename::isValid(basename)) return 0; DataFile *datafile = DataFile::reference(dirname, basename, false); DataHeader *new_header = datafile->getHeader(offset); datafile->release(); // now ref'ed by header return new_header; }
// todo: this is called a lot. streamline the common case DataFile* ExtentManager::getFile( int n, int sizeNeeded , bool preallocateOnly) { verify(this); DEV Lock::assertAtLeastReadLocked( _dbname ); if ( n < 0 || n >= DiskLoc::MaxFiles ) { log() << "getFile(): n=" << n << endl; massert( 10295 , "getFile(): bad file number value (corrupt db?): run repair", false); } DEV { if ( n > 100 ) { log() << "getFile(): n=" << n << endl; } } DataFile* p = 0; if ( !preallocateOnly ) { while ( n >= (int) _files.size() ) { verify(this); if( !Lock::isWriteLocked(_dbname) ) { log() << "error: getFile() called in a read lock, yet file to return is not yet open" << endl; log() << " getFile(" << n << ") _files.size:" <<_files.size() << ' ' << fileName(n).string() << endl; log() << " context ns: " << cc().ns() << endl; verify(false); } _files.push_back(0); } p = _files[n]; } if ( p == 0 ) { DEV Lock::assertWriteLocked( _dbname ); boost::filesystem::path fullName = fileName( n ); string fullNameString = fullName.string(); p = new DataFile(n); int minSize = 0; if ( n != 0 && _files[ n - 1 ] ) minSize = _files[ n - 1 ]->getHeader()->fileLength; if ( sizeNeeded + DataFileHeader::HeaderSize > minSize ) minSize = sizeNeeded + DataFileHeader::HeaderSize; try { p->open( fullNameString.c_str(), minSize, preallocateOnly ); } catch ( AssertionException& ) { delete p; throw; } if ( preallocateOnly ) delete p; else _files[n] = p; } return preallocateOnly ? 0 : p; }