/*! \brief Open \c filename as an energy (.edr) file and build a reader
 * restricted to the given field names.
 *
 * \param[in] filename                     Path of the energy file to open.
 * \param[in] namesOfRequiredEnergyFields  Names of energy terms the caller
 *                                         requires; all must be present.
 * \returns  A reader that owns the open file handle and knows the column
 *           index of each required field.
 * \throws   FileIOError if the file cannot be opened; APIError if any
 *           required field is missing from the file.
 */
EnergyFrameReaderPtr openEnergyFileToReadFields(const std::string &filename, const std::vector<std::string> &namesOfRequiredEnergyFields)
{
    ener_file_ptr energyFile(open_enx(filename.c_str(), "r"));

    if (!energyFile)
    {
        GMX_THROW(FileIOError("Could not open energy file " + filename + " for reading"));
    }

    /* Read in the names of energy fields used in this file, and record
     * the index of each field the caller asked for. The data structure
     * returned by do_enxnms() is a raw C allocation, so free_enxnms()
     * must run on both the normal and the exceptional path (the map
     * insertions below can throw std::bad_alloc); the try/catch below
     * guarantees that, fixing the leak noted in the old TODO. */
    std::map<std::string, int> indicesOfEnergyFields;
    {
        int          numEnergyTerms;
        gmx_enxnm_t *energyNames = nullptr;
        do_enxnms(energyFile.get(), &numEnergyTerms, &energyNames);
        try
        {
            for (int i = 0; i != numEnergyTerms; ++i)
            {
                const char *name = energyNames[i].name;
                auto        requiredEnergy =
                    std::find_if(std::begin(namesOfRequiredEnergyFields),
                                 std::end(namesOfRequiredEnergyFields),
                                 [name](const std::string &n) { return 0 == n.compare(name); });
                if (requiredEnergy != namesOfRequiredEnergyFields.end())
                {
                    indicesOfEnergyFields[name] = i;
                }
            }
        }
        catch (...)
        {
            // Release the C allocations before propagating the exception.
            free_enxnms(numEnergyTerms, energyNames);
            throw;
        }
        // Clean up old data structures
        free_enxnms(numEnergyTerms, energyNames);
    }

    // Throw if we failed to find the fields we need
    if (indicesOfEnergyFields.size() != namesOfRequiredEnergyFields.size())
    {
        std::string requiredEnergiesNotFound = "Did not find the following required energies in mdrun output:\n";
        for (auto &name : namesOfRequiredEnergyFields)
        {
            auto possibleIndex = indicesOfEnergyFields.find(name);
            if (possibleIndex == indicesOfEnergyFields.end())
            {
                requiredEnergiesNotFound += name + "\n";
            }
        }
        GMX_THROW(APIError(requiredEnergiesNotFound));
    }

    // Transfer ownership of the open file handle to the reader.
    return EnergyFrameReaderPtr(new EnergyFrameReader(indicesOfEnergyFields, energyFile.release()));
}
int main(int argc, char * argv[]) { ailab::gen.seed( std::chrono::high_resolution_clock::now().time_since_epoch().count() ); #ifndef NDEBUG std::cerr << " === Running in debug mode === " << std::endl; #endif google::ParseCommandLineFlags(&argc, &argv, true); ailab::OpenCL::spContext context = ailab::setup_opencl(); if (FLAGS_autosave) { signal(SIGTERM, terminate); signal(SIGABRT, terminate); signal(SIGINT, terminate); } if (FLAGS_logFile.size()) { ailab::Logger::location.assign(FLAGS_logFile); ailab::Logger::factory = &ailab::Logger::initFS; } else if (FLAGS_logURL.size()) { ailab::Logger::location.assign(FLAGS_logURL); ailab::Logger::factory = &ailab::Logger::initHTTP; } else { ailab::Logger::factory = &ailab::Logger::initSTDOUT; } if (context != NULL) { context->loadKernels(ailab_rbm_kernels_source , "-cl-denorms-are-zero -cl-single-precision-constant -cl-mad-enable -cl-fast-relaxed-math "); } dbn = ailab::spDeepBeliefNet(new ailab::DeepBeliefNet(context)); io.set_buffer_len(FLAGS_bufferLen); if (argc > 1) { io.start(); dbn->output_options.update_cli = FLAGS_progress; dbn->output_options.log_per_n_batches = FLAGS_logFreq; dbn->output_options.hist_bin_count = FLAGS_logBins; dbn->output_options.logHistograms = FLAGS_logHistograms; dbn->output_options.logError = FLAGS_logError; dbn->output_options.logErrorDetails = FLAGS_logErrorDetails; dbn->output_options.logEnergy = FLAGS_logEnergy; dbn->init(argv[1], io, FLAGS_batch_size); if (FLAGS_train) { dbn->train(); dbn->save(); } if (FLAGS_reconstruct) { dbn->reconstruct(); } if (FLAGS_energy.length() > 0) { std::ofstream energyFile(FLAGS_energy); if (energyFile.is_open()) { dbn->write_energy(energyFile); } else { std::cerr << "Could not open energy file at " << FLAGS_energy; } } std::clog << "Flushing output..." << std::endl; io.stop(); } else { std::cerr << "Please provide a .json configuration file" << std::endl; } }