void RBSCMConstruction::process_parameters_file(const std::string& parameters_filename)
{
  // First read in data from parameters_filename
  GetPot infile(parameters_filename);
  const unsigned int n_training_samples = infile("n_training_samples", 1);
  const bool deterministic_training = infile("deterministic_training", false);

  // Read in the training_parameters_random_seed value. This is used to
  // seed the RNG when picking the training parameters. By default the
  // value is -1, which means use std::time to seed the RNG.
  unsigned int training_parameters_random_seed_in = static_cast<unsigned int>(-1);
  training_parameters_random_seed_in = infile("training_parameters_random_seed",
                                              training_parameters_random_seed_in);
  set_training_random_seed(training_parameters_random_seed_in);

  // SCM Greedy termination tolerance
  const Real SCM_training_tolerance_in = infile("SCM_training_tolerance", SCM_training_tolerance);
  set_SCM_training_tolerance(SCM_training_tolerance_in);

  // Initialize the parameter ranges and the parameters themselves
  initialize_parameters(parameters_filename);

  std::map<std::string, bool> log_scaling;
  const RBParameters& mu = get_parameters();
  RBParameters::const_iterator it     = mu.begin();
  RBParameters::const_iterator it_end = mu.end();
  unsigned int i = 0;
  for ( ; it != it_end; ++it)
    {
      // Read the log scaling flag for this parameter. In the input file this
      // is a vector of 0/1 values, e.g. log_scaling = '1 1 1 ...', read as an
      // int and converted to bool.
      std::string param_name = it->first;
      log_scaling[param_name] = static_cast<bool>(infile("log_scaling", 0, i));
      i++;
    }

  initialize_training_parameters(this->get_parameters_min(),
                                 this->get_parameters_max(),
                                 n_training_samples,
                                 log_scaling,
                                 deterministic_training); // use deterministic parameters
}
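// ---------------------------------------------------------------------------
// For reference, a hypothetical GetPot input file that the version above could
// parse might look like the sketch below. The keys are the ones read above;
// the values are illustrative assumptions only:
//
//   n_training_samples = 100
//   deterministic_training = false
//   training_parameters_random_seed = -1
//   SCM_training_tolerance = 0.5
//   log_scaling = '1'
//
// In this version the parameter names and their min/max ranges are read
// separately inside initialize_parameters(parameters_filename).
// ---------------------------------------------------------------------------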
void RBSCMConstruction::process_parameters_file(const std::string& parameters_filename)
{
  // First read in data from parameters_filename
  GetPot infile(parameters_filename);
  const unsigned int n_training_samples = infile("n_training_samples", 1);
  const bool deterministic_training = infile("deterministic_training", false);

  // Read in the training_parameters_random_seed value. This is used to
  // seed the RNG when picking the training parameters. By default the
  // value is -1, which means use std::time to seed the RNG.
  unsigned int training_parameters_random_seed_in = static_cast<unsigned int>(-1);
  training_parameters_random_seed_in = infile("training_parameters_random_seed",
                                              training_parameters_random_seed_in);
  set_training_random_seed(training_parameters_random_seed_in);

  // SCM Greedy termination tolerance
  const Real SCM_training_tolerance_in = infile("SCM_training_tolerance", SCM_training_tolerance);
  set_SCM_training_tolerance(SCM_training_tolerance_in);

  // Initialize the parameter ranges and the parameters themselves
  const unsigned int n_continuous_parameters = infile.vector_variable_size("parameter_names");
  RBParameters mu_min_in;
  RBParameters mu_max_in;
  for (unsigned int i=0; i<n_continuous_parameters; i++)
    {
      // Read the parameter name, then its min (index 0) and max (index 1) values
      std::string param_name = infile("parameter_names", "NONE", i);

      Real min_val = infile(param_name, 0., 0);
      mu_min_in.set_value(param_name, min_val);

      Real max_val = infile(param_name, 0., 1);
      mu_max_in.set_value(param_name, max_val);
    }

  // Read the admissible values for each discrete parameter
  std::map<std::string, std::vector<Real>> discrete_parameter_values_in;
  const unsigned int n_discrete_parameters = infile.vector_variable_size("discrete_parameter_names");
  for (unsigned int i=0; i<n_discrete_parameters; i++)
    {
      std::string param_name = infile("discrete_parameter_names", "NONE", i);

      unsigned int n_vals_for_param = infile.vector_variable_size(param_name);
      std::vector<Real> vals_for_param(n_vals_for_param);
      for (unsigned int j=0; j<vals_for_param.size(); j++)
        vals_for_param[j] = infile(param_name, 0., j);

      discrete_parameter_values_in[param_name] = vals_for_param;
    }

  initialize_parameters(mu_min_in, mu_max_in, discrete_parameter_values_in);

  // Read the per-parameter log scaling flags, e.g. log_scaling = '1 1 1 ...'
  std::map<std::string, bool> log_scaling;
  const RBParameters& mu = get_parameters();
  RBParameters::const_iterator it     = mu.begin();
  RBParameters::const_iterator it_end = mu.end();
  unsigned int i = 0;
  for ( ; it != it_end; ++it)
    {
      std::string param_name = it->first;
      log_scaling[param_name] = static_cast<bool>(infile("log_scaling", 0, i));
      i++;
    }

  initialize_training_parameters(this->get_parameters_min(),
                                 this->get_parameters_max(),
                                 n_training_samples,
                                 log_scaling,
                                 deterministic_training); // use deterministic parameters
}
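// ---------------------------------------------------------------------------
// With the explicit parameter reading above, the same hypothetical input file
// would additionally declare the parameters themselves. A sketch (the names
// "kappa" and "n_holes" and all values are illustrative assumptions, not taken
// from the code above):
//
//   parameter_names = 'kappa'
//   kappa = '0.1 10.0'                   # min value, then max value
//   discrete_parameter_names = 'n_holes'
//   n_holes = '1 2 3'                    # admissible discrete values
//   log_scaling = '1 0'
//
// A minimal usage sketch follows, assuming libMesh's usual header layout and
// an EquationSystems object set up elsewhere. The system name and input file
// name are placeholders, and the remaining SCM setup (theta expansion,
// initialization, greedy training) is omitted:

#include "libmesh/equation_systems.h"
#include "libmesh/rb_scm_construction.h"

using namespace libMesh;

void setup_scm(EquationSystems & equation_systems)
{
  // Attach the SCM construction system to the EquationSystems object.
  RBSCMConstruction & scm =
    equation_systems.add_system<RBSCMConstruction>("RBSCMConvergence");

  // Read n_training_samples, parameter ranges, log_scaling, etc. from the
  // input file, as implemented above.
  scm.process_parameters_file("rb_scm.in");
}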