Example #1
0
int main(int nargs, char* args[])
{
    // Seed the C library RNG (the optimizer's own seed is left at its default).
    srand(time(NULL));

    // Start from the library defaults and override a few options.
    bopt_params par = initialize_parameters_to_default();

    // Budget: 190 outer iterations, 250 inner ones, relearn every 50.
    par.n_iterations = 190;
    par.n_inner_iterations = 250;
    par.n_iter_relearn = 50;
    //par.random_seed = 0;

    // Quiet, nearly noiseless setup.
    par.verbose_level = 0;
    par.noise = 1e-10;
    par.sigma_s = 1;

    // Learning and initialization choices.
    par.sc_type = SC_ML;
    par.init_method = 3;
    par.force_jump = 0;

    // Lower-confidence-bound criterion (parameter 0.125) with an ARD kernel.
    strcpy(par.crit_name, "cLCB");
    par.crit_params[0] = 0.125;
    par.n_crit_params = 1;
    strcpy(par.kernel.name, "kSEARD");

    // Run the full benchmark suite with this configuration.
    benchmark<BraninNormalized>(par, "branin");
    benchmark<Hartmann6>(par, "hartmann6");
    benchmark<Hartmann3>(par, "hartmann3");
    benchmark<Rastrigin>(par, "rastrigin");
    benchmark<Sphere>(par, "sphere");
    benchmark<Ellipsoid>(par, "ellipsoid");
    benchmark<GoldsteinPrice>(par, "goldsteinprice");
    benchmark<SixHumpCamel>(par, "sixhumpcamel");

    return 0;
}
Example #2
0
/* Minimizes G(gamma) over the ipf.npar parameters with Bayesian
   optimization (bayesopt C API), stores the optimal gamma back into
   ipf, writes the resulting potential, and terminates the run. */
void minim_bayes(density_t *ndft)
{
  int i;
  double minf;   /* minimum function value found by the optimizer */
  double *x;     /* optimization variables; initial point on input */

  bayes_par par_input;
  nlopt_function_data function_data;
  bopt_params par;

  /* Initializing the bounds for the optimization.
     We need two vectors with size ipf.npar to parse
     them to bayesopt */
  par_input.lb = (double *)malloc(ipf.npar*sizeof(double));
  parse_double ("bayes_lb", par_input.lb);
  /* A single value is parsed; replicate it to every dimension. */
  for(i = 1; i < ipf.npar; i++) par_input.lb[i] = par_input.lb[0];
  par_input.ub = (double *)malloc(ipf.npar*sizeof(double));
  parse_double ("bayes_ub", par_input.ub);
  for(i = 1; i < ipf.npar; i++) par_input.ub[i] = par_input.ub[0];

  /* Starting point: the current gamma vector. */
  x = (double *)malloc(ipf.npar*sizeof(double));
  for(i = 0; i < ipf.npar; i++) x[i] = gsl_vector_get(ipf.gamma, i);

  /* Initializing all the parameters to default, and then modifying them */
  par = initialize_parameters_to_default();

  /* The use of the auxiliar data.n_iter, data.init_samples and data.init_method
     parameters is caused by their par.* equivalents being defined as size_t
     instead of int, which caused some warnings and errors in the parser */
  par.n_iterations = 190;
  parse_int("bayes_iter", &par_input.n_iter);  par.n_iterations = par_input.n_iter;
  par.n_init_samples = 10;
  parse_int("bayes_init_samples", &par_input.init_samples);  par.n_init_samples = par_input.init_samples;
  par.init_method = 1;
  parse_int("bayes_init_method", &par_input.init_method);  par.init_method = par_input.init_method;

  par.random_seed = 0;
  par.verbose_level = 1;  parse_int("bayes_verbose", &par.verbose_level);
  par.noise = 1e-10;  parse_double("bayes_noise", &par.noise);

  /* Hyperparameter learning method; "L_MCMC" unless overridden in the input. */
  par_input.learning = "L_MCMC";  parse_string("bayes_learning", &par_input.learning);
  set_learning(&par, par_input.learning);

  function_data.counter = 0;   /* evaluation counter, reported below */
  function_data.ndft = density_get_val(ndft);

  messages_basic("\n\n\nStarting the optimization.\n\n\n");

  bayes_optimization(ipf.npar, nlopt_gfunction, &function_data, par_input.lb, par_input.ub, x, &minf, par);

  /* Print the result only on rank 0 (presumably the master process). */
  if(myid == 0)  printf("Success: iterations = %d, G(gamma) = %15.10f\n", function_data.counter, minf);
  if(myid == 0) printf("gamma =  ");
  if(myid == 0) {for(i = 0; i < ipf.npar; i++) printf("%.5f ", x[i]);}
  if(myid == 0)  printf("\n");

  /* Store the optimum back into ipf and write the potential file.
     NOTE(review): lb, ub and x are never freed; harmless here since
     parallel_end() terminates the process, but worth confirming. */
  for(i = 0; i < ipf.npar; i++) gsl_vector_set(ipf.gamma, i, x[i]);
  ipf_write(ipf, ipf_ref, "pot");

  parallel_end(EXIT_SUCCESS);
}
bopt_params getBoptParams(CommandLineOptions const &options) {
	// Build the optimizer configuration: library defaults, overridden by
	// the command-line budget plus a couple of fixed choices.
	bopt_params params = initialize_parameters_to_default();

	// Iteration/sample budget comes from the command line.
	params.n_iterations = options.n_iterations;
	params.n_init_samples = options.n_init_samples;
	params.n_iter_relearn = options.n_iter_relearn;

	// Fixed settings: nearly noiseless observations, initialization method 2.
	params.init_method = 2;
	params.noise = 1e-14;

	return params;
}
int main(int nargs, char *args[])
{
    bayesopt::Parameters par;
    if(nargs > 1) {
        if(!bayesopt::utils::ParamLoader::load(args[1], par)) {
            std::cout << "ERROR: provided file \"" << args[1] << "\" does not exist" << std::endl;
            return -1;
        }
    }
    else {
        par = initialize_parameters_to_default();
        par.n_iterations = 100;
        par.n_init_samples = 2;
        par.n_iter_relearn = 1;
        par.random_seed = 10;
        //set_surrogate(&par,"sStudentTProcessNIG");

        par.l_type = L_MCMC;
        par.sc_type = SC_MAP;
        par.verbose_level = 1;
        //bayesopt::utils::ParamLoader::save("bo_branin_display.txt", par);
    }

    boost::scoped_ptr<BraninNormalized> branin(new BraninNormalized(par));
    GLOBAL_MATPLOT.init(branin.get(),2);

    vectord sv(2);
    sv(0) = 0.1239;
    sv(1) = 0.8183;
    GLOBAL_MATPLOT.setSolution(sv);

    sv(0) = 0.5428;
    sv(1) = 0.1517;
    GLOBAL_MATPLOT.setSolution(sv);

    sv(0) = 0.9617;
    sv(1) = 0.1650;
    GLOBAL_MATPLOT.setSolution(sv);

    glutInit(&nargs, args);
    glutCreateWindow(50,50,800,650);
    glutDisplayFunc( display );
    glutReshapeFunc( reshape );
    glutIdleFunc( idle );
    glutMotionFunc( motion );
    glutMouseFunc( mouse );
    glutPassiveMotionFunc(passive);
    glutKeyboardFunc( keyboard );
    glutMainLoop();

    return 0;
}
Example #5
0
int main(int nargs, char *args[])
{
  bopt_params par = initialize_parameters_to_default();
  par.n_iterations = 190;
  par.random_seed = 0;
  par.verbose_level = 1;
  par.noise = 1e-10;
  
  BraninNormalized branin(par);
  vectord result(2);

  branin.optimize(result);
  std::cout << "Result: " << result << "->" 
	    << branin.evaluateSample(result) << std::endl;
  branin.printOptimal();

  return 0;
}
Example #6
0
int main(int nargs, char *args[])
{
  // Long, nearly noiseless run with a fixed seed.
  bopt_params par = initialize_parameters_to_default();
  par.noise = 1e-10;
  par.n_iterations = 190;
  par.verbose_level = 1;
  par.random_seed = 0;

  ExampleCamelback camel(par);

  // Log the CPU time taken by each optimization step.
  std::ofstream timelog("time_camel.log");

  // Note: the clock is started before initialization, so the first
  // logged interval includes the initialization cost.
  std::clock_t prev_t = clock();
  camel.initializeOptimization();

  for (size_t ii = 0; ii < par.n_iterations; ++ii)
    {
      camel.stepOptimization();

      const std::clock_t curr_t = clock();
      timelog << ii << ","
              << static_cast<double>(curr_t - prev_t) / CLOCKS_PER_SEC
              << std::endl;
      prev_t = curr_t;
    }

  timelog.close();

  // Report the final point, its value, and the known optimum.
  vectord result = camel.getFinalResult();
  std::cout << "Result: " << result << "->"
            << camel.evaluateSample(result) << std::endl;
  camel.printOptimal();

  return 0;
}
int main(int nargs, char *args[])
{
  bayesopt::Parameters par;
  if(nargs > 1){
    if(!bayesopt::utils::ParamLoader::load(args[1], par)){
        std::cout << "ERROR: provided file \"" << args[1] << "\" does not exist" << std::endl;
        return -1;
    }
  }
  else{
    par = initialize_parameters_to_default();
    par.n_iterations = 190;
    par.random_seed = 0;
    par.verbose_level = 1;
    par.noise = 1e-10;
    //bayesopt::utils::ParamLoader::save("system_opt.txt", par);
  }

    
  SystemCallsBranin branin(par);
  vectord result(2);

  branin.optimize(result);
  std::cout << "Result: " << result << "->" 
	    << branin.evaluateSample(result) << std::endl;
  branin.printOptimal();
  
  // Remove results.txt file
  std::string filename("results.txt");
  if( remove( filename.c_str() ) == 0 ){
    std::cout << "File \"" << filename << "\" successfully removed" << std::endl;
  }
  else{
    std::cout << "Error: cannot remove \"" << filename << "\" file" << std::endl; 
  }
  
  return 0;
}
int main(int nargs, char *args[])
{
  const int n = 6;             // Number of dimensions

  // Common configuration.
  // See parameters.h for the available options.
  // Starting from the defaults lets us change only a few fields.
  bopt_params par = initialize_parameters_to_default();

  par.kernel.hp_mean[0] = KERNEL_THETA;
  par.kernel.hp_std[0] = 1.0;
  par.kernel.n_hp = 1;
  par.mean.coef_mean[0] = 0.0;
  par.mean.coef_std[0] = MEAN_SIGMA;
  par.mean.n_coef = 1;
  par.noise = DEFAULT_NOISE;
  par.surr_name = "sStudentTProcessJef";
  par.n_iterations = 20;       // Number of iterations
  par.n_init_samples = 20;
  /*******************************************/

  const size_t nPoints = 1000;

  randEngine mtRandom;
  matrixd xPoints(nPoints, n);
  vecOfvec xP;

  // Visual Studio rejects runtime-sized arrays, so a literal is used there.
  // WARNING: keep the literal in sync if n or nPoints change.
  // (With n and nPoints const, nPinArr is a compile-time constant, so the
  // non-MSVC branch is a regular fixed-size array, not a VLA.)
#ifdef _MSC_VER
  double xPointsArray[6000];
#else
  const size_t nPinArr = n*nPoints;
  double xPointsArray[nPinArr];
#endif

  // Draw a Latin hypercube sample of candidate points and store it both as
  // a vector of vectors (C++ interface) and as a flat array (C interface).
  bayesopt::utils::lhs(xPoints, mtRandom);

  for (size_t i = 0; i < nPoints; ++i)
    {
      vectord point = row(xPoints, i);
      xP.push_back(point);
      for (int j = 0; j < n; ++j)   // int index: avoids signed/unsigned mix
	{
	  xPointsArray[i*n + j] = point(j);
	}
    }

  // Run C++ interface.
  std::cout << "Running C++ interface" << std::endl;
  ExampleDisc opt(xP, par);
  vectord result(n);
  opt.optimize(result);

  // Run C interface.
  std::cout << "Running C interface" << std::endl;
  double x[128], fmin[128];
  bayes_optimization_disc(n, &testFunction, NULL, xPointsArray, nPoints,
			  x, fmin, par);

  // Exhaustive search over the candidate set, to compare against what
  // the optimizers report.
  size_t min = 0;
  double minvalue = opt.evaluateSample(row(xPoints, min));
  for (size_t i = 1; i < nPoints; ++i)
    {
      vectord point = row(xPoints, i);
      const double value = opt.evaluateSample(point);
      if (value < minvalue)
	{
	  min = i;
	  minvalue = value;   // reuse the value instead of re-evaluating
	  std::cout << i << "," << minvalue << std::endl;
	}
    }

  std::cout << "Final result C++: " << result << std::endl;
  std::cout << "Final result C: (";
  for (int i = 0; i < n; i++)
    std::cout << x[i] << ", ";
  std::cout << ")" << std::endl;
  std::cout << "Optimal: " << row(xPoints, min) << std::endl;

  return 0;
}
Example #9
0
int main(int nargs, char *args[])
{
  bopt_params par = initialize_parameters_to_default();
  par.verbose_level = 0;
  par.noise = 1e-10;
  par.force_jump = 30;

  std::ofstream log;
  std::clock_t start_t;


  /* Branin */
  log.open("branin.log");
  par.n_init_samples = 5;
  par.n_iterations = 195;

  for (size_t ii = 0; ii < 10; ++ii)
    {
      par.random_seed = ii;
      BraninNormalized branin(par);
      vectord result(2);

      start_t = clock();
      branin.initializeOptimization();
      
      for (size_t jj = 0; jj < par.n_iterations; ++jj)
  	{      
  	  branin.stepOptimization();
  	  if (jj == 50)
  	    {
  	      result = branin.getFinalResult();	      
  	      log << branin.evaluateSample(result) << ", ";
  	    }
  	}
      result = branin.getFinalResult();	      
      log << branin.evaluateSample(result) << ", ";
      
      log << static_cast<double>(clock() - start_t) / static_cast<double>(CLOCKS_PER_SEC)
  	  << std::endl;
      }

  log.close();


  /* Camel */
  log.open("camel.log");
  par.n_init_samples = 5;
  par.n_iterations = 95;

  for (size_t ii = 0; ii < 10; ++ii)
    {
      par.random_seed = ii;
      ExampleCamelback camel(par);
      vectord result(2);

      vectord lb(2); lb(0) = -2; lb(1) = -1;
      vectord ub(2); ub(0) =  2; ub(1) = 1;

      camel.setBoundingBox(lb,ub);

      start_t = clock();
      camel.initializeOptimization();
      
      for (size_t jj = 0; jj < par.n_iterations; ++jj)
  	{      
  	  camel.stepOptimization();
  	  if (jj == 50)
  	    {
  	      result = camel.getFinalResult();	      
  	      log << camel.evaluateSample(result) << ", ";
  	    }
  	}
      result = camel.getFinalResult();	      
      log << camel.evaluateSample(result) << ", ";
      
      log << static_cast<double>(clock() - start_t) / static_cast<double>(CLOCKS_PER_SEC)
  	  << std::endl;
      }

  log.close();


  /* Hart */
  log.open("hart.log");
  par.n_init_samples = 10;
  par.n_iterations = 190;

  for (size_t ii = 0; ii < 10; ++ii)
    {
      par.random_seed = ii;
      ExampleHartmann6 hart(par);
      vectord result(6);

      start_t = clock();
      hart.initializeOptimization();
      
      for (size_t jj = 0; jj < par.n_iterations; ++jj)
  	{      
  	  hart.stepOptimization();
  	  if (jj == 50)
  	    {
  	      result = hart.getFinalResult();	      
  	      log << hart.evaluateSample(result) << ", ";
  	    }
  	}
      result = hart.getFinalResult();	      
      log << hart.evaluateSample(result) << ", ";
      
      log << static_cast<double>(clock() - start_t) / static_cast<double>(CLOCKS_PER_SEC)
  	  << std::endl;
      }

  log.close();


  /***********************************************************************/
  par.n_init_samples = 2;
  par.n_iter_relearn = 1;
  
  par.l_type = L_MCMC;
  par.sc_type = SC_MAP;


  /* Branin */
  log.open("branin_mcmc.log");
  par.n_iterations = 198;

  for (size_t ii = 0; ii < 10; ++ii)
    {
      par.random_seed = ii;
      BraninNormalized branin(par);
      vectord result(2);

      start_t = clock();
      branin.initializeOptimization();
      
      for (size_t jj = 0; jj < par.n_iterations; ++jj)
	{      
	  branin.stepOptimization();
	  if (jj == 50)
	    {
	      result = branin.getFinalResult();	      
	      log << branin.evaluateSample(result) << ", ";
	    }
	}
      result = branin.getFinalResult();	      
      log << branin.evaluateSample(result) << ", ";
      
      log << static_cast<double>(clock() - start_t) / static_cast<double>(CLOCKS_PER_SEC)
	  << std::endl;
      }

  log.close();


  /* Camel */
  log.open("camel_mcmc.log");
  par.n_iterations = 98;

  for (size_t ii = 0; ii < 10; ++ii)
    {
      par.random_seed = ii;
      ExampleCamelback camel(par);
      vectord result(2);

      vectord lb(2); lb(0) = -2; lb(1) = -1;
      vectord ub(2); ub(0) =  2; ub(1) = 1;

      camel.setBoundingBox(lb,ub);

      start_t = clock();
      camel.initializeOptimization();
      
      for (size_t jj = 0; jj < par.n_iterations; ++jj)
	{      
	  camel.stepOptimization();
	  if (jj == 50)
	    {
	      result = camel.getFinalResult();	      
	      log << camel.evaluateSample(result) << ", ";
	    }
	}
      result = camel.getFinalResult();	      
      log << camel.evaluateSample(result) << ", ";
      
      log << static_cast<double>(clock() - start_t) / static_cast<double>(CLOCKS_PER_SEC)
	  << std::endl;
      }

  log.close();


  /* Hart */
  log.open("hart_mcmc.log");
  par.n_iterations = 198;

  for (size_t ii = 0; ii < 10; ++ii)
    {
      par.random_seed = ii;
      ExampleHartmann6 hart(par);
      vectord result(6);

      start_t = clock();
      hart.initializeOptimization();
      
      for (size_t jj = 0; jj < par.n_iterations; ++jj)
	{      
	  hart.stepOptimization();
	  if (jj == 50)
	    {
	      result = hart.getFinalResult();	      
	      log << hart.evaluateSample(result) << ", ";
	    }
	}
      result = hart.getFinalResult();	      
      log << hart.evaluateSample(result) << ", ";
      
      log << static_cast<double>(clock() - start_t) / static_cast<double>(CLOCKS_PER_SEC)
	  << std::endl;
      }

  log.close();


  return 0;
}
    /* Converts this C++ Parameters object into the plain bopt_params C
       struct consumed by the C API.  Starts from the library defaults so
       every field not explicitly copied keeps a sane value. */
    bopt_params Parameters::generate_bopt_params()
    {
      bopt_params c_params = initialize_parameters_to_default();
      c_params.n_iterations = n_iterations;
      c_params.n_inner_iterations = n_inner_iterations;
      c_params.n_init_samples = n_init_samples;
      c_params.n_iter_relearn = n_iter_relearn;

      c_params.init_method = init_method;
      c_params.random_seed = random_seed;

      c_params.verbose_level = verbose_level;

      // File names are copied into fixed-size char buffers.
      // NOTE(review): strcpy does not bound-check; confirm the C struct's
      // buffers are large enough for these strings.
      strcpy(c_params.log_filename, log_filename.c_str());
      c_params.load_save_flag = load_save_flag;
      strcpy(c_params.load_filename, load_filename.c_str());
      strcpy(c_params.save_filename, save_filename.c_str());

      strcpy(c_params.surr_name, surr_name.c_str());
      c_params.sigma_s = sigma_s;

      c_params.noise = noise;
      c_params.alpha = alpha;
      c_params.beta = beta;

      c_params.sc_type = sc_type;

      c_params.l_type = l_type;

      c_params.l_all = l_all;

      c_params.epsilon = epsilon;
      c_params.force_jump = force_jump;

      // Kernel hyperparameter priors.
      strcpy(c_params.kernel.name, kernel.name.c_str());
      //TODO (Javier): Should it be necessary to check size?
      for(size_t i=0; i<kernel.hp_mean.size(); i++){
	c_params.kernel.hp_mean[i] = kernel.hp_mean[i];
      }
      for(size_t i=0; i<kernel.hp_std.size(); i++){
	c_params.kernel.hp_std[i] = kernel.hp_std[i];
      }
      c_params.kernel.n_hp = kernel.hp_std.size();

      // Mean function coefficients.
      strcpy(c_params.mean.name, mean.name.c_str());
      for(size_t i=0; i<mean.coef_mean.size(); i++){
	c_params.mean.coef_mean[i] = mean.coef_mean[i];
      }
      for(size_t i=0; i<mean.coef_std.size(); i++){
	c_params.mean.coef_std[i] = mean.coef_std[i];
      }
      c_params.mean.n_coef = mean.coef_std.size();

      // Flatten the input-noise matrix in row-major order.
      // BUGFIX: the stride of a row-major layout is the number of columns
      // (size2), not the number of rows (size1).  The old size1() stride
      // only produced correct indices when the matrix was square.
      for (size_t row = 0; row < input.noise_matrix.size1(); ++row)
      {
         for (size_t col = 0; col < input.noise_matrix.size2(); ++col)
         {
             c_params.input.noise[col + (row * input.noise_matrix.size2())] = input.noise_matrix(row,col);
         }
      }
      c_params.input.unscented_scale = input.unscented_scale;
      c_params.input.n_coef = input.noise_matrix.size1() * input.noise_matrix.size2();
      c_params.unscented_outcome = unscented_outcome;

      // Optimization criterion and its parameters.
      strcpy(c_params.crit_name, crit_name.c_str());
      for(size_t i=0; i<crit_params.size(); i++){
	c_params.crit_params[i] = crit_params[i];
      }
      c_params.n_crit_params = crit_params.size();

      return c_params;
    }