/**
 * Initializes the GP-Hedge meta-criterion with a process and a set of
 * sub-criteria.
 *
 * @param proc  surrogate process the criteria are evaluated against
 *              (stored, not owned).
 * @param list  criteria to be combined by the hedging algorithm
 *              (pointers are copied; ownership stays with the caller).
 * @return 0 on success.
 */
int GP_Hedge::init(NonParametricProcess *proc,
                   const std::vector<Criteria*>& list)
{
  mProc = proc;
  mCriteriaList = list;

  const size_t n = mCriteriaList.size();

  // Reset the hedging bookkeeping (one slot per criterion) to all zeros.
  // NOTE(review): unlike the single-argument overload, an empty list is
  // accepted silently here — presumably for backward compatibility with
  // callers checking the return code; confirm before tightening.
  loss_ = zvectord(n);
  gain_ = zvectord(n);
  prob_ = zvectord(n);
  cumprob_ = zvectord(n);
  return 0;
}
Example #2
0
/// Initializes the GP-Hedge meta-criterion against a surrogate process.
/// The criteria list must have been populated beforehand; an empty list
/// makes the combined criterion meaningless, so it is rejected.
///
/// @param proc  surrogate process used by the sub-criteria (stored, not owned)
/// @throws std::logic_error if no criteria have been pushed yet
void GP_Hedge::init(NonParametricProcess *proc)
{
    mProc = proc;

    const size_t nCriteria = mCriteriaList.size();
    if (nCriteria == 0)
    {
        throw std::logic_error("Criteria list should be created (pushed)"
                               " before initializing combined criterion.");
    }

    // Zero the hedging bookkeeping vectors, one entry per criterion.
    loss_    = zvectord(nCriteria);
    gain_    = zvectord(nCriteria);
    prob_    = zvectord(nCriteria);
    cumprob_ = zvectord(nCriteria);
}
 /**
  * Constructor for gradient-enabled (RGB) optimization problems.
  *
  * @param rgbo  objective providing value and gradient (wrapped, not owned
  *              directly; the wrapper is heap-allocated here)
  * @param dim   dimensionality of the search space
  */
 NLOPT_Optimization::NLOPT_Optimization(RGBOptimizable* rgbo, size_t dim):
   mDown(dim), mUp(dim)
 {
   // Only the gradient-enabled wrapper is used; the plain (gradient-free)
   // wrapper slot is left unset.
   rbobj = NULL;
   rgbobj = new RGBOptimizableWrapper(rgbo);

   // Defaults: DIRECT global search, capped at the library-wide budget.
   alg = DIRECT;
   maxEvals = MAX_INNER_EVALUATIONS;

   // Default box bounds: the unit hypercube [0,1]^dim.
   setLimits(zvectord(dim), svectord(dim, 1.0));
 }
Example #4
0
int main()
{
  randEngine reng;
  Posterior post;
  bayesopt::MCMCSampler sampler(&post,2,reng);
  vectord x = zvectord(2);
  sampler.run(x);
  sampler.printParticles();

  return 0;
}
  double NLOPT_Optimization::evaluate_nlopt_grad (unsigned int n, const double *x,
						  double *grad, void *my_func_data)

  {
    vectord vx(n);
    std::copy(x,x+n,vx.begin());
    
    void *objPointer = my_func_data;
    RGBOptimizableWrapper* OPTIMIZER = static_cast<RGBOptimizableWrapper*>(objPointer);
    

    vectord vgrad = zvectord(n);
    double f =  OPTIMIZER->evaluate(vx,vgrad);
    if (grad && n)  std::copy(vgrad.begin(),vgrad.end(),grad);

    return f;
  } /* evaluate_criteria_nlopt */