int OptCGLike::checkConvg() // check convergence
{
  // Run the standard unconstrained convergence tests in a fixed order
  // (step length, function change, relative gradient, absolute gradient)
  // and return a nonzero code identifying the FIRST test that passed,
  // or 0 if the iteration has not yet converged.  Side effects: writes a
  // human-readable message into the member buffer `mesg` and a diagnostic
  // line to `optout`.
  NLP1* nlp = nlprob();
  ColumnVector xc(nlp->getXc());

  // Test 1. step tolerance
  // Norm of the most recent step, relative to max(1, ||x||) so the test
  // is scale-aware for large iterates but absolute near the origin.
  double step_tol = tol.getStepTol();
  double snorm = stepTolNorm();
  double xnorm = Norm2(xc);
  double stol  = step_tol*max(1.0,xnorm);
  if (snorm  <= stol) {
    strcpy(mesg,"Algorithm converged - Norm of last step is less than step tolerance");
    *optout << "checkConvg: snorm = " << e(snorm,12,4)
      << "  stol = " << e(stol,12,4) << "\n";
    return 1;
  }

  // Test 2. function tolerance
  // Compares the previous function value (member `fprev`) against the
  // current one, relative to max(1, |f|).
  // NOTE(review): deltaf is signed, so this test also passes when the
  // function value INCREASED (deltaf < 0 <= rftol) — presumably callers
  // only reach here after a successful (descent) step; confirm.
  double ftol = tol.getFTol();
  double fvalue = nlp->getF();
  double rftol = ftol*max(1.0,fabs(fvalue));
  Real deltaf = fprev - fvalue;
  if (deltaf <= rftol) {
    strcpy(mesg,"Algorithm converged - Difference in successive fcn values less than tolerance");
    *optout << "checkConvg: deltaf = " << e(deltaf,12,4)
         << "  ftol = " << e(ftol,12,4) << "\n";
    return 2;
  }

  // Test 3. gradient tolerance
  // Relative gradient test: ||g|| scaled by max(1, |f|).
  ColumnVector grad(nlp->getGrad());
  double gtol = tol.getGTol();
  double rgtol = gtol*max(1.0,fabs(fvalue));
  double gnorm = Norm2(grad);
  if (gnorm <= rgtol) {
    strcpy(mesg,"Algorithm converged - Norm of gradient is less than gradient tolerance");
    *optout << "checkConvg: gnorm = " << e(gnorm,12,4)
      << "  gtol = " << e(rgtol, 12,4) << "\n";
    return 3;
  }

  // Test 4. absolute gradient tolerance
  // Only reachable when |f| > 1 (otherwise rgtol == gtol and Test 3
  // already fired).
  if (gnorm <= gtol) {
    strcpy(mesg,"Algorithm converged - Norm of gradient is less than gradient tolerance");
    *optout << "checkConvg: gnorm = " << e(gnorm,12,4)
      << "  gtol = " << e(gtol, 12,4) << "\n";
    return 4;
  }

  // Nothing to report
  return 0;
}
void OptCG::reset() // Reset parameters { NLP1* nlp = nlprob(); int n = nlp->getDim(); nlp->reset(); OptimizeClass::defaultReset(n); grad_evals = 0; }
void OptLBFGS::reset() // Reset parameters { NLP1* nlp = nlprob(); int n = nlp->getDim(); nlp->reset(); OptimizeClass::defaultReset(n); grad_evals = 0; // Still to do. Reset memM. }
//---------------------------------------------------------------------------- // Reset optimization parameters //---------------------------------------------------------------------------- void OptBCEllipsoid::reset() { NLP1* nlp = nlprob(); int n = nlp->getDim(); nlp->reset(); OptimizeClass::defaultReset(n); initial_radius = -1.0e0; xscal_flag = deepcutflag = 0; }
//------------------------------------------------------------------------ // checkConvg - check whether the distance between upper and lower bounds // is less than a prescribed threshold. //------------------------------------------------------------------------ int OptBCEllipsoid::checkConvg() { NLP1 *nlp = nlprob(); SerialDenseVector<int,double> xc(nlp->getXc()); double fvalue = nlp->getF(); double ftol = tol.getFTol(); double delta; fval_upbound = min(fval_upbound,fvalue); delta = fabs(fval_upbound - fval_lowbound); if (delta <= ftol) { strcpy(mesg,"Algorithm converged - Difference in successive fcn values less than tolerance"); ret_code = 2; setReturnCode(ret_code); return 1; } else return 0; }
//---------------------------------------------------------------------------- // read an input file called opt.input // - A VERY simple routine for reading the optimization parameters // We should really make this more general, but as a first pass this // will have to do. The input file should be of the form keyword = value // where keyword is one of the following // max_iter = 100 // max_feval = 1000 // grad_tol = 1.e-6 // fcn_tol = 1.e-9 // fcn_accrcy = 1.e-9 //---------------------------------------------------------------------------- void OptBCEllipsoid::readOptInput() { NLP1 *nlp = nlprob(); int index, max_iter, max_feval; real grad_tol, fcn_tol, fcn_accrcy; char token[80], ignore[80], equals[1]; // Keywords allowed string keyword; string cfcn_accrcy("fcn_accrcy"); string cfcn_tol("fcn_tol"); string cgrad_tol("grad_tol"); string cmaxfeval("maxfeval"); string cmaxiter("maxiter"); // Default name of input file const char *opt_input = {"opt.input"}; // Open opt.input file and check to see if we succeeded ifstream optin(opt_input); if (!optin.rdbuf()->is_open()) { *optout << "ReadOptInput: No opt.input file found\n"; *optout << "ReadOptInput: Default values will be used\n"; return; } *optout << "ReadOptInput: Reading opt.input file\n"; fcn_tol = tol.getFTol(); grad_tol = tol.getGTol(); max_feval = tol.getMaxFeval(); max_iter = tol.getMaxIter(); while ((optin >> token)) { keyword = token; if (keyword == cfcn_accrcy) { optin >> equals >> index >> fcn_accrcy; nlp->setFcnAccrcy(index, fcn_accrcy); } else if (keyword == cfcn_tol) {
//---------------------------------------------------------------------------- // Print message to the output file //---------------------------------------------------------------------------- void OptBCEllipsoid::printStatus(char *s) { NLP1 *nlp = nlprob(); if (deepcutflag == 1) strcpy(method,"The Ellipsoid method with deep cut"); else strcpy(method,"The Ellipsoid method "); *optout << "\n\n========= " << s << " ===========\n\n"; *optout << "Optimization method = " << method << "\n"; *optout << "Dimension of the problem = " << nlp->getDim() << "\n"; *optout << "Return code = " << ret_code << " (" << mesg << ")\n"; *optout << "No. iterations taken = " << iter_taken << "\n"; *optout << "No. function evaluations = " << nlp->getFevals() << "\n"; *optout << "No. gradient evaluations = " << nlp->getGevals() << "\n"; tol.printTol(optout); nlp->fPrintState(optout, s); }
void OptLBFGS::readOptInput() // Read opt.input file if it exists { NLP1* nlp = nlprob(); /* A VERY simple routine for reading the optimization parameters * We should really make this more general, but as a first pass this * will have to do. * * The input file should be of the form keyword = value * where keyword is one of the following * * search = trustregion * diff_option = forward * max_iter = 100 * maxfeval = 1000 * grad_tol = 1.e-6 * fcn_tol = 1.e-9 * max_step = 100.0 * fcn_accrcy = 1.e-9 * */ int index, max_iter, max_feval, backtrack_iter; real grad_tol, fcn_tol, max_step, fcn_accrcy, backtrack_tol; char token[80], ignore[80], equals[1]; // // Keywords allowed // string keyword; string cdebug("debug"); string cdiff_option("diff_option"); string cfcn_accrcy("fcn_accrcy"); string cfcn_tol("fcn_tol"); string cgrad_tol("grad_tol"); string cmaxfeval("maxfeval"); string cmaxiter("max_iter"); string cmax_step("max_step"); string csearch("search"); string cbacktrack_iter("backtrack_iter"); string cbacktrack_tol("backtrack_tol"); string diff_option, debug_flag; string search; SearchStrategy s = TrustRegion; int keyword_count = 0; // // Default name of input file // const char *opt_input = {"opt.input"}; // // Open opt.input file and check to see if we succeeded // ifstream optin(opt_input); if (!optin.rdbuf()->is_open()) { if (debug_) { *optout << "OptLBFGS::ReadOptInput: No opt.input file found\n"; *optout << "OptLBFGS::ReadOptInput: Default values will be used\n"; } return; } if (debug_) *optout << "OptLBFGS::ReadOptInput: Reading opt.input file\n"; optin >> token; *optout << "\n\n====== Summary of input file ======\n\n"; while (!optin.eof()) { keyword = token; keyword_count++; if (keyword == cdiff_option) { optin >> equals >> token; diff_option = token; if ( diff_option == "forward") nlp->setDerivOption(ForwardDiff); else if ( diff_option == "backward") nlp->setDerivOption(BackwardDiff); else if ( diff_option == "central") nlp->setDerivOption(CentralDiff); *optout 
<< cdiff_option << " = " << diff_option << "\n"; } else if (keyword == cdebug) {
//----------------------------------------------------------------------------
// Given a nonlinear operator nlp find the minimizer using a
//----------------------------------------------------------------------------
// Main ellipsoid iteration: initialize via initOpt(), build the initial
// ellipsoid matrix A (radius^2 on the diagonal, per-coordinate scaled),
// move the start point into the feasible region if necessary, then loop:
// one half-space cut per iteration (deep cut on infeasibility), update x
// and f, and test convergence.  Works in SCALED coordinates internally
// (xc = x / xscale); the unscaled point xs is what gets set on the nlp.
void OptBCEllipsoid::optimize()
{
  NLP1* nlp = nlprob();
  int convgd = 0;
  int i,n = nlp->getDim(), step_type;
  SerialDenseVector<int,double> xc(nlp->getXc().length()),xscale(getXScale().length()),xs(n);
  xc = nlp->getXc();
  xscale = getXScale();
  double psi, dtmp;

  // Read input file and output initial message to file
  initOpt();

  if (ret_code == 0) {
    iter_taken = 0;

    // Initialize convergence test variables
    fval_lowbound = -FLT_MAX;
    fval_upbound = FLT_MAX;

    // Initialize the A matrix: diagonal radius^2 / xscale_i^2, i.e. a
    // sphere of initial_radius in scaled coordinates.
    SerialSymDenseMatrix<int,double> A(n);
    if (xscal_flag != 1) {xscale.resize(n); xscale = 1.0;}  // no user scaling => unit scale
    dtmp = initial_radius * initial_radius;
    A = 0.0;
    for (i=0; i<n; i++) A(i,i) = dtmp / (xscale(i) * xscale(i));

    // scale the initial guess (if scaling is desired)
    for (i=0; i<n; i++) xc(i) = xc(i) / xscale(i);

    // declare other vectors used in the iterations
    // NOTE(review): ghk/aghk/aghkscal are not referenced in this body —
    // presumably leftovers; confirm before removing.
    SerialDenseVector<int,double> ghk(n), aghk(n), aghkscal(n);

    // assuming that the function has been evaluated, get the value
    fprev = nlp->getF();

    // Move the initial guess into the feasible region, if needed
    // (psi > 0 signals constraint violation per computeFeasibility).
    for (i=0; i<n; i++) xs(i) = xc(i) * xscale(i);
    psi = computeFeasibility(xs);
    if (psi > 0.0) infeasibilityStep(xc,A,psi);

    while (convgd == 0) {
      iter_taken++;
      //*optout << " **** OptBCEllipsoid : iteration count = "
      //        << iter_taken << "\n";

      // put away the last solution to prepare for current iteration
      xprev = nlp->getXc();

      // perform one ellipsoid iteration (xc,A changed upon return)
      step_type = halfSpaceStep(xc,A,psi);

      // if the next solution is infeasible, do deep cut
      if (step_type == -1) infeasibilityStep(xc,A,psi);

      // update solution (unscale back) and update function value
      for (i=0; i<n; i++) xs(i) = xc(i) * xscale(i);
      nlp->setX(xs);
      fprev = nlp->evalF();

      // check convergence
      acceptStep(iter_taken, 0);
      convgd = checkConvg();

      // debug information - volume of ellipsoid
      //logdeterminant = A.LogDeterminant();
      //dtmp = 0.5 * n + 1.0;
      //determinant = sqrt(logdeterminant.Value()) * pow(pi,dtmp-1.0)
      //              / ComputeGamma(dtmp);
      //*optout << "Volume of current ellipsoid = " << determinant << "\n";
    }
  }
}
//------------------------------------------------------------------------ // Initialization subroutine //------------------------------------------------------------------------ void OptBCEllipsoid::initOpt() { NLP1 *nlp = nlprob(); int i,n = nlp->getDim(); double dtmp=0.0; time_t t; char *c; // Get date and print out header t = time(NULL); c = asctime(localtime(&t)); *optout << "**********************************************************\n"; *optout << "OPT++ version " << OPT_GLOBALS::OPT_VERSION << "\n"; *optout << "Job run at " << c << "\n"; copyright(); *optout << "**********************************************************\n"; // Read in OPT++ input file if it exists. Be aware that anything in // the input file will override any variable set so far nlp->initFcn(); SerialDenseVector<int,double> xc(nlp->getXc().length()); xc = nlp->getXc(); readOptInput(); if (debug_) nlp->setDebug(); ret_code = 0; if(nlp->hasConstraints()){ CompoundConstraint* constraints = nlp->getConstraints(); SerialDenseVector<int,double> xstart(nlp->getXc().length()); xstart = nlp->getXc(); double feas_tol = tol.getCTol(); bool feasible = constraints->amIFeasible(xstart, feas_tol); if (!feasible) { *optout << "OptBCEllipsoid WARNING: Initial guess not feasible.\n" << "Ellipsoid may be unable to make progress." << endl; } } if (ret_code == 0) { nlp->evalF(); // if initial radius of ellipsoid is not specified, set it to something if (initial_radius < 0.0e0) { for (i=1; i<=n; i++) dtmp = max(dtmp, fabs(xc(i))); initial_radius = 1.0e1 * dtmp + 1.0e5; } *optout << "\n Iter F(x) Steplength " << "fevals gevals\n\n"; if(debug_) *optout << "Radius of initial ellipsoid = " << initial_radius << "\n"; } }