//------------------------------------------------------------------------ // Initialization subroutine //------------------------------------------------------------------------ void OptBCEllipsoid::initOpt() { NLP1 *nlp = nlprob(); int i,n = nlp->getDim(); double dtmp=0.0; time_t t; char *c; // Get date and print out header t = time(NULL); c = asctime(localtime(&t)); *optout << "**********************************************************\n"; *optout << "OPT++ version " << OPT_GLOBALS::OPT_VERSION << "\n"; *optout << "Job run at " << c << "\n"; copyright(); *optout << "**********************************************************\n"; // Read in OPT++ input file if it exists. Be aware that anything in // the input file will override any variable set so far nlp->initFcn(); SerialDenseVector<int,double> xc(nlp->getXc().length()); xc = nlp->getXc(); readOptInput(); if (debug_) nlp->setDebug(); ret_code = 0; if(nlp->hasConstraints()){ CompoundConstraint* constraints = nlp->getConstraints(); SerialDenseVector<int,double> xstart(nlp->getXc().length()); xstart = nlp->getXc(); double feas_tol = tol.getCTol(); bool feasible = constraints->amIFeasible(xstart, feas_tol); if (!feasible) { *optout << "OptBCEllipsoid WARNING: Initial guess not feasible.\n" << "Ellipsoid may be unable to make progress." << endl; } } if (ret_code == 0) { nlp->evalF(); // if initial radius of ellipsoid is not specified, set it to something if (initial_radius < 0.0e0) { for (i=1; i<=n; i++) dtmp = max(dtmp, fabs(xc(i))); initial_radius = 1.0e1 * dtmp + 1.0e5; } *optout << "\n Iter F(x) Steplength " << "fevals gevals\n\n"; if(debug_) *optout << "Radius of initial ellipsoid = " << initial_radius << "\n"; } }
int OptCGLike::checkConvg() // check convergence { NLP1* nlp = nlprob(); ColumnVector xc(nlp->getXc()); // Test 1. step tolerance double step_tol = tol.getStepTol(); double snorm = stepTolNorm(); double xnorm = Norm2(xc); double stol = step_tol*max(1.0,xnorm); if (snorm <= stol) { strcpy(mesg,"Algorithm converged - Norm of last step is less than step tolerance"); *optout << "checkConvg: snorm = " << e(snorm,12,4) << " stol = " << e(stol,12,4) << "\n"; return 1; } // Test 2. function tolerance double ftol = tol.getFTol(); double fvalue = nlp->getF(); double rftol = ftol*max(1.0,fabs(fvalue)); Real deltaf = fprev - fvalue; if (deltaf <= rftol) { strcpy(mesg,"Algorithm converged - Difference in successive fcn values less than tolerance"); *optout << "checkConvg: deltaf = " << e(deltaf,12,4) << " ftol = " << e(ftol,12,4) << "\n"; return 2; } // Test 3. gradient tolerance ColumnVector grad(nlp->getGrad()); double gtol = tol.getGTol(); double rgtol = gtol*max(1.0,fabs(fvalue)); double gnorm = Norm2(grad); if (gnorm <= rgtol) { strcpy(mesg,"Algorithm converged - Norm of gradient is less than gradient tolerance"); *optout << "checkConvg: gnorm = " << e(gnorm,12,4) << " gtol = " << e(rgtol, 12,4) << "\n"; return 3; } // Test 4. absolute gradient tolerance if (gnorm <= gtol) { strcpy(mesg,"Algorithm converged - Norm of gradient is less than gradient tolerance"); *optout << "checkConvg: gnorm = " << e(gnorm,12,4) << " gtol = " << e(gtol, 12,4) << "\n"; return 4; } // Nothing to report return 0; }
//------------------------------------------------------------------------ // checkConvg - check whether the distance between upper and lower bounds // is less than a prescribed threshold. //------------------------------------------------------------------------ int OptBCEllipsoid::checkConvg() { NLP1 *nlp = nlprob(); SerialDenseVector<int,double> xc(nlp->getXc()); double fvalue = nlp->getF(); double ftol = tol.getFTol(); double delta; fval_upbound = min(fval_upbound,fvalue); delta = fabs(fval_upbound - fval_lowbound); if (delta <= ftol) { strcpy(mesg,"Algorithm converged - Difference in successive fcn values less than tolerance"); ret_code = 2; setReturnCode(ret_code); return 1; } else return 0; }
//----------------------------------------------------------------------------
// Given a nonlinear operator nlp find the minimizer using a
// bound-constrained ellipsoid method: maintain an ellipsoid (center xc,
// shape matrix A) guaranteed to contain the minimizer, and shrink it each
// iteration with a half-space cut (or a deep/infeasibility cut when the
// center violates the bounds) until checkConvg() reports convergence.
//----------------------------------------------------------------------------
void OptBCEllipsoid::optimize()
{
  NLP1* nlp = nlprob();
  int convgd = 0;
  int i,n = nlp->getDim(), step_type;
  SerialDenseVector<int,double> xc(nlp->getXc().length()),xscale(getXScale().length()),xs(n);
  xc = nlp->getXc();
  xscale = getXScale();
  double psi, dtmp;

  // Read input file and output initial message to file
  initOpt();

  if (ret_code == 0) {
    iter_taken = 0;

    // Initialize convergence test variables
    fval_lowbound = -FLT_MAX;
    fval_upbound = FLT_MAX;

    // Initialize the A matrix: a sphere of radius initial_radius in the
    // scaled coordinates, i.e. A = diag(r^2 / xscale_i^2).
    SerialSymDenseMatrix<int,double> A(n);
    // If no user scaling was requested (xscal_flag != 1), use unit scaling.
    if (xscal_flag != 1) {xscale.resize(n); xscale = 1.0;}
    dtmp = initial_radius * initial_radius;
    A = 0.0;
    for (i=0; i<n; i++) A(i,i) = dtmp / (xscale(i) * xscale(i));

    // scale the initial guess (if scaling is desired); iterations below
    // work on the scaled xc and unscale into xs before touching the NLP.
    for (i=0; i<n; i++) xc(i) = xc(i) / xscale(i);

    // declare other vectors used in the iterations
    SerialDenseVector<int,double> ghk(n), aghk(n), aghkscal(n);

    // assuming that the function has been evaluated (by initOpt), get the value
    fprev = nlp->getF();

    // Move the initial guess into the feasible region, if needed.
    // psi > 0 measures the bound violation of the (unscaled) point.
    for (i=0; i<n; i++) xs(i) = xc(i) * xscale(i);
    psi = computeFeasibility(xs);
    if (psi > 0.0) infeasibilityStep(xc,A,psi);

    while (convgd == 0) {
      iter_taken++;
      //*optout << " **** OptBCEllipsoid : iteration count = "
      //        << iter_taken << "\n";

      // put away the last solution to prepare for current iteration
      xprev = nlp->getXc();

      // perform one ellipsoid iteration (xc,A changed upon return)
      step_type = halfSpaceStep(xc,A,psi);

      // if the next solution is infeasible, do deep cut
      if (step_type == -1) infeasibilityStep(xc,A,psi);

      // update solution (unscaled) and update function value
      for (i=0; i<n; i++) xs(i) = xc(i) * xscale(i);
      nlp->setX(xs);
      fprev = nlp->evalF();

      // check convergence
      acceptStep(iter_taken, 0);
      convgd = checkConvg();

      // debug information - volume of ellipsoid
      //logdeterminant = A.LogDeterminant();
      //dtmp = 0.5 * n + 1.0;
      //determinant = sqrt(logdeterminant.Value()) * pow(pi,dtmp-1.0)
      //              / ComputeGamma(dtmp);
      //*optout << "Volume of current ellipsoid = " << determinant << "\n";
    }
  }
}