void WorhpInternal::init(){ // Call the init method of the base class NLPSolverInternal::init(); if (hasSetOption("Ares")) { std::vector<int> ares = getOption("Ares"); std::copy(ares.begin(),ares.begin()+NAres,worhp_p_.Ares); } // Read options passOptions(); // Exact Hessian? exact_hessian_ = getOption("UserHM"); // Get/generate required functions gradF(); jacG(); if(exact_hessian_){ // does not appear to work hessLag(); } // Update status? status_[TerminateSuccess]="TerminateSuccess"; status_[OptimalSolution]="OptimalSolution"; status_[SearchDirectionZero]="SearchDirectionZero"; status_[SearchDirectionSmall]="SearchDirectionSmall"; status_[StationaryPointFound]="StationaryPointFound"; status_[AcceptableSolution]="AcceptableSolution"; status_[AcceptablePrevious]="AcceptablePrevious"; status_[FritzJohn]="FritzJohn"; status_[NotDiffable]="NotDiffable"; status_[Unbounded]="Unbounded"; status_[FeasibleSolution]="FeasibleSolution"; status_[LowPassFilterOptimal]="LowPassFilterOptimal"; status_[LowPassFilterAcceptable]="LowPassFilterAcceptable"; status_[TerminateError]="TerminateError"; status_[InitError]="InitError"; status_[DataError]="DataError"; status_[MaxCalls]="MaxCalls"; status_[MaxIter]="MaxIter"; status_[MinimumStepsize]="MinimumStepsize"; status_[QPerror]="QPerror"; status_[ProblemInfeasible]="ProblemInfeasible"; status_[GroupsComposition]="GroupsComposition"; status_[TooBig]="TooBig"; status_[Timeout]="Timeout"; status_[FDError]="FDError"; status_[LocalInfeas]="LocalInfeas"; status_[LicenseError]="LicenseError. Please set the WORHP_LICENSE_FILE environmental variable with the full path to the license file"; status_[TerminatedByUser]="TerminatedByUser"; status_[FunctionErrorF]="FunctionErrorF"; status_[FunctionErrorG]="FunctionErrorG"; status_[FunctionErrorDF]="FunctionErrorDF"; status_[FunctionErrorDG]="FunctionErrorDG"; status_[FunctionErrorHM]="FunctionErrorHM"; }
double PolylineGenerator::calcStep(int x, int y, int i) { double (PolylineGenerator::* pCalcFunc)(double, double, int); //一个类成员函数指针变量pmf的定义 pCalcFunc = &PolylineGenerator::calculate; pair<double, double> gradient = central_difference(pCalcFunc, i); Matrix<double> gradF(2, 1); gradF.at(0, 0) = gradient.first; gradF.at(1, 0) = gradient.second; //计算Hesse矩阵 Matrix<double> hesseMat(2, 2); hesseMat.at(0, 0) = d2f_dx2(x, y, i); hesseMat.at(0, 1) = hesseMat.at(1, 0) = d2f_dxdy(x, y, i); hesseMat.at(1, 1) = d2f_dy2(x, y, i); //计算gradT*H*grad Matrix<double> fengmu = gradF.traspose() * hesseMat * gradF; return sqrt(gradient.first * gradient.first + gradient.second * gradient.second) / fengmu.at(0, 0); }
/// Initialize the SQP method: read algorithmic options, generate derivative
/// functions, allocate the QP subproblem solver and all work vectors, and
/// (for the quasi-Newton case) build the symbolic damped-BFGS update function.
void Sqpmethod::init() {
  // Call the init method of the base class
  NlpSolverInternal::init();

  // Read options
  max_iter_ = getOption("max_iter");
  max_iter_ls_ = getOption("max_iter_ls");
  c1_ = getOption("c1");                        // Armijo sufficient-decrease parameter
  beta_ = getOption("beta");                    // line-search step reduction factor
  merit_memsize_ = getOption("merit_memory");
  lbfgs_memory_ = getOption("lbfgs_memory");
  tol_pr_ = getOption("tol_pr");                // primal feasibility tolerance
  tol_du_ = getOption("tol_du");                // dual feasibility tolerance
  regularize_ = getOption("regularize");
  exact_hessian_ = getOption("hessian_approximation")=="exact";
  min_step_size_ = getOption("min_step_size");

  // Get/generate required functions
  gradF();
  jacG();
  if (exact_hessian_) {
    hessLag();
  }

  // Sparsity of the QP Hessian: exact Lagrangian Hessian sparsity, or dense
  // for the BFGS approximation. A diagonal is always added so that
  // regularization terms have somewhere to go.
  Sparsity H_sparsity = exact_hessian_ ? hessLag().output().sparsity() : Sparsity::dense(nx_, nx_);
  H_sparsity = H_sparsity + Sparsity::diag(nx_);
  // Constraint Jacobian sparsity; 0-by-nx if the problem is unconstrained.
  Sparsity A_sparsity = jacG().isNull() ? Sparsity(0, nx_) : jacG().output().sparsity();

  // QP solver options
  Dict qp_solver_options;
  if (hasSetOption("qp_solver_options")) {
    qp_solver_options = getOption("qp_solver_options");
  }

  // Allocate a QP solver
  qp_solver_ = QpSolver("qp_solver", getOption("qp_solver"),
                        make_map("h", H_sparsity, "a", A_sparsity),
                        qp_solver_options);

  // Lagrange multipliers of the NLP
  mu_.resize(ng_);
  mu_x_.resize(nx_);

  // Lagrange gradient in the next iterate
  gLag_.resize(nx_);
  gLag_old_.resize(nx_);

  // Current linearization point
  x_.resize(nx_);
  x_cand_.resize(nx_);
  x_old_.resize(nx_);

  // Constraint function value
  gk_.resize(ng_);
  gk_cand_.resize(ng_);

  // Hessian approximation
  Bk_ = DMatrix::zeros(H_sparsity);

  // Jacobian
  Jk_ = DMatrix::zeros(A_sparsity);

  // Bounds of the QP
  qp_LBA_.resize(ng_);
  qp_UBA_.resize(ng_);
  qp_LBX_.resize(nx_);
  qp_UBX_.resize(nx_);

  // QP solution
  dx_.resize(nx_);
  qp_DUAL_X_.resize(nx_);
  qp_DUAL_A_.resize(ng_);

  // Gradient of the objective
  gf_.resize(nx_);

  // Create Hessian update function (damped BFGS, Powell's modification)
  if (!exact_hessian_) {
    // Create expressions corresponding to Bk, x, x_old, gLag and gLag_old
    SX Bk = SX::sym("Bk", H_sparsity);
    SX x = SX::sym("x", input(NLP_SOLVER_X0).sparsity());
    SX x_old = SX::sym("x", x.sparsity());
    SX gLag = SX::sym("gLag", x.sparsity());
    SX gLag_old = SX::sym("gLag_old", x.sparsity());

    // Step and Lagrangian-gradient difference
    SX sk = x - x_old;
    SX yk = gLag - gLag_old;
    SX qk = mul(Bk, sk);

    // Calculating theta — Powell damping: if the curvature condition
    // yk'sk >= 0.2*sk'Bk*sk fails, blend yk towards Bk*sk so the updated
    // matrix stays positive definite.
    SX skBksk = inner_prod(sk, qk);
    SX omega = if_else(inner_prod(yk, sk) < 0.2 * inner_prod(sk, qk),
                       0.8 * skBksk / (skBksk - inner_prod(sk, yk)),
                       1);
    yk = omega * yk + (1 - omega) * qk;

    // Rank-two BFGS update: Bk+1 = Bk + yk yk'/(sk'yk) - qk qk'/(qk'sk)
    SX theta = 1. / inner_prod(sk, yk);
    SX phi = 1. / inner_prod(qk, sk);
    SX Bk_new = Bk + theta * mul(yk, yk.T()) - phi * mul(qk, qk.T());

    // Inputs of the BFGS update function
    vector<SX> bfgs_in(BFGS_NUM_IN);
    bfgs_in[BFGS_BK] = Bk;
    bfgs_in[BFGS_X] = x;
    bfgs_in[BFGS_X_OLD] = x_old;
    bfgs_in[BFGS_GLAG] = gLag;
    bfgs_in[BFGS_GLAG_OLD] = gLag_old;
    bfgs_ = SXFunction("bfgs", bfgs_in, make_vector(Bk_new));

    // Initial Hessian approximation
    B_init_ = DMatrix::eye(nx_);
  }

  // Header: print problem dimensions once at startup if requested
  if (static_cast<bool>(getOption("print_header"))) {
    userOut() << "-------------------------------------------" << endl
              << "This is casadi::SQPMethod." << endl;
    if (exact_hessian_) {
      userOut() << "Using exact Hessian" << endl;
    } else {
      userOut() << "Using limited memory BFGS Hessian approximation" << endl;
    }
    userOut() << endl
              << "Number of variables: " << setw(9) << nx_ << endl
              << "Number of constraints: " << setw(9) << ng_ << endl
              << "Number of nonzeros in constraint Jacobian: " << setw(9) << A_sparsity.nnz() << endl
              << "Number of nonzeros in Lagrangian Hessian: " << setw(9) << H_sparsity.nnz() << endl
              << endl;
  }
}
/// Initialize the IPOPT interface: free any previous instance, read user
/// options, generate derivative functions, create the IpoptApplication and
/// the TNLP callback object, forward all set options to Ipopt, and run
/// Ipopt's own initialization. The Ipopt SmartPtr objects are heap-allocated
/// and stored behind void* handles (app_, userclass_, app_sens_) so that
/// Ipopt headers need not leak into this class's header; freeIpopt() is
/// responsible for deleting them.
void IpoptInternal::init(){
  // Free existing IPOPT instance
  freeIpopt();

  // Call the init method of the base class
  NLPSolverInternal::init();

  // Read user options. Exact Hessian is the default unless the user
  // explicitly selected a different approximation.
  exact_hessian_ = !hasSetOption("hessian_approximation") || getOption("hessian_approximation")=="exact";

#ifdef WITH_SIPOPT
  if(hasSetOption("run_sens")){
    run_sens_ = getOption("run_sens")=="yes";
  } else {
    run_sens_ = false;
  }
  if(hasSetOption("compute_red_hessian")){
    compute_red_hessian_ = getOption("compute_red_hessian")=="yes";
  } else {
    compute_red_hessian_ = false;
  }
#endif // WITH_SIPOPT

  // Get/generate required functions
  gradF();
  jacG();
  if(exact_hessian_){
    hessLag();
  }

  // Start an IPOPT application. The SmartPtr itself lives on the heap so it
  // can be stashed in the opaque void* handle app_.
  Ipopt::SmartPtr<Ipopt::IpoptApplication> *app = new Ipopt::SmartPtr<Ipopt::IpoptApplication>();
  app_ = static_cast<void*>(app);
  *app = new Ipopt::IpoptApplication();

#ifdef WITH_SIPOPT
  if(run_sens_ || compute_red_hessian_){
    // Start an sIPOPT application, sharing the journalist/options of the
    // main application.
    Ipopt::SmartPtr<Ipopt::SensApplication> *app_sens = new Ipopt::SmartPtr<Ipopt::SensApplication>();
    app_sens_ = static_cast<void*>(app_sens);
    *app_sens = new Ipopt::SensApplication((*app)->Jnlst(),(*app)->Options(),(*app)->RegOptions());

    // Register sIPOPT options
    Ipopt::RegisterOptions_sIPOPT((*app)->RegOptions());
    (*app)->Options()->SetRegisteredOptions((*app)->RegOptions());
  }
#endif // WITH_SIPOPT

  // Create an Ipopt user class -- need to use Ipopt's smart pointer class,
  // since Ipopt manages the TNLP object's lifetime by reference counting.
  Ipopt::SmartPtr<Ipopt::TNLP> *userclass = new Ipopt::SmartPtr<Ipopt::TNLP>();
  userclass_ = static_cast<void*>(userclass);
  *userclass = new IpoptUserClass(this);

  if(verbose_){
    cout << "There are " << nx_ << " variables and " << ng_ << " constraints." << endl;
    if(exact_hessian_) cout << "Using exact Hessian" << endl;
    else cout << "Using limited memory Hessian approximation" << endl;
  }

  // Accumulates whether every option was accepted by Ipopt.
  bool ret = true;

  // Pass all the options to ipopt, dispatching on the declared option type.
  // The trailing 'false' disallows overwriting registered defaults silently.
  for(map<string,opt_type>::const_iterator it=ops_.begin(); it!=ops_.end(); ++it)
    if(hasSetOption(it->first)){
      GenericType op = getOption(it->first);
      switch(it->second){
        case OT_REAL:
          ret &= (*app)->Options()->SetNumericValue(it->first,op.toDouble(),false);
          break;
        case OT_INTEGER:
          ret &= (*app)->Options()->SetIntegerValue(it->first,op.toInt(),false);
          break;
        case OT_STRING:
          ret &= (*app)->Options()->SetStringValue(it->first,op.toString(),false);
          break;
        default:
          throw CasadiException("Illegal type");
      }
    }

  if (!ret) casadi_error("IpoptInternal::Init: Invalid options were detected by Ipopt.");

  // Extra initialization required by sIPOPT
  //   #ifdef WITH_SIPOPT
  //   if(run_sens_ || compute_red_hessian_){
  //     Ipopt::ApplicationReturnStatus status = (*app)->Initialize("");
  //     casadi_assert_message(status == Solve_Succeeded, "Error during IPOPT initialization");
  //   }
  //   #endif // WITH_SIPOPT

  // Initialize the IpoptApplication and process the options
  Ipopt::ApplicationReturnStatus status = (*app)->Initialize();
  casadi_assert_message(status == Solve_Succeeded, "Error during IPOPT initialization");

#ifdef WITH_SIPOPT
  if(run_sens_ || compute_red_hessian_){
    Ipopt::SmartPtr<Ipopt::SensApplication> *app_sens =
        static_cast<Ipopt::SmartPtr<Ipopt::SensApplication> *>(app_sens_);
    (*app_sens)->Initialize();
  }
#endif // WITH_SIPOPT
}